Bump github.com/hashicorp/terraform-plugin-sdk/v2 from 2.20.0 to 2.24.1

Bumps [github.com/hashicorp/terraform-plugin-sdk/v2](https://github.com/hashicorp/terraform-plugin-sdk) from 2.20.0 to 2.24.1.
- [Release notes](https://github.com/hashicorp/terraform-plugin-sdk/releases)
- [Changelog](https://github.com/hashicorp/terraform-plugin-sdk/blob/main/CHANGELOG.md)
- [Commits](https://github.com/hashicorp/terraform-plugin-sdk/compare/v2.20.0...v2.24.1)

---
updated-dependencies:
- dependency-name: github.com/hashicorp/terraform-plugin-sdk/v2
  dependency-type: direct:production
  update-type: version-update:semver-minor
...

Signed-off-by: dependabot[bot] <support@github.com>
Authored by dependabot[bot] on 2022-12-24 16:57:19 +00:00, committed by Tobias Trabelsi
parent 683a051502
commit 282cd097f9
Signed by: lerentis (GPG Key ID: FF0C2839718CAF2E)
195 changed files with 3914 additions and 3093 deletions

go.mod

@@ -5,7 +5,7 @@ go 1.18
 require (
 	code.gitea.io/sdk/gitea v0.15.1
 	github.com/hashicorp/terraform-plugin-docs v0.13.0
-	github.com/hashicorp/terraform-plugin-sdk/v2 v2.20.0
+	github.com/hashicorp/terraform-plugin-sdk/v2 v2.24.1
 )

 require (
@@ -19,7 +19,7 @@ require (
 	github.com/davecgh/go-spew v1.1.1 // indirect
 	github.com/fatih/color v1.13.0 // indirect
 	github.com/golang/protobuf v1.5.2 // indirect
-	github.com/google/go-cmp v0.5.8 // indirect
+	github.com/google/go-cmp v0.5.9 // indirect
 	github.com/google/uuid v1.3.0 // indirect
 	github.com/hashicorp/errwrap v1.1.0 // indirect
 	github.com/hashicorp/go-checkpoint v0.5.0 // indirect
@@ -27,15 +27,15 @@ require (
 	github.com/hashicorp/go-cty v1.4.1-0.20200414143053-d3edf31b6320 // indirect
 	github.com/hashicorp/go-hclog v1.2.1 // indirect
 	github.com/hashicorp/go-multierror v1.1.1 // indirect
-	github.com/hashicorp/go-plugin v1.4.4 // indirect
+	github.com/hashicorp/go-plugin v1.4.6 // indirect
 	github.com/hashicorp/go-uuid v1.0.3 // indirect
 	github.com/hashicorp/go-version v1.6.0 // indirect
 	github.com/hashicorp/hc-install v0.4.0 // indirect
-	github.com/hashicorp/hcl/v2 v2.13.0 // indirect
+	github.com/hashicorp/hcl/v2 v2.15.0 // indirect
 	github.com/hashicorp/logutils v1.0.0 // indirect
-	github.com/hashicorp/terraform-exec v0.17.2 // indirect
+	github.com/hashicorp/terraform-exec v0.17.3 // indirect
 	github.com/hashicorp/terraform-json v0.14.0 // indirect
-	github.com/hashicorp/terraform-plugin-go v0.12.0 // indirect
+	github.com/hashicorp/terraform-plugin-go v0.14.1 // indirect
 	github.com/hashicorp/terraform-plugin-log v0.7.0 // indirect
 	github.com/hashicorp/terraform-registry-address v0.0.0-20220623143253-7d51757b572c // indirect
 	github.com/hashicorp/terraform-svchost v0.0.0-20200729002733-f050f53b9734 // indirect
@@ -58,13 +58,13 @@ require (
 	github.com/vmihailenco/msgpack v4.0.4+incompatible // indirect
 	github.com/vmihailenco/msgpack/v4 v4.3.12 // indirect
 	github.com/vmihailenco/tagparser v0.1.1 // indirect
-	github.com/zclconf/go-cty v1.10.0 // indirect
+	github.com/zclconf/go-cty v1.12.1 // indirect
 	golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d // indirect
 	golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2 // indirect
 	golang.org/x/sys v0.0.0-20220627191245-f75cf1eec38b // indirect
 	golang.org/x/text v0.3.7 // indirect
 	google.golang.org/appengine v1.6.6 // indirect
 	google.golang.org/genproto v0.0.0-20200711021454-869866162049 // indirect
-	google.golang.org/grpc v1.48.0 // indirect
-	google.golang.org/protobuf v1.28.0 // indirect
+	google.golang.org/grpc v1.50.1 // indirect
+	google.golang.org/protobuf v1.28.1 // indirect
 )

go.sum

@ -22,7 +22,6 @@ github.com/acomagu/bufpipe v1.0.3/go.mod h1:mxdxdup/WdsKVreO5GpW4+M/1CE2sMG4jeGJ
github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE= github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE=
github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/apparentlymart/go-dump v0.0.0-20190214190832-042adf3cf4a0 h1:MzVXffFUye+ZcSR6opIgz9Co7WcDx6ZcY+RjfFHoA0I= github.com/apparentlymart/go-dump v0.0.0-20190214190832-042adf3cf4a0 h1:MzVXffFUye+ZcSR6opIgz9Co7WcDx6ZcY+RjfFHoA0I=
github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk=
github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec= github.com/apparentlymart/go-textseg/v12 v12.0.0/go.mod h1:S/4uRK2UtaQttw1GenVJEynmyUenKwP++x/+DdGV/Ec=
@ -35,32 +34,20 @@ github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkY
github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY= github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnhtbo57+VJT5O0JRkEi1Wbu0z5j0R8u5Hbk=
github.com/cncf/udpa/go v0.0.0-20210930031921-04548b0d99d4/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=
github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg= github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg=
github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o= github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o=
github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98=
github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.10.2-0.20220325020618-49ff273808a1/go.mod h1:KJwIaB5Mv44NWtYuAOFCVOjcI94vtpEz2JU/D2v6IjE=
github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w= github.com/fatih/color v1.13.0 h1:8LOYc1KYPPmyKMuN8QV2DNRWNbLo6LZ0iLs8+mlH53w=
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk= github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc=
github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE= github.com/frankban/quicktest v1.14.3 h1:FJKSZTDHjyhriyC81FLQ0LY93eSai0ZyR/ZIkd3ZUKE=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0=
github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4= github.com/go-git/gcfg v1.5.0 h1:Q5ViNfGF8zFgyJWPqYwA7qGFoMTEiBmdlkcfRmpIMa4=
github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E= github.com/go-git/gcfg v1.5.0/go.mod h1:5m20vg6GwYabIxaOonVkTdrILxQMpEShl1xiMF4ua+E=
@ -77,7 +64,6 @@ github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5y
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.3.3/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw=
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
@ -85,8 +71,6 @@ github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrU
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8= github.com/golang/protobuf v1.4.1/go.mod h1:U8fpvMrcmy5pZrNK1lt4xCsGvpyWQ/VVv6QDs8UjoX8=
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw= github.com/golang/protobuf v1.5.2 h1:ROPKBNFfQgOUMifHyP+KYbvpjbdoFNs+aK7DXlji0Tw=
github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY=
@ -94,16 +78,14 @@ github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5a
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg=
github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I= github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I=
github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4=
@ -120,8 +102,8 @@ github.com/hashicorp/go-hclog v1.2.1/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVH
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo=
github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM=
github.com/hashicorp/go-plugin v1.4.4 h1:NVdrSdFRt3SkZtNckJ6tog7gbpRrcbOjQi/rgF7JYWQ= github.com/hashicorp/go-plugin v1.4.6 h1:MDV3UrKQBM3du3G7MApDGvOsMYy3JQJ4exhSoKBAeVA=
github.com/hashicorp/go-plugin v1.4.4/go.mod h1:viDMjcLJuDui6pXb8U4HVfb8AamCWhHGUjr2IrTF67s= github.com/hashicorp/go-plugin v1.4.6/go.mod h1:viDMjcLJuDui6pXb8U4HVfb8AamCWhHGUjr2IrTF67s=
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8= github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
@ -132,22 +114,22 @@ github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mO
github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA=
github.com/hashicorp/hc-install v0.4.0 h1:cZkRFr1WVa0Ty6x5fTvL1TuO1flul231rWkGH92oYYk= github.com/hashicorp/hc-install v0.4.0 h1:cZkRFr1WVa0Ty6x5fTvL1TuO1flul231rWkGH92oYYk=
github.com/hashicorp/hc-install v0.4.0/go.mod h1:5d155H8EC5ewegao9A4PUTMNPZaq+TbOzkJJZ4vrXeI= github.com/hashicorp/hc-install v0.4.0/go.mod h1:5d155H8EC5ewegao9A4PUTMNPZaq+TbOzkJJZ4vrXeI=
github.com/hashicorp/hcl/v2 v2.13.0 h1:0Apadu1w6M11dyGFxWnmhhcMjkbAiKCv7G1r/2QgCNc= github.com/hashicorp/hcl/v2 v2.15.0 h1:CPDXO6+uORPjKflkWCCwoWc9uRp+zSIPcCQ+BrxV7m8=
github.com/hashicorp/hcl/v2 v2.13.0/go.mod h1:e4z5nxYlWNPdDSNYX+ph14EvWYMFm3eP0zIUqPc2jr0= github.com/hashicorp/hcl/v2 v2.15.0/go.mod h1:JRmR89jycNkrrqnMmvPDMd56n1rQJ2Q6KocSLCMCXng=
github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y= github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y=
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64= github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
github.com/hashicorp/terraform-exec v0.17.2 h1:EU7i3Fh7vDUI9nNRdMATCEfnm9axzTnad8zszYZ73Go= github.com/hashicorp/terraform-exec v0.17.3 h1:MX14Kvnka/oWGmIkyuyvL6POx25ZmKrjlaclkx3eErU=
github.com/hashicorp/terraform-exec v0.17.2/go.mod h1:tuIbsL2l4MlwwIZx9HPM+LOV9vVyEfBYu2GsO1uH3/8= github.com/hashicorp/terraform-exec v0.17.3/go.mod h1:+NELG0EqQekJzhvikkeQsOAZpsw0cv/03rbeQJqscAI=
github.com/hashicorp/terraform-json v0.14.0 h1:sh9iZ1Y8IFJLx+xQiKHGud6/TSUCM0N8e17dKDpqV7s= github.com/hashicorp/terraform-json v0.14.0 h1:sh9iZ1Y8IFJLx+xQiKHGud6/TSUCM0N8e17dKDpqV7s=
github.com/hashicorp/terraform-json v0.14.0/go.mod h1:5A9HIWPkk4e5aeeXIBbkcOvaZbIYnAIkEyqP2pNSckM= github.com/hashicorp/terraform-json v0.14.0/go.mod h1:5A9HIWPkk4e5aeeXIBbkcOvaZbIYnAIkEyqP2pNSckM=
github.com/hashicorp/terraform-plugin-docs v0.13.0 h1:6e+VIWsVGb6jYJewfzq2ok2smPzZrt1Wlm9koLeKazY= github.com/hashicorp/terraform-plugin-docs v0.13.0 h1:6e+VIWsVGb6jYJewfzq2ok2smPzZrt1Wlm9koLeKazY=
github.com/hashicorp/terraform-plugin-docs v0.13.0/go.mod h1:W0oCmHAjIlTHBbvtppWHe8fLfZ2BznQbuv8+UD8OucQ= github.com/hashicorp/terraform-plugin-docs v0.13.0/go.mod h1:W0oCmHAjIlTHBbvtppWHe8fLfZ2BznQbuv8+UD8OucQ=
github.com/hashicorp/terraform-plugin-go v0.12.0 h1:6wW9mT1dSs0Xq4LR6HXj1heQ5ovr5GxXNJwkErZzpJw= github.com/hashicorp/terraform-plugin-go v0.14.1 h1:cwZzPYla82XwAqpLhSzdVsOMU+6H29tczAwrB0z9Zek=
github.com/hashicorp/terraform-plugin-go v0.12.0/go.mod h1:kwhmaWHNDvT1B3QiSJdAtrB/D4RaKSY/v3r2BuoWK4M= github.com/hashicorp/terraform-plugin-go v0.14.1/go.mod h1:Bc/K6K26BQ2FHqIELPbpKtt2CzzbQou+0UQF3/0NsCQ=
github.com/hashicorp/terraform-plugin-log v0.7.0 h1:SDxJUyT8TwN4l5b5/VkiTIaQgY6R+Y2BQ0sRZftGKQs= github.com/hashicorp/terraform-plugin-log v0.7.0 h1:SDxJUyT8TwN4l5b5/VkiTIaQgY6R+Y2BQ0sRZftGKQs=
github.com/hashicorp/terraform-plugin-log v0.7.0/go.mod h1:p4R1jWBXRTvL4odmEkFfDdhUjHf9zcs/BCoNHAc7IK4= github.com/hashicorp/terraform-plugin-log v0.7.0/go.mod h1:p4R1jWBXRTvL4odmEkFfDdhUjHf9zcs/BCoNHAc7IK4=
github.com/hashicorp/terraform-plugin-sdk/v2 v2.20.0 h1:+KxZULPsbjpAVoP0WNj/8aVW6EqpcX5JcUcQ5wl7Da4= github.com/hashicorp/terraform-plugin-sdk/v2 v2.24.1 h1:zHcMbxY0+rFO9gY99elV/XC/UnQVg7FhRCbj1i5b7vM=
github.com/hashicorp/terraform-plugin-sdk/v2 v2.20.0/go.mod h1:DwGJG3KNxIPluVk6hexvDfYR/MS/eKGpiztJoT3Bbbw= github.com/hashicorp/terraform-plugin-sdk/v2 v2.24.1/go.mod h1:+tNlb0wkfdsDJ7JEiERLz4HzM19HyiuIoGzTsM7rPpw=
github.com/hashicorp/terraform-registry-address v0.0.0-20220623143253-7d51757b572c h1:D8aRO6+mTqHfLsK/BC3j5OAoogv1WLRWzY1AaTo3rBg= github.com/hashicorp/terraform-registry-address v0.0.0-20220623143253-7d51757b572c h1:D8aRO6+mTqHfLsK/BC3j5OAoogv1WLRWzY1AaTo3rBg=
github.com/hashicorp/terraform-registry-address v0.0.0-20220623143253-7d51757b572c/go.mod h1:Wn3Na71knbXc1G8Lh+yu/dQWWJeFQEpDeJMtWMtlmNI= github.com/hashicorp/terraform-registry-address v0.0.0-20220623143253-7d51757b572c/go.mod h1:Wn3Na71knbXc1G8Lh+yu/dQWWJeFQEpDeJMtWMtlmNI=
github.com/hashicorp/terraform-svchost v0.0.0-20200729002733-f050f53b9734 h1:HKLsbzeOsfXmKNpr3GiT18XAblV0BjCbzL8KQAMZGa0= github.com/hashicorp/terraform-svchost v0.0.0-20200729002733-f050f53b9734 h1:HKLsbzeOsfXmKNpr3GiT18XAblV0BjCbzL8KQAMZGa0=
@ -214,7 +196,6 @@ github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndr
github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXqo= github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXqo=
github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s= github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s=
github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k= github.com/rogpeppe/go-internal v1.6.1 h1:/FiVV8dS/e+YqF2JvO3yXRFbBLTIuSDkuC7aBOAvL+k=
github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww= github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww=
github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY= github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY=
@ -250,15 +231,14 @@ github.com/xanzy/ssh-agent v0.3.0/go.mod h1:3s9xbODqPuuhK9JV1R321M/FlMZSBvE5aY6e
github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=
github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s=
github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8=
github.com/zclconf/go-cty v1.10.0 h1:mp9ZXQeIcN8kAwuqorjH+Q+njbJKjLrvB2yIh4q7U+0=
github.com/zclconf/go-cty v1.10.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk= github.com/zclconf/go-cty v1.10.0/go.mod h1:vVKLxnk3puL4qRAv72AO+W99LUD4da90g3uUAzyuvAk=
github.com/zclconf/go-cty v1.12.1 h1:PcupnljUm9EIvbgSHQnHhUr3fO6oFmkOrvs2BAFNXXY=
github.com/zclconf/go-cty v1.12.1/go.mod h1:s9IfD1LK5ccNMSWCVFCE2rJfHiZgi7JijgeWIMfhLvA=
github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8=
go.opentelemetry.io/proto/otlp v0.7.0/go.mod h1:PqfVotwruBrMGOCsRd/89rSnXhoiJIqeYNgFYFoEGnI=
golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210322153248-0c34fe9e7dc2/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4= golang.org/x/crypto v0.0.0-20210421170649-83a5a9bb288b/go.mod h1:T9bdIzuCu7OtxOm1hfPfRQxPLYneinmdGuTeoZ9dtd4=
@ -282,8 +262,6 @@ golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLL
golang.org/x/net v0.0.0-20191009170851-d66e71096ffb/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191009170851-d66e71096ffb/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200301022130-244492dfa37a/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210326060303-6b1517762897/go.mod h1:uSPa2vr4CLtc/ILN5odXGNXS6mhrKVzTaCXzk9m6W3k= golang.org/x/net v0.0.0-20210326060303-6b1517762897/go.mod h1:uSPa2vr4CLtc/ILN5odXGNXS6mhrKVzTaCXzk9m6W3k=
@ -291,7 +269,6 @@ golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2 h1:CIJ76btIcR3eFI5EgSo6k1qKw
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -306,10 +283,7 @@ golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200302150141-5c8b2ff67527/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210119212857-b64e53b001e4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210324051608-47abb6519492/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210502180810-71e4cd670f79/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210502180810-71e4cd670f79/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
@ -336,7 +310,6 @@ golang.org/x/tools v0.0.0-20200325010219-a49f79bcc224/go.mod h1:Sl4aGygMT6LrqrWc
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
@ -344,33 +317,26 @@ google.golang.org/appengine v1.6.6 h1:lMO5rYAqUxkmaj76jAkRUvt5JZgFymx/+Q5Mzfivuh
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc= google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20200513103714-09dca8ec2884/go.mod h1:55QSHmfGQM9UVYDPBsyGGes0y52j32PQ3BqQfXhyH3c=
google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo= google.golang.org/genproto v0.0.0-20200526211855-cb27e3aa2013/go.mod h1:NbSheEEYHJ7i3ixzK3sjbqSGDJWnxyFXZblF3eUsNvo=
google.golang.org/genproto v0.0.0-20200711021454-869866162049 h1:YFTFpQhgvrLrmxtiIncJxFXeCyq84ixuKWVCaCAi9Oc= google.golang.org/genproto v0.0.0-20200711021454-869866162049 h1:YFTFpQhgvrLrmxtiIncJxFXeCyq84ixuKWVCaCAi9Oc=
google.golang.org/genproto v0.0.0-20200711021454-869866162049/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no= google.golang.org/genproto v0.0.0-20200711021454-869866162049/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
google.golang.org/grpc v1.25.1/go.mod h1:c3i+UQWmh7LiEpx4sFZnkU36qjEYZ0imhYfXVyQciAY=
google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= google.golang.org/grpc v1.27.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.33.1/go.mod h1:fr5YgcSWrqhRRxogOsw7RzIpsmvOZ6IcH4kBYTpR3n0= google.golang.org/grpc v1.50.1 h1:DS/BukOZWp8s6p4Dt/tOaJaTQyPyOoCcrjroHuCeLzY=
google.golang.org/grpc v1.36.0/go.mod h1:qjiiYl8FncCW8feJPdyg3v6XW24KsRHe+dy9BAGRRjU= google.golang.org/grpc v1.50.1/go.mod h1:ZgQEeidpAuNRZ8iRrlBKXZQP1ghovWIVhdJRyCDK+GI=
google.golang.org/grpc v1.48.0 h1:rQOsyJ/8+ufEDJd/Gdsz7HG220Mh9HAhFHRGnIjda0w=
google.golang.org/grpc v1.48.0/go.mod h1:vN9eftEi1UMyUsIF80+uQXhHjbXYbm0uXoFCACuMGWk=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.22.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.23.1-0.20200526195155-81db48ad09cc/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4= google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGjtUeSXeh4=
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc=
google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w=
google.golang.org/protobuf v1.28.0 h1:w43yiav+6bVFTBQFZX0r7ipe9JQ1QsbMgHwbBziscLw= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@ -380,7 +346,6 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EV
gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME=
gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

vendor/github.com/google/go-cmp/cmp/compare.go

@ -13,21 +13,21 @@
// //
// The primary features of cmp are: // The primary features of cmp are:
// //
// • When the default behavior of equality does not suit the needs of the test, // - When the default behavior of equality does not suit the test's needs,
// custom equality functions can override the equality operation. // custom equality functions can override the equality operation.
// For example, an equality function may report floats as equal so long as they // For example, an equality function may report floats as equal so long as
// are within some tolerance of each other. // they are within some tolerance of each other.
// //
// • Types that have an Equal method may use that method to determine equality. // - Types with an Equal method may use that method to determine equality.
// This allows package authors to determine the equality operation for the types // This allows package authors to determine the equality operation
// that they define. // for the types that they define.
// //
// If no custom equality functions are used and no Equal method is defined, // - If no custom equality functions are used and no Equal method is defined,
// equality is determined by recursively comparing the primitive kinds on both // equality is determined by recursively comparing the primitive kinds on
// values, much like reflect.DeepEqual. Unlike reflect.DeepEqual, unexported // both values, much like reflect.DeepEqual. Unlike reflect.DeepEqual,
// fields are not compared by default; they result in panics unless suppressed // unexported fields are not compared by default; they result in panics
// by using an Ignore option (see cmpopts.IgnoreUnexported) or explicitly // unless suppressed by using an Ignore option (see cmpopts.IgnoreUnexported)
// compared using the Exporter option. // or explicitly compared using the Exporter option.
package cmp package cmp
import ( import (
@ -45,24 +45,24 @@ import (
// Equal reports whether x and y are equal by recursively applying the // Equal reports whether x and y are equal by recursively applying the
// following rules in the given order to x and y and all of their sub-values: // following rules in the given order to x and y and all of their sub-values:
// //
// Let S be the set of all Ignore, Transformer, and Comparer options that // - Let S be the set of all Ignore, Transformer, and Comparer options that
// remain after applying all path filters, value filters, and type filters. // remain after applying all path filters, value filters, and type filters.
// If at least one Ignore exists in S, then the comparison is ignored. // If at least one Ignore exists in S, then the comparison is ignored.
// If the number of Transformer and Comparer options in S is greater than one, // If the number of Transformer and Comparer options in S is non-zero,
// then Equal panics because it is ambiguous which option to use. // then Equal panics because it is ambiguous which option to use.
// If S contains a single Transformer, then use that to transform the current // If S contains a single Transformer, then use that to transform
// values and recursively call Equal on the output values. // the current values and recursively call Equal on the output values.
// If S contains a single Comparer, then use that to compare the current values. // If S contains a single Comparer, then use that to compare the current values.
// Otherwise, evaluation proceeds to the next rule. // Otherwise, evaluation proceeds to the next rule.
// //
// If the values have an Equal method of the form "(T) Equal(T) bool" or // - If the values have an Equal method of the form "(T) Equal(T) bool" or
// "(T) Equal(I) bool" where T is assignable to I, then use the result of // "(T) Equal(I) bool" where T is assignable to I, then use the result of
// x.Equal(y) even if x or y is nil. Otherwise, no such method exists and // x.Equal(y) even if x or y is nil. Otherwise, no such method exists and
// evaluation proceeds to the next rule. // evaluation proceeds to the next rule.
// //
// Lastly, try to compare x and y based on their basic kinds. // - Lastly, try to compare x and y based on their basic kinds.
// Simple kinds like booleans, integers, floats, complex numbers, strings, and // Simple kinds like booleans, integers, floats, complex numbers, strings,
// channels are compared using the equivalent of the == operator in Go. // and channels are compared using the equivalent of the == operator in Go.
// Functions are only equal if they are both nil, otherwise they are unequal. // Functions are only equal if they are both nil, otherwise they are unequal.
// //
// Structs are equal if recursively calling Equal on all fields report equal. // Structs are equal if recursively calling Equal on all fields report equal.
@ -144,7 +144,7 @@ func rootStep(x, y interface{}) PathStep {
// so that they have the same parent type. // so that they have the same parent type.
var t reflect.Type var t reflect.Type
if !vx.IsValid() || !vy.IsValid() || vx.Type() != vy.Type() { if !vx.IsValid() || !vy.IsValid() || vx.Type() != vy.Type() {
t = reflect.TypeOf((*interface{})(nil)).Elem() t = anyType
if vx.IsValid() { if vx.IsValid() {
vvx := reflect.New(t).Elem() vvx := reflect.New(t).Elem()
vvx.Set(vx) vvx.Set(vx)
@ -639,7 +639,9 @@ type dynChecker struct{ curr, next int }
// Next increments the state and reports whether a check should be performed. // Next increments the state and reports whether a check should be performed.
// //
// Checks occur every Nth function call, where N is a triangular number: // Checks occur every Nth function call, where N is a triangular number:
//
// 0 1 3 6 10 15 21 28 36 45 55 66 78 91 105 120 136 153 171 190 ... // 0 1 3 6 10 15 21 28 36 45 55 66 78 91 105 120 136 153 171 190 ...
//
// See https://en.wikipedia.org/wiki/Triangular_number // See https://en.wikipedia.org/wiki/Triangular_number
// //
// This sequence ensures that the cost of checks drops significantly as // This sequence ensures that the cost of checks drops significantly as
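The rewrapped documentation above still states the option-selection rules for cmp.Equal: an applicable Ignore suppresses the comparison, a single Transformer or Comparer in S is applied, an Equal method is consulted next, and only then do the basic kinds fall back to ==. A minimal sketch of the Comparer rule in practice (not part of this commit; the tolerance and sample data are illustrative, and the properties required of the comparer are documented in the options.go hunk further below):

```go
package main

import (
	"fmt"
	"math"

	"github.com/google/go-cmp/cmp"
)

func main() {
	// A Comparer that treats float64 values as equal within an absolute
	// tolerance. It is symmetric, deterministic, and pure.
	approx := cmp.Comparer(func(x, y float64) bool {
		return math.Abs(x-y) < 1e-9
	})

	got := []float64{1.0, 2.0000000001}
	want := []float64{1.0, 2.0}

	fmt.Println(cmp.Equal(got, want))         // false: falls through to the == rule
	fmt.Println(cmp.Equal(got, want, approx)) // true: the single Comparer in S is used
}
```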

vendor/github.com/google/go-cmp/cmp/internal/diff/diff.go

@ -127,9 +127,9 @@ var randBool = rand.New(rand.NewSource(time.Now().Unix())).Intn(2) == 0
// This function returns an edit-script, which is a sequence of operations // This function returns an edit-script, which is a sequence of operations
// needed to convert one list into the other. The following invariants for // needed to convert one list into the other. The following invariants for
// the edit-script are maintained: // the edit-script are maintained:
// eq == (es.Dist()==0) // - eq == (es.Dist()==0)
// nx == es.LenX() // - nx == es.LenX()
// ny == es.LenY() // - ny == es.LenY()
// //
// This algorithm is not guaranteed to be an optimal solution (i.e., one that // This algorithm is not guaranteed to be an optimal solution (i.e., one that
// produces an edit-script with a minimal Levenshtein distance). This algorithm // produces an edit-script with a minimal Levenshtein distance). This algorithm
@ -169,12 +169,13 @@ func Difference(nx, ny int, f EqualFunc) (es EditScript) {
// A diagonal edge is equivalent to a matching symbol between both X and Y. // A diagonal edge is equivalent to a matching symbol between both X and Y.
// Invariants: // Invariants:
// 0 ≤ fwdPath.X ≤ (fwdFrontier.X, revFrontier.X) ≤ revPath.X ≤ nx // - 0 ≤ fwdPath.X ≤ (fwdFrontier.X, revFrontier.X) ≤ revPath.X ≤ nx
// 0 ≤ fwdPath.Y ≤ (fwdFrontier.Y, revFrontier.Y) ≤ revPath.Y ≤ ny // - 0 ≤ fwdPath.Y ≤ (fwdFrontier.Y, revFrontier.Y) ≤ revPath.Y ≤ ny
// //
// In general: // In general:
// • fwdFrontier.X < revFrontier.X // - fwdFrontier.X < revFrontier.X
// • fwdFrontier.Y < revFrontier.Y // - fwdFrontier.Y < revFrontier.Y
//
// Unless, it is time for the algorithm to terminate. // Unless, it is time for the algorithm to terminate.
fwdPath := path{+1, point{0, 0}, make(EditScript, 0, (nx+ny)/2)} fwdPath := path{+1, point{0, 0}, make(EditScript, 0, (nx+ny)/2)}
revPath := path{-1, point{nx, ny}, make(EditScript, 0)} revPath := path{-1, point{nx, ny}, make(EditScript, 0)}
@ -195,18 +196,20 @@ func Difference(nx, ny int, f EqualFunc) (es EditScript) {
// computing sub-optimal edit-scripts between two lists. // computing sub-optimal edit-scripts between two lists.
// //
// The algorithm is approximately as follows: // The algorithm is approximately as follows:
// Searching for differences switches back-and-forth between // - Searching for differences switches back-and-forth between
// a search that starts at the beginning (the top-left corner), and // a search that starts at the beginning (the top-left corner), and
// a search that starts at the end (the bottom-right corner). The goal of // a search that starts at the end (the bottom-right corner).
// the search is connect with the search from the opposite corner. // The goal of the search is connect with the search
// • As we search, we build a path in a greedy manner, where the first // from the opposite corner.
// match seen is added to the path (this is sub-optimal, but provides a // - As we search, we build a path in a greedy manner,
// decent result in practice). When matches are found, we try the next pair // where the first match seen is added to the path (this is sub-optimal,
// of symbols in the lists and follow all matches as far as possible. // but provides a decent result in practice). When matches are found,
// • When searching for matches, we search along a diagonal going through // we try the next pair of symbols in the lists and follow all matches
// through the "frontier" point. If no matches are found, we advance the // as far as possible.
// frontier towards the opposite corner. // - When searching for matches, we search along a diagonal going through
// • This algorithm terminates when either the X coordinates or the // through the "frontier" point. If no matches are found,
// we advance the frontier towards the opposite corner.
// - This algorithm terminates when either the X coordinates or the
// Y coordinates of the forward and reverse frontier points ever intersect. // Y coordinates of the forward and reverse frontier points ever intersect.
// This algorithm is correct even if searching only in the forward direction // This algorithm is correct even if searching only in the forward direction
@ -389,6 +392,7 @@ type point struct{ X, Y int }
func (p *point) add(dx, dy int) { p.X += dx; p.Y += dy } func (p *point) add(dx, dy int) { p.X += dx; p.Y += dy }
// zigzag maps a consecutive sequence of integers to a zig-zag sequence. // zigzag maps a consecutive sequence of integers to a zig-zag sequence.
//
// [0 1 2 3 4 5 ...] => [0 -1 +1 -2 +2 ...] // [0 1 2 3 4 5 ...] => [0 -1 +1 -2 +2 ...]
func zigzag(x int) int { func zigzag(x int) int {
if x&1 != 0 { if x&1 != 0 {
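The zigzag helper above is truncated in this view; its documented mapping is [0 1 2 3 4 5 ...] => [0 -1 +1 -2 +2 ...]. A self-contained sketch that reproduces that mapping (an illustrative reimplementation, not necessarily identical to the vendored body):

```go
package main

import "fmt"

// zigzagSketch maps a consecutive sequence of integers to a zig-zag sequence:
// 0, 1, 2, 3, 4, 5, ... => 0, -1, +1, -2, +2, -3, ...
// Odd inputs are bit-complemented so they land on the negative side.
func zigzagSketch(x int) int {
	if x&1 != 0 {
		x = ^x
	}
	return x >> 1
}

func main() {
	for x := 0; x < 6; x++ {
		fmt.Print(zigzagSketch(x), " ")
	}
	fmt.Println() // Output: 0 -1 1 -2 2 -3
}
```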

vendor/github.com/google/go-cmp/cmp/internal/value/zero.go

@@ -1,48 +0,0 @@
-// Copyright 2017, The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-package value
-import (
-	"math"
-	"reflect"
-)
-// IsZero reports whether v is the zero value.
-// This does not rely on Interface and so can be used on unexported fields.
-func IsZero(v reflect.Value) bool {
-	switch v.Kind() {
-	case reflect.Bool:
-		return v.Bool() == false
-	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
-		return v.Int() == 0
-	case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
-		return v.Uint() == 0
-	case reflect.Float32, reflect.Float64:
-		return math.Float64bits(v.Float()) == 0
-	case reflect.Complex64, reflect.Complex128:
-		return math.Float64bits(real(v.Complex())) == 0 && math.Float64bits(imag(v.Complex())) == 0
-	case reflect.String:
-		return v.String() == ""
-	case reflect.UnsafePointer:
-		return v.Pointer() == 0
-	case reflect.Chan, reflect.Func, reflect.Interface, reflect.Ptr, reflect.Map, reflect.Slice:
-		return v.IsNil()
-	case reflect.Array:
-		for i := 0; i < v.Len(); i++ {
-			if !IsZero(v.Index(i)) {
-				return false
-			}
-		}
-		return true
-	case reflect.Struct:
-		for i := 0; i < v.NumField(); i++ {
-			if !IsZero(v.Field(i)) {
-				return false
-			}
-		}
-		return true
-	}
-	return false
-}
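go-cmp v0.5.9 drops this internal helper entirely; the report_compare.go hunk further below switches its call sites from value.IsZero(v) to the standard library's v.IsZero(), which reflect.Value has provided since Go 1.13. A minimal sketch of the replacement (illustrative, not part of this commit):

```go
package main

import (
	"fmt"
	"reflect"
)

func main() {
	type pair struct{ X, Y float64 }

	for _, v := range []interface{}{0, "", pair{}, pair{X: 1}, []int(nil)} {
		// reflect.Value.IsZero reports whether v holds the zero value of its
		// type, covering the same kinds the removed helper handled explicitly.
		fmt.Println(reflect.ValueOf(v).IsZero())
	}
}
```

On Go 1.13 or later this prints true for each zero value and false only for pair{X: 1}.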

vendor/github.com/google/go-cmp/cmp/options.go

@ -33,6 +33,7 @@ type Option interface {
} }
// applicableOption represents the following types: // applicableOption represents the following types:
//
// Fundamental: ignore | validator | *comparer | *transformer // Fundamental: ignore | validator | *comparer | *transformer
// Grouping: Options // Grouping: Options
type applicableOption interface { type applicableOption interface {
@ -43,6 +44,7 @@ type applicableOption interface {
} }
// coreOption represents the following types: // coreOption represents the following types:
//
// Fundamental: ignore | validator | *comparer | *transformer // Fundamental: ignore | validator | *comparer | *transformer
// Filters: *pathFilter | *valuesFilter // Filters: *pathFilter | *valuesFilter
type coreOption interface { type coreOption interface {
@ -336,9 +338,9 @@ func (tr transformer) String() string {
// both implement T. // both implement T.
// //
// The equality function must be: // The equality function must be:
// Symmetric: equal(x, y) == equal(y, x) // - Symmetric: equal(x, y) == equal(y, x)
// Deterministic: equal(x, y) == equal(x, y) // - Deterministic: equal(x, y) == equal(x, y)
// Pure: equal(x, y) does not modify x or y // - Pure: equal(x, y) does not modify x or y
func Comparer(f interface{}) Option { func Comparer(f interface{}) Option {
v := reflect.ValueOf(f) v := reflect.ValueOf(f)
if !function.IsType(v.Type(), function.Equal) || v.IsNil() { if !function.IsType(v.Type(), function.Equal) || v.IsNil() {
@ -430,7 +432,7 @@ func AllowUnexported(types ...interface{}) Option {
} }
// Result represents the comparison result for a single node and // Result represents the comparison result for a single node and
// is provided by cmp when calling Result (see Reporter). // is provided by cmp when calling Report (see Reporter).
type Result struct { type Result struct {
_ [0]func() // Make Result incomparable _ [0]func() // Make Result incomparable
flags resultFlags flags resultFlags
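The hunk above also corrects the Result doc comment ("provided by cmp when calling Report"): Result values are delivered to a custom reporter registered with the Reporter option. A condensed sketch of that hook (not part of this commit; diffReporter and point are illustrative names):

```go
package main

import (
	"fmt"

	"github.com/google/go-cmp/cmp"
)

// diffReporter collects a line per differing leaf value. cmp pushes and pops
// path steps as it walks the values and calls Report at each comparison.
type diffReporter struct {
	path  cmp.Path
	diffs []string
}

func (r *diffReporter) PushStep(ps cmp.PathStep) { r.path = append(r.path, ps) }

func (r *diffReporter) Report(res cmp.Result) {
	if !res.Equal() {
		vx, vy := r.path.Last().Values()
		r.diffs = append(r.diffs, fmt.Sprintf("%v: %v != %v", r.path, vx, vy))
	}
}

func (r *diffReporter) PopStep() { r.path = r.path[:len(r.path)-1] }

func main() {
	type point struct{ X, Y int }

	var r diffReporter
	cmp.Equal(point{1, 2}, point{1, 3}, cmp.Reporter(&r))
	for _, d := range r.diffs {
		fmt.Println(d) // reports the differing .Y field
	}
}
```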

vendor/github.com/google/go-cmp/cmp/path.go

@ -41,12 +41,12 @@ type PathStep interface {
// The type of each valid value is guaranteed to be identical to Type. // The type of each valid value is guaranteed to be identical to Type.
// //
// In some cases, one or both may be invalid or have restrictions: // In some cases, one or both may be invalid or have restrictions:
// For StructField, both are not interface-able if the current field // - For StructField, both are not interface-able if the current field
// is unexported and the struct type is not explicitly permitted by // is unexported and the struct type is not explicitly permitted by
// an Exporter to traverse unexported fields. // an Exporter to traverse unexported fields.
// For SliceIndex, one may be invalid if an element is missing from // - For SliceIndex, one may be invalid if an element is missing from
// either the x or y slice. // either the x or y slice.
// For MapIndex, one may be invalid if an entry is missing from // - For MapIndex, one may be invalid if an entry is missing from
// either the x or y map. // either the x or y map.
// //
// The provided values must not be mutated. // The provided values must not be mutated.
@ -94,6 +94,7 @@ func (pa Path) Index(i int) PathStep {
// The simplified path only contains struct field accesses. // The simplified path only contains struct field accesses.
// //
// For example: // For example:
//
// MyMap.MySlices.MyField // MyMap.MySlices.MyField
func (pa Path) String() string { func (pa Path) String() string {
var ss []string var ss []string
@ -108,6 +109,7 @@ func (pa Path) String() string {
// GoString returns the path to a specific node using Go syntax. // GoString returns the path to a specific node using Go syntax.
// //
// For example: // For example:
//
// (*root.MyMap["key"].(*mypkg.MyStruct).MySlices)[2][3].MyField // (*root.MyMap["key"].(*mypkg.MyStruct).MySlices)[2][3].MyField
func (pa Path) GoString() string { func (pa Path) GoString() string {
var ssPre, ssPost []string var ssPre, ssPost []string
@ -159,7 +161,7 @@ func (ps pathStep) String() string {
if ps.typ == nil { if ps.typ == nil {
return "<nil>" return "<nil>"
} }
s := ps.typ.String() s := value.TypeString(ps.typ, false)
if s == "" || strings.ContainsAny(s, "{}\n") { if s == "" || strings.ContainsAny(s, "{}\n") {
return "root" // Type too simple or complex to print return "root" // Type too simple or complex to print
} }
@ -282,7 +284,7 @@ type typeAssertion struct {
func (ta TypeAssertion) Type() reflect.Type { return ta.typ } func (ta TypeAssertion) Type() reflect.Type { return ta.typ }
func (ta TypeAssertion) Values() (vx, vy reflect.Value) { return ta.vx, ta.vy } func (ta TypeAssertion) Values() (vx, vy reflect.Value) { return ta.vx, ta.vy }
func (ta TypeAssertion) String() string { return fmt.Sprintf(".(%v)", ta.typ) } func (ta TypeAssertion) String() string { return fmt.Sprintf(".(%v)", value.TypeString(ta.typ, false)) }
// Transform is a transformation from the parent type to the current type. // Transform is a transformation from the parent type to the current type.
type Transform struct{ *transform } type Transform struct{ *transform }


@ -7,8 +7,6 @@ package cmp
import ( import (
"fmt" "fmt"
"reflect" "reflect"
"github.com/google/go-cmp/cmp/internal/value"
) )
// numContextRecords is the number of surrounding equal records to print. // numContextRecords is the number of surrounding equal records to print.
@ -117,7 +115,7 @@ func (opts formatOptions) FormatDiff(v *valueNode, ptrs *pointerReferences) (out
// For leaf nodes, format the value based on the reflect.Values alone. // For leaf nodes, format the value based on the reflect.Values alone.
// As a special case, treat equal []byte as a leaf nodes. // As a special case, treat equal []byte as a leaf nodes.
isBytes := v.Type.Kind() == reflect.Slice && v.Type.Elem() == reflect.TypeOf(byte(0)) isBytes := v.Type.Kind() == reflect.Slice && v.Type.Elem() == byteType
isEqualBytes := isBytes && v.NumDiff+v.NumIgnored+v.NumTransformed == 0 isEqualBytes := isBytes && v.NumDiff+v.NumIgnored+v.NumTransformed == 0
if v.MaxDepth == 0 || isEqualBytes { if v.MaxDepth == 0 || isEqualBytes {
switch opts.DiffMode { switch opts.DiffMode {
@ -248,11 +246,11 @@ func (opts formatOptions) formatDiffList(recs []reportRecord, k reflect.Kind, pt
var isZero bool var isZero bool
switch opts.DiffMode { switch opts.DiffMode {
case diffIdentical: case diffIdentical:
isZero = value.IsZero(r.Value.ValueX) || value.IsZero(r.Value.ValueY) isZero = r.Value.ValueX.IsZero() || r.Value.ValueY.IsZero()
case diffRemoved: case diffRemoved:
isZero = value.IsZero(r.Value.ValueX) isZero = r.Value.ValueX.IsZero()
case diffInserted: case diffInserted:
isZero = value.IsZero(r.Value.ValueY) isZero = r.Value.ValueY.IsZero()
} }
if isZero { if isZero {
continue continue


@ -16,6 +16,13 @@ import (
"github.com/google/go-cmp/cmp/internal/value" "github.com/google/go-cmp/cmp/internal/value"
) )
var (
anyType = reflect.TypeOf((*interface{})(nil)).Elem()
stringType = reflect.TypeOf((*string)(nil)).Elem()
bytesType = reflect.TypeOf((*[]byte)(nil)).Elem()
byteType = reflect.TypeOf((*byte)(nil)).Elem()
)
type formatValueOptions struct { type formatValueOptions struct {
// AvoidStringer controls whether to avoid calling custom stringer // AvoidStringer controls whether to avoid calling custom stringer
// methods like error.Error or fmt.Stringer.String. // methods like error.Error or fmt.Stringer.String.
@ -184,7 +191,7 @@ func (opts formatOptions) FormatValue(v reflect.Value, parentKind reflect.Kind,
} }
for i := 0; i < v.NumField(); i++ { for i := 0; i < v.NumField(); i++ {
vv := v.Field(i) vv := v.Field(i)
if value.IsZero(vv) { if vv.IsZero() {
continue // Elide fields with zero values continue // Elide fields with zero values
} }
if len(list) == maxLen { if len(list) == maxLen {
@ -205,7 +212,7 @@ func (opts formatOptions) FormatValue(v reflect.Value, parentKind reflect.Kind,
} }
// Check whether this is a []byte of text data. // Check whether this is a []byte of text data.
if t.Elem() == reflect.TypeOf(byte(0)) { if t.Elem() == byteType {
b := v.Bytes() b := v.Bytes()
isPrintSpace := func(r rune) bool { return unicode.IsPrint(r) || unicode.IsSpace(r) } isPrintSpace := func(r rune) bool { return unicode.IsPrint(r) || unicode.IsSpace(r) }
if len(b) > 0 && utf8.Valid(b) && len(bytes.TrimFunc(b, isPrintSpace)) == 0 { if len(b) > 0 && utf8.Valid(b) && len(bytes.TrimFunc(b, isPrintSpace)) == 0 {


@ -104,7 +104,7 @@ func (opts formatOptions) FormatDiffSlice(v *valueNode) textNode {
case t.Kind() == reflect.String: case t.Kind() == reflect.String:
sx, sy = vx.String(), vy.String() sx, sy = vx.String(), vy.String()
isString = true isString = true
case t.Kind() == reflect.Slice && t.Elem() == reflect.TypeOf(byte(0)): case t.Kind() == reflect.Slice && t.Elem() == byteType:
sx, sy = string(vx.Bytes()), string(vy.Bytes()) sx, sy = string(vx.Bytes()), string(vy.Bytes())
isString = true isString = true
case t.Kind() == reflect.Array: case t.Kind() == reflect.Array:
@ -147,7 +147,10 @@ func (opts formatOptions) FormatDiffSlice(v *valueNode) textNode {
}) })
efficiencyLines := float64(esLines.Dist()) / float64(len(esLines)) efficiencyLines := float64(esLines.Dist()) / float64(len(esLines))
efficiencyBytes := float64(esBytes.Dist()) / float64(len(esBytes)) efficiencyBytes := float64(esBytes.Dist()) / float64(len(esBytes))
isPureLinedText = efficiencyLines < 4*efficiencyBytes quotedLength := len(strconv.Quote(sx + sy))
unquotedLength := len(sx) + len(sy)
escapeExpansionRatio := float64(quotedLength) / float64(unquotedLength)
isPureLinedText = efficiencyLines < 4*efficiencyBytes || escapeExpansionRatio > 1.1
} }
} }
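The new escapeExpansionRatio term is easiest to see with a quick worked example; the input below is illustrative.

package main

import (
	"fmt"
	"strconv"
)

func main() {
	sx, sy := "\x01\x02", "\x03" // mostly non-printable input
	quoted := len(strconv.Quote(sx + sy)) // len(`"\x01\x02\x03"`) == 14
	unquoted := len(sx) + len(sy)         // 3
	ratio := float64(quoted) / float64(unquoted)
	// ratio is roughly 4.67, which exceeds 1.1, so the new condition above
	// also marks the value as pure lined text.
	fmt.Println(ratio > 1.1) // true
}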
@ -171,12 +174,13 @@ func (opts formatOptions) FormatDiffSlice(v *valueNode) textNode {
// differences in a string literal. This format is more readable, // differences in a string literal. This format is more readable,
// but has edge-cases where differences are visually indistinguishable. // but has edge-cases where differences are visually indistinguishable.
// This format is avoided under the following conditions: // This format is avoided under the following conditions:
// A line starts with `"""` // - A line starts with `"""`
// A line starts with "..." // - A line starts with "..."
// A line contains non-printable characters // - A line contains non-printable characters
// Adjacent different lines differ only by whitespace // - Adjacent different lines differ only by whitespace
// //
// For example: // For example:
//
// """ // """
// ... // 3 identical lines // ... // 3 identical lines
// foo // foo
@ -231,7 +235,7 @@ func (opts formatOptions) FormatDiffSlice(v *valueNode) textNode {
var out textNode = &textWrap{Prefix: "(", Value: list2, Suffix: ")"} var out textNode = &textWrap{Prefix: "(", Value: list2, Suffix: ")"}
switch t.Kind() { switch t.Kind() {
case reflect.String: case reflect.String:
if t != reflect.TypeOf(string("")) { if t != stringType {
out = opts.FormatType(t, out) out = opts.FormatType(t, out)
} }
case reflect.Slice: case reflect.Slice:
@ -326,12 +330,12 @@ func (opts formatOptions) FormatDiffSlice(v *valueNode) textNode {
switch t.Kind() { switch t.Kind() {
case reflect.String: case reflect.String:
out = &textWrap{Prefix: "strings.Join(", Value: out, Suffix: fmt.Sprintf(", %q)", delim)} out = &textWrap{Prefix: "strings.Join(", Value: out, Suffix: fmt.Sprintf(", %q)", delim)}
if t != reflect.TypeOf(string("")) { if t != stringType {
out = opts.FormatType(t, out) out = opts.FormatType(t, out)
} }
case reflect.Slice: case reflect.Slice:
out = &textWrap{Prefix: "bytes.Join(", Value: out, Suffix: fmt.Sprintf(", %q)", delim)} out = &textWrap{Prefix: "bytes.Join(", Value: out, Suffix: fmt.Sprintf(", %q)", delim)}
if t != reflect.TypeOf([]byte(nil)) { if t != bytesType {
out = opts.FormatType(t, out) out = opts.FormatType(t, out)
} }
} }
@ -446,7 +450,6 @@ func (opts formatOptions) formatDiffSlice(
// {NumIdentical: 3}, // {NumIdentical: 3},
// {NumInserted: 1}, // {NumInserted: 1},
// ] // ]
//
func coalesceAdjacentEdits(name string, es diff.EditScript) (groups []diffStats) { func coalesceAdjacentEdits(name string, es diff.EditScript) (groups []diffStats) {
var prevMode byte var prevMode byte
lastStats := func(mode byte) *diffStats { lastStats := func(mode byte) *diffStats {
@ -503,7 +506,6 @@ func coalesceAdjacentEdits(name string, es diff.EditScript) (groups []diffStats)
// {NumIdentical: 8, NumRemoved: 12, NumInserted: 3}, // {NumIdentical: 8, NumRemoved: 12, NumInserted: 3},
// {NumIdentical: 63}, // {NumIdentical: 63},
// ] // ]
//
func coalesceInterveningIdentical(groups []diffStats, windowSize int) []diffStats { func coalesceInterveningIdentical(groups []diffStats, windowSize int) []diffStats {
groups, groupsOrig := groups[:0], groups groups, groupsOrig := groups[:0], groups
for i, ds := range groupsOrig { for i, ds := range groupsOrig {
@ -548,7 +550,6 @@ func coalesceInterveningIdentical(groups []diffStats, windowSize int) []diffStat
// {NumRemoved: 9}, // {NumRemoved: 9},
// {NumIdentical: 64}, // incremented by 10 // {NumIdentical: 64}, // incremented by 10
// ] // ]
//
func cleanupSurroundingIdentical(groups []diffStats, eq func(i, j int) bool) []diffStats { func cleanupSurroundingIdentical(groups []diffStats, eq func(i, j int) bool) []diffStats {
var ix, iy int // indexes into sequence x and y var ix, iy int // indexes into sequence x and y
for i, ds := range groups { for i, ds := range groups {


@ -393,6 +393,7 @@ func (s diffStats) Append(ds diffStats) diffStats {
// String prints a humanly-readable summary of coalesced records. // String prints a humanly-readable summary of coalesced records.
// //
// Example: // Example:
//
// diffStats{Name: "Field", NumIgnored: 5}.String() => "5 ignored fields" // diffStats{Name: "Field", NumIgnored: 5}.String() => "5 ignored fields"
func (s diffStats) String() string { func (s diffStats) String() string {
var ss []string var ss []string

25
vendor/github.com/hashicorp/go-plugin/CHANGELOG.md generated vendored Normal file

@ -0,0 +1,25 @@
## v1.4.6
BUG FIXES:
* server: Prevent gRPC broker goroutine leak when using `GRPCServer` type `GracefulStop()` or `Stop()` methods [[GH-220](https://github.com/hashicorp/go-plugin/pull/220)]
## v1.4.5
ENHANCEMENTS:
* client: log warning when SecureConfig is nil [[GH-207](https://github.com/hashicorp/go-plugin/pull/207)]
## v1.4.4
ENHANCEMENTS:
* client: increase level of plugin exit logs [[GH-195](https://github.com/hashicorp/go-plugin/pull/195)]
BUG FIXES:
* Bidirectional communication: fix bidirectional communication when AutoMTLS is enabled [[GH-193](https://github.com/hashicorp/go-plugin/pull/193)]
* RPC: Trim a spurious log message for plugins using RPC [[GH-186](https://github.com/hashicorp/go-plugin/pull/186)]


@ -1,3 +1,5 @@
Copyright (c) 2016 HashiCorp, Inc.
Mozilla Public License, version 2.0 Mozilla Public License, version 2.0
1. Definitions 1. Definitions


@ -547,7 +547,9 @@ func (c *Client) Start() (addr net.Addr, err error) {
return nil, err return nil, err
} }
if c.config.SecureConfig != nil { if c.config.SecureConfig == nil {
c.logger.Warn("plugin configured with a nil SecureConfig")
} else {
if ok, err := c.config.SecureConfig.Check(cmd.Path); err != nil { if ok, err := c.config.SecureConfig.Check(cmd.Path); err != nil {
return nil, fmt.Errorf("error verifying checksum: %s", err) return nil, fmt.Errorf("error verifying checksum: %s", err)
} else if !ok { } else if !ok {
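The new warning fires when ClientConfig.SecureConfig is left nil. Below is a minimal sketch of supplying one; the handshake, plugin map, binary path, and checksum are placeholders rather than anything defined in this diff.

package example

import (
	"crypto/sha256"
	"os/exec"

	"github.com/hashicorp/go-plugin"
)

// newVerifiedClient wires a SecureConfig into the client so the plugin
// binary's checksum is verified at launch instead of logging the warning.
func newVerifiedClient(handshake plugin.HandshakeConfig, plugins map[string]plugin.Plugin, path string, sum []byte) *plugin.Client {
	return plugin.NewClient(&plugin.ClientConfig{
		HandshakeConfig: handshake,
		Plugins:         plugins,
		Cmd:             exec.Command(path),
		SecureConfig: &plugin.SecureConfig{
			Checksum: sum,          // expected checksum of the plugin binary
			Hash:     sha256.New(), // hash used to verify it
		},
	})
}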


@ -107,14 +107,26 @@ func (s *GRPCServer) Init() error {
return nil return nil
} }
// Stop calls Stop on the underlying grpc.Server // Stop calls Stop on the underlying grpc.Server and Close on the underlying
// grpc.Broker if present.
func (s *GRPCServer) Stop() { func (s *GRPCServer) Stop() {
s.server.Stop() s.server.Stop()
if s.broker != nil {
s.broker.Close()
s.broker = nil
}
} }
// GracefulStop calls GracefulStop on the underlying grpc.Server // GracefulStop calls GracefulStop on the underlying grpc.Server and Close on
// the underlying grpc.Broker if present.
func (s *GRPCServer) GracefulStop() { func (s *GRPCServer) GracefulStop() {
s.server.GracefulStop() s.server.GracefulStop()
if s.broker != nil {
s.broker.Close()
s.broker = nil
}
} }
// Config is the GRPCServerConfig encoded as JSON then base64. // Config is the GRPCServerConfig encoded as JSON then base64.


@ -42,6 +42,8 @@ func (s *RPCServer) Config() string { return "" }
// ServerProtocol impl. // ServerProtocol impl.
func (s *RPCServer) Serve(lis net.Listener) { func (s *RPCServer) Serve(lis net.Listener) {
defer s.done()
for { for {
conn, err := lis.Accept() conn, err := lis.Accept()
if err != nil { if err != nil {
@ -82,7 +84,7 @@ func (s *RPCServer) ServeConn(conn io.ReadWriteCloser) {
// Connect the stdstreams (in, out, err) // Connect the stdstreams (in, out, err)
stdstream := make([]net.Conn, 2) stdstream := make([]net.Conn, 2)
for i, _ := range stdstream { for i := range stdstream {
stdstream[i], err = mux.Accept() stdstream[i], err = mux.Accept()
if err != nil { if err != nil {
mux.Close() mux.Close()
@ -133,13 +135,15 @@ type controlServer struct {
// Ping can be called to verify the connection (and likely the binary) // Ping can be called to verify the connection (and likely the binary)
// is still alive to a plugin. // is still alive to a plugin.
func (c *controlServer) Ping( func (c *controlServer) Ping(
null bool, response *struct{}) error { null bool, response *struct{},
) error {
*response = struct{}{} *response = struct{}{}
return nil return nil
} }
func (c *controlServer) Quit( func (c *controlServer) Quit(
null bool, response *struct{}) error { null bool, response *struct{},
) error {
// End the server // End the server
c.server.done() c.server.done()
@ -156,7 +160,8 @@ type dispenseServer struct {
} }
func (d *dispenseServer) Dispense( func (d *dispenseServer) Dispense(
name string, response *uint32) error { name string, response *uint32,
) error {
// Find the function to create this implementation // Find the function to create this implementation
p, ok := d.plugins[name] p, ok := d.plugins[name]
if !ok { if !ok {


@ -1,10 +1,35 @@
# HCL Changelog # HCL Changelog
## v2.15.0 (November 10, 2022)
### Bugs Fixed
* ext/typeexpr: Skip null objects when applying defaults. This prevents crashes when null objects are created inside collections, and stops incomplete objects from being created with only optional attributes set. ([#567](https://github.com/hashicorp/hcl/pull/567))
* ext/typeexpr: Ensure default values do not have optional metadata attached. This prevents crashes when default values are inserted into concrete go-cty values that have also been stripped of their optional metadata. ([#568](https://github.com/hashicorp/hcl/pull/568))
### Enhancements
* ext/typeexpr: With the [go-cty](https://github.com/zclconf/go-cty) upstream dependency updated to v1.12.0, the `Defaults` struct and associated functions can apply additional and more flexible 'unsafe' conversions (examples include tuples into collections such as lists and sets, and additional safety around null and dynamic values). ([#564](https://github.com/hashicorp/hcl/pull/564))
* ext/typeexpr: With the [go-cty](https://github.com/zclconf/go-cty) upstream dependency updated to v1.12.0, users should now apply the go-cty convert functionality *before* setting defaults on a given `cty.Value`, rather than after, if they require a specific `cty.Type`. ([#564](https://github.com/hashicorp/hcl/pull/564))
## v2.14.1 (September 23, 2022)
### Bugs Fixed
* ext/typeexpr: Type convert defaults for optional object attributes when applying them. This prevents crashes in certain cases when the objects in question are part of a collection. ([#555](https://github.com/hashicorp/hcl/pull/555))
## v2.14.0 (September 1, 2022)
### Enhancements
* ext/typeexpr: Added support for optional object attributes to `TypeConstraint`. Attributes can be wrapped in the special `optional(…)` modifier, allowing the attribute to be omitted while still meeting the type constraint. For more information, see [cty's documentation on conversion between object types](https://github.com/zclconf/go-cty/blob/main/docs/convert.md#conversion-between-object-types). ([#549](https://github.com/hashicorp/hcl/pull/549))
* ext/typeexpr: New function: `TypeConstraintWithDefaults`. In this mode, the `optional(…)` modifier accepts a second argument which can be used as the default value for omitted object attributes. The function returns both a `cty.Type` and associated `Defaults`, the latter of which has an `Apply` method to apply defaults to a given value. ([#549](https://github.com/hashicorp/hcl/pull/549))
## v2.13.0 (June 22, 2022) ## v2.13.0 (June 22, 2022)
### Enhancements ### Enhancements
* hcl: `hcl.Diagnostic` how has an additional field `Extra` which is intended for carrying arbitrary supporting data ("extra information") related to the diagnostic message, intended to allow diagnostic renderers to optionally tailor the presentation of messages for particular situations. ([#539](https://github.com/hashicorp/hcl/pull/539)) * hcl: `hcl.Diagnostic` now has an additional field `Extra` which is intended for carrying arbitrary supporting data ("extra information") related to the diagnostic message, intended to allow diagnostic renderers to optionally tailor the presentation of messages for particular situations. ([#539](https://github.com/hashicorp/hcl/pull/539))
* hclsyntax: When an error occurs during a function call, the returned diagnostics will include _extra information_ (as described in the previous point) about which function was being called and, if the message is about an error returned by the function itself, that raw `error` value without any post-processing. ([#539](https://github.com/hashicorp/hcl/pull/539)) * hclsyntax: When an error occurs during a function call, the returned diagnostics will include _extra information_ (as described in the previous point) about which function was being called and, if the message is about an error returned by the function itself, that raw `error` value without any post-processing. ([#539](https://github.com/hashicorp/hcl/pull/539))
### Bugs Fixed ### Bugs Fixed
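A hedged sketch of the optional(...) defaults workflow these entries describe, assuming the TypeConstraintWithDefaults and Defaults.Apply signatures named above; the function and variable names are illustrative.

package example

import (
	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/ext/typeexpr"
	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/convert"
)

// applyOptionalDefaults decodes a type constraint that may carry optional(...)
// defaults, converts the raw value to that type first (per the v2.15.0 note),
// and then applies the defaults.
func applyOptionalDefaults(expr hcl.Expression, raw cty.Value) (cty.Value, error) {
	ty, defaults, diags := typeexpr.TypeConstraintWithDefaults(expr)
	if diags.HasErrors() {
		return cty.NilVal, diags
	}
	converted, err := convert.Convert(raw, ty)
	if err != nil {
		return cty.NilVal, err
	}
	if defaults != nil {
		converted = defaults.Apply(converted)
	}
	return converted, nil
}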


@ -84,7 +84,7 @@ Comments serve as program documentation and come in two forms:
sequence, and may have any characters within except the ending sequence. sequence, and may have any characters within except the ending sequence.
An inline comment is considered equivalent to a whitespace sequence. An inline comment is considered equivalent to a whitespace sequence.
Comments and whitespace cannot begin within within other comments, or within Comments and whitespace cannot begin within other comments, or within
template literals except inside an interpolation sequence or template directive. template literals except inside an interpolation sequence or template directive.
### Identifiers ### Identifiers


@ -1,6 +1,6 @@
package version package version
const version = "0.17.2" const version = "0.17.3"
// ModuleVersion returns the current version of the github.com/hashicorp/terraform-exec Go module. // ModuleVersion returns the current version of the github.com/hashicorp/terraform-exec Go module.
// This is a function to allow for future possible enhancement using debug.BuildInfo. // This is a function to allow for future possible enhancement using debug.BuildInfo.


@ -46,6 +46,7 @@ var (
statePlanReadErrRegexp = regexp.MustCompile( statePlanReadErrRegexp = regexp.MustCompile(
`Terraform couldn't read the given file as a state or plan file.|` + `Terraform couldn't read the given file as a state or plan file.|` +
`Error: Failed to read the given file as a state or plan file`) `Error: Failed to read the given file as a state or plan file`)
lockIdInvalidErrRegexp = regexp.MustCompile(`Failed to unlock state: `)
) )
func (tf *Terraform) wrapExitError(ctx context.Context, err error, stderr string) error { func (tf *Terraform) wrapExitError(ctx context.Context, err error, stderr string) error {
@ -160,6 +161,8 @@ func (tf *Terraform) wrapExitError(ctx context.Context, err error, stderr string
} }
case statePlanReadErrRegexp.MatchString(stderr): case statePlanReadErrRegexp.MatchString(stderr):
return &ErrStatePlanRead{stderr: stderr} return &ErrStatePlanRead{stderr: stderr}
case lockIdInvalidErrRegexp.MatchString(stderr):
return &ErrLockIdInvalid{stderr: stderr}
} }
return fmt.Errorf("%w\n%s", &unwrapper{exitErr, ctxErr}, stderr) return fmt.Errorf("%w\n%s", &unwrapper{exitErr, ctxErr}, stderr)
@ -256,6 +259,16 @@ func (e *ErrNoConfig) Error() string {
return e.stderr return e.stderr
} }
type ErrLockIdInvalid struct {
unwrapper
stderr string
}
func (e *ErrLockIdInvalid) Error() string {
return e.stderr
}
// ErrCLIUsage is returned when the combination of flags or arguments is incorrect. // ErrCLIUsage is returned when the combination of flags or arguments is incorrect.
// //
// CLI indicates usage errors in three different ways: either // CLI indicates usage errors in three different ways: either
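Callers can branch on the new typed error with errors.As; a small sketch with illustrative names follows.

package example

import (
	"context"
	"errors"
	"log"

	"github.com/hashicorp/terraform-exec/tfexec"
)

// forceUnlock force-unlocks the state and tolerates an invalid lock ID by
// inspecting the ErrLockIdInvalid type introduced above.
func forceUnlock(ctx context.Context, tf *tfexec.Terraform, lockID string) error {
	err := tf.ForceUnlock(ctx, lockID)
	var lockErr *tfexec.ErrLockIdInvalid
	if errors.As(err, &lockErr) {
		log.Printf("force-unlock of %q failed with an invalid lock ID: %v", lockID, err)
		return nil
	}
	return err
}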


@ -2,6 +2,7 @@ package tfexec
import ( import (
"context" "context"
"fmt"
"os/exec" "os/exec"
) )
@ -21,7 +22,10 @@ func (opt *DirOption) configureForceUnlock(conf *forceUnlockConfig) {
// ForceUnlock represents the `terraform force-unlock` command // ForceUnlock represents the `terraform force-unlock` command
func (tf *Terraform) ForceUnlock(ctx context.Context, lockID string, opts ...ForceUnlockOption) error { func (tf *Terraform) ForceUnlock(ctx context.Context, lockID string, opts ...ForceUnlockOption) error {
unlockCmd := tf.forceUnlockCmd(ctx, lockID, opts...) unlockCmd, err := tf.forceUnlockCmd(ctx, lockID, opts...)
if err != nil {
return err
}
if err := tf.runTerraformCmd(ctx, unlockCmd); err != nil { if err := tf.runTerraformCmd(ctx, unlockCmd); err != nil {
return err return err
@ -30,21 +34,25 @@ func (tf *Terraform) ForceUnlock(ctx context.Context, lockID string, opts ...For
return nil return nil
} }
func (tf *Terraform) forceUnlockCmd(ctx context.Context, lockID string, opts ...ForceUnlockOption) *exec.Cmd { func (tf *Terraform) forceUnlockCmd(ctx context.Context, lockID string, opts ...ForceUnlockOption) (*exec.Cmd, error) {
c := defaultForceUnlockOptions c := defaultForceUnlockOptions
for _, o := range opts { for _, o := range opts {
o.configureForceUnlock(&c) o.configureForceUnlock(&c)
} }
args := []string{"force-unlock", "-force"} args := []string{"force-unlock", "-no-color", "-force"}
// positional arguments // positional arguments
args = append(args, lockID) args = append(args, lockID)
// optional positional arguments // optional positional arguments
if c.dir != "" { if c.dir != "" {
err := tf.compatible(ctx, nil, tf0_15_0)
if err != nil {
return nil, fmt.Errorf("[DIR] option was removed in Terraform v0.15.0")
}
args = append(args, c.dir) args = append(args, c.dir)
} }
return tf.buildTerraformCmd(ctx, nil, args...) return tf.buildTerraformCmd(ctx, nil, args...), nil
} }


@ -52,6 +52,10 @@ func (opt *DirOption) configureInit(conf *initConfig) {
conf.dir = opt.path conf.dir = opt.path
} }
func (opt *ForceCopyOption) configureInit(conf *initConfig) {
conf.forceCopy = opt.forceCopy
}
func (opt *FromModuleOption) configureInit(conf *initConfig) { func (opt *FromModuleOption) configureInit(conf *initConfig) {
conf.fromModule = opt.source conf.fromModule = opt.source
} }
@ -116,7 +120,7 @@ func (tf *Terraform) initCmd(ctx context.Context, opts ...InitOption) (*exec.Cmd
o.configureInit(&c) o.configureInit(&c)
} }
args := []string{"init", "-no-color", "-force-copy", "-input=false"} args := []string{"init", "-no-color", "-input=false"}
// string opts: only pass if set // string opts: only pass if set
if c.fromModule != "" { if c.fromModule != "" {
@ -144,6 +148,10 @@ func (tf *Terraform) initCmd(ctx context.Context, opts ...InitOption) (*exec.Cmd
args = append(args, "-verify-plugins="+fmt.Sprint(c.verifyPlugins)) args = append(args, "-verify-plugins="+fmt.Sprint(c.verifyPlugins))
} }
if c.forceCopy {
args = append(args, "-force-copy")
}
// unary flags: pass if true // unary flags: pass if true
if c.reconfigure { if c.reconfigure {
args = append(args, "-reconfigure") args = append(args, "-reconfigure")
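Because init no longer passes -force-copy unconditionally, callers that relied on it now opt in through the new init option; a sketch is below, assuming the usual tfexec.ForceCopy option constructor.

package example

import (
	"context"

	"github.com/hashicorp/terraform-exec/tfexec"
)

// initWithForceCopy opts back in to the -force-copy flag that Init stops
// setting by default in this release.
func initWithForceCopy(ctx context.Context, tf *tfexec.Terraform) error {
	return tf.Init(ctx, tfexec.ForceCopy(true))
}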


@ -1,3 +1,5 @@
Copyright (c) 2020 HashiCorp, Inc.
Mozilla Public License, version 2.0 Mozilla Public License, version 2.0
1. Definitions 1. Definitions


@ -25,3 +25,9 @@ func ProtocolWarn(ctx context.Context, msg string, additionalFields ...map[strin
func ProtocolTrace(ctx context.Context, msg string, additionalFields ...map[string]interface{}) { func ProtocolTrace(ctx context.Context, msg string, additionalFields ...map[string]interface{}) {
tfsdklog.SubsystemTrace(ctx, SubsystemProto, msg, additionalFields...) tfsdklog.SubsystemTrace(ctx, SubsystemProto, msg, additionalFields...)
} }
// ProtocolSetField returns a context with the additional protocol subsystem
// field set.
func ProtocolSetField(ctx context.Context, key string, value any) context.Context {
return tfsdklog.SubsystemSetField(ctx, SubsystemProto, key, value)
}


@ -55,20 +55,34 @@ func ProtocolData(ctx context.Context, dataDir string, rpc string, message strin
return return
} }
fileName := fmt.Sprintf("%d_%s_%s_%s.%s", time.Now().Unix(), rpc, message, field, fileExtension) writeProtocolFile(ctx, dataDir, rpc, message, field, fileExtension, fileContents)
filePath := path.Join(dataDir, fileName) }
logFields := map[string]interface{}{KeyProtocolDataFile: filePath} // should not be persisted using With()
ProtocolTrace(ctx, "Writing protocol data file", logFields) // ProtocolPrivateData emits raw protocol private data to a file, if given a
// directory. This data is "private" in the sense that it is provider-owned,
// rather than something managed by Terraform.
//
// The directory must exist and be writable, prior to invoking this function.
//
// File names are in the format: {TIME}_{RPC}_{MESSAGE}_{FIELD}(.empty)
func ProtocolPrivateData(ctx context.Context, dataDir string, rpc string, message string, field string, data []byte) {
if dataDir == "" {
// Write a log, only once, that explains how to enable this functionality.
protocolDataSkippedLog.Do(func() {
ProtocolTrace(ctx, "Skipping protocol data file writing because no data directory is set. "+
fmt.Sprintf("Use the %s environment variable to enable this functionality.", EnvTfLogSdkProtoDataDir))
})
err := os.WriteFile(filePath, fileContents, 0644)
if err != nil {
ProtocolError(ctx, fmt.Sprintf("Unable to write protocol data file: %s", err), logFields)
return return
} }
ProtocolTrace(ctx, "Wrote protocol data file", logFields) var fileExtension string
if len(data) == 0 {
fileExtension = fileExtEmpty
}
writeProtocolFile(ctx, dataDir, rpc, message, field, fileExtension, data)
} }
func protocolDataDynamicValue5(_ context.Context, value *tfprotov5.DynamicValue) (string, []byte) { func protocolDataDynamicValue5(_ context.Context, value *tfprotov5.DynamicValue) (string, []byte) {
@ -106,3 +120,25 @@ func protocolDataDynamicValue6(_ context.Context, value *tfprotov6.DynamicValue)
return fileExtEmpty, nil return fileExtEmpty, nil
} }
func writeProtocolFile(ctx context.Context, dataDir string, rpc string, message string, field string, fileExtension string, fileContents []byte) {
fileName := fmt.Sprintf("%d_%s_%s_%s", time.Now().Unix(), rpc, message, field)
if fileExtension != "" {
fileName += "." + fileExtension
}
filePath := path.Join(dataDir, fileName)
ctx = ProtocolSetField(ctx, KeyProtocolDataFile, filePath)
ProtocolTrace(ctx, "Writing protocol data file")
err := os.WriteFile(filePath, fileContents, 0644)
if err != nil {
ProtocolError(ctx, "Unable to write protocol data file", map[string]any{KeyError: err.Error()})
return
}
ProtocolTrace(ctx, "Wrote protocol data file")
}


@ -34,7 +34,6 @@ func (d Diagnostics) ErrorCount() int {
// - Error severity at ERROR level // - Error severity at ERROR level
// - Warning severity at WARN level // - Warning severity at WARN level
// - Invalid/Unknown severity at WARN level // - Invalid/Unknown severity at WARN level
//
func (d Diagnostics) Log(ctx context.Context) { func (d Diagnostics) Log(ctx context.Context) {
for _, diagnostic := range d { for _, diagnostic := range d {
if diagnostic == nil { if diagnostic == nil {


@ -24,7 +24,6 @@ func DownstreamRequest(ctx context.Context) context.Context {
// - TRACE "Received downstream response" log with request duration and // - TRACE "Received downstream response" log with request duration and
// diagnostic severity counts // diagnostic severity counts
// - Per-diagnostic logs // - Per-diagnostic logs
//
func DownstreamResponse(ctx context.Context, diagnostics diag.Diagnostics) { func DownstreamResponse(ctx context.Context, diagnostics diag.Diagnostics) {
responseFields := map[string]interface{}{ responseFields := map[string]interface{}{
logging.KeyDiagnosticErrorCount: diagnostics.ErrorCount(), logging.KeyDiagnosticErrorCount: diagnostics.ErrorCount(),


@ -19,7 +19,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT. // Code generated by protoc-gen-go. DO NOT EDIT.
// versions: // versions:
// protoc-gen-go v1.28.0 // protoc-gen-go v1.28.1
// protoc v3.19.4 // protoc v3.19.4
// source: tfplugin5.proto // source: tfplugin5.proto
@ -1795,6 +1795,15 @@ func (x *PrepareProviderConfig_Response) GetDiagnostics() []*Diagnostic {
return nil return nil
} }
// Request is the message that is sent to the provider during the
// UpgradeResourceState RPC.
//
// This message intentionally does not include configuration data as any
// configuration-based or configuration-conditional changes should occur
// during the PlanResourceChange RPC. Additionally, the configuration is
// not guaranteed to exist (in the case of resource destruction), be wholly
// known, nor match the given prior state, which could lead to unexpected
// provider behaviors for practitioners.
type UpgradeResourceState_Request struct { type UpgradeResourceState_Request struct {
state protoimpl.MessageState state protoimpl.MessageState
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
@ -2231,6 +2240,14 @@ func (x *Configure_Response) GetDiagnostics() []*Diagnostic {
return nil return nil
} }
// Request is the message that is sent to the provider during the
// ReadResource RPC.
//
// This message intentionally does not include configuration data as any
// configuration-based or configuration-conditional changes should occur
// during the PlanResourceChange RPC. Additionally, the configuration is
// not guaranteed to be wholly known nor match the given prior state, which
// could lead to unexpected provider behaviors for practitioners.
type ReadResource_Request struct { type ReadResource_Request struct {
state protoimpl.MessageState state protoimpl.MessageState
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache


@ -183,6 +183,15 @@ message PrepareProviderConfig {
} }
message UpgradeResourceState { message UpgradeResourceState {
// Request is the message that is sent to the provider during the
// UpgradeResourceState RPC.
//
// This message intentionally does not include configuration data as any
// configuration-based or configuration-conditional changes should occur
// during the PlanResourceChange RPC. Additionally, the configuration is
// not guaranteed to exist (in the case of resource destruction), be wholly
// known, nor match the given prior state, which could lead to unexpected
// provider behaviors for practitioners.
message Request { message Request {
string type_name = 1; string type_name = 1;
@ -240,6 +249,14 @@ message Configure {
} }
message ReadResource { message ReadResource {
// Request is the message that is sent to the provider during the
// ReadResource RPC.
//
// This message intentionally does not include configuration data as any
// configuration-based or configuration-conditional changes should occur
// during the PlanResourceChange RPC. Additionally, the configuration is
// not guaranteed to be wholly known nor match the given prior state, which
// could lead to unexpected provider behaviors for practitioners.
message Request { message Request {
string type_name = 1; string type_name = 1;
DynamicValue current_state = 2; DynamicValue current_state = 2;


@ -54,7 +54,7 @@ type GetProviderSchemaResponse struct {
// will be specified in the provider block of the user's configuration. // will be specified in the provider block of the user's configuration.
Provider *Schema Provider *Schema
// ProviderMeta defines the schema for the provider's metadta, which // ProviderMeta defines the schema for the provider's metadata, which
// will be specified in the provider_meta blocks of the terraform block // will be specified in the provider_meta blocks of the terraform block
// for a module. This is an advanced feature and its usage should be // for a module. This is an advanced feature and its usage should be
// coordinated with the Terraform Core team by opening an issue at // coordinated with the Terraform Core team by opening an issue at


@ -77,3 +77,22 @@ func (s RawState) Unmarshal(typ tftypes.Type) (tftypes.Value, error) {
} }
return tftypes.Value{}, ErrUnknownRawStateType return tftypes.Value{}, ErrUnknownRawStateType
} }
// UnmarshalOpts contains options that can be used to modify the behaviour when
// unmarshalling. Currently, this only contains a struct for opts for JSON but
// could have a field for Flatmap in the future.
type UnmarshalOpts struct {
ValueFromJSONOpts tftypes.ValueFromJSONOpts
}
// UnmarshalWithOpts is identical to Unmarshal but also accepts a tftypes.UnmarshalOpts which contains
// options that can be used to modify the behaviour when unmarshalling JSON or Flatmap.
func (s RawState) UnmarshalWithOpts(typ tftypes.Type, opts UnmarshalOpts) (tftypes.Value, error) {
if s.JSON != nil {
return tftypes.ValueFromJSONWithOpts(s.JSON, typ, opts.ValueFromJSONOpts) //nolint:staticcheck
}
if s.Flatmap != nil {
return tftypes.Value{}, fmt.Errorf("flatmap states cannot be unmarshaled, only states written by Terraform 0.12 and higher can be unmarshaled")
}
return tftypes.Value{}, ErrUnknownRawStateType
}
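A short sketch of the new opt-in unmarshalling path using the types introduced above; the schema type and function name are illustrative.

package example

import (
	"github.com/hashicorp/terraform-plugin-go/tfprotov5"
	"github.com/hashicorp/terraform-plugin-go/tftypes"
)

// unmarshalLenient decodes raw state while ignoring attributes that are no
// longer present in the schema, via the new UnmarshalWithOpts entry point.
func unmarshalLenient(raw tfprotov5.RawState, schemaType tftypes.Type) (tftypes.Value, error) {
	return raw.UnmarshalWithOpts(schemaType, tfprotov5.UnmarshalOpts{
		ValueFromJSONOpts: tftypes.ValueFromJSONOpts{
			IgnoreUndefinedAttributes: true,
		},
	})
}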


@ -743,6 +743,7 @@ func (s *server) ReadResource(ctx context.Context, req *tfplugin5.ReadResource_R
} }
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "CurrentState", r.CurrentState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "CurrentState", r.CurrentState)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProviderMeta", r.ProviderMeta) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProviderMeta", r.ProviderMeta)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Request", "Private", r.Private)
ctx = tf5serverlogging.DownstreamRequest(ctx) ctx = tf5serverlogging.DownstreamRequest(ctx)
resp, err := s.downstream.ReadResource(ctx, r) resp, err := s.downstream.ReadResource(ctx, r)
if err != nil { if err != nil {
@ -751,6 +752,7 @@ func (s *server) ReadResource(ctx context.Context, req *tfplugin5.ReadResource_R
} }
tf5serverlogging.DownstreamResponse(ctx, resp.Diagnostics) tf5serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "NewState", resp.NewState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "NewState", resp.NewState)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Response", "Private", resp.Private)
ret, err := toproto.ReadResource_Response(resp) ret, err := toproto.ReadResource_Response(resp)
if err != nil { if err != nil {
logging.ProtocolError(ctx, "Error converting response to protobuf", map[string]interface{}{logging.KeyError: err}) logging.ProtocolError(ctx, "Error converting response to protobuf", map[string]interface{}{logging.KeyError: err})
@ -776,6 +778,7 @@ func (s *server) PlanResourceChange(ctx context.Context, req *tfplugin5.PlanReso
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "PriorState", r.PriorState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "PriorState", r.PriorState)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProposedNewState", r.ProposedNewState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProposedNewState", r.ProposedNewState)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProviderMeta", r.ProviderMeta) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProviderMeta", r.ProviderMeta)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Request", "PriorPrivate", r.PriorPrivate)
ctx = tf5serverlogging.DownstreamRequest(ctx) ctx = tf5serverlogging.DownstreamRequest(ctx)
resp, err := s.downstream.PlanResourceChange(ctx, r) resp, err := s.downstream.PlanResourceChange(ctx, r)
if err != nil { if err != nil {
@ -784,6 +787,7 @@ func (s *server) PlanResourceChange(ctx context.Context, req *tfplugin5.PlanReso
} }
tf5serverlogging.DownstreamResponse(ctx, resp.Diagnostics) tf5serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "PlannedState", resp.PlannedState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "PlannedState", resp.PlannedState)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Response", "PlannedPrivate", resp.PlannedPrivate)
ret, err := toproto.PlanResourceChange_Response(resp) ret, err := toproto.PlanResourceChange_Response(resp)
if err != nil { if err != nil {
logging.ProtocolError(ctx, "Error converting response to protobuf", map[string]interface{}{logging.KeyError: err}) logging.ProtocolError(ctx, "Error converting response to protobuf", map[string]interface{}{logging.KeyError: err})
@ -807,8 +811,9 @@ func (s *server) ApplyResourceChange(ctx context.Context, req *tfplugin5.ApplyRe
} }
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", r.Config) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", r.Config)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "PlannedState", r.PlannedState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "PlannedState", r.PlannedState)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", r.Config) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "PriorState", r.PriorState)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", r.Config) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProviderMeta", r.ProviderMeta)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Request", "PlannedPrivate", r.PlannedPrivate)
ctx = tf5serverlogging.DownstreamRequest(ctx) ctx = tf5serverlogging.DownstreamRequest(ctx)
resp, err := s.downstream.ApplyResourceChange(ctx, r) resp, err := s.downstream.ApplyResourceChange(ctx, r)
if err != nil { if err != nil {
@ -817,6 +822,7 @@ func (s *server) ApplyResourceChange(ctx context.Context, req *tfplugin5.ApplyRe
} }
tf5serverlogging.DownstreamResponse(ctx, resp.Diagnostics) tf5serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "NewState", resp.NewState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "NewState", resp.NewState)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Response", "Private", resp.Private)
ret, err := toproto.ApplyResourceChange_Response(resp) ret, err := toproto.ApplyResourceChange_Response(resp)
if err != nil { if err != nil {
logging.ProtocolError(ctx, "Error converting response to protobuf", map[string]interface{}{logging.KeyError: err}) logging.ProtocolError(ctx, "Error converting response to protobuf", map[string]interface{}{logging.KeyError: err})
@ -847,6 +853,7 @@ func (s *server) ImportResourceState(ctx context.Context, req *tfplugin5.ImportR
tf5serverlogging.DownstreamResponse(ctx, resp.Diagnostics) tf5serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
for _, importedResource := range resp.ImportedResources { for _, importedResource := range resp.ImportedResources {
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response_ImportedResource", "State", importedResource.State) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response_ImportedResource", "State", importedResource.State)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Response_ImportedResource", "Private", importedResource.Private)
} }
ret, err := toproto.ImportResourceState_Response(resp) ret, err := toproto.ImportResourceState_Response(resp)
if err != nil { if err != nil {


@ -34,7 +34,6 @@ func (d Diagnostics) ErrorCount() int {
// - Error severity at ERROR level // - Error severity at ERROR level
// - Warning severity at WARN level // - Warning severity at WARN level
// - Invalid/Unknown severity at WARN level // - Invalid/Unknown severity at WARN level
//
func (d Diagnostics) Log(ctx context.Context) { func (d Diagnostics) Log(ctx context.Context) {
for _, diagnostic := range d { for _, diagnostic := range d {
if diagnostic == nil { if diagnostic == nil {


@ -24,7 +24,6 @@ func DownstreamRequest(ctx context.Context) context.Context {
// - TRACE "Received downstream response" log with request duration and // - TRACE "Received downstream response" log with request duration and
// diagnostic severity counts // diagnostic severity counts
// - Per-diagnostic logs // - Per-diagnostic logs
//
func DownstreamResponse(ctx context.Context, diagnostics diag.Diagnostics) { func DownstreamResponse(ctx context.Context, diagnostics diag.Diagnostics) {
responseFields := map[string]interface{}{ responseFields := map[string]interface{}{
logging.KeyDiagnosticErrorCount: diagnostics.ErrorCount(), logging.KeyDiagnosticErrorCount: diagnostics.ErrorCount(),


@ -19,7 +19,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT. // Code generated by protoc-gen-go. DO NOT EDIT.
// versions: // versions:
// protoc-gen-go v1.28.0 // protoc-gen-go v1.28.1
// protoc v3.19.4 // protoc v3.19.4
// source: tfplugin6.proto // source: tfplugin6.proto
@ -1814,6 +1814,15 @@ func (x *ValidateProviderConfig_Response) GetDiagnostics() []*Diagnostic {
return nil return nil
} }
// Request is the message that is sent to the provider during the
// UpgradeResourceState RPC.
//
// This message intentionally does not include configuration data as any
// configuration-based or configuration-conditional changes should occur
// during the PlanResourceChange RPC. Additionally, the configuration is
// not guaranteed to exist (in the case of resource destruction), be wholly
// known, nor match the given prior state, which could lead to unexpected
// provider behaviors for practitioners.
type UpgradeResourceState_Request struct { type UpgradeResourceState_Request struct {
state protoimpl.MessageState state protoimpl.MessageState
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache
@ -2250,6 +2259,14 @@ func (x *ConfigureProvider_Response) GetDiagnostics() []*Diagnostic {
return nil return nil
} }
// Request is the message that is sent to the provider during the
// ReadResource RPC.
//
// This message intentionally does not include configuration data as any
// configuration-based or configuration-conditional changes should occur
// during the PlanResourceChange RPC. Additionally, the configuration is
// not guaranteed to be wholly known nor match the given prior state, which
// could lead to unexpected provider behaviors for practitioners.
type ReadResource_Request struct { type ReadResource_Request struct {
state protoimpl.MessageState state protoimpl.MessageState
sizeCache protoimpl.SizeCache sizeCache protoimpl.SizeCache


@ -201,6 +201,15 @@ message ValidateProviderConfig {
} }
message UpgradeResourceState { message UpgradeResourceState {
// Request is the message that is sent to the provider during the
// UpgradeResourceState RPC.
//
// This message intentionally does not include configuration data as any
// configuration-based or configuration-conditional changes should occur
// during the PlanResourceChange RPC. Additionally, the configuration is
// not guaranteed to exist (in the case of resource destruction), be wholly
// known, nor match the given prior state, which could lead to unexpected
// provider behaviors for practitioners.
message Request { message Request {
string type_name = 1; string type_name = 1;
@ -258,6 +267,14 @@ message ConfigureProvider {
} }
message ReadResource { message ReadResource {
// Request is the message that is sent to the provider during the
// ReadResource RPC.
//
// This message intentionally does not include configuration data as any
// configuration-based or configuration-conditional changes should occur
// during the PlanResourceChange RPC. Additionally, the configuration is
// not guaranteed to be wholly known nor match the given prior state, which
// could lead to unexpected provider behaviors for practitioners.
message Request { message Request {
string type_name = 1; string type_name = 1;
DynamicValue current_state = 2; DynamicValue current_state = 2;


@ -54,7 +54,7 @@ type GetProviderSchemaResponse struct {
// will be specified in the provider block of the user's configuration. // will be specified in the provider block of the user's configuration.
Provider *Schema Provider *Schema
// ProviderMeta defines the schema for the provider's metadta, which // ProviderMeta defines the schema for the provider's metadata, which
// will be specified in the provider_meta blocks of the terraform block // will be specified in the provider_meta blocks of the terraform block
// for a module. This is an advanced feature and its usage should be // for a module. This is an advanced feature and its usage should be
// coordinated with the Terraform Core team by opening an issue at // coordinated with the Terraform Core team by opening an issue at


@ -77,3 +77,22 @@ func (s RawState) Unmarshal(typ tftypes.Type) (tftypes.Value, error) {
} }
return tftypes.Value{}, ErrUnknownRawStateType return tftypes.Value{}, ErrUnknownRawStateType
} }
// UnmarshalOpts contains options that can be used to modify the behaviour when
// unmarshalling. Currently, this only contains a struct for opts for JSON but
// could have a field for Flatmap in the future.
type UnmarshalOpts struct {
ValueFromJSONOpts tftypes.ValueFromJSONOpts
}
// UnmarshalWithOpts is identical to Unmarshal but also accepts a tftypes.UnmarshalOpts which contains
// options that can be used to modify the behaviour when unmarshalling JSON or Flatmap.
func (s RawState) UnmarshalWithOpts(typ tftypes.Type, opts UnmarshalOpts) (tftypes.Value, error) {
if s.JSON != nil {
return tftypes.ValueFromJSONWithOpts(s.JSON, typ, opts.ValueFromJSONOpts) //nolint:staticcheck
}
if s.Flatmap != nil {
return tftypes.Value{}, fmt.Errorf("flatmap states cannot be unmarshaled, only states written by Terraform 0.12 and higher can be unmarshaled")
}
return tftypes.Value{}, ErrUnknownRawStateType
}


@ -741,6 +741,7 @@ func (s *server) ReadResource(ctx context.Context, req *tfplugin6.ReadResource_R
} }
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "CurrentState", r.CurrentState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "CurrentState", r.CurrentState)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProviderMeta", r.ProviderMeta) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProviderMeta", r.ProviderMeta)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Request", "Private", r.Private)
ctx = tf6serverlogging.DownstreamRequest(ctx) ctx = tf6serverlogging.DownstreamRequest(ctx)
resp, err := s.downstream.ReadResource(ctx, r) resp, err := s.downstream.ReadResource(ctx, r)
if err != nil { if err != nil {
@ -749,6 +750,7 @@ func (s *server) ReadResource(ctx context.Context, req *tfplugin6.ReadResource_R
} }
tf6serverlogging.DownstreamResponse(ctx, resp.Diagnostics) tf6serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "NewState", resp.NewState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "NewState", resp.NewState)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Response", "Private", resp.Private)
ret, err := toproto.ReadResource_Response(resp) ret, err := toproto.ReadResource_Response(resp)
if err != nil { if err != nil {
logging.ProtocolError(ctx, "Error converting response to protobuf", map[string]interface{}{logging.KeyError: err}) logging.ProtocolError(ctx, "Error converting response to protobuf", map[string]interface{}{logging.KeyError: err})
@ -774,6 +776,7 @@ func (s *server) PlanResourceChange(ctx context.Context, req *tfplugin6.PlanReso
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "PriorState", r.PriorState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "PriorState", r.PriorState)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProposedNewState", r.ProposedNewState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProposedNewState", r.ProposedNewState)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProviderMeta", r.ProviderMeta) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProviderMeta", r.ProviderMeta)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Request", "PriorPrivate", r.PriorPrivate)
ctx = tf6serverlogging.DownstreamRequest(ctx) ctx = tf6serverlogging.DownstreamRequest(ctx)
resp, err := s.downstream.PlanResourceChange(ctx, r) resp, err := s.downstream.PlanResourceChange(ctx, r)
if err != nil { if err != nil {
@ -782,6 +785,7 @@ func (s *server) PlanResourceChange(ctx context.Context, req *tfplugin6.PlanReso
} }
tf6serverlogging.DownstreamResponse(ctx, resp.Diagnostics) tf6serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "PlannedState", resp.PlannedState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "PlannedState", resp.PlannedState)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Response", "PlannedPrivate", resp.PlannedPrivate)
ret, err := toproto.PlanResourceChange_Response(resp) ret, err := toproto.PlanResourceChange_Response(resp)
if err != nil { if err != nil {
logging.ProtocolError(ctx, "Error converting response to protobuf", map[string]interface{}{logging.KeyError: err}) logging.ProtocolError(ctx, "Error converting response to protobuf", map[string]interface{}{logging.KeyError: err})
@ -805,8 +809,9 @@ func (s *server) ApplyResourceChange(ctx context.Context, req *tfplugin6.ApplyRe
} }
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", r.Config) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", r.Config)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "PlannedState", r.PlannedState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "PlannedState", r.PlannedState)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", r.Config) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "PriorState", r.PriorState)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "Config", r.Config) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Request", "ProviderMeta", r.ProviderMeta)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Request", "PlannedPrivate", r.PlannedPrivate)
ctx = tf6serverlogging.DownstreamRequest(ctx) ctx = tf6serverlogging.DownstreamRequest(ctx)
resp, err := s.downstream.ApplyResourceChange(ctx, r) resp, err := s.downstream.ApplyResourceChange(ctx, r)
if err != nil { if err != nil {
@ -815,6 +820,7 @@ func (s *server) ApplyResourceChange(ctx context.Context, req *tfplugin6.ApplyRe
} }
tf6serverlogging.DownstreamResponse(ctx, resp.Diagnostics) tf6serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "NewState", resp.NewState) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response", "NewState", resp.NewState)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Response", "Private", resp.Private)
ret, err := toproto.ApplyResourceChange_Response(resp) ret, err := toproto.ApplyResourceChange_Response(resp)
if err != nil { if err != nil {
logging.ProtocolError(ctx, "Error converting response to protobuf", map[string]interface{}{logging.KeyError: err}) logging.ProtocolError(ctx, "Error converting response to protobuf", map[string]interface{}{logging.KeyError: err})
@ -845,6 +851,7 @@ func (s *server) ImportResourceState(ctx context.Context, req *tfplugin6.ImportR
tf6serverlogging.DownstreamResponse(ctx, resp.Diagnostics) tf6serverlogging.DownstreamResponse(ctx, resp.Diagnostics)
for _, importedResource := range resp.ImportedResources { for _, importedResource := range resp.ImportedResources {
logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response_ImportedResource", "State", importedResource.State) logging.ProtocolData(ctx, s.protocolDataDir, rpc, "Response_ImportedResource", "State", importedResource.State)
logging.ProtocolPrivateData(ctx, s.protocolDataDir, rpc, "Response_ImportedResource", "Private", importedResource.Private)
} }
ret, err := toproto.ImportResourceState_Response(resp) ret, err := toproto.ImportResourceState_Response(resp)
if err != nil { if err != nil {


@ -260,17 +260,13 @@ func (val Value) Copy() Value {
// //
// The builtin Value representations are: // The builtin Value representations are:
// //
// * String: string, *string // - String: string, *string
// // - Number: *big.Float, int64, *int64, int32, *int32, int16, *int16, int8,
// * Number: *big.Float, int64, *int64, int32, *int32, int16, *int16, int8,
// *int8, int, *int, uint64, *uint64, uint32, *uint32, uint16, // *int8, int, *int, uint64, *uint64, uint32, *uint32, uint16,
// *uint16, uint8, *uint8, uint, *uint, float64, *float64 // *uint16, uint8, *uint8, uint, *uint, float64, *float64
// // - Bool: bool, *bool
// * Bool: bool, *bool // - Map and Object: map[string]Value
// // - Tuple, List, and Set: []Value
// * Map and Object: map[string]Value
//
// * Tuple, List, and Set: []Value
func NewValue(t Type, val interface{}) Value { func NewValue(t Type, val interface{}) Value {
v, err := newValue(t, val) v, err := newValue(t, val)
if err != nil { if err != nil {
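The builtin representations listed above map onto NewValue as in this small illustrative sketch.

package example

import "github.com/hashicorp/terraform-plugin-go/tftypes"

// objectValue builds an object value from the builtin Go representations
// listed above: string for String, bool for Bool, map[string]Value for Object.
func objectValue() tftypes.Value {
	objType := tftypes.Object{AttributeTypes: map[string]tftypes.Type{
		"name":    tftypes.String,
		"enabled": tftypes.Bool,
	}}
	return tftypes.NewValue(objType, map[string]tftypes.Value{
		"name":    tftypes.NewValue(tftypes.String, "example"),
		"enabled": tftypes.NewValue(tftypes.Bool, true),
	})
}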


@ -16,7 +16,24 @@ import (
// terraform-plugin-go. Third parties should not use it, and its behavior is // terraform-plugin-go. Third parties should not use it, and its behavior is
// not covered under the API compatibility guarantees. Don't use this. // not covered under the API compatibility guarantees. Don't use this.
func ValueFromJSON(data []byte, typ Type) (Value, error) { func ValueFromJSON(data []byte, typ Type) (Value, error) {
return jsonUnmarshal(data, typ, NewAttributePath()) return jsonUnmarshal(data, typ, NewAttributePath(), ValueFromJSONOpts{})
}
// ValueFromJSONOpts contains options that can be used to modify the behaviour when
// unmarshalling JSON.
type ValueFromJSONOpts struct {
// IgnoreUndefinedAttributes is used to ignore any attributes which appear in the
// JSON but do not have a corresponding entry in the schema. For example, raw state
// where an attribute has been removed from the schema.
IgnoreUndefinedAttributes bool
}
// ValueFromJSONWithOpts is identical to ValueFromJSON with the exception that it
// accepts ValueFromJSONOpts which can be used to modify the unmarshalling behaviour, such
// as ignoring undefined attributes, for instance. This can occur when the JSON
// being unmarshalled does not have a corresponding attribute in the schema.
func ValueFromJSONWithOpts(data []byte, typ Type, opts ValueFromJSONOpts) (Value, error) {
return jsonUnmarshal(data, typ, NewAttributePath(), opts)
} }
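For completeness, the lower-level call that RawState.UnmarshalWithOpts delegates to looks like this; note the doc comment above marks ValueFromJSON (and by extension this variant) as internal to terraform-plugin-go, so the sketch only illustrates the new opts parameter.

package example

import "github.com/hashicorp/terraform-plugin-go/tftypes"

// decodeIgnoringUndefined unmarshals JSON into the given type while skipping
// attributes the schema no longer defines.
func decodeIgnoringUndefined(data []byte, typ tftypes.Type) (tftypes.Value, error) {
	return tftypes.ValueFromJSONWithOpts(data, typ, tftypes.ValueFromJSONOpts{
		IgnoreUndefinedAttributes: true,
	})
}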
func jsonByteDecoder(buf []byte) *json.Decoder { func jsonByteDecoder(buf []byte) *json.Decoder {
@ -26,7 +43,7 @@ func jsonByteDecoder(buf []byte) *json.Decoder {
return dec return dec
} }
func jsonUnmarshal(buf []byte, typ Type, p *AttributePath) (Value, error) { func jsonUnmarshal(buf []byte, typ Type, p *AttributePath, opts ValueFromJSONOpts) (Value, error) {
dec := jsonByteDecoder(buf) dec := jsonByteDecoder(buf)
tok, err := dec.Token() tok, err := dec.Token()
@ -46,18 +63,17 @@ func jsonUnmarshal(buf []byte, typ Type, p *AttributePath) (Value, error) {
case typ.Is(Bool): case typ.Is(Bool):
return jsonUnmarshalBool(buf, typ, p) return jsonUnmarshalBool(buf, typ, p)
case typ.Is(DynamicPseudoType): case typ.Is(DynamicPseudoType):
return jsonUnmarshalDynamicPseudoType(buf, typ, p) return jsonUnmarshalDynamicPseudoType(buf, typ, p, opts)
case typ.Is(List{}): case typ.Is(List{}):
return jsonUnmarshalList(buf, typ.(List).ElementType, p) return jsonUnmarshalList(buf, typ.(List).ElementType, p, opts)
case typ.Is(Set{}): case typ.Is(Set{}):
return jsonUnmarshalSet(buf, typ.(Set).ElementType, p) return jsonUnmarshalSet(buf, typ.(Set).ElementType, p, opts)
case typ.Is(Map{}): case typ.Is(Map{}):
return jsonUnmarshalMap(buf, typ.(Map).ElementType, p) return jsonUnmarshalMap(buf, typ.(Map).ElementType, p, opts)
case typ.Is(Tuple{}): case typ.Is(Tuple{}):
return jsonUnmarshalTuple(buf, typ.(Tuple).ElementTypes, p) return jsonUnmarshalTuple(buf, typ.(Tuple).ElementTypes, p, opts)
case typ.Is(Object{}): case typ.Is(Object{}):
return jsonUnmarshalObject(buf, typ.(Object).AttributeTypes, p) return jsonUnmarshalObject(buf, typ.(Object).AttributeTypes, p, opts)
} }
return Value{}, p.NewErrorf("unknown type %s", typ) return Value{}, p.NewErrorf("unknown type %s", typ)
} }
@ -140,7 +156,7 @@ func jsonUnmarshalBool(buf []byte, _ Type, p *AttributePath) (Value, error) {
return Value{}, p.NewErrorf("unsupported type %T sent as %s", tok, Bool) return Value{}, p.NewErrorf("unsupported type %T sent as %s", tok, Bool)
} }
func jsonUnmarshalDynamicPseudoType(buf []byte, _ Type, p *AttributePath) (Value, error) { func jsonUnmarshalDynamicPseudoType(buf []byte, _ Type, p *AttributePath, opts ValueFromJSONOpts) (Value, error) {
dec := jsonByteDecoder(buf) dec := jsonByteDecoder(buf)
tok, err := dec.Token() tok, err := dec.Token()
if err != nil { if err != nil {
@ -190,10 +206,10 @@ func jsonUnmarshalDynamicPseudoType(buf []byte, _ Type, p *AttributePath) (Value
if valBody == nil { if valBody == nil {
return Value{}, p.NewErrorf("missing value in dynamically-typed value") return Value{}, p.NewErrorf("missing value in dynamically-typed value")
} }
return jsonUnmarshal(valBody, t, p) return jsonUnmarshal(valBody, t, p, opts)
} }
func jsonUnmarshalList(buf []byte, elementType Type, p *AttributePath) (Value, error) { func jsonUnmarshalList(buf []byte, elementType Type, p *AttributePath, opts ValueFromJSONOpts) (Value, error) {
dec := jsonByteDecoder(buf) dec := jsonByteDecoder(buf)
tok, err := dec.Token() tok, err := dec.Token()
@ -227,7 +243,7 @@ func jsonUnmarshalList(buf []byte, elementType Type, p *AttributePath) (Value, e
if err != nil { if err != nil {
return Value{}, innerPath.NewErrorf("error decoding value: %w", err) return Value{}, innerPath.NewErrorf("error decoding value: %w", err)
} }
val, err := jsonUnmarshal(rawVal, elementType, innerPath) val, err := jsonUnmarshal(rawVal, elementType, innerPath, opts)
if err != nil { if err != nil {
return Value{}, err return Value{}, err
} }
@ -254,7 +270,7 @@ func jsonUnmarshalList(buf []byte, elementType Type, p *AttributePath) (Value, e
}, vals), nil }, vals), nil
} }
func jsonUnmarshalSet(buf []byte, elementType Type, p *AttributePath) (Value, error) { func jsonUnmarshalSet(buf []byte, elementType Type, p *AttributePath, opts ValueFromJSONOpts) (Value, error) {
dec := jsonByteDecoder(buf) dec := jsonByteDecoder(buf)
tok, err := dec.Token() tok, err := dec.Token()
@ -284,7 +300,7 @@ func jsonUnmarshalSet(buf []byte, elementType Type, p *AttributePath) (Value, er
if err != nil { if err != nil {
return Value{}, innerPath.NewErrorf("error decoding value: %w", err) return Value{}, innerPath.NewErrorf("error decoding value: %w", err)
} }
val, err := jsonUnmarshal(rawVal, elementType, innerPath) val, err := jsonUnmarshal(rawVal, elementType, innerPath, opts)
if err != nil { if err != nil {
return Value{}, err return Value{}, err
} }
@ -310,7 +326,7 @@ func jsonUnmarshalSet(buf []byte, elementType Type, p *AttributePath) (Value, er
}, vals), nil }, vals), nil
} }
func jsonUnmarshalMap(buf []byte, attrType Type, p *AttributePath) (Value, error) { func jsonUnmarshalMap(buf []byte, attrType Type, p *AttributePath, opts ValueFromJSONOpts) (Value, error) {
dec := jsonByteDecoder(buf) dec := jsonByteDecoder(buf)
tok, err := dec.Token() tok, err := dec.Token()
@ -341,7 +357,7 @@ func jsonUnmarshalMap(buf []byte, attrType Type, p *AttributePath) (Value, error
if err != nil { if err != nil {
return Value{}, innerPath.NewErrorf("error decoding value: %w", err) return Value{}, innerPath.NewErrorf("error decoding value: %w", err)
} }
val, err := jsonUnmarshal(rawVal, attrType, innerPath) val, err := jsonUnmarshal(rawVal, attrType, innerPath, opts)
if err != nil { if err != nil {
return Value{}, err return Value{}, err
} }
@ -360,7 +376,7 @@ func jsonUnmarshalMap(buf []byte, attrType Type, p *AttributePath) (Value, error
}, vals), nil }, vals), nil
} }
func jsonUnmarshalTuple(buf []byte, elementTypes []Type, p *AttributePath) (Value, error) { func jsonUnmarshalTuple(buf []byte, elementTypes []Type, p *AttributePath, opts ValueFromJSONOpts) (Value, error) {
dec := jsonByteDecoder(buf) dec := jsonByteDecoder(buf)
tok, err := dec.Token() tok, err := dec.Token()
@ -398,7 +414,7 @@ func jsonUnmarshalTuple(buf []byte, elementTypes []Type, p *AttributePath) (Valu
if err != nil { if err != nil {
return Value{}, innerPath.NewErrorf("error decoding value: %w", err) return Value{}, innerPath.NewErrorf("error decoding value: %w", err)
} }
val, err := jsonUnmarshal(rawVal, elementType, innerPath) val, err := jsonUnmarshal(rawVal, elementType, innerPath, opts)
if err != nil { if err != nil {
return Value{}, err return Value{}, err
} }
@ -422,7 +438,9 @@ func jsonUnmarshalTuple(buf []byte, elementTypes []Type, p *AttributePath) (Valu
}, vals), nil }, vals), nil
} }
func jsonUnmarshalObject(buf []byte, attrTypes map[string]Type, p *AttributePath) (Value, error) { // jsonUnmarshalObject attempts to decode JSON object structure to tftypes.Value object.
// opts contains fields that can be used to modify the behaviour of JSON unmarshalling.
func jsonUnmarshalObject(buf []byte, attrTypes map[string]Type, p *AttributePath, opts ValueFromJSONOpts) (Value, error) {
dec := jsonByteDecoder(buf) dec := jsonByteDecoder(buf)
tok, err := dec.Token() tok, err := dec.Token()
@ -435,27 +453,32 @@ func jsonUnmarshalObject(buf []byte, attrTypes map[string]Type, p *AttributePath
vals := map[string]Value{} vals := map[string]Value{}
for dec.More() { for dec.More() {
innerPath := p.WithElementKeyValue(NewValue(String, UnknownValue))
tok, err := dec.Token() tok, err := dec.Token()
if err != nil { if err != nil {
return Value{}, innerPath.NewErrorf("error reading token: %w", err) return Value{}, p.NewErrorf("error reading object attribute key token: %w", err)
} }
key, ok := tok.(string) key, ok := tok.(string)
if !ok { if !ok {
return Value{}, innerPath.NewErrorf("object attribute key was %T, not string", tok) return Value{}, p.NewErrorf("object attribute key was %T with value %v, not string", tok, tok)
} }
innerPath := p.WithAttributeName(key)
attrType, ok := attrTypes[key] attrType, ok := attrTypes[key]
if !ok { if !ok {
if opts.IgnoreUndefinedAttributes {
// We are trying to ignore the key and value of any unsupported attribute.
_ = dec.Decode(new(json.RawMessage))
continue
}
return Value{}, innerPath.NewErrorf("unsupported attribute %q", key) return Value{}, innerPath.NewErrorf("unsupported attribute %q", key)
} }
innerPath = p.WithAttributeName(key)
var rawVal json.RawMessage var rawVal json.RawMessage
err = dec.Decode(&rawVal) err = dec.Decode(&rawVal)
if err != nil { if err != nil {
return Value{}, innerPath.NewErrorf("error decoding value: %w", err) return Value{}, innerPath.NewErrorf("error decoding value: %w", err)
} }
val, err := jsonUnmarshal(rawVal, attrType, innerPath) val, err := jsonUnmarshal(rawVal, attrType, innerPath, opts)
if err != nil { if err != nil {
return Value{}, err return Value{}, err
} }

View File

@ -1,3 +1,5 @@
Copyright (c) 2019 HashiCorp, Inc.
Mozilla Public License, version 2.0 Mozilla Public License, version 2.0
1. Definitions 1. Definitions

View File

@ -3,7 +3,6 @@ package logging
import ( import (
"fmt" "fmt"
"io" "io"
"io/ioutil"
"log" "log"
"os" "os"
"strings" "strings"
@ -32,7 +31,7 @@ var ValidLevels = []logutils.LogLevel{"TRACE", "DEBUG", "INFO", "WARN", "ERROR"}
// environment variable. Calls to tflog.* will have their output managed by the // environment variable. Calls to tflog.* will have their output managed by the
// tfsdklog sink. // tfsdklog sink.
func LogOutput(t testing.T) (logOutput io.Writer, err error) { func LogOutput(t testing.T) (logOutput io.Writer, err error) {
logOutput = ioutil.Discard logOutput = io.Discard
logLevel := LogLevel() logLevel := LogLevel()
if logLevel == "" { if logLevel == "" {
@ -88,7 +87,7 @@ func LogOutput(t testing.T) (logOutput io.Writer, err error) {
// SetOutput checks for a log destination with LogOutput, and calls // SetOutput checks for a log destination with LogOutput, and calls
// log.SetOutput with the result. If LogOutput returns nil, SetOutput uses // log.SetOutput with the result. If LogOutput returns nil, SetOutput uses
// ioutil.Discard. Any error from LogOutput is fatal. // io.Discard. Any error from LogOutput is fatal.
func SetOutput(t testing.T) { func SetOutput(t testing.T) {
out, err := LogOutput(t) out, err := LogOutput(t)
if err != nil { if err != nil {
@ -96,7 +95,7 @@ func SetOutput(t testing.T) {
} }
if out == nil { if out == nil {
out = ioutil.Discard out = io.Discard
} }
log.SetOutput(out) log.SetOutput(out)

View File

@ -3,7 +3,7 @@ package resource
import ( import (
"context" "context"
"fmt" "fmt"
"io/ioutil" "io"
"os" "os"
"strings" "strings"
"sync" "sync"
@ -157,6 +157,13 @@ func runProviderCommand(ctx context.Context, t testing.T, f func() error, wd *pl
host = v host = v
} }
// schema.Provider instances have a global stop context that is created outside
// the server context and have their own associated goroutine. Since
// Terraform does not call the StopProvider RPC to stop the server in
// reattach mode, ensure that we save these servers to later call that
// RPC and end those goroutines.
legacyProviderServers := make([]*schema.GRPCProviderServer, 0, len(factories.legacy))
// Spin up gRPC servers for every provider factory, start a // Spin up gRPC servers for every provider factory, start a
// WaitGroup to listen for all of the close channels. // WaitGroup to listen for all of the close channels.
var wg sync.WaitGroup var wg sync.WaitGroup
@ -180,18 +187,24 @@ func runProviderCommand(ctx context.Context, t testing.T, f func() error, wd *pl
// shut down. // shut down.
wg.Add(1) wg.Add(1)
grpcProviderServer := schema.NewGRPCProviderServer(provider)
legacyProviderServers = append(legacyProviderServers, grpcProviderServer)
// Ensure StopProvider is always called when returning early.
defer grpcProviderServer.StopProvider(ctx, nil) //nolint:errcheck // does not return errors
// configure the settings our plugin will be served with // configure the settings our plugin will be served with
// the GRPCProviderFunc wraps a non-gRPC provider server // the GRPCProviderFunc wraps a non-gRPC provider server
// into a gRPC interface, and the logger just discards logs // into a gRPC interface, and the logger just discards logs
// from go-plugin. // from go-plugin.
opts := &plugin.ServeOpts{ opts := &plugin.ServeOpts{
GRPCProviderFunc: func() tfprotov5.ProviderServer { GRPCProviderFunc: func() tfprotov5.ProviderServer {
return schema.NewGRPCProviderServer(provider) return grpcProviderServer
}, },
Logger: hclog.New(&hclog.LoggerOptions{ Logger: hclog.New(&hclog.LoggerOptions{
Name: "plugintest", Name: "plugintest",
Level: hclog.Trace, Level: hclog.Trace,
Output: ioutil.Discard, Output: io.Discard,
}), }),
NoLogOutputOverride: true, NoLogOutputOverride: true,
UseTFLogSink: t, UseTFLogSink: t,
@ -279,7 +292,7 @@ func runProviderCommand(ctx context.Context, t testing.T, f func() error, wd *pl
Logger: hclog.New(&hclog.LoggerOptions{ Logger: hclog.New(&hclog.LoggerOptions{
Name: "plugintest", Name: "plugintest",
Level: hclog.Trace, Level: hclog.Trace,
Output: ioutil.Discard, Output: io.Discard,
}), }),
NoLogOutputOverride: true, NoLogOutputOverride: true,
UseTFLogSink: t, UseTFLogSink: t,
@ -364,7 +377,7 @@ func runProviderCommand(ctx context.Context, t testing.T, f func() error, wd *pl
Logger: hclog.New(&hclog.LoggerOptions{ Logger: hclog.New(&hclog.LoggerOptions{
Name: "plugintest", Name: "plugintest",
Level: hclog.Trace, Level: hclog.Trace,
Output: ioutil.Discard, Output: io.Discard,
}), }),
NoLogOutputOverride: true, NoLogOutputOverride: true,
UseTFLogSink: t, UseTFLogSink: t,
@ -430,6 +443,12 @@ func runProviderCommand(ctx context.Context, t testing.T, f func() error, wd *pl
// get closed, and we'll hang here. // get closed, and we'll hang here.
cancel() cancel()
// For legacy providers, call the StopProvider RPC so the StopContext
// goroutine is cleaned up properly.
for _, legacyProviderServer := range legacyProviderServers {
legacyProviderServer.StopProvider(ctx, nil) //nolint:errcheck // does not return errors
}
logging.HelperResourceTrace(ctx, "Waiting for providers to stop") logging.HelperResourceTrace(ctx, "Waiting for providers to stop")
// wait for the servers to actually shut down; it may take a moment for // wait for the servers to actually shut down; it may take a moment for

View File

@ -9,7 +9,7 @@ import (
// providerConfig takes the list of providers in a TestCase and returns a // providerConfig takes the list of providers in a TestCase and returns a
// config with only empty provider blocks. This is useful for Import, where no // config with only empty provider blocks. This is useful for Import, where no
// config is provided, but the providers must be defined. // config is provided, but the providers must be defined.
func (c TestCase) providerConfig(_ context.Context) string { func (c TestCase) providerConfig(_ context.Context, skipProviderBlock bool) string {
var providerBlocks, requiredProviderBlocks strings.Builder var providerBlocks, requiredProviderBlocks strings.Builder
// [BF] The Providers field handling predates the logic being moved to this // [BF] The Providers field handling predates the logic being moved to this
@ -21,7 +21,9 @@ func (c TestCase) providerConfig(_ context.Context) string {
} }
for name, externalProvider := range c.ExternalProviders { for name, externalProvider := range c.ExternalProviders {
if !skipProviderBlock {
providerBlocks.WriteString(fmt.Sprintf("provider %q {}\n", name)) providerBlocks.WriteString(fmt.Sprintf("provider %q {}\n", name))
}
if externalProvider.Source == "" && externalProvider.VersionConstraint == "" { if externalProvider.Source == "" && externalProvider.VersionConstraint == "" {
continue continue
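The new skipProviderBlock flag lets a step's own provider block take precedence over the generated empty one. A hedged sketch of the situation it handles (provider, resource names, and version are illustrative, not part of this diff):

```go
package examplecloud_test

import (
	"testing"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)

func TestAccExamplecloudThing_ownProviderBlock(t *testing.T) {
	resource.Test(t, resource.TestCase{
		ExternalProviders: map[string]resource.ExternalProvider{
			"examplecloud": {
				Source:            "example/examplecloud", // hypothetical provider
				VersionConstraint: "1.0.0",
			},
		},
		Steps: []resource.TestStep{
			{
				// The configuration declares its own provider block, so the
				// framework now skips emitting an empty
				// `provider "examplecloud" {}` block and only prepends the
				// required_providers setup.
				Config: `
provider "examplecloud" {
  region = "us-east-1"
}

resource "examplecloud_thing" "test" {}
`,
			},
		},
	})
}
```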

View File

@ -39,7 +39,6 @@ func (c TestCase) hasProviders(_ context.Context) bool {
// - No overlapping ExternalProviders and Providers entries // - No overlapping ExternalProviders and Providers entries
// - No overlapping ExternalProviders and ProviderFactories entries // - No overlapping ExternalProviders and ProviderFactories entries
// - TestStep validations performed by the (TestStep).validate() method. // - TestStep validations performed by the (TestStep).validate() method.
//
func (c TestCase) validate(ctx context.Context) error { func (c TestCase) validate(ctx context.Context) error {
logging.HelperResourceTrace(ctx, "Validating TestCase") logging.HelperResourceTrace(ctx, "Validating TestCase")

View File

@ -551,6 +551,16 @@ type TestStep struct {
// ImportStateCheck checks the results of ImportState. It should be // ImportStateCheck checks the results of ImportState. It should be
// used to verify that the resulting value of ImportState has the // used to verify that the resulting value of ImportState has the
// proper resources, IDs, and attributes. // proper resources, IDs, and attributes.
//
// Prefer ImportStateVerify over ImportStateCheck, unless the resource
// import explicitly is expected to create multiple resources (not a
// recommended resource implementation) or if attributes are imported with
// syntactically different but semantically/functionally equivalent values
// where special logic is needed.
//
// Terraform versions 1.3 and later can include data source states during
// import, which the testing framework will skip to prevent the need for
// Terraform version specific logic in provider testing.
ImportStateCheck ImportStateCheckFunc ImportStateCheck ImportStateCheckFunc
// ImportStateVerify, if true, will also check that the state values // ImportStateVerify, if true, will also check that the state values
@ -564,6 +574,28 @@ type TestStep struct {
ImportStateVerify bool ImportStateVerify bool
ImportStateVerifyIgnore []string ImportStateVerifyIgnore []string
// ImportStatePersist, if true, will update the persisted state with the
// state generated by the import operation (i.e., terraform import). When
// false (default) the state generated by the import operation is discarded
// at the end of the test step that is verifying import behavior.
ImportStatePersist bool
//---------------------------------------------------------------
// RefreshState testing
//---------------------------------------------------------------
// RefreshState, if true, will test the functionality of `terraform
// refresh` by refreshing the state, running any checks against the
// refreshed state, and running a plan to verify against unexpected plan
// differences.
//
// If the refresh is expected to result in a non-empty plan
// ExpectNonEmptyPlan should be set to true in the same TestStep.
//
// RefreshState cannot be the first TestStep and it is mutually exclusive
// with ImportState.
RefreshState bool
// ProviderFactories can be specified for the providers that are valid for // ProviderFactories can be specified for the providers that are valid for
// this TestStep. When providers are specified at the TestStep level, all // this TestStep. When providers are specified at the TestStep level, all
// TestStep within a TestCase must declare providers. // TestStep within a TestCase must declare providers.
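Taken together, the new ImportStatePersist and RefreshState fields allow step sequences like the following hedged sketch; the provider factories, configuration string, and resource address are assumptions for illustration, not part of this diff:

```go
package examplecloud_test

import (
	"testing"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)

func TestAccExamplecloudThing_importAndRefresh(t *testing.T) {
	resource.Test(t, resource.TestCase{
		ProviderFactories: testAccProviderFactories, // assumed to exist in the test package
		Steps: []resource.TestStep{
			{
				Config: testAccExamplecloudThingConfig, // assumed configuration string
			},
			{
				// Import the resource and keep the imported state in the
				// working directory instead of discarding it.
				ResourceName:       "examplecloud_thing.test",
				ImportState:        true,
				ImportStateVerify:  true,
				ImportStatePersist: true,
			},
			{
				// RefreshState must not be the first step and cannot be
				// combined with Config; checks run against the refreshed state.
				RefreshState: true,
				Check: resource.TestCheckResourceAttrSet(
					"examplecloud_thing.test", "id",
				),
			},
		},
	})
}
```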

View File

@ -2,14 +2,13 @@ package resource
import ( import (
"context" "context"
"errors"
"fmt" "fmt"
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging" "github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform" "github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugintest"
) )
func testStepTaint(ctx context.Context, state *terraform.State, step TestStep) error { func testStepTaint(ctx context.Context, step TestStep, wd *plugintest.WorkingDir) error {
if len(step.Taint) == 0 { if len(step.Taint) == 0 {
return nil return nil
} }
@ -17,16 +16,10 @@ func testStepTaint(ctx context.Context, state *terraform.State, step TestStep) e
logging.HelperResourceTrace(ctx, fmt.Sprintf("Using TestStep Taint: %v", step.Taint)) logging.HelperResourceTrace(ctx, fmt.Sprintf("Using TestStep Taint: %v", step.Taint))
for _, p := range step.Taint { for _, p := range step.Taint {
m := state.RootModule() err := wd.Taint(ctx, p)
if m == nil { if err != nil {
return errors.New("no state") return fmt.Errorf("error tainting resource: %s", err)
} }
rs, ok := m.Resources[p]
if !ok {
return fmt.Errorf("resource %q not found in state", p)
}
logging.HelperResourceWarn(ctx, fmt.Sprintf("Explicitly tainting resource %q", p))
rs.Taint()
} }
return nil return nil
} }
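Taint handling now shells out to `terraform taint` through the test working directory rather than mutating the in-memory state. From a provider author's perspective the TestStep usage is unchanged; a short sketch with a hypothetical resource address and configuration:

```go
package examplecloud_test

import (
	"testing"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
)

func TestAccExamplecloudThing_taint(t *testing.T) {
	resource.Test(t, resource.TestCase{
		ProviderFactories: testAccProviderFactories, // assumed helper in the test package
		Steps: []resource.TestStep{
			{
				Config: testAccExamplecloudThingConfig, // assumed configuration string
			},
			{
				Config: testAccExamplecloudThingConfig,
				// Before applying this step, the framework now runs
				// `terraform taint examplecloud_thing.test` in the working
				// directory instead of editing the in-memory state.
				Taint: []string{"examplecloud_thing.test"},
			},
		},
	})
}
```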

View File

@ -8,7 +8,7 @@ import (
"github.com/davecgh/go-spew/spew" "github.com/davecgh/go-spew/spew"
tfjson "github.com/hashicorp/terraform-json" tfjson "github.com/hashicorp/terraform-json"
testing "github.com/mitchellh/go-testing-interface" "github.com/mitchellh/go-testing-interface"
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging" "github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging"
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugintest" "github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugintest"
@ -89,7 +89,7 @@ func runNewTest(ctx context.Context, t testing.T, c TestCase, helper *plugintest
}() }()
if c.hasProviders(ctx) { if c.hasProviders(ctx) {
err := wd.SetConfig(ctx, c.providerConfig(ctx)) err := wd.SetConfig(ctx, c.providerConfig(ctx, false))
if err != nil { if err != nil {
logging.HelperResourceError(ctx, logging.HelperResourceError(ctx,
@ -114,7 +114,7 @@ func runNewTest(ctx context.Context, t testing.T, c TestCase, helper *plugintest
logging.HelperResourceDebug(ctx, "Starting TestSteps") logging.HelperResourceDebug(ctx, "Starting TestSteps")
// use this to track last step succesfully applied // use this to track last step successfully applied
// acts as default for import tests // acts as default for import tests
var appliedCfg string var appliedCfg string
@ -152,29 +152,7 @@ func runNewTest(ctx context.Context, t testing.T, c TestCase, helper *plugintest
} }
if step.Config != "" && !step.Destroy && len(step.Taint) > 0 { if step.Config != "" && !step.Destroy && len(step.Taint) > 0 {
var state *terraform.State err := testStepTaint(ctx, step, wd)
err := runProviderCommand(ctx, t, func() error {
var err error
state, err = getState(ctx, t, wd)
if err != nil {
return err
}
return nil
}, wd, providers)
if err != nil {
logging.HelperResourceError(ctx,
"TestStep error reading prior state before tainting resources",
map[string]interface{}{logging.KeyError: err},
)
t.Fatalf("TestStep %d/%d error reading prior state before tainting resources: %s", stepNumber, len(c.Steps), err)
}
err = testStepTaint(ctx, state, step)
if err != nil { if err != nil {
logging.HelperResourceError(ctx, logging.HelperResourceError(ctx,
@ -192,7 +170,7 @@ func runNewTest(ctx context.Context, t testing.T, c TestCase, helper *plugintest
protov6: protov6ProviderFactories(c.ProtoV6ProviderFactories).merge(step.ProtoV6ProviderFactories), protov6: protov6ProviderFactories(c.ProtoV6ProviderFactories).merge(step.ProtoV6ProviderFactories),
} }
providerCfg := step.providerConfig(ctx) providerCfg := step.providerConfig(ctx, step.configHasProviderBlock(ctx))
err := wd.SetConfig(ctx, providerCfg) err := wd.SetConfig(ctx, providerCfg)
@ -263,6 +241,45 @@ func runNewTest(ctx context.Context, t testing.T, c TestCase, helper *plugintest
continue continue
} }
if step.RefreshState {
logging.HelperResourceTrace(ctx, "TestStep is RefreshState mode")
err := testStepNewRefreshState(ctx, t, wd, step, providers)
if step.ExpectError != nil {
logging.HelperResourceDebug(ctx, "Checking TestStep ExpectError")
if err == nil {
logging.HelperResourceError(ctx,
"Error running refresh: expected an error but got none",
)
t.Fatalf("Step %d/%d error running refresh: expected an error but got none", stepNumber, len(c.Steps))
}
if !step.ExpectError.MatchString(err.Error()) {
logging.HelperResourceError(ctx,
fmt.Sprintf("Error running refresh: expected an error with pattern (%s)", step.ExpectError.String()),
map[string]interface{}{logging.KeyError: err},
)
t.Fatalf("Step %d/%d error running refresh, expected an error with pattern (%s), no match on: %s", stepNumber, len(c.Steps), step.ExpectError.String(), err)
}
} else {
if err != nil && c.ErrorCheck != nil {
logging.HelperResourceDebug(ctx, "Calling TestCase ErrorCheck")
err = c.ErrorCheck(err)
logging.HelperResourceDebug(ctx, "Called TestCase ErrorCheck")
}
if err != nil {
logging.HelperResourceError(ctx,
"Error running refresh",
map[string]interface{}{logging.KeyError: err},
)
t.Fatalf("Step %d/%d error running refresh: %s", stepNumber, len(c.Steps), err)
}
}
logging.HelperResourceDebug(ctx, "Finished TestStep")
continue
}
if step.Config != "" { if step.Config != "" {
logging.HelperResourceTrace(ctx, "TestStep is Config mode") logging.HelperResourceTrace(ctx, "TestStep is Config mode")
@ -300,7 +317,7 @@ func runNewTest(ctx context.Context, t testing.T, c TestCase, helper *plugintest
} }
} }
appliedCfg = step.Config appliedCfg = step.mergedConfig(ctx, c)
logging.HelperResourceDebug(ctx, "Finished TestStep") logging.HelperResourceDebug(ctx, "Finished TestStep")
@ -354,7 +371,7 @@ func testIDRefresh(ctx context.Context, t testing.T, c TestCase, wd *plugintest.
// Temporarily set the config to a minimal provider config for the refresh // Temporarily set the config to a minimal provider config for the refresh
// test. After the refresh we can reset it. // test. After the refresh we can reset it.
err := wd.SetConfig(ctx, c.providerConfig(ctx)) err := wd.SetConfig(ctx, c.providerConfig(ctx, step.configHasProviderBlock(ctx)))
if err != nil { if err != nil {
t.Fatalf("Error setting import test config: %s", err) t.Fatalf("Error setting import test config: %s", err)
} }

View File

@ -16,7 +16,7 @@ import (
func testStepNewConfig(ctx context.Context, t testing.T, c TestCase, wd *plugintest.WorkingDir, step TestStep, providers *providerFactories) error { func testStepNewConfig(ctx context.Context, t testing.T, c TestCase, wd *plugintest.WorkingDir, step TestStep, providers *providerFactories) error {
t.Helper() t.Helper()
err := wd.SetConfig(ctx, step.Config) err := wd.SetConfig(ctx, step.mergedConfig(ctx, c))
if err != nil { if err != nil {
return fmt.Errorf("Error setting config: %w", err) return fmt.Errorf("Error setting config: %w", err)
} }

View File

@ -7,7 +7,7 @@ import (
"strings" "strings"
"github.com/davecgh/go-spew/spew" "github.com/davecgh/go-spew/spew"
testing "github.com/mitchellh/go-testing-interface" "github.com/mitchellh/go-testing-interface"
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging" "github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging"
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugintest" "github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugintest"
@ -86,8 +86,17 @@ func testStepNewImportState(ctx context.Context, t testing.T, helper *plugintest
t.Fatal("Cannot import state with no specified config") t.Fatal("Cannot import state with no specified config")
} }
} }
importWd := helper.RequireNewWorkingDir(ctx, t)
var importWd *plugintest.WorkingDir
// Use the same working directory to persist the state from import
if step.ImportStatePersist {
importWd = wd
} else {
importWd = helper.RequireNewWorkingDir(ctx, t)
defer importWd.Close() defer importWd.Close()
}
err = importWd.SetConfig(ctx, step.Config) err = importWd.SetConfig(ctx, step.Config)
if err != nil { if err != nil {
t.Fatalf("Error setting test config: %s", err) t.Fatalf("Error setting test config: %s", err)
@ -95,12 +104,14 @@ func testStepNewImportState(ctx context.Context, t testing.T, helper *plugintest
logging.HelperResourceDebug(ctx, "Running Terraform CLI init and import") logging.HelperResourceDebug(ctx, "Running Terraform CLI init and import")
if !step.ImportStatePersist {
err = runProviderCommand(ctx, t, func() error { err = runProviderCommand(ctx, t, func() error {
return importWd.Init(ctx) return importWd.Init(ctx)
}, importWd, providers) }, importWd, providers)
if err != nil { if err != nil {
t.Fatalf("Error running init: %s", err) t.Fatalf("Error running init: %s", err)
} }
}
err = runProviderCommand(ctx, t, func() error { err = runProviderCommand(ctx, t, func() error {
return importWd.Import(ctx, step.ResourceName, importId) return importWd.Import(ctx, step.ResourceName, importId)
@ -126,13 +137,19 @@ func testStepNewImportState(ctx context.Context, t testing.T, helper *plugintest
logging.HelperResourceTrace(ctx, "Using TestStep ImportStateCheck") logging.HelperResourceTrace(ctx, "Using TestStep ImportStateCheck")
var states []*terraform.InstanceState var states []*terraform.InstanceState
for _, r := range importState.RootModule().Resources { for address, r := range importState.RootModule().Resources {
if r.Primary != nil { if strings.HasPrefix(address, "data.") {
continue
}
if r.Primary == nil {
continue
}
is := r.Primary.DeepCopy() is := r.Primary.DeepCopy()
is.Ephemeral.Type = r.Type // otherwise the check function cannot see the type is.Ephemeral.Type = r.Type // otherwise the check function cannot see the type
states = append(states, is) states = append(states, is)
} }
}
logging.HelperResourceDebug(ctx, "Calling TestStep ImportStateCheck") logging.HelperResourceDebug(ctx, "Calling TestStep ImportStateCheck")
@ -147,20 +164,27 @@ func testStepNewImportState(ctx context.Context, t testing.T, helper *plugintest
if step.ImportStateVerify { if step.ImportStateVerify {
logging.HelperResourceTrace(ctx, "Using TestStep ImportStateVerify") logging.HelperResourceTrace(ctx, "Using TestStep ImportStateVerify")
newResources := importState.RootModule().Resources
oldResources := state.RootModule().Resources
for _, r := range newResources {
// Find the existing resource
var oldR *terraform.ResourceState
for r2Key, r2 := range oldResources {
// Ensure that we do not match against data sources as they // Ensure that we do not match against data sources as they
// cannot be imported and are not what we want to verify. // cannot be imported and are not what we want to verify.
// Mode is not present in ResourceState so we use the // Mode is not present in ResourceState so we use the
// stringified ResourceStateKey for comparison. // stringified ResourceStateKey for comparison.
if strings.HasPrefix(r2Key, "data.") { newResources := make(map[string]*terraform.ResourceState)
continue for k, v := range importState.RootModule().Resources {
if !strings.HasPrefix(k, "data.") {
newResources[k] = v
} }
}
oldResources := make(map[string]*terraform.ResourceState)
for k, v := range state.RootModule().Resources {
if !strings.HasPrefix(k, "data.") {
oldResources[k] = v
}
}
for _, r := range newResources {
// Find the existing resource
var oldR *terraform.ResourceState
for _, r2 := range oldResources {
if r2.Primary != nil && r2.Primary.ID == r.Primary.ID && r2.Type == r.Type && r2.Provider == r.Provider { if r2.Primary != nil && r2.Primary.ID == r.Primary.ID && r2.Type == r.Type && r2.Provider == r.Provider {
oldR = r2 oldR = r2

View File

@ -0,0 +1,97 @@
package resource
import (
"context"
"fmt"
"github.com/davecgh/go-spew/spew"
tfjson "github.com/hashicorp/terraform-json"
"github.com/mitchellh/go-testing-interface"
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging"
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/plugintest"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
)
func testStepNewRefreshState(ctx context.Context, t testing.T, wd *plugintest.WorkingDir, step TestStep, providers *providerFactories) error {
t.Helper()
spewConf := spew.NewDefaultConfig()
spewConf.SortKeys = true
var err error
// Explicitly ensure prior state exists before refresh.
err = runProviderCommand(ctx, t, func() error {
_, err = getState(ctx, t, wd)
if err != nil {
return err
}
return nil
}, wd, providers)
if err != nil {
t.Fatalf("Error getting state: %s", err)
}
err = runProviderCommand(ctx, t, func() error {
return wd.Refresh(ctx)
}, wd, providers)
if err != nil {
return err
}
var refreshState *terraform.State
err = runProviderCommand(ctx, t, func() error {
refreshState, err = getState(ctx, t, wd)
if err != nil {
return err
}
return nil
}, wd, providers)
if err != nil {
t.Fatalf("Error getting state: %s", err)
}
// Go through the refreshed state and verify
if step.Check != nil {
logging.HelperResourceDebug(ctx, "Calling TestStep Check for RefreshState")
if err := step.Check(refreshState); err != nil {
t.Fatal(err)
}
logging.HelperResourceDebug(ctx, "Called TestStep Check for RefreshState")
}
// do a plan
err = runProviderCommand(ctx, t, func() error {
return wd.CreatePlan(ctx)
}, wd, providers)
if err != nil {
return fmt.Errorf("Error running post-apply plan: %w", err)
}
var plan *tfjson.Plan
err = runProviderCommand(ctx, t, func() error {
var err error
plan, err = wd.SavedPlan(ctx)
return err
}, wd, providers)
if err != nil {
return fmt.Errorf("Error retrieving post-apply plan: %w", err)
}
if !planIsEmpty(plan) && !step.ExpectNonEmptyPlan {
var stdout string
err = runProviderCommand(ctx, t, func() error {
var err error
stdout, err = wd.SavedPlanRawStdout(ctx)
return err
}, wd, providers)
if err != nil {
return fmt.Errorf("Error retrieving formatted plan output: %w", err)
}
return fmt.Errorf("After refreshing state during this test step, a followup plan was not empty.\nstdout:\n\n%s", stdout)
}
return nil
}

View File

@ -3,17 +3,63 @@ package resource
import ( import (
"context" "context"
"fmt" "fmt"
"regexp"
"strings" "strings"
) )
var configProviderBlockRegex = regexp.MustCompile(`provider "?[a-zA-Z0-9_-]+"? {`)
// configHasProviderBlock returns true if the Config has declared a provider
// configuration block, e.g. provider "examplecloud" {...}
func (s TestStep) configHasProviderBlock(_ context.Context) bool {
return configProviderBlockRegex.MatchString(s.Config)
}
// configHasTerraformBlock returns true if the Config has declared a terraform
// configuration block, e.g. terraform {...}
func (s TestStep) configHasTerraformBlock(_ context.Context) bool {
return strings.Contains(s.Config, "terraform {")
}
// mergedConfig prepends any necessary terraform configuration blocks to the
// TestStep Config.
//
// If there are ExternalProviders configurations in either the TestCase or
// TestStep, the terraform configuration block should be included with the
// step configuration to prevent errors with providers outside the
// registry.terraform.io hostname or outside the hashicorp namespace.
func (s TestStep) mergedConfig(ctx context.Context, testCase TestCase) string {
var config strings.Builder
// Prevent issues with existing configurations containing the terraform
// configuration block.
if s.configHasTerraformBlock(ctx) {
config.WriteString(s.Config)
return config.String()
}
if testCase.hasProviders(ctx) {
config.WriteString(testCase.providerConfig(ctx, s.configHasProviderBlock(ctx)))
} else {
config.WriteString(s.providerConfig(ctx, s.configHasProviderBlock(ctx)))
}
config.WriteString(s.Config)
return config.String()
}
// providerConfig takes the list of providers in a TestStep and returns a // providerConfig takes the list of providers in a TestStep and returns a
// config with only empty provider blocks. This is useful for Import, where no // config with only empty provider blocks. This is useful for Import, where no
// config is provided, but the providers must be defined. // config is provided, but the providers must be defined.
func (s TestStep) providerConfig(_ context.Context) string { func (s TestStep) providerConfig(_ context.Context, skipProviderBlock bool) string {
var providerBlocks, requiredProviderBlocks strings.Builder var providerBlocks, requiredProviderBlocks strings.Builder
for name, externalProvider := range s.ExternalProviders { for name, externalProvider := range s.ExternalProviders {
if !skipProviderBlock {
providerBlocks.WriteString(fmt.Sprintf("provider %q {}\n", name)) providerBlocks.WriteString(fmt.Sprintf("provider %q {}\n", name))
}
if externalProvider.Source == "" && externalProvider.VersionConstraint == "" { if externalProvider.Source == "" && externalProvider.VersionConstraint == "" {
continue continue
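The configProviderBlockRegex decision above is easy to verify in isolation; a small sketch with made-up configurations:

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern the test framework uses to detect an explicit provider block.
var configProviderBlockRegex = regexp.MustCompile(`provider "?[a-zA-Z0-9_-]+"? {`)

func main() {
	withBlock := `
provider "examplecloud" {
  region = "us-east-1"
}

resource "examplecloud_thing" "test" {}
`
	withoutBlock := `resource "examplecloud_thing" "test" {}`

	fmt.Println(configProviderBlockRegex.MatchString(withBlock))    // true
	fmt.Println(configProviderBlockRegex.MatchString(withoutBlock)) // false
}
```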

View File

@ -43,7 +43,10 @@ func (s TestStep) hasProviders(_ context.Context) bool {
// validate ensures the TestStep is valid based on the following criteria: // validate ensures the TestStep is valid based on the following criteria:
// //
// - Config or ImportState is set. // - Config or ImportState or RefreshState is set.
// - Config and RefreshState are not both set.
// - RefreshState and Destroy are not both set.
// - RefreshState is not the first TestStep.
// - Providers are not specified (ExternalProviders, // - Providers are not specified (ExternalProviders,
// ProtoV5ProviderFactories, ProtoV6ProviderFactories, ProviderFactories) // ProtoV5ProviderFactories, ProtoV6ProviderFactories, ProviderFactories)
// if specified at the TestCase level. // if specified at the TestCase level.
@ -53,14 +56,37 @@ func (s TestStep) hasProviders(_ context.Context) bool {
// - No overlapping ExternalProviders and ProviderFactories entries // - No overlapping ExternalProviders and ProviderFactories entries
// - ResourceName is not empty when ImportState is true, ImportStateIdFunc // - ResourceName is not empty when ImportState is true, ImportStateIdFunc
// is not set, and ImportStateId is not set. // is not set, and ImportStateId is not set.
//
func (s TestStep) validate(ctx context.Context, req testStepValidateRequest) error { func (s TestStep) validate(ctx context.Context, req testStepValidateRequest) error {
ctx = logging.TestStepNumberContext(ctx, req.StepNumber) ctx = logging.TestStepNumberContext(ctx, req.StepNumber)
logging.HelperResourceTrace(ctx, "Validating TestStep") logging.HelperResourceTrace(ctx, "Validating TestStep")
if s.Config == "" && !s.ImportState { if s.Config == "" && !s.ImportState && !s.RefreshState {
err := fmt.Errorf("TestStep missing Config or ImportState") err := fmt.Errorf("TestStep missing Config or ImportState or RefreshState")
logging.HelperResourceError(ctx, "TestStep validation error", map[string]interface{}{logging.KeyError: err})
return err
}
if s.Config != "" && s.RefreshState {
err := fmt.Errorf("TestStep cannot have Config and RefreshState")
logging.HelperResourceError(ctx, "TestStep validation error", map[string]interface{}{logging.KeyError: err})
return err
}
if s.RefreshState && s.Destroy {
err := fmt.Errorf("TestStep cannot have RefreshState and Destroy")
logging.HelperResourceError(ctx, "TestStep validation error", map[string]interface{}{logging.KeyError: err})
return err
}
if s.RefreshState && req.StepNumber == 1 {
err := fmt.Errorf("TestStep cannot have RefreshState as first step")
logging.HelperResourceError(ctx, "TestStep validation error", map[string]interface{}{logging.KeyError: err})
return err
}
if s.ImportState && s.RefreshState {
err := fmt.Errorf("TestStep cannot have ImportState and RefreshState in same step")
logging.HelperResourceError(ctx, "TestStep validation error", map[string]interface{}{logging.KeyError: err}) logging.HelperResourceError(ctx, "TestStep validation error", map[string]interface{}{logging.KeyError: err})
return err return err
} }

View File

@ -160,7 +160,6 @@ func unsupportedTimeoutKeyError(key string) error {
// //
// StateEncode encodes the timeout into the ResourceData's InstanceState for // StateEncode encodes the timeout into the ResourceData's InstanceState for
// saving to state // saving to state
//
func (t *ResourceTimeout) DiffEncode(id *terraform.InstanceDiff) error { func (t *ResourceTimeout) DiffEncode(id *terraform.InstanceDiff) error {
return t.metaEncode(id) return t.metaEncode(id)
} }

View File

@ -311,24 +311,11 @@ type Schema struct {
// "parent_block_name.0.child_attribute_name". // "parent_block_name.0.child_attribute_name".
RequiredWith []string RequiredWith []string
// Deprecated defines warning diagnostic details to display to // Deprecated defines warning diagnostic details to display when
// practitioners configuring this attribute or block. The warning // practitioner configurations use this attribute or block. The warning
// diagnostic summary is automatically set to "Argument is deprecated" // diagnostic summary is automatically set to "Argument is deprecated"
// along with configuration source file and line information. // along with configuration source file and line information.
// //
// This warning diagnostic is only displayed during Terraform's validation
// phase when this field is a non-empty string, when the attribute is
// Required or Optional, and if the practitioner configuration attempts to
// set the attribute value to a known value. It cannot detect practitioner
// configuration values that are unknown ("known after apply").
//
// This field has no effect when the attribute is Computed-only (read-only;
// not Required or Optional) and a practitioner attempts to reference
// this attribute value in their configuration. There is a Terraform
// feature request to support this type of functionality:
//
// https://github.com/hashicorp/terraform/issues/7569
//
// Set this field to a practitioner actionable message such as: // Set this field to a practitioner actionable message such as:
// //
// - "Configure other_attribute instead. This attribute will be removed // - "Configure other_attribute instead. This attribute will be removed
@ -337,6 +324,20 @@ type Schema struct {
// the attribute will be removed in the next major version of the // the attribute will be removed in the next major version of the
// provider." // provider."
// //
// In Terraform 1.2.7 and later, this warning diagnostic is displayed any
// time a practitioner attempts to configure a known value for this
// attribute and certain scenarios where this attribute is referenced.
//
// In Terraform 1.2.6 and earlier, this warning diagnostic is only
// displayed when the attribute is Required or Optional, and if the
// practitioner configuration attempts to set the attribute value to a
// known value. It cannot detect practitioner configuration values that
// are unknown ("known after apply").
//
// Additional information about deprecation enhancements for read-only
// attributes can be found in:
//
// - https://github.com/hashicorp/terraform/issues/7569
Deprecated string Deprecated string
// ValidateFunc allows individual fields to define arbitrary validation // ValidateFunc allows individual fields to define arbitrary validation
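A minimal sketch of the Deprecated field described above, with an actionable message of the recommended form; the resource and attribute names are hypothetical:

```go
package examplecloud

import (
	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
)

func resourceExamplecloudThing() *schema.Resource {
	return &schema.Resource{
		Schema: map[string]*schema.Schema{
			"region": {
				Type:     schema.TypeString,
				Optional: true,
				// Triggers a warning diagnostic with the summary
				// "Argument is deprecated" when practitioners configure it.
				Deprecated: "Configure endpoint instead. This attribute will be " +
					"removed in the next major version of the provider.",
			},
			"endpoint": {
				Type:     schema.TypeString,
				Optional: true,
			},
		},
	}
}
```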
@ -721,6 +722,9 @@ func (m schemaMap) Diff(
// Preserve the DestroyTainted flag // Preserve the DestroyTainted flag
result2.DestroyTainted = result.DestroyTainted result2.DestroyTainted = result.DestroyTainted
result2.RawConfig = result.RawConfig
result2.RawPlan = result.RawPlan
result2.RawState = result.RawState
// Reset the data to not contain state. We have to call init() // Reset the data to not contain state. We have to call init()
// again in order to reset the FieldReaders. // again in order to reset the FieldReaders.
@ -1089,9 +1093,10 @@ func checkKeysAgainstSchemaFlags(k string, keys []string, topSchemaMap schemaMap
return nil return nil
} }
var validFieldNameRe = regexp.MustCompile("^[a-z0-9_]+$")
func isValidFieldName(name string) bool { func isValidFieldName(name string) bool {
re := regexp.MustCompile("^[a-z0-9_]+$") return validFieldNameRe.MatchString(name)
return re.MatchString(name)
} }
// resourceDiffer is an interface that is used by the private diff functions. // resourceDiffer is an interface that is used by the private diff functions.
@ -1731,15 +1736,7 @@ func (m schemaMap) validate(
// The SDK has to allow the unknown value through initially, so that // The SDK has to allow the unknown value through initially, so that
// Required fields set via an interpolated value are accepted. // Required fields set via an interpolated value are accepted.
if !isWhollyKnown(raw) { if !isWhollyKnown(raw) {
if schema.Deprecated != "" { return nil
return append(diags, diag.Diagnostic{
Severity: diag.Warning,
Summary: "Argument is deprecated",
Detail: schema.Deprecated,
AttributePath: path,
})
}
return diags
} }
err = validateConflictingAttributes(k, schema, c) err = validateConflictingAttributes(k, schema, c)
@ -1942,7 +1939,7 @@ func (m schemaMap) validateList(
return append(diags, diag.Diagnostic{ return append(diags, diag.Diagnostic{
Severity: diag.Error, Severity: diag.Error,
Summary: "Too many list items", Summary: "Too many list items",
Detail: fmt.Sprintf("Attribute supports %d item maximum, but config has %d declared.", schema.MaxItems, rawV.Len()), Detail: fmt.Sprintf("Attribute %s supports %d item maximum, but config has %d declared.", k, schema.MaxItems, rawV.Len()),
AttributePath: path, AttributePath: path,
}) })
} }
@ -1951,7 +1948,7 @@ func (m schemaMap) validateList(
return append(diags, diag.Diagnostic{ return append(diags, diag.Diagnostic{
Severity: diag.Error, Severity: diag.Error,
Summary: "Not enough list items", Summary: "Not enough list items",
Detail: fmt.Sprintf("Attribute requires %d item minimum, but config has only %d declared.", schema.MinItems, rawV.Len()), Detail: fmt.Sprintf("Attribute %s requires %d item minimum, but config has only %d declared.", k, schema.MinItems, rawV.Len()),
AttributePath: path, AttributePath: path,
}) })
} }
@ -2130,7 +2127,7 @@ func validateMapValues(k string, m map[string]interface{}, schema *Schema, path
}) })
} }
default: default:
panic(fmt.Sprintf("Unknown validation type: %#v", schema.Type)) panic(fmt.Sprintf("Unknown validation type: %#v", valueType))
} }
} }
return diags return diags

View File

@ -31,6 +31,9 @@ const (
// The TestStep number of the test being executed. Starts at 1. // The TestStep number of the test being executed. Starts at 1.
KeyTestStepNumber = "test_step_number" KeyTestStepNumber = "test_step_number"
// Terraform configuration used during acceptance testing Terraform operations.
KeyTestTerraformConfiguration = "test_terraform_configuration"
// The Terraform CLI logging level (TF_LOG) used for an acceptance test. // The Terraform CLI logging level (TF_LOG) used for an acceptance test.
KeyTestTerraformLogLevel = "test_terraform_log_level" KeyTestTerraformLogLevel = "test_terraform_log_level"
@ -49,6 +52,9 @@ const (
// The path to the Terraform CLI used for an acceptance test. // The path to the Terraform CLI used for an acceptance test.
KeyTestTerraformPath = "test_terraform_path" KeyTestTerraformPath = "test_terraform_path"
// Terraform plan output generated during a TestStep.
KeyTestTerraformPlan = "test_terraform_plan"
// The working directory of the acceptance test. // The working directory of the acceptance test.
KeyTestWorkingDirectory = "test_working_directory" KeyTestWorkingDirectory = "test_working_directory"
) )

View File

@ -3,7 +3,6 @@ package plugintest
import ( import (
"context" "context"
"fmt" "fmt"
"io/ioutil"
"os" "os"
"strings" "strings"
@ -35,7 +34,7 @@ func DiscoverConfig(ctx context.Context, sourceDir string) (*Config, error) {
tfPath := os.Getenv(EnvTfAccTerraformPath) tfPath := os.Getenv(EnvTfAccTerraformPath)
tempDir := os.Getenv(EnvTfAccTempDir) tempDir := os.Getenv(EnvTfAccTempDir)
tfDir, err := ioutil.TempDir(tempDir, "plugintest-terraform") tfDir, err := os.MkdirTemp(tempDir, "plugintest-terraform")
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to create temp dir: %w", err) return nil, fmt.Errorf("failed to create temp dir: %w", err)
} }

View File

@ -4,7 +4,6 @@ import (
"context" "context"
"errors" "errors"
"fmt" "fmt"
"io/ioutil"
"os" "os"
"strings" "strings"
@ -70,7 +69,7 @@ func AutoInitHelper(ctx context.Context, sourceDir string) (*Helper, error) {
// automatically clean those up. // automatically clean those up.
func InitHelper(ctx context.Context, config *Config) (*Helper, error) { func InitHelper(ctx context.Context, config *Config) (*Helper, error) {
tempDir := os.Getenv(EnvTfAccTempDir) tempDir := os.Getenv(EnvTfAccTempDir)
baseDir, err := ioutil.TempDir(tempDir, "plugintest") baseDir, err := os.MkdirTemp(tempDir, "plugintest")
if err != nil { if err != nil {
return nil, fmt.Errorf("failed to create temporary directory for test helper: %s", err) return nil, fmt.Errorf("failed to create temporary directory for test helper: %s", err)
} }
@ -105,7 +104,7 @@ func (h *Helper) Close() error {
// program exits, the Close method on the helper itself will attempt to // program exits, the Close method on the helper itself will attempt to
// delete it. // delete it.
func (h *Helper) NewWorkingDir(ctx context.Context, t TestControl) (*WorkingDir, error) { func (h *Helper) NewWorkingDir(ctx context.Context, t TestControl) (*WorkingDir, error) {
dir, err := ioutil.TempDir(h.baseDir, "work") dir, err := os.MkdirTemp(h.baseDir, "work")
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@ -28,79 +28,40 @@ func symlinkFile(src string, dest string) error {
return nil return nil
} }
// symlinkDir is a simplistic function for recursively symlinking all files in a directory to a new path.
// It is intended only for limited internal use and does not cover all edge cases.
func symlinkDir(srcDir string, destDir string) (err error) {
srcInfo, err := os.Stat(srcDir)
if err != nil {
return err
}
err = os.MkdirAll(destDir, srcInfo.Mode())
if err != nil {
return err
}
directory, _ := os.Open(srcDir)
defer directory.Close()
objects, err := directory.Readdir(-1)
for _, obj := range objects {
srcPath := filepath.Join(srcDir, obj.Name())
destPath := filepath.Join(destDir, obj.Name())
if obj.IsDir() {
err = symlinkDir(srcPath, destPath)
if err != nil {
return err
}
} else {
err = symlinkFile(srcPath, destPath)
if err != nil {
return err
}
}
}
return
}
// symlinkDirectoriesOnly finds only the first-level child directories in srcDir // symlinkDirectoriesOnly finds only the first-level child directories in srcDir
// and symlinks them into destDir. // and symlinks them into destDir.
// Unlike symlinkDir, this is done non-recursively in order to limit the number // Unlike symlinkDir, this is done non-recursively in order to limit the number
// of file descriptors used. // of file descriptors used.
func symlinkDirectoriesOnly(srcDir string, destDir string) (err error) { func symlinkDirectoriesOnly(srcDir string, destDir string) error {
srcInfo, err := os.Stat(srcDir) srcInfo, err := os.Stat(srcDir)
if err != nil { if err != nil {
return err return fmt.Errorf("unable to stat source directory %q: %w", srcDir, err)
} }
err = os.MkdirAll(destDir, srcInfo.Mode()) err = os.MkdirAll(destDir, srcInfo.Mode())
if err != nil { if err != nil {
return err return fmt.Errorf("unable to make destination directory %q: %w", destDir, err)
} }
directory, err := os.Open(srcDir) dirEntries, err := os.ReadDir(srcDir)
if err != nil { if err != nil {
return err return fmt.Errorf("unable to read source directory %q: %w", srcDir, err)
} }
defer directory.Close()
objects, err := directory.Readdir(-1) for _, dirEntry := range dirEntries {
if !dirEntry.IsDir() {
continue
}
srcPath := filepath.Join(srcDir, dirEntry.Name())
destPath := filepath.Join(destDir, dirEntry.Name())
err := symlinkFile(srcPath, destPath)
if err != nil { if err != nil {
return err return fmt.Errorf("unable to symlink directory %q to %q: %w", srcPath, destPath, err)
}
for _, obj := range objects {
srcPath := filepath.Join(srcDir, obj.Name())
destPath := filepath.Join(destDir, obj.Name())
if obj.IsDir() {
err = symlinkFile(srcPath, destPath)
if err != nil {
return err
} }
} }
} return nil
return
} }

View File

@ -1,16 +1,15 @@
package plugintest package plugintest
import ( import (
"bytes"
"context" "context"
"encoding/json" "encoding/json"
"fmt" "fmt"
"io/ioutil"
"os" "os"
"path/filepath" "path/filepath"
"github.com/hashicorp/terraform-exec/tfexec" "github.com/hashicorp/terraform-exec/tfexec"
tfjson "github.com/hashicorp/terraform-json" tfjson "github.com/hashicorp/terraform-json"
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging" "github.com/hashicorp/terraform-plugin-sdk/v2/internal/logging"
) )
@ -75,6 +74,8 @@ func (wd *WorkingDir) GetHelper() *Helper {
// Destroy to establish the configuration. Any previously-set configuration is // Destroy to establish the configuration. Any previously-set configuration is
// discarded and any saved plan is cleared. // discarded and any saved plan is cleared.
func (wd *WorkingDir) SetConfig(ctx context.Context, cfg string) error { func (wd *WorkingDir) SetConfig(ctx context.Context, cfg string) error {
logging.HelperResourceTrace(ctx, "Setting Terraform configuration", map[string]any{logging.KeyTestTerraformConfiguration: cfg})
outFilename := filepath.Join(wd.baseDir, ConfigFileName) outFilename := filepath.Join(wd.baseDir, ConfigFileName)
rmFilename := filepath.Join(wd.baseDir, ConfigFileNameJSON) rmFilename := filepath.Join(wd.baseDir, ConfigFileNameJSON)
bCfg := []byte(cfg) bCfg := []byte(cfg)
@ -84,7 +85,7 @@ func (wd *WorkingDir) SetConfig(ctx context.Context, cfg string) error {
if err := os.Remove(rmFilename); err != nil && !os.IsNotExist(err) { if err := os.Remove(rmFilename); err != nil && !os.IsNotExist(err) {
return fmt.Errorf("unable to remove %q: %w", rmFilename, err) return fmt.Errorf("unable to remove %q: %w", rmFilename, err)
} }
err := ioutil.WriteFile(outFilename, bCfg, 0700) err := os.WriteFile(outFilename, bCfg, 0700)
if err != nil { if err != nil {
return err return err
} }
@ -173,11 +174,29 @@ func (wd *WorkingDir) planFilename() string {
func (wd *WorkingDir) CreatePlan(ctx context.Context) error { func (wd *WorkingDir) CreatePlan(ctx context.Context) error {
logging.HelperResourceTrace(ctx, "Calling Terraform CLI plan command") logging.HelperResourceTrace(ctx, "Calling Terraform CLI plan command")
_, err := wd.tf.Plan(context.Background(), tfexec.Reattach(wd.reattachInfo), tfexec.Refresh(false), tfexec.Out(PlanFileName)) hasChanges, err := wd.tf.Plan(context.Background(), tfexec.Reattach(wd.reattachInfo), tfexec.Refresh(false), tfexec.Out(PlanFileName))
logging.HelperResourceTrace(ctx, "Called Terraform CLI plan command") logging.HelperResourceTrace(ctx, "Called Terraform CLI plan command")
if err != nil {
return err return err
}
if !hasChanges {
logging.HelperResourceTrace(ctx, "Created plan with no changes")
return nil
}
stdout, err := wd.SavedPlanRawStdout(ctx)
if err != nil {
return fmt.Errorf("error retrieving formatted plan output: %w", err)
}
logging.HelperResourceTrace(ctx, "Created plan with changes", map[string]any{logging.KeyTestTerraformPlan: stdout})
return nil
} }
// CreateDestroyPlan runs "terraform plan -destroy" to create a saved plan // CreateDestroyPlan runs "terraform plan -destroy" to create a saved plan
@ -185,11 +204,29 @@ func (wd *WorkingDir) CreatePlan(ctx context.Context) error {
func (wd *WorkingDir) CreateDestroyPlan(ctx context.Context) error { func (wd *WorkingDir) CreateDestroyPlan(ctx context.Context) error {
logging.HelperResourceTrace(ctx, "Calling Terraform CLI plan -destroy command") logging.HelperResourceTrace(ctx, "Calling Terraform CLI plan -destroy command")
_, err := wd.tf.Plan(context.Background(), tfexec.Reattach(wd.reattachInfo), tfexec.Refresh(false), tfexec.Out(PlanFileName), tfexec.Destroy(true)) hasChanges, err := wd.tf.Plan(context.Background(), tfexec.Reattach(wd.reattachInfo), tfexec.Refresh(false), tfexec.Out(PlanFileName), tfexec.Destroy(true))
logging.HelperResourceTrace(ctx, "Called Terraform CLI plan -destroy command") logging.HelperResourceTrace(ctx, "Called Terraform CLI plan -destroy command")
if err != nil {
return err return err
}
if !hasChanges {
logging.HelperResourceTrace(ctx, "Created destroy plan with no changes")
return nil
}
stdout, err := wd.SavedPlanRawStdout(ctx)
if err != nil {
return fmt.Errorf("error retrieving formatted plan output: %w", err)
}
logging.HelperResourceTrace(ctx, "Created destroy plan with changes", map[string]any{logging.KeyTestTerraformPlan: stdout})
return nil
} }
// Apply runs "terraform apply". If CreatePlan has previously completed // Apply runs "terraform apply". If CreatePlan has previously completed
@ -242,11 +279,11 @@ func (wd *WorkingDir) SavedPlan(ctx context.Context) (*tfjson.Plan, error) {
return nil, fmt.Errorf("there is no current saved plan") return nil, fmt.Errorf("there is no current saved plan")
} }
logging.HelperResourceTrace(ctx, "Calling Terraform CLI apply command") logging.HelperResourceTrace(ctx, "Calling Terraform CLI show command for JSON plan")
plan, err := wd.tf.ShowPlanFile(context.Background(), wd.planFilename(), tfexec.Reattach(wd.reattachInfo)) plan, err := wd.tf.ShowPlanFile(context.Background(), wd.planFilename(), tfexec.Reattach(wd.reattachInfo))
logging.HelperResourceTrace(ctx, "Calling Terraform CLI apply command") logging.HelperResourceTrace(ctx, "Calling Terraform CLI show command for JSON plan")
return plan, err return plan, err
} }
@ -260,22 +297,17 @@ func (wd *WorkingDir) SavedPlanRawStdout(ctx context.Context) (string, error) {
return "", fmt.Errorf("there is no current saved plan") return "", fmt.Errorf("there is no current saved plan")
} }
var ret bytes.Buffer logging.HelperResourceTrace(ctx, "Calling Terraform CLI show command for stdout plan")
wd.tf.SetStdout(&ret) stdout, err := wd.tf.ShowPlanFileRaw(context.Background(), wd.planFilename(), tfexec.Reattach(wd.reattachInfo))
defer wd.tf.SetStdout(ioutil.Discard)
logging.HelperResourceTrace(ctx, "Calling Terraform CLI show command") logging.HelperResourceTrace(ctx, "Called Terraform CLI show command for stdout plan")
_, err := wd.tf.ShowPlanFileRaw(context.Background(), wd.planFilename(), tfexec.Reattach(wd.reattachInfo))
logging.HelperResourceTrace(ctx, "Called Terraform CLI show command")
if err != nil { if err != nil {
return "", err return "", err
} }
return ret.String(), nil return stdout, nil
} }
// State returns an object describing the current state. // State returns an object describing the current state.
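For context, the reworked SavedPlanRawStdout above delegates to terraform-exec's ShowPlanFileRaw instead of temporarily redirecting stdout. A standalone sketch of that call; the binary path, working directory, and plan filename are assumptions:

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/hashicorp/terraform-exec/tfexec"
)

func main() {
	// Working directory and Terraform binary path are assumptions for the sketch.
	tf, err := tfexec.NewTerraform("/tmp/example-workdir", "/usr/local/bin/terraform")
	if err != nil {
		log.Fatal(err)
	}

	// Render a previously saved plan file ("tfplan") as human-readable text,
	// the same call the reworked SavedPlanRawStdout now makes.
	stdout, err := tf.ShowPlanFileRaw(context.Background(), "tfplan")
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(stdout)
}
```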
@ -283,11 +315,11 @@ func (wd *WorkingDir) SavedPlanRawStdout(ctx context.Context) (string, error) {
// If the state cannot be read, State returns an error. // If the state cannot be read, State returns an error.
func (wd *WorkingDir) State(ctx context.Context) (*tfjson.State, error) { func (wd *WorkingDir) State(ctx context.Context) (*tfjson.State, error) {
logging.HelperResourceTrace(ctx, "Calling Terraform CLI show command") logging.HelperResourceTrace(ctx, "Calling Terraform CLI show command for JSON state")
state, err := wd.tf.Show(context.Background(), tfexec.Reattach(wd.reattachInfo)) state, err := wd.tf.Show(context.Background(), tfexec.Reattach(wd.reattachInfo))
logging.HelperResourceTrace(ctx, "Called Terraform CLI show command") logging.HelperResourceTrace(ctx, "Called Terraform CLI show command for JSON state")
return state, err return state, err
} }
@ -303,6 +335,17 @@ func (wd *WorkingDir) Import(ctx context.Context, resource, id string) error {
return err return err
} }
// Taint runs terraform taint
func (wd *WorkingDir) Taint(ctx context.Context, address string) error {
logging.HelperResourceTrace(ctx, "Calling Terraform CLI taint command")
err := wd.tf.Taint(context.Background(), address)
logging.HelperResourceTrace(ctx, "Called Terraform CLI taint command")
return err
}
// Refresh runs terraform refresh
func (wd *WorkingDir) Refresh(ctx context.Context) error {
logging.HelperResourceTrace(ctx, "Calling Terraform CLI refresh command")
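The changes above lean on two terraform-exec behaviours: tfexec.Plan reports via its boolean return whether the saved plan contains changes, and ShowPlanFileRaw returns the human-readable plan text directly instead of requiring stdout capture; the new Taint helper wraps the equivalent terraform-exec call. A minimal sketch of the same calls outside the test harness (working directory, binary path, and resource address are placeholders):

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/hashicorp/terraform-exec/tfexec"
)

func main() {
	ctx := context.Background()

	// Placeholder paths: a directory with Terraform configuration and a
	// terraform binary to execute.
	tf, err := tfexec.NewTerraform("./example-workdir", "terraform")
	if err != nil {
		log.Fatal(err)
	}
	if err := tf.Init(ctx); err != nil {
		log.Fatal(err)
	}

	// Plan returns true when the saved plan file contains changes, which is
	// the boolean the updated CreateDestroyPlan inspects.
	hasChanges, err := tf.Plan(ctx, tfexec.Out("tfplan"), tfexec.Destroy(true))
	if err != nil {
		log.Fatal(err)
	}

	if hasChanges {
		// ShowPlanFileRaw returns the rendered plan as a string, replacing
		// the older pattern of redirecting stdout into a buffer.
		human, err := tf.ShowPlanFileRaw(ctx, "tfplan")
		if err != nil {
			log.Fatal(err)
		}
		fmt.Println(human)
	}

	// The new WorkingDir.Taint helper above wraps this terraform-exec call;
	// the resource address is a placeholder.
	if err := tf.Taint(ctx, "null_resource.example"); err != nil {
		log.Fatal(err)
	}
}
```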

View File

@@ -55,6 +55,10 @@ type ServeOpts struct {
// information needed for Terraform to connect to the provider to stdout.
// os.Interrupt will be captured and used to stop the server.
//
// Ensure the ProviderAddr field is correctly set when this is enabled,
// otherwise the TF_REATTACH_PROVIDERS environment variable will not
// correctly point Terraform to the running provider binary.
//
// This option cannot be combined with TestConfig.
Debug bool
@@ -76,8 +80,11 @@ type ServeOpts struct {
// the terraform-plugin-log logging sink.
UseTFLogSink testing.T
-// ProviderAddr is the address of the provider under test, like
-// registry.terraform.io/hashicorp/random.
+// ProviderAddr is the address of the provider under test or debugging,
+// such as registry.terraform.io/hashicorp/random. This value is used in
// the TF_REATTACH_PROVIDERS environment variable during debugging so
// Terraform can correctly match the provider address in the Terraform
// configuration to the running provider binary.
ProviderAddr string
}
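In provider code this usually surfaces as a debug flag on the plugin entry point; a sketch under the assumption of a conventional main.go (the provider constructor is a placeholder, and the address is taken from the doc comment above rather than from this module):

```go
package main

import (
	"flag"

	"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema"
	"github.com/hashicorp/terraform-plugin-sdk/v2/plugin"
)

// newProvider stands in for this module's real provider constructor.
func newProvider() *schema.Provider {
	return &schema.Provider{}
}

func main() {
	debug := flag.Bool("debug", false, "run the provider with support for debuggers like delve")
	flag.Parse()

	plugin.Serve(&plugin.ServeOpts{
		ProviderFunc: newProvider,
		Debug:        *debug,
		// Must match the source address used in Terraform configurations,
		// otherwise TF_REATTACH_PROVIDERS will not match the provider.
		ProviderAddr: "registry.terraform.io/hashicorp/random",
	})
}
```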

View File

@@ -14,8 +14,8 @@ import (
"sync"
"github.com/hashicorp/go-cty/cty"
-multierror "github.com/hashicorp/go-multierror"
-uuid "github.com/hashicorp/go-uuid"
+"github.com/hashicorp/go-multierror"
+"github.com/hashicorp/go-uuid"
"github.com/mitchellh/copystructure"
"github.com/hashicorp/terraform-plugin-sdk/v2/internal/addrs"
@@ -1145,7 +1145,6 @@ func parseResourceStateKey(k string) (*ResourceStateKey, error) {
//
// Extra is just extra data that a provider can return that we store
// for later, but is not exposed in any way to the user.
-//
type ResourceState struct {
// This is filled in and managed by Terraform, and is the resource
// type itself such as "mycloud_instance". If a resource provider sets
@@ -1226,26 +1225,6 @@ func (s *ResourceState) Equal(other *ResourceState) bool {
return s.Primary.Equal(other.Primary)
}
// Taint marks a resource as tainted.
func (s *ResourceState) Taint() {
s.Lock()
defer s.Unlock()
if s.Primary != nil {
s.Primary.Tainted = true
}
}
// Untaint unmarks a resource as tainted.
func (s *ResourceState) Untaint() {
s.Lock()
defer s.Unlock()
if s.Primary != nil {
s.Primary.Tainted = false
}
}
func (s *ResourceState) init() {
s.Lock()
defer s.Unlock()

View File

@@ -49,6 +49,18 @@ type CapsuleOps struct {
// pointer identity of the encapsulated value.
RawEquals func(a, b interface{}) bool
// HashKey provides a hashing function for values of the corresponding
// capsule type. If defined, cty will use the resulting hashes as part
// of the implementation of sets whose element type is or contains the
// corresponding capsule type.
//
// If a capsule type defines HashKey then the function _must_ return
// an equal hash value for any two values that would cause Equals or
// RawEquals to return true when given those values. If a given type
// does not uphold that assumption then sets including this type will
// not behave correctly.
HashKey func(v interface{}) string
// ConversionFrom can provide conversions from the corresponding type to
// some other type when values of the corresponding type are used with
// the "convert" package. (The main cty package does not use this operation.)

View File

@@ -43,14 +43,14 @@ func getConversion(in cty.Type, out cty.Type, unsafe bool) conversion {
out = out.WithoutOptionalAttributesDeep()
if !isKnown {
-return cty.UnknownVal(out), nil
+return cty.UnknownVal(dynamicReplace(in.Type(), out)), nil
}
if isNull {
// We'll pass through nulls, albeit type converted, and let
// the caller deal with whatever handling they want to do in
// case null values are considered valid in some applications.
-return cty.NullVal(out), nil
+return cty.NullVal(dynamicReplace(in.Type(), out)), nil
}
}

View File

@@ -39,6 +39,11 @@ func conversionCollectionToList(ety cty.Type, conv conversion) conversion {
return cty.NilVal, err
}
}
if val.IsNull() {
val = cty.NullVal(val.Type().WithoutOptionalAttributesDeep())
}
elems = append(elems, val)
i++
@@ -50,7 +55,7 @@ func conversionCollectionToList(ety cty.Type, conv conversion) conversion {
if ety == cty.DynamicPseudoType {
return cty.ListValEmpty(val.Type().ElementType()), nil
}
-return cty.ListValEmpty(ety), nil
+return cty.ListValEmpty(ety.WithoutOptionalAttributesDeep()), nil
}
if !cty.CanListVal(elems) {
@@ -88,6 +93,11 @@ func conversionCollectionToSet(ety cty.Type, conv conversion) conversion {
return cty.NilVal, err
}
}
if val.IsNull() {
val = cty.NullVal(val.Type().WithoutOptionalAttributesDeep())
}
elems = append(elems, val)
i++
@@ -99,7 +109,7 @@ func conversionCollectionToSet(ety cty.Type, conv conversion) conversion {
if ety == cty.DynamicPseudoType {
return cty.SetValEmpty(val.Type().ElementType()), nil
}
-return cty.SetValEmpty(ety), nil
+return cty.SetValEmpty(ety.WithoutOptionalAttributesDeep()), nil
}
if !cty.CanSetVal(elems) {
@@ -180,7 +190,7 @@ func conversionTupleToSet(tupleType cty.Type, setEty cty.Type, unsafe bool) conv
if len(tupleEtys) == 0 {
// Empty tuple short-circuit
return func(val cty.Value, path cty.Path) (cty.Value, error) {
-return cty.SetValEmpty(setEty), nil
+return cty.SetValEmpty(setEty.WithoutOptionalAttributesDeep()), nil
}
}
@@ -242,6 +252,11 @@ func conversionTupleToSet(tupleType cty.Type, setEty cty.Type, unsafe bool) conv
return cty.NilVal, err
}
}
if val.IsNull() {
val = cty.NullVal(val.Type().WithoutOptionalAttributesDeep())
}
elems = append(elems, val)
i++
@@ -265,7 +280,7 @@ func conversionTupleToList(tupleType cty.Type, listEty cty.Type, unsafe bool) co
if len(tupleEtys) == 0 {
// Empty tuple short-circuit
return func(val cty.Value, path cty.Path) (cty.Value, error) {
-return cty.ListValEmpty(listEty), nil
+return cty.ListValEmpty(listEty.WithoutOptionalAttributesDeep()), nil
}
}
@@ -357,7 +372,7 @@ func conversionObjectToMap(objectType cty.Type, mapEty cty.Type, unsafe bool) co
if len(objectAtys) == 0 {
// Empty object short-circuit
return func(val cty.Value, path cty.Path) (cty.Value, error) {
-return cty.MapValEmpty(mapEty), nil
+return cty.MapValEmpty(mapEty.WithoutOptionalAttributesDeep()), nil
}
}
@@ -448,13 +463,28 @@ func conversionMapToObject(mapType cty.Type, objType cty.Type, unsafe bool) conv
elemConvs[name] = getConversion(mapEty, objectAty, unsafe)
if elemConvs[name] == nil {
-// If any of our element conversions are impossible, then the our
-// whole conversion is impossible.
+// This means that this conversion is impossible. Typically, we
+// would give up at this point and declare the whole conversion
// impossible. But, if this attribute is optional then maybe we will
// be able to do this conversion anyway provided the actual concrete
// map doesn't have this value set.
//
// We only do this in "unsafe" mode, because we cannot guarantee
// that the returned conversion will actually succeed once applied.
if objType.AttributeOptional(name) && unsafe {
// This attribute is optional, so let's leave this conversion in
// as a nil, and we can error later if we actually have to
// convert this.
continue
}
// Otherwise, give up. This conversion is impossible as we have a
// required attribute that doesn't match the map's inner type.
return nil
}
}
-// If we fall out here then a conversion is possible, using the
+// If we fall out here then a conversion may be possible, using the
// element conversions in elemConvs
return func(val cty.Value, path cty.Path) (cty.Value, error) {
elems := make(map[string]cty.Value, len(elemConvs))
@@ -474,12 +504,43 @@ func conversionMapToObject(mapType cty.Type, objType cty.Type, unsafe bool) conv
Key: name,
}
-conv := elemConvs[name.AsString()]
-if conv != nil {
+// There are 3 cases here:
+// 1. This attribute is not in elemConvs
// 2. This attribute is in elemConvs and is not nil
// 3. This attribute is in elemConvs and is nil.
// In case 1, we do not enter any of the branches below. This case
// means the attribute type is the same between the map and the
// object, and we don't need to do any conversion.
if conv, ok := elemConvs[name.AsString()]; conv != nil {
// This is case 2. The attribute type is different between the
// map and the object, and we know how to convert between them.
// So, we reset val to be the converted value and carry on.
val, err = conv(val, elemPath)
if err != nil {
return cty.NilVal, err
}
} else if ok {
// This is case 3 and it is an error. The attribute types are
// different between the map and the object, but we cannot
// convert between them.
//
// Now typically, this would be picked earlier on when we were
// building elemConvs. However, in the case of optional
// attributes there was a chance we could still convert the
// overall object even if this particular attribute was not
// convertable. This is because it could have not been set in
// the map, and we could skip over it here and set a null value.
//
// Since we reached this branch, we know that map did actually
// contain a non-convertable optional attribute. This means we
// error.
return cty.NilVal, path.NewErrorf("map element type is incompatible with attribute %q: %s", name.AsString(), MismatchMessage(val.Type(), objType.AttributeType(name.AsString())))
}
if val.IsNull() {
val = cty.NullVal(val.Type().WithoutOptionalAttributesDeep())
}
elems[name.AsString()] = val
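The behaviour described in these comments is easiest to see through the public convert package together with optional object attributes; a rough sketch, with the expected outcome inferred from this hunk rather than verified:

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/convert"
)

func main() {
	// Target object type: "labels" is optional and cannot be produced from a
	// map(string) element, while "name" can.
	objTy := cty.ObjectWithOptionalAttrs(map[string]cty.Type{
		"name":   cty.String,
		"labels": cty.Map(cty.Bool),
	}, []string{"labels"})

	// The source map never sets "labels", so per the logic above the unsafe
	// conversion should still succeed and leave "labels" null.
	src := cty.MapVal(map[string]cty.Value{
		"name": cty.StringVal("gitea"),
	})

	got, err := convert.Convert(src, objTy)
	fmt.Println(got.GoString(), err)
}
```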

View File

@@ -31,3 +31,107 @@ func dynamicFixup(wantType cty.Type) conversion {
func dynamicPassthrough(in cty.Value, path cty.Path) (cty.Value, error) {
return in, nil
}
// dynamicReplace aims to return the out type unchanged, but if it finds a
// dynamic type either directly or in any descendent elements it replaces them
// with the equivalent type from in.
//
// This function assumes that in and out are compatible from a Convert
// perspective, and will panic if it finds that they are not. For example if
// in is an object and out is a map, this function will still attempt to iterate
// through both as if they were the same.
func dynamicReplace(in, out cty.Type) cty.Type {
if in == cty.DynamicPseudoType || in == cty.NilType {
// Short circuit this case, there's no point worrying about this if in
// is a dynamic type or a nil type. Out is the best we can do.
return out
}
switch {
case out == cty.DynamicPseudoType:
// So replace out with in.
return in
case out.IsPrimitiveType(), out.IsCapsuleType():
// out is not dynamic and it doesn't contain descendent elements so just
// return it unchanged.
return out
case out.IsMapType():
var elemType cty.Type
// Maps are compatible with other maps or objects.
if in.IsMapType() {
elemType = dynamicReplace(in.ElementType(), out.ElementType())
}
if in.IsObjectType() {
var types []cty.Type
for _, t := range in.AttributeTypes() {
types = append(types, t)
}
unifiedType, _ := unify(types, true)
elemType = dynamicReplace(unifiedType, out.ElementType())
}
return cty.Map(elemType)
case out.IsObjectType():
// Objects are compatible with other objects and maps.
outTypes := map[string]cty.Type{}
if in.IsMapType() {
for attr, attrType := range out.AttributeTypes() {
outTypes[attr] = dynamicReplace(in.ElementType(), attrType)
}
}
if in.IsObjectType() {
for attr, attrType := range out.AttributeTypes() {
if !in.HasAttribute(attr) {
// If in does not have this attribute, then it is an
// optional attribute and there is nothing we can do except
// to return the type from out even if it is dynamic.
outTypes[attr] = attrType
continue
}
outTypes[attr] = dynamicReplace(in.AttributeType(attr), attrType)
}
}
return cty.Object(outTypes)
case out.IsSetType():
var elemType cty.Type
// Sets are compatible with other sets, lists, tuples.
if in.IsSetType() || in.IsListType() {
elemType = dynamicReplace(in.ElementType(), out.ElementType())
}
if in.IsTupleType() {
unifiedType, _ := unify(in.TupleElementTypes(), true)
elemType = dynamicReplace(unifiedType, out.ElementType())
}
return cty.Set(elemType)
case out.IsListType():
var elemType cty.Type
// Lists are compatible with other lists, sets, and tuples.
if in.IsSetType() || in.IsListType() {
elemType = dynamicReplace(in.ElementType(), out.ElementType())
}
if in.IsTupleType() {
unifiedType, _ := unify(in.TupleElementTypes(), true)
elemType = dynamicReplace(unifiedType, out.ElementType())
}
return cty.List(elemType)
case out.IsTupleType():
// Tuples are only compatible with other tuples
var types []cty.Type
for ix := 0; ix < len(out.TupleElementTypes()); ix++ {
types = append(types, dynamicReplace(in.TupleElementType(ix), out.TupleElementType(ix)))
}
return cty.Tuple(types)
default:
panic("unrecognized type " + out.FriendlyName())
}
}
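dynamicReplace itself is unexported, but its effect is visible through convert.Convert whenever the target type contains cty.DynamicPseudoType; a small sketch, where the expected result type in the comment is inferred from the UnknownVal/NullVal change earlier rather than verified:

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/convert"
)

func main() {
	in := cty.UnknownVal(cty.Object(map[string]cty.Type{
		"a": cty.String,
		"b": cty.String,
	}))

	// Converting an unknown object to map(any): with dynamicReplace in place
	// the unknown result should be typed cty.Map(cty.String) rather than the
	// looser cty.Map(cty.DynamicPseudoType).
	out, err := convert.Convert(in, cty.Map(cty.DynamicPseudoType))
	if err != nil {
		panic(err)
	}
	fmt.Println(out.Type().FriendlyName())
}
```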

View File

@@ -80,13 +80,19 @@ func conversionObjectToObject(in, out cty.Type, unsafe bool) conversion {
}
}
if val.IsNull() {
// Strip optional attributes out of the embedded type for null
// values.
val = cty.NullVal(val.Type().WithoutOptionalAttributesDeep())
}
attrVals[name] = val
}
for name := range outOptionals {
if _, exists := attrVals[name]; !exists {
wantTy := outAtys[name]
-attrVals[name] = cty.NullVal(wantTy)
+attrVals[name] = cty.NullVal(wantTy.WithoutOptionalAttributesDeep())
}
}

View File

@@ -40,7 +40,7 @@ func GetConversionUnsafe(in cty.Type, out cty.Type) Conversion {
// This is a convenience wrapper around calling GetConversionUnsafe and then
// immediately passing the given value to the resulting function.
func Convert(in cty.Value, want cty.Type) (cty.Value, error) {
-if in.Type().Equals(want) {
+if in.Type().Equals(want.WithoutOptionalAttributesDeep()) {
return in, nil
}

View File

@@ -447,7 +447,6 @@ func unifyTupleTypes(types []cty.Type, unsafe bool, hasDynamic bool) (cty.Type,
conversions[i] = GetConversion(ty, retTy)
}
if conversions[i] == nil {
-// Shouldn't be reachable, since we were able to unify
return unifyTupleTypesToList(types, unsafe)
}
}
@@ -483,8 +482,8 @@ func unifyTupleTypesToList(types []cty.Type, unsafe bool) (cty.Type, []Conversio
conversions[i] = GetConversion(ty, retTy)
}
if conversions[i] == nil {
-// Shouldn't be reachable, since we were able to unify
-return unifyObjectTypesToMap(types, unsafe)
+// no conversion was found
+return cty.NilType, nil
}
}
return retTy, conversions

View File

@@ -66,7 +66,7 @@ func elementIterator(val Value) ElementIterator {
idx: -1,
}
case val.ty.IsSetType():
-rawSet := val.v.(set.Set)
+rawSet := val.v.(set.Set[interface{}])
return &setElementIterator{
ety: val.ty.ElementType(),
setIt: rawSet.Iterator(),
@@ -139,7 +139,7 @@ func (it *mapElementIterator) Next() bool {
type setElementIterator struct {
ety Type
-setIt *set.Iterator
+setIt *set.Iterator[interface{}]
}
func (it *setElementIterator) Element() (Value, Value) {

View File

@@ -10,6 +10,9 @@ type Parameter struct {
// value, but callers may use it for documentation, etc.
Name string
// Description is an optional description for the argument.
Description string
// A type that any argument for this parameter must conform to.
// cty.DynamicPseudoType can be used, either at top-level or nested
// in a parameterized type, to indicate that any type should be

View File

@@ -14,6 +14,9 @@ type Function struct {
// Spec is the specification of a function, used to instantiate
// a new Function.
type Spec struct {
// Description is an optional description for the function specification.
Description string
// Params is a description of the positional parameters for the function.
// The standard checking logic rejects any calls that do not provide
// arguments conforming to this definition, freeing the function
@@ -344,3 +347,62 @@ func (f Function) VarParam() *Parameter {
ret := *f.spec.VarParam
return &ret
}
// Description returns a human-readable description of the function.
func (f Function) Description() string {
return f.spec.Description
}
// WithNewDescriptions returns a new function that has the same signature
// and implementation as the receiver but has the function description and
// the parameter descriptions replaced with those given in the arguments.
//
// All descriptions may be given as an empty string to specify that there
// should be no description at all.
//
// The paramDescs argument must match the number of parameters
// the receiver expects, or this function will panic. If the function has a
// VarParam then that counts as one parameter for the sake of this rule. The
// given descriptions will be assigned in order starting with the positional
// arguments in their declared order, followed by the variadic parameter if
// any.
//
// As a special case, WithNewDescriptions will accept a paramDescs which
// does not cover the receiver's variadic parameter (if any), so that it's
// possible to add a variadic parameter to a function which didn't previously
// have one without that being a breaking change for an existing caller using
// WithNewDescriptions against that function. In this case the base description
// of the variadic parameter will be preserved.
func (f Function) WithNewDescriptions(funcDesc string, paramDescs []string) Function {
retSpec := *f.spec // shallow copy of the receiver
retSpec.Description = funcDesc
retSpec.Params = make([]Parameter, len(f.spec.Params))
copy(retSpec.Params, f.spec.Params) // shallow copy of positional parameters
if f.spec.VarParam != nil {
retVarParam := *f.spec.VarParam // shallow copy of variadic parameter
retSpec.VarParam = &retVarParam
}
if retSpec.VarParam != nil {
if with, without := len(retSpec.Params)+1, len(retSpec.Params); len(paramDescs) != with && len(paramDescs) != without {
panic(fmt.Sprintf("paramDescs must have length of either %d or %d", with, without))
}
} else {
if want := len(retSpec.Params); len(paramDescs) != want {
panic(fmt.Sprintf("paramDescs must have length %d", want))
}
}
posParamDescs := paramDescs[:len(retSpec.Params)]
varParamDescs := paramDescs[len(retSpec.Params):] // guaranteed to be zero or one elements because of the rules above
for i, desc := range posParamDescs {
retSpec.Params[i].Description = desc
}
for _, desc := range varParamDescs {
retSpec.VarParam.Description = desc
}
return New(&retSpec)
}

View File

@@ -6,6 +6,7 @@
)
var NotFunc = function.New(&function.Spec{
Description: `Applies the logical NOT operation to the given boolean value.`,
Params: []function.Parameter{
{
Name: "val",
@@ -21,6 +22,7 @@ var NotFunc = function.New(&function.Spec{
})
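For context, these specs are consumed as function.Function values; calling one looks like the sketch below, and the Description strings added throughout this file are what documentation tooling can now read back via Description():

```go
package main

import (
	"fmt"

	"github.com/zclconf/go-cty/cty"
	"github.com/zclconf/go-cty/cty/function/stdlib"
)

func main() {
	v, err := stdlib.NotFunc.Call([]cty.Value{cty.True})
	if err != nil {
		panic(err)
	}
	fmt.Println(v.RawEquals(cty.False)) // expected: true
	fmt.Println(stdlib.NotFunc.Description())
}
```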
var AndFunc = function.New(&function.Spec{
Description: `Applies the logical AND operation to the given boolean values.`,
Params: []function.Parameter{
{
Name: "a",
@@ -42,6 +44,7 @@ var AndFunc = function.New(&function.Spec{
})
var OrFunc = function.New(&function.Spec{
Description: `Applies the logical OR operation to the given boolean values.`,
Params: []function.Parameter{
{
Name: "a",

View File

@@ -30,6 +30,7 @@ func BytesVal(buf []byte) cty.Value {
// BytesLen is a Function that returns the length of the buffer encapsulated
// in a Bytes value.
var BytesLenFunc = function.New(&function.Spec{
Description: `Returns the total number of bytes in the given buffer.`,
Params: []function.Parameter{
{
Name: "buf",
@@ -46,6 +47,7 @@ var BytesLenFunc = function.New(&function.Spec{
// BytesSlice is a Function that returns a slice of the given Bytes value.
var BytesSliceFunc = function.New(&function.Spec{
Description: `Extracts a subslice from the given buffer.`,
Params: []function.Parameter{
{
Name: "buf",

View File

@@ -12,6 +12,7 @@ import (
)
var HasIndexFunc = function.New(&function.Spec{
Description: `Returns true if the given collection can be indexed with the given key without producing an error, or false otherwise.`,
Params: []function.Parameter{
{
Name: "collection",
@@ -37,6 +38,7 @@ var HasIndexFunc = function.New(&function.Spec{
})
var IndexFunc = function.New(&function.Spec{
Description: `Returns the element with the given key from the given collection, or raises an error if there is no such element.`,
Params: []function.Parameter{
{
Name: "collection",
@@ -106,6 +108,7 @@ var IndexFunc = function.New(&function.Spec{
})
var LengthFunc = function.New(&function.Spec{
Description: `Returns the number of elements in the given collection.`,
Params: []function.Parameter{
{
Name: "collection",
@@ -127,6 +130,7 @@ var LengthFunc = function.New(&function.Spec{
})
var ElementFunc = function.New(&function.Spec{
Description: `Returns the element with the given index from the given list or tuple, applying the modulo operation to the given index if it's greater than the number of elements.`,
Params: []function.Parameter{
{
Name: "list",
@@ -206,9 +210,11 @@ var ElementFunc = function.New(&function.Spec{
// CoalesceListFunc is a function that takes any number of list arguments
// and returns the first one that isn't empty.
var CoalesceListFunc = function.New(&function.Spec{
Description: `Returns the first of the given sequences that has a length greater than zero.`,
Params: []function.Parameter{},
VarParam: &function.Parameter{
Name: "vals",
Description: `List or tuple values to test in the given order.`,
Type: cty.DynamicPseudoType,
AllowUnknown: true,
AllowDynamicType: true,
@@ -270,6 +276,7 @@ var CoalesceListFunc = function.New(&function.Spec{
// CompactFunc is a function that takes a list of strings and returns a new list
// with any empty string elements removed.
var CompactFunc = function.New(&function.Spec{
Description: `Removes all empty string elements from the given list of strings.`,
Params: []function.Parameter{
{
Name: "list",
@@ -306,6 +313,7 @@ var CompactFunc = function.New(&function.Spec{
// ContainsFunc is a function that determines whether a given list or
// set contains a given single value as one of its elements.
var ContainsFunc = function.New(&function.Spec{
Description: `Returns true if the given value is a value in the given list, tuple, or set, or false otherwise.`,
Params: []function.Parameter{
{
Name: "list",
@@ -364,6 +372,7 @@ var ContainsFunc = function.New(&function.Spec{
// DistinctFunc is a function that takes a list and returns a new list
// with any duplicate elements removed.
var DistinctFunc = function.New(&function.Spec{
Description: `Removes any duplicate values from the given list, preserving the order of remaining elements.`,
Params: []function.Parameter{
{
Name: "list",
@@ -399,14 +408,17 @@ var DistinctFunc = function.New(&function.Spec{
// ChunklistFunc is a function that splits a single list into fixed-size chunks,
// returning a list of lists.
var ChunklistFunc = function.New(&function.Spec{
Description: `Splits a single list into multiple lists where each has at most the given number of elements.`,
Params: []function.Parameter{
{
Name: "list",
Description: `The list to split into chunks.`,
Type: cty.List(cty.DynamicPseudoType),
AllowMarked: true,
},
{
Name: "size",
Description: `The maximum length of each chunk. All but the last element of the result is guaranteed to be of exactly this size.`,
Type: cty.Number,
AllowMarked: true,
},
@@ -471,6 +483,7 @@ var ChunklistFunc = function.New(&function.Spec{
// FlattenFunc is a function that takes a list and replaces any elements
// that are lists with a flattened sequence of the list contents.
var FlattenFunc = function.New(&function.Spec{
Description: `Transforms a list, set, or tuple value into a tuple by replacing any given elements that are themselves sequences with a flattened tuple of all of the nested elements concatenated together.`,
Params: []function.Parameter{
{
Name: "list",
@@ -525,6 +538,7 @@ func flattener(flattenList cty.Value) ([]cty.Value, []cty.ValueMarks, bool) {
if len(flattenListMarks) > 0 {
markses = append(markses, flattenListMarks)
}

if !flattenList.Length().IsKnown() {
// If we don't know the length of what we're flattening then we can't
// predict the length of our result yet either.
@ -542,7 +556,7 @@ func flattener(flattenList cty.Value) ([]cty.Value, []cty.ValueMarks, bool) {
isKnown = false
}
-if val.Type().IsListType() || val.Type().IsSetType() || val.Type().IsTupleType() {
+if !val.IsNull() && (val.Type().IsListType() || val.Type().IsSetType() || val.Type().IsTupleType()) {
if !val.IsKnown() {
isKnown = false
_, unknownMarks := val.Unmark()
@ -566,9 +580,11 @@ func flattener(flattenList cty.Value) ([]cty.Value, []cty.ValueMarks, bool) {
// KeysFunc is a function that takes a map and returns a sorted list of the map keys.
var KeysFunc = function.New(&function.Spec{
Description: `Returns a list of the keys of the given map in lexicographical order.`,
Params: []function.Parameter{
{
Name: "inputMap",
Description: `The map to extract keys from. May instead be an object-typed value, in which case the result is a tuple of the object attributes.`,
Type: cty.DynamicPseudoType,
AllowUnknown: true,
AllowMarked: true,
@@ -641,6 +657,7 @@ var KeysFunc = function.New(&function.Spec{
// LookupFunc is a function that performs dynamic lookups of map types.
var LookupFunc = function.New(&function.Spec{
Description: `Returns the value of the element with the given key from the given map, or returns the default value if there is no such element.`,
Params: []function.Parameter{
{
Name: "inputMap",
@@ -733,6 +750,7 @@ var LookupFunc = function.New(&function.Spec{
// If more than one given map or object defines the same key then the one that
// is later in the argument sequence takes precedence.
var MergeFunc = function.New(&function.Spec{
Description: `Merges all of the elements from the given maps into a single map, or the attributes from given objects into a single object.`,
Params: []function.Parameter{},
VarParam: &function.Parameter{
Name: "maps",
@@ -849,6 +867,7 @@ var MergeFunc = function.New(&function.Spec{
// ReverseListFunc takes a sequence and produces a new sequence of the same length
// with all of the same elements as the given sequence but in reverse order.
var ReverseListFunc = function.New(&function.Spec{
Description: `Returns the given list with its elements in reverse order.`,
Params: []function.Parameter{
{
Name: "list",
@@ -897,9 +916,11 @@ var ReverseListFunc = function.New(&function.Spec{
// preserving the ordering of all of the input lists. Otherwise the result is a
// set of tuples.
var SetProductFunc = function.New(&function.Spec{
Description: `Calculates the cartesian product of two or more sets.`,
Params: []function.Parameter{},
VarParam: &function.Parameter{
Name: "sets",
Description: "The sets to consider. Also accepts lists and tuples, and if all arguments are of list or tuple type then the result will preserve the input ordering",
Type: cty.DynamicPseudoType,
AllowMarked: true,
},
@@ -1037,6 +1058,7 @@ var SetProductFunc = function.New(&function.Spec{
// SliceFunc is a function that extracts some consecutive elements
// from within a list.
var SliceFunc = function.New(&function.Spec{
Description: `Extracts a subslice of the given list or tuple value.`,
Params: []function.Parameter{
{
Name: "list",
@@ -1158,9 +1180,10 @@ func sliceIndexes(args []cty.Value) (int, int, bool, error) {
// ValuesFunc is a function that returns a list of the map values,
// in the order of the sorted keys.
var ValuesFunc = function.New(&function.Spec{
Description: `Returns the values of elements of a given map, or the values of attributes of a given object, in lexicographic order by key or attribute name.`,
Params: []function.Parameter{
{
-Name: "values",
+Name: "mapping",
Type: cty.DynamicPseudoType,
AllowMarked: true,
},
@@ -1225,6 +1248,7 @@ var ValuesFunc = function.New(&function.Spec{
// ZipmapFunc is a function that constructs a map from a list of keys
// and a corresponding list of values.
var ZipmapFunc = function.New(&function.Spec{
Description: `Constructs a map from a list of keys and a corresponding list of values, which must both be of the same length.`,
Params: []function.Parameter{
{
Name: "keys",

View File

@@ -1,6 +1,7 @@
package stdlib
import (
"fmt"
"strconv" "strconv"
"github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty"
@ -18,6 +19,7 @@ import (
// a tuple. // a tuple.
func MakeToFunc(wantTy cty.Type) function.Function { func MakeToFunc(wantTy cty.Type) function.Function {
return function.New(&function.Spec{ return function.New(&function.Spec{
Description: fmt.Sprintf("Converts the given value to %s, or raises an error if that conversion is impossible.", wantTy.FriendlyName()),
Params: []function.Parameter{
{
Name: "v",

View File

@@ -11,6 +11,7 @@ import (
)
var CSVDecodeFunc = function.New(&function.Spec{
Description: `Parses the given string as Comma Separated Values (as defined by RFC 4180) and returns a map of objects representing the table of data, using the first row as a header row to define the object attributes.`,
Params: []function.Parameter{
{
Name: "str",

View File

@@ -12,6 +12,7 @@ import (
)
var FormatDateFunc = function.New(&function.Spec{
Description: `Formats a timestamp given in RFC 3339 syntax into another timestamp in some other machine-oriented time syntax, as described in the format string.`,
Params: []function.Parameter{
{
Name: "format",
@@ -205,6 +206,7 @@ var FormatDateFunc = function.New(&function.Spec{
// TimeAddFunc is a function that adds a duration to a timestamp, returning a new timestamp.
var TimeAddFunc = function.New(&function.Spec{
Description: `Adds the duration represented by the given duration string to the given RFC 3339 timestamp string, returning another RFC 3339 timestamp.`,
Params: []function.Parameter{
{
Name: "timestamp",

View File

@@ -18,6 +18,7 @@ import (
//go:generate gofmt -w format_fsm.go
var FormatFunc = function.New(&function.Spec{
Description: `Constructs a string by applying formatting verbs to a series of arguments, using a similar syntax to the C function \"printf\".`,
Params: []function.Parameter{
{
Name: "format",
@@ -45,6 +46,7 @@ var FormatFunc = function.New(&function.Spec{
})
var FormatListFunc = function.New(&function.Spec{
Description: `Constructs a list of strings by applying formatting verbs to a series of arguments, using a similar syntax to the C function \"printf\".`,
Params: []function.Parameter{
{
Name: "format",

View File

@@ -9,6 +9,7 @@ import (
)
var EqualFunc = function.New(&function.Spec{
Description: `Returns true if the two given values are equal, or false otherwise.`,
Params: []function.Parameter{
{
Name: "a",
@@ -32,6 +33,7 @@ var EqualFunc = function.New(&function.Spec{
})
var NotEqualFunc = function.New(&function.Spec{
Description: `Returns false if the two given values are equal, or true otherwise.`,
Params: []function.Parameter{
{
Name: "a",
@@ -55,6 +57,7 @@ var NotEqualFunc = function.New(&function.Spec{
})
var CoalesceFunc = function.New(&function.Spec{
Description: `Returns the first of the given arguments that isn't null, or raises an error if there are no non-null arguments.`,
Params: []function.Parameter{},
VarParam: &function.Parameter{
Name: "vals",

View File

@@ -7,6 +7,7 @@ import (
)
var JSONEncodeFunc = function.New(&function.Spec{
Description: `Returns a string containing a JSON representation of the given value.`,
Params: []function.Parameter{
{
Name: "val",
@@ -39,6 +40,7 @@ var JSONEncodeFunc = function.New(&function.Spec{
})
var JSONDecodeFunc = function.New(&function.Spec{
Description: `Parses the given string as JSON and returns a value corresponding to what the JSON document describes.`,
Params: []function.Parameter{
{
Name: "str",

View File

@ -11,6 +11,7 @@ import (
) )
var AbsoluteFunc = function.New(&function.Spec{ var AbsoluteFunc = function.New(&function.Spec{
Description: `If the given number is negative then returns its positive equivalent, or otherwise returns the given number unchanged.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "num", Name: "num",
@ -26,6 +27,7 @@ var AbsoluteFunc = function.New(&function.Spec{
}) })
var AddFunc = function.New(&function.Spec{ var AddFunc = function.New(&function.Spec{
Description: `Returns the sum of the two given numbers.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "a", Name: "a",
@ -59,6 +61,7 @@ var AddFunc = function.New(&function.Spec{
}) })
var SubtractFunc = function.New(&function.Spec{ var SubtractFunc = function.New(&function.Spec{
Description: `Returns the difference between the two given numbers.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "a", Name: "a",
@ -92,6 +95,7 @@ var SubtractFunc = function.New(&function.Spec{
}) })
var MultiplyFunc = function.New(&function.Spec{ var MultiplyFunc = function.New(&function.Spec{
Description: `Returns the product of the two given numbers.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "a", Name: "a",
@ -126,6 +130,7 @@ var MultiplyFunc = function.New(&function.Spec{
}) })
var DivideFunc = function.New(&function.Spec{ var DivideFunc = function.New(&function.Spec{
Description: `Divides the first given number by the second.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "a", Name: "a",
@ -160,6 +165,7 @@ var DivideFunc = function.New(&function.Spec{
}) })
var ModuloFunc = function.New(&function.Spec{ var ModuloFunc = function.New(&function.Spec{
Description: `Divides the first given number by the second and then returns the remainder.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "a", Name: "a",
@ -194,6 +200,7 @@ var ModuloFunc = function.New(&function.Spec{
}) })
var GreaterThanFunc = function.New(&function.Spec{ var GreaterThanFunc = function.New(&function.Spec{
Description: `Returns true if and only if the second number is less than the first.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "a", Name: "a",
@ -215,6 +222,7 @@ var GreaterThanFunc = function.New(&function.Spec{
}) })
var GreaterThanOrEqualToFunc = function.New(&function.Spec{ var GreaterThanOrEqualToFunc = function.New(&function.Spec{
Description: `Returns true if and only if the second number is less than or equal to the first.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "a", Name: "a",
@ -236,6 +244,7 @@ var GreaterThanOrEqualToFunc = function.New(&function.Spec{
}) })
var LessThanFunc = function.New(&function.Spec{ var LessThanFunc = function.New(&function.Spec{
Description: `Returns true if and only if the second number is greater than the first.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "a", Name: "a",
@ -257,6 +266,7 @@ var LessThanFunc = function.New(&function.Spec{
}) })
var LessThanOrEqualToFunc = function.New(&function.Spec{ var LessThanOrEqualToFunc = function.New(&function.Spec{
Description: `Returns true if and only if the second number is greater than or equal to the first.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "a", Name: "a",
@ -278,6 +288,7 @@ var LessThanOrEqualToFunc = function.New(&function.Spec{
}) })
var NegateFunc = function.New(&function.Spec{ var NegateFunc = function.New(&function.Spec{
Description: `Multiplies the given number by -1.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "num", Name: "num",
@ -293,6 +304,7 @@ var NegateFunc = function.New(&function.Spec{
}) })
var MinFunc = function.New(&function.Spec{ var MinFunc = function.New(&function.Spec{
Description: `Returns the numerically smallest of all of the given numbers.`,
Params: []function.Parameter{}, Params: []function.Parameter{},
VarParam: &function.Parameter{ VarParam: &function.Parameter{
Name: "numbers", Name: "numbers",
@ -317,6 +329,7 @@ var MinFunc = function.New(&function.Spec{
}) })
var MaxFunc = function.New(&function.Spec{ var MaxFunc = function.New(&function.Spec{
Description: `Returns the numerically greatest of all of the given numbers.`,
Params: []function.Parameter{}, Params: []function.Parameter{},
VarParam: &function.Parameter{ VarParam: &function.Parameter{
Name: "numbers", Name: "numbers",
@ -341,6 +354,7 @@ var MaxFunc = function.New(&function.Spec{
}) })
var IntFunc = function.New(&function.Spec{ var IntFunc = function.New(&function.Spec{
Description: `Discards any fractional portion of the given number.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "num", Name: "num",
@ -363,6 +377,7 @@ var IntFunc = function.New(&function.Spec{
// CeilFunc is a function that returns the closest whole number greater // CeilFunc is a function that returns the closest whole number greater
// than or equal to the given value. // than or equal to the given value.
var CeilFunc = function.New(&function.Spec{ var CeilFunc = function.New(&function.Spec{
Description: `Returns the smallest whole number that is greater than or equal to the given value.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "num", Name: "num",
@ -392,6 +407,7 @@ var CeilFunc = function.New(&function.Spec{
// FloorFunc is a function that returns the closest whole number lesser // FloorFunc is a function that returns the closest whole number lesser
// than or equal to the given value. // than or equal to the given value.
var FloorFunc = function.New(&function.Spec{ var FloorFunc = function.New(&function.Spec{
Description: `Returns the greatest whole number that is less than or equal to the given value.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "num", Name: "num",
@ -420,6 +436,7 @@ var FloorFunc = function.New(&function.Spec{
// LogFunc is a function that returns the logarithm of a given number in a given base. // LogFunc is a function that returns the logarithm of a given number in a given base.
var LogFunc = function.New(&function.Spec{ var LogFunc = function.New(&function.Spec{
Description: `Returns the logarithm of the given number in the given base.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "num", Name: "num",
@ -448,6 +465,7 @@ var LogFunc = function.New(&function.Spec{
// PowFunc is a function that returns the logarithm of a given number in a given base. // PowFunc is a function that returns the logarithm of a given number in a given base.
var PowFunc = function.New(&function.Spec{ var PowFunc = function.New(&function.Spec{
Description: `Returns the given number raised to the given power (exponentiation).`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "num", Name: "num",
@ -477,6 +495,7 @@ var PowFunc = function.New(&function.Spec{
// SignumFunc is a function that determines the sign of a number, returning a // SignumFunc is a function that determines the sign of a number, returning a
// number between -1 and 1 to represent the sign.. // number between -1 and 1 to represent the sign..
var SignumFunc = function.New(&function.Spec{ var SignumFunc = function.New(&function.Spec{
Description: `Returns 0 if the given number is zero, 1 if the given number is positive, or -1 if the given number is negative.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "num", Name: "num",
@ -502,6 +521,7 @@ var SignumFunc = function.New(&function.Spec{
// ParseIntFunc is a function that parses a string argument and returns an integer of the specified base. // ParseIntFunc is a function that parses a string argument and returns an integer of the specified base.
var ParseIntFunc = function.New(&function.Spec{ var ParseIntFunc = function.New(&function.Spec{
Description: `Parses the given string as a number of the given base, or raises an error if the string contains invalid characters.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "number", Name: "number",

View File

@ -10,6 +10,7 @@ import (
) )
var RegexFunc = function.New(&function.Spec{ var RegexFunc = function.New(&function.Spec{
Description: `Applies the given regular expression pattern to the given string and returns information about a single match, or raises an error if there is no match.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "pattern", Name: "pattern",
@ -54,6 +55,7 @@ var RegexFunc = function.New(&function.Spec{
}) })
var RegexAllFunc = function.New(&function.Spec{ var RegexAllFunc = function.New(&function.Spec{
Description: `Applies the given regular expression pattern to the given string and returns a list of information about all non-overlapping matches, or an empty list if there are no matches.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "pattern", Name: "pattern",

View File

@ -9,6 +9,7 @@ import (
) )
var ConcatFunc = function.New(&function.Spec{ var ConcatFunc = function.New(&function.Spec{
Description: `Concatenates together all of the given lists or tuples into a single sequence, preserving the input order.`,
Params: []function.Parameter{}, Params: []function.Parameter{},
VarParam: &function.Parameter{ VarParam: &function.Parameter{
Name: "seqs", Name: "seqs",
@ -137,6 +138,7 @@ var ConcatFunc = function.New(&function.Spec{
}) })
var RangeFunc = function.New(&function.Spec{ var RangeFunc = function.New(&function.Spec{
Description: `Returns a list of numbers spread evenly over a particular range.`,
VarParam: &function.Parameter{ VarParam: &function.Parameter{
Name: "params", Name: "params",
Type: cty.Number, Type: cty.Number,

View File

@ -10,6 +10,7 @@ import (
) )
var SetHasElementFunc = function.New(&function.Spec{ var SetHasElementFunc = function.New(&function.Spec{
Description: `Returns true if the given set contains the given element, or false otherwise.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "set", Name: "set",
@ -29,6 +30,7 @@ var SetHasElementFunc = function.New(&function.Spec{
}) })
var SetUnionFunc = function.New(&function.Spec{ var SetUnionFunc = function.New(&function.Spec{
Description: `Returns the union of all given sets.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "first_set", Name: "first_set",
@ -48,6 +50,7 @@ var SetUnionFunc = function.New(&function.Spec{
}) })
var SetIntersectionFunc = function.New(&function.Spec{ var SetIntersectionFunc = function.New(&function.Spec{
Description: `Returns the intersection of all given sets.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "first_set", Name: "first_set",
@ -67,6 +70,7 @@ var SetIntersectionFunc = function.New(&function.Spec{
}) })
var SetSubtractFunc = function.New(&function.Spec{ var SetSubtractFunc = function.New(&function.Spec{
Description: `Returns the relative complement of the two given sets.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "a", Name: "a",
@ -86,6 +90,7 @@ var SetSubtractFunc = function.New(&function.Spec{
}) })
var SetSymmetricDifferenceFunc = function.New(&function.Spec{ var SetSymmetricDifferenceFunc = function.New(&function.Spec{
Description: `Returns the symmetric difference of the two given sets.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "first_set", Name: "first_set",

View File

@ -14,6 +14,7 @@ import (
) )
var UpperFunc = function.New(&function.Spec{ var UpperFunc = function.New(&function.Spec{
Description: "Returns the given string with all Unicode letters translated to their uppercase equivalents.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "str", Name: "str",
@ -30,6 +31,7 @@ var UpperFunc = function.New(&function.Spec{
}) })
var LowerFunc = function.New(&function.Spec{ var LowerFunc = function.New(&function.Spec{
Description: "Returns the given string with all Unicode letters translated to their lowercase equivalents.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "str", Name: "str",
@ -46,6 +48,7 @@ var LowerFunc = function.New(&function.Spec{
}) })
var ReverseFunc = function.New(&function.Spec{ var ReverseFunc = function.New(&function.Spec{
Description: "Returns the given string with all of its Unicode characters in reverse order.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "str", Name: "str",
@ -73,6 +76,7 @@ var ReverseFunc = function.New(&function.Spec{
}) })
var StrlenFunc = function.New(&function.Spec{ var StrlenFunc = function.New(&function.Spec{
Description: "Returns the number of Unicode characters (technically: grapheme clusters) in the given string.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "str", Name: "str",
@ -97,19 +101,23 @@ var StrlenFunc = function.New(&function.Spec{
}) })
var SubstrFunc = function.New(&function.Spec{ var SubstrFunc = function.New(&function.Spec{
Description: "Extracts a substring from the given string.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "str", Name: "str",
Description: "The input string.",
Type: cty.String, Type: cty.String,
AllowDynamicType: true, AllowDynamicType: true,
}, },
{ {
Name: "offset", Name: "offset",
Description: "The starting offset in Unicode characters.",
Type: cty.Number, Type: cty.Number,
AllowDynamicType: true, AllowDynamicType: true,
}, },
{ {
Name: "length", Name: "length",
Description: "The maximum length of the result in Unicode characters.",
Type: cty.Number, Type: cty.Number,
AllowDynamicType: true, AllowDynamicType: true,
}, },
@ -197,14 +205,17 @@ var SubstrFunc = function.New(&function.Spec{
}) })
var JoinFunc = function.New(&function.Spec{ var JoinFunc = function.New(&function.Spec{
Description: "Concatenates together the elements of all given lists with a delimiter, producing a single string.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "separator", Name: "separator",
Description: "Delimiter to insert between the given strings.",
Type: cty.String, Type: cty.String,
}, },
}, },
VarParam: &function.Parameter{ VarParam: &function.Parameter{
Name: "lists", Name: "lists",
Description: "One or more lists of strings to join.",
Type: cty.List(cty.String), Type: cty.List(cty.String),
}, },
Type: function.StaticReturnType(cty.String), Type: function.StaticReturnType(cty.String),
@ -244,6 +255,7 @@ var JoinFunc = function.New(&function.Spec{
}) })
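
JoinFunc is the first function in this file to document a variadic parameter (VarParam) as well. A minimal usage sketch, again assuming the vendored package is github.com/zclconf/go-cty/cty/function/stdlib:

package main

import (
    "fmt"

    "github.com/zclconf/go-cty/cty"
    "github.com/zclconf/go-cty/cty/function/stdlib"
)

func main() {
    sep := cty.StringVal(", ")
    l1 := cty.ListVal([]cty.Value{cty.StringVal("a"), cty.StringVal("b")})
    l2 := cty.ListVal([]cty.Value{cty.StringVal("c")})

    // All lists passed after the separator are joined into one result string.
    joined, err := stdlib.JoinFunc.Call([]cty.Value{sep, l1, l2})
    if err != nil {
        panic(err)
    }
    fmt.Println(joined.AsString()) // a, b, c
}
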
var SortFunc = function.New(&function.Spec{ var SortFunc = function.New(&function.Spec{
Description: "Applies a lexicographic sort to the elements of the given list.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "list", Name: "list",
@ -282,13 +294,16 @@ var SortFunc = function.New(&function.Spec{
}) })
var SplitFunc = function.New(&function.Spec{ var SplitFunc = function.New(&function.Spec{
Description: "Produces a list of one or more strings by splitting the given string at all instances of a given separator substring.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "separator", Name: "separator",
Description: "The substring that delimits the result strings.",
Type: cty.String, Type: cty.String,
}, },
{ {
Name: "str", Name: "str",
Description: "The string to split.",
Type: cty.String, Type: cty.String,
}, },
}, },
@ -311,6 +326,7 @@ var SplitFunc = function.New(&function.Spec{
// ChompFunc is a function that removes newline characters at the end of a // ChompFunc is a function that removes newline characters at the end of a
// string. // string.
var ChompFunc = function.New(&function.Spec{ var ChompFunc = function.New(&function.Spec{
Description: "Removes one or more newline characters from the end of the given string.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "str", Name: "str",
@ -327,13 +343,16 @@ var ChompFunc = function.New(&function.Spec{
// IndentFunc is a function that adds a given number of spaces to the // IndentFunc is a function that adds a given number of spaces to the
// beginnings of all but the first line in a given multi-line string. // beginnings of all but the first line in a given multi-line string.
var IndentFunc = function.New(&function.Spec{ var IndentFunc = function.New(&function.Spec{
Description: "Adds a given number of spaces after each newline character in the given string.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "spaces", Name: "spaces",
Description: "Number of spaces to add after each newline character.",
Type: cty.Number, Type: cty.Number,
}, },
{ {
Name: "str", Name: "str",
Description: "The string to transform.",
Type: cty.String, Type: cty.String,
}, },
}, },
@ -352,6 +371,7 @@ var IndentFunc = function.New(&function.Spec{
// TitleFunc is a function that converts the first letter of each word in the // TitleFunc is a function that converts the first letter of each word in the
// given string to uppercase. // given string to uppercase.
var TitleFunc = function.New(&function.Spec{ var TitleFunc = function.New(&function.Spec{
Description: "Replaces one letter after each non-letter and non-digit character with its uppercase equivalent.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "str", Name: "str",
@ -367,6 +387,7 @@ var TitleFunc = function.New(&function.Spec{
// TrimSpaceFunc is a function that removes any space characters from the start // TrimSpaceFunc is a function that removes any space characters from the start
// and end of the given string. // and end of the given string.
var TrimSpaceFunc = function.New(&function.Spec{ var TrimSpaceFunc = function.New(&function.Spec{
Description: "Removes any consecutive space characters (as defined by Unicode) from the start and end of the given string.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "str", Name: "str",
@ -382,13 +403,16 @@ var TrimSpaceFunc = function.New(&function.Spec{
// TrimFunc is a function that removes the specified characters from the start // TrimFunc is a function that removes the specified characters from the start
// and end of the given string. // and end of the given string.
var TrimFunc = function.New(&function.Spec{ var TrimFunc = function.New(&function.Spec{
Description: "Removes consecutive sequences of characters in \"cutset\" from the start and end of the given string.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "str", Name: "str",
Description: "The string to trim.",
Type: cty.String, Type: cty.String,
}, },
{ {
Name: "cutset", Name: "cutset",
Description: "A string containing all of the characters to trim. Each character is taken separately, so the order of characters is insignificant.",
Type: cty.String, Type: cty.String,
}, },
}, },
@ -396,6 +420,9 @@ var TrimFunc = function.New(&function.Spec{
Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) { Impl: func(args []cty.Value, retType cty.Type) (cty.Value, error) {
str := args[0].AsString() str := args[0].AsString()
cutset := args[1].AsString() cutset := args[1].AsString()
// NOTE: This doesn't properly handle any character that is encoded
// with multiple sequential code units, such as letters with
// combining diacritics and emoji modifier sequences.
return cty.StringVal(strings.Trim(str, cutset)), nil return cty.StringVal(strings.Trim(str, cutset)), nil
}, },
}) })
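
A minimal usage sketch for TrimFunc under the same stdlib import-path assumption; as the new NOTE says, the cutset is applied per code unit, so characters built from multiple code units (combining diacritics, emoji modifier sequences) are not treated as single characters:

package main

import (
    "fmt"

    "github.com/zclconf/go-cty/cty"
    "github.com/zclconf/go-cty/cty/function/stdlib"
)

func main() {
    // Every leading and trailing character that appears in the cutset is removed.
    trimmed, err := stdlib.TrimFunc.Call([]cty.Value{
        cty.StringVal("?!hello!?"),
        cty.StringVal("!?"),
    })
    if err != nil {
        panic(err)
    }
    fmt.Println(trimmed.AsString()) // hello
}
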
@ -403,13 +430,16 @@ var TrimFunc = function.New(&function.Spec{
// TrimPrefixFunc is a function that removes the specified characters from the // TrimPrefixFunc is a function that removes the specified characters from the
// start the given string. // start the given string.
var TrimPrefixFunc = function.New(&function.Spec{ var TrimPrefixFunc = function.New(&function.Spec{
Description: "Removes the given prefix from the start of the given string, if present.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "str", Name: "str",
Description: "The string to trim.",
Type: cty.String, Type: cty.String,
}, },
{ {
Name: "prefix", Name: "prefix",
Description: "The prefix to remove, if present.",
Type: cty.String, Type: cty.String,
}, },
}, },
@ -424,13 +454,16 @@ var TrimPrefixFunc = function.New(&function.Spec{
// TrimSuffixFunc is a function that removes the specified characters from the // TrimSuffixFunc is a function that removes the specified characters from the
// end of the given string. // end of the given string.
var TrimSuffixFunc = function.New(&function.Spec{ var TrimSuffixFunc = function.New(&function.Spec{
Description: "Removes the given suffix from the end of the given string, if present.",
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "str", Name: "str",
Description: "The string to trim.",
Type: cty.String, Type: cty.String,
}, },
{ {
Name: "suffix", Name: "suffix",
Description: "The suffix to remove, if present.",
Type: cty.String, Type: cty.String,
}, },
}, },

View File

@ -12,17 +12,21 @@ import (
// substring, and replaces each occurrence with a given replacement string. // The substr argument is a simple string.
// The substr argument is a simple string. // The substr argument is a simple string.
var ReplaceFunc = function.New(&function.Spec{ var ReplaceFunc = function.New(&function.Spec{
Description: `Replaces all instances of the given substring in the given string with the given replacement string.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "str", Name: "str",
Description: `The string to search within.`,
Type: cty.String, Type: cty.String,
}, },
{ {
Name: "substr", Name: "substr",
Description: `The substring to search for.`,
Type: cty.String, Type: cty.String,
}, },
{ {
Name: "replace", Name: "replace",
Description: `The new substring to replace substr with.`,
Type: cty.String, Type: cty.String,
}, },
}, },
@ -40,13 +44,14 @@ var ReplaceFunc = function.New(&function.Spec{
// given substring, and replaces each occurrence with a given replacement // string. The substr argument must be a valid regular expression.
// string. The substr argument must be a valid regular expression. // string. The substr argument must be a valid regular expression.
var RegexReplaceFunc = function.New(&function.Spec{ var RegexReplaceFunc = function.New(&function.Spec{
Description: `Applies the given regular expression pattern to the given string and replaces all matches with the given replacement string.`,
Params: []function.Parameter{ Params: []function.Parameter{
{ {
Name: "str", Name: "str",
Type: cty.String, Type: cty.String,
}, },
{ {
Name: "substr", Name: "pattern",
Type: cty.String, Type: cty.String,
}, },
{ {

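
Besides the new descriptions, the second parameter of RegexReplaceFunc is renamed from substr to pattern, which matches what the function actually expects; the call signature is otherwise unchanged. A minimal sketch with the same stdlib import-path assumption:

package main

import (
    "fmt"

    "github.com/zclconf/go-cty/cty"
    "github.com/zclconf/go-cty/cty/function/stdlib"
)

func main() {
    // Arguments are (str, pattern, replace); the pattern uses Go's RE2 regexp syntax.
    replaced, err := stdlib.RegexReplaceFunc.Call([]cty.Value{
        cty.StringVal("version 2.20.0"),
        cty.StringVal(`\d+\.\d+\.\d+`),
        cty.StringVal("2.24.1"),
    })
    if err != nil {
        panic(err)
    }
    fmt.Println(replaced.AsString()) // version 2.24.1
}
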
View File

@ -1,204 +0,0 @@
package cty
import (
"bytes"
"encoding/gob"
"errors"
"fmt"
"math/big"
"github.com/zclconf/go-cty/cty/set"
)
// GobEncode is an implementation of the gob.GobEncoder interface, which
// allows Values to be included in structures encoded with encoding/gob.
//
// Currently it is not possible to represent values of capsule types in gob,
// because the types themselves cannot be represented.
func (val Value) GobEncode() ([]byte, error) {
if val.IsMarked() {
return nil, errors.New("value is marked")
}
buf := &bytes.Buffer{}
enc := gob.NewEncoder(buf)
gv := gobValue{
Version: 0,
Ty: val.ty,
V: val.v,
}
err := enc.Encode(gv)
if err != nil {
return nil, fmt.Errorf("error encoding cty.Value: %s", err)
}
return buf.Bytes(), nil
}
// GobDecode is an implementation of the gob.GobDecoder interface, which
// inverts the operation performed by GobEncode. See the documentation of
// GobEncode for considerations when using cty.Value instances with gob.
func (val *Value) GobDecode(buf []byte) error {
r := bytes.NewReader(buf)
dec := gob.NewDecoder(r)
var gv gobValue
err := dec.Decode(&gv)
if err != nil {
return fmt.Errorf("error decoding cty.Value: %s", err)
}
if gv.Version != 0 {
return fmt.Errorf("unsupported cty.Value encoding version %d; only 0 is supported", gv.Version)
}
// Because big.Float.GobEncode is implemented with a pointer receiver,
// gob encoding of an interface{} containing a *big.Float value does not
// round-trip correctly, emerging instead as a non-pointer big.Float.
// The rest of cty expects all number values to be represented by
// *big.Float, so we'll fix that up here.
gv.V = gobDecodeFixNumberPtr(gv.V, gv.Ty)
val.ty = gv.Ty
val.v = gv.V
return nil
}
// GobEncode is an implementation of the gob.GobEncoder interface, which
// allows Types to be included in structures encoded with encoding/gob.
//
// Currently it is not possible to represent capsule types in gob.
func (t Type) GobEncode() ([]byte, error) {
buf := &bytes.Buffer{}
enc := gob.NewEncoder(buf)
gt := gobType{
Version: 0,
Impl: t.typeImpl,
}
err := enc.Encode(gt)
if err != nil {
return nil, fmt.Errorf("error encoding cty.Type: %s", err)
}
return buf.Bytes(), nil
}
// GobDecode is an implementation of the gob.GobDecoder interface, which
// reverses the encoding performed by GobEncode to allow types to be recovered
// from gob buffers.
func (t *Type) GobDecode(buf []byte) error {
r := bytes.NewReader(buf)
dec := gob.NewDecoder(r)
var gt gobType
err := dec.Decode(&gt)
if err != nil {
return fmt.Errorf("error decoding cty.Type: %s", err)
}
if gt.Version != 0 {
return fmt.Errorf("unsupported cty.Type encoding version %d; only 0 is supported", gt.Version)
}
t.typeImpl = gt.Impl
return nil
}
// Capsule types cannot currently be gob-encoded, because they rely on pointer
// equality and we have no way to recover the original pointer on decode.
func (t *capsuleType) GobEncode() ([]byte, error) {
return nil, fmt.Errorf("cannot gob-encode capsule type %q", t.FriendlyName(friendlyTypeName))
}
func (t *capsuleType) GobDecode() ([]byte, error) {
return nil, fmt.Errorf("cannot gob-decode capsule type %q", t.FriendlyName(friendlyTypeName))
}
type gobValue struct {
Version int
Ty Type
V interface{}
}
type gobType struct {
Version int
Impl typeImpl
}
type gobCapsuleTypeImpl struct {
}
// gobDecodeFixNumberPtr fixes an unfortunate quirk of round-tripping cty.Number
// values through gob: the big.Float.GobEncode method is implemented on a
// pointer receiver, and so it loses the "pointer-ness" of the value on
// encode, causing the values to emerge the other end as big.Float rather than
// *big.Float as we expect elsewhere in cty.
//
// The implementation of gobDecodeFixNumberPtr mutates the given raw value
// during its work, and may either return the same value mutated or a new
// value. Callers must no longer use whatever value they pass as "raw" after
// this function is called.
func gobDecodeFixNumberPtr(raw interface{}, ty Type) interface{} {
// Unfortunately we need to work recursively here because number values
// might be embedded in structural or collection type values.
switch {
case ty.Equals(Number):
if bf, ok := raw.(big.Float); ok {
return &bf // wrap in pointer
}
case ty.IsMapType() && ty.ElementType().Equals(Number):
if m, ok := raw.(map[string]interface{}); ok {
for k, v := range m {
m[k] = gobDecodeFixNumberPtr(v, ty.ElementType())
}
}
case ty.IsListType() && ty.ElementType().Equals(Number):
if s, ok := raw.([]interface{}); ok {
for i, v := range s {
s[i] = gobDecodeFixNumberPtr(v, ty.ElementType())
}
}
case ty.IsSetType() && ty.ElementType().Equals(Number):
if s, ok := raw.(set.Set); ok {
newS := set.NewSet(s.Rules())
for it := s.Iterator(); it.Next(); {
newV := gobDecodeFixNumberPtr(it.Value(), ty.ElementType())
newS.Add(newV)
}
return newS
}
case ty.IsObjectType():
if m, ok := raw.(map[string]interface{}); ok {
for k, v := range m {
aty := ty.AttributeType(k)
m[k] = gobDecodeFixNumberPtr(v, aty)
}
}
case ty.IsTupleType():
if s, ok := raw.([]interface{}); ok {
for i, v := range s {
ety := ty.TupleElementType(i)
s[i] = gobDecodeFixNumberPtr(v, ety)
}
}
}
return raw
}
// gobDecodeFixNumberPtrVal is a helper wrapper around gobDecodeFixNumberPtr
// that works with already-constructed values. This is primarily for testing,
// to fix up intentionally-invalid number values for the parts of the test
// code that need them to be valid, such as calling GoString on them.
func gobDecodeFixNumberPtrVal(v Value) Value {
raw := gobDecodeFixNumberPtr(v.v, v.ty)
return Value{
v: raw,
ty: v.ty,
}
}
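
With gob.go removed, cty.Value and cty.Type in this go-cty version no longer implement gob encoding at all. As a hedged sketch of one alternative (an assumption on the caller's side, not something this diff prescribes), values can be round-tripped through the cty/json package instead:

package main

import (
    "fmt"

    "github.com/zclconf/go-cty/cty"
    ctyjson "github.com/zclconf/go-cty/cty/json"
)

func main() {
    val := cty.ObjectVal(map[string]cty.Value{
        "name":  cty.StringVal("gitea"),
        "count": cty.NumberIntVal(3),
    })
    ty := val.Type()

    // The type must be kept (or serialized separately, e.g. with
    // ctyjson.MarshalType) so the value can be decoded again later.
    buf, err := ctyjson.Marshal(val, ty)
    if err != nil {
        panic(err)
    }
    fmt.Println(string(buf)) // e.g. {"count":3,"name":"gitea"}

    back, err := ctyjson.Unmarshal(buf, ty)
    if err != nil {
        panic(err)
    }
    fmt.Println(back.RawEquals(val)) // true
}
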

View File

@ -11,7 +11,7 @@ import (
var valueType = reflect.TypeOf(cty.Value{}) var valueType = reflect.TypeOf(cty.Value{})
var typeType = reflect.TypeOf(cty.Type{}) var typeType = reflect.TypeOf(cty.Type{})
var setType = reflect.TypeOf(set.Set{}) var setType = reflect.TypeOf(set.Set[interface{}]{})
var bigFloatType = reflect.TypeOf(big.Float{}) var bigFloatType = reflect.TypeOf(big.Float{})
var bigIntType = reflect.TypeOf(big.Int{}) var bigIntType = reflect.TypeOf(big.Int{})

View File

@ -268,7 +268,7 @@ func toCtySet(val reflect.Value, ety cty.Type, path cty.Path) (cty.Value, error)
return cty.NilVal, path.NewErrorf("can't convert Go %s to %#v", val.Type(), cty.Set(ety)) return cty.NilVal, path.NewErrorf("can't convert Go %s to %#v", val.Type(), cty.Set(ety))
} }
rawSet := val.Interface().(set.Set) rawSet := val.Interface().(set.Set[interface{}])
inVals := rawSet.Values() inVals := rawSet.Values()
if len(inVals) == 0 { if len(inVals) == 0 {

View File

@ -51,7 +51,7 @@ func (t typeMap) GoString() string {
return fmt.Sprintf("cty.Map(%#v)", t.ElementTypeT) return fmt.Sprintf("cty.Map(%#v)", t.ElementTypeT)
} }
// IsMapType returns true if the given type is a list type, regardless of its // IsMapType returns true if the given type is a map type, regardless of its
// element type. // element type.
func (t Type) IsMapType() bool { func (t Type) IsMapType() bool {
_, ok := t.typeImpl.(typeMap) _, ok := t.typeImpl.(typeMap)

View File

@ -11,14 +11,14 @@ import (
// to talk about a subset of paths within a value that meet some criteria, // to talk about a subset of paths within a value that meet some criteria,
// without directly modifying the values at those paths. // without directly modifying the values at those paths.
type PathSet struct { type PathSet struct {
set set.Set set set.Set[Path]
} }
// NewPathSet creates and returns a PathSet, with initial contents optionally // NewPathSet creates and returns a PathSet, with initial contents optionally
// set by the given arguments. // set by the given arguments.
func NewPathSet(paths ...Path) PathSet { func NewPathSet(paths ...Path) PathSet {
ret := PathSet{ ret := PathSet{
set: set.NewSet(pathSetRules{}), set: set.NewSet(set.Rules[Path](pathSetRules{})),
} }
for _, path := range paths { for _, path := range paths {
@ -61,7 +61,7 @@ func (s PathSet) List() []Path {
} }
ret := make([]Path, 0, s.set.Length()) ret := make([]Path, 0, s.set.Length())
for it := s.set.Iterator(); it.Next(); { for it := s.set.Iterator(); it.Next(); {
ret = append(ret, it.Value().(Path)) ret = append(ret, it.Value())
} }
return ret return ret
} }
@ -134,8 +134,7 @@ var indexStepPlaceholder = []byte("#")
type pathSetRules struct { type pathSetRules struct {
} }
func (r pathSetRules) Hash(v interface{}) int { func (r pathSetRules) Hash(path Path) int {
path := v.(Path)
hash := crc64.New(crc64Table) hash := crc64.New(crc64Table)
for _, rawStep := range path { for _, rawStep := range path {
@ -159,10 +158,7 @@ func (r pathSetRules) Hash(v interface{}) int {
return int(hash.Sum64()) return int(hash.Sum64())
} }
func (r pathSetRules) Equivalent(a, b interface{}) bool { func (r pathSetRules) Equivalent(aPath, bPath Path) bool {
aPath := a.(Path)
bPath := b.(Path)
if len(aPath) != len(bPath) { if len(aPath) != len(bPath) {
return false return false
} }
@ -198,7 +194,7 @@ func (r pathSetRules) Equivalent(a, b interface{}) bool {
} }
// SameRules is true if both Rules instances are pathSetRules structs. // SameRules is true if both Rules instances are pathSetRules structs.
func (r pathSetRules) SameRules(other set.Rules) bool { func (r pathSetRules) SameRules(other set.Rules[Path]) bool {
_, ok := other.(pathSetRules) _, ok := other.(pathSetRules)
return ok return ok
} }
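
PathSet keeps its exported API; only its internal set.Set and the pathSetRules methods move to the new generic forms, which removes the interface{} plumbing and the type assertion in List. A minimal usage sketch (the path constructors cty.GetAttrPath and Path.IndexInt are assumed to be available in this cty version):

package main

import (
    "fmt"

    "github.com/zclconf/go-cty/cty"
)

func main() {
    ps := cty.NewPathSet(
        cty.GetAttrPath("name"),
        cty.GetAttrPath("tags").IndexInt(0),
    )
    ps.Add(cty.GetAttrPath("name")) // already present, so the set is unchanged

    fmt.Println(ps.Has(cty.GetAttrPath("name"))) // true
    for _, p := range ps.List() {
        fmt.Printf("%#v\n", p)
    }
}
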

View File

@ -74,6 +74,8 @@ func rawNumberEqual(a, b *big.Float) bool {
return false return false
case a == nil: // b == nil too then, due to previous case case a == nil: // b == nil too then, due to previous case
return true return true
case a.Sign() != b.Sign():
return false
default: default:
// This format and precision matches that used by cty/json.Marshal, // This format and precision matches that used by cty/json.Marshal,
// and thus achieves our definition of "two numbers are equal if // and thus achieves our definition of "two numbers are equal if

View File

@ -1,76 +0,0 @@
package set
import (
"bytes"
"encoding/gob"
"fmt"
)
// GobEncode is an implementation of the interface gob.GobEncoder, allowing
// sets to be included in structures encoded via gob.
//
// The set rules are included in the serialized value, so the caller must
// register its concrete rules type with gob.Register before using a
// set in a gob, and possibly also implement GobEncode/GobDecode to customize
// how any parameters are persisted.
//
// The set elements are also included, so if they are of non-primitive types
// they too must be registered with gob.
//
// If the produced gob values will persist for a long time, the caller must
// ensure compatibility of the rules implementation. In particular, if the
// definition of element equivalence changes between encoding and decoding
// then two distinct stored elements may be considered equivalent on decoding,
// causing the recovered set to have fewer elements than when it was stored.
func (s Set) GobEncode() ([]byte, error) {
gs := gobSet{
Version: 0,
Rules: s.rules,
Values: s.Values(),
}
buf := &bytes.Buffer{}
enc := gob.NewEncoder(buf)
err := enc.Encode(gs)
if err != nil {
return nil, fmt.Errorf("error encoding set.Set: %s", err)
}
return buf.Bytes(), nil
}
// GobDecode is the opposite of GobEncode. See GobEncode for information
// on the requirements for and caveats of including set values in gobs.
func (s *Set) GobDecode(buf []byte) error {
r := bytes.NewReader(buf)
dec := gob.NewDecoder(r)
var gs gobSet
err := dec.Decode(&gs)
if err != nil {
return fmt.Errorf("error decoding set.Set: %s", err)
}
if gs.Version != 0 {
return fmt.Errorf("unsupported set.Set encoding version %d; need 0", gs.Version)
}
victim := NewSetFromSlice(gs.Rules, gs.Values)
s.vals = victim.vals
s.rules = victim.rules
return nil
}
type gobSet struct {
Version int
Rules Rules
// The bucket-based representation is for efficient in-memory access, but
// for serialization it's enough to just retain the values themselves,
// which we can re-bucket using the rules (which may have changed!) when
// we re-inflate.
Values []interface{}
}
func init() {
gob.Register([]interface{}(nil))
}

View File

@ -1,15 +1,15 @@
package set package set
type Iterator struct { type Iterator[T any] struct {
vals []interface{} vals []T
idx int idx int
} }
func (it *Iterator) Value() interface{} { func (it *Iterator[T]) Value() T {
return it.vals[it.idx] return it.vals[it.idx]
} }
func (it *Iterator) Next() bool { func (it *Iterator[T]) Next() bool {
it.idx++ it.idx++
return it.idx < len(it.vals) return it.idx < len(it.vals)
} }

View File

@ -7,10 +7,10 @@ import (
// Add inserts the given value into the receiving Set. // Add inserts the given value into the receiving Set.
// //
// This mutates the set in-place. This operation is not thread-safe. // This mutates the set in-place. This operation is not thread-safe.
func (s Set) Add(val interface{}) { func (s Set[T]) Add(val T) {
hv := s.rules.Hash(val) hv := s.rules.Hash(val)
if _, ok := s.vals[hv]; !ok { if _, ok := s.vals[hv]; !ok {
s.vals[hv] = make([]interface{}, 0, 1) s.vals[hv] = make([]T, 0, 1)
} }
bucket := s.vals[hv] bucket := s.vals[hv]
@ -26,7 +26,7 @@ func (s Set) Add(val interface{}) {
// Remove deletes the given value from the receiving set, if indeed it was // Remove deletes the given value from the receiving set, if indeed it was
// there in the first place. If the value is not present, this is a no-op. // there in the first place. If the value is not present, this is a no-op.
func (s Set) Remove(val interface{}) { func (s Set[T]) Remove(val T) {
hv := s.rules.Hash(val) hv := s.rules.Hash(val)
bucket, ok := s.vals[hv] bucket, ok := s.vals[hv]
if !ok { if !ok {
@ -35,7 +35,7 @@ func (s Set) Remove(val interface{}) {
for i, ev := range bucket { for i, ev := range bucket {
if s.rules.Equivalent(val, ev) { if s.rules.Equivalent(val, ev) {
newBucket := make([]interface{}, 0, len(bucket)-1) newBucket := make([]T, 0, len(bucket)-1)
newBucket = append(newBucket, bucket[:i]...) newBucket = append(newBucket, bucket[:i]...)
newBucket = append(newBucket, bucket[i+1:]...) newBucket = append(newBucket, bucket[i+1:]...)
if len(newBucket) > 0 { if len(newBucket) > 0 {
@ -50,7 +50,7 @@ func (s Set) Remove(val interface{}) {
// Has returns true if the given value is in the receiving set, or false if // Has returns true if the given value is in the receiving set, or false if
// it is not. // it is not.
func (s Set) Has(val interface{}) bool { func (s Set[T]) Has(val T) bool {
hv := s.rules.Hash(val) hv := s.rules.Hash(val)
bucket, ok := s.vals[hv] bucket, ok := s.vals[hv]
if !ok { if !ok {
@ -67,7 +67,7 @@ func (s Set) Has(val interface{}) bool {
// Copy performs a shallow copy of the receiving set, returning a new set // Copy performs a shallow copy of the receiving set, returning a new set
// with the same rules and elements. // with the same rules and elements.
func (s Set) Copy() Set { func (s Set[T]) Copy() Set[T] {
ret := NewSet(s.rules) ret := NewSet(s.rules)
for k, v := range s.vals { for k, v := range s.vals {
ret.vals[k] = v ret.vals[k] = v
@ -92,10 +92,10 @@ func (s Set) Copy() Set {
// //
// Once an iterator has been created for a set, the set *must not* be mutated // Once an iterator has been created for a set, the set *must not* be mutated
// until the iterator is no longer in use. // until the iterator is no longer in use.
func (s Set) Iterator() *Iterator { func (s Set[T]) Iterator() *Iterator[T] {
vals := s.Values() vals := s.Values()
return &Iterator{ return &Iterator[T]{
vals: vals, vals: vals,
idx: -1, idx: -1,
} }
@ -103,7 +103,7 @@ func (s Set) Iterator() *Iterator {
// EachValue calls the given callback once for each value in the set, in an // EachValue calls the given callback once for each value in the set, in an
// undefined order that callers should not depend on. // undefined order that callers should not depend on.
func (s Set) EachValue(cb func(interface{})) { func (s Set[T]) EachValue(cb func(T)) {
it := s.Iterator() it := s.Iterator()
for it.Next() { for it.Next() {
cb(it.Value()) cb(it.Value())
@ -114,8 +114,8 @@ func (s Set) EachValue(cb func(interface{})) {
// an order then the result is in that order. If no order is provided or if // an order then the result is in that order. If no order is provided or if
// it is not a total order then the result order is undefined, but consistent // it is not a total order then the result order is undefined, but consistent
// for a particular set value within a specific release of cty. // for a particular set value within a specific release of cty.
func (s Set) Values() []interface{} { func (s Set[T]) Values() []T {
var ret []interface{} var ret []T
// Sort the bucketIds to ensure that we always traverse in a // Sort the bucketIds to ensure that we always traverse in a
// consistent order. // consistent order.
bucketIDs := make([]int, 0, len(s.vals)) bucketIDs := make([]int, 0, len(s.vals))
@ -128,7 +128,7 @@ func (s Set) Values() []interface{} {
ret = append(ret, s.vals[bucketID]...) ret = append(ret, s.vals[bucketID]...)
} }
if orderRules, ok := s.rules.(OrderedRules); ok { if orderRules, ok := s.rules.(OrderedRules[T]); ok {
sort.SliceStable(ret, func(i, j int) bool { sort.SliceStable(ret, func(i, j int) bool {
return orderRules.Less(ret[i], ret[j]) return orderRules.Less(ret[i], ret[j])
}) })
@ -138,7 +138,7 @@ func (s Set) Values() []interface{} {
} }
// Length returns the number of values in the set. // Length returns the number of values in the set.
func (s Set) Length() int { func (s Set[T]) Length() int {
var count int var count int
for _, bucket := range s.vals { for _, bucket := range s.vals {
count = count + len(bucket) count = count + len(bucket)
@ -149,13 +149,13 @@ func (s Set) Length() int {
// Union returns a new set that contains all of the members of both the // Union returns a new set that contains all of the members of both the
// receiving set and the given set. Both sets must have the same rules, or // receiving set and the given set. Both sets must have the same rules, or
// else this function will panic. // else this function will panic.
func (s1 Set) Union(s2 Set) Set { func (s1 Set[T]) Union(s2 Set[T]) Set[T] {
mustHaveSameRules(s1, s2) mustHaveSameRules(s1, s2)
rs := NewSet(s1.rules) rs := NewSet(s1.rules)
s1.EachValue(func(v interface{}) { s1.EachValue(func(v T) {
rs.Add(v) rs.Add(v)
}) })
s2.EachValue(func(v interface{}) { s2.EachValue(func(v T) {
rs.Add(v) rs.Add(v)
}) })
return rs return rs
@ -164,10 +164,10 @@ func (s1 Set) Union(s2 Set) Set {
// Intersection returns a new set that contains the values that both the // Intersection returns a new set that contains the values that both the
// receiver and given sets have in common. Both sets must have the same rules, // receiver and given sets have in common. Both sets must have the same rules,
// or else this function will panic. // or else this function will panic.
func (s1 Set) Intersection(s2 Set) Set { func (s1 Set[T]) Intersection(s2 Set[T]) Set[T] {
mustHaveSameRules(s1, s2) mustHaveSameRules(s1, s2)
rs := NewSet(s1.rules) rs := NewSet(s1.rules)
s1.EachValue(func(v interface{}) { s1.EachValue(func(v T) {
if s2.Has(v) { if s2.Has(v) {
rs.Add(v) rs.Add(v)
} }
@ -178,10 +178,10 @@ func (s1 Set) Intersection(s2 Set) Set {
// Subtract returns a new set that contains all of the values from the receiver // Subtract returns a new set that contains all of the values from the receiver
// that are not also in the given set. Both sets must have the same rules, // that are not also in the given set. Both sets must have the same rules,
// or else this function will panic. // or else this function will panic.
func (s1 Set) Subtract(s2 Set) Set { func (s1 Set[T]) Subtract(s2 Set[T]) Set[T] {
mustHaveSameRules(s1, s2) mustHaveSameRules(s1, s2)
rs := NewSet(s1.rules) rs := NewSet(s1.rules)
s1.EachValue(func(v interface{}) { s1.EachValue(func(v T) {
if !s2.Has(v) { if !s2.Has(v) {
rs.Add(v) rs.Add(v)
} }
@ -193,15 +193,15 @@ func (s1 Set) Subtract(s2 Set) Set {
// both the receiver and given sets, except those that both sets have in // both the receiver and given sets, except those that both sets have in
// common. Both sets must have the same rules, or else this function will // common. Both sets must have the same rules, or else this function will
// panic. // panic.
func (s1 Set) SymmetricDifference(s2 Set) Set { func (s1 Set[T]) SymmetricDifference(s2 Set[T]) Set[T] {
mustHaveSameRules(s1, s2) mustHaveSameRules(s1, s2)
rs := NewSet(s1.rules) rs := NewSet(s1.rules)
s1.EachValue(func(v interface{}) { s1.EachValue(func(v T) {
if !s2.Has(v) { if !s2.Has(v) {
rs.Add(v) rs.Add(v)
} }
}) })
s2.EachValue(func(v interface{}) { s2.EachValue(func(v T) {
if !s1.Has(v) { if !s1.Has(v) {
rs.Add(v) rs.Add(v)
} }

View File

@ -4,13 +4,13 @@ package set
// //
// Each Set has a Rules instance, whose methods must satisfy the interface // Each Set has a Rules instance, whose methods must satisfy the interface
// contracts given below for any value that will be added to the set. // contracts given below for any value that will be added to the set.
type Rules interface { type Rules[T any] interface {
// Hash returns an int that somewhat-uniquely identifies the given value. // Hash returns an int that somewhat-uniquely identifies the given value.
// //
// A good hash function will minimize collisions for values that will be // A good hash function will minimize collisions for values that will be
// added to the set, though collisions *are* permitted. Collisions will // added to the set, though collisions *are* permitted. Collisions will
// simply reduce the efficiency of operations on the set. // simply reduce the efficiency of operations on the set.
Hash(interface{}) int Hash(T) int
// Equivalent returns true if and only if the two values are considered // Equivalent returns true if and only if the two values are considered
// equivalent for the sake of set membership. Two values that are // equivalent for the sake of set membership. Two values that are
@ -21,11 +21,11 @@ type Rules interface {
// Two values that are equivalent *must* result in the same hash value, // Two values that are equivalent *must* result in the same hash value,
// though it is *not* required that two values with the same hash value // though it is *not* required that two values with the same hash value
// be equivalent. // be equivalent.
Equivalent(interface{}, interface{}) bool Equivalent(T, T) bool
// SameRules returns true if the instance is equivalent to another Rules // SameRules returns true if the instance is equivalent to another Rules
// instance. // instance over the same element type.
SameRules(Rules) bool SameRules(Rules[T]) bool
} }
// OrderedRules is an extension of Rules that can apply a partial order to // OrderedRules is an extension of Rules that can apply a partial order to
@ -37,8 +37,8 @@ type Rules interface {
// is undefined but consistent for a particular version of cty. The exact // is undefined but consistent for a particular version of cty. The exact
// order in that case is not part of the contract and is subject to change // order in that case is not part of the contract and is subject to change
// between versions. // between versions.
type OrderedRules interface { type OrderedRules[T any] interface {
Rules Rules[T]
// Less returns true if and only if the first argument should sort before // Less returns true if and only if the first argument should sort before
// the second argument. If the second argument should sort before the first // the second argument. If the second argument should sort before the first

View File

@ -19,20 +19,20 @@ import (
// Set operations are not optimized to minimize memory pressure. Mutating // Set operations are not optimized to minimize memory pressure. Mutating
// a set will generally create garbage and so should perhaps be avoided in // a set will generally create garbage and so should perhaps be avoided in
// tight loops where memory pressure is a concern. // tight loops where memory pressure is a concern.
type Set struct { type Set[T any] struct {
vals map[int][]interface{} vals map[int][]T
rules Rules rules Rules[T]
} }
// NewSet returns an empty set with the membership rules given. // NewSet returns an empty set with the membership rules given.
func NewSet(rules Rules) Set { func NewSet[T any](rules Rules[T]) Set[T] {
return Set{ return Set[T]{
vals: map[int][]interface{}{}, vals: map[int][]T{},
rules: rules, rules: rules,
} }
} }
func NewSetFromSlice(rules Rules, vals []interface{}) Set { func NewSetFromSlice[T any](rules Rules[T], vals []T) Set[T] {
s := NewSet(rules) s := NewSet(rules)
for _, v := range vals { for _, v := range vals {
s.Add(v) s.Add(v)
@ -40,11 +40,11 @@ func NewSetFromSlice(rules Rules, vals []interface{}) Set {
return s return s
} }
func sameRules(s1 Set, s2 Set) bool { func sameRules[T any](s1 Set[T], s2 Set[T]) bool {
return s1.rules.SameRules(s2.rules) return s1.rules.SameRules(s2.rules)
} }
func mustHaveSameRules(s1 Set, s2 Set) { func mustHaveSameRules[T any](s1 Set[T], s2 Set[T]) {
if !sameRules(s1, s2) { if !sameRules(s1, s2) {
panic(fmt.Errorf("incompatible set rules: %#v, %#v", s1.rules, s2.rules)) panic(fmt.Errorf("incompatible set rules: %#v, %#v", s1.rules, s2.rules))
} }
@ -52,11 +52,11 @@ func mustHaveSameRules(s1 Set, s2 Set) {
// HasRules returns true if and only if the receiving set has the given rules // HasRules returns true if and only if the receiving set has the given rules
// instance as its rules. // instance as its rules.
func (s Set) HasRules(rules Rules) bool { func (s Set[T]) HasRules(rules Rules[T]) bool {
return s.rules.SameRules(rules) return s.rules.SameRules(rules)
} }
// Rules returns the receiving set's rules instance. // Rules returns the receiving set's rules instance.
func (s Set) Rules() Rules { func (s Set[T]) Rules() Rules[T] {
return s.rules return s.rules
} }
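
The set package as a whole becomes generic over its element type, so a Rules implementation now works with the concrete element type instead of interface{} and callers no longer need type assertions when iterating. A purely illustrative Rules[string] implementation against the new signatures (stringRules is hypothetical, and the import path github.com/zclconf/go-cty/cty/set is an assumption):

package main

import (
    "fmt"
    "hash/fnv"
    "strings"

    "github.com/zclconf/go-cty/cty/set"
)

// stringRules treats strings case-insensitively. Equivalent values must hash
// equally, so the hash is taken over the lowercased string.
type stringRules struct{}

func (stringRules) Hash(v string) int {
    h := fnv.New32a()
    h.Write([]byte(strings.ToLower(v)))
    return int(h.Sum32())
}

func (stringRules) Equivalent(a, b string) bool {
    return strings.EqualFold(a, b)
}

func (stringRules) SameRules(other set.Rules[string]) bool {
    _, ok := other.(stringRules)
    return ok
}

func main() {
    s := set.NewSet[string](stringRules{})
    s.Add("Hello")
    s.Add("HELLO") // equivalent under these rules, so not added again
    s.Add("world")

    fmt.Println(s.Length()) // 2
    for it := s.Iterator(); it.Next(); {
        fmt.Println(it.Value()) // typed as string; no assertion needed
    }
}
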

View File

@ -21,15 +21,15 @@ type ValueSet struct {
// ValueSet is just a thin wrapper around a set.Set with our value-oriented // ValueSet is just a thin wrapper around a set.Set with our value-oriented
// "rules" applied. We do this so that the caller can work in terms of // "rules" applied. We do this so that the caller can work in terms of
// cty.Value objects even though the set internals use the raw values. // cty.Value objects even though the set internals use the raw values.
s set.Set s set.Set[interface{}]
} }
// NewValueSet creates and returns a new ValueSet with the given element type. // NewValueSet creates and returns a new ValueSet with the given element type.
func NewValueSet(ety Type) ValueSet { func NewValueSet(ety Type) ValueSet {
return newValueSet(set.NewSet(setRules{Type: ety})) return newValueSet(set.NewSet(newSetRules(ety)))
} }
func newValueSet(s set.Set) ValueSet { func newValueSet(s set.Set[interface{}]) ValueSet {
return ValueSet{ return ValueSet{
s: s, s: s,
} }
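
ValueSet stays a thin non-generic wrapper: it instantiates the generic set with interface{} elements because the underlying raw representation varies by element type, while the exported cty API still deals in cty.Value. A minimal usage sketch (ValueSet's Add, Length and Values methods are assumed from the existing API; they are not shown in this diff):

package main

import (
    "fmt"

    "github.com/zclconf/go-cty/cty"
)

func main() {
    vs := cty.NewValueSet(cty.String)
    vs.Add(cty.StringVal("dev"))
    vs.Add(cty.StringVal("prod"))
    vs.Add(cty.StringVal("dev")) // duplicate, so the set keeps two members

    fmt.Println(vs.Length()) // 2
    for _, v := range vs.Values() {
        fmt.Println(v.AsString())
    }
}
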

Some files were not shown because too many files have changed in this diff.