From 06f78e0083af6eb06d5c8790dfd5b8e8e116e983 Mon Sep 17 00:00:00 2001 From: Matt Krump Date: Tue, 13 Feb 2018 10:31:57 -0600 Subject: [PATCH] Handle events - Adds interfaces for developers to build handlers that update data in response to log events - Resolves #29 --- .gitignore | 1 - Gopkg.lock | 39 +- Gopkg.toml | 4 - Makefile | 2 + README.md | 10 - cmd/addFilter.go | 77 - cmd/graphql.go | 107 - cmd/sync.go | 8 +- filters/example-filter.json | 20 - integration_test/block_rewards_test.go | 16 +- integration_test/contract_test.go | 50 +- integration_test/geth_blockchain_test.go | 18 +- libraries/shared/HandlerREADME.md | 27 + libraries/shared/handler_interface.go | 23 + .../shared/shared_suite_test.go | 6 +- libraries/shared/watcher.go | 27 + libraries/shared/watcher_test.go | 69 + pkg/config/config.go | 73 - pkg/config/config_test.go | 37 +- pkg/contract_summary/console_presenter.go | 5 +- pkg/contract_summary/summary.go | 10 +- pkg/contract_summary/summary_test.go | 7 +- pkg/core/blockchain.go | 10 +- pkg/core/node_info.go | 4 +- pkg/core/watched_event_log.go | 21 +- .../inmemory/block_repository.go | 4 +- .../inmemory/contract_repository.go | 4 +- .../inmemory/in_memory.go | 0 .../postgres/postgres.go | 12 +- .../postgres/postgres_suite_test.go | 0 .../postgres/postgres_test.go | 58 +- .../repositories}/block_repository.go | 27 +- .../repositories}/block_repository_test.go | 33 +- .../repositories}/contract_repository.go | 11 +- .../repositories}/contract_repository_test.go | 22 +- .../repositories}/log_filter_repository.go | 13 +- .../log_filter_repository_test.go | 36 +- .../postgres/repositories/logs_repository.go} | 7 +- .../repositories/logs_repository_test.go} | 23 +- .../repositories}/receipt_repository.go | 9 +- .../repositories}/receipts_repository_test.go | 30 +- .../repositories/repositories_suite_test.go | 13 + .../watched_events_repository.go} | 9 +- .../watched_events_repository_test.go} | 42 +- pkg/{repositories => datastore}/repository.go | 11 +- pkg/fakes/blockchain.go | 21 +- pkg/filters/filter_query.go | 2 +- pkg/geth/abi.go | 15 +- pkg/geth/abi_test.go | 35 +- pkg/geth/block_to_core_block.go | 11 +- pkg/geth/blockchain.go | 3 +- pkg/geth/contract.go | 20 +- pkg/geth/node/node.go | 8 +- pkg/geth/testing/helpers.go | 10 +- pkg/graphql_server/schema.go | 169 - pkg/graphql_server/schema_test.go | 178 - pkg/history/populate_blocks.go | 6 +- pkg/history/populate_blocks_test.go | 2 +- pkg/history/validate_blocks.go | 6 +- pkg/history/validate_blocks_test.go | 2 +- pkg/repositories/postgres/helpers.go | 22 - test_config/test_config.go | 78 + utils/utils.go | 12 +- vendor/github.com/BurntSushi/toml/.gitignore | 5 - vendor/github.com/BurntSushi/toml/.travis.yml | 15 - vendor/github.com/BurntSushi/toml/COMPATIBLE | 3 - vendor/github.com/BurntSushi/toml/COPYING | 14 - vendor/github.com/BurntSushi/toml/Makefile | 19 - vendor/github.com/BurntSushi/toml/README.md | 218 - .../BurntSushi/toml/_examples/example.go | 61 - .../BurntSushi/toml/_examples/example.toml | 35 - .../BurntSushi/toml/_examples/hard.toml | 22 - .../BurntSushi/toml/_examples/implicit.toml | 4 - .../toml/_examples/invalid-apples.toml | 6 - .../BurntSushi/toml/_examples/invalid.toml | 35 - .../BurntSushi/toml/_examples/readme1.toml | 5 - .../BurntSushi/toml/_examples/readme2.toml | 1 - .../toml/cmd/toml-test-decoder/COPYING | 14 - .../toml/cmd/toml-test-decoder/README.md | 13 - .../toml/cmd/toml-test-decoder/main.go | 90 - .../toml/cmd/toml-test-encoder/COPYING | 14 - .../toml/cmd/toml-test-encoder/README.md | 13 - 
.../toml/cmd/toml-test-encoder/main.go | 131 - .../BurntSushi/toml/cmd/tomlv/COPYING | 14 - .../BurntSushi/toml/cmd/tomlv/README.md | 21 - .../BurntSushi/toml/cmd/tomlv/main.go | 61 - vendor/github.com/BurntSushi/toml/decode.go | 509 -- .../github.com/BurntSushi/toml/decode_meta.go | 121 - .../github.com/BurntSushi/toml/decode_test.go | 1447 ----- vendor/github.com/BurntSushi/toml/doc.go | 27 - vendor/github.com/BurntSushi/toml/encode.go | 568 -- .../github.com/BurntSushi/toml/encode_test.go | 615 -- .../BurntSushi/toml/encoding_types.go | 19 - .../BurntSushi/toml/encoding_types_1.1.go | 18 - vendor/github.com/BurntSushi/toml/lex.go | 953 ---- vendor/github.com/BurntSushi/toml/parse.go | 592 -- vendor/github.com/BurntSushi/toml/session.vim | 1 - .../github.com/BurntSushi/toml/type_check.go | 91 - .../github.com/BurntSushi/toml/type_fields.go | 242 - .../github.com/neelance/graphql-go/.gitignore | 1 - vendor/github.com/neelance/graphql-go/LICENSE | 24 - .../github.com/neelance/graphql-go/README.md | 57 - .../neelance/graphql-go/errors/errors.go | 41 - .../example/starwars/server/server.go | 64 - .../graphql-go/example/starwars/starwars.go | 647 --- .../neelance/graphql-go/gqltesting/testing.go | 67 - .../github.com/neelance/graphql-go/graphql.go | 185 - .../neelance/graphql-go/graphql_test.go | 1755 ------ vendor/github.com/neelance/graphql-go/id.go | 30 - .../graphql-go/internal/common/directive.go | 32 - .../graphql-go/internal/common/lexer.go | 122 - .../graphql-go/internal/common/literals.go | 206 - .../graphql-go/internal/common/types.go | 80 - .../graphql-go/internal/common/values.go | 77 - .../neelance/graphql-go/internal/exec/exec.go | 313 -- .../graphql-go/internal/exec/packer/packer.go | 367 -- .../internal/exec/resolvable/meta.go | 58 - .../internal/exec/resolvable/resolvable.go | 331 -- .../internal/exec/selected/selected.go | 238 - .../graphql-go/internal/query/query.go | 240 - .../graphql-go/internal/schema/meta.go | 190 - .../graphql-go/internal/schema/schema.go | 462 -- .../graphql-go/internal/tests/all_test.go | 75 - .../graphql-go/internal/tests/empty.go | 1 - .../internal/tests/testdata/LICENSE | 33 - .../internal/tests/testdata/export.js | 110 - .../graphql-go/internal/tests/testdata/gen.go | 4 - .../internal/tests/testdata/tests.json | 4948 ----------------- .../internal/validation/suggestion.go | 71 - .../internal/validation/validation.go | 860 --- .../neelance/graphql-go/introspection.go | 117 - .../graphql-go/introspection/introspection.go | 313 -- .../github.com/neelance/graphql-go/log/log.go | 23 - .../neelance/graphql-go/relay/relay.go | 70 - .../neelance/graphql-go/relay/relay_test.go | 36 - vendor/github.com/neelance/graphql-go/time.go | 36 - .../neelance/graphql-go/trace/trace.go | 80 - .../opentracing/opentracing-go/.gitignore | 13 - .../opentracing/opentracing-go/.travis.yml | 14 - .../opentracing/opentracing-go/CHANGELOG.md | 14 - .../opentracing/opentracing-go/LICENSE | 21 - .../opentracing/opentracing-go/Makefile | 32 - .../opentracing/opentracing-go/README.md | 147 - .../opentracing/opentracing-go/ext/tags.go | 198 - .../opentracing-go/ext/tags_test.go | 148 - .../opentracing-go/globaltracer.go | 32 - .../opentracing/opentracing-go/gocontext.go | 57 - .../opentracing-go/gocontext_test.go | 81 - .../opentracing/opentracing-go/log/field.go | 245 - .../opentracing-go/log/field_test.go | 39 - .../opentracing/opentracing-go/log/util.go | 54 - .../mocktracer/mocklogrecord.go | 105 - .../opentracing-go/mocktracer/mockspan.go | 282 - 
.../opentracing-go/mocktracer/mocktracer.go | 105 - .../mocktracer/mocktracer_test.go | 268 - .../opentracing-go/mocktracer/propagation.go | 120 - .../opentracing/opentracing-go/noop.go | 64 - .../opentracing-go/options_test.go | 31 - .../opentracing/opentracing-go/propagation.go | 176 - .../opentracing-go/propagation_test.go | 93 - .../opentracing/opentracing-go/span.go | 185 - .../opentracing-go/testtracer_test.go | 138 - .../opentracing/opentracing-go/tracer.go | 305 - 163 files changed, 586 insertions(+), 22397 deletions(-) delete mode 100644 cmd/addFilter.go delete mode 100644 cmd/graphql.go delete mode 100644 filters/example-filter.json create mode 100644 libraries/shared/HandlerREADME.md create mode 100644 libraries/shared/handler_interface.go rename pkg/graphql_server/graphql_server_suite_test.go => libraries/shared/shared_suite_test.go (51%) create mode 100644 libraries/shared/watcher.go create mode 100644 libraries/shared/watcher_test.go rename pkg/{repositories => datastore}/inmemory/block_repository.go (92%) rename pkg/{repositories => datastore}/inmemory/contract_repository.go (87%) rename pkg/{repositories => datastore}/inmemory/in_memory.go (100%) rename pkg/{repositories => datastore}/postgres/postgres.go (88%) rename pkg/{repositories => datastore}/postgres/postgres_suite_test.go (100%) rename pkg/{repositories => datastore}/postgres/postgres_test.go (68%) rename pkg/{repositories/postgres => datastore/postgres/repositories}/block_repository.go (93%) rename pkg/{repositories/postgres => datastore/postgres/repositories}/block_repository_test.go (92%) rename pkg/{repositories/postgres => datastore/postgres/repositories}/contract_repository.go (89%) rename pkg/{repositories/postgres => datastore/postgres/repositories}/contract_repository_test.go (82%) rename pkg/{repositories/postgres => datastore/postgres/repositories}/log_filter_repository.go (87%) rename pkg/{repositories/postgres => datastore/postgres/repositories}/log_filter_repository_test.go (71%) rename pkg/{repositories/postgres/logs.go => datastore/postgres/repositories/logs_repository.go} (92%) rename pkg/{repositories/postgres/logs_test.go => datastore/postgres/repositories/logs_repository_test.go} (91%) rename pkg/{repositories/postgres => datastore/postgres/repositories}/receipt_repository.go (85%) rename pkg/{repositories/postgres => datastore/postgres/repositories}/receipts_repository_test.go (75%) create mode 100644 pkg/datastore/postgres/repositories/repositories_suite_test.go rename pkg/{repositories/postgres/watched_events.go => datastore/postgres/repositories/watched_events_repository.go} (61%) rename pkg/{repositories/postgres/watched_events_test.go => datastore/postgres/repositories/watched_events_repository_test.go} (54%) rename pkg/{repositories => datastore}/repository.go (79%) delete mode 100644 pkg/graphql_server/schema.go delete mode 100644 pkg/graphql_server/schema_test.go delete mode 100644 pkg/repositories/postgres/helpers.go create mode 100644 test_config/test_config.go delete mode 100644 vendor/github.com/BurntSushi/toml/.gitignore delete mode 100644 vendor/github.com/BurntSushi/toml/.travis.yml delete mode 100644 vendor/github.com/BurntSushi/toml/COMPATIBLE delete mode 100644 vendor/github.com/BurntSushi/toml/COPYING delete mode 100644 vendor/github.com/BurntSushi/toml/Makefile delete mode 100644 vendor/github.com/BurntSushi/toml/README.md delete mode 100644 vendor/github.com/BurntSushi/toml/_examples/example.go delete mode 100644 vendor/github.com/BurntSushi/toml/_examples/example.toml 
delete mode 100644 vendor/github.com/BurntSushi/toml/_examples/hard.toml delete mode 100644 vendor/github.com/BurntSushi/toml/_examples/implicit.toml delete mode 100644 vendor/github.com/BurntSushi/toml/_examples/invalid-apples.toml delete mode 100644 vendor/github.com/BurntSushi/toml/_examples/invalid.toml delete mode 100644 vendor/github.com/BurntSushi/toml/_examples/readme1.toml delete mode 100644 vendor/github.com/BurntSushi/toml/_examples/readme2.toml delete mode 100644 vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING delete mode 100644 vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md delete mode 100644 vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go delete mode 100644 vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING delete mode 100644 vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md delete mode 100644 vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go delete mode 100644 vendor/github.com/BurntSushi/toml/cmd/tomlv/COPYING delete mode 100644 vendor/github.com/BurntSushi/toml/cmd/tomlv/README.md delete mode 100644 vendor/github.com/BurntSushi/toml/cmd/tomlv/main.go delete mode 100644 vendor/github.com/BurntSushi/toml/decode.go delete mode 100644 vendor/github.com/BurntSushi/toml/decode_meta.go delete mode 100644 vendor/github.com/BurntSushi/toml/decode_test.go delete mode 100644 vendor/github.com/BurntSushi/toml/doc.go delete mode 100644 vendor/github.com/BurntSushi/toml/encode.go delete mode 100644 vendor/github.com/BurntSushi/toml/encode_test.go delete mode 100644 vendor/github.com/BurntSushi/toml/encoding_types.go delete mode 100644 vendor/github.com/BurntSushi/toml/encoding_types_1.1.go delete mode 100644 vendor/github.com/BurntSushi/toml/lex.go delete mode 100644 vendor/github.com/BurntSushi/toml/parse.go delete mode 100644 vendor/github.com/BurntSushi/toml/session.vim delete mode 100644 vendor/github.com/BurntSushi/toml/type_check.go delete mode 100644 vendor/github.com/BurntSushi/toml/type_fields.go delete mode 100644 vendor/github.com/neelance/graphql-go/.gitignore delete mode 100644 vendor/github.com/neelance/graphql-go/LICENSE delete mode 100644 vendor/github.com/neelance/graphql-go/README.md delete mode 100644 vendor/github.com/neelance/graphql-go/errors/errors.go delete mode 100644 vendor/github.com/neelance/graphql-go/example/starwars/server/server.go delete mode 100644 vendor/github.com/neelance/graphql-go/example/starwars/starwars.go delete mode 100644 vendor/github.com/neelance/graphql-go/gqltesting/testing.go delete mode 100644 vendor/github.com/neelance/graphql-go/graphql.go delete mode 100644 vendor/github.com/neelance/graphql-go/graphql_test.go delete mode 100644 vendor/github.com/neelance/graphql-go/id.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/common/directive.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/common/lexer.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/common/literals.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/common/types.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/common/values.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/exec/exec.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/exec/packer/packer.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/exec/resolvable/meta.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/exec/resolvable/resolvable.go delete mode 
100644 vendor/github.com/neelance/graphql-go/internal/exec/selected/selected.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/query/query.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/schema/meta.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/schema/schema.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/tests/all_test.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/tests/empty.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/tests/testdata/LICENSE delete mode 100644 vendor/github.com/neelance/graphql-go/internal/tests/testdata/export.js delete mode 100644 vendor/github.com/neelance/graphql-go/internal/tests/testdata/gen.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/tests/testdata/tests.json delete mode 100644 vendor/github.com/neelance/graphql-go/internal/validation/suggestion.go delete mode 100644 vendor/github.com/neelance/graphql-go/internal/validation/validation.go delete mode 100644 vendor/github.com/neelance/graphql-go/introspection.go delete mode 100644 vendor/github.com/neelance/graphql-go/introspection/introspection.go delete mode 100644 vendor/github.com/neelance/graphql-go/log/log.go delete mode 100644 vendor/github.com/neelance/graphql-go/relay/relay.go delete mode 100644 vendor/github.com/neelance/graphql-go/relay/relay_test.go delete mode 100644 vendor/github.com/neelance/graphql-go/time.go delete mode 100644 vendor/github.com/neelance/graphql-go/trace/trace.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/.gitignore delete mode 100644 vendor/github.com/opentracing/opentracing-go/.travis.yml delete mode 100644 vendor/github.com/opentracing/opentracing-go/CHANGELOG.md delete mode 100644 vendor/github.com/opentracing/opentracing-go/LICENSE delete mode 100644 vendor/github.com/opentracing/opentracing-go/Makefile delete mode 100644 vendor/github.com/opentracing/opentracing-go/README.md delete mode 100644 vendor/github.com/opentracing/opentracing-go/ext/tags.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/ext/tags_test.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/globaltracer.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/gocontext.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/gocontext_test.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/log/field.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/log/field_test.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/log/util.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer_test.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/noop.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/options_test.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/propagation.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/propagation_test.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/span.go delete mode 100644 vendor/github.com/opentracing/opentracing-go/testtracer_test.go delete 
mode 100644 vendor/github.com/opentracing/opentracing-go/tracer.go diff --git a/.gitignore b/.gitignore index 123e1e33..f5d38a88 100644 --- a/.gitignore +++ b/.gitignore @@ -1,5 +1,4 @@ .idea -Gododir/godobin-* test_data_dir/ contracts/* environments/*.toml diff --git a/Gopkg.lock b/Gopkg.lock index 6166f00b..5ad9d186 100644 --- a/Gopkg.lock +++ b/Gopkg.lock @@ -1,12 +1,6 @@ # This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'. -[[projects]] - name = "github.com/BurntSushi/toml" - packages = ["."] - revision = "b26d9c308763d68093482582cea63d69be07a0f0" - version = "v0.3.0" - [[projects]] branch = "master" name = "github.com/aristanetworks/goarista" @@ -152,27 +146,6 @@ packages = ["."] revision = "b4575eea38cca1123ec2dc90c26529b5c5acfcff" -[[projects]] - branch = "master" - name = "github.com/neelance/graphql-go" - packages = [ - ".", - "errors", - "internal/common", - "internal/exec", - "internal/exec/packer", - "internal/exec/resolvable", - "internal/exec/selected", - "internal/query", - "internal/schema", - "internal/validation", - "introspection", - "log", - "relay", - "trace" - ] - revision = "b46637030579abd312c5eea21d36845b6e9e7ca4" - [[projects]] name = "github.com/onsi/ginkgo" packages = [ @@ -218,16 +191,6 @@ revision = "c893efa28eb45626cdaa76c9f653b62488858837" version = "v1.2.0" -[[projects]] - name = "github.com/opentracing/opentracing-go" - packages = [ - ".", - "ext", - "log" - ] - revision = "1949ddbfd147afd4d964a9f00b24eb291e0e7c38" - version = "v1.0.2" - [[projects]] name = "github.com/pelletier/go-toml" packages = ["."] @@ -379,6 +342,6 @@ [solve-meta] analyzer-name = "dep" analyzer-version = 1 - inputs-digest = "4d1806bf6a5678261abb604455d0076906ca5aba42e8625ee09cae019fb21d40" + inputs-digest = "61dea45437b8efb926e1f7446fef75449f89530b201c2ab1893469d13a37b0c9" solver-name = "gps-cdcl" solver-version = 1 diff --git a/Gopkg.toml b/Gopkg.toml index 166cb260..c093ac0c 100644 --- a/Gopkg.toml +++ b/Gopkg.toml @@ -40,7 +40,3 @@ [[constraint]] name = "github.com/ethereum/go-ethereum" version = "1.7.3" - -[[constraint]] - branch = "master" - name = "github.com/neelance/graphql-go" diff --git a/Makefile b/Makefile index 776e49e0..2069eb75 100644 --- a/Makefile +++ b/Makefile @@ -65,6 +65,8 @@ checkdbvars: test -n "$(HOST_NAME)" # $$HOST_NAME test -n "$(PORT)" # $$PORT test -n "$(NAME)" # $$NAME + @echo $(CONNECT_STRING) + .PHONY: rollback rollback: $(MIGRATE) checkdbvars diff --git a/README.md b/README.md index 42b9df21..20b8cc90 100644 --- a/README.md +++ b/README.md @@ -39,16 +39,6 @@ The default location for Ethereum is: * see `./environments` for example config -## Watch specific events -1. Start geth -2. In a separate terminal start vulcanize_db - - `vulcanizedb sync --config --starting-block-number ` -3. Create event filter - - `vulcanizedb addFilter --config --filter-filepath ` - * see `./filters` for example filter -4. 
The filters are tracked in the `log_filters` table and the filtered events -will show up in the `watched_log_events` view - ## Running the Tests ### Unit Tests diff --git a/cmd/addFilter.go b/cmd/addFilter.go deleted file mode 100644 index f4b6e367..00000000 --- a/cmd/addFilter.go +++ /dev/null @@ -1,77 +0,0 @@ -package cmd - -import ( - "encoding/json" - "io/ioutil" - "log" - - "github.com/spf13/cobra" - "github.com/vulcanize/vulcanizedb/pkg/filters" - "github.com/vulcanize/vulcanizedb/pkg/geth" - "github.com/vulcanize/vulcanizedb/pkg/repositories/postgres" - "github.com/vulcanize/vulcanizedb/utils" -) - -// addFilterCmd represents the addFilter command -var addFilterCmd = &cobra.Command{ - Use: "addFilter", - Short: "Adds event filter to vulcanizedb", - Long: `An event filter is added to the vulcanize_db. -All events matching the filter conitions will be tracked -in vulcanizedb. - -vulcanizedb addFilter --config config.toml --filter-filepath filter.json - -The event filters are expected to match -the format described in the ethereum RPC wiki: - -https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_newfilter - -[{ - "fromBlock": "0x1", - "toBlock": "0x2", - "address": "0x8888f1f195afa192cfee860698584c030f4c9db1", - "topics": ["0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", - null, - "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", - "0x0000000000000000000000000aff3454fce5edbc8cca8697c15331677e6ebccc"] -}] -`, - Run: func(cmd *cobra.Command, args []string) { - addFilter() - }, -} - -var filterFilepath string - -func init() { - rootCmd.AddCommand(addFilterCmd) - - addFilterCmd.PersistentFlags().StringVar(&filterFilepath, "filter-filepath", "", "path/to/filter.json") - addFilterCmd.MarkFlagRequired("filter-filepath") -} - -func addFilter() { - if filterFilepath == "" { - log.Fatal("filter-filepath required") - } - var logFilters filters.LogFilters - blockchain := geth.NewBlockchain(ipc) - db := utils.LoadPostgres(databaseConfig, blockchain.Node()) - filterRepository := postgres.FilterRepository{DB: &db} - absFilePath := utils.AbsFilePath(filterFilepath) - logFilterBytes, err := ioutil.ReadFile(absFilePath) - if err != nil { - log.Fatal(err) - } - err = json.Unmarshal(logFilterBytes, &logFilters) - if err != nil { - log.Fatal(err) - } - for _, filter := range logFilters { - err = filterRepository.CreateFilter(filter) - if err != nil { - log.Fatal(err) - } - } -} diff --git a/cmd/graphql.go b/cmd/graphql.go deleted file mode 100644 index 20213127..00000000 --- a/cmd/graphql.go +++ /dev/null @@ -1,107 +0,0 @@ -package cmd - -import ( - "net/http" - _ "net/http/pprof" - - "log" - - "github.com/neelance/graphql-go" - "github.com/neelance/graphql-go/relay" - "github.com/spf13/cobra" - "github.com/vulcanize/vulcanizedb/pkg/geth" - "github.com/vulcanize/vulcanizedb/pkg/graphql_server" - "github.com/vulcanize/vulcanizedb/pkg/repositories/postgres" - "github.com/vulcanize/vulcanizedb/utils" -) - -var graphqlCmd = &cobra.Command{ - Use: "graphql", - Short: "Starts Vulcanize graphql server", - Long: `Starts vulcanize graphql server -and usage of using your command. 
For example: - -vulcanizedb graphql --port 9090 --host localhost -`, - Run: func(cmd *cobra.Command, args []string) { - schema := parseSchema() - serve(schema) - }, -} - -func init() { - var ( - port int - host string - ) - rootCmd.AddCommand(graphqlCmd) - - syncCmd.Flags().IntVar(&port, "port", 9090, "graphql: port") - syncCmd.Flags().StringVar(&host, "host", "localhost", "graphql: host") - -} - -func parseSchema() *graphql.Schema { - - blockchain := geth.NewBlockchain(ipc) - db := utils.LoadPostgres(databaseConfig, blockchain.Node()) - blockRepository := &postgres.BlockRepository{DB: &db} - logRepository := &postgres.LogRepository{DB: &db} - filterRepository := &postgres.FilterRepository{DB: &db} - watchedEventRepository := &postgres.WatchedEventRepository{DB: &db} - graphQLRepositories := graphql_server.GraphQLRepositories{ - WatchedEventRepository: watchedEventRepository, - BlockRepository: blockRepository, - LogRepository: logRepository, - FilterRepository: filterRepository, - } - schema := graphql.MustParseSchema(graphql_server.Schema, graphql_server.NewResolver(graphQLRepositories)) - return schema - -} - -func serve(schema *graphql.Schema) { - http.Handle("/", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Write(page) - })) - http.Handle("/query", &relay.Handler{Schema: schema}) - - log.Fatal(http.ListenAndServe(":9090", nil)) -} - -var page = []byte(` - - - - - - - - - - -
Loading...
- - - -`) diff --git a/cmd/sync.go b/cmd/sync.go index 84742f86..b5dd92c3 100644 --- a/cmd/sync.go +++ b/cmd/sync.go @@ -9,10 +9,10 @@ import ( "github.com/spf13/cobra" "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" "github.com/vulcanize/vulcanizedb/pkg/geth" "github.com/vulcanize/vulcanizedb/pkg/history" - "github.com/vulcanize/vulcanizedb/pkg/repositories" - "github.com/vulcanize/vulcanizedb/pkg/repositories/postgres" "github.com/vulcanize/vulcanizedb/utils" ) @@ -50,7 +50,7 @@ func init() { syncCmd.Flags().IntVarP(&startingBlockNumber, "starting-block-number", "s", 0, "Block number to start syncing from") } -func backFillAllBlocks(blockchain core.Blockchain, blockRepository repositories.BlockRepository, missingBlocksPopulated chan int, startingBlockNumber int64) { +func backFillAllBlocks(blockchain core.Blockchain, blockRepository datastore.BlockRepository, missingBlocksPopulated chan int, startingBlockNumber int64) { go func() { missingBlocksPopulated <- history.PopulateMissingBlocks(blockchain, blockRepository, startingBlockNumber) }() @@ -65,7 +65,7 @@ func sync() { log.Fatal("geth initial: state sync not finished") } db := utils.LoadPostgres(databaseConfig, blockchain.Node()) - blockRepository := postgres.BlockRepository{DB: &db} + blockRepository := repositories.BlockRepository{DB: &db} validator := history.NewBlockValidator(blockchain, blockRepository, 15) missingBlocksPopulated := make(chan int) diff --git a/filters/example-filter.json b/filters/example-filter.json deleted file mode 100644 index 1ac00c7e..00000000 --- a/filters/example-filter.json +++ /dev/null @@ -1,20 +0,0 @@ -[ - { - "name": "TransferFilter", - "fromBlock": "0x488290", - "toBlock": "0x488678", - "address": "0x06012c8cf97bead5deae237070f9587f8e7a266d", - "topics": [ - "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef" - ] - }, - { - "name": "NewFilter", - "fromBlock": "0x4B34AD", - "address": "0x06012c8cf97bead5deae237070f9587f8e7a266d", - "topics": [ - "0x241ea03ca20251805084d27d4440371c34a0b85ff108f6bb5611248f73818b80" - ] - } -] - diff --git a/integration_test/block_rewards_test.go b/integration_test/block_rewards_test.go index 1fc9c206..02ca5e72 100644 --- a/integration_test/block_rewards_test.go +++ b/integration_test/block_rewards_test.go @@ -1,32 +1,22 @@ package integration import ( - "log" - . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - cfg "github.com/vulcanize/vulcanizedb/pkg/config" "github.com/vulcanize/vulcanizedb/pkg/geth" + "github.com/vulcanize/vulcanizedb/test_config" ) var _ = Describe("Rewards calculations", func() { It("calculates a block reward for a real block", func() { - config, err := cfg.NewConfig("infura") - if err != nil { - log.Fatalln(err) - } - blockchain := geth.NewBlockchain(config.Client.IPCPath) + blockchain := geth.NewBlockchain(test_config.InfuraClient.IPCPath) block := blockchain.GetBlockByNumber(1071819) Expect(block.Reward).To(Equal(5.31355)) }) It("calculates an uncle reward for a real block", func() { - config, err := cfg.NewConfig("infura") - if err != nil { - log.Fatalln(err) - } - blockchain := geth.NewBlockchain(config.Client.IPCPath) + blockchain := geth.NewBlockchain(test_config.InfuraClient.IPCPath) block := blockchain.GetBlockByNumber(1071819) Expect(block.UnclesReward).To(Equal(6.875)) }) diff --git a/integration_test/contract_test.go b/integration_test/contract_test.go index c710a7be..f5522569 100644 --- a/integration_test/contract_test.go +++ b/integration_test/contract_test.go @@ -3,25 +3,20 @@ package integration import ( "math/big" - "log" - + "github.com/ethereum/go-ethereum/common" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - cfg "github.com/vulcanize/vulcanizedb/pkg/config" "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/geth" "github.com/vulcanize/vulcanizedb/pkg/geth/testing" + "github.com/vulcanize/vulcanizedb/test_config" ) var _ = Describe("Reading contracts", func() { Describe("Reading the list of attributes", func() { It("returns a string attribute for a real contract", func() { - config, err := cfg.NewConfig("infura") - if err != nil { - log.Fatalln(err) - } - blockchain := geth.NewBlockchain(config.Client.IPCPath) + blockchain := geth.NewBlockchain(test_config.InfuraClient.IPCPath) contract := testing.SampleContract() contractAttributes, err := blockchain.GetAttributes(contract) @@ -34,8 +29,7 @@ var _ = Describe("Reading contracts", func() { }) It("does not return an attribute that takes an input", func() { - config, err := cfg.NewConfig("infura") - blockchain := geth.NewBlockchain(config.Client.IPCPath) + blockchain := geth.NewBlockchain(test_config.InfuraClient.IPCPath) contract := testing.SampleContract() contractAttributes, err := blockchain.GetAttributes(contract) @@ -46,8 +40,7 @@ var _ = Describe("Reading contracts", func() { }) It("does not return an attribute that is not constant", func() { - config, _ := cfg.NewConfig("infura") - blockchain := geth.NewBlockchain(config.Client.IPCPath) + blockchain := geth.NewBlockchain(test_config.InfuraClient.IPCPath) contract := testing.SampleContract() contractAttributes, err := blockchain.GetAttributes(contract) @@ -60,8 +53,7 @@ var _ = Describe("Reading contracts", func() { Describe("Getting a contract attribute", func() { It("returns the correct attribute for a real contract", func() { - config, _ := cfg.NewConfig("infura") - blockchain := geth.NewBlockchain(config.Client.IPCPath) + blockchain := geth.NewBlockchain(test_config.InfuraClient.IPCPath) contract := testing.SampleContract() name, err := blockchain.GetAttribute(contract, "name", nil) @@ -71,8 +63,7 @@ var _ = Describe("Reading contracts", func() { }) It("returns the correct attribute for a real contract", func() { - config, _ := cfg.NewConfig("infura") - blockchain := geth.NewBlockchain(config.Client.IPCPath) + blockchain := geth.NewBlockchain(test_config.InfuraClient.IPCPath) 
contract := testing.SampleContract() name, err := blockchain.GetAttribute(contract, "name", nil) @@ -82,8 +73,7 @@ var _ = Describe("Reading contracts", func() { }) It("returns the correct attribute for a real contract at a specific block height", func() { - config, _ := cfg.NewConfig("infura") - blockchain := geth.NewBlockchain(config.Client.IPCPath) + blockchain := geth.NewBlockchain(test_config.InfuraClient.IPCPath) contract := testing.SampleContract() name, err := blockchain.GetAttribute(contract, "name", big.NewInt(4701536)) @@ -93,8 +83,7 @@ var _ = Describe("Reading contracts", func() { }) It("returns an error when asking for an attribute that does not exist", func() { - config, _ := cfg.NewConfig("infura") - blockchain := geth.NewBlockchain(config.Client.IPCPath) + blockchain := geth.NewBlockchain(test_config.InfuraClient.IPCPath) contract := testing.SampleContract() name, err := blockchain.GetAttribute(contract, "missing_attribute", nil) @@ -115,8 +104,7 @@ var _ = Describe("Reading contracts", func() { }, Index: 19, Data: "0x0000000000000000000000000000000000000000000000000c7d713b49da0000"} - config, _ := cfg.NewConfig("infura") - blockchain := geth.NewBlockchain(config.Client.IPCPath) + blockchain := geth.NewBlockchain(test_config.InfuraClient.IPCPath) contract := testing.SampleContract() logs, err := blockchain.GetLogs(contract, big.NewInt(4703824), nil) @@ -128,8 +116,7 @@ var _ = Describe("Reading contracts", func() { }) It("returns and empty log array when no events for a given block / contract combo", func() { - config, _ := cfg.NewConfig("infura") - blockchain := geth.NewBlockchain(config.Client.IPCPath) + blockchain := geth.NewBlockchain(test_config.InfuraClient.IPCPath) logs, err := blockchain.GetLogs(core.Contract{Hash: "x123"}, big.NewInt(4703824), nil) @@ -140,4 +127,19 @@ var _ = Describe("Reading contracts", func() { }) + Describe("Fetching Contract data", func() { + It("returns the correct attribute for a real contract", func() { + blockchain := geth.NewBlockchain(test_config.InfuraClient.IPCPath) + + contract := testing.SampleContract() + var balance = new(big.Int) + args := common.HexToHash("0xd26114cd6ee289accf82350c8d8487fedb8a0c07") + err := blockchain.FetchContractData(contract.Abi, "0xd26114cd6ee289accf82350c8d8487fedb8a0c07", "balanceOf", args, &balance, 5167471) + Expect(err).NotTo(HaveOccurred()) + expected := new(big.Int) + expected.SetString("10897295492887612977137", 10) + Expect(balance).To(Equal(expected)) + }) + }) + }) diff --git a/integration_test/geth_blockchain_test.go b/integration_test/geth_blockchain_test.go index 515a37a7..9f0cb414 100644 --- a/integration_test/geth_blockchain_test.go +++ b/integration_test/geth_blockchain_test.go @@ -1,29 +1,21 @@ package integration_test import ( - "io/ioutil" - "log" - . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/config" + "github.com/vulcanize/vulcanizedb/pkg/datastore/inmemory" "github.com/vulcanize/vulcanizedb/pkg/geth" "github.com/vulcanize/vulcanizedb/pkg/history" - "github.com/vulcanize/vulcanizedb/pkg/repositories/inmemory" + "github.com/vulcanize/vulcanizedb/test_config" ) -func init() { - log.SetOutput(ioutil.Discard) -} - var _ = Describe("Reading from the Geth blockchain", func() { var blockchain *geth.Blockchain var inMemory *inmemory.InMemory BeforeEach(func() { - cfg, _ := config.NewConfig("private") - blockchain = geth.NewBlockchain(cfg.Client.IPCPath) + blockchain = geth.NewBlockchain(test_config.TestClientConfig.IPCPath) inMemory = inmemory.NewInMemory() }) @@ -52,8 +44,8 @@ var _ = Describe("Reading from the Geth blockchain", func() { devNetworkNodeId := float64(1) Expect(node.GenesisBlock).To(Equal(devNetworkGenesisBlock)) - Expect(node.NetworkId).To(Equal(devNetworkNodeId)) - Expect(len(node.Id)).To(Equal(128)) + Expect(node.NetworkID).To(Equal(devNetworkNodeId)) + Expect(len(node.ID)).To(Equal(128)) Expect(node.ClientName).To(ContainSubstring("Geth")) close(done) diff --git a/libraries/shared/HandlerREADME.md b/libraries/shared/HandlerREADME.md new file mode 100644 index 00000000..878a4091 --- /dev/null +++ b/libraries/shared/HandlerREADME.md @@ -0,0 +1,27 @@ +# Handlers + +## Description +Handlers must be defined in order to define what events should trigger data updates and how those are performed. + +## Interface + +### Initializer +Accepts DB and Blockchain from Vulcanize and returns a new handler. E.g. for a new object "Cup": +`func NewCupHandler(db *postgres.DB, blockchain core.ContractDataFetcher) handlers.Handler` + +### Execute +Triggers operations to take in response to a given log event. +Can persist data from logs, fetch and persist arbitrary data from outside services (e.g. contract state), or take any number of other actions. E.g.: +`func (cupHandler *CupHandler) Execute() error` + +## Additional Requirements +Handlers must define log filters and create them so that relevant watched events can be identified and retrieved. 
E.g.: +```go + { + Name: "CupsBite", + FromBlock: 0, + ToBlock: -1, + Address: "0x448a5065aebb8e423f0896e6c5d525c040f59af3", + Topics: core.Topics{"0x40cc885400000000000000000000000000000000000000000000000000000000"}, + }, +``` \ No newline at end of file diff --git a/libraries/shared/handler_interface.go b/libraries/shared/handler_interface.go new file mode 100644 index 00000000..951b864e --- /dev/null +++ b/libraries/shared/handler_interface.go @@ -0,0 +1,23 @@ +package shared + +import ( + "github.com/ethereum/go-ethereum/common" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" +) + +type Handler interface { + Execute() error +} + +type HandlerInitializer func(db *postgres.DB, blockchain core.ContractDataFetcher) Handler + +func HexToInt64(byteString string) int64 { + intHash := common.HexToHash(byteString) + return intHash.Big().Int64() +} + +func HexToString(byteString string) string { + value := common.HexToHash(byteString) + return value.Big().String() +} diff --git a/pkg/graphql_server/graphql_server_suite_test.go b/libraries/shared/shared_suite_test.go similarity index 51% rename from pkg/graphql_server/graphql_server_suite_test.go rename to libraries/shared/shared_suite_test.go index a7a3b735..b2c78ef2 100644 --- a/pkg/graphql_server/graphql_server_suite_test.go +++ b/libraries/shared/shared_suite_test.go @@ -1,4 +1,4 @@ -package graphql_server_test +package shared_test import ( "testing" @@ -7,7 +7,7 @@ import ( . "github.com/onsi/gomega" ) -func TestGraphqlServer(t *testing.T) { +func TestShared(t *testing.T) { RegisterFailHandler(Fail) - RunSpecs(t, "GraphqlServer Suite") + RunSpecs(t, "Shared Suite") } diff --git a/libraries/shared/watcher.go b/libraries/shared/watcher.go new file mode 100644 index 00000000..14aebb81 --- /dev/null +++ b/libraries/shared/watcher.go @@ -0,0 +1,27 @@ +package shared + +import ( + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" +) + +type Watcher struct { + Handlers []Handler + DB postgres.DB + Blockchain core.ContractDataFetcher +} + +func (watcher *Watcher) AddHandlers(us []HandlerInitializer) { + for _, handlerInitializer := range us { + handler := handlerInitializer(&watcher.DB, watcher.Blockchain) + watcher.Handlers = append(watcher.Handlers, handler) + } +} + +func (watcher *Watcher) Execute() error { + var err error + for _, handler := range watcher.Handlers { + err = handler.Execute() + } + return err +} diff --git a/libraries/shared/watcher_test.go b/libraries/shared/watcher_test.go new file mode 100644 index 00000000..ada33e70 --- /dev/null +++ b/libraries/shared/watcher_test.go @@ -0,0 +1,69 @@ +package shared_test + +import ( + "errors" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + "github.com/vulcanize/vulcanizedb/libraries/shared" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" +) + +type MockHandler struct { + executeWasCalled bool + executeError error +} + +func (mh *MockHandler) Execute() error { + if mh.executeError != nil { + return mh.executeError + } + mh.executeWasCalled = true + return nil +} + +func fakeHandlerInitializer(db *postgres.DB, blockchain core.ContractDataFetcher) shared.Handler { + return &MockHandler{} +} + +var _ = Describe("Watcher", func() { + It("Adds handlers", func() { + watcher := shared.Watcher{} + + watcher.AddHandlers([]shared.HandlerInitializer{fakeHandlerInitializer}) + + Expect(len(watcher.Handlers)).To(Equal(1)) + Expect(watcher.Handlers).To(ConsistOf(&MockHandler{})) + }) + + It("Adds handlers from multiple sources", func() { + watcher := shared.Watcher{} + + watcher.AddHandlers([]shared.HandlerInitializer{fakeHandlerInitializer}) + watcher.AddHandlers([]shared.HandlerInitializer{fakeHandlerInitializer}) + + Expect(len(watcher.Handlers)).To(Equal(2)) + }) + + It("Executes each handler", func() { + watcher := shared.Watcher{} + fakeHandler := &MockHandler{} + watcher.Handlers = []shared.Handler{fakeHandler} + + watcher.Execute() + + Expect(fakeHandler.executeWasCalled).To(BeTrue()) + }) + + It("Returns an error if handler returns an error", func() { + watcher := shared.Watcher{} + fakeHandler := &MockHandler{executeError: errors.New("Something bad happened")} + watcher.Handlers = []shared.Handler{fakeHandler} + + err := watcher.Execute() + + Expect(err).To(HaveOccurred()) + Expect(fakeHandler.executeWasCalled).To(BeFalse()) + }) +}) diff --git a/pkg/config/config.go b/pkg/config/config.go index ad8374b0..d53444ea 100644 --- a/pkg/config/config.go +++ b/pkg/config/config.go @@ -1,79 +1,6 @@ package config -import ( - "os" - - "fmt" - - "path/filepath" - - "path" - "runtime" - - "errors" - - "net/url" - - "github.com/BurntSushi/toml" -) - type Config struct { Database Database Client Client } - -var NewErrConfigFileNotFound = func(environment string) error { - return errors.New(fmt.Sprintf("No configuration found for environment: %v", environment)) -} - -var NewErrBadConnectionString = func(connectionString string) error { - return errors.New(fmt.Sprintf("connection string is invalid: %v", connectionString)) -} - -func NewConfig(environment string) (Config, error) { - filenameWithExtension := fmt.Sprintf("%s.toml", environment) - absolutePath := filepath.Join(ProjectRoot(), "environments", filenameWithExtension) - config, err := parseConfigFile(absolutePath) - if err != nil { - return Config{}, NewErrConfigFileNotFound(environment) - } else { - if !filepath.IsAbs(config.Client.IPCPath) && !isUrl(config.Client.IPCPath) { - config.Client.IPCPath = filepath.Join(ProjectRoot(), config.Client.IPCPath) - } - return config, nil - } -} - -func ProjectRoot() string { - var _, filename, _, _ = runtime.Caller(0) - return path.Join(path.Dir(filename), "..", "..") -} - -func isUrl(s string) bool { - _, err := url.ParseRequestURI(s) - if err == nil { - return true - } - return false -} - -func fileExists(s string) bool { - _, err := os.Stat(s) - if err == nil { - return true - } - return false -} - -func parseConfigFile(filePath string) (Config, error) { - var cfg Config - if !isUrl(filePath) && !fileExists(filePath) { - return Config{}, NewErrBadConnectionString(filePath) - } else { - _, err := toml.DecodeFile(filePath, &cfg) - if err != nil { - return 
Config{}, err - } - return cfg, nil - } -} diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go index 61af9ab7..1fe36a9a 100644 --- a/pkg/config/config_test.go +++ b/pkg/config/config_test.go @@ -1,41 +1,24 @@ package config_test import ( - "path/filepath" - . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" - cfg "github.com/vulcanize/vulcanizedb/pkg/config" + "github.com/spf13/viper" ) var _ = Describe("Loading the config", func() { It("reads the private config using the environment", func() { - privateConfig, err := cfg.NewConfig("private") - + testConfig := viper.New() + testConfig.SetConfigName("private") + testConfig.AddConfigPath("$GOPATH/src/github.com/vulcanize/vulcanizedb/environments/") + err := testConfig.ReadInConfig() + Expect(viper.Get("client.ipcpath")).To(BeNil()) Expect(err).To(BeNil()) - Expect(privateConfig.Database.Hostname).To(Equal("localhost")) - Expect(privateConfig.Database.Name).To(Equal("vulcanize_private")) - Expect(privateConfig.Database.Port).To(Equal(5432)) - expandedPath := filepath.Join(cfg.ProjectRoot(), "test_data_dir/geth.ipc") - Expect(privateConfig.Client.IPCPath).To(Equal(expandedPath)) - }) - - It("returns an error when there is no matching config file", func() { - config, err := cfg.NewConfig("bad-config") - - Expect(config).To(Equal(cfg.Config{})) - Expect(err).NotTo(BeNil()) - }) - - It("reads the infura config using the environment", func() { - infuraConfig, err := cfg.NewConfig("infura") - - Expect(err).To(BeNil()) - Expect(infuraConfig.Database.Hostname).To(Equal("localhost")) - Expect(infuraConfig.Database.Name).To(Equal("vulcanize_private")) - Expect(infuraConfig.Database.Port).To(Equal(5432)) - Expect(infuraConfig.Client.IPCPath).To(Equal("https://mainnet.infura.io/J5Vd2fRtGsw0zZ0Ov3BL")) + Expect(testConfig.Get("database.hostname")).To(Equal("localhost")) + Expect(testConfig.Get("database.name")).To(Equal("vulcanize_private")) + Expect(testConfig.Get("database.port")).To(Equal(int64(5432))) + Expect(testConfig.Get("client.ipcpath")).To(Equal("test_data_dir/geth.ipc")) }) }) diff --git a/pkg/contract_summary/console_presenter.go b/pkg/contract_summary/console_presenter.go index b475450a..4d646566 100644 --- a/pkg/contract_summary/console_presenter.go +++ b/pkg/contract_summary/console_presenter.go @@ -30,11 +30,10 @@ func template() string { func transactionToString(transaction *core.Transaction) string { if transaction == nil { return "NONE" - } else { - return fmt.Sprintf(`Hash: %s + } + return fmt.Sprintf(`Hash: %s To: %s From: %s`, transaction.Hash, transaction.To, transaction.From) - } } func attributesString(summary ContractSummary) string { diff --git a/pkg/contract_summary/summary.go b/pkg/contract_summary/summary.go index 10488389..16cd0819 100644 --- a/pkg/contract_summary/summary.go +++ b/pkg/contract_summary/summary.go @@ -4,7 +4,7 @@ import ( "math/big" "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/repositories" + "github.com/vulcanize/vulcanizedb/pkg/datastore" ) type ContractSummary struct { @@ -17,13 +17,12 @@ type ContractSummary struct { blockChain core.Blockchain } -func NewSummary(blockchain core.Blockchain, contractRepository repositories.ContractRepository, contractHash string, blockNumber *big.Int) (ContractSummary, error) { +func NewSummary(blockchain core.Blockchain, contractRepository datastore.ContractRepository, contractHash string, blockNumber *big.Int) (ContractSummary, error) { contract, err := contractRepository.GetContract(contractHash) if err != nil { 
return ContractSummary{}, err - } else { - return newContractSummary(blockchain, contract, blockNumber), nil } + return newContractSummary(blockchain, contract, blockNumber), nil } func (contractSummary ContractSummary) GetStateAttribute(attributeName string) interface{} { @@ -48,7 +47,6 @@ func newContractSummary(blockchain core.Blockchain, contract core.Contract, bloc func lastTransaction(contract core.Contract) *core.Transaction { if len(contract.Transactions) > 0 { return &contract.Transactions[0] - } else { - return nil } + return nil } diff --git a/pkg/contract_summary/summary_test.go b/pkg/contract_summary/summary_test.go index 06fb9c3f..06d89e25 100644 --- a/pkg/contract_summary/summary_test.go +++ b/pkg/contract_summary/summary_test.go @@ -7,12 +7,12 @@ import ( . "github.com/onsi/gomega" "github.com/vulcanize/vulcanizedb/pkg/contract_summary" "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/inmemory" "github.com/vulcanize/vulcanizedb/pkg/fakes" - "github.com/vulcanize/vulcanizedb/pkg/repositories" - "github.com/vulcanize/vulcanizedb/pkg/repositories/inmemory" ) -func NewCurrentContractSummary(blockchain core.Blockchain, contractRepository repositories.ContractRepository, contractHash string) (contract_summary.ContractSummary, error) { +func NewCurrentContractSummary(blockchain core.Blockchain, contractRepository datastore.ContractRepository, contractHash string) (contract_summary.ContractSummary, error) { return contract_summary.NewSummary(blockchain, contractRepository, contractHash, nil) } @@ -143,6 +143,7 @@ var _ = Describe("The contract summary", func() { }, )) }) + }) }) diff --git a/pkg/core/blockchain.go b/pkg/core/blockchain.go index 62a0669c..1c6045d0 100644 --- a/pkg/core/blockchain.go +++ b/pkg/core/blockchain.go @@ -3,10 +3,14 @@ package core import "math/big" type Blockchain interface { + GetAttribute(contract Contract, attributeName string, blockNumber *big.Int) (interface{}, error) + GetAttributes(contract Contract) (ContractAttributes, error) GetBlockByNumber(blockNumber int64) Block + GetLogs(contract Contract, startingBlockNumber *big.Int, endingBlockNumber *big.Int) ([]Log, error) LastBlock() *big.Int Node() Node - GetAttributes(contract Contract) (ContractAttributes, error) - GetAttribute(contract Contract, attributeName string, blockNumber *big.Int) (interface{}, error) - GetLogs(contract Contract, startingBlockNumber *big.Int, endingBlockNumber *big.Int) ([]Log, error) +} + +type ContractDataFetcher interface { + FetchContractData(abiJSON string, address string, method string, methodArg interface{}, result interface{}, blockNumber int64) error } diff --git a/pkg/core/node_info.go b/pkg/core/node_info.go index fe88c928..1977550f 100644 --- a/pkg/core/node_info.go +++ b/pkg/core/node_info.go @@ -2,7 +2,7 @@ package core type Node struct { GenesisBlock string - NetworkId float64 - Id string + NetworkID float64 + ID string ClientName string } diff --git a/pkg/core/watched_event_log.go b/pkg/core/watched_event_log.go index 9bf23a18..04090bcf 100644 --- a/pkg/core/watched_event_log.go +++ b/pkg/core/watched_event_log.go @@ -1,14 +1,15 @@ package core type WatchedEvent struct { - Name string `json:"name"` // name - BlockNumber int64 `json:"block_number" db:"block_number"` // block_number - Address string `json:"address"` // address - TxHash string `json:"tx_hash" db:"tx_hash"` // tx_hash - Index int64 `json:"index"` // index - Topic0 string `json:"topic0"` // topic0 - 
Topic1 string `json:"topic1"` // topic1 - Topic2 string `json:"topic2"` // topic2 - Topic3 string `json:"topic3"` // topic3 - Data string `json:"data"` // data + LogID int64 `json:"log_id" db:"id"` + Name string `json:"name"` + BlockNumber int64 `json:"block_number" db:"block_number"` + Address string `json:"address"` + TxHash string `json:"tx_hash" db:"tx_hash"` + Index int64 `json:"index"` + Topic0 string `json:"topic0"` + Topic1 string `json:"topic1"` + Topic2 string `json:"topic2"` + Topic3 string `json:"topic3"` + Data string `json:"data"` } diff --git a/pkg/repositories/inmemory/block_repository.go b/pkg/datastore/inmemory/block_repository.go similarity index 92% rename from pkg/repositories/inmemory/block_repository.go rename to pkg/datastore/inmemory/block_repository.go index a9dda09d..40d3b365 100644 --- a/pkg/repositories/inmemory/block_repository.go +++ b/pkg/datastore/inmemory/block_repository.go @@ -2,7 +2,7 @@ package inmemory import ( "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/repositories" + "github.com/vulcanize/vulcanizedb/pkg/datastore" ) type BlockRepository struct { @@ -23,7 +23,7 @@ func (blockRepository *BlockRepository) GetBlock(blockNumber int64) (core.Block, if block, ok := blockRepository.blocks[blockNumber]; ok { return block, nil } - return core.Block{}, repositories.ErrBlockDoesNotExist(blockNumber) + return core.Block{}, datastore.ErrBlockDoesNotExist(blockNumber) } func (blockRepository *BlockRepository) MissingBlockNumbers(startingBlockNumber int64, endingBlockNumber int64) []int64 { diff --git a/pkg/repositories/inmemory/contract_repository.go b/pkg/datastore/inmemory/contract_repository.go similarity index 87% rename from pkg/repositories/inmemory/contract_repository.go rename to pkg/datastore/inmemory/contract_repository.go index 21db153b..6f759fd7 100644 --- a/pkg/repositories/inmemory/contract_repository.go +++ b/pkg/datastore/inmemory/contract_repository.go @@ -2,7 +2,7 @@ package inmemory import ( "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/repositories" + "github.com/vulcanize/vulcanizedb/pkg/datastore" ) type ContractRepostiory struct { @@ -17,7 +17,7 @@ func (contractRepository *ContractRepostiory) ContractExists(contractHash string func (contractRepository *ContractRepostiory) GetContract(contractHash string) (core.Contract, error) { contract, ok := contractRepository.contracts[contractHash] if !ok { - return core.Contract{}, repositories.ErrContractDoesNotExist(contractHash) + return core.Contract{}, datastore.ErrContractDoesNotExist(contractHash) } for _, block := range contractRepository.blocks { for _, transaction := range block.Transactions { diff --git a/pkg/repositories/inmemory/in_memory.go b/pkg/datastore/inmemory/in_memory.go similarity index 100% rename from pkg/repositories/inmemory/in_memory.go rename to pkg/datastore/inmemory/in_memory.go diff --git a/pkg/repositories/postgres/postgres.go b/pkg/datastore/postgres/postgres.go similarity index 88% rename from pkg/repositories/postgres/postgres.go rename to pkg/datastore/postgres/postgres.go index f7d8cdca..60c25255 100644 --- a/pkg/repositories/postgres/postgres.go +++ b/pkg/datastore/postgres/postgres.go @@ -4,15 +4,15 @@ import ( "errors" "github.com/jmoiron/sqlx" - _ "github.com/lib/pq" + _ "github.com/lib/pq" //postgres driver "github.com/vulcanize/vulcanizedb/pkg/config" "github.com/vulcanize/vulcanizedb/pkg/core" ) type DB struct { *sqlx.DB - node core.Node - nodeId int64 + Node core.Node + NodeID 
int64 } var ( @@ -28,7 +28,7 @@ func NewDB(databaseConfig config.Database, node core.Node) (*DB, error) { if err != nil { return &DB{}, ErrDBConnectionFailed } - pg := DB{DB: db, node: node} + pg := DB{DB: db, Node: node} err = pg.CreateNode(&node) if err != nil { return &DB{}, ErrUnableToSetNode @@ -48,10 +48,10 @@ func (db *DB) CreateNode(node *core.Node) error { node_id = $3, client_name = $4 RETURNING id`, - node.GenesisBlock, node.NetworkId, node.Id, node.ClientName).Scan(&nodeId) + node.GenesisBlock, node.NetworkID, node.ID, node.ClientName).Scan(&nodeId) if err != nil { return ErrUnableToSetNode } - db.nodeId = nodeId + db.NodeID = nodeId return nil } diff --git a/pkg/repositories/postgres/postgres_suite_test.go b/pkg/datastore/postgres/postgres_suite_test.go similarity index 100% rename from pkg/repositories/postgres/postgres_suite_test.go rename to pkg/datastore/postgres/postgres_suite_test.go diff --git a/pkg/repositories/postgres/postgres_test.go b/pkg/datastore/postgres/postgres_test.go similarity index 68% rename from pkg/repositories/postgres/postgres_test.go rename to pkg/datastore/postgres/postgres_test.go index 775486b1..5c512ca1 100644 --- a/pkg/repositories/postgres/postgres_test.go +++ b/pkg/datastore/postgres/postgres_test.go @@ -15,7 +15,9 @@ import ( . "github.com/onsi/gomega" "github.com/vulcanize/vulcanizedb/pkg/config" "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/repositories/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" + "github.com/vulcanize/vulcanizedb/test_config" ) func init() { @@ -23,24 +25,14 @@ func init() { } var _ = Describe("Postgres DB", func() { - var db *postgres.DB + var sqlxdb *sqlx.DB It("connects to the database", func() { - cfg, _ := config.NewConfig("private") - pgConfig := config.DbConnectionString(cfg.Database) - db, err := sqlx.Connect("postgres", pgConfig) + var err error + pgConfig := config.DbConnectionString(test_config.DBConfig) + sqlxdb, err = sqlx.Connect("postgres", pgConfig) Expect(err).Should(BeNil()) - Expect(db).ShouldNot(BeNil()) - }) - - BeforeEach(func() { - node := core.Node{ - GenesisBlock: "GENESIS", - NetworkId: 1, - Id: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845", - ClientName: "Geth/v1.7.2-stable-1db4ecdc/darwin-amd64/go1.9", - } - db = postgres.NewTestDB(node) + Expect(sqlxdb).ShouldNot(BeNil()) }) It("serializes big.Int to db", func() { @@ -51,9 +43,9 @@ var _ = Describe("Postgres DB", func() { // sized int, so use string representation of big.Int // and cast on insert - cfg, _ := config.NewConfig("private") - pgConfig := config.DbConnectionString(cfg.Database) - db, err := sqlx.Connect("postgres", pgConfig) + pgConnectString := config.DbConnectionString(test_config.DBConfig) + db, err := sqlx.Connect("postgres", pgConnectString) + Expect(err).NotTo(HaveOccurred()) bi := new(big.Int) bi.SetString("34940183920000000000", 10) @@ -87,10 +79,9 @@ var _ = Describe("Postgres DB", func() { Nonce: badNonce, Transactions: []core.Transaction{}, } - cfg, _ := config.NewConfig("private") - node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1, Id: "x123", ClientName: "geth"} - db, _ := postgres.NewDB(cfg.Database, node) - blocksRepository := postgres.BlockRepository{DB: db} + node := core.Node{GenesisBlock: "GENESIS", NetworkID: 1, ID: "x123", ClientName: "geth"} + db := test_config.NewTestDB(node) + 
blocksRepository := repositories.BlockRepository{DB: db} err1 := blocksRepository.CreateOrUpdateBlock(badBlock) savedBlock, err2 := blocksRepository.GetBlock(123) @@ -102,16 +93,15 @@ var _ = Describe("Postgres DB", func() { It("throws error when can't connect to the database", func() { invalidDatabase := config.Database{} - node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1, Id: "x123", ClientName: "geth"} + node := core.Node{GenesisBlock: "GENESIS", NetworkID: 1, ID: "x123", ClientName: "geth"} _, err := postgres.NewDB(invalidDatabase, node) Expect(err).To(Equal(postgres.ErrDBConnectionFailed)) }) It("throws error when can't create node", func() { - cfg, _ := config.NewConfig("private") badHash := fmt.Sprintf("x %s", strings.Repeat("1", 100)) - node := core.Node{GenesisBlock: badHash, NetworkId: 1, Id: "x123", ClientName: "geth"} - _, err := postgres.NewDB(cfg.Database, node) + node := core.Node{GenesisBlock: badHash, NetworkID: 1, ID: "x123", ClientName: "geth"} + _, err := postgres.NewDB(test_config.DBConfig, node) Expect(err).To(Equal(postgres.ErrUnableToSetNode)) }) @@ -123,10 +113,9 @@ var _ = Describe("Postgres DB", func() { BlockNumber: 1, TxHash: badTxHash, } - cfg, _ := config.NewConfig("private") - node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1, Id: "x123", ClientName: "geth"} - db, _ := postgres.NewDB(cfg.Database, node) - logRepository := postgres.LogRepository{DB: db} + node := core.Node{GenesisBlock: "GENESIS", NetworkID: 1, ID: "x123", ClientName: "geth"} + db, _ := postgres.NewDB(test_config.DBConfig, node) + logRepository := repositories.LogRepository{DB: db} err := logRepository.CreateLogs([]core.Log{badLog}) savedBlock := logRepository.GetLogs("x123", 1) @@ -143,10 +132,9 @@ var _ = Describe("Postgres DB", func() { Number: 123, Transactions: []core.Transaction{badTransaction}, } - cfg, _ := config.NewConfig("private") - node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1, Id: "x123", ClientName: "geth"} - db, _ := postgres.NewDB(cfg.Database, node) - blockRepository := postgres.BlockRepository{DB: db} + node := core.Node{GenesisBlock: "GENESIS", NetworkID: 1, ID: "x123", ClientName: "geth"} + db, _ := postgres.NewDB(test_config.DBConfig, node) + blockRepository := repositories.BlockRepository{DB: db} err1 := blockRepository.CreateOrUpdateBlock(block) savedBlock, err2 := blockRepository.GetBlock(123) diff --git a/pkg/repositories/postgres/block_repository.go b/pkg/datastore/postgres/repositories/block_repository.go similarity index 93% rename from pkg/repositories/postgres/block_repository.go rename to pkg/datastore/postgres/repositories/block_repository.go index e8b77465..525cfc06 100644 --- a/pkg/repositories/postgres/block_repository.go +++ b/pkg/datastore/postgres/repositories/block_repository.go @@ -1,15 +1,15 @@ -package postgres +package repositories import ( "context" "database/sql" - "fmt" "log" "github.com/jmoiron/sqlx" "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/repositories" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) const ( @@ -17,7 +17,7 @@ const ( ) type BlockRepository struct { - *DB + *postgres.DB } func (blockRepository BlockRepository) SetBlocksStatus(chainHead int64) { @@ -79,12 +79,12 @@ func (blockRepository BlockRepository) GetBlock(blockNumber int64) (core.Block, reward, uncles_reward FROM blocks - WHERE node_id = $1 AND number = $2`, blockRepository.nodeId, blockNumber) + WHERE node_id = $1 AND number = $2`, 
blockRepository.NodeID, blockNumber) savedBlock, err := blockRepository.loadBlock(blockRows) if err != nil { switch err { case sql.ErrNoRows: - return core.Block{}, repositories.ErrBlockDoesNotExist(blockNumber) + return core.Block{}, datastore.ErrBlockDoesNotExist(blockNumber) default: return savedBlock, err } @@ -100,16 +100,16 @@ func (blockRepository BlockRepository) insertBlock(block core.Block) error { (node_id, number, gaslimit, gasused, time, difficulty, hash, nonce, parenthash, size, uncle_hash, is_final, miner, extra_data, reward, uncles_reward) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12, $13, $14, $15, $16) RETURNING id `, - blockRepository.nodeId, block.Number, block.GasLimit, block.GasUsed, block.Time, block.Difficulty, block.Hash, block.Nonce, block.ParentHash, block.Size, block.UncleHash, block.IsFinal, block.Miner, block.ExtraData, block.Reward, block.UnclesReward). + blockRepository.NodeID, block.Number, block.GasLimit, block.GasUsed, block.Time, block.Difficulty, block.Hash, block.Nonce, block.ParentHash, block.Size, block.UncleHash, block.IsFinal, block.Miner, block.ExtraData, block.Reward, block.UnclesReward). Scan(&blockId) if err != nil { tx.Rollback() - return ErrDBInsertFailed + return postgres.ErrDBInsertFailed } err = blockRepository.createTransactions(tx, blockId, block.Transactions) if err != nil { tx.Rollback() - return ErrDBInsertFailed + return postgres.ErrDBInsertFailed } tx.Commit() return nil @@ -191,7 +191,7 @@ func (blockRepository BlockRepository) getBlockHash(block core.Block) (string, b `SELECT hash FROM blocks WHERE number = $1 AND node_id = $2`, - block.Number, blockRepository.nodeId) + block.Number, blockRepository.NodeID) return retrievedBlockHash, blockExists(retrievedBlockHash) } @@ -204,7 +204,7 @@ func (blockRepository BlockRepository) createLogs(tx *sql.Tx, logs []core.Log, r tlog.BlockNumber, tlog.Address, tlog.TxHash, tlog.Index, tlog.Topics[0], tlog.Topics[1], tlog.Topics[2], tlog.Topics[3], tlog.Data, receiptId, ) if err != nil { - return ErrDBInsertFailed + return postgres.ErrDBInsertFailed } } return nil @@ -219,9 +219,9 @@ func (blockRepository BlockRepository) removeBlock(blockNumber int64) error { `DELETE FROM blocks WHERE number=$1 AND node_id=$2`, - blockNumber, blockRepository.nodeId) + blockNumber, blockRepository.NodeID) if err != nil { - return ErrDBDeleteFailed + return postgres.ErrDBDeleteFailed } return nil } @@ -261,7 +261,6 @@ func (blockRepository BlockRepository) LoadTransactions(transactionRows *sqlx.Ro var transaction core.Transaction err := transactionRows.StructScan(&transaction) if err != nil { - fmt.Println(err) log.Fatal(err) } transactions = append(transactions, transaction) diff --git a/pkg/repositories/postgres/block_repository_test.go b/pkg/datastore/postgres/repositories/block_repository_test.go similarity index 92% rename from pkg/repositories/postgres/block_repository_test.go rename to pkg/datastore/postgres/repositories/block_repository_test.go index 01599bfa..cea2cb94 100644 --- a/pkg/repositories/postgres/block_repository_test.go +++ b/pkg/datastore/postgres/repositories/block_repository_test.go @@ -1,4 +1,4 @@ -package postgres_test +package repositories_test import ( "math/big" @@ -7,22 +7,24 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/repositories" - "github.com/vulcanize/vulcanizedb/pkg/repositories/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" + "github.com/vulcanize/vulcanizedb/test_config" ) var _ = Describe("Saving blocks", func() { var db *postgres.DB - var blockRepository repositories.BlockRepository + var blockRepository datastore.BlockRepository BeforeEach(func() { node := core.Node{ GenesisBlock: "GENESIS", - NetworkId: 1, - Id: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845", + NetworkID: 1, + ID: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845", ClientName: "Geth/v1.7.2-stable-1db4ecdc/darwin-amd64/go1.9", } - db = postgres.NewTestDB(node) - blockRepository = postgres.BlockRepository{DB: db} + db = test_config.NewTestDB(node) + blockRepository = repositories.BlockRepository{DB: db} }) @@ -33,12 +35,12 @@ var _ = Describe("Saving blocks", func() { blockRepository.CreateOrUpdateBlock(block) nodeTwo := core.Node{ GenesisBlock: "0x456", - NetworkId: 1, - Id: "x123456", + NetworkID: 1, + ID: "x123456", ClientName: "Geth", } - dbTwo := postgres.NewTestDB(nodeTwo) - repositoryTwo := postgres.BlockRepository{DB: dbTwo} + dbTwo := test_config.NewTestDB(nodeTwo) + repositoryTwo := repositories.BlockRepository{DB: dbTwo} _, err := repositoryTwo.GetBlock(123) Expect(err).To(HaveOccurred()) @@ -161,10 +163,10 @@ var _ = Describe("Saving blocks", func() { blockRepository.CreateOrUpdateBlock(blockOne) nodeTwo := core.Node{ GenesisBlock: "0x456", - NetworkId: 1, + NetworkID: 1, } - dbTwo := postgres.NewTestDB(nodeTwo) - repositoryTwo := postgres.BlockRepository{DB: dbTwo} + dbTwo := test_config.NewTestDB(nodeTwo) + repositoryTwo := repositories.BlockRepository{DB: dbTwo} blockRepository.CreateOrUpdateBlock(blockOne) repositoryTwo.CreateOrUpdateBlock(blockTwo) @@ -283,6 +285,5 @@ var _ = Describe("Saving blocks", func() { Expect(err).ToNot(HaveOccurred()) Expect(blockTwo.IsFinal).To(BeFalse()) }) - }) }) diff --git a/pkg/repositories/postgres/contract_repository.go b/pkg/datastore/postgres/repositories/contract_repository.go similarity index 89% rename from pkg/repositories/postgres/contract_repository.go rename to pkg/datastore/postgres/repositories/contract_repository.go index 64846526..1640fd8a 100644 --- a/pkg/repositories/postgres/contract_repository.go +++ b/pkg/datastore/postgres/repositories/contract_repository.go @@ -1,14 +1,15 @@ -package postgres +package repositories import ( "database/sql" "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/repositories" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) type ContractRepository struct { - *DB + *postgres.DB } func (contractRepository ContractRepository) CreateContract(contract core.Contract) error { @@ -25,7 +26,7 @@ func (contractRepository ContractRepository) CreateContract(contract core.Contra SET contract_hash = $1, contract_abi = $2 `, contract.Hash, abiToInsert) if err != nil { - return ErrDBInsertFailed + return postgres.ErrDBInsertFailed } return nil } @@ -47,7 +48,7 @@ func (contractRepository ContractRepository) GetContract(contractHash string) 
(c `SELECT contract_hash, contract_abi FROM watched_contracts WHERE contract_hash=$1`, contractHash) err := contract.Scan(&hash, &abi) if err == sql.ErrNoRows { - return core.Contract{}, repositories.ErrContractDoesNotExist(contractHash) + return core.Contract{}, datastore.ErrContractDoesNotExist(contractHash) } savedContract := contractRepository.addTransactions(core.Contract{Hash: hash, Abi: abi}) return savedContract, nil diff --git a/pkg/repositories/postgres/contract_repository_test.go b/pkg/datastore/postgres/repositories/contract_repository_test.go similarity index 82% rename from pkg/repositories/postgres/contract_repository_test.go rename to pkg/datastore/postgres/repositories/contract_repository_test.go index 892318ac..6aa1e7f1 100644 --- a/pkg/repositories/postgres/contract_repository_test.go +++ b/pkg/datastore/postgres/repositories/contract_repository_test.go @@ -1,4 +1,4 @@ -package postgres_test +package repositories_test import ( "sort" @@ -6,24 +6,26 @@ import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/repositories" - "github.com/vulcanize/vulcanizedb/pkg/repositories/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" + "github.com/vulcanize/vulcanizedb/test_config" ) var _ = Describe("Creating contracts", func() { var db *postgres.DB - var contractRepository repositories.ContractRepository + var contractRepository datastore.ContractRepository var node core.Node BeforeEach(func() { node = core.Node{ GenesisBlock: "GENESIS", - NetworkId: 1, - Id: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845", + NetworkID: 1, + ID: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845", ClientName: "Geth/v1.7.2-stable-1db4ecdc/darwin-amd64/go1.9", } - db = postgres.NewTestDB(node) - contractRepository = postgres.ContractRepository{DB: db} + db = test_config.NewTestDB(node) + contractRepository = repositories.ContractRepository{DB: db} }) It("returns the contract when it exists", func() { @@ -50,8 +52,8 @@ var _ = Describe("Creating contracts", func() { }) It("returns transactions 'To' a contract", func() { - var blockRepository repositories.BlockRepository - blockRepository = postgres.BlockRepository{DB: db} + var blockRepository datastore.BlockRepository + blockRepository = repositories.BlockRepository{DB: db} block := core.Block{ Number: 123, Transactions: []core.Transaction{ diff --git a/pkg/repositories/postgres/log_filter_repository.go b/pkg/datastore/postgres/repositories/log_filter_repository.go similarity index 87% rename from pkg/repositories/postgres/log_filter_repository.go rename to pkg/datastore/postgres/repositories/log_filter_repository.go index 4ef40af2..6b1d3d7c 100644 --- a/pkg/repositories/postgres/log_filter_repository.go +++ b/pkg/datastore/postgres/repositories/log_filter_repository.go @@ -1,4 +1,4 @@ -package postgres +package repositories import ( "database/sql" @@ -6,12 +6,13 @@ import ( "encoding/json" "errors" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" "github.com/vulcanize/vulcanizedb/pkg/filters" - "github.com/vulcanize/vulcanizedb/pkg/repositories" ) type FilterRepository struct { - *DB + *postgres.DB 
} func (filterRepository FilterRepository) CreateFilter(query filters.LogFilter) error { @@ -41,7 +42,7 @@ func (filterRepository FilterRepository) GetFilter(name string) (filters.LogFilt if err != nil { switch err { case sql.ErrNoRows: - return filters.LogFilter{}, repositories.ErrFilterDoesNotExist(name) + return filters.LogFilter{}, datastore.ErrFilterDoesNotExist(name) default: return filters.LogFilter{}, err } @@ -57,9 +58,7 @@ func (t *DBTopics) Scan(src interface{}) error { if !ok { return error(errors.New("scan source was not []byte")) } - json.Unmarshal(asBytes, &t) - - return nil + return json.Unmarshal(asBytes, &t) } type DBLogFilter struct { diff --git a/pkg/repositories/postgres/log_filter_repository_test.go b/pkg/datastore/postgres/repositories/log_filter_repository_test.go similarity index 71% rename from pkg/repositories/postgres/log_filter_repository_test.go rename to pkg/datastore/postgres/repositories/log_filter_repository_test.go index 12aaa67d..f7aac44b 100644 --- a/pkg/repositories/postgres/log_filter_repository_test.go +++ b/pkg/datastore/postgres/repositories/log_filter_repository_test.go @@ -1,27 +1,29 @@ -package postgres_test +package repositories_test import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" "github.com/vulcanize/vulcanizedb/pkg/filters" - "github.com/vulcanize/vulcanizedb/pkg/repositories" - "github.com/vulcanize/vulcanizedb/pkg/repositories/postgres" + "github.com/vulcanize/vulcanizedb/test_config" ) var _ = Describe("Log Filters Repository", func() { var db *postgres.DB - var filterRepository repositories.FilterRepository + var filterRepository datastore.FilterRepository var node core.Node BeforeEach(func() { node = core.Node{ GenesisBlock: "GENESIS", - NetworkId: 1, - Id: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845", + NetworkID: 1, + ID: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845", ClientName: "Geth/v1.7.2-stable-1db4ecdc/darwin-amd64/go1.9", } - db = postgres.NewTestDB(node) - filterRepository = postgres.FilterRepository{DB: db} + db = test_config.NewTestDB(node) + filterRepository = repositories.FilterRepository{DB: db} }) Describe("LogFilter", func() { @@ -63,7 +65,7 @@ var _ = Describe("Log Filters Repository", func() { It("gets a log filter", func() { - logFilter1 := filters.LogFilter{ + expectedLogFilter1 := filters.LogFilter{ Name: "TestFilter1", FromBlock: 1, ToBlock: 2, @@ -75,9 +77,9 @@ var _ = Describe("Log Filters Repository", func() { "", }, } - err := filterRepository.CreateFilter(logFilter1) + err := filterRepository.CreateFilter(expectedLogFilter1) Expect(err).ToNot(HaveOccurred()) - logFilter2 := filters.LogFilter{ + expectedLogFilter2 := filters.LogFilter{ Name: "TestFilter2", FromBlock: 10, ToBlock: 20, @@ -89,20 +91,20 @@ var _ = Describe("Log Filters Repository", func() { "", }, } - err = filterRepository.CreateFilter(logFilter2) + err = filterRepository.CreateFilter(expectedLogFilter2) Expect(err).ToNot(HaveOccurred()) - logFilter1, err = filterRepository.GetFilter("TestFilter1") + logFilter1, err := filterRepository.GetFilter("TestFilter1") Expect(err).ToNot(HaveOccurred()) - Expect(logFilter1).To(Equal(logFilter1)) - 
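The DBTopics change above stops swallowing the unmarshal error inside a custom database/sql Scanner. The same pattern in isolation, with a hypothetical `Tags` type standing in for DBTopics:

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

// Tags mirrors DBTopics: a slice persisted as a JSON column and
// hydrated through database/sql's Scanner interface. Hypothetical type.
type Tags []string

func (t *Tags) Scan(src interface{}) error {
	asBytes, ok := src.([]byte)
	if !ok {
		return errors.New("scan source was not []byte")
	}
	// Propagate the unmarshal error directly, as the patched
	// DBTopics.Scan now does, instead of discarding it.
	return json.Unmarshal(asBytes, t)
}

func main() {
	var tags Tags
	if err := tags.Scan([]byte(`["topic0","topic1"]`)); err != nil {
		fmt.Println("scan failed:", err)
		return
	}
	fmt.Println(tags) // [topic0 topic1]
}
```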
logFilter1, err = filterRepository.GetFilter("TestFilter1") + Expect(logFilter1).To(Equal(expectedLogFilter1)) + logFilter2, err := filterRepository.GetFilter("TestFilter2") Expect(err).ToNot(HaveOccurred()) - Expect(logFilter2).To(Equal(logFilter2)) + Expect(logFilter2).To(Equal(expectedLogFilter2)) }) It("returns ErrFilterDoesNotExist error when log does not exist", func() { _, err := filterRepository.GetFilter("TestFilter1") - Expect(err).To(Equal(repositories.ErrFilterDoesNotExist("TestFilter1"))) + Expect(err).To(Equal(datastore.ErrFilterDoesNotExist("TestFilter1"))) }) }) }) diff --git a/pkg/repositories/postgres/logs.go b/pkg/datastore/postgres/repositories/logs_repository.go similarity index 92% rename from pkg/repositories/postgres/logs.go rename to pkg/datastore/postgres/repositories/logs_repository.go index 16df0b96..08ff509d 100644 --- a/pkg/repositories/postgres/logs.go +++ b/pkg/datastore/postgres/repositories/logs_repository.go @@ -1,4 +1,4 @@ -package postgres +package repositories import ( "context" @@ -6,10 +6,11 @@ import ( "database/sql" "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) type LogRepository struct { - *DB + *postgres.DB } func (logRepository LogRepository) CreateLogs(lgs []core.Log) error { @@ -23,7 +24,7 @@ func (logRepository LogRepository) CreateLogs(lgs []core.Log) error { ) if err != nil { tx.Rollback() - return ErrDBInsertFailed + return postgres.ErrDBInsertFailed } } tx.Commit() diff --git a/pkg/repositories/postgres/logs_test.go b/pkg/datastore/postgres/repositories/logs_repository_test.go similarity index 91% rename from pkg/repositories/postgres/logs_test.go rename to pkg/datastore/postgres/repositories/logs_repository_test.go index a582e592..9697a169 100644 --- a/pkg/repositories/postgres/logs_test.go +++ b/pkg/datastore/postgres/repositories/logs_repository_test.go @@ -1,4 +1,4 @@ -package postgres_test +package repositories_test import ( "sort" @@ -6,23 +6,25 @@ import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/repositories" - "github.com/vulcanize/vulcanizedb/pkg/repositories/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" + "github.com/vulcanize/vulcanizedb/test_config" ) var _ = Describe("Logs Repository", func() { var db *postgres.DB - var logsRepository repositories.LogRepository + var logsRepository datastore.LogRepository var node core.Node BeforeEach(func() { node = core.Node{ GenesisBlock: "GENESIS", - NetworkId: 1, - Id: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845", + NetworkID: 1, + ID: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845", ClientName: "Geth/v1.7.2-stable-1db4ecdc/darwin-amd64/go1.9", } - db = postgres.NewTestDB(node) - logsRepository = postgres.LogRepository{DB: db} + db = test_config.NewTestDB(node) + logsRepository = repositories.LogRepository{DB: db} }) Describe("Saving logs", func() { @@ -115,8 +117,8 @@ var _ = Describe("Logs Repository", func() { }) It("saves the logs attached to a receipt", func() { - var blockRepository repositories.BlockRepository - blockRepository = postgres.BlockRepository{DB: db} + var blockRepository datastore.BlockRepository + blockRepository = repositories.BlockRepository{DB: db} logs := []core.Log{{ Address: "0x8a4774fe82c63484afef97ca8d89a6ea5e21f973", @@ -176,6 +178,5 @@ var _ = Describe("Logs Repository", func() { expected := logs[1:] Expect(retrievedLogs).To(Equal(expected)) }) - }) }) diff --git a/pkg/repositories/postgres/receipt_repository.go b/pkg/datastore/postgres/repositories/receipt_repository.go similarity index 85% rename from pkg/repositories/postgres/receipt_repository.go rename to pkg/datastore/postgres/repositories/receipt_repository.go index 0b892bb7..44275bf3 100644 --- a/pkg/repositories/postgres/receipt_repository.go +++ b/pkg/datastore/postgres/repositories/receipt_repository.go @@ -1,14 +1,15 @@ -package postgres +package repositories import ( "database/sql" "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/repositories" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) type ReceiptRepository struct { - *DB + *postgres.DB } func (receiptRepository ReceiptRepository) GetReceipt(txHash string) (core.Receipt, error) { @@ -25,7 +26,7 @@ func (receiptRepository ReceiptRepository) GetReceipt(txHash string) (core.Recei if err != nil { switch err { case sql.ErrNoRows: - return core.Receipt{}, repositories.ErrReceiptDoesNotExist(txHash) + return core.Receipt{}, datastore.ErrReceiptDoesNotExist(txHash) default: return core.Receipt{}, err } diff --git a/pkg/repositories/postgres/receipts_repository_test.go b/pkg/datastore/postgres/repositories/receipts_repository_test.go similarity index 75% rename from pkg/repositories/postgres/receipts_repository_test.go rename to pkg/datastore/postgres/repositories/receipts_repository_test.go index 22f58a40..16c9a2e2 100644 --- a/pkg/repositories/postgres/receipts_repository_test.go +++ b/pkg/datastore/postgres/repositories/receipts_repository_test.go @@ -1,33 +1,35 @@ -package postgres_test +package repositories_test import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/repositories" - "github.com/vulcanize/vulcanizedb/pkg/repositories/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" + "github.com/vulcanize/vulcanizedb/test_config" ) var _ bool = Describe("Logs Repository", func() { - var receiptRepository repositories.ReceiptRepository + var receiptRepository datastore.ReceiptRepository var db *postgres.DB var node core.Node BeforeEach(func() { node = core.Node{ GenesisBlock: "GENESIS", - NetworkId: 1, - Id: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845", + NetworkID: 1, + ID: "b6f90c0fdd8ec9607aed8ee45c69322e47b7063f0bfb7a29c8ecafab24d0a22d24dd2329b5ee6ed4125a03cb14e57fd584e67f9e53e6c631055cbbd82f080845", ClientName: "Geth/v1.7.2-stable-1db4ecdc/darwin-amd64/go1.9", } - db = postgres.NewTestDB(node) - receiptRepository = postgres.ReceiptRepository{DB: db} + db = test_config.NewTestDB(node) + receiptRepository = repositories.ReceiptRepository{DB: db} }) Describe("Saving receipts", func() { It("returns the receipt when it exists", func() { - var blockRepository repositories.BlockRepository - db := postgres.NewTestDB(node) - blockRepository = postgres.BlockRepository{DB: db} + var blockRepository datastore.BlockRepository + db := test_config.NewTestDB(node) + blockRepository = repositories.BlockRepository{DB: db} expected := core.Receipt{ ContractAddress: "0xde0b295669a9fd93d5f28d9ec85e40f4cb697bae", CumulativeGasUsed: 7996119, @@ -64,9 +66,9 @@ var _ bool = Describe("Logs Repository", func() { }) It("still saves receipts without logs", func() { - var blockRepository repositories.BlockRepository - db := postgres.NewTestDB(node) - blockRepository = postgres.BlockRepository{DB: db} + var blockRepository datastore.BlockRepository + db := test_config.NewTestDB(node) + blockRepository = repositories.BlockRepository{DB: db} receipt := core.Receipt{ TxHash: "0x002c4799161d809b23f67884eb6598c9df5894929fe1a9ead97ca175d360f547", } diff --git a/pkg/datastore/postgres/repositories/repositories_suite_test.go b/pkg/datastore/postgres/repositories/repositories_suite_test.go new file mode 100644 index 00000000..b222d38c --- /dev/null +++ b/pkg/datastore/postgres/repositories/repositories_suite_test.go @@ -0,0 +1,13 @@ +package repositories_test + +import ( + "testing" + + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" +) + +func TestRepositories(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "Repositories Suite") +} diff --git a/pkg/repositories/postgres/watched_events.go b/pkg/datastore/postgres/repositories/watched_events_repository.go similarity index 61% rename from pkg/repositories/postgres/watched_events.go rename to pkg/datastore/postgres/repositories/watched_events_repository.go index 0ccc92af..d4682d2a 100644 --- a/pkg/repositories/postgres/watched_events.go +++ b/pkg/datastore/postgres/repositories/watched_events_repository.go @@ -1,15 +1,16 @@ -package postgres +package repositories import ( "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" ) type WatchedEventRepository struct { - *DB + *postgres.DB } func (watchedEventRepository WatchedEventRepository) GetWatchedEvents(name string) ([]*core.WatchedEvent, error) { - rows, err := watchedEventRepository.DB.Queryx(`SELECT name, block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data FROM watched_event_logs where name=$1`, name) + rows, err := watchedEventRepository.DB.Queryx(`SELECT id, name, block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data FROM watched_event_logs where name=$1`, name) if err != nil { return nil, err } @@ -18,7 +19,7 @@ func (watchedEventRepository WatchedEventRepository) GetWatchedEvents(name strin lgs := make([]*core.WatchedEvent, 0) for rows.Next() { lg := new(core.WatchedEvent) - err := rows.StructScan(lg) + err = rows.StructScan(lg) if err != nil { return nil, err } diff --git a/pkg/repositories/postgres/watched_events_test.go b/pkg/datastore/postgres/repositories/watched_events_repository_test.go similarity index 54% rename from pkg/repositories/postgres/watched_events_test.go rename to pkg/datastore/postgres/repositories/watched_events_repository_test.go index 2cb65a53..0cbee839 100644 --- a/pkg/repositories/postgres/watched_events_test.go +++ b/pkg/datastore/postgres/repositories/watched_events_repository_test.go @@ -1,24 +1,27 @@ -package postgres_test +package repositories_test import ( . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories" "github.com/vulcanize/vulcanizedb/pkg/filters" - "github.com/vulcanize/vulcanizedb/pkg/repositories/postgres" + "github.com/vulcanize/vulcanizedb/test_config" ) var _ = Describe("Watched Events Repository", func() { var db *postgres.DB - var logRepository postgres.LogRepository - var filterRepository postgres.FilterRepository - var watchedEventRepository postgres.WatchedEventRepository + var logRepository datastore.LogRepository + var filterRepository datastore.FilterRepository + var watchedEventRepository datastore.WatchedEventRepository BeforeEach(func() { - db = postgres.NewTestDB(core.Node{}) - logRepository = postgres.LogRepository{DB: db} - filterRepository = postgres.FilterRepository{DB: db} - watchedEventRepository = postgres.WatchedEventRepository{DB: db} + db = test_config.NewTestDB(core.Node{}) + logRepository = repositories.LogRepository{DB: db} + filterRepository = repositories.FilterRepository{DB: db} + watchedEventRepository = repositories.WatchedEventRepository{DB: db} }) It("retrieves watched event logs that match the event filter", func() { @@ -57,7 +60,15 @@ var _ = Describe("Watched Events Repository", func() { Expect(err).ToNot(HaveOccurred()) matchingLogs, err := watchedEventRepository.GetWatchedEvents("Filter1") Expect(err).ToNot(HaveOccurred()) - Expect(matchingLogs).To(Equal(expectedWatchedEventLog)) + Expect(len(matchingLogs)).To(Equal(1)) + Expect(matchingLogs[0].Name).To(Equal(expectedWatchedEventLog[0].Name)) + Expect(matchingLogs[0].BlockNumber).To(Equal(expectedWatchedEventLog[0].BlockNumber)) + Expect(matchingLogs[0].TxHash).To(Equal(expectedWatchedEventLog[0].TxHash)) + Expect(matchingLogs[0].Address).To(Equal(expectedWatchedEventLog[0].Address)) + Expect(matchingLogs[0].Topic0).To(Equal(expectedWatchedEventLog[0].Topic0)) + Expect(matchingLogs[0].Topic1).To(Equal(expectedWatchedEventLog[0].Topic1)) + Expect(matchingLogs[0].Topic2).To(Equal(expectedWatchedEventLog[0].Topic2)) + Expect(matchingLogs[0].Data).To(Equal(expectedWatchedEventLog[0].Data)) }) @@ -103,7 +114,14 @@ var _ = Describe("Watched Events Repository", func() { Expect(err).ToNot(HaveOccurred()) matchingLogs, err := watchedEventRepository.GetWatchedEvents("Filter1") Expect(err).ToNot(HaveOccurred()) - Expect(matchingLogs).To(Equal(expectedWatchedEventLog)) - + Expect(len(matchingLogs)).To(Equal(1)) + Expect(matchingLogs[0].Name).To(Equal(expectedWatchedEventLog[0].Name)) + Expect(matchingLogs[0].BlockNumber).To(Equal(expectedWatchedEventLog[0].BlockNumber)) + Expect(matchingLogs[0].TxHash).To(Equal(expectedWatchedEventLog[0].TxHash)) + Expect(matchingLogs[0].Address).To(Equal(expectedWatchedEventLog[0].Address)) + Expect(matchingLogs[0].Topic0).To(Equal(expectedWatchedEventLog[0].Topic0)) + Expect(matchingLogs[0].Topic1).To(Equal(expectedWatchedEventLog[0].Topic1)) + Expect(matchingLogs[0].Topic2).To(Equal(expectedWatchedEventLog[0].Topic2)) + Expect(matchingLogs[0].Data).To(Equal(expectedWatchedEventLog[0].Data)) }) }) diff --git a/pkg/repositories/repository.go b/pkg/datastore/repository.go similarity index 79% rename from pkg/repositories/repository.go rename to pkg/datastore/repository.go index d11d65ef..3b62fe12 100644 --- a/pkg/repositories/repository.go +++ b/pkg/datastore/repository.go @@ -1,7 +1,6 @@ -package repositories +package 
datastore import ( - "errors" "fmt" "github.com/vulcanize/vulcanizedb/pkg/core" @@ -9,7 +8,7 @@ import ( ) var ErrBlockDoesNotExist = func(blockNumber int64) error { - return errors.New(fmt.Sprintf("Block number %d does not exist", blockNumber)) + return fmt.Errorf("Block number %d does not exist", blockNumber) } type BlockRepository interface { @@ -20,7 +19,7 @@ type BlockRepository interface { } var ErrContractDoesNotExist = func(contractHash string) error { - return errors.New(fmt.Sprintf("Contract %v does not exist", contractHash)) + return fmt.Errorf("Contract %v does not exist", contractHash) } type ContractRepository interface { @@ -30,7 +29,7 @@ type ContractRepository interface { } var ErrFilterDoesNotExist = func(name string) error { - return errors.New(fmt.Sprintf("filter %s does not exist", name)) + return fmt.Errorf("filter %s does not exist", name) } type FilterRepository interface { @@ -44,7 +43,7 @@ type LogRepository interface { } var ErrReceiptDoesNotExist = func(txHash string) error { - return errors.New(fmt.Sprintf("Receipt for tx: %v does not exist", txHash)) + return fmt.Errorf("Receipt for tx: %v does not exist", txHash) } type ReceiptRepository interface { diff --git a/pkg/fakes/blockchain.go b/pkg/fakes/blockchain.go index 97e8c0d6..44543257 100644 --- a/pkg/fakes/blockchain.go +++ b/pkg/fakes/blockchain.go @@ -9,12 +9,17 @@ import ( ) type Blockchain struct { - logs map[string][]core.Log - blocks map[int64]core.Block - contractAttributes map[string]map[string]string - blocksChannel chan core.Block - WasToldToStop bool - node core.Node + logs map[string][]core.Log + blocks map[int64]core.Block + contractAttributes map[string]map[string]string + blocksChannel chan core.Block + WasToldToStop bool + node core.Node + ContractReturnValue []byte +} + +func (blockchain *Blockchain) CallContract(contractHash string, input []byte, blockNumber *big.Int) ([]byte, error) { + return blockchain.ContractReturnValue, nil } func (blockchain *Blockchain) LastBlock() *big.Int { @@ -50,7 +55,7 @@ func NewBlockchain() *Blockchain { blocks: make(map[int64]core.Block), logs: make(map[string][]core.Log), contractAttributes: make(map[string]map[string]string), - node: core.Node{GenesisBlock: "GENESIS", NetworkId: 1, Id: "x123", ClientName: "Geth"}, + node: core.Node{GenesisBlock: "GENESIS", NetworkID: 1, ID: "x123", ClientName: "Geth"}, } } @@ -91,7 +96,7 @@ func (blockchain *Blockchain) GetAttributes(contract core.Contract) (core.Contra var contractAttributes core.ContractAttributes attributes, ok := blockchain.contractAttributes[contract.Hash+"-1"] if ok { - for key, _ := range attributes { + for key := range attributes { contractAttributes = append(contractAttributes, core.ContractAttribute{Name: key, Type: "string"}) } } diff --git a/pkg/filters/filter_query.go b/pkg/filters/filter_query.go index cd02712a..60956ec8 100644 --- a/pkg/filters/filter_query.go +++ b/pkg/filters/filter_query.go @@ -31,7 +31,7 @@ func (filterQuery *LogFilter) UnmarshalJSON(input []byte) error { }{ Alias: (*Alias)(filterQuery), } - if err := json.Unmarshal(input, &aux); err != nil { + if err = json.Unmarshal(input, &aux); err != nil { return err } if filterQuery.Name == "" { diff --git a/pkg/geth/abi.go b/pkg/geth/abi.go index 51b72103..d38c6111 100644 --- a/pkg/geth/abi.go +++ b/pkg/geth/abi.go @@ -25,20 +25,19 @@ type Response struct { Result string } -type EtherScanApi struct { +type EtherScanAPI struct { client *http.Client url string } -func NewEtherScanClient(url string) *EtherScanApi { - return 
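The fake blockchain now answers CallContract with a caller-supplied payload, so contract-reading code can be exercised without a running node. A short sketch using only what the fake exposes:

```go
package main

import (
	"fmt"
	"math/big"

	"github.com/vulcanize/vulcanizedb/pkg/fakes"
)

func main() {
	blockchain := fakes.NewBlockchain()
	// Stub the bytes the next CallContract should hand back.
	blockchain.ContractReturnValue = []byte{0x01}

	output, err := blockchain.CallContract("0x123", []byte("input"), big.NewInt(5))
	fmt.Println(output, err) // [1] <nil>
}
```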
&EtherScanApi{ +func NewEtherScanClient(url string) *EtherScanAPI { + return &EtherScanAPI{ client: &http.Client{Timeout: 10 * time.Second}, url: url, } - } -func GenUrl(network string) string { +func GenURL(network string) string { switch network { case "ropsten": return "https://ropsten.etherscan.io" @@ -52,7 +51,7 @@ func GenUrl(network string) string { } //https://api.etherscan.io/api?module=contract&action=getabi&address=%s -func (e *EtherScanApi) GetAbi(contractHash string) (string, error) { +func (e *EtherScanAPI) GetAbi(contractHash string) (string, error) { target := new(Response) request := fmt.Sprintf("%s/api?module=contract&action=getabi&address=%s", e.url, contractHash) r, err := e.client.Get(request) @@ -60,8 +59,8 @@ func (e *EtherScanApi) GetAbi(contractHash string) (string, error) { return "", ErrApiRequestFailed } defer r.Body.Close() - json.NewDecoder(r.Body).Decode(&target) - return target.Result, nil + err = json.NewDecoder(r.Body).Decode(&target) + return target.Result, err } func ParseAbiFile(abiFilePath string) (abi.ABI, error) { diff --git a/pkg/geth/abi_test.go b/pkg/geth/abi_test.go index 436f1d6b..8843564e 100644 --- a/pkg/geth/abi_test.go +++ b/pkg/geth/abi_test.go @@ -1,20 +1,16 @@ package geth_test import ( - "path/filepath" - "net/http" "fmt" - "log" - "github.com/ethereum/go-ethereum/accounts/abi" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" "github.com/onsi/gomega/ghttp" - cfg "github.com/vulcanize/vulcanizedb/pkg/config" "github.com/vulcanize/vulcanizedb/pkg/geth" + "github.com/vulcanize/vulcanizedb/test_config" ) var _ = Describe("ABI files", func() { @@ -22,7 +18,7 @@ var _ = Describe("ABI files", func() { Describe("Reading ABI files", func() { It("loads a valid ABI file", func() { - path := filepath.Join(cfg.ProjectRoot(), "pkg", "geth", "testing", "valid_abi.json") + path := test_config.ABIFilePath + "valid_abi.json" contractAbi, err := geth.ParseAbiFile(path) @@ -31,7 +27,7 @@ var _ = Describe("ABI files", func() { }) It("reads the contents of a valid ABI file", func() { - path := filepath.Join(cfg.ProjectRoot(), "pkg", "geth", "testing", "valid_abi.json") + path := test_config.ABIFilePath + "valid_abi.json" contractAbi, err := geth.ReadAbiFile(path) @@ -40,7 +36,7 @@ var _ = Describe("ABI files", func() { }) It("returns an error when the file does not exist", func() { - path := filepath.Join(cfg.ProjectRoot(), "pkg", "geth", "testing", "missing_abi.json") + path := test_config.ABIFilePath + "missing_abi.json" contractAbi, err := geth.ParseAbiFile(path) @@ -49,7 +45,7 @@ var _ = Describe("ABI files", func() { }) It("returns an error when the file has invalid contents", func() { - path := filepath.Join(cfg.ProjectRoot(), "pkg", "geth", "testing", "invalid_abi.json") + path := test_config.ABIFilePath + "invalid_abi.json" contractAbi, err := geth.ParseAbiFile(path) @@ -61,19 +57,20 @@ var _ = Describe("ABI files", func() { var ( server *ghttp.Server - client *geth.EtherScanApi + client *geth.EtherScanAPI abiString string + err error ) BeforeEach(func() { server = ghttp.NewServer() client = geth.NewEtherScanClient(server.URL()) - path := filepath.Join(cfg.ProjectRoot(), "pkg", "geth", "testing", "sample_abi.json") - abiString, err := geth.ReadAbiFile(path) + path := test_config.ABIFilePath + "sample_abi.json" + abiString, err = geth.ReadAbiFile(path) + + Expect(err).NotTo(HaveOccurred()) _, err = geth.ParseAbi(abiString) - if err != nil { - log.Fatalln("Could not parse ABI") - } + Expect(err).NotTo(HaveOccurred()) }) AfterEach(func() { @@ 
-104,14 +101,14 @@ var _ = Describe("ABI files", func() { Describe("Generating etherscan endpoints based on network", func() { It("should return the main endpoint as the default", func() { - url := geth.GenUrl("") + url := geth.GenURL("") Expect(url).To(Equal("https://api.etherscan.io")) }) It("generates various test network endpoint if test network is supplied", func() { - ropstenUrl := geth.GenUrl("ropsten") - rinkebyUrl := geth.GenUrl("rinkeby") - kovanUrl := geth.GenUrl("kovan") + ropstenUrl := geth.GenURL("ropsten") + rinkebyUrl := geth.GenURL("rinkeby") + kovanUrl := geth.GenURL("kovan") Expect(ropstenUrl).To(Equal("https://ropsten.etherscan.io")) Expect(kovanUrl).To(Equal("https://kovan.etherscan.io")) diff --git a/pkg/geth/block_to_core_block.go b/pkg/geth/block_to_core_block.go index a2b0f1ff..6bfaf065 100644 --- a/pkg/geth/block_to_core_block.go +++ b/pkg/geth/block_to_core_block.go @@ -12,12 +12,12 @@ import ( "golang.org/x/net/context" ) -type GethClient interface { +type Client interface { TransactionSender(ctx context.Context, tx *types.Transaction, block common.Hash, index uint) (common.Address, error) TransactionReceipt(ctx context.Context, txHash common.Hash) (*types.Receipt, error) } -func ToCoreBlock(gethBlock *types.Block, client GethClient) core.Block { +func ToCoreBlock(gethBlock *types.Block, client Client) core.Block { transactions := convertTransactionsToCore(gethBlock, client) coreBlock := core.Block{ Difficulty: gethBlock.Difficulty().Int64(), @@ -39,7 +39,7 @@ func ToCoreBlock(gethBlock *types.Block, client GethClient) core.Block { return coreBlock } -func convertTransactionsToCore(gethBlock *types.Block, client GethClient) []core.Transaction { +func convertTransactionsToCore(gethBlock *types.Block, client Client) []core.Transaction { transactions := make([]core.Transaction, 0) for i, gethTransaction := range gethBlock.Transactions() { from, err := client.TransactionSender(context.Background(), gethTransaction, gethBlock.Hash(), uint(i)) @@ -56,7 +56,7 @@ func convertTransactionsToCore(gethBlock *types.Block, client GethClient) []core return transactions } -func appendReceiptToTransaction(client GethClient, transaction core.Transaction) (core.Transaction, error) { +func appendReceiptToTransaction(client Client, transaction core.Transaction) (core.Transaction, error) { gethReceipt, err := client.TransactionReceipt(context.Background(), common.HexToHash(transaction.Hash)) if err != nil { log.Println(err) @@ -84,7 +84,6 @@ func transToCoreTrans(transaction *types.Transaction, from *common.Address) core func addressToHex(to *common.Address) string { if to == nil { return "" - } else { - return to.Hex() } + return to.Hex() } diff --git a/pkg/geth/blockchain.go b/pkg/geth/blockchain.go index 8c040539..c731cfec 100644 --- a/pkg/geth/blockchain.go +++ b/pkg/geth/blockchain.go @@ -29,12 +29,13 @@ func NewBlockchain(ipcPath string) *Blockchain { blockchain := Blockchain{} rpcClient, err := rpc.Dial(ipcPath) if err != nil { + log.Println("Unable to connect to node") log.Fatal(err) } client := ethclient.NewClient(rpcClient) blockchain.node = node.Info(rpcClient) if infura := isInfuraNode(ipcPath); infura { - blockchain.node.Id = "infura" + blockchain.node.ID = "infura" blockchain.node.ClientName = "infura" } blockchain.client = client diff --git a/pkg/geth/contract.go b/pkg/geth/contract.go index b7db4500..a49e4b28 100644 --- a/pkg/geth/contract.go +++ b/pkg/geth/contract.go @@ -27,7 +27,7 @@ func (blockchain *Blockchain) GetAttribute(contract core.Contract, attributeName 
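GenURL plus NewEtherScanClient is the same pairing utils.GetAbi uses further below. A standalone sketch, with a placeholder contract address:

```go
package main

import (
	"fmt"
	"log"

	"github.com/vulcanize/vulcanizedb/pkg/geth"
)

func main() {
	url := geth.GenURL("ropsten") // "" selects the mainnet endpoint
	client := geth.NewEtherScanClient(url)

	// Placeholder contract hash; any deployed, verified contract works.
	abiString, err := client.GetAbi("0x314159265dd8dbb310642f98f50c066173c1259b")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(abiString)
}
```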
if err != nil { return nil, ErrInvalidStateAttribute } - output, err := callContract(contract.Hash, input, blockchain, blockNumber) + output, err := blockchain.callContract(contract.Hash, input, blockNumber) if err != nil { return nil, err } @@ -38,7 +38,23 @@ func (blockchain *Blockchain) GetAttribute(contract core.Contract, attributeName return result, nil } -func callContract(contractHash string, input []byte, blockchain *Blockchain, blockNumber *big.Int) ([]byte, error) { +func (blockchain *Blockchain) FetchContractData(abiJSON string, address string, method string, methodArg interface{}, result interface{}, blockNumber int64) error { + parsed, err := ParseAbi(abiJSON) + if err != nil { + return err + } + input, err := parsed.Pack(method, methodArg) + if err != nil { + return err + } + output, err := blockchain.callContract(address, input, big.NewInt(blockNumber)) + if err != nil { + return err + } + return parsed.Unpack(result, method, output) +} + +func (blockchain *Blockchain) callContract(contractHash string, input []byte, blockNumber *big.Int) ([]byte, error) { to := common.HexToAddress(contractHash) msg := ethereum.CallMsg{To: &to, Data: input} return blockchain.client.CallContract(context.Background(), msg, blockNumber) diff --git a/pkg/geth/node/node.go b/pkg/geth/node/node.go index e5102cc7..17bbdffd 100644 --- a/pkg/geth/node/node.go +++ b/pkg/geth/node/node.go @@ -13,13 +13,13 @@ import ( func Info(client *rpc.Client) core.Node { node := core.Node{} - node.NetworkId = NetworkId(client) + node.NetworkID = NetworkID(client) node.GenesisBlock = GenesisBlock(client) - node.Id, node.ClientName = IdClientName(client) + node.ID, node.ClientName = IDClientName(client) return node } -func IdClientName(client *rpc.Client) (string, string) { +func IDClientName(client *rpc.Client) (string, string) { var info p2p.NodeInfo modules, _ := client.SupportedModules() if _, ok := modules["admin"]; ok { @@ -29,7 +29,7 @@ func IdClientName(client *rpc.Client) (string, string) { return "", "" } -func NetworkId(client *rpc.Client) float64 { +func NetworkID(client *rpc.Client) float64 { var version string client.CallContext(context.Background(), &version, "net_version") networkId, _ := strconv.ParseFloat(version, 64) diff --git a/pkg/geth/testing/helpers.go b/pkg/geth/testing/helpers.go index 9b737c00..2db31190 100644 --- a/pkg/geth/testing/helpers.go +++ b/pkg/geth/testing/helpers.go @@ -1,11 +1,11 @@ package testing import ( - "path/filepath" + "log" - "github.com/vulcanize/vulcanizedb/pkg/config" "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/geth" + "github.com/vulcanize/vulcanizedb/test_config" ) func FindAttribute(contractAttributes core.ContractAttributes, attributeName string) *core.ContractAttribute { @@ -25,7 +25,9 @@ func SampleContract() core.Contract { } func sampleAbiFileContents() string { - abiFilepath := filepath.Join(config.ProjectRoot(), "pkg", "geth", "testing", "sample_abi.json") - abiFileContents, _ := geth.ReadAbiFile(abiFilepath) + abiFileContents, err := geth.ReadAbiFile(test_config.ABIFilePath + "sample_abi.json") + if err != nil { + log.Fatal(err) + } return abiFileContents } diff --git a/pkg/graphql_server/schema.go b/pkg/graphql_server/schema.go deleted file mode 100644 index 34e51e60..00000000 --- a/pkg/graphql_server/schema.go +++ /dev/null @@ -1,169 +0,0 @@ -package graphql_server - -import ( - "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/filters" - 
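FetchContractData bundles the steps shown above: parse the ABI, pack the method and argument, call the contract at a block height, and unpack into result. A hedged usage sketch — the IPC path, ABI JSON, addresses, and block number are all placeholders:

```go
package main

import (
	"fmt"
	"log"
	"math/big"

	"github.com/ethereum/go-ethereum/common"
	"github.com/vulcanize/vulcanizedb/pkg/geth"
)

func main() {
	blockchain := geth.NewBlockchain("/path/to/geth.ipc") // placeholder IPC path

	// Minimal ERC20-style balanceOf ABI, for illustration only.
	abiJSON := `[{"constant":true,"inputs":[{"name":"_owner","type":"address"}],"name":"balanceOf","outputs":[{"name":"balance","type":"uint256"}],"payable":false,"stateMutability":"view","type":"function"}]`

	var result = new(big.Int)
	err := blockchain.FetchContractData(
		abiJSON,
		"0x0000000000000000000000000000000000000001", // placeholder contract address
		"balanceOf",
		common.HexToAddress("0x0000000000000000000000000000000000000002"), // placeholder holder
		&result,
		6194632) // placeholder block number
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(result)
}
```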
"github.com/vulcanize/vulcanizedb/pkg/repositories" -) - -var Schema = ` - schema { - query: Query - } - type Query { - logFilter(name: String!): LogFilter - watchedEvents(name: String!): WatchedEventList - } - - type LogFilter { - name: String! - fromBlock: Int - toBlock: Int - address: String! - topics: [String]! - } - - type WatchedEventList{ - total: Int! - watchedEvents: [WatchedEvent]! - } - - type WatchedEvent { - name: String! - blockNumber: Int! - address: String! - tx_hash: String! - topic0: String! - topic1: String! - topic2: String! - topic3: String! - data: String! - } -` - -type GraphQLRepositories struct { - repositories.BlockRepository - repositories.LogRepository - repositories.WatchedEventRepository - repositories.FilterRepository -} - -type Resolver struct { - graphQLRepositories GraphQLRepositories -} - -func NewResolver(repositories GraphQLRepositories) *Resolver { - return &Resolver{graphQLRepositories: repositories} -} - -func (r *Resolver) LogFilter(args struct { - Name string -}) (*logFilterResolver, error) { - logFilter, err := r.graphQLRepositories.GetFilter(args.Name) - if err != nil { - return &logFilterResolver{}, err - } - return &logFilterResolver{&logFilter}, nil -} - -type logFilterResolver struct { - lf *filters.LogFilter -} - -func (lfr *logFilterResolver) Name() string { - return lfr.lf.Name -} - -func (lfr *logFilterResolver) FromBlock() *int32 { - fromBlock := int32(lfr.lf.FromBlock) - return &fromBlock -} - -func (lfr *logFilterResolver) ToBlock() *int32 { - toBlock := int32(lfr.lf.ToBlock) - return &toBlock -} - -func (lfr *logFilterResolver) Address() string { - return lfr.lf.Address -} - -func (lfr *logFilterResolver) Topics() []*string { - var topics = make([]*string, 4) - for i := range topics { - if lfr.lf.Topics[i] != "" { - topics[i] = &lfr.lf.Topics[i] - } - } - return topics -} - -func (r *Resolver) WatchedEvents(args struct { - Name string -}) (*watchedEventsResolver, error) { - watchedEvents, err := r.graphQLRepositories.GetWatchedEvents(args.Name) - if err != nil { - return &watchedEventsResolver{}, err - } - return &watchedEventsResolver{watchedEvents: watchedEvents}, err -} - -type watchedEventsResolver struct { - watchedEvents []*core.WatchedEvent -} - -func (wesr watchedEventsResolver) WatchedEvents() []*watchedEventResolver { - return resolveWatchedEvents(wesr.watchedEvents) -} - -func (wesr watchedEventsResolver) Total() int32 { - return int32(len(wesr.watchedEvents)) -} - -func resolveWatchedEvents(watchedEvents []*core.WatchedEvent) []*watchedEventResolver { - watchedEventResolvers := make([]*watchedEventResolver, 0) - for _, watchedEvent := range watchedEvents { - watchedEventResolvers = append(watchedEventResolvers, &watchedEventResolver{watchedEvent}) - } - return watchedEventResolvers -} - -type watchedEventResolver struct { - we *core.WatchedEvent -} - -func (wer watchedEventResolver) Name() string { - return wer.we.Name -} - -func (wer watchedEventResolver) BlockNumber() int32 { - return int32(wer.we.BlockNumber) -} - -func (wer watchedEventResolver) Address() string { - return wer.we.Address -} - -func (wer watchedEventResolver) TxHash() string { - return wer.we.TxHash -} - -func (wer watchedEventResolver) Topic0() string { - return wer.we.Topic0 -} - -func (wer watchedEventResolver) Topic1() string { - return wer.we.Topic1 -} - -func (wer watchedEventResolver) Topic2() string { - return wer.we.Topic2 -} - -func (wer watchedEventResolver) Topic3() string { - return wer.we.Topic3 -} - -func (wer watchedEventResolver) Data() 
string { - return wer.we.Data -} diff --git a/pkg/graphql_server/schema_test.go b/pkg/graphql_server/schema_test.go deleted file mode 100644 index c60978d6..00000000 --- a/pkg/graphql_server/schema_test.go +++ /dev/null @@ -1,178 +0,0 @@ -package graphql_server_test - -import ( - "log" - - "encoding/json" - - "context" - - "github.com/neelance/graphql-go" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/filters" - "github.com/vulcanize/vulcanizedb/pkg/graphql_server" - "github.com/vulcanize/vulcanizedb/pkg/repositories/postgres" -) - -func formatJSON(data []byte) []byte { - var v interface{} - if err := json.Unmarshal(data, &v); err != nil { - log.Fatalf("invalid JSON: %s", err) - } - formatted, err := json.Marshal(v) - if err != nil { - log.Fatal(err) - } - return formatted -} - -var _ = Describe("GraphQL", func() { - var cfg config.Config - var graphQLRepositories graphql_server.GraphQLRepositories - - BeforeEach(func() { - - cfg, _ = config.NewConfig("private") - node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1, Id: "x123", ClientName: "geth"} - db := postgres.NewTestDB(node) - blockRepository := &postgres.BlockRepository{DB: db} - logRepository := &postgres.LogRepository{DB: db} - filterRepository := &postgres.FilterRepository{DB: db} - watchedEventRepository := &postgres.WatchedEventRepository{DB: db} - graphQLRepositories = graphql_server.GraphQLRepositories{ - WatchedEventRepository: watchedEventRepository, - BlockRepository: blockRepository, - LogRepository: logRepository, - FilterRepository: filterRepository, - } - - err := graphQLRepositories.CreateFilter(filters.LogFilter{ - Name: "TestFilter1", - FromBlock: 1, - ToBlock: 10, - Address: "0x123456789", - Topics: core.Topics{0: "topic=1", 2: "topic=2"}, - }) - if err != nil { - log.Fatal(err) - } - filter, err := graphQLRepositories.GetFilter("TestFilter1") - if err != nil { - log.Println(filter) - log.Fatal(err) - } - - matchingEvent := core.Log{ - BlockNumber: 5, - TxHash: "0xTX1", - Address: "0x123456789", - Topics: core.Topics{0: "topic=1", 2: "topic=2"}, - Index: 0, - Data: "0xDATADATADATA", - } - nonMatchingEvent := core.Log{ - BlockNumber: 5, - TxHash: "0xTX2", - Address: "0xOTHERADDRESS", - Topics: core.Topics{0: "topic=1", 2: "topic=2"}, - Index: 0, - Data: "0xDATADATADATA", - } - err = graphQLRepositories.CreateLogs([]core.Log{matchingEvent, nonMatchingEvent}) - if err != nil { - log.Fatal(err) - } - }) - - It("Queries example schema for specific log filter", func() { - var variables map[string]interface{} - resolver := graphql_server.NewResolver(graphQLRepositories) - var schema = graphql.MustParseSchema(graphql_server.Schema, resolver) - response := schema.Exec(context.Background(), - `{ - logFilter(name: "TestFilter1") { - name - fromBlock - toBlock - address - topics - } - }`, - "", - variables) - expected := `{ - "logFilter": { - "name": "TestFilter1", - "fromBlock": 1, - "toBlock": 10, - "address": "0x123456789", - "topics": ["topic=1", null, "topic=2", null] - } - }` - var v interface{} - if len(response.Errors) != 0 { - log.Fatal(response.Errors) - } - err := json.Unmarshal(response.Data, &v) - Expect(err).ToNot(HaveOccurred()) - actualJSON := formatJSON(response.Data) - expectedJSON := formatJSON([]byte(expected)) - Expect(actualJSON).To(Equal(expectedJSON)) - }) - - It("Queries example schema for specific watched event log", func() { - var variables 
map[string]interface{} - - resolver := graphql_server.NewResolver(graphQLRepositories) - var schema = graphql.MustParseSchema(graphql_server.Schema, resolver) - response := schema.Exec(context.Background(), - `{ - watchedEvents(name: "TestFilter1") { - total - watchedEvents{ - name - blockNumber - address - tx_hash - topic0 - topic1 - topic2 - topic3 - data - } - } - }`, - "", - variables) - expected := `{ - "watchedEvents": - { - "total": 1, - "watchedEvents": [ - {"name":"TestFilter1", - "blockNumber": 5, - "address": "0x123456789", - "tx_hash": "0xTX1", - "topic0": "topic=1", - "topic1": "", - "topic2": "topic=2", - "topic3": "", - "data": "0xDATADATADATA" - } - ] - } - }` - var v interface{} - if len(response.Errors) != 0 { - log.Fatal(response.Errors) - } - err := json.Unmarshal(response.Data, &v) - Expect(err).ToNot(HaveOccurred()) - actualJSON := formatJSON(response.Data) - expectedJSON := formatJSON([]byte(expected)) - Expect(actualJSON).To(Equal(expectedJSON)) - }) -}) diff --git a/pkg/history/populate_blocks.go b/pkg/history/populate_blocks.go index 24c03536..19d63e72 100644 --- a/pkg/history/populate_blocks.go +++ b/pkg/history/populate_blocks.go @@ -4,10 +4,10 @@ import ( "log" "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/repositories" + "github.com/vulcanize/vulcanizedb/pkg/datastore" ) -func PopulateMissingBlocks(blockchain core.Blockchain, blockRepository repositories.BlockRepository, startingBlockNumber int64) int { +func PopulateMissingBlocks(blockchain core.Blockchain, blockRepository datastore.BlockRepository, startingBlockNumber int64) int { lastBlock := blockchain.LastBlock().Int64() blockRange := blockRepository.MissingBlockNumbers(startingBlockNumber, lastBlock-1) log.SetPrefix("") @@ -16,7 +16,7 @@ func PopulateMissingBlocks(blockchain core.Blockchain, blockRepository repositor return len(blockRange) } -func RetrieveAndUpdateBlocks(blockchain core.Blockchain, blockRepository repositories.BlockRepository, blockNumbers []int64) int { +func RetrieveAndUpdateBlocks(blockchain core.Blockchain, blockRepository datastore.BlockRepository, blockNumbers []int64) int { for _, blockNumber := range blockNumbers { block := blockchain.GetBlockByNumber(blockNumber) blockRepository.CreateOrUpdateBlock(block) diff --git a/pkg/history/populate_blocks_test.go b/pkg/history/populate_blocks_test.go index bb4e3a0d..14e6d61f 100644 --- a/pkg/history/populate_blocks_test.go +++ b/pkg/history/populate_blocks_test.go @@ -4,9 +4,9 @@ import ( . "github.com/onsi/ginkgo" . 
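With the history package now written against datastore.BlockRepository, any implementation plugs in: the in-memory one used in these specs, or Postgres as sketched here. The IPC path and node values are placeholders, and geth.Blockchain is assumed to satisfy core.Blockchain as it does in the sync command:

```go
package main

import (
	"fmt"
	"log"

	"github.com/vulcanize/vulcanizedb/pkg/core"
	"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres"
	"github.com/vulcanize/vulcanizedb/pkg/datastore/postgres/repositories"
	"github.com/vulcanize/vulcanizedb/pkg/geth"
	"github.com/vulcanize/vulcanizedb/pkg/history"
	"github.com/vulcanize/vulcanizedb/test_config"
)

func main() {
	blockchain := geth.NewBlockchain("/path/to/geth.ipc") // placeholder IPC path
	node := core.Node{GenesisBlock: "GENESIS", NetworkID: 1, ID: "x123", ClientName: "geth"} // placeholder node
	db, err := postgres.NewDB(test_config.DBConfig, node)
	if err != nil {
		log.Fatal(err)
	}
	blockRepository := repositories.BlockRepository{DB: db}

	// Backfill any gaps from genesis up to the chain head.
	added := history.PopulateMissingBlocks(blockchain, blockRepository, 0)
	fmt.Printf("backfilled %d blocks\n", added)
}
```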
"github.com/onsi/gomega" "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore/inmemory" "github.com/vulcanize/vulcanizedb/pkg/fakes" "github.com/vulcanize/vulcanizedb/pkg/history" - "github.com/vulcanize/vulcanizedb/pkg/repositories/inmemory" ) var _ = Describe("Populating blocks", func() { diff --git a/pkg/history/validate_blocks.go b/pkg/history/validate_blocks.go index 8f3be99f..1f6ecf9e 100644 --- a/pkg/history/validate_blocks.go +++ b/pkg/history/validate_blocks.go @@ -5,7 +5,7 @@ import ( "text/template" "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/repositories" + "github.com/vulcanize/vulcanizedb/pkg/datastore" ) const WindowTemplate = `Validating Blocks @@ -17,12 +17,12 @@ var ParsedWindowTemplate = *template.Must(template.New("window").Parse(WindowTem type BlockValidator struct { blockchain core.Blockchain - blockRepository repositories.BlockRepository + blockRepository datastore.BlockRepository windowSize int parsedLoggingTemplate template.Template } -func NewBlockValidator(blockchain core.Blockchain, blockRepository repositories.BlockRepository, windowSize int) *BlockValidator { +func NewBlockValidator(blockchain core.Blockchain, blockRepository datastore.BlockRepository, windowSize int) *BlockValidator { return &BlockValidator{ blockchain, blockRepository, diff --git a/pkg/history/validate_blocks_test.go b/pkg/history/validate_blocks_test.go index a6194451..056e84e1 100644 --- a/pkg/history/validate_blocks_test.go +++ b/pkg/history/validate_blocks_test.go @@ -9,9 +9,9 @@ import ( . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore/inmemory" "github.com/vulcanize/vulcanizedb/pkg/fakes" "github.com/vulcanize/vulcanizedb/pkg/history" - "github.com/vulcanize/vulcanizedb/pkg/repositories/inmemory" ) func init() { diff --git a/pkg/repositories/postgres/helpers.go b/pkg/repositories/postgres/helpers.go deleted file mode 100644 index 9e5040c5..00000000 --- a/pkg/repositories/postgres/helpers.go +++ /dev/null @@ -1,22 +0,0 @@ -package postgres - -import ( - "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/core" -) - -func (db *DB) clearData() { - db.MustExec("DELETE FROM watched_contracts") - db.MustExec("DELETE FROM transactions") - db.MustExec("DELETE FROM blocks") - db.MustExec("DELETE FROM logs") - db.MustExec("DELETE FROM receipts") - db.MustExec("DELETE FROM log_filters") -} - -func NewTestDB(node core.Node) *DB { - cfg, _ := config.NewConfig("private") - db, _ := NewDB(cfg.Database, node) - db.clearData() - return db -} diff --git a/test_config/test_config.go b/test_config/test_config.go new file mode 100644 index 00000000..f8127c53 --- /dev/null +++ b/test_config/test_config.go @@ -0,0 +1,78 @@ +package test_config + +import ( + "log" + + "os" + + "github.com/spf13/viper" + "github.com/vulcanize/vulcanizedb/pkg/config" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" +) + +var TestConfig *viper.Viper +var DBConfig config.Database +var TestClientConfig config.Client +var Infura *viper.Viper +var InfuraClient config.Client +var ABIFilePath string + +func init() { + setTestConfig() + setInfuraConfig() + setABIPath() +} + +func setTestConfig() { + TestConfig = viper.New() + TestConfig.SetConfigName("private") + TestConfig.AddConfigPath("$GOPATH/src/github.com/vulcanize/vulcanizedb/environments/") + err := 
TestConfig.ReadInConfig() + if err != nil { + log.Fatal(err) + } + hn := TestConfig.GetString("database.hostname") + port := TestConfig.GetInt("database.port") + name := TestConfig.GetString("database.name") + DBConfig = config.Database{ + Hostname: hn, + Name: name, + Port: port, + } + ipc := TestConfig.GetString("client.ipcpath") + gopath := os.Getenv("GOPATH") + TestClientConfig = config.Client{ + IPCPath: gopath + "/src/github.com/vulcanize/vulcanizedb/" + ipc, + } +} + +func setInfuraConfig() { + Infura = viper.New() + Infura.SetConfigName("infura") + Infura.AddConfigPath("$GOPATH/src/github.com/vulcanize/vulcanizedb/environments/") + err := Infura.ReadInConfig() + ipc := Infura.GetString("client.ipcpath") + if err != nil { + log.Fatal(err) + } + InfuraClient = config.Client{ + IPCPath: ipc, + } +} + +func setABIPath() { + gp := os.Getenv("GOPATH") + ABIFilePath = gp + "/src/github.com/vulcanize/vulcanizedb/pkg/geth/testing/" +} + +func NewTestDB(node core.Node) *postgres.DB { + db, _ := postgres.NewDB(DBConfig, node) + db.MustExec("DELETE FROM watched_contracts") + db.MustExec("DELETE FROM transactions") + db.MustExec("DELETE FROM blocks") + db.MustExec("DELETE FROM logs") + db.MustExec("DELETE FROM receipts") + db.MustExec("DELETE FROM log_filters") + return db +} diff --git a/utils/utils.go b/utils/utils.go index 3399d949..7652e2ee 100644 --- a/utils/utils.go +++ b/utils/utils.go @@ -11,18 +11,10 @@ import ( "github.com/vulcanize/vulcanizedb/pkg/config" "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/datastore/postgres" "github.com/vulcanize/vulcanizedb/pkg/geth" - "github.com/vulcanize/vulcanizedb/pkg/repositories/postgres" ) -func LoadConfig(environment string) config.Config { - cfg, err := config.NewConfig(environment) - if err != nil { - log.Fatalf("Error loading config\n%v", err) - } - return cfg -} - func LoadPostgres(database config.Database, node core.Node) postgres.DB { db, err := postgres.NewDB(database, node) if err != nil { @@ -53,7 +45,7 @@ func GetAbi(abiFilepath string, contractHash string, network string) string { if abiFilepath != "" { contractAbiString = ReadAbiFile(abiFilepath) } else { - url := geth.GenUrl(network) + url := geth.GenURL(network) etherscan := geth.NewEtherScanClient(url) log.Printf("No ABI supplied. Retrieving ABI from Etherscan: %s", url) contractAbiString, _ = etherscan.GetAbi(contractHash) diff --git a/vendor/github.com/BurntSushi/toml/.gitignore b/vendor/github.com/BurntSushi/toml/.gitignore deleted file mode 100644 index 0cd38003..00000000 --- a/vendor/github.com/BurntSushi/toml/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -TAGS -tags -.*.swp -tomlcheck/tomlcheck -toml.test diff --git a/vendor/github.com/BurntSushi/toml/.travis.yml b/vendor/github.com/BurntSushi/toml/.travis.yml deleted file mode 100644 index 8b8afc4f..00000000 --- a/vendor/github.com/BurntSushi/toml/.travis.yml +++ /dev/null @@ -1,15 +0,0 @@ -language: go -go: - - 1.1 - - 1.2 - - 1.3 - - 1.4 - - 1.5 - - 1.6 - - tip -install: - - go install ./... 
- - go get github.com/BurntSushi/toml-test -script: - - export PATH="$PATH:$HOME/gopath/bin" - - make test diff --git a/vendor/github.com/BurntSushi/toml/COMPATIBLE b/vendor/github.com/BurntSushi/toml/COMPATIBLE deleted file mode 100644 index 6efcfd0c..00000000 --- a/vendor/github.com/BurntSushi/toml/COMPATIBLE +++ /dev/null @@ -1,3 +0,0 @@ -Compatible with TOML version -[v0.4.0](https://github.com/toml-lang/toml/blob/v0.4.0/versions/en/toml-v0.4.0.md) - diff --git a/vendor/github.com/BurntSushi/toml/COPYING b/vendor/github.com/BurntSushi/toml/COPYING deleted file mode 100644 index 5a8e3325..00000000 --- a/vendor/github.com/BurntSushi/toml/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. - diff --git a/vendor/github.com/BurntSushi/toml/Makefile b/vendor/github.com/BurntSushi/toml/Makefile deleted file mode 100644 index 3600848d..00000000 --- a/vendor/github.com/BurntSushi/toml/Makefile +++ /dev/null @@ -1,19 +0,0 @@ -install: - go install ./... - -test: install - go test -v - toml-test toml-test-decoder - toml-test -encoder toml-test-encoder - -fmt: - gofmt -w *.go */*.go - colcheck *.go */*.go - -tags: - find ./ -name '*.go' -print0 | xargs -0 gotags > TAGS - -push: - git push origin master - git push github master - diff --git a/vendor/github.com/BurntSushi/toml/README.md b/vendor/github.com/BurntSushi/toml/README.md deleted file mode 100644 index 7c1b37ec..00000000 --- a/vendor/github.com/BurntSushi/toml/README.md +++ /dev/null @@ -1,218 +0,0 @@ -## TOML parser and encoder for Go with reflection - -TOML stands for Tom's Obvious, Minimal Language. This Go package provides a -reflection interface similar to Go's standard library `json` and `xml` -packages. This package also supports the `encoding.TextUnmarshaler` and -`encoding.TextMarshaler` interfaces so that you can define custom data -representations. (There is an example of this below.) - -Spec: https://github.com/toml-lang/toml - -Compatible with TOML version -[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md) - -Documentation: https://godoc.org/github.com/BurntSushi/toml - -Installation: - -```bash -go get github.com/BurntSushi/toml -``` - -Try the toml validator: - -```bash -go get github.com/BurntSushi/toml/cmd/tomlv -tomlv some-toml-file.toml -``` - -[![Build Status](https://travis-ci.org/BurntSushi/toml.svg?branch=master)](https://travis-ci.org/BurntSushi/toml) [![GoDoc](https://godoc.org/github.com/BurntSushi/toml?status.svg)](https://godoc.org/github.com/BurntSushi/toml) - -### Testing - -This package passes all tests in -[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder -and the encoder. - -### Examples - -This package works similarly to how the Go standard library handles `XML` -and `JSON`. Namely, data is loaded into Go values via reflection. 
- -For the simplest example, consider some TOML file as just a list of keys -and values: - -```toml -Age = 25 -Cats = [ "Cauchy", "Plato" ] -Pi = 3.14 -Perfection = [ 6, 28, 496, 8128 ] -DOB = 1987-07-05T05:45:00Z -``` - -Which could be defined in Go as: - -```go -type Config struct { - Age int - Cats []string - Pi float64 - Perfection []int - DOB time.Time // requires `import time` -} -``` - -And then decoded with: - -```go -var conf Config -if _, err := toml.Decode(tomlData, &conf); err != nil { - // handle error -} -``` - -You can also use struct tags if your struct field name doesn't map to a TOML -key value directly: - -```toml -some_key_NAME = "wat" -``` - -```go -type TOML struct { - ObscureKey string `toml:"some_key_NAME"` -} -``` - -### Using the `encoding.TextUnmarshaler` interface - -Here's an example that automatically parses duration strings into -`time.Duration` values: - -```toml -[[song]] -name = "Thunder Road" -duration = "4m49s" - -[[song]] -name = "Stairway to Heaven" -duration = "8m03s" -``` - -Which can be decoded with: - -```go -type song struct { - Name string - Duration duration -} -type songs struct { - Song []song -} -var favorites songs -if _, err := toml.Decode(blob, &favorites); err != nil { - log.Fatal(err) -} - -for _, s := range favorites.Song { - fmt.Printf("%s (%s)\n", s.Name, s.Duration) -} -``` - -And you'll also need a `duration` type that satisfies the -`encoding.TextUnmarshaler` interface: - -```go -type duration struct { - time.Duration -} - -func (d *duration) UnmarshalText(text []byte) error { - var err error - d.Duration, err = time.ParseDuration(string(text)) - return err -} -``` - -### More complex usage - -Here's an example of how to load the example from the official spec page: - -```toml -# This is a TOML document. Boom. - -title = "TOML Example" - -[owner] -name = "Tom Preston-Werner" -organization = "GitHub" -bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." -dob = 1979-05-27T07:32:00Z # First class dates? Why not? - -[database] -server = "192.168.1.1" -ports = [ 8001, 8001, 8002 ] -connection_max = 5000 -enabled = true - -[servers] - - # You can indent as you please. Tabs or spaces. TOML don't care. - [servers.alpha] - ip = "10.0.0.1" - dc = "eqdc10" - - [servers.beta] - ip = "10.0.0.2" - dc = "eqdc10" - -[clients] -data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it - -# Line breaks are OK when inside arrays -hosts = [ - "alpha", - "omega" -] -``` - -And the corresponding Go types are: - -```go -type tomlConfig struct { - Title string - Owner ownerInfo - DB database `toml:"database"` - Servers map[string]server - Clients clients -} - -type ownerInfo struct { - Name string - Org string `toml:"organization"` - Bio string - DOB time.Time -} - -type database struct { - Server string - Ports []int - ConnMax int `toml:"connection_max"` - Enabled bool -} - -type server struct { - IP string - DC string -} - -type clients struct { - Data [][]interface{} - Hosts []string -} -``` - -Note that a case insensitive match will be tried if an exact match can't be -found. - -A working example of the above can be found in `_examples/example.{go,toml}`. 
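The case-insensitive fallback noted at the end of the README above is easy to miss. A minimal sketch of what it means in practice, using only the `toml.Decode` API documented above (the `Config` type and key are illustrative):

```go
package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

// Config carries no `toml` struct tags, so keys are matched by field name.
type Config struct {
	Age int
}

func main() {
	// "AGE" is not an exact match for any field; the decoder falls back
	// to a case-insensitive match and fills Config.Age anyway.
	var conf Config
	if _, err := toml.Decode(`AGE = 25`, &conf); err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(conf.Age) // prints 25
}
```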
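Stepping back to the new `test_config` package added earlier in this patch: a test that needs a clean database would consume it roughly as below. This is a sketch rather than code from the patch; it assumes `postgres.DB` embeds `sqlx.DB` (consistent with `MustExec` being called on it directly in `NewTestDB`), and the zero `core.Node` value stands in for whatever node a real suite constructs.

```go
package main

import (
	"log"

	"github.com/vulcanize/vulcanizedb/pkg/core"
	"github.com/vulcanize/vulcanizedb/test_config"
)

func main() {
	// Real suites pass the node they are testing against so that rows
	// are keyed to that client; any core.Node works for a scratch run.
	var node core.Node

	// NewTestDB connects using the DBConfig loaded from
	// environments/private.toml in init() and truncates every table,
	// so each test starts from a known-empty state.
	db := test_config.NewTestDB(node)

	var blocks int
	if err := db.Get(&blocks, `SELECT COUNT(*) FROM blocks`); err != nil {
		log.Fatal(err)
	}
	log.Printf("starting with %d blocks", blocks) // 0 after truncation
}
```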
diff --git a/vendor/github.com/BurntSushi/toml/_examples/example.go b/vendor/github.com/BurntSushi/toml/_examples/example.go deleted file mode 100644 index 79f31f27..00000000 --- a/vendor/github.com/BurntSushi/toml/_examples/example.go +++ /dev/null @@ -1,61 +0,0 @@ -package main - -import ( - "fmt" - "time" - - "github.com/BurntSushi/toml" -) - -type tomlConfig struct { - Title string - Owner ownerInfo - DB database `toml:"database"` - Servers map[string]server - Clients clients -} - -type ownerInfo struct { - Name string - Org string `toml:"organization"` - Bio string - DOB time.Time -} - -type database struct { - Server string - Ports []int - ConnMax int `toml:"connection_max"` - Enabled bool -} - -type server struct { - IP string - DC string -} - -type clients struct { - Data [][]interface{} - Hosts []string -} - -func main() { - var config tomlConfig - if _, err := toml.DecodeFile("example.toml", &config); err != nil { - fmt.Println(err) - return - } - - fmt.Printf("Title: %s\n", config.Title) - fmt.Printf("Owner: %s (%s, %s), Born: %s\n", - config.Owner.Name, config.Owner.Org, config.Owner.Bio, - config.Owner.DOB) - fmt.Printf("Database: %s %v (Max conn. %d), Enabled? %v\n", - config.DB.Server, config.DB.Ports, config.DB.ConnMax, - config.DB.Enabled) - for serverName, server := range config.Servers { - fmt.Printf("Server: %s (%s, %s)\n", serverName, server.IP, server.DC) - } - fmt.Printf("Client data: %v\n", config.Clients.Data) - fmt.Printf("Client hosts: %v\n", config.Clients.Hosts) -} diff --git a/vendor/github.com/BurntSushi/toml/_examples/example.toml b/vendor/github.com/BurntSushi/toml/_examples/example.toml deleted file mode 100644 index 32c7a4fa..00000000 --- a/vendor/github.com/BurntSushi/toml/_examples/example.toml +++ /dev/null @@ -1,35 +0,0 @@ -# This is a TOML document. Boom. - -title = "TOML Example" - -[owner] -name = "Tom Preston-Werner" -organization = "GitHub" -bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." -dob = 1979-05-27T07:32:00Z # First class dates? Why not? - -[database] -server = "192.168.1.1" -ports = [ 8001, 8001, 8002 ] -connection_max = 5000 -enabled = true - -[servers] - - # You can indent as you please. Tabs or spaces. TOML don't care. - [servers.alpha] - ip = "10.0.0.1" - dc = "eqdc10" - - [servers.beta] - ip = "10.0.0.2" - dc = "eqdc10" - -[clients] -data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it - -# Line breaks are OK when inside arrays -hosts = [ - "alpha", - "omega" -] diff --git a/vendor/github.com/BurntSushi/toml/_examples/hard.toml b/vendor/github.com/BurntSushi/toml/_examples/hard.toml deleted file mode 100644 index 26145d2b..00000000 --- a/vendor/github.com/BurntSushi/toml/_examples/hard.toml +++ /dev/null @@ -1,22 +0,0 @@ -# Test file for TOML -# Only this one tries to emulate a TOML file written by a user of the kind of parser writers probably hate -# This part you'll really hate - -[the] -test_string = "You'll hate me after this - #" # " Annoying, isn't it? - - [the.hard] - test_array = [ "] ", " # "] # ] There you go, parse this! - test_array2 = [ "Test #11 ]proved that", "Experiment #9 was a success" ] - # You didn't think it'd as easy as chucking out the last #, did you? - another_test_string = " Same thing, but with a string #" - harder_test_string = " And when \"'s are in the string, along with # \"" # "and comments are there too" - # Things will get harder - - [the.hard.bit#] - what? = "You don't think some user won't do that?" 
- multi_line_array = [ - "]", - # ] Oh yes I did - ] - diff --git a/vendor/github.com/BurntSushi/toml/_examples/implicit.toml b/vendor/github.com/BurntSushi/toml/_examples/implicit.toml deleted file mode 100644 index 1dea5ceb..00000000 --- a/vendor/github.com/BurntSushi/toml/_examples/implicit.toml +++ /dev/null @@ -1,4 +0,0 @@ -# [x] you -# [x.y] don't -# [x.y.z] need these -[x.y.z.w] # for this to work diff --git a/vendor/github.com/BurntSushi/toml/_examples/invalid-apples.toml b/vendor/github.com/BurntSushi/toml/_examples/invalid-apples.toml deleted file mode 100644 index 74e9e337..00000000 --- a/vendor/github.com/BurntSushi/toml/_examples/invalid-apples.toml +++ /dev/null @@ -1,6 +0,0 @@ -# DO NOT WANT -[fruit] -type = "apple" - -[fruit.type] -apple = "yes" diff --git a/vendor/github.com/BurntSushi/toml/_examples/invalid.toml b/vendor/github.com/BurntSushi/toml/_examples/invalid.toml deleted file mode 100644 index beb1dba5..00000000 --- a/vendor/github.com/BurntSushi/toml/_examples/invalid.toml +++ /dev/null @@ -1,35 +0,0 @@ -# This is an INVALID TOML document. Boom. -# Can you spot the error without help? - -title = "TOML Example" - -[owner] -name = "Tom Preston-Werner" -organization = "GitHub" -bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." -dob = 1979-05-27T7:32:00Z # First class dates? Why not? - -[database] -server = "192.168.1.1" -ports = [ 8001, 8001, 8002 ] -connection_max = 5000 -enabled = true - -[servers] - # You can indent as you please. Tabs or spaces. TOML don't care. - [servers.alpha] - ip = "10.0.0.1" - dc = "eqdc10" - - [servers.beta] - ip = "10.0.0.2" - dc = "eqdc10" - -[clients] -data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it - -# Line breaks are OK when inside arrays -hosts = [ - "alpha", - "omega" -] diff --git a/vendor/github.com/BurntSushi/toml/_examples/readme1.toml b/vendor/github.com/BurntSushi/toml/_examples/readme1.toml deleted file mode 100644 index 3e1261d4..00000000 --- a/vendor/github.com/BurntSushi/toml/_examples/readme1.toml +++ /dev/null @@ -1,5 +0,0 @@ -Age = 25 -Cats = [ "Cauchy", "Plato" ] -Pi = 3.14 -Perfection = [ 6, 28, 496, 8128 ] -DOB = 1987-07-05T05:45:00Z diff --git a/vendor/github.com/BurntSushi/toml/_examples/readme2.toml b/vendor/github.com/BurntSushi/toml/_examples/readme2.toml deleted file mode 100644 index b51cd934..00000000 --- a/vendor/github.com/BurntSushi/toml/_examples/readme2.toml +++ /dev/null @@ -1 +0,0 @@ -some_key_NAME = "wat" diff --git a/vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING b/vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING deleted file mode 100644 index 5a8e3325..00000000 --- a/vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. 
- diff --git a/vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md b/vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md deleted file mode 100644 index 93f4e3a0..00000000 --- a/vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# Implements the TOML test suite interface - -This is an implementation of the interface expected by -[toml-test](https://github.com/BurntSushi/toml-test) for my -[toml parser written in Go](https://github.com/BurntSushi/toml). -In particular, it maps TOML data on `stdin` to a JSON format on `stdout`. - - -Compatible with TOML version -[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md) - -Compatible with `toml-test` version -[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0) diff --git a/vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go b/vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go deleted file mode 100644 index 14e75570..00000000 --- a/vendor/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go +++ /dev/null @@ -1,90 +0,0 @@ -// Command toml-test-decoder satisfies the toml-test interface for testing -// TOML decoders. Namely, it accepts TOML on stdin and outputs JSON on stdout. -package main - -import ( - "encoding/json" - "flag" - "fmt" - "log" - "os" - "path" - "time" - - "github.com/BurntSushi/toml" -) - -func init() { - log.SetFlags(0) - - flag.Usage = usage - flag.Parse() -} - -func usage() { - log.Printf("Usage: %s < toml-file\n", path.Base(os.Args[0])) - flag.PrintDefaults() - - os.Exit(1) -} - -func main() { - if flag.NArg() != 0 { - flag.Usage() - } - - var tmp interface{} - if _, err := toml.DecodeReader(os.Stdin, &tmp); err != nil { - log.Fatalf("Error decoding TOML: %s", err) - } - - typedTmp := translate(tmp) - if err := json.NewEncoder(os.Stdout).Encode(typedTmp); err != nil { - log.Fatalf("Error encoding JSON: %s", err) - } -} - -func translate(tomlData interface{}) interface{} { - switch orig := tomlData.(type) { - case map[string]interface{}: - typed := make(map[string]interface{}, len(orig)) - for k, v := range orig { - typed[k] = translate(v) - } - return typed - case []map[string]interface{}: - typed := make([]map[string]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v).(map[string]interface{}) - } - return typed - case []interface{}: - typed := make([]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v) - } - - // We don't really need to tag arrays, but let's be future proof. - // (If TOML ever supports tuples, we'll need this.) 
- return tag("array", typed) - case time.Time: - return tag("datetime", orig.Format("2006-01-02T15:04:05Z")) - case bool: - return tag("bool", fmt.Sprintf("%v", orig)) - case int64: - return tag("integer", fmt.Sprintf("%d", orig)) - case float64: - return tag("float", fmt.Sprintf("%v", orig)) - case string: - return tag("string", orig) - } - - panic(fmt.Sprintf("Unknown type: %T", tomlData)) -} - -func tag(typeName string, data interface{}) map[string]interface{} { - return map[string]interface{}{ - "type": typeName, - "value": data, - } -} diff --git a/vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING b/vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING deleted file mode 100644 index 5a8e3325..00000000 --- a/vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. - diff --git a/vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md b/vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md deleted file mode 100644 index a45bd4da..00000000 --- a/vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md +++ /dev/null @@ -1,13 +0,0 @@ -# Implements the TOML test suite interface for TOML encoders - -This is an implementation of the interface expected by -[toml-test](https://github.com/BurntSushi/toml-test) for the -[TOML encoder](https://github.com/BurntSushi/toml). -In particular, it maps JSON data on `stdin` to a TOML format on `stdout`. - - -Compatible with TOML version -[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md) - -Compatible with `toml-test` version -[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0) diff --git a/vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go b/vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go deleted file mode 100644 index 092cc684..00000000 --- a/vendor/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go +++ /dev/null @@ -1,131 +0,0 @@ -// Command toml-test-encoder satisfies the toml-test interface for testing -// TOML encoders. Namely, it accepts JSON on stdin and outputs TOML on stdout. 
-package main - -import ( - "encoding/json" - "flag" - "log" - "os" - "path" - "strconv" - "time" - - "github.com/BurntSushi/toml" -) - -func init() { - log.SetFlags(0) - - flag.Usage = usage - flag.Parse() -} - -func usage() { - log.Printf("Usage: %s < json-file\n", path.Base(os.Args[0])) - flag.PrintDefaults() - - os.Exit(1) -} - -func main() { - if flag.NArg() != 0 { - flag.Usage() - } - - var tmp interface{} - if err := json.NewDecoder(os.Stdin).Decode(&tmp); err != nil { - log.Fatalf("Error decoding JSON: %s", err) - } - - tomlData := translate(tmp) - if err := toml.NewEncoder(os.Stdout).Encode(tomlData); err != nil { - log.Fatalf("Error encoding TOML: %s", err) - } -} - -func translate(typedJson interface{}) interface{} { - switch v := typedJson.(type) { - case map[string]interface{}: - if len(v) == 2 && in("type", v) && in("value", v) { - return untag(v) - } - m := make(map[string]interface{}, len(v)) - for k, v2 := range v { - m[k] = translate(v2) - } - return m - case []interface{}: - tabArray := make([]map[string]interface{}, len(v)) - for i := range v { - if m, ok := translate(v[i]).(map[string]interface{}); ok { - tabArray[i] = m - } else { - log.Fatalf("JSON arrays may only contain objects. This " + - "corresponds to only tables being allowed in " + - "TOML table arrays.") - } - } - return tabArray - } - log.Fatalf("Unrecognized JSON format '%T'.", typedJson) - panic("unreachable") -} - -func untag(typed map[string]interface{}) interface{} { - t := typed["type"].(string) - v := typed["value"] - switch t { - case "string": - return v.(string) - case "integer": - v := v.(string) - n, err := strconv.Atoi(v) - if err != nil { - log.Fatalf("Could not parse '%s' as integer: %s", v, err) - } - return n - case "float": - v := v.(string) - f, err := strconv.ParseFloat(v, 64) - if err != nil { - log.Fatalf("Could not parse '%s' as float64: %s", v, err) - } - return f - case "datetime": - v := v.(string) - t, err := time.Parse("2006-01-02T15:04:05Z", v) - if err != nil { - log.Fatalf("Could not parse '%s' as a datetime: %s", v, err) - } - return t - case "bool": - v := v.(string) - switch v { - case "true": - return true - case "false": - return false - } - log.Fatalf("Could not parse '%s' as a boolean.", v) - case "array": - v := v.([]interface{}) - array := make([]interface{}, len(v)) - for i := range v { - if m, ok := v[i].(map[string]interface{}); ok { - array[i] = untag(m) - } else { - log.Fatalf("Arrays may only contain other arrays or "+ - "primitive values, but found a '%T'.", m) - } - } - return array - } - log.Fatalf("Unrecognized tag type '%s'.", t) - panic("unreachable") -} - -func in(key string, m map[string]interface{}) bool { - _, ok := m[key] - return ok -} diff --git a/vendor/github.com/BurntSushi/toml/cmd/tomlv/COPYING b/vendor/github.com/BurntSushi/toml/cmd/tomlv/COPYING deleted file mode 100644 index 5a8e3325..00000000 --- a/vendor/github.com/BurntSushi/toml/cmd/tomlv/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. 
- diff --git a/vendor/github.com/BurntSushi/toml/cmd/tomlv/README.md b/vendor/github.com/BurntSushi/toml/cmd/tomlv/README.md deleted file mode 100644 index 51231e29..00000000 --- a/vendor/github.com/BurntSushi/toml/cmd/tomlv/README.md +++ /dev/null @@ -1,21 +0,0 @@ -# TOML Validator - -If Go is installed, it's simple to try it out: - -```bash -go get github.com/BurntSushi/toml/cmd/tomlv -tomlv some-toml-file.toml -``` - -You can see the types of every key in a TOML file with: - -```bash -tomlv -types some-toml-file.toml -``` - -At the moment, only one error message is reported at a time. Error messages -include line numbers. No output means that the files given are valid TOML, or -there is a bug in `tomlv`. - -Compatible with TOML version -[v0.4.0](https://github.com/toml-lang/toml/blob/master/versions/en/toml-v0.4.0.md) diff --git a/vendor/github.com/BurntSushi/toml/cmd/tomlv/main.go b/vendor/github.com/BurntSushi/toml/cmd/tomlv/main.go deleted file mode 100644 index c7d689a7..00000000 --- a/vendor/github.com/BurntSushi/toml/cmd/tomlv/main.go +++ /dev/null @@ -1,61 +0,0 @@ -// Command tomlv validates TOML documents and prints each key's type. -package main - -import ( - "flag" - "fmt" - "log" - "os" - "path" - "strings" - "text/tabwriter" - - "github.com/BurntSushi/toml" -) - -var ( - flagTypes = false -) - -func init() { - log.SetFlags(0) - - flag.BoolVar(&flagTypes, "types", flagTypes, - "When set, the types of every defined key will be shown.") - - flag.Usage = usage - flag.Parse() -} - -func usage() { - log.Printf("Usage: %s toml-file [ toml-file ... ]\n", - path.Base(os.Args[0])) - flag.PrintDefaults() - - os.Exit(1) -} - -func main() { - if flag.NArg() < 1 { - flag.Usage() - } - for _, f := range flag.Args() { - var tmp interface{} - md, err := toml.DecodeFile(f, &tmp) - if err != nil { - log.Fatalf("Error in '%s': %s", f, err) - } - if flagTypes { - printTypes(md) - } - } -} - -func printTypes(md toml.MetaData) { - tabw := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) - for _, key := range md.Keys() { - fmt.Fprintf(tabw, "%s%s\t%s\n", - strings.Repeat(" ", len(key)-1), key, md.Type(key...)) - } - tabw.Flush() -} diff --git a/vendor/github.com/BurntSushi/toml/decode.go b/vendor/github.com/BurntSushi/toml/decode.go deleted file mode 100644 index b0fd51d5..00000000 --- a/vendor/github.com/BurntSushi/toml/decode.go +++ /dev/null @@ -1,509 +0,0 @@ -package toml - -import ( - "fmt" - "io" - "io/ioutil" - "math" - "reflect" - "strings" - "time" -) - -func e(format string, args ...interface{}) error { - return fmt.Errorf("toml: "+format, args...) -} - -// Unmarshaler is the interface implemented by objects that can unmarshal a -// TOML description of themselves. -type Unmarshaler interface { - UnmarshalTOML(interface{}) error -} - -// Unmarshal decodes the contents of `p` in TOML format into a pointer `v`. -func Unmarshal(p []byte, v interface{}) error { - _, err := Decode(string(p), v) - return err -} - -// Primitive is a TOML value that hasn't been decoded into a Go value. -// When using the various `Decode*` functions, the type `Primitive` may -// be given to any value, and its decoding will be delayed. -// -// A `Primitive` value can be decoded using the `PrimitiveDecode` function. -// -// The underlying representation of a `Primitive` value is subject to change. -// Do not rely on it. -// -// N.B. Primitive values are still parsed, so using them will only avoid -// the overhead of reflection. They can be useful when you don't know the -// exact type of TOML data until run time. 
-type Primitive struct { - undecoded interface{} - context Key -} - -// DEPRECATED! -// -// Use MetaData.PrimitiveDecode instead. -func PrimitiveDecode(primValue Primitive, v interface{}) error { - md := MetaData{decoded: make(map[string]bool)} - return md.unify(primValue.undecoded, rvalue(v)) -} - -// PrimitiveDecode is just like the other `Decode*` functions, except it -// decodes a TOML value that has already been parsed. Valid primitive values -// can *only* be obtained from values filled by the decoder functions, -// including this method. (i.e., `v` may contain more `Primitive` -// values.) -// -// Meta data for primitive values is included in the meta data returned by -// the `Decode*` functions with one exception: keys returned by the Undecoded -// method will only reflect keys that were decoded. Namely, any keys hidden -// behind a Primitive will be considered undecoded. Executing this method will -// update the undecoded keys in the meta data. (See the example.) -func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error { - md.context = primValue.context - defer func() { md.context = nil }() - return md.unify(primValue.undecoded, rvalue(v)) -} - -// Decode will decode the contents of `data` in TOML format into a pointer -// `v`. -// -// TOML hashes correspond to Go structs or maps. (Dealer's choice. They can be -// used interchangeably.) -// -// TOML arrays of tables correspond to either a slice of structs or a slice -// of maps. -// -// TOML datetimes correspond to Go `time.Time` values. -// -// All other TOML types (float, string, int, bool and array) correspond -// to the obvious Go types. -// -// An exception to the above rules is if a type implements the -// encoding.TextUnmarshaler interface. In this case, any primitive TOML value -// (floats, strings, integers, booleans and datetimes) will be converted to -// a byte string and given to the value's UnmarshalText method. See the -// Unmarshaler example for a demonstration with time duration strings. -// -// Key mapping -// -// TOML keys can map to either keys in a Go map or field names in a Go -// struct. The special `toml` struct tag may be used to map TOML keys to -// struct fields that don't match the key name exactly. (See the example.) -// A case insensitive match to struct names will be tried if an exact match -// can't be found. -// -// The mapping between TOML values and Go values is loose. That is, there -// may exist TOML values that cannot be placed into your representation, and -// there may be parts of your representation that do not correspond to -// TOML values. This loose mapping can be made stricter by using the IsDefined -// and/or Undecoded methods on the MetaData returned. -// -// This decoder will not handle cyclic types. If a cyclic type is passed, -// `Decode` will not terminate. -func Decode(data string, v interface{}) (MetaData, error) { - rv := reflect.ValueOf(v) - if rv.Kind() != reflect.Ptr { - return MetaData{}, e("Decode of non-pointer %s", reflect.TypeOf(v)) - } - if rv.IsNil() { - return MetaData{}, e("Decode of nil %s", reflect.TypeOf(v)) - } - p, err := parse(data) - if err != nil { - return MetaData{}, err - } - md := MetaData{ - p.mapping, p.types, p.ordered, - make(map[string]bool, len(p.ordered)), nil, - } - return md, md.unify(p.mapping, indirect(rv)) -} - -// DecodeFile is just like Decode, except it will automatically read the -// contents of the file at `fpath` and decode it for you. 
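The `Decode` documentation above notes that the loose mapping "can be made stricter by using the IsDefined and/or Undecoded methods on the MetaData returned". A small sketch of that pattern (the `serverConfig` type and the misspelled key are illustrative):

```go
package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

type serverConfig struct {
	Host string
	Port int
}

func main() {
	// "prot" matches no field, so Decode succeeds but leaves the key
	// undecoded; inspecting MetaData.Undecoded surfaces the typo.
	blob := "host = \"localhost\"\nprot = 8080\n"

	var cfg serverConfig
	md, err := toml.Decode(blob, &cfg)
	if err != nil {
		fmt.Println(err)
		return
	}
	if unknown := md.Undecoded(); len(unknown) > 0 {
		fmt.Printf("unrecognized keys: %v\n", unknown) // [prot]
	}
}
```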
-func DecodeFile(fpath string, v interface{}) (MetaData, error) { - bs, err := ioutil.ReadFile(fpath) - if err != nil { - return MetaData{}, err - } - return Decode(string(bs), v) -} - -// DecodeReader is just like Decode, except it will consume all bytes -// from the reader and decode it for you. -func DecodeReader(r io.Reader, v interface{}) (MetaData, error) { - bs, err := ioutil.ReadAll(r) - if err != nil { - return MetaData{}, err - } - return Decode(string(bs), v) -} - -// unify performs a sort of type unification based on the structure of `rv`, -// which is the client representation. -// -// Any type mismatch produces an error. Finding a type that we don't know -// how to handle produces an unsupported type error. -func (md *MetaData) unify(data interface{}, rv reflect.Value) error { - - // Special case. Look for a `Primitive` value. - if rv.Type() == reflect.TypeOf((*Primitive)(nil)).Elem() { - // Save the undecoded data and the key context into the primitive - // value. - context := make(Key, len(md.context)) - copy(context, md.context) - rv.Set(reflect.ValueOf(Primitive{ - undecoded: data, - context: context, - })) - return nil - } - - // Special case. Unmarshaler Interface support. - if rv.CanAddr() { - if v, ok := rv.Addr().Interface().(Unmarshaler); ok { - return v.UnmarshalTOML(data) - } - } - - // Special case. Handle time.Time values specifically. - // TODO: Remove this code when we decide to drop support for Go 1.1. - // This isn't necessary in Go 1.2 because time.Time satisfies the encoding - // interfaces. - if rv.Type().AssignableTo(rvalue(time.Time{}).Type()) { - return md.unifyDatetime(data, rv) - } - - // Special case. Look for a value satisfying the TextUnmarshaler interface. - if v, ok := rv.Interface().(TextUnmarshaler); ok { - return md.unifyText(data, v) - } - // BUG(burntsushi) - // The behavior here is incorrect whenever a Go type satisfies the - // encoding.TextUnmarshaler interface but also corresponds to a TOML - // hash or array. In particular, the unmarshaler should only be applied - // to primitive TOML values. But at this point, it will be applied to - // all kinds of values and produce an incorrect error whenever those values - // are hashes or arrays (including arrays of tables). - - k := rv.Kind() - - // laziness - if k >= reflect.Int && k <= reflect.Uint64 { - return md.unifyInt(data, rv) - } - switch k { - case reflect.Ptr: - elem := reflect.New(rv.Type().Elem()) - err := md.unify(data, reflect.Indirect(elem)) - if err != nil { - return err - } - rv.Set(elem) - return nil - case reflect.Struct: - return md.unifyStruct(data, rv) - case reflect.Map: - return md.unifyMap(data, rv) - case reflect.Array: - return md.unifyArray(data, rv) - case reflect.Slice: - return md.unifySlice(data, rv) - case reflect.String: - return md.unifyString(data, rv) - case reflect.Bool: - return md.unifyBool(data, rv) - case reflect.Interface: - // we only support empty interfaces. 
- if rv.NumMethod() > 0 { - return e("unsupported type %s", rv.Type()) - } - return md.unifyAnything(data, rv) - case reflect.Float32: - fallthrough - case reflect.Float64: - return md.unifyFloat64(data, rv) - } - return e("unsupported type %s", rv.Kind()) -} - -func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error { - tmap, ok := mapping.(map[string]interface{}) - if !ok { - if mapping == nil { - return nil - } - return e("type mismatch for %s: expected table but found %T", - rv.Type().String(), mapping) - } - - for key, datum := range tmap { - var f *field - fields := cachedTypeFields(rv.Type()) - for i := range fields { - ff := &fields[i] - if ff.name == key { - f = ff - break - } - if f == nil && strings.EqualFold(ff.name, key) { - f = ff - } - } - if f != nil { - subv := rv - for _, i := range f.index { - subv = indirect(subv.Field(i)) - } - if isUnifiable(subv) { - md.decoded[md.context.add(key).String()] = true - md.context = append(md.context, key) - if err := md.unify(datum, subv); err != nil { - return err - } - md.context = md.context[0 : len(md.context)-1] - } else if f.name != "" { - // Bad user! No soup for you! - return e("cannot write unexported field %s.%s", - rv.Type().String(), f.name) - } - } - } - return nil -} - -func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error { - tmap, ok := mapping.(map[string]interface{}) - if !ok { - if tmap == nil { - return nil - } - return badtype("map", mapping) - } - if rv.IsNil() { - rv.Set(reflect.MakeMap(rv.Type())) - } - for k, v := range tmap { - md.decoded[md.context.add(k).String()] = true - md.context = append(md.context, k) - - rvkey := indirect(reflect.New(rv.Type().Key())) - rvval := reflect.Indirect(reflect.New(rv.Type().Elem())) - if err := md.unify(v, rvval); err != nil { - return err - } - md.context = md.context[0 : len(md.context)-1] - - rvkey.SetString(k) - rv.SetMapIndex(rvkey, rvval) - } - return nil -} - -func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error { - datav := reflect.ValueOf(data) - if datav.Kind() != reflect.Slice { - if !datav.IsValid() { - return nil - } - return badtype("slice", data) - } - sliceLen := datav.Len() - if sliceLen != rv.Len() { - return e("expected array length %d; got TOML array of length %d", - rv.Len(), sliceLen) - } - return md.unifySliceArray(datav, rv) -} - -func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error { - datav := reflect.ValueOf(data) - if datav.Kind() != reflect.Slice { - if !datav.IsValid() { - return nil - } - return badtype("slice", data) - } - n := datav.Len() - if rv.IsNil() || rv.Cap() < n { - rv.Set(reflect.MakeSlice(rv.Type(), n, n)) - } - rv.SetLen(n) - return md.unifySliceArray(datav, rv) -} - -func (md *MetaData) unifySliceArray(data, rv reflect.Value) error { - sliceLen := data.Len() - for i := 0; i < sliceLen; i++ { - v := data.Index(i).Interface() - sliceval := indirect(rv.Index(i)) - if err := md.unify(v, sliceval); err != nil { - return err - } - } - return nil -} - -func (md *MetaData) unifyDatetime(data interface{}, rv reflect.Value) error { - if _, ok := data.(time.Time); ok { - rv.Set(reflect.ValueOf(data)) - return nil - } - return badtype("time.Time", data) -} - -func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error { - if s, ok := data.(string); ok { - rv.SetString(s) - return nil - } - return badtype("string", data) -} - -func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error { - if num, ok := data.(float64); ok { - switch 
rv.Kind() { - case reflect.Float32: - fallthrough - case reflect.Float64: - rv.SetFloat(num) - default: - panic("bug") - } - return nil - } - return badtype("float", data) -} - -func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error { - if num, ok := data.(int64); ok { - if rv.Kind() >= reflect.Int && rv.Kind() <= reflect.Int64 { - switch rv.Kind() { - case reflect.Int, reflect.Int64: - // No bounds checking necessary. - case reflect.Int8: - if num < math.MinInt8 || num > math.MaxInt8 { - return e("value %d is out of range for int8", num) - } - case reflect.Int16: - if num < math.MinInt16 || num > math.MaxInt16 { - return e("value %d is out of range for int16", num) - } - case reflect.Int32: - if num < math.MinInt32 || num > math.MaxInt32 { - return e("value %d is out of range for int32", num) - } - } - rv.SetInt(num) - } else if rv.Kind() >= reflect.Uint && rv.Kind() <= reflect.Uint64 { - unum := uint64(num) - switch rv.Kind() { - case reflect.Uint, reflect.Uint64: - // No bounds checking necessary. - case reflect.Uint8: - if num < 0 || unum > math.MaxUint8 { - return e("value %d is out of range for uint8", num) - } - case reflect.Uint16: - if num < 0 || unum > math.MaxUint16 { - return e("value %d is out of range for uint16", num) - } - case reflect.Uint32: - if num < 0 || unum > math.MaxUint32 { - return e("value %d is out of range for uint32", num) - } - } - rv.SetUint(unum) - } else { - panic("unreachable") - } - return nil - } - return badtype("integer", data) -} - -func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error { - if b, ok := data.(bool); ok { - rv.SetBool(b) - return nil - } - return badtype("boolean", data) -} - -func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error { - rv.Set(reflect.ValueOf(data)) - return nil -} - -func (md *MetaData) unifyText(data interface{}, v TextUnmarshaler) error { - var s string - switch sdata := data.(type) { - case TextMarshaler: - text, err := sdata.MarshalText() - if err != nil { - return err - } - s = string(text) - case fmt.Stringer: - s = sdata.String() - case string: - s = sdata - case bool: - s = fmt.Sprintf("%v", sdata) - case int64: - s = fmt.Sprintf("%d", sdata) - case float64: - s = fmt.Sprintf("%f", sdata) - default: - return badtype("primitive (string-like)", data) - } - if err := v.UnmarshalText([]byte(s)); err != nil { - return err - } - return nil -} - -// rvalue returns a reflect.Value of `v`. All pointers are resolved. -func rvalue(v interface{}) reflect.Value { - return indirect(reflect.ValueOf(v)) -} - -// indirect returns the value pointed to by a pointer. -// Pointers are followed until the value is not a pointer. -// New values are allocated for each nil pointer. -// -// An exception to this rule is if the value satisfies an interface of -// interest to us (like encoding.TextUnmarshaler). 
-func indirect(v reflect.Value) reflect.Value { - if v.Kind() != reflect.Ptr { - if v.CanSet() { - pv := v.Addr() - if _, ok := pv.Interface().(TextUnmarshaler); ok { - return pv - } - } - return v - } - if v.IsNil() { - v.Set(reflect.New(v.Type().Elem())) - } - return indirect(reflect.Indirect(v)) -} - -func isUnifiable(rv reflect.Value) bool { - if rv.CanSet() { - return true - } - if _, ok := rv.Interface().(TextUnmarshaler); ok { - return true - } - return false -} - -func badtype(expected string, data interface{}) error { - return e("cannot load TOML value of type %T into a Go %s", data, expected) -} diff --git a/vendor/github.com/BurntSushi/toml/decode_meta.go b/vendor/github.com/BurntSushi/toml/decode_meta.go deleted file mode 100644 index b9914a67..00000000 --- a/vendor/github.com/BurntSushi/toml/decode_meta.go +++ /dev/null @@ -1,121 +0,0 @@ -package toml - -import "strings" - -// MetaData allows access to meta information about TOML data that may not -// be inferrable via reflection. In particular, whether a key has been defined -// and the TOML type of a key. -type MetaData struct { - mapping map[string]interface{} - types map[string]tomlType - keys []Key - decoded map[string]bool - context Key // Used only during decoding. -} - -// IsDefined returns true if the key given exists in the TOML data. The key -// should be specified hierarchially. e.g., -// -// // access the TOML key 'a.b.c' -// IsDefined("a", "b", "c") -// -// IsDefined will return false if an empty key given. Keys are case sensitive. -func (md *MetaData) IsDefined(key ...string) bool { - if len(key) == 0 { - return false - } - - var hash map[string]interface{} - var ok bool - var hashOrVal interface{} = md.mapping - for _, k := range key { - if hash, ok = hashOrVal.(map[string]interface{}); !ok { - return false - } - if hashOrVal, ok = hash[k]; !ok { - return false - } - } - return true -} - -// Type returns a string representation of the type of the key specified. -// -// Type will return the empty string if given an empty key or a key that -// does not exist. Keys are case sensitive. -func (md *MetaData) Type(key ...string) string { - fullkey := strings.Join(key, ".") - if typ, ok := md.types[fullkey]; ok { - return typ.typeString() - } - return "" -} - -// Key is the type of any TOML key, including key groups. Use (MetaData).Keys -// to get values of this type. -type Key []string - -func (k Key) String() string { - return strings.Join(k, ".") -} - -func (k Key) maybeQuotedAll() string { - var ss []string - for i := range k { - ss = append(ss, k.maybeQuoted(i)) - } - return strings.Join(ss, ".") -} - -func (k Key) maybeQuoted(i int) string { - quote := false - for _, c := range k[i] { - if !isBareKeyChar(c) { - quote = true - break - } - } - if quote { - return "\"" + strings.Replace(k[i], "\"", "\\\"", -1) + "\"" - } - return k[i] -} - -func (k Key) add(piece string) Key { - newKey := make(Key, len(k)+1) - copy(newKey, k) - newKey[len(k)] = piece - return newKey -} - -// Keys returns a slice of every key in the TOML data, including key groups. -// Each key is itself a slice, where the first element is the top of the -// hierarchy and the last is the most specific. -// -// The list will have the same order as the keys appeared in the TOML data. -// -// All keys returned are non-empty. -func (md *MetaData) Keys() []Key { - return md.keys -} - -// Undecoded returns all keys that have not been decoded in the order in which -// they appear in the original TOML document. 
-// -// This includes keys that haven't been decoded because of a Primitive value. -// Once the Primitive value is decoded, the keys will be considered decoded. -// -// Also note that decoding into an empty interface will result in no decoding, -// and so no keys will be considered decoded. -// -// In this sense, the Undecoded keys correspond to keys in the TOML document -// that do not have a concrete type in your representation. -func (md *MetaData) Undecoded() []Key { - undecoded := make([]Key, 0, len(md.keys)) - for _, key := range md.keys { - if !md.decoded[key.String()] { - undecoded = append(undecoded, key) - } - } - return undecoded -} diff --git a/vendor/github.com/BurntSushi/toml/decode_test.go b/vendor/github.com/BurntSushi/toml/decode_test.go deleted file mode 100644 index 0c36b33e..00000000 --- a/vendor/github.com/BurntSushi/toml/decode_test.go +++ /dev/null @@ -1,1447 +0,0 @@ -package toml - -import ( - "fmt" - "log" - "math" - "reflect" - "strings" - "testing" - "time" -) - -func TestDecodeSimple(t *testing.T) { - var testSimple = ` -age = 250 -andrew = "gallant" -kait = "brady" -now = 1987-07-05T05:45:00Z -yesOrNo = true -pi = 3.14 -colors = [ - ["red", "green", "blue"], - ["cyan", "magenta", "yellow", "black"], -] - -[My.Cats] -plato = "cat 1" -cauchy = "cat 2" -` - - type cats struct { - Plato string - Cauchy string - } - type simple struct { - Age int - Colors [][]string - Pi float64 - YesOrNo bool - Now time.Time - Andrew string - Kait string - My map[string]cats - } - - var val simple - _, err := Decode(testSimple, &val) - if err != nil { - t.Fatal(err) - } - - now, err := time.Parse("2006-01-02T15:04:05", "1987-07-05T05:45:00") - if err != nil { - panic(err) - } - var answer = simple{ - Age: 250, - Andrew: "gallant", - Kait: "brady", - Now: now, - YesOrNo: true, - Pi: 3.14, - Colors: [][]string{ - {"red", "green", "blue"}, - {"cyan", "magenta", "yellow", "black"}, - }, - My: map[string]cats{ - "Cats": {Plato: "cat 1", Cauchy: "cat 2"}, - }, - } - if !reflect.DeepEqual(val, answer) { - t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", - answer, val) - } -} - -func TestDecodeEmbedded(t *testing.T) { - type Dog struct{ Name string } - type Age int - type cat struct{ Name string } - - for _, test := range []struct { - label string - input string - decodeInto interface{} - wantDecoded interface{} - }{ - { - label: "embedded struct", - input: `Name = "milton"`, - decodeInto: &struct{ Dog }{}, - wantDecoded: &struct{ Dog }{Dog{"milton"}}, - }, - { - label: "embedded non-nil pointer to struct", - input: `Name = "milton"`, - decodeInto: &struct{ *Dog }{}, - wantDecoded: &struct{ *Dog }{&Dog{"milton"}}, - }, - { - label: "embedded nil pointer to struct", - input: ``, - decodeInto: &struct{ *Dog }{}, - wantDecoded: &struct{ *Dog }{nil}, - }, - { - label: "unexported embedded struct", - input: `Name = "socks"`, - decodeInto: &struct{ cat }{}, - wantDecoded: &struct{ cat }{cat{"socks"}}, - }, - { - label: "embedded int", - input: `Age = -5`, - decodeInto: &struct{ Age }{}, - wantDecoded: &struct{ Age }{-5}, - }, - } { - _, err := Decode(test.input, test.decodeInto) - if err != nil { - t.Fatal(err) - } - if !reflect.DeepEqual(test.wantDecoded, test.decodeInto) { - t.Errorf("%s: want decoded == %+v, got %+v", - test.label, test.wantDecoded, test.decodeInto) - } - } -} - -func TestDecodeIgnoredFields(t *testing.T) { - type simple struct { - Number int `toml:"-"` - } - const input = ` -Number = 123 -- = 234 -` - var s simple - if _, err := Decode(input, &s); err != nil { 
- t.Fatal(err) - } - if s.Number != 0 { - t.Errorf("got: %d; want 0", s.Number) - } -} - -func TestTableArrays(t *testing.T) { - var tomlTableArrays = ` -[[albums]] -name = "Born to Run" - - [[albums.songs]] - name = "Jungleland" - - [[albums.songs]] - name = "Meeting Across the River" - -[[albums]] -name = "Born in the USA" - - [[albums.songs]] - name = "Glory Days" - - [[albums.songs]] - name = "Dancing in the Dark" -` - - type Song struct { - Name string - } - - type Album struct { - Name string - Songs []Song - } - - type Music struct { - Albums []Album - } - - expected := Music{[]Album{ - {"Born to Run", []Song{{"Jungleland"}, {"Meeting Across the River"}}}, - {"Born in the USA", []Song{{"Glory Days"}, {"Dancing in the Dark"}}}, - }} - var got Music - if _, err := Decode(tomlTableArrays, &got); err != nil { - t.Fatal(err) - } - if !reflect.DeepEqual(expected, got) { - t.Fatalf("\n%#v\n!=\n%#v\n", expected, got) - } -} - -func TestTableNesting(t *testing.T) { - for _, tt := range []struct { - t string - want []string - }{ - {"[a.b.c]", []string{"a", "b", "c"}}, - {`[a."b.c"]`, []string{"a", "b.c"}}, - {`[a.'b.c']`, []string{"a", "b.c"}}, - {`[a.' b ']`, []string{"a", " b "}}, - {"[ d.e.f ]", []string{"d", "e", "f"}}, - {"[ g . h . i ]", []string{"g", "h", "i"}}, - {`[ j . "ʞ" . 'l' ]`, []string{"j", "ʞ", "l"}}, - } { - var m map[string]interface{} - if _, err := Decode(tt.t, &m); err != nil { - t.Errorf("Decode(%q): got error: %s", tt.t, err) - continue - } - if keys := extractNestedKeys(m); !reflect.DeepEqual(keys, tt.want) { - t.Errorf("Decode(%q): got nested keys %#v; want %#v", - tt.t, keys, tt.want) - } - } -} - -func extractNestedKeys(v map[string]interface{}) []string { - var result []string - for { - if len(v) != 1 { - return result - } - for k, m := range v { - result = append(result, k) - var ok bool - v, ok = m.(map[string]interface{}) - if !ok { - return result - } - } - - } -} - -// Case insensitive matching tests. -// A bit more comprehensive than needed given the current implementation, -// but implementations change. -// Probably still missing demonstrations of some ugly corner cases regarding -// case insensitive matching and multiple fields. 
-func TestCase(t *testing.T) { - var caseToml = ` -tOpString = "string" -tOpInt = 1 -tOpFloat = 1.1 -tOpBool = true -tOpdate = 2006-01-02T15:04:05Z -tOparray = [ "array" ] -Match = "i should be in Match only" -MatcH = "i should be in MatcH only" -once = "just once" -[nEst.eD] -nEstedString = "another string" -` - - type InsensitiveEd struct { - NestedString string - } - - type InsensitiveNest struct { - Ed InsensitiveEd - } - - type Insensitive struct { - TopString string - TopInt int - TopFloat float64 - TopBool bool - TopDate time.Time - TopArray []string - Match string - MatcH string - Once string - OncE string - Nest InsensitiveNest - } - - tme, err := time.Parse(time.RFC3339, time.RFC3339[:len(time.RFC3339)-5]) - if err != nil { - panic(err) - } - expected := Insensitive{ - TopString: "string", - TopInt: 1, - TopFloat: 1.1, - TopBool: true, - TopDate: tme, - TopArray: []string{"array"}, - MatcH: "i should be in MatcH only", - Match: "i should be in Match only", - Once: "just once", - OncE: "", - Nest: InsensitiveNest{ - Ed: InsensitiveEd{NestedString: "another string"}, - }, - } - var got Insensitive - if _, err := Decode(caseToml, &got); err != nil { - t.Fatal(err) - } - if !reflect.DeepEqual(expected, got) { - t.Fatalf("\n%#v\n!=\n%#v\n", expected, got) - } -} - -func TestPointers(t *testing.T) { - type Object struct { - Type string - Description string - } - - type Dict struct { - NamedObject map[string]*Object - BaseObject *Object - Strptr *string - Strptrs []*string - } - s1, s2, s3 := "blah", "abc", "def" - expected := &Dict{ - Strptr: &s1, - Strptrs: []*string{&s2, &s3}, - NamedObject: map[string]*Object{ - "foo": {"FOO", "fooooo!!!"}, - "bar": {"BAR", "ba-ba-ba-ba-barrrr!!!"}, - }, - BaseObject: &Object{"BASE", "da base"}, - } - - ex1 := ` -Strptr = "blah" -Strptrs = ["abc", "def"] - -[NamedObject.foo] -Type = "FOO" -Description = "fooooo!!!" - -[NamedObject.bar] -Type = "BAR" -Description = "ba-ba-ba-ba-barrrr!!!" 
- -[BaseObject] -Type = "BASE" -Description = "da base" -` - dict := new(Dict) - _, err := Decode(ex1, dict) - if err != nil { - t.Errorf("Decode error: %v", err) - } - if !reflect.DeepEqual(expected, dict) { - t.Fatalf("\n%#v\n!=\n%#v\n", expected, dict) - } -} - -func TestDecodeDatetime(t *testing.T) { - const noTimestamp = "2006-01-02T15:04:05" - for _, tt := range []struct { - s string - t string - format string - }{ - {"1979-05-27T07:32:00Z", "1979-05-27T07:32:00Z", time.RFC3339}, - {"1979-05-27T00:32:00-07:00", "1979-05-27T00:32:00-07:00", time.RFC3339}, - { - "1979-05-27T00:32:00.999999-07:00", - "1979-05-27T00:32:00.999999-07:00", - time.RFC3339, - }, - {"1979-05-27T07:32:00", "1979-05-27T07:32:00", noTimestamp}, - { - "1979-05-27T00:32:00.999999", - "1979-05-27T00:32:00.999999", - noTimestamp, - }, - {"1979-05-27", "1979-05-27T00:00:00", noTimestamp}, - } { - var x struct{ D time.Time } - input := "d = " + tt.s - if _, err := Decode(input, &x); err != nil { - t.Errorf("Decode(%q): got error: %s", input, err) - continue - } - want, err := time.ParseInLocation(tt.format, tt.t, time.Local) - if err != nil { - panic(err) - } - if !x.D.Equal(want) { - t.Errorf("Decode(%q): got %s; want %s", input, x.D, want) - } - } -} - -func TestDecodeBadDatetime(t *testing.T) { - var x struct{ T time.Time } - for _, s := range []string{ - "123", - "2006-01-50T00:00:00Z", - "2006-01-30T00:00", - "2006-01-30T", - } { - input := "T = " + s - if _, err := Decode(input, &x); err == nil { - t.Errorf("Expected invalid DateTime error for %q", s) - } - } -} - -func TestDecodeMultilineStrings(t *testing.T) { - var x struct { - S string - } - const s0 = `s = """ -a b \n c -d e f -"""` - if _, err := Decode(s0, &x); err != nil { - t.Fatal(err) - } - if want := "a b \n c\nd e f\n"; x.S != want { - t.Errorf("got: %q; want: %q", x.S, want) - } - const s1 = `s = """a b c\ -"""` - if _, err := Decode(s1, &x); err != nil { - t.Fatal(err) - } - if want := "a b c"; x.S != want { - t.Errorf("got: %q; want: %q", x.S, want) - } -} - -type sphere struct { - Center [3]float64 - Radius float64 -} - -func TestDecodeSimpleArray(t *testing.T) { - var s1 sphere - if _, err := Decode(`center = [0.0, 1.5, 0.0]`, &s1); err != nil { - t.Fatal(err) - } -} - -func TestDecodeArrayWrongSize(t *testing.T) { - var s1 sphere - if _, err := Decode(`center = [0.1, 2.3]`, &s1); err == nil { - t.Fatal("Expected array type mismatch error") - } -} - -func TestDecodeLargeIntoSmallInt(t *testing.T) { - type table struct { - Value int8 - } - var tab table - if _, err := Decode(`value = 500`, &tab); err == nil { - t.Fatal("Expected integer out-of-bounds error.") - } -} - -func TestDecodeSizedInts(t *testing.T) { - type table struct { - U8 uint8 - U16 uint16 - U32 uint32 - U64 uint64 - U uint - I8 int8 - I16 int16 - I32 int32 - I64 int64 - I int - } - answer := table{1, 1, 1, 1, 1, -1, -1, -1, -1, -1} - toml := ` - u8 = 1 - u16 = 1 - u32 = 1 - u64 = 1 - u = 1 - i8 = -1 - i16 = -1 - i32 = -1 - i64 = -1 - i = -1 - ` - var tab table - if _, err := Decode(toml, &tab); err != nil { - t.Fatal(err.Error()) - } - if answer != tab { - t.Fatalf("Expected %#v but got %#v", answer, tab) - } -} - -func TestDecodeInts(t *testing.T) { - for _, tt := range []struct { - s string - want int64 - }{ - {"0", 0}, - {"+99", 99}, - {"-10", -10}, - {"1_234_567", 1234567}, - {"1_2_3_4", 1234}, - {"-9_223_372_036_854_775_808", math.MinInt64}, - {"9_223_372_036_854_775_807", math.MaxInt64}, - } { - var x struct{ N int64 } - input := "n = " + tt.s - if _, err := Decode(input, 
&x); err != nil { - t.Errorf("Decode(%q): got error: %s", input, err) - continue - } - if x.N != tt.want { - t.Errorf("Decode(%q): got %d; want %d", input, x.N, tt.want) - } - } -} - -func TestDecodeFloats(t *testing.T) { - for _, tt := range []struct { - s string - want float64 - }{ - {"+1.0", 1}, - {"3.1415", 3.1415}, - {"-0.01", -0.01}, - {"5e+22", 5e22}, - {"1e6", 1e6}, - {"-2E-2", -2e-2}, - {"6.626e-34", 6.626e-34}, - {"9_224_617.445_991_228_313", 9224617.445991228313}, - {"9_876.54_32e1_0", 9876.5432e10}, - } { - var x struct{ N float64 } - input := "n = " + tt.s - if _, err := Decode(input, &x); err != nil { - t.Errorf("Decode(%q): got error: %s", input, err) - continue - } - if x.N != tt.want { - t.Errorf("Decode(%q): got %f; want %f", input, x.N, tt.want) - } - } -} - -func TestDecodeMalformedNumbers(t *testing.T) { - for _, tt := range []struct { - s string - want string - }{ - {"++99", "expected a digit"}, - {"0..1", "must be followed by one or more digits"}, - {"0.1.2", "Invalid float value"}, - {"1e2.3", "Invalid float value"}, - {"1e2e3", "Invalid float value"}, - {"_123", "expected value"}, - {"123_", "surrounded by digits"}, - {"1._23", "surrounded by digits"}, - {"1e__23", "surrounded by digits"}, - {"123.", "must be followed by one or more digits"}, - {"1.e2", "must be followed by one or more digits"}, - } { - var x struct{ N interface{} } - input := "n = " + tt.s - _, err := Decode(input, &x) - if err == nil { - t.Errorf("Decode(%q): got nil, want error containing %q", - input, tt.want) - continue - } - if !strings.Contains(err.Error(), tt.want) { - t.Errorf("Decode(%q): got %q, want error containing %q", - input, err, tt.want) - } - } -} - -func TestDecodeBadValues(t *testing.T) { - for _, tt := range []struct { - v interface{} - want string - }{ - {3, "non-pointer int"}, - {(*int)(nil), "nil"}, - } { - _, err := Decode(`x = 3`, tt.v) - if err == nil { - t.Errorf("Decode(%v): got nil; want error containing %q", - tt.v, tt.want) - continue - } - if !strings.Contains(err.Error(), tt.want) { - t.Errorf("Decode(%v): got %q; want error containing %q", - tt.v, err, tt.want) - } - } -} - -func TestUnmarshaler(t *testing.T) { - - var tomlBlob = ` -[dishes.hamboogie] -name = "Hamboogie with fries" -price = 10.99 - -[[dishes.hamboogie.ingredients]] -name = "Bread Bun" - -[[dishes.hamboogie.ingredients]] -name = "Lettuce" - -[[dishes.hamboogie.ingredients]] -name = "Real Beef Patty" - -[[dishes.hamboogie.ingredients]] -name = "Tomato" - -[dishes.eggsalad] -name = "Egg Salad with rice" -price = 3.99 - -[[dishes.eggsalad.ingredients]] -name = "Egg" - -[[dishes.eggsalad.ingredients]] -name = "Mayo" - -[[dishes.eggsalad.ingredients]] -name = "Rice" -` - m := &menu{} - if _, err := Decode(tomlBlob, m); err != nil { - t.Fatal(err) - } - - if len(m.Dishes) != 2 { - t.Log("two dishes should be loaded with UnmarshalTOML()") - t.Errorf("expected %d but got %d", 2, len(m.Dishes)) - } - - eggSalad := m.Dishes["eggsalad"] - if _, ok := interface{}(eggSalad).(dish); !ok { - t.Errorf("expected a dish") - } - - if eggSalad.Name != "Egg Salad with rice" { - t.Errorf("expected the dish to be named 'Egg Salad with rice'") - } - - if len(eggSalad.Ingredients) != 3 { - t.Log("dish should be loaded with UnmarshalTOML()") - t.Errorf("expected %d but got %d", 3, len(eggSalad.Ingredients)) - } - - found := false - for _, i := range eggSalad.Ingredients { - if i.Name == "Rice" { - found = true - break - } - } - if !found { - t.Error("Rice was not loaded in UnmarshalTOML()") - } - - // test on a value - 
must be passed as * - o := menu{} - if _, err := Decode(tomlBlob, &o); err != nil { - t.Fatal(err) - } - -} - -func TestDecodeInlineTable(t *testing.T) { - input := ` -[CookieJar] -Types = {Chocolate = "yummy", Oatmeal = "best ever"} - -[Seasons] -Locations = {NY = {Temp = "not cold", Rating = 4}, MI = {Temp = "freezing", Rating = 9}} -` - type cookieJar struct { - Types map[string]string - } - type properties struct { - Temp string - Rating int - } - type seasons struct { - Locations map[string]properties - } - type wrapper struct { - CookieJar cookieJar - Seasons seasons - } - var got wrapper - - meta, err := Decode(input, &got) - if err != nil { - t.Fatal(err) - } - want := wrapper{ - CookieJar: cookieJar{ - Types: map[string]string{ - "Chocolate": "yummy", - "Oatmeal": "best ever", - }, - }, - Seasons: seasons{ - Locations: map[string]properties{ - "NY": { - Temp: "not cold", - Rating: 4, - }, - "MI": { - Temp: "freezing", - Rating: 9, - }, - }, - }, - } - if !reflect.DeepEqual(got, want) { - t.Fatalf("after decode, got:\n\n%#v\n\nwant:\n\n%#v", got, want) - } - if len(meta.keys) != 12 { - t.Errorf("after decode, got %d meta keys; want 12", len(meta.keys)) - } - if len(meta.types) != 12 { - t.Errorf("after decode, got %d meta types; want 12", len(meta.types)) - } -} - -func TestDecodeInlineTableArray(t *testing.T) { - type point struct { - X, Y, Z int - } - var got struct { - Points []point - } - // Example inline table array from the spec. - const in = ` -points = [ { x = 1, y = 2, z = 3 }, - { x = 7, y = 8, z = 9 }, - { x = 2, y = 4, z = 8 } ] - -` - if _, err := Decode(in, &got); err != nil { - t.Fatal(err) - } - want := []point{ - {X: 1, Y: 2, Z: 3}, - {X: 7, Y: 8, Z: 9}, - {X: 2, Y: 4, Z: 8}, - } - if !reflect.DeepEqual(got.Points, want) { - t.Errorf("got %#v; want %#v", got.Points, want) - } -} - -func TestDecodeMalformedInlineTable(t *testing.T) { - for _, tt := range []struct { - s string - want string - }{ - {"{,}", "unexpected comma"}, - {"{x = 3 y = 4}", "expected a comma or an inline table terminator"}, - {"{x=3,,y=4}", "unexpected comma"}, - {"{x=3,\ny=4}", "newlines not allowed"}, - {"{x=3\n,y=4}", "newlines not allowed"}, - } { - var x struct{ A map[string]int } - input := "a = " + tt.s - _, err := Decode(input, &x) - if err == nil { - t.Errorf("Decode(%q): got nil, want error containing %q", - input, tt.want) - continue - } - if !strings.Contains(err.Error(), tt.want) { - t.Errorf("Decode(%q): got %q, want error containing %q", - input, err, tt.want) - } - } -} - -type menu struct { - Dishes map[string]dish -} - -func (m *menu) UnmarshalTOML(p interface{}) error { - m.Dishes = make(map[string]dish) - data, _ := p.(map[string]interface{}) - dishes := data["dishes"].(map[string]interface{}) - for n, v := range dishes { - if d, ok := v.(map[string]interface{}); ok { - nd := dish{} - nd.UnmarshalTOML(d) - m.Dishes[n] = nd - } else { - return fmt.Errorf("not a dish") - } - } - return nil -} - -type dish struct { - Name string - Price float32 - Ingredients []ingredient -} - -func (d *dish) UnmarshalTOML(p interface{}) error { - data, _ := p.(map[string]interface{}) - d.Name, _ = data["name"].(string) - d.Price, _ = data["price"].(float32) - ingredients, _ := data["ingredients"].([]map[string]interface{}) - for _, e := range ingredients { - n, _ := interface{}(e).(map[string]interface{}) - name, _ := n["name"].(string) - i := ingredient{name} - d.Ingredients = append(d.Ingredients, i) - } - return nil -} - -type ingredient struct { - Name string -} - -func TestDecodeSlices(t 
*testing.T) { - type T struct { - S []string - } - for i, tt := range []struct { - v T - input string - want T - }{ - {T{}, "", T{}}, - {T{[]string{}}, "", T{[]string{}}}, - {T{[]string{"a", "b"}}, "", T{[]string{"a", "b"}}}, - {T{}, "S = []", T{[]string{}}}, - {T{[]string{}}, "S = []", T{[]string{}}}, - {T{[]string{"a", "b"}}, "S = []", T{[]string{}}}, - {T{}, `S = ["x"]`, T{[]string{"x"}}}, - {T{[]string{}}, `S = ["x"]`, T{[]string{"x"}}}, - {T{[]string{"a", "b"}}, `S = ["x"]`, T{[]string{"x"}}}, - } { - if _, err := Decode(tt.input, &tt.v); err != nil { - t.Errorf("[%d] %s", i, err) - continue - } - if !reflect.DeepEqual(tt.v, tt.want) { - t.Errorf("[%d] got %#v; want %#v", i, tt.v, tt.want) - } - } -} - -func TestDecodePrimitive(t *testing.T) { - type S struct { - P Primitive - } - type T struct { - S []int - } - slicep := func(s []int) *[]int { return &s } - arrayp := func(a [2]int) *[2]int { return &a } - mapp := func(m map[string]int) *map[string]int { return &m } - for i, tt := range []struct { - v interface{} - input string - want interface{} - }{ - // slices - {slicep(nil), "", slicep(nil)}, - {slicep([]int{}), "", slicep([]int{})}, - {slicep([]int{1, 2, 3}), "", slicep([]int{1, 2, 3})}, - {slicep(nil), "P = [1,2]", slicep([]int{1, 2})}, - {slicep([]int{}), "P = [1,2]", slicep([]int{1, 2})}, - {slicep([]int{1, 2, 3}), "P = [1,2]", slicep([]int{1, 2})}, - - // arrays - {arrayp([2]int{2, 3}), "", arrayp([2]int{2, 3})}, - {arrayp([2]int{2, 3}), "P = [3,4]", arrayp([2]int{3, 4})}, - - // maps - {mapp(nil), "", mapp(nil)}, - {mapp(map[string]int{}), "", mapp(map[string]int{})}, - {mapp(map[string]int{"a": 1}), "", mapp(map[string]int{"a": 1})}, - {mapp(nil), "[P]\na = 2", mapp(map[string]int{"a": 2})}, - {mapp(map[string]int{}), "[P]\na = 2", mapp(map[string]int{"a": 2})}, - {mapp(map[string]int{"a": 1, "b": 3}), "[P]\na = 2", mapp(map[string]int{"a": 2, "b": 3})}, - - // structs - {&T{nil}, "[P]", &T{nil}}, - {&T{[]int{}}, "[P]", &T{[]int{}}}, - {&T{[]int{1, 2, 3}}, "[P]", &T{[]int{1, 2, 3}}}, - {&T{nil}, "[P]\nS = [1,2]", &T{[]int{1, 2}}}, - {&T{[]int{}}, "[P]\nS = [1,2]", &T{[]int{1, 2}}}, - {&T{[]int{1, 2, 3}}, "[P]\nS = [1,2]", &T{[]int{1, 2}}}, - } { - var s S - md, err := Decode(tt.input, &s) - if err != nil { - t.Errorf("[%d] Decode error: %s", i, err) - continue - } - if err := md.PrimitiveDecode(s.P, tt.v); err != nil { - t.Errorf("[%d] PrimitiveDecode error: %s", i, err) - continue - } - if !reflect.DeepEqual(tt.v, tt.want) { - t.Errorf("[%d] got %#v; want %#v", i, tt.v, tt.want) - } - } -} - -func TestDecodeErrors(t *testing.T) { - for _, s := range []string{ - `x="`, - `x='`, - `x='''`, - - // Cases found by fuzzing in - // https://github.com/BurntSushi/toml/issues/155. - `""�`, // used to panic with index out of range - `e="""`, // used to hang - } { - var x struct{} - _, err := Decode(s, &x) - if err == nil { - t.Errorf("Decode(%q): got nil error", s) - } - } -} - -// Test for https://github.com/BurntSushi/toml/pull/166. 
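The slice semantics that TestDecodeSlices pins down above are easy to miss: an absent key leaves a previously populated slice untouched, while an explicit `S = []` replaces it with an empty one. A minimal sketch of the same behavior against the public API, assuming the package is imported from github.com/BurntSushi/toml:

package main

import (
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

func main() {
	type T struct{ S []string }
	v := T{S: []string{"a", "b"}}

	// An absent key leaves the existing slice as-is.
	if _, err := toml.Decode("", &v); err != nil {
		log.Fatal(err)
	}
	fmt.Println(v.S) // [a b]

	// An explicit empty array replaces it with an empty slice.
	if _, err := toml.Decode("S = []", &v); err != nil {
		log.Fatal(err)
	}
	fmt.Println(v.S) // []
}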
-func TestDecodeBoolArray(t *testing.T) { - for _, tt := range []struct { - s string - got interface{} - want interface{} - }{ - { - "a = [true, false]", - &struct{ A []bool }{}, - &struct{ A []bool }{[]bool{true, false}}, - }, - { - "a = {a = true, b = false}", - &struct{ A map[string]bool }{}, - &struct{ A map[string]bool }{map[string]bool{"a": true, "b": false}}, - }, - } { - if _, err := Decode(tt.s, tt.got); err != nil { - t.Errorf("Decode(%q): %s", tt.s, err) - continue - } - if !reflect.DeepEqual(tt.got, tt.want) { - t.Errorf("Decode(%q): got %#v; want %#v", tt.s, tt.got, tt.want) - } - } -} - -func ExampleMetaData_PrimitiveDecode() { - var md MetaData - var err error - - var tomlBlob = ` -ranking = ["Springsteen", "J Geils"] - -[bands.Springsteen] -started = 1973 -albums = ["Greetings", "WIESS", "Born to Run", "Darkness"] - -[bands."J Geils"] -started = 1970 -albums = ["The J. Geils Band", "Full House", "Blow Your Face Out"] -` - - type band struct { - Started int - Albums []string - } - type classics struct { - Ranking []string - Bands map[string]Primitive - } - - // Do the initial decode. Reflection is delayed on Primitive values. - var music classics - if md, err = Decode(tomlBlob, &music); err != nil { - log.Fatal(err) - } - - // MetaData still includes information on Primitive values. - fmt.Printf("Is `bands.Springsteen` defined? %v\n", - md.IsDefined("bands", "Springsteen")) - - // Decode primitive data into Go values. - for _, artist := range music.Ranking { - // A band is a primitive value, so we need to decode it to get a - // real `band` value. - primValue := music.Bands[artist] - - var aBand band - if err = md.PrimitiveDecode(primValue, &aBand); err != nil { - log.Fatal(err) - } - fmt.Printf("%s started in %d.\n", artist, aBand.Started) - } - // Check to see if there were any fields left undecoded. - // Note that this won't be empty before decoding the Primitive value! - fmt.Printf("Undecoded: %q\n", md.Undecoded()) - - // Output: - // Is `bands.Springsteen` defined? true - // Springsteen started in 1973. - // J Geils started in 1970. - // Undecoded: [] -} - -func ExampleDecode() { - var tomlBlob = ` -# Some comments. -[alpha] -ip = "10.0.0.1" - - [alpha.config] - Ports = [ 8001, 8002 ] - Location = "Toronto" - Created = 1987-07-05T05:45:00Z - -[beta] -ip = "10.0.0.2" - - [beta.config] - Ports = [ 9001, 9002 ] - Location = "New Jersey" - Created = 1887-01-05T05:55:00Z -` - - type serverConfig struct { - Ports []int - Location string - Created time.Time - } - - type server struct { - IP string `toml:"ip,omitempty"` - Config serverConfig `toml:"config"` - } - - type servers map[string]server - - var config servers - if _, err := Decode(tomlBlob, &config); err != nil { - log.Fatal(err) - } - - for _, name := range []string{"alpha", "beta"} { - s := config[name] - fmt.Printf("Server: %s (ip: %s) in %s created on %s\n", - name, s.IP, s.Config.Location, - s.Config.Created.Format("2006-01-02")) - fmt.Printf("Ports: %v\n", s.Config.Ports) - } - - // Output: - // Server: alpha (ip: 10.0.0.1) in Toronto created on 1987-07-05 - // Ports: [8001 8002] - // Server: beta (ip: 10.0.0.2) in New Jersey created on 1887-01-05 - // Ports: [9001 9002] -} - -type duration struct { - time.Duration -} - -func (d *duration) UnmarshalText(text []byte) error { - var err error - d.Duration, err = time.ParseDuration(string(text)) - return err -} - -// Example Unmarshaler shows how to decode TOML strings into your own -// custom data type. 
-func Example_unmarshaler() { - blob := ` -[[song]] -name = "Thunder Road" -duration = "4m49s" - -[[song]] -name = "Stairway to Heaven" -duration = "8m03s" -` - type song struct { - Name string - Duration duration - } - type songs struct { - Song []song - } - var favorites songs - if _, err := Decode(blob, &favorites); err != nil { - log.Fatal(err) - } - - // Code to implement the TextUnmarshaler interface for `duration`: - // - // type duration struct { - // time.Duration - // } - // - // func (d *duration) UnmarshalText(text []byte) error { - // var err error - // d.Duration, err = time.ParseDuration(string(text)) - // return err - // } - - for _, s := range favorites.Song { - fmt.Printf("%s (%s)\n", s.Name, s.Duration) - } - // Output: - // Thunder Road (4m49s) - // Stairway to Heaven (8m3s) -} - -// Example StrictDecoding shows how to detect whether there are keys in the -// TOML document that weren't decoded into the value given. This is useful -// for returning an error to the user if they've included extraneous fields -// in their configuration. -func Example_strictDecoding() { - var blob = ` -key1 = "value1" -key2 = "value2" -key3 = "value3" -` - type config struct { - Key1 string - Key3 string - } - - var conf config - md, err := Decode(blob, &conf) - if err != nil { - log.Fatal(err) - } - fmt.Printf("Undecoded keys: %q\n", md.Undecoded()) - // Output: - // Undecoded keys: ["key2"] -} - -// Example UnmarshalTOML shows how to implement a struct type that knows how to -// unmarshal itself. The struct must take full responsibility for mapping the -// values passed into the struct. The method may be used with interfaces in a -// struct in cases where the actual type is not known until the data is -// examined. -func Example_unmarshalTOML() { - - var blob = ` -[[parts]] -type = "valve" -id = "valve-1" -size = 1.2 -rating = 4 - -[[parts]] -type = "valve" -id = "valve-2" -size = 2.1 -rating = 5 - -[[parts]] -type = "pipe" -id = "pipe-1" -length = 2.1 -diameter = 12 - -[[parts]] -type = "cable" -id = "cable-1" -length = 12 -rating = 3.1 -` - o := &order{} - err := Unmarshal([]byte(blob), o) - if err != nil { - log.Fatal(err) - } - - fmt.Println(len(o.parts)) - - for _, part := range o.parts { - fmt.Println(part.Name()) - } - - // Code to implement UmarshalJSON. - - // type order struct { - // // NOTE `order.parts` is a private slice of type `part` which is an - // // interface and may only be loaded from toml using the - // // UnmarshalTOML() method of the Umarshaler interface. - // parts parts - // } - - // func (o *order) UnmarshalTOML(data interface{}) error { - - // // NOTE the example below contains detailed type casting to show how - // // the 'data' is retrieved. In operational use, a type cast wrapper - // // may be preferred e.g. 
- // // - // // func AsMap(v interface{}) (map[string]interface{}, error) { - // // return v.(map[string]interface{}) - // // } - // // - // // resulting in: - // // d, _ := AsMap(data) - // // - - // d, _ := data.(map[string]interface{}) - // parts, _ := d["parts"].([]map[string]interface{}) - - // for _, p := range parts { - - // typ, _ := p["type"].(string) - // id, _ := p["id"].(string) - - // // detect the type of part and handle each case - // switch p["type"] { - // case "valve": - - // size := float32(p["size"].(float64)) - // rating := int(p["rating"].(int64)) - - // valve := &valve{ - // Type: typ, - // ID: id, - // Size: size, - // Rating: rating, - // } - - // o.parts = append(o.parts, valve) - - // case "pipe": - - // length := float32(p["length"].(float64)) - // diameter := int(p["diameter"].(int64)) - - // pipe := &pipe{ - // Type: typ, - // ID: id, - // Length: length, - // Diameter: diameter, - // } - - // o.parts = append(o.parts, pipe) - - // case "cable": - - // length := int(p["length"].(int64)) - // rating := float32(p["rating"].(float64)) - - // cable := &cable{ - // Type: typ, - // ID: id, - // Length: length, - // Rating: rating, - // } - - // o.parts = append(o.parts, cable) - - // } - // } - - // return nil - // } - - // type parts []part - - // type part interface { - // Name() string - // } - - // type valve struct { - // Type string - // ID string - // Size float32 - // Rating int - // } - - // func (v *valve) Name() string { - // return fmt.Sprintf("VALVE: %s", v.ID) - // } - - // type pipe struct { - // Type string - // ID string - // Length float32 - // Diameter int - // } - - // func (p *pipe) Name() string { - // return fmt.Sprintf("PIPE: %s", p.ID) - // } - - // type cable struct { - // Type string - // ID string - // Length int - // Rating float32 - // } - - // func (c *cable) Name() string { - // return fmt.Sprintf("CABLE: %s", c.ID) - // } - - // Output: - // 4 - // VALVE: valve-1 - // VALVE: valve-2 - // PIPE: pipe-1 - // CABLE: cable-1 - -} - -type order struct { - // NOTE `order.parts` is a private slice of type `part` which is an - // interface and may only be loaded from toml using the UnmarshalTOML() - // method of the Umarshaler interface. - parts parts -} - -func (o *order) UnmarshalTOML(data interface{}) error { - - // NOTE the example below contains detailed type casting to show how - // the 'data' is retrieved. In operational use, a type cast wrapper - // may be preferred e.g. 
- // - // func AsMap(v interface{}) (map[string]interface{}, error) { - // return v.(map[string]interface{}) - // } - // - // resulting in: - // d, _ := AsMap(data) - // - - d, _ := data.(map[string]interface{}) - parts, _ := d["parts"].([]map[string]interface{}) - - for _, p := range parts { - - typ, _ := p["type"].(string) - id, _ := p["id"].(string) - - // detect the type of part and handle each case - switch p["type"] { - case "valve": - - size := float32(p["size"].(float64)) - rating := int(p["rating"].(int64)) - - valve := &valve{ - Type: typ, - ID: id, - Size: size, - Rating: rating, - } - - o.parts = append(o.parts, valve) - - case "pipe": - - length := float32(p["length"].(float64)) - diameter := int(p["diameter"].(int64)) - - pipe := &pipe{ - Type: typ, - ID: id, - Length: length, - Diameter: diameter, - } - - o.parts = append(o.parts, pipe) - - case "cable": - - length := int(p["length"].(int64)) - rating := float32(p["rating"].(float64)) - - cable := &cable{ - Type: typ, - ID: id, - Length: length, - Rating: rating, - } - - o.parts = append(o.parts, cable) - - } - } - - return nil -} - -type parts []part - -type part interface { - Name() string -} - -type valve struct { - Type string - ID string - Size float32 - Rating int -} - -func (v *valve) Name() string { - return fmt.Sprintf("VALVE: %s", v.ID) -} - -type pipe struct { - Type string - ID string - Length float32 - Diameter int -} - -func (p *pipe) Name() string { - return fmt.Sprintf("PIPE: %s", p.ID) -} - -type cable struct { - Type string - ID string - Length int - Rating float32 -} - -func (c *cable) Name() string { - return fmt.Sprintf("CABLE: %s", c.ID) -} diff --git a/vendor/github.com/BurntSushi/toml/doc.go b/vendor/github.com/BurntSushi/toml/doc.go deleted file mode 100644 index b371f396..00000000 --- a/vendor/github.com/BurntSushi/toml/doc.go +++ /dev/null @@ -1,27 +0,0 @@ -/* -Package toml provides facilities for decoding and encoding TOML configuration -files via reflection. There is also support for delaying decoding with -the Primitive type, and querying the set of keys in a TOML document with the -MetaData type. - -The specification implemented: https://github.com/toml-lang/toml - -The sub-command github.com/BurntSushi/toml/cmd/tomlv can be used to verify -whether a file is a valid TOML document. It can also be used to print the -type of each key in a TOML document. - -Testing - -There are two important types of tests used for this package. The first is -contained inside '*_test.go' files and uses the standard Go unit testing -framework. These tests are primarily devoted to holistically testing the -decoder and encoder. - -The second type of testing is used to verify the implementation's adherence -to the TOML specification. These tests have been factored into their own -project: https://github.com/BurntSushi/toml-test - -The reason the tests are in a separate project is so that they can be used by -any implementation of TOML. Namely, it is language agnostic. 
-*/ -package toml diff --git a/vendor/github.com/BurntSushi/toml/encode.go b/vendor/github.com/BurntSushi/toml/encode.go deleted file mode 100644 index d905c21a..00000000 --- a/vendor/github.com/BurntSushi/toml/encode.go +++ /dev/null @@ -1,568 +0,0 @@ -package toml - -import ( - "bufio" - "errors" - "fmt" - "io" - "reflect" - "sort" - "strconv" - "strings" - "time" -) - -type tomlEncodeError struct{ error } - -var ( - errArrayMixedElementTypes = errors.New( - "toml: cannot encode array with mixed element types") - errArrayNilElement = errors.New( - "toml: cannot encode array with nil element") - errNonString = errors.New( - "toml: cannot encode a map with non-string key type") - errAnonNonStruct = errors.New( - "toml: cannot encode an anonymous field that is not a struct") - errArrayNoTable = errors.New( - "toml: TOML array element cannot contain a table") - errNoKey = errors.New( - "toml: top-level values must be Go maps or structs") - errAnything = errors.New("") // used in testing -) - -var quotedReplacer = strings.NewReplacer( - "\t", "\\t", - "\n", "\\n", - "\r", "\\r", - "\"", "\\\"", - "\\", "\\\\", -) - -// Encoder controls the encoding of Go values to a TOML document to some -// io.Writer. -// -// The indentation level can be controlled with the Indent field. -type Encoder struct { - // A single indentation level. By default it is two spaces. - Indent string - - // hasWritten is whether we have written any output to w yet. - hasWritten bool - w *bufio.Writer -} - -// NewEncoder returns a TOML encoder that encodes Go values to the io.Writer -// given. By default, a single indentation level is 2 spaces. -func NewEncoder(w io.Writer) *Encoder { - return &Encoder{ - w: bufio.NewWriter(w), - Indent: " ", - } -} - -// Encode writes a TOML representation of the Go value to the underlying -// io.Writer. If the value given cannot be encoded to a valid TOML document, -// then an error is returned. -// -// The mapping between Go values and TOML values should be precisely the same -// as for the Decode* functions. Similarly, the TextMarshaler interface is -// supported by encoding the resulting bytes as strings. (If you want to write -// arbitrary binary data then you will need to use something like base64 since -// TOML does not have any binary types.) -// -// When encoding TOML hashes (i.e., Go maps or structs), keys without any -// sub-hashes are encoded first. -// -// If a Go map is encoded, then its keys are sorted alphabetically for -// deterministic output. More control over this behavior may be provided if -// there is demand for it. -// -// Encoding Go values without a corresponding TOML representation---like map -// types with non-string keys---will cause an error to be returned. Similarly -// for mixed arrays/slices, arrays/slices with nil elements, embedded -// non-struct types and nested slices containing maps or structs. -// (e.g., [][]map[string]string is not allowed but []map[string]string is OK -// and so is []map[string][]string.) -func (enc *Encoder) Encode(v interface{}) error { - rv := eindirect(reflect.ValueOf(v)) - if err := enc.safeEncode(Key([]string{}), rv); err != nil { - return err - } - return enc.w.Flush() -} - -func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) { - defer func() { - if r := recover(); r != nil { - if terr, ok := r.(tomlEncodeError); ok { - err = terr.error - return - } - panic(r) - } - }() - enc.encode(key, rv) - return nil -} - -func (enc *Encoder) encode(key Key, rv reflect.Value) { - // Special case. 
Time needs to be in ISO8601 format. - // Special case. If we can marshal the type to text, then we used that. - // Basically, this prevents the encoder for handling these types as - // generic structs (or whatever the underlying type of a TextMarshaler is). - switch rv.Interface().(type) { - case time.Time, TextMarshaler: - enc.keyEqElement(key, rv) - return - } - - k := rv.Kind() - switch k { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, - reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, - reflect.Uint64, - reflect.Float32, reflect.Float64, reflect.String, reflect.Bool: - enc.keyEqElement(key, rv) - case reflect.Array, reflect.Slice: - if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) { - enc.eArrayOfTables(key, rv) - } else { - enc.keyEqElement(key, rv) - } - case reflect.Interface: - if rv.IsNil() { - return - } - enc.encode(key, rv.Elem()) - case reflect.Map: - if rv.IsNil() { - return - } - enc.eTable(key, rv) - case reflect.Ptr: - if rv.IsNil() { - return - } - enc.encode(key, rv.Elem()) - case reflect.Struct: - enc.eTable(key, rv) - default: - panic(e("unsupported type for key '%s': %s", key, k)) - } -} - -// eElement encodes any value that can be an array element (primitives and -// arrays). -func (enc *Encoder) eElement(rv reflect.Value) { - switch v := rv.Interface().(type) { - case time.Time: - // Special case time.Time as a primitive. Has to come before - // TextMarshaler below because time.Time implements - // encoding.TextMarshaler, but we need to always use UTC. - enc.wf(v.UTC().Format("2006-01-02T15:04:05Z")) - return - case TextMarshaler: - // Special case. Use text marshaler if it's available for this value. - if s, err := v.MarshalText(); err != nil { - encPanic(err) - } else { - enc.writeQuoted(string(s)) - } - return - } - switch rv.Kind() { - case reflect.Bool: - enc.wf(strconv.FormatBool(rv.Bool())) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, - reflect.Int64: - enc.wf(strconv.FormatInt(rv.Int(), 10)) - case reflect.Uint, reflect.Uint8, reflect.Uint16, - reflect.Uint32, reflect.Uint64: - enc.wf(strconv.FormatUint(rv.Uint(), 10)) - case reflect.Float32: - enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 32))) - case reflect.Float64: - enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 64))) - case reflect.Array, reflect.Slice: - enc.eArrayOrSliceElement(rv) - case reflect.Interface: - enc.eElement(rv.Elem()) - case reflect.String: - enc.writeQuoted(rv.String()) - default: - panic(e("unexpected primitive type: %s", rv.Kind())) - } -} - -// By the TOML spec, all floats must have a decimal with at least one -// number on either side. 
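Two guarantees documented above are visible end to end: encoded map keys come out sorted for deterministic output, and floats always carry a decimal part (the floatAddDecimal helper just below turns "3" into "3.0"). A small sketch of both, under the usual import path:

package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/BurntSushi/toml"
)

func main() {
	var buf bytes.Buffer
	cfg := map[string]interface{}{
		"zeta":  1,
		"alpha": 2,
		"pi":    3.0, // encoded as 3.0, never a bare "3"
	}
	if err := toml.NewEncoder(&buf).Encode(cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Print(buf.String())
	// alpha = 2
	// pi = 3.0
	// zeta = 1
}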
-func floatAddDecimal(fstr string) string { - if !strings.Contains(fstr, ".") { - return fstr + ".0" - } - return fstr -} - -func (enc *Encoder) writeQuoted(s string) { - enc.wf("\"%s\"", quotedReplacer.Replace(s)) -} - -func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) { - length := rv.Len() - enc.wf("[") - for i := 0; i < length; i++ { - elem := rv.Index(i) - enc.eElement(elem) - if i != length-1 { - enc.wf(", ") - } - } - enc.wf("]") -} - -func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) { - if len(key) == 0 { - encPanic(errNoKey) - } - for i := 0; i < rv.Len(); i++ { - trv := rv.Index(i) - if isNil(trv) { - continue - } - panicIfInvalidKey(key) - enc.newline() - enc.wf("%s[[%s]]", enc.indentStr(key), key.maybeQuotedAll()) - enc.newline() - enc.eMapOrStruct(key, trv) - } -} - -func (enc *Encoder) eTable(key Key, rv reflect.Value) { - panicIfInvalidKey(key) - if len(key) == 1 { - // Output an extra newline between top-level tables. - // (The newline isn't written if nothing else has been written though.) - enc.newline() - } - if len(key) > 0 { - enc.wf("%s[%s]", enc.indentStr(key), key.maybeQuotedAll()) - enc.newline() - } - enc.eMapOrStruct(key, rv) -} - -func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value) { - switch rv := eindirect(rv); rv.Kind() { - case reflect.Map: - enc.eMap(key, rv) - case reflect.Struct: - enc.eStruct(key, rv) - default: - panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String()) - } -} - -func (enc *Encoder) eMap(key Key, rv reflect.Value) { - rt := rv.Type() - if rt.Key().Kind() != reflect.String { - encPanic(errNonString) - } - - // Sort keys so that we have deterministic output. And write keys directly - // underneath this key first, before writing sub-structs or sub-maps. - var mapKeysDirect, mapKeysSub []string - for _, mapKey := range rv.MapKeys() { - k := mapKey.String() - if typeIsHash(tomlTypeOfGo(rv.MapIndex(mapKey))) { - mapKeysSub = append(mapKeysSub, k) - } else { - mapKeysDirect = append(mapKeysDirect, k) - } - } - - var writeMapKeys = func(mapKeys []string) { - sort.Strings(mapKeys) - for _, mapKey := range mapKeys { - mrv := rv.MapIndex(reflect.ValueOf(mapKey)) - if isNil(mrv) { - // Don't write anything for nil fields. - continue - } - enc.encode(key.add(mapKey), mrv) - } - } - writeMapKeys(mapKeysDirect) - writeMapKeys(mapKeysSub) -} - -func (enc *Encoder) eStruct(key Key, rv reflect.Value) { - // Write keys for fields directly under this key first, because if we write - // a field that creates a new table, then all keys under it will be in that - // table (not the one we're writing here). - rt := rv.Type() - var fieldsDirect, fieldsSub [][]int - var addFields func(rt reflect.Type, rv reflect.Value, start []int) - addFields = func(rt reflect.Type, rv reflect.Value, start []int) { - for i := 0; i < rt.NumField(); i++ { - f := rt.Field(i) - // skip unexported fields - if f.PkgPath != "" && !f.Anonymous { - continue - } - frv := rv.Field(i) - if f.Anonymous { - t := f.Type - switch t.Kind() { - case reflect.Struct: - // Treat anonymous struct fields with - // tag names as though they are not - // anonymous, like encoding/json does. - if getOptions(f.Tag).name == "" { - addFields(t, frv, f.Index) - continue - } - case reflect.Ptr: - if t.Elem().Kind() == reflect.Struct && - getOptions(f.Tag).name == "" { - if !frv.IsNil() { - addFields(t.Elem(), frv.Elem(), f.Index) - } - continue - } - // Fall through to the normal field encoding logic below - // for non-struct anonymous fields. 
- } - } - - if typeIsHash(tomlTypeOfGo(frv)) { - fieldsSub = append(fieldsSub, append(start, f.Index...)) - } else { - fieldsDirect = append(fieldsDirect, append(start, f.Index...)) - } - } - } - addFields(rt, rv, nil) - - var writeFields = func(fields [][]int) { - for _, fieldIndex := range fields { - sft := rt.FieldByIndex(fieldIndex) - sf := rv.FieldByIndex(fieldIndex) - if isNil(sf) { - // Don't write anything for nil fields. - continue - } - - opts := getOptions(sft.Tag) - if opts.skip { - continue - } - keyName := sft.Name - if opts.name != "" { - keyName = opts.name - } - if opts.omitempty && isEmpty(sf) { - continue - } - if opts.omitzero && isZero(sf) { - continue - } - - enc.encode(key.add(keyName), sf) - } - } - writeFields(fieldsDirect) - writeFields(fieldsSub) -} - -// tomlTypeName returns the TOML type name of the Go value's type. It is -// used to determine whether the types of array elements are mixed (which is -// forbidden). If the Go value is nil, then it is illegal for it to be an array -// element, and valueIsNil is returned as true. - -// Returns the TOML type of a Go value. The type may be `nil`, which means -// no concrete TOML type could be found. -func tomlTypeOfGo(rv reflect.Value) tomlType { - if isNil(rv) || !rv.IsValid() { - return nil - } - switch rv.Kind() { - case reflect.Bool: - return tomlBool - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, - reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, - reflect.Uint64: - return tomlInteger - case reflect.Float32, reflect.Float64: - return tomlFloat - case reflect.Array, reflect.Slice: - if typeEqual(tomlHash, tomlArrayType(rv)) { - return tomlArrayHash - } - return tomlArray - case reflect.Ptr, reflect.Interface: - return tomlTypeOfGo(rv.Elem()) - case reflect.String: - return tomlString - case reflect.Map: - return tomlHash - case reflect.Struct: - switch rv.Interface().(type) { - case time.Time: - return tomlDatetime - case TextMarshaler: - return tomlString - default: - return tomlHash - } - default: - panic("unexpected reflect.Kind: " + rv.Kind().String()) - } -} - -// tomlArrayType returns the element type of a TOML array. The type returned -// may be nil if it cannot be determined (e.g., a nil slice or a zero length -// slize). This function may also panic if it finds a type that cannot be -// expressed in TOML (such as nil elements, heterogeneous arrays or directly -// nested arrays of tables). -func tomlArrayType(rv reflect.Value) tomlType { - if isNil(rv) || !rv.IsValid() || rv.Len() == 0 { - return nil - } - firstType := tomlTypeOfGo(rv.Index(0)) - if firstType == nil { - encPanic(errArrayNilElement) - } - - rvlen := rv.Len() - for i := 1; i < rvlen; i++ { - elem := rv.Index(i) - switch elemType := tomlTypeOfGo(elem); { - case elemType == nil: - encPanic(errArrayNilElement) - case !typeEqual(firstType, elemType): - encPanic(errArrayMixedElementTypes) - } - } - // If we have a nested array, then we must make sure that the nested - // array contains ONLY primitives. - // This checks arbitrarily nested arrays. 
- if typeEqual(firstType, tomlArray) || typeEqual(firstType, tomlArrayHash) { - nest := tomlArrayType(eindirect(rv.Index(0))) - if typeEqual(nest, tomlHash) || typeEqual(nest, tomlArrayHash) { - encPanic(errArrayNoTable) - } - } - return firstType -} - -type tagOptions struct { - skip bool // "-" - name string - omitempty bool - omitzero bool -} - -func getOptions(tag reflect.StructTag) tagOptions { - t := tag.Get("toml") - if t == "-" { - return tagOptions{skip: true} - } - var opts tagOptions - parts := strings.Split(t, ",") - opts.name = parts[0] - for _, s := range parts[1:] { - switch s { - case "omitempty": - opts.omitempty = true - case "omitzero": - opts.omitzero = true - } - } - return opts -} - -func isZero(rv reflect.Value) bool { - switch rv.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return rv.Int() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: - return rv.Uint() == 0 - case reflect.Float32, reflect.Float64: - return rv.Float() == 0.0 - } - return false -} - -func isEmpty(rv reflect.Value) bool { - switch rv.Kind() { - case reflect.Array, reflect.Slice, reflect.Map, reflect.String: - return rv.Len() == 0 - case reflect.Bool: - return !rv.Bool() - } - return false -} - -func (enc *Encoder) newline() { - if enc.hasWritten { - enc.wf("\n") - } -} - -func (enc *Encoder) keyEqElement(key Key, val reflect.Value) { - if len(key) == 0 { - encPanic(errNoKey) - } - panicIfInvalidKey(key) - enc.wf("%s%s = ", enc.indentStr(key), key.maybeQuoted(len(key)-1)) - enc.eElement(val) - enc.newline() -} - -func (enc *Encoder) wf(format string, v ...interface{}) { - if _, err := fmt.Fprintf(enc.w, format, v...); err != nil { - encPanic(err) - } - enc.hasWritten = true -} - -func (enc *Encoder) indentStr(key Key) string { - return strings.Repeat(enc.Indent, len(key)-1) -} - -func encPanic(err error) { - panic(tomlEncodeError{err}) -} - -func eindirect(v reflect.Value) reflect.Value { - switch v.Kind() { - case reflect.Ptr, reflect.Interface: - return eindirect(v.Elem()) - default: - return v - } -} - -func isNil(rv reflect.Value) bool { - switch rv.Kind() { - case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: - return rv.IsNil() - default: - return false - } -} - -func panicIfInvalidKey(key Key) { - for _, k := range key { - if len(k) == 0 { - encPanic(e("Key '%s' is not a valid table name. 
Key names "+ - "cannot be empty.", key.maybeQuotedAll())) - } - } -} - -func isValidKeyName(s string) bool { - return len(s) != 0 -} diff --git a/vendor/github.com/BurntSushi/toml/encode_test.go b/vendor/github.com/BurntSushi/toml/encode_test.go deleted file mode 100644 index 673b7b00..00000000 --- a/vendor/github.com/BurntSushi/toml/encode_test.go +++ /dev/null @@ -1,615 +0,0 @@ -package toml - -import ( - "bytes" - "fmt" - "log" - "net" - "testing" - "time" -) - -func TestEncodeRoundTrip(t *testing.T) { - type Config struct { - Age int - Cats []string - Pi float64 - Perfection []int - DOB time.Time - Ipaddress net.IP - } - - var inputs = Config{ - 13, - []string{"one", "two", "three"}, - 3.145, - []int{11, 2, 3, 4}, - time.Now(), - net.ParseIP("192.168.59.254"), - } - - var firstBuffer bytes.Buffer - e := NewEncoder(&firstBuffer) - err := e.Encode(inputs) - if err != nil { - t.Fatal(err) - } - var outputs Config - if _, err := Decode(firstBuffer.String(), &outputs); err != nil { - t.Logf("Could not decode:\n-----\n%s\n-----\n", - firstBuffer.String()) - t.Fatal(err) - } - - // could test each value individually, but I'm lazy - var secondBuffer bytes.Buffer - e2 := NewEncoder(&secondBuffer) - err = e2.Encode(outputs) - if err != nil { - t.Fatal(err) - } - if firstBuffer.String() != secondBuffer.String() { - t.Error( - firstBuffer.String(), - "\n\n is not identical to\n\n", - secondBuffer.String()) - } -} - -// XXX(burntsushi) -// I think these tests probably should be removed. They are good, but they -// ought to be obsolete by toml-test. -func TestEncode(t *testing.T) { - type Embedded struct { - Int int `toml:"_int"` - } - type NonStruct int - - date := time.Date(2014, 5, 11, 20, 30, 40, 0, time.FixedZone("IST", 3600)) - dateStr := "2014-05-11T19:30:40Z" - - tests := map[string]struct { - input interface{} - wantOutput string - wantError error - }{ - "bool field": { - input: struct { - BoolTrue bool - BoolFalse bool - }{true, false}, - wantOutput: "BoolTrue = true\nBoolFalse = false\n", - }, - "int fields": { - input: struct { - Int int - Int8 int8 - Int16 int16 - Int32 int32 - Int64 int64 - }{1, 2, 3, 4, 5}, - wantOutput: "Int = 1\nInt8 = 2\nInt16 = 3\nInt32 = 4\nInt64 = 5\n", - }, - "uint fields": { - input: struct { - Uint uint - Uint8 uint8 - Uint16 uint16 - Uint32 uint32 - Uint64 uint64 - }{1, 2, 3, 4, 5}, - wantOutput: "Uint = 1\nUint8 = 2\nUint16 = 3\nUint32 = 4" + - "\nUint64 = 5\n", - }, - "float fields": { - input: struct { - Float32 float32 - Float64 float64 - }{1.5, 2.5}, - wantOutput: "Float32 = 1.5\nFloat64 = 2.5\n", - }, - "string field": { - input: struct{ String string }{"foo"}, - wantOutput: "String = \"foo\"\n", - }, - "string field and unexported field": { - input: struct { - String string - unexported int - }{"foo", 0}, - wantOutput: "String = \"foo\"\n", - }, - "datetime field in UTC": { - input: struct{ Date time.Time }{date}, - wantOutput: fmt.Sprintf("Date = %s\n", dateStr), - }, - "datetime field as primitive": { - // Using a map here to fail if isStructOrMap() returns true for - // time.Time. 
- input: map[string]interface{}{ - "Date": date, - "Int": 1, - }, - wantOutput: fmt.Sprintf("Date = %s\nInt = 1\n", dateStr), - }, - "array fields": { - input: struct { - IntArray0 [0]int - IntArray3 [3]int - }{[0]int{}, [3]int{1, 2, 3}}, - wantOutput: "IntArray0 = []\nIntArray3 = [1, 2, 3]\n", - }, - "slice fields": { - input: struct{ IntSliceNil, IntSlice0, IntSlice3 []int }{ - nil, []int{}, []int{1, 2, 3}, - }, - wantOutput: "IntSlice0 = []\nIntSlice3 = [1, 2, 3]\n", - }, - "datetime slices": { - input: struct{ DatetimeSlice []time.Time }{ - []time.Time{date, date}, - }, - wantOutput: fmt.Sprintf("DatetimeSlice = [%s, %s]\n", - dateStr, dateStr), - }, - "nested arrays and slices": { - input: struct { - SliceOfArrays [][2]int - ArrayOfSlices [2][]int - SliceOfArraysOfSlices [][2][]int - ArrayOfSlicesOfArrays [2][][2]int - SliceOfMixedArrays [][2]interface{} - ArrayOfMixedSlices [2][]interface{} - }{ - [][2]int{{1, 2}, {3, 4}}, - [2][]int{{1, 2}, {3, 4}}, - [][2][]int{ - { - {1, 2}, {3, 4}, - }, - { - {5, 6}, {7, 8}, - }, - }, - [2][][2]int{ - { - {1, 2}, {3, 4}, - }, - { - {5, 6}, {7, 8}, - }, - }, - [][2]interface{}{ - {1, 2}, {"a", "b"}, - }, - [2][]interface{}{ - {1, 2}, {"a", "b"}, - }, - }, - wantOutput: `SliceOfArrays = [[1, 2], [3, 4]] -ArrayOfSlices = [[1, 2], [3, 4]] -SliceOfArraysOfSlices = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]] -ArrayOfSlicesOfArrays = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]] -SliceOfMixedArrays = [[1, 2], ["a", "b"]] -ArrayOfMixedSlices = [[1, 2], ["a", "b"]] -`, - }, - "empty slice": { - input: struct{ Empty []interface{} }{[]interface{}{}}, - wantOutput: "Empty = []\n", - }, - "(error) slice with element type mismatch (string and integer)": { - input: struct{ Mixed []interface{} }{[]interface{}{1, "a"}}, - wantError: errArrayMixedElementTypes, - }, - "(error) slice with element type mismatch (integer and float)": { - input: struct{ Mixed []interface{} }{[]interface{}{1, 2.5}}, - wantError: errArrayMixedElementTypes, - }, - "slice with elems of differing Go types, same TOML types": { - input: struct { - MixedInts []interface{} - MixedFloats []interface{} - }{ - []interface{}{ - int(1), int8(2), int16(3), int32(4), int64(5), - uint(1), uint8(2), uint16(3), uint32(4), uint64(5), - }, - []interface{}{float32(1.5), float64(2.5)}, - }, - wantOutput: "MixedInts = [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]\n" + - "MixedFloats = [1.5, 2.5]\n", - }, - "(error) slice w/ element type mismatch (one is nested array)": { - input: struct{ Mixed []interface{} }{ - []interface{}{1, []interface{}{2}}, - }, - wantError: errArrayMixedElementTypes, - }, - "(error) slice with 1 nil element": { - input: struct{ NilElement1 []interface{} }{[]interface{}{nil}}, - wantError: errArrayNilElement, - }, - "(error) slice with 1 nil element (and other non-nil elements)": { - input: struct{ NilElement []interface{} }{ - []interface{}{1, nil}, - }, - wantError: errArrayNilElement, - }, - "simple map": { - input: map[string]int{"a": 1, "b": 2}, - wantOutput: "a = 1\nb = 2\n", - }, - "map with interface{} value type": { - input: map[string]interface{}{"a": 1, "b": "c"}, - wantOutput: "a = 1\nb = \"c\"\n", - }, - "map with interface{} value type, some of which are structs": { - input: map[string]interface{}{ - "a": struct{ Int int }{2}, - "b": 1, - }, - wantOutput: "b = 1\n\n[a]\n Int = 2\n", - }, - "nested map": { - input: map[string]map[string]int{ - "a": {"b": 1}, - "c": {"d": 2}, - }, - wantOutput: "[a]\n b = 1\n\n[c]\n d = 2\n", - }, - "nested struct": { - input: struct{ Struct struct{ Int int } }{ - 
struct{ Int int }{1}, - }, - wantOutput: "[Struct]\n Int = 1\n", - }, - "nested struct and non-struct field": { - input: struct { - Struct struct{ Int int } - Bool bool - }{struct{ Int int }{1}, true}, - wantOutput: "Bool = true\n\n[Struct]\n Int = 1\n", - }, - "2 nested structs": { - input: struct{ Struct1, Struct2 struct{ Int int } }{ - struct{ Int int }{1}, struct{ Int int }{2}, - }, - wantOutput: "[Struct1]\n Int = 1\n\n[Struct2]\n Int = 2\n", - }, - "deeply nested structs": { - input: struct { - Struct1, Struct2 struct{ Struct3 *struct{ Int int } } - }{ - struct{ Struct3 *struct{ Int int } }{&struct{ Int int }{1}}, - struct{ Struct3 *struct{ Int int } }{nil}, - }, - wantOutput: "[Struct1]\n [Struct1.Struct3]\n Int = 1" + - "\n\n[Struct2]\n", - }, - "nested struct with nil struct elem": { - input: struct { - Struct struct{ Inner *struct{ Int int } } - }{ - struct{ Inner *struct{ Int int } }{nil}, - }, - wantOutput: "[Struct]\n", - }, - "nested struct with no fields": { - input: struct { - Struct struct{ Inner struct{} } - }{ - struct{ Inner struct{} }{struct{}{}}, - }, - wantOutput: "[Struct]\n [Struct.Inner]\n", - }, - "struct with tags": { - input: struct { - Struct struct { - Int int `toml:"_int"` - } `toml:"_struct"` - Bool bool `toml:"_bool"` - }{ - struct { - Int int `toml:"_int"` - }{1}, true, - }, - wantOutput: "_bool = true\n\n[_struct]\n _int = 1\n", - }, - "embedded struct": { - input: struct{ Embedded }{Embedded{1}}, - wantOutput: "_int = 1\n", - }, - "embedded *struct": { - input: struct{ *Embedded }{&Embedded{1}}, - wantOutput: "_int = 1\n", - }, - "nested embedded struct": { - input: struct { - Struct struct{ Embedded } `toml:"_struct"` - }{struct{ Embedded }{Embedded{1}}}, - wantOutput: "[_struct]\n _int = 1\n", - }, - "nested embedded *struct": { - input: struct { - Struct struct{ *Embedded } `toml:"_struct"` - }{struct{ *Embedded }{&Embedded{1}}}, - wantOutput: "[_struct]\n _int = 1\n", - }, - "embedded non-struct": { - input: struct{ NonStruct }{5}, - wantOutput: "NonStruct = 5\n", - }, - "array of tables": { - input: struct { - Structs []*struct{ Int int } `toml:"struct"` - }{ - []*struct{ Int int }{{1}, {3}}, - }, - wantOutput: "[[struct]]\n Int = 1\n\n[[struct]]\n Int = 3\n", - }, - "array of tables order": { - input: map[string]interface{}{ - "map": map[string]interface{}{ - "zero": 5, - "arr": []map[string]int{ - { - "friend": 5, - }, - }, - }, - }, - wantOutput: "[map]\n zero = 5\n\n [[map.arr]]\n friend = 5\n", - }, - "(error) top-level slice": { - input: []struct{ Int int }{{1}, {2}, {3}}, - wantError: errNoKey, - }, - "(error) slice of slice": { - input: struct { - Slices [][]struct{ Int int } - }{ - [][]struct{ Int int }{{{1}}, {{2}}, {{3}}}, - }, - wantError: errArrayNoTable, - }, - "(error) map no string key": { - input: map[int]string{1: ""}, - wantError: errNonString, - }, - "(error) empty key name": { - input: map[string]int{"": 1}, - wantError: errAnything, - }, - "(error) empty map name": { - input: map[string]interface{}{ - "": map[string]int{"v": 1}, - }, - wantError: errAnything, - }, - } - for label, test := range tests { - encodeExpected(t, label, test.input, test.wantOutput, test.wantError) - } -} - -func TestEncodeNestedTableArrays(t *testing.T) { - type song struct { - Name string `toml:"name"` - } - type album struct { - Name string `toml:"name"` - Songs []song `toml:"songs"` - } - type springsteen struct { - Albums []album `toml:"albums"` - } - value := springsteen{ - []album{ - {"Born to Run", - []song{{"Jungleland"}, {"Meeting Across the 
River"}}}, - {"Born in the USA", - []song{{"Glory Days"}, {"Dancing in the Dark"}}}, - }, - } - expected := `[[albums]] - name = "Born to Run" - - [[albums.songs]] - name = "Jungleland" - - [[albums.songs]] - name = "Meeting Across the River" - -[[albums]] - name = "Born in the USA" - - [[albums.songs]] - name = "Glory Days" - - [[albums.songs]] - name = "Dancing in the Dark" -` - encodeExpected(t, "nested table arrays", value, expected, nil) -} - -func TestEncodeArrayHashWithNormalHashOrder(t *testing.T) { - type Alpha struct { - V int - } - type Beta struct { - V int - } - type Conf struct { - V int - A Alpha - B []Beta - } - - val := Conf{ - V: 1, - A: Alpha{2}, - B: []Beta{{3}}, - } - expected := "V = 1\n\n[A]\n V = 2\n\n[[B]]\n V = 3\n" - encodeExpected(t, "array hash with normal hash order", val, expected, nil) -} - -func TestEncodeWithOmitEmpty(t *testing.T) { - type simple struct { - Bool bool `toml:"bool,omitempty"` - String string `toml:"string,omitempty"` - Array [0]byte `toml:"array,omitempty"` - Slice []int `toml:"slice,omitempty"` - Map map[string]string `toml:"map,omitempty"` - } - - var v simple - encodeExpected(t, "fields with omitempty are omitted when empty", v, "", nil) - v = simple{ - Bool: true, - String: " ", - Slice: []int{2, 3, 4}, - Map: map[string]string{"foo": "bar"}, - } - expected := `bool = true -string = " " -slice = [2, 3, 4] - -[map] - foo = "bar" -` - encodeExpected(t, "fields with omitempty are not omitted when non-empty", - v, expected, nil) -} - -func TestEncodeWithOmitZero(t *testing.T) { - type simple struct { - Number int `toml:"number,omitzero"` - Real float64 `toml:"real,omitzero"` - Unsigned uint `toml:"unsigned,omitzero"` - } - - value := simple{0, 0.0, uint(0)} - expected := "" - - encodeExpected(t, "simple with omitzero, all zero", value, expected, nil) - - value.Number = 10 - value.Real = 20 - value.Unsigned = 5 - expected = `number = 10 -real = 20.0 -unsigned = 5 -` - encodeExpected(t, "simple with omitzero, non-zero", value, expected, nil) -} - -func TestEncodeOmitemptyWithEmptyName(t *testing.T) { - type simple struct { - S []int `toml:",omitempty"` - } - v := simple{[]int{1, 2, 3}} - expected := "S = [1, 2, 3]\n" - encodeExpected(t, "simple with omitempty, no name, non-empty field", - v, expected, nil) -} - -func TestEncodeAnonymousStruct(t *testing.T) { - type Inner struct{ N int } - type Outer0 struct{ Inner } - type Outer1 struct { - Inner `toml:"inner"` - } - - v0 := Outer0{Inner{3}} - expected := "N = 3\n" - encodeExpected(t, "embedded anonymous untagged struct", v0, expected, nil) - - v1 := Outer1{Inner{3}} - expected = "[inner]\n N = 3\n" - encodeExpected(t, "embedded anonymous tagged struct", v1, expected, nil) -} - -func TestEncodeAnonymousStructPointerField(t *testing.T) { - type Inner struct{ N int } - type Outer0 struct{ *Inner } - type Outer1 struct { - *Inner `toml:"inner"` - } - - v0 := Outer0{} - expected := "" - encodeExpected(t, "nil anonymous untagged struct pointer field", v0, expected, nil) - - v0 = Outer0{&Inner{3}} - expected = "N = 3\n" - encodeExpected(t, "non-nil anonymous untagged struct pointer field", v0, expected, nil) - - v1 := Outer1{} - expected = "" - encodeExpected(t, "nil anonymous tagged struct pointer field", v1, expected, nil) - - v1 = Outer1{&Inner{3}} - expected = "[inner]\n N = 3\n" - encodeExpected(t, "non-nil anonymous tagged struct pointer field", v1, expected, nil) -} - -func TestEncodeIgnoredFields(t *testing.T) { - type simple struct { - Number int `toml:"-"` - } - value := simple{} - 
expected := "" - encodeExpected(t, "ignored field", value, expected, nil) -} - -func encodeExpected( - t *testing.T, label string, val interface{}, wantStr string, wantErr error, -) { - var buf bytes.Buffer - enc := NewEncoder(&buf) - err := enc.Encode(val) - if err != wantErr { - if wantErr != nil { - if wantErr == errAnything && err != nil { - return - } - t.Errorf("%s: want Encode error %v, got %v", label, wantErr, err) - } else { - t.Errorf("%s: Encode failed: %s", label, err) - } - } - if err != nil { - return - } - if got := buf.String(); wantStr != got { - t.Errorf("%s: want\n-----\n%q\n-----\nbut got\n-----\n%q\n-----\n", - label, wantStr, got) - } -} - -func ExampleEncoder_Encode() { - date, _ := time.Parse(time.RFC822, "14 Mar 10 18:00 UTC") - var config = map[string]interface{}{ - "date": date, - "counts": []int{1, 1, 2, 3, 5, 8}, - "hash": map[string]string{ - "key1": "val1", - "key2": "val2", - }, - } - buf := new(bytes.Buffer) - if err := NewEncoder(buf).Encode(config); err != nil { - log.Fatal(err) - } - fmt.Println(buf.String()) - - // Output: - // counts = [1, 1, 2, 3, 5, 8] - // date = 2010-03-14T18:00:00Z - // - // [hash] - // key1 = "val1" - // key2 = "val2" -} diff --git a/vendor/github.com/BurntSushi/toml/encoding_types.go b/vendor/github.com/BurntSushi/toml/encoding_types.go deleted file mode 100644 index d36e1dd6..00000000 --- a/vendor/github.com/BurntSushi/toml/encoding_types.go +++ /dev/null @@ -1,19 +0,0 @@ -// +build go1.2 - -package toml - -// In order to support Go 1.1, we define our own TextMarshaler and -// TextUnmarshaler types. For Go 1.2+, we just alias them with the -// standard library interfaces. - -import ( - "encoding" -) - -// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here -// so that Go 1.1 can be supported. -type TextMarshaler encoding.TextMarshaler - -// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined -// here so that Go 1.1 can be supported. -type TextUnmarshaler encoding.TextUnmarshaler diff --git a/vendor/github.com/BurntSushi/toml/encoding_types_1.1.go b/vendor/github.com/BurntSushi/toml/encoding_types_1.1.go deleted file mode 100644 index e8d503d0..00000000 --- a/vendor/github.com/BurntSushi/toml/encoding_types_1.1.go +++ /dev/null @@ -1,18 +0,0 @@ -// +build !go1.2 - -package toml - -// These interfaces were introduced in Go 1.2, so we add them manually when -// compiling for Go 1.1. - -// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here -// so that Go 1.1 can be supported. -type TextMarshaler interface { - MarshalText() (text []byte, err error) -} - -// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined -// here so that Go 1.1 can be supported. 
-type TextUnmarshaler interface { - UnmarshalText(text []byte) error -} diff --git a/vendor/github.com/BurntSushi/toml/lex.go b/vendor/github.com/BurntSushi/toml/lex.go deleted file mode 100644 index 6dee7fc7..00000000 --- a/vendor/github.com/BurntSushi/toml/lex.go +++ /dev/null @@ -1,953 +0,0 @@ -package toml - -import ( - "fmt" - "strings" - "unicode" - "unicode/utf8" -) - -type itemType int - -const ( - itemError itemType = iota - itemNIL // used in the parser to indicate no type - itemEOF - itemText - itemString - itemRawString - itemMultilineString - itemRawMultilineString - itemBool - itemInteger - itemFloat - itemDatetime - itemArray // the start of an array - itemArrayEnd - itemTableStart - itemTableEnd - itemArrayTableStart - itemArrayTableEnd - itemKeyStart - itemCommentStart - itemInlineTableStart - itemInlineTableEnd -) - -const ( - eof = 0 - comma = ',' - tableStart = '[' - tableEnd = ']' - arrayTableStart = '[' - arrayTableEnd = ']' - tableSep = '.' - keySep = '=' - arrayStart = '[' - arrayEnd = ']' - commentStart = '#' - stringStart = '"' - stringEnd = '"' - rawStringStart = '\'' - rawStringEnd = '\'' - inlineTableStart = '{' - inlineTableEnd = '}' -) - -type stateFn func(lx *lexer) stateFn - -type lexer struct { - input string - start int - pos int - line int - state stateFn - items chan item - - // Allow for backing up up to three runes. - // This is necessary because TOML contains 3-rune tokens (""" and '''). - prevWidths [3]int - nprev int // how many of prevWidths are in use - // If we emit an eof, we can still back up, but it is not OK to call - // next again. - atEOF bool - - // A stack of state functions used to maintain context. - // The idea is to reuse parts of the state machine in various places. - // For example, values can appear at the top level or within arbitrarily - // nested arrays. The last state on the stack is used after a value has - // been lexed. Similarly for comments. - stack []stateFn -} - -type item struct { - typ itemType - val string - line int -} - -func (lx *lexer) nextItem() item { - for { - select { - case item := <-lx.items: - return item - default: - lx.state = lx.state(lx) - } - } -} - -func lex(input string) *lexer { - lx := &lexer{ - input: input, - state: lexTop, - line: 1, - items: make(chan item, 10), - stack: make([]stateFn, 0, 10), - } - return lx -} - -func (lx *lexer) push(state stateFn) { - lx.stack = append(lx.stack, state) -} - -func (lx *lexer) pop() stateFn { - if len(lx.stack) == 0 { - return lx.errorf("BUG in lexer: no states to pop") - } - last := lx.stack[len(lx.stack)-1] - lx.stack = lx.stack[0 : len(lx.stack)-1] - return last -} - -func (lx *lexer) current() string { - return lx.input[lx.start:lx.pos] -} - -func (lx *lexer) emit(typ itemType) { - lx.items <- item{typ, lx.current(), lx.line} - lx.start = lx.pos -} - -func (lx *lexer) emitTrim(typ itemType) { - lx.items <- item{typ, strings.TrimSpace(lx.current()), lx.line} - lx.start = lx.pos -} - -func (lx *lexer) next() (r rune) { - if lx.atEOF { - panic("next called after EOF") - } - if lx.pos >= len(lx.input) { - lx.atEOF = true - return eof - } - - if lx.input[lx.pos] == '\n' { - lx.line++ - } - lx.prevWidths[2] = lx.prevWidths[1] - lx.prevWidths[1] = lx.prevWidths[0] - if lx.nprev < 3 { - lx.nprev++ - } - r, w := utf8.DecodeRuneInString(lx.input[lx.pos:]) - lx.prevWidths[0] = w - lx.pos += w - return r -} - -// ignore skips over the pending input before this point. -func (lx *lexer) ignore() { - lx.start = lx.pos -} - -// backup steps back one rune. 
Can be called only twice between calls to next. -func (lx *lexer) backup() { - if lx.atEOF { - lx.atEOF = false - return - } - if lx.nprev < 1 { - panic("backed up too far") - } - w := lx.prevWidths[0] - lx.prevWidths[0] = lx.prevWidths[1] - lx.prevWidths[1] = lx.prevWidths[2] - lx.nprev-- - lx.pos -= w - if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' { - lx.line-- - } -} - -// accept consumes the next rune if it's equal to `valid`. -func (lx *lexer) accept(valid rune) bool { - if lx.next() == valid { - return true - } - lx.backup() - return false -} - -// peek returns but does not consume the next rune in the input. -func (lx *lexer) peek() rune { - r := lx.next() - lx.backup() - return r -} - -// skip ignores all input that matches the given predicate. -func (lx *lexer) skip(pred func(rune) bool) { - for { - r := lx.next() - if pred(r) { - continue - } - lx.backup() - lx.ignore() - return - } -} - -// errorf stops all lexing by emitting an error and returning `nil`. -// Note that any value that is a character is escaped if it's a special -// character (newlines, tabs, etc.). -func (lx *lexer) errorf(format string, values ...interface{}) stateFn { - lx.items <- item{ - itemError, - fmt.Sprintf(format, values...), - lx.line, - } - return nil -} - -// lexTop consumes elements at the top level of TOML data. -func lexTop(lx *lexer) stateFn { - r := lx.next() - if isWhitespace(r) || isNL(r) { - return lexSkip(lx, lexTop) - } - switch r { - case commentStart: - lx.push(lexTop) - return lexCommentStart - case tableStart: - return lexTableStart - case eof: - if lx.pos > lx.start { - return lx.errorf("unexpected EOF") - } - lx.emit(itemEOF) - return nil - } - - // At this point, the only valid item can be a key, so we back up - // and let the key lexer do the rest. - lx.backup() - lx.push(lexTopEnd) - return lexKeyStart -} - -// lexTopEnd is entered whenever a top-level item has been consumed. (A value -// or a table.) It must see only whitespace, and will turn back to lexTop -// upon a newline. If it sees EOF, it will quit the lexer successfully. -func lexTopEnd(lx *lexer) stateFn { - r := lx.next() - switch { - case r == commentStart: - // a comment will read to a newline for us. - lx.push(lexTop) - return lexCommentStart - case isWhitespace(r): - return lexTopEnd - case isNL(r): - lx.ignore() - return lexTop - case r == eof: - lx.emit(itemEOF) - return nil - } - return lx.errorf("expected a top-level item to end with a newline, "+ - "comment, or EOF, but got %q instead", r) -} - -// lexTable lexes the beginning of a table. Namely, it makes sure that -// it starts with a character other than '.' and ']'. -// It assumes that '[' has already been consumed. -// It also handles the case that this is an item in an array of tables. -// e.g., '[[name]]'. 
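The next/backup pair above is the heart of the lexer: it records the byte widths of the most recent runes so multi-byte UTF-8 input can be un-read safely, which the three-quote string terminators depend on. A stripped-down, standalone sketch of the same scheme (not the package's internal type):

package main

import (
	"fmt"
	"unicode/utf8"
)

// miniLexer keeps the widths of the last three runes, as the real lexer
// does, so tokens like `"""` can be backed out rune by rune.
type miniLexer struct {
	input      string
	pos        int
	prevWidths [3]int
	nprev      int
}

func (lx *miniLexer) next() rune {
	if lx.pos >= len(lx.input) {
		return 0 // eof
	}
	r, w := utf8.DecodeRuneInString(lx.input[lx.pos:])
	lx.prevWidths[2], lx.prevWidths[1], lx.prevWidths[0] =
		lx.prevWidths[1], lx.prevWidths[0], w
	if lx.nprev < 3 {
		lx.nprev++
	}
	lx.pos += w
	return r
}

func (lx *miniLexer) backup() {
	if lx.nprev < 1 {
		panic("backed up too far")
	}
	lx.pos -= lx.prevWidths[0]
	lx.prevWidths[0], lx.prevWidths[1] = lx.prevWidths[1], lx.prevWidths[2]
	lx.nprev--
}

func main() {
	lx := &miniLexer{input: `"""日本"""`}
	lx.next()
	lx.next()
	lx.next()                      // consumed the opening """
	fmt.Println(string(lx.next())) // 日 (three bytes, one rune)
	lx.backup()                    // un-reads all three bytes correctly
	fmt.Println(string(lx.next())) // 日 again
}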
-func lexTableStart(lx *lexer) stateFn { - if lx.peek() == arrayTableStart { - lx.next() - lx.emit(itemArrayTableStart) - lx.push(lexArrayTableEnd) - } else { - lx.emit(itemTableStart) - lx.push(lexTableEnd) - } - return lexTableNameStart -} - -func lexTableEnd(lx *lexer) stateFn { - lx.emit(itemTableEnd) - return lexTopEnd -} - -func lexArrayTableEnd(lx *lexer) stateFn { - if r := lx.next(); r != arrayTableEnd { - return lx.errorf("expected end of table array name delimiter %q, "+ - "but got %q instead", arrayTableEnd, r) - } - lx.emit(itemArrayTableEnd) - return lexTopEnd -} - -func lexTableNameStart(lx *lexer) stateFn { - lx.skip(isWhitespace) - switch r := lx.peek(); { - case r == tableEnd || r == eof: - return lx.errorf("unexpected end of table name " + - "(table names cannot be empty)") - case r == tableSep: - return lx.errorf("unexpected table separator " + - "(table names cannot be empty)") - case r == stringStart || r == rawStringStart: - lx.ignore() - lx.push(lexTableNameEnd) - return lexValue // reuse string lexing - default: - return lexBareTableName - } -} - -// lexBareTableName lexes the name of a table. It assumes that at least one -// valid character for the table has already been read. -func lexBareTableName(lx *lexer) stateFn { - r := lx.next() - if isBareKeyChar(r) { - return lexBareTableName - } - lx.backup() - lx.emit(itemText) - return lexTableNameEnd -} - -// lexTableNameEnd reads the end of a piece of a table name, optionally -// consuming whitespace. -func lexTableNameEnd(lx *lexer) stateFn { - lx.skip(isWhitespace) - switch r := lx.next(); { - case isWhitespace(r): - return lexTableNameEnd - case r == tableSep: - lx.ignore() - return lexTableNameStart - case r == tableEnd: - return lx.pop() - default: - return lx.errorf("expected '.' or ']' to end table name, "+ - "but got %q instead", r) - } -} - -// lexKeyStart consumes a key name up until the first non-whitespace character. -// lexKeyStart will ignore whitespace. -func lexKeyStart(lx *lexer) stateFn { - r := lx.peek() - switch { - case r == keySep: - return lx.errorf("unexpected key separator %q", keySep) - case isWhitespace(r) || isNL(r): - lx.next() - return lexSkip(lx, lexKeyStart) - case r == stringStart || r == rawStringStart: - lx.ignore() - lx.emit(itemKeyStart) - lx.push(lexKeyEnd) - return lexValue // reuse string lexing - default: - lx.ignore() - lx.emit(itemKeyStart) - return lexBareKey - } -} - -// lexBareKey consumes the text of a bare key. Assumes that the first character -// (which is not whitespace) has not yet been consumed. -func lexBareKey(lx *lexer) stateFn { - switch r := lx.next(); { - case isBareKeyChar(r): - return lexBareKey - case isWhitespace(r): - lx.backup() - lx.emit(itemText) - return lexKeyEnd - case r == keySep: - lx.backup() - lx.emit(itemText) - return lexKeyEnd - default: - return lx.errorf("bare keys cannot contain %q", r) - } -} - -// lexKeyEnd consumes the end of a key and trims whitespace (up to the key -// separator). -func lexKeyEnd(lx *lexer) stateFn { - switch r := lx.next(); { - case r == keySep: - return lexSkip(lx, lexValue) - case isWhitespace(r): - return lexSkip(lx, lexKeyEnd) - default: - return lx.errorf("expected key separator %q, but got %q instead", - keySep, r) - } -} - -// lexValue starts the consumption of a value anywhere a value is expected. -// lexValue will ignore whitespace. -// After a value is lexed, the last state on the next is popped and returned. -func lexValue(lx *lexer) stateFn { - // We allow whitespace to precede a value, but NOT newlines. 
- // In array syntax, the array states are responsible for ignoring newlines. - r := lx.next() - switch { - case isWhitespace(r): - return lexSkip(lx, lexValue) - case isDigit(r): - lx.backup() // avoid an extra state and use the same as above - return lexNumberOrDateStart - } - switch r { - case arrayStart: - lx.ignore() - lx.emit(itemArray) - return lexArrayValue - case inlineTableStart: - lx.ignore() - lx.emit(itemInlineTableStart) - return lexInlineTableValue - case stringStart: - if lx.accept(stringStart) { - if lx.accept(stringStart) { - lx.ignore() // Ignore """ - return lexMultilineString - } - lx.backup() - } - lx.ignore() // ignore the '"' - return lexString - case rawStringStart: - if lx.accept(rawStringStart) { - if lx.accept(rawStringStart) { - lx.ignore() // Ignore """ - return lexMultilineRawString - } - lx.backup() - } - lx.ignore() // ignore the "'" - return lexRawString - case '+', '-': - return lexNumberStart - case '.': // special error case, be kind to users - return lx.errorf("floats must start with a digit, not '.'") - } - if unicode.IsLetter(r) { - // Be permissive here; lexBool will give a nice error if the - // user wrote something like - // x = foo - // (i.e. not 'true' or 'false' but is something else word-like.) - lx.backup() - return lexBool - } - return lx.errorf("expected value but found %q instead", r) -} - -// lexArrayValue consumes one value in an array. It assumes that '[' or ',' -// have already been consumed. All whitespace and newlines are ignored. -func lexArrayValue(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r) || isNL(r): - return lexSkip(lx, lexArrayValue) - case r == commentStart: - lx.push(lexArrayValue) - return lexCommentStart - case r == comma: - return lx.errorf("unexpected comma") - case r == arrayEnd: - // NOTE(caleb): The spec isn't clear about whether you can have - // a trailing comma or not, so we'll allow it. - return lexArrayEnd - } - - lx.backup() - lx.push(lexArrayValueEnd) - return lexValue -} - -// lexArrayValueEnd consumes everything between the end of an array value and -// the next value (or the end of the array): it ignores whitespace and newlines -// and expects either a ',' or a ']'. -func lexArrayValueEnd(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r) || isNL(r): - return lexSkip(lx, lexArrayValueEnd) - case r == commentStart: - lx.push(lexArrayValueEnd) - return lexCommentStart - case r == comma: - lx.ignore() - return lexArrayValue // move on to the next value - case r == arrayEnd: - return lexArrayEnd - } - return lx.errorf( - "expected a comma or array terminator %q, but got %q instead", - arrayEnd, r, - ) -} - -// lexArrayEnd finishes the lexing of an array. -// It assumes that a ']' has just been consumed. -func lexArrayEnd(lx *lexer) stateFn { - lx.ignore() - lx.emit(itemArrayEnd) - return lx.pop() -} - -// lexInlineTableValue consumes one key/value pair in an inline table. -// It assumes that '{' or ',' have already been consumed. Whitespace is ignored. 
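The array and inline-table states above encode two user-visible rules: arrays may span lines and tolerate a trailing comma, while inline tables must stay on one line. A quick, illustrative check against Decode:

package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

func main() {
	var v struct {
		A []int
		T map[string]int
	}

	// Arrays may span lines and carry a trailing comma.
	if _, err := toml.Decode("a = [\n  1,\n  2,\n]", &v); err != nil {
		fmt.Println("unexpected:", err)
	}
	fmt.Println(v.A) // [1 2]

	// Inline tables must not contain newlines.
	_, err := toml.Decode("t = {x = 1,\ny = 2}", &v)
	fmt.Println(err != nil) // true: newlines are rejected inside inline tables
}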
-func lexInlineTableValue(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r): - return lexSkip(lx, lexInlineTableValue) - case isNL(r): - return lx.errorf("newlines not allowed within inline tables") - case r == commentStart: - lx.push(lexInlineTableValue) - return lexCommentStart - case r == comma: - return lx.errorf("unexpected comma") - case r == inlineTableEnd: - return lexInlineTableEnd - } - lx.backup() - lx.push(lexInlineTableValueEnd) - return lexKeyStart -} - -// lexInlineTableValueEnd consumes everything between the end of an inline table -// key/value pair and the next pair (or the end of the table): -// it ignores whitespace and expects either a ',' or a '}'. -func lexInlineTableValueEnd(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r): - return lexSkip(lx, lexInlineTableValueEnd) - case isNL(r): - return lx.errorf("newlines not allowed within inline tables") - case r == commentStart: - lx.push(lexInlineTableValueEnd) - return lexCommentStart - case r == comma: - lx.ignore() - return lexInlineTableValue - case r == inlineTableEnd: - return lexInlineTableEnd - } - return lx.errorf("expected a comma or an inline table terminator %q, "+ - "but got %q instead", inlineTableEnd, r) -} - -// lexInlineTableEnd finishes the lexing of an inline table. -// It assumes that a '}' has just been consumed. -func lexInlineTableEnd(lx *lexer) stateFn { - lx.ignore() - lx.emit(itemInlineTableEnd) - return lx.pop() -} - -// lexString consumes the inner contents of a string. It assumes that the -// beginning '"' has already been consumed and ignored. -func lexString(lx *lexer) stateFn { - r := lx.next() - switch { - case r == eof: - return lx.errorf("unexpected EOF") - case isNL(r): - return lx.errorf("strings cannot contain newlines") - case r == '\\': - lx.push(lexString) - return lexStringEscape - case r == stringEnd: - lx.backup() - lx.emit(itemString) - lx.next() - lx.ignore() - return lx.pop() - } - return lexString -} - -// lexMultilineString consumes the inner contents of a string. It assumes that -// the beginning '"""' has already been consumed and ignored. -func lexMultilineString(lx *lexer) stateFn { - switch lx.next() { - case eof: - return lx.errorf("unexpected EOF") - case '\\': - return lexMultilineStringEscape - case stringEnd: - if lx.accept(stringEnd) { - if lx.accept(stringEnd) { - lx.backup() - lx.backup() - lx.backup() - lx.emit(itemMultilineString) - lx.next() - lx.next() - lx.next() - lx.ignore() - return lx.pop() - } - lx.backup() - } - } - return lexMultilineString -} - -// lexRawString consumes a raw string. Nothing can be escaped in such a string. -// It assumes that the beginning "'" has already been consumed and ignored. -func lexRawString(lx *lexer) stateFn { - r := lx.next() - switch { - case r == eof: - return lx.errorf("unexpected EOF") - case isNL(r): - return lx.errorf("strings cannot contain newlines") - case r == rawStringEnd: - lx.backup() - lx.emit(itemRawString) - lx.next() - lx.ignore() - return lx.pop() - } - return lexRawString -} - -// lexMultilineRawString consumes a raw string. Nothing can be escaped in such -// a string. It assumes that the beginning "'''" has already been consumed and -// ignored. 
-func lexMultilineRawString(lx *lexer) stateFn { - switch lx.next() { - case eof: - return lx.errorf("unexpected EOF") - case rawStringEnd: - if lx.accept(rawStringEnd) { - if lx.accept(rawStringEnd) { - lx.backup() - lx.backup() - lx.backup() - lx.emit(itemRawMultilineString) - lx.next() - lx.next() - lx.next() - lx.ignore() - return lx.pop() - } - lx.backup() - } - } - return lexMultilineRawString -} - -// lexMultilineStringEscape consumes an escaped character. It assumes that the -// preceding '\\' has already been consumed. -func lexMultilineStringEscape(lx *lexer) stateFn { - // Handle the special case first: - if isNL(lx.next()) { - return lexMultilineString - } - lx.backup() - lx.push(lexMultilineString) - return lexStringEscape(lx) -} - -func lexStringEscape(lx *lexer) stateFn { - r := lx.next() - switch r { - case 'b': - fallthrough - case 't': - fallthrough - case 'n': - fallthrough - case 'f': - fallthrough - case 'r': - fallthrough - case '"': - fallthrough - case '\\': - return lx.pop() - case 'u': - return lexShortUnicodeEscape - case 'U': - return lexLongUnicodeEscape - } - return lx.errorf("invalid escape character %q; only the following "+ - "escape characters are allowed: "+ - `\b, \t, \n, \f, \r, \", \\, \uXXXX, and \UXXXXXXXX`, r) -} - -func lexShortUnicodeEscape(lx *lexer) stateFn { - var r rune - for i := 0; i < 4; i++ { - r = lx.next() - if !isHexadecimal(r) { - return lx.errorf(`expected four hexadecimal digits after '\u', `+ - "but got %q instead", lx.current()) - } - } - return lx.pop() -} - -func lexLongUnicodeEscape(lx *lexer) stateFn { - var r rune - for i := 0; i < 8; i++ { - r = lx.next() - if !isHexadecimal(r) { - return lx.errorf(`expected eight hexadecimal digits after '\U', `+ - "but got %q instead", lx.current()) - } - } - return lx.pop() -} - -// lexNumberOrDateStart consumes either an integer, a float, or datetime. -func lexNumberOrDateStart(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexNumberOrDate - } - switch r { - case '_': - return lexNumber - case 'e', 'E': - return lexFloat - case '.': - return lx.errorf("floats must start with a digit, not '.'") - } - return lx.errorf("expected a digit but got %q", r) -} - -// lexNumberOrDate consumes either an integer, float or datetime. -func lexNumberOrDate(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexNumberOrDate - } - switch r { - case '-': - return lexDatetime - case '_': - return lexNumber - case '.', 'e', 'E': - return lexFloat - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexDatetime consumes a Datetime, to a first approximation. -// The parser validates that it matches one of the accepted formats. -func lexDatetime(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexDatetime - } - switch r { - case '-', 'T', ':', '.', 'Z': - return lexDatetime - } - - lx.backup() - lx.emit(itemDatetime) - return lx.pop() -} - -// lexNumberStart consumes either an integer or a float. It assumes that a sign -// has already been read, but that *no* digits have been consumed. -// lexNumberStart will move to the appropriate integer or float states. -func lexNumberStart(lx *lexer) stateFn { - // We MUST see a digit. Even floats have to start with a digit. - r := lx.next() - if !isDigit(r) { - if r == '.' { - return lx.errorf("floats must start with a digit, not '.'") - } - return lx.errorf("expected a digit but got %q", r) - } - return lexNumber -} - -// lexNumber consumes an integer or a float after seeing the first digit. 
-func lexNumber(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexNumber - } - switch r { - case '_': - return lexNumber - case '.', 'e', 'E': - return lexFloat - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexFloat consumes the elements of a float. It allows any sequence of -// float-like characters, so floats emitted by the lexer are only a first -// approximation and must be validated by the parser. -func lexFloat(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexFloat - } - switch r { - case '_', '.', '-', '+', 'e', 'E': - return lexFloat - } - - lx.backup() - lx.emit(itemFloat) - return lx.pop() -} - -// lexBool consumes a bool string: 'true' or 'false'. -func lexBool(lx *lexer) stateFn { - var rs []rune - for { - r := lx.next() - if !unicode.IsLetter(r) { - lx.backup() - break - } - rs = append(rs, r) - } - s := string(rs) - switch s { - case "true", "false": - lx.emit(itemBool) - return lx.pop() - } - return lx.errorf("expected value but found %q instead", s) -} - -// lexCommentStart begins the lexing of a comment. It will emit -// itemCommentStart and consume no characters, passing control to lexComment. -func lexCommentStart(lx *lexer) stateFn { - lx.ignore() - lx.emit(itemCommentStart) - return lexComment -} - -// lexComment lexes an entire comment. It assumes that '#' has been consumed. -// It will consume *up to* the first newline character, and pass control -// back to the last state on the stack. -func lexComment(lx *lexer) stateFn { - r := lx.peek() - if isNL(r) || r == eof { - lx.emit(itemText) - return lx.pop() - } - lx.next() - return lexComment -} - -// lexSkip ignores all slurped input and moves on to the next state. -func lexSkip(lx *lexer, nextState stateFn) stateFn { - return func(lx *lexer) stateFn { - lx.ignore() - return nextState - } -} - -// isWhitespace returns true if `r` is a whitespace character according -// to the spec.
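All of the lexing functions in this file share the `stateFn` shape that `lexSkip` manipulates: each state consumes some input and returns its successor, with `push`/`pop` acting as a return stack. The driver loop itself is outside this excerpt, so the following is only a minimal, self-contained toy of the pattern (the `run`-style loop and the `lexTop` name are illustrative assumptions, not this file's code):

```go
package main

import "fmt"

// lexer is a toy stand-in for the real lexer in this file.
type lexer struct {
	input string
	pos   int
}

// stateFn mirrors the shape used above: a state consumes input and
// hands back the next state; nil ends the machine.
type stateFn func(*lexer) stateFn

func lexTop(lx *lexer) stateFn {
	if lx.pos >= len(lx.input) {
		return nil // EOF terminates the machine
	}
	fmt.Printf("consumed %q\n", lx.input[lx.pos])
	lx.pos++
	return lexTop // each step names its successor
}

func main() {
	lx := &lexer{input: "ab"}
	for state := stateFn(lexTop); state != nil; {
		state = state(lx)
	}
}
```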
-func isWhitespace(r rune) bool { - return r == '\t' || r == ' ' -} - -func isNL(r rune) bool { - return r == '\n' || r == '\r' -} - -func isDigit(r rune) bool { - return r >= '0' && r <= '9' -} - -func isHexadecimal(r rune) bool { - return (r >= '0' && r <= '9') || - (r >= 'a' && r <= 'f') || - (r >= 'A' && r <= 'F') -} - -func isBareKeyChar(r rune) bool { - return (r >= 'A' && r <= 'Z') || - (r >= 'a' && r <= 'z') || - (r >= '0' && r <= '9') || - r == '_' || - r == '-' -} - -func (itype itemType) String() string { - switch itype { - case itemError: - return "Error" - case itemNIL: - return "NIL" - case itemEOF: - return "EOF" - case itemText: - return "Text" - case itemString, itemRawString, itemMultilineString, itemRawMultilineString: - return "String" - case itemBool: - return "Bool" - case itemInteger: - return "Integer" - case itemFloat: - return "Float" - case itemDatetime: - return "DateTime" - case itemTableStart: - return "TableStart" - case itemTableEnd: - return "TableEnd" - case itemKeyStart: - return "KeyStart" - case itemArray: - return "Array" - case itemArrayEnd: - return "ArrayEnd" - case itemCommentStart: - return "CommentStart" - } - panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype))) -} - -func (item item) String() string { - return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val) -} diff --git a/vendor/github.com/BurntSushi/toml/parse.go b/vendor/github.com/BurntSushi/toml/parse.go deleted file mode 100644 index 50869ef9..00000000 --- a/vendor/github.com/BurntSushi/toml/parse.go +++ /dev/null @@ -1,592 +0,0 @@ -package toml - -import ( - "fmt" - "strconv" - "strings" - "time" - "unicode" - "unicode/utf8" -) - -type parser struct { - mapping map[string]interface{} - types map[string]tomlType - lx *lexer - - // A list of keys in the order that they appear in the TOML data. - ordered []Key - - // the full key for the current hash in scope - context Key - - // the base key name for everything except hashes - currentKey string - - // rough approximation of line number - approxLine int - - // A map of 'key.group.names' to whether they were created implicitly. 
- implicits map[string]bool -} - -type parseError string - -func (pe parseError) Error() string { - return string(pe) -} - -func parse(data string) (p *parser, err error) { - defer func() { - if r := recover(); r != nil { - var ok bool - if err, ok = r.(parseError); ok { - return - } - panic(r) - } - }() - - p = &parser{ - mapping: make(map[string]interface{}), - types: make(map[string]tomlType), - lx: lex(data), - ordered: make([]Key, 0), - implicits: make(map[string]bool), - } - for { - item := p.next() - if item.typ == itemEOF { - break - } - p.topLevel(item) - } - - return p, nil -} - -func (p *parser) panicf(format string, v ...interface{}) { - msg := fmt.Sprintf("Near line %d (last key parsed '%s'): %s", - p.approxLine, p.current(), fmt.Sprintf(format, v...)) - panic(parseError(msg)) -} - -func (p *parser) next() item { - it := p.lx.nextItem() - if it.typ == itemError { - p.panicf("%s", it.val) - } - return it -} - -func (p *parser) bug(format string, v ...interface{}) { - panic(fmt.Sprintf("BUG: "+format+"\n\n", v...)) -} - -func (p *parser) expect(typ itemType) item { - it := p.next() - p.assertEqual(typ, it.typ) - return it -} - -func (p *parser) assertEqual(expected, got itemType) { - if expected != got { - p.bug("Expected '%s' but got '%s'.", expected, got) - } -} - -func (p *parser) topLevel(item item) { - switch item.typ { - case itemCommentStart: - p.approxLine = item.line - p.expect(itemText) - case itemTableStart: - kg := p.next() - p.approxLine = kg.line - - var key Key - for ; kg.typ != itemTableEnd && kg.typ != itemEOF; kg = p.next() { - key = append(key, p.keyString(kg)) - } - p.assertEqual(itemTableEnd, kg.typ) - - p.establishContext(key, false) - p.setType("", tomlHash) - p.ordered = append(p.ordered, key) - case itemArrayTableStart: - kg := p.next() - p.approxLine = kg.line - - var key Key - for ; kg.typ != itemArrayTableEnd && kg.typ != itemEOF; kg = p.next() { - key = append(key, p.keyString(kg)) - } - p.assertEqual(itemArrayTableEnd, kg.typ) - - p.establishContext(key, true) - p.setType("", tomlArrayHash) - p.ordered = append(p.ordered, key) - case itemKeyStart: - kname := p.next() - p.approxLine = kname.line - p.currentKey = p.keyString(kname) - - val, typ := p.value(p.next()) - p.setValue(p.currentKey, val) - p.setType(p.currentKey, typ) - p.ordered = append(p.ordered, p.context.add(p.currentKey)) - p.currentKey = "" - default: - p.bug("Unexpected type at top level: %s", item.typ) - } -} - -// Gets a string for a key (or part of a key in a table name). -func (p *parser) keyString(it item) string { - switch it.typ { - case itemText: - return it.val - case itemString, itemMultilineString, - itemRawString, itemRawMultilineString: - s, _ := p.value(it) - return s.(string) - default: - p.bug("Unexpected key type: %s", it.typ) - panic("unreachable") - } -} - -// value translates an expected value from the lexer into a Go value wrapped -// as an empty interface. 
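The `parse` function above relies on a recover idiom: internal code panics with the typed `parseError`, and only that type is converted back into an ordinary `error`, while any other panic value is re-raised as a genuine bug. A standalone sketch of the idiom (the `capture` helper is hypothetical, not part of this package):

```go
package main

import "fmt"

type parseError string

func (pe parseError) Error() string { return string(pe) }

// capture runs fn and converts a panic(parseError) back into an
// error, re-raising any other panic value, as parse() does above.
func capture(fn func()) (err error) {
	defer func() {
		if r := recover(); r != nil {
			pe, ok := r.(parseError)
			if !ok {
				panic(r) // not ours: propagate
			}
			err = pe
		}
	}()
	fn()
	return nil
}

func main() {
	err := capture(func() { panic(parseError("Near line 3: bad value")) })
	fmt.Println(err) // Near line 3: bad value
}
```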
-func (p *parser) value(it item) (interface{}, tomlType) { - switch it.typ { - case itemString: - return p.replaceEscapes(it.val), p.typeOfPrimitive(it) - case itemMultilineString: - trimmed := stripFirstNewline(stripEscapedWhitespace(it.val)) - return p.replaceEscapes(trimmed), p.typeOfPrimitive(it) - case itemRawString: - return it.val, p.typeOfPrimitive(it) - case itemRawMultilineString: - return stripFirstNewline(it.val), p.typeOfPrimitive(it) - case itemBool: - switch it.val { - case "true": - return true, p.typeOfPrimitive(it) - case "false": - return false, p.typeOfPrimitive(it) - } - p.bug("Expected boolean value, but got '%s'.", it.val) - case itemInteger: - if !numUnderscoresOK(it.val) { - p.panicf("Invalid integer %q: underscores must be surrounded by digits", - it.val) - } - val := strings.Replace(it.val, "_", "", -1) - num, err := strconv.ParseInt(val, 10, 64) - if err != nil { - // Distinguish integer values. Normally, it'd be a bug if the lexer - // provides an invalid integer, but it's possible that the number is - // out of range of valid values (which the lexer cannot determine). - // So mark the former as a bug but the latter as a legitimate user - // error. - if e, ok := err.(*strconv.NumError); ok && - e.Err == strconv.ErrRange { - - p.panicf("Integer '%s' is out of the range of 64-bit "+ - "signed integers.", it.val) - } else { - p.bug("Expected integer value, but got '%s'.", it.val) - } - } - return num, p.typeOfPrimitive(it) - case itemFloat: - parts := strings.FieldsFunc(it.val, func(r rune) bool { - switch r { - case '.', 'e', 'E': - return true - } - return false - }) - for _, part := range parts { - if !numUnderscoresOK(part) { - p.panicf("Invalid float %q: underscores must be "+ - "surrounded by digits", it.val) - } - } - if !numPeriodsOK(it.val) { - // As a special case, numbers like '123.' or '1.e2', - // which are valid as far as Go/strconv are concerned, - // must be rejected because TOML says that a fractional - // part consists of '.' followed by 1+ digits. - p.panicf("Invalid float %q: '.' 
must be followed "+ - "by one or more digits", it.val) - } - val := strings.Replace(it.val, "_", "", -1) - num, err := strconv.ParseFloat(val, 64) - if err != nil { - if e, ok := err.(*strconv.NumError); ok && - e.Err == strconv.ErrRange { - - p.panicf("Float '%s' is out of the range of 64-bit "+ - "IEEE-754 floating-point numbers.", it.val) - } else { - p.panicf("Invalid float value: %q", it.val) - } - } - return num, p.typeOfPrimitive(it) - case itemDatetime: - var t time.Time - var ok bool - var err error - for _, format := range []string{ - "2006-01-02T15:04:05Z07:00", - "2006-01-02T15:04:05", - "2006-01-02", - } { - t, err = time.ParseInLocation(format, it.val, time.Local) - if err == nil { - ok = true - break - } - } - if !ok { - p.panicf("Invalid TOML Datetime: %q.", it.val) - } - return t, p.typeOfPrimitive(it) - case itemArray: - array := make([]interface{}, 0) - types := make([]tomlType, 0) - - for it = p.next(); it.typ != itemArrayEnd; it = p.next() { - if it.typ == itemCommentStart { - p.expect(itemText) - continue - } - - val, typ := p.value(it) - array = append(array, val) - types = append(types, typ) - } - return array, p.typeOfArray(types) - case itemInlineTableStart: - var ( - hash = make(map[string]interface{}) - outerContext = p.context - outerKey = p.currentKey - ) - - p.context = append(p.context, p.currentKey) - p.currentKey = "" - for it := p.next(); it.typ != itemInlineTableEnd; it = p.next() { - if it.typ != itemKeyStart { - p.bug("Expected key start but instead found %q, around line %d", - it.val, p.approxLine) - } - if it.typ == itemCommentStart { - p.expect(itemText) - continue - } - - // retrieve key - k := p.next() - p.approxLine = k.line - kname := p.keyString(k) - - // retrieve value - p.currentKey = kname - val, typ := p.value(p.next()) - // make sure we keep metadata up to date - p.setType(kname, typ) - p.ordered = append(p.ordered, p.context.add(p.currentKey)) - hash[kname] = val - } - p.context = outerContext - p.currentKey = outerKey - return hash, tomlHash - } - p.bug("Unexpected value type: %s", it.typ) - panic("unreachable") -} - -// numUnderscoresOK checks whether each underscore in s is surrounded by -// characters that are not underscores. -func numUnderscoresOK(s string) bool { - accept := false - for _, r := range s { - if r == '_' { - if !accept { - return false - } - accept = false - continue - } - accept = true - } - return accept -} - -// numPeriodsOK checks whether every period in s is followed by a digit. -func numPeriodsOK(s string) bool { - period := false - for _, r := range s { - if period && !isDigit(r) { - return false - } - period = r == '.' - } - return !period -} - -// establishContext sets the current context of the parser, -// where the context is either a hash or an array of hashes. Which one is -// set depends on the value of the `array` parameter. -// -// Establishing the context also makes sure that the key isn't a duplicate, and -// will create implicit hashes automatically. -func (p *parser) establishContext(key Key, array bool) { - var ok bool - - // Always start at the top level and drill down for our context. - hashContext := p.mapping - keyContext := make(Key, 0) - - // We only need implicit hashes for key[0:-1] - for _, k := range key[0 : len(key)-1] { - _, ok = hashContext[k] - keyContext = append(keyContext, k) - - // No key? Make an implicit hash and move on. 
- if !ok { - p.addImplicit(keyContext) - hashContext[k] = make(map[string]interface{}) - } - - // If the hash context is actually an array of tables, then set - // the hash context to the last element in that array. - // - // Otherwise, it better be a table, since this MUST be a key group (by - // virtue of it not being the last element in a key). - switch t := hashContext[k].(type) { - case []map[string]interface{}: - hashContext = t[len(t)-1] - case map[string]interface{}: - hashContext = t - default: - p.panicf("Key '%s' was already created as a hash.", keyContext) - } - } - - p.context = keyContext - if array { - // If this is the first element for this array, then allocate a new - // list of tables for it. - k := key[len(key)-1] - if _, ok := hashContext[k]; !ok { - hashContext[k] = make([]map[string]interface{}, 0, 5) - } - - // Add a new table. But make sure the key hasn't already been used - // for something else. - if hash, ok := hashContext[k].([]map[string]interface{}); ok { - hashContext[k] = append(hash, make(map[string]interface{})) - } else { - p.panicf("Key '%s' was already created and cannot be used as "+ - "an array.", keyContext) - } - } else { - p.setValue(key[len(key)-1], make(map[string]interface{})) - } - p.context = append(p.context, key[len(key)-1]) -} - -// setValue sets the given key to the given value in the current context. -// It will make sure that the key hasn't already been defined, and account for -// implicit key groups. -func (p *parser) setValue(key string, value interface{}) { - var tmpHash interface{} - var ok bool - - hash := p.mapping - keyContext := make(Key, 0) - for _, k := range p.context { - keyContext = append(keyContext, k) - if tmpHash, ok = hash[k]; !ok { - p.bug("Context for key '%s' has not been established.", keyContext) - } - switch t := tmpHash.(type) { - case []map[string]interface{}: - // The context is a table of hashes. Pick the most recent table - // defined as the current hash. - hash = t[len(t)-1] - case map[string]interface{}: - hash = t - default: - p.bug("Expected hash to have type 'map[string]interface{}', but "+ - "it has '%T' instead.", tmpHash) - } - } - keyContext = append(keyContext, key) - - if _, ok := hash[key]; ok { - // Typically, if the given key has already been set, then we have - // to raise an error since duplicate keys are disallowed. However, - // it's possible that a key was previously defined implicitly. In this - // case, it is allowed to be redefined concretely. (See the - // `tests/valid/implicit-and-explicit-after.toml` test in `toml-test`.) - // - // But we have to make sure to stop marking it as an implicit. (So that - // another redefinition provokes an error.) - // - // Note that since it has already been defined (as a hash), we don't - // want to overwrite it. So our business is done. - if p.isImplicit(keyContext) { - p.removeImplicit(keyContext) - return - } - - // Otherwise, we have a concrete key trying to override a previous - // key, which is *always* wrong. - p.panicf("Key '%s' has already been defined.", keyContext) - } - hash[key] = value -} - -// setType sets the type of a particular value at a given key. -// It should be called immediately AFTER setValue. -// -// Note that if `key` is empty, then the type given will be applied to the -// current context (which is either a table or an array of tables).
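`establishContext` and `setValue` above implement TOML's implicit-table rule: a header like `[a.b.c]` silently creates `a` and `a.b`, and such an implicit table may later be defined concretely exactly once. A small sketch of that observable behavior through the package's public `Decode` API (behavior as described by the comments above; the printed output shape is an assumption, not verified here):

```go
package main

import (
	"fmt"

	"github.com/BurntSushi/toml"
)

func main() {
	// [a.b.c] implicitly creates tables "a" and "a.b"; the later
	// [a.b] header is the one permitted concrete redefinition.
	doc := `
[a.b.c]
x = 1

[a.b]
y = 2
`
	var v map[string]interface{}
	if _, err := toml.Decode(doc, &v); err != nil {
		fmt.Println("decode error:", err)
		return
	}
	fmt.Println(v) // nested maps: a -> b -> {c: {x: 1}, y: 2}
}
```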
-func (p *parser) setType(key string, typ tomlType) { - keyContext := make(Key, 0, len(p.context)+1) - for _, k := range p.context { - keyContext = append(keyContext, k) - } - if len(key) > 0 { // allow type setting for hashes - keyContext = append(keyContext, key) - } - p.types[keyContext.String()] = typ -} - -// addImplicit sets the given Key as having been created implicitly. -func (p *parser) addImplicit(key Key) { - p.implicits[key.String()] = true -} - -// removeImplicit stops tagging the given key as having been implicitly -// created. -func (p *parser) removeImplicit(key Key) { - p.implicits[key.String()] = false -} - -// isImplicit returns true if the key group pointed to by the key was created -// implicitly. -func (p *parser) isImplicit(key Key) bool { - return p.implicits[key.String()] -} - -// current returns the full key name of the current context. -func (p *parser) current() string { - if len(p.currentKey) == 0 { - return p.context.String() - } - if len(p.context) == 0 { - return p.currentKey - } - return fmt.Sprintf("%s.%s", p.context, p.currentKey) -} - -func stripFirstNewline(s string) string { - if len(s) == 0 || s[0] != '\n' { - return s - } - return s[1:] -} - -func stripEscapedWhitespace(s string) string { - esc := strings.Split(s, "\\\n") - if len(esc) > 1 { - for i := 1; i < len(esc); i++ { - esc[i] = strings.TrimLeftFunc(esc[i], unicode.IsSpace) - } - } - return strings.Join(esc, "") -} - -func (p *parser) replaceEscapes(str string) string { - var replaced []rune - s := []byte(str) - r := 0 - for r < len(s) { - if s[r] != '\\' { - c, size := utf8.DecodeRune(s[r:]) - r += size - replaced = append(replaced, c) - continue - } - r += 1 - if r >= len(s) { - p.bug("Escape sequence at end of string.") - return "" - } - switch s[r] { - default: - p.bug("Expected valid escape code after \\, but got %q.", s[r]) - return "" - case 'b': - replaced = append(replaced, rune(0x0008)) - r += 1 - case 't': - replaced = append(replaced, rune(0x0009)) - r += 1 - case 'n': - replaced = append(replaced, rune(0x000A)) - r += 1 - case 'f': - replaced = append(replaced, rune(0x000C)) - r += 1 - case 'r': - replaced = append(replaced, rune(0x000D)) - r += 1 - case '"': - replaced = append(replaced, rune(0x0022)) - r += 1 - case '\\': - replaced = append(replaced, rune(0x005C)) - r += 1 - case 'u': - // At this point, we know we have a Unicode escape of the form - // `uXXXX` at [r, r+5). (Because the lexer guarantees this - // for us.) - escaped := p.asciiEscapeToUnicode(s[r+1 : r+5]) - replaced = append(replaced, escaped) - r += 5 - case 'U': - // At this point, we know we have a Unicode escape of the form - // `UXXXXXXXX` at [r, r+9). (Because the lexer guarantees this - // for us.)
- escaped := p.asciiEscapeToUnicode(s[r+1 : r+9]) - replaced = append(replaced, escaped) - r += 9 - } - } - return string(replaced) -} - -func (p *parser) asciiEscapeToUnicode(bs []byte) rune { - s := string(bs) - hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32) - if err != nil { - p.bug("Could not parse '%s' as a hexadecimal number, but the "+ - "lexer claims it's OK: %s", s, err) - } - if !utf8.ValidRune(rune(hex)) { - p.panicf("Escaped character '\\u%s' is not valid UTF-8.", s) - } - return rune(hex) -} - -func isStringType(ty itemType) bool { - return ty == itemString || ty == itemMultilineString || - ty == itemRawString || ty == itemRawMultilineString -} diff --git a/vendor/github.com/BurntSushi/toml/session.vim b/vendor/github.com/BurntSushi/toml/session.vim deleted file mode 100644 index 562164be..00000000 --- a/vendor/github.com/BurntSushi/toml/session.vim +++ /dev/null @@ -1 +0,0 @@ -au BufWritePost *.go silent!make tags > /dev/null 2>&1 diff --git a/vendor/github.com/BurntSushi/toml/type_check.go b/vendor/github.com/BurntSushi/toml/type_check.go deleted file mode 100644 index c73f8afc..00000000 --- a/vendor/github.com/BurntSushi/toml/type_check.go +++ /dev/null @@ -1,91 +0,0 @@ -package toml - -// tomlType represents any Go type that corresponds to a TOML type. -// While the first draft of the TOML spec has a simplistic type system that -// probably doesn't need this level of sophistication, we seem to be militating -// toward adding real composite types. -type tomlType interface { - typeString() string -} - -// typeEqual accepts any two types and returns true if they are equal. -func typeEqual(t1, t2 tomlType) bool { - if t1 == nil || t2 == nil { - return false - } - return t1.typeString() == t2.typeString() -} - -func typeIsHash(t tomlType) bool { - return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash) -} - -type tomlBaseType string - -func (btype tomlBaseType) typeString() string { - return string(btype) -} - -func (btype tomlBaseType) String() string { - return btype.typeString() -} - -var ( - tomlInteger tomlBaseType = "Integer" - tomlFloat tomlBaseType = "Float" - tomlDatetime tomlBaseType = "Datetime" - tomlString tomlBaseType = "String" - tomlBool tomlBaseType = "Bool" - tomlArray tomlBaseType = "Array" - tomlHash tomlBaseType = "Hash" - tomlArrayHash tomlBaseType = "ArrayHash" -) - -// typeOfPrimitive returns a tomlType of any primitive value in TOML. -// Primitive values are: Integer, Float, Datetime, String and Bool. -// -// Passing a lexer item other than the following will cause a BUG message -// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime. -func (p *parser) typeOfPrimitive(lexItem item) tomlType { - switch lexItem.typ { - case itemInteger: - return tomlInteger - case itemFloat: - return tomlFloat - case itemDatetime: - return tomlDatetime - case itemString: - return tomlString - case itemMultilineString: - return tomlString - case itemRawString: - return tomlString - case itemRawMultilineString: - return tomlString - case itemBool: - return tomlBool - } - p.bug("Cannot infer primitive type of lex item '%s'.", lexItem) - panic("unreachable") -} - -// typeOfArray returns a tomlType for an array given a list of types of its -// values. -// -// In the current spec, if an array is homogeneous, then its type is always -// "Array". If the array is not homogeneous, an error is generated. -func (p *parser) typeOfArray(types []tomlType) tomlType { - // Empty arrays are cool. 
- if len(types) == 0 { - return tomlArray - } - - theType := types[0] - for _, t := range types[1:] { - if !typeEqual(theType, t) { - p.panicf("Array contains values of type '%s' and '%s', but "+ - "arrays must be homogeneous.", theType, t) - } - } - return tomlArray -} diff --git a/vendor/github.com/BurntSushi/toml/type_fields.go b/vendor/github.com/BurntSushi/toml/type_fields.go deleted file mode 100644 index 608997c2..00000000 --- a/vendor/github.com/BurntSushi/toml/type_fields.go +++ /dev/null @@ -1,242 +0,0 @@ -package toml - -// Struct field handling is adapted from code in encoding/json: -// -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the Go distribution. - -import ( - "reflect" - "sort" - "sync" -) - -// A field represents a single field found in a struct. -type field struct { - name string // the name of the field (`toml` tag included) - tag bool // whether field has a `toml` tag - index []int // represents the depth of an anonymous field - typ reflect.Type // the type of the field -} - -// byName sorts field by name, breaking ties with depth, -// then breaking ties with "name came from toml tag", then -// breaking ties with index sequence. -type byName []field - -func (x byName) Len() int { return len(x) } - -func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byName) Less(i, j int) bool { - if x[i].name != x[j].name { - return x[i].name < x[j].name - } - if len(x[i].index) != len(x[j].index) { - return len(x[i].index) < len(x[j].index) - } - if x[i].tag != x[j].tag { - return x[i].tag - } - return byIndex(x).Less(i, j) -} - -// byIndex sorts field by index sequence. -type byIndex []field - -func (x byIndex) Len() int { return len(x) } - -func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byIndex) Less(i, j int) bool { - for k, xik := range x[i].index { - if k >= len(x[j].index) { - return false - } - if xik != x[j].index[k] { - return xik < x[j].index[k] - } - } - return len(x[i].index) < len(x[j].index) -} - -// typeFields returns a list of fields that TOML should recognize for the given -// type. The algorithm is breadth-first search over the set of structs to -// include - the top struct and then any reachable anonymous structs. -func typeFields(t reflect.Type) []field { - // Anonymous fields to explore at the current level and the next. - current := []field{} - next := []field{{typ: t}} - - // Count of queued names for current level and the next. - count := map[reflect.Type]int{} - nextCount := map[reflect.Type]int{} - - // Types already visited at an earlier level. - visited := map[reflect.Type]bool{} - - // Fields found. - var fields []field - - for len(next) > 0 { - current, next = next, current[:0] - count, nextCount = nextCount, map[reflect.Type]int{} - - for _, f := range current { - if visited[f.typ] { - continue - } - visited[f.typ] = true - - // Scan f.typ for fields to include. - for i := 0; i < f.typ.NumField(); i++ { - sf := f.typ.Field(i) - if sf.PkgPath != "" && !sf.Anonymous { // unexported - continue - } - opts := getOptions(sf.Tag) - if opts.skip { - continue - } - index := make([]int, len(f.index)+1) - copy(index, f.index) - index[len(f.index)] = i - - ft := sf.Type - if ft.Name() == "" && ft.Kind() == reflect.Ptr { - // Follow pointer. - ft = ft.Elem() - } - - // Record found field and index sequence. 
- if opts.name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { - tagged := opts.name != "" - name := opts.name - if name == "" { - name = sf.Name - } - fields = append(fields, field{name, tagged, index, ft}) - if count[f.typ] > 1 { - // If there were multiple instances, add a second, - // so that the annihilation code will see a duplicate. - // It only cares about the distinction between 1 or 2, - // so don't bother generating any more copies. - fields = append(fields, fields[len(fields)-1]) - } - continue - } - - // Record new anonymous struct to explore in next round. - nextCount[ft]++ - if nextCount[ft] == 1 { - f := field{name: ft.Name(), index: index, typ: ft} - next = append(next, f) - } - } - } - } - - sort.Sort(byName(fields)) - - // Delete all fields that are hidden by the Go rules for embedded fields, - // except that fields with TOML tags are promoted. - - // The fields are sorted in primary order of name, secondary order - // of field index length. Loop over names; for each name, delete - // hidden fields by choosing the one dominant field that survives. - out := fields[:0] - for advance, i := 0, 0; i < len(fields); i += advance { - // One iteration per name. - // Find the sequence of fields with the name of this first field. - fi := fields[i] - name := fi.name - for advance = 1; i+advance < len(fields); advance++ { - fj := fields[i+advance] - if fj.name != name { - break - } - } - if advance == 1 { // Only one field with this name - out = append(out, fi) - continue - } - dominant, ok := dominantField(fields[i : i+advance]) - if ok { - out = append(out, dominant) - } - } - - fields = out - sort.Sort(byIndex(fields)) - - return fields -} - -// dominantField looks through the fields, all of which are known to -// have the same name, to find the single field that dominates the -// others using Go's embedding rules, modified by the presence of -// TOML tags. If there are multiple top-level fields, the boolean -// will be false: This condition is an error in Go and we skip all -// the fields. -func dominantField(fields []field) (field, bool) { - // The fields are sorted in increasing index-length order. The winner - // must therefore be one with the shortest index length. Drop all - // longer entries, which is easy: just truncate the slice. - length := len(fields[0].index) - tagged := -1 // Index of first tagged field. - for i, f := range fields { - if len(f.index) > length { - fields = fields[:i] - break - } - if f.tag { - if tagged >= 0 { - // Multiple tagged fields at the same level: conflict. - // Return no field. - return field{}, false - } - tagged = i - } - } - if tagged >= 0 { - return fields[tagged], true - } - // All remaining fields have the same length. If there's more than one, - // we have a conflict (two fields named "X" at the same level) and we - // return no field. - if len(fields) > 1 { - return field{}, false - } - return fields[0], true -} - -var fieldCache struct { - sync.RWMutex - m map[reflect.Type][]field -} - -// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. -func cachedTypeFields(t reflect.Type) []field { - fieldCache.RLock() - f := fieldCache.m[t] - fieldCache.RUnlock() - if f != nil { - return f - } - - // Compute fields without lock. - // Might duplicate effort but won't hold other computations back. 
- f = typeFields(t) - if f == nil { - f = []field{} - } - - fieldCache.Lock() - if fieldCache.m == nil { - fieldCache.m = map[reflect.Type][]field{} - } - fieldCache.m[t] = f - fieldCache.Unlock() - return f -} diff --git a/vendor/github.com/neelance/graphql-go/.gitignore b/vendor/github.com/neelance/graphql-go/.gitignore deleted file mode 100644 index 32b9e0f1..00000000 --- a/vendor/github.com/neelance/graphql-go/.gitignore +++ /dev/null @@ -1 +0,0 @@ -/internal/tests/testdata/graphql-js diff --git a/vendor/github.com/neelance/graphql-go/LICENSE b/vendor/github.com/neelance/graphql-go/LICENSE deleted file mode 100644 index 3907ceca..00000000 --- a/vendor/github.com/neelance/graphql-go/LICENSE +++ /dev/null @@ -1,24 +0,0 @@ -Copyright (c) 2016 Richard Musiol. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. - * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/github.com/neelance/graphql-go/README.md b/vendor/github.com/neelance/graphql-go/README.md deleted file mode 100644 index eead9b08..00000000 --- a/vendor/github.com/neelance/graphql-go/README.md +++ /dev/null @@ -1,57 +0,0 @@ -# graphql-go - -[![Sourcegraph](https://sourcegraph.com/github.com/neelance/graphql-go/-/badge.svg)](https://sourcegraph.com/github.com/neelance/graphql-go?badge) -[![Build Status](https://semaphoreci.com/api/v1/neelance/graphql-go/branches/master/badge.svg)](https://semaphoreci.com/neelance/graphql-go) -[![GoDoc](https://godoc.org/github.com/neelance/graphql-go?status.svg)](https://godoc.org/github.com/neelance/graphql-go) - -## Status - -The project is under heavy development. It is stable enough so we use it in production at [Sourcegraph](https://sourcegraph.com), but expect changes. - -## Goals - -* [ ] full support of [GraphQL spec (October 2016)](https://facebook.github.io/graphql/) - * [ ] propagation of `null` on resolver errors - * [x] everything else -* [x] minimal API -* [x] support for context.Context and OpenTracing -* [x] early error detection at application startup by type-checking if the given resolver matches the schema -* [x] resolvers are purely based on method sets (e.g. 
it's up to you if you want to resolve a GraphQL interface with a Go interface or a Go struct) -* [ ] nice error messages (no internal panics, even with an invalid schema or resolver; please file a bug if you see an internal panic) - * [x] nice errors on resolver validation - * [ ] nice errors on all invalid schemas - * [ ] nice errors on all invalid queries -* [x] panic handling (a panic in a resolver should not take down the whole app) -* [x] parallel execution of resolvers - -## (Some) Documentation - -### Resolvers - -A resolver must have one method for each field of the GraphQL type it resolves. The method name has to be [exported](https://golang.org/ref/spec#Exported_identifiers) and match the field's name in a non-case-sensitive way. - -The method has up to two arguments: - -- Optional `context.Context` argument. -- Mandatory `*struct { ... }` argument if the corresponding GraphQL field has arguments. The names of the struct fields have to be [exported](https://golang.org/ref/spec#Exported_identifiers) and have to match the names of the GraphQL arguments in a non-case-sensitive way. - -The method has up to two results: - -- The GraphQL field's value as determined by the resolver. -- Optional `error` result. - -Example for a simple resolver method: - -```go -func (r *helloWorldResolver) Hello() string { - return "Hello world!" -} -``` - -The following signature is also allowed: - -```go -func (r *helloWorldResolver) Hello(ctx context.Context) (string, error) { - return "Hello world!", nil -} -``` diff --git a/vendor/github.com/neelance/graphql-go/errors/errors.go b/vendor/github.com/neelance/graphql-go/errors/errors.go deleted file mode 100644 index fdfa6202..00000000 --- a/vendor/github.com/neelance/graphql-go/errors/errors.go +++ /dev/null @@ -1,41 +0,0 @@ -package errors - -import ( - "fmt" -) - -type QueryError struct { - Message string `json:"message"` - Locations []Location `json:"locations,omitempty"` - Path []interface{} `json:"path,omitempty"` - Rule string `json:"-"` - ResolverError error `json:"-"` -} - -type Location struct { - Line int `json:"line"` - Column int `json:"column"` -} - -func (a Location) Before(b Location) bool { - return a.Line < b.Line || (a.Line == b.Line && a.Column < b.Column) -} - -func Errorf(format string, a ...interface{}) *QueryError { - return &QueryError{ - Message: fmt.Sprintf(format, a...), - } -} - -func (err *QueryError) Error() string { - if err == nil { - return "" - } - str := fmt.Sprintf("graphql: %s", err.Message) - for _, loc := range err.Locations { - str += fmt.Sprintf(" (line %d, column %d)", loc.Line, loc.Column) - } - return str -} - -var _ error = &QueryError{} diff --git a/vendor/github.com/neelance/graphql-go/example/starwars/server/server.go b/vendor/github.com/neelance/graphql-go/example/starwars/server/server.go deleted file mode 100644 index ce1a30fe..00000000 --- a/vendor/github.com/neelance/graphql-go/example/starwars/server/server.go +++ /dev/null @@ -1,64 +0,0 @@ -package main - -import ( - "log" - "net/http" - - "github.com/neelance/graphql-go" - "github.com/neelance/graphql-go/example/starwars" - "github.com/neelance/graphql-go/relay" -) - -var schema *graphql.Schema - -func init() { - schema = graphql.MustParseSchema(starwars.Schema, &starwars.Resolver{}) -} - -func main() { - http.Handle("/", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - w.Write(page) - })) - - http.Handle("/query", &relay.Handler{Schema: schema}) - - log.Fatal(http.ListenAndServe(":8080", nil)) -} - -var page = []byte(`
-[GraphiQL HTML page markup elided: stylesheet/script includes and a container div that shows "Loading..." until GraphiQL renders]
-`) diff --git a/vendor/github.com/neelance/graphql-go/example/starwars/starwars.go b/vendor/github.com/neelance/graphql-go/example/starwars/starwars.go deleted file mode 100644 index 0c559373..00000000 --- a/vendor/github.com/neelance/graphql-go/example/starwars/starwars.go +++ /dev/null @@ -1,647 +0,0 @@ -// Package starwars provides an example schema and resolver based on Star Wars characters. -// -// Source: https://github.com/graphql/graphql.github.io/blob/source/site/_core/swapiSchema.js -package starwars - -import ( - "encoding/base64" - "fmt" - "strconv" - "strings" - - graphql "github.com/neelance/graphql-go" -) - -var Schema = ` - schema { - query: Query - mutation: Mutation - } - # The query type, represents all of the entry points into our object graph - type Query { - hero(episode: Episode = NEWHOPE): Character - reviews(episode: Episode!): [Review]! - search(text: String!): [SearchResult]! - character(id: ID!): Character - droid(id: ID!): Droid - human(id: ID!): Human - starship(id: ID!): Starship - } - # The mutation type, represents all updates we can make to our data - type Mutation { - createReview(episode: Episode!, review: ReviewInput!): Review - } - # The episodes in the Star Wars trilogy - enum Episode { - # Star Wars Episode IV: A New Hope, released in 1977. - NEWHOPE - # Star Wars Episode V: The Empire Strikes Back, released in 1980. - EMPIRE - # Star Wars Episode VI: Return of the Jedi, released in 1983. - JEDI - } - # A character from the Star Wars universe - interface Character { - # The ID of the character - id: ID! - # The name of the character - name: String! - # The friends of the character, or an empty list if they have none - friends: [Character] - # The friends of the character exposed as a connection with edges - friendsConnection(first: Int, after: ID): FriendsConnection! - # The movies this character appears in - appearsIn: [Episode!]! - } - # Units of height - enum LengthUnit { - # The standard unit around the world - METER - # Primarily used in the United States - FOOT - } - # A humanoid creature from the Star Wars universe - type Human implements Character { - # The ID of the human - id: ID! - # What this human calls themselves - name: String! - # Height in the preferred unit, default is meters - height(unit: LengthUnit = METER): Float! - # Mass in kilograms, or null if unknown - mass: Float - # This human's friends, or an empty list if they have none - friends: [Character] - # The friends of the human exposed as a connection with edges - friendsConnection(first: Int, after: ID): FriendsConnection! - # The movies this human appears in - appearsIn: [Episode!]! - # A list of starships this person has piloted, or an empty list if none - starships: [Starship] - } - # An autonomous mechanical character in the Star Wars universe - type Droid implements Character { - # The ID of the droid - id: ID! - # What others call this droid - name: String! - # This droid's friends, or an empty list if they have none - friends: [Character] - # The friends of the droid exposed as a connection with edges - friendsConnection(first: Int, after: ID): FriendsConnection! - # The movies this droid appears in - appearsIn: [Episode!]! - # This droid's primary function - primaryFunction: String - } - # A connection object for a character's friends - type FriendsConnection { - # The total number of friends - totalCount: Int! - # The edges for each of the character's friends. - edges: [FriendsEdge] - # A list of the friends, as a convenience when edges are not needed.
- friends: [Character] - # Information for paginating this connection - pageInfo: PageInfo! - } - # An edge object for a character's friends - type FriendsEdge { - # A cursor used for pagination - cursor: ID! - # The character represented by this friendship edge - node: Character - } - # Information for paginating this connection - type PageInfo { - startCursor: ID - endCursor: ID - hasNextPage: Boolean! - } - # Represents a review for a movie - type Review { - # The number of stars this review gave, 1-5 - stars: Int! - # Comment about the movie - commentary: String - } - # The input object sent when someone is creating a new review - input ReviewInput { - # 0-5 stars - stars: Int! - # Comment about the movie, optional - commentary: String - } - type Starship { - # The ID of the starship - id: ID! - # The name of the starship - name: String! - # Length of the starship, along the longest axis - length(unit: LengthUnit = METER): Float! - } - union SearchResult = Human | Droid | Starship -` - -type human struct { - ID graphql.ID - Name string - Friends []graphql.ID - AppearsIn []string - Height float64 - Mass int - Starships []graphql.ID -} - -var humans = []*human{ - { - ID: "1000", - Name: "Luke Skywalker", - Friends: []graphql.ID{"1002", "1003", "2000", "2001"}, - AppearsIn: []string{"NEWHOPE", "EMPIRE", "JEDI"}, - Height: 1.72, - Mass: 77, - Starships: []graphql.ID{"3001", "3003"}, - }, - { - ID: "1001", - Name: "Darth Vader", - Friends: []graphql.ID{"1004"}, - AppearsIn: []string{"NEWHOPE", "EMPIRE", "JEDI"}, - Height: 2.02, - Mass: 136, - Starships: []graphql.ID{"3002"}, - }, - { - ID: "1002", - Name: "Han Solo", - Friends: []graphql.ID{"1000", "1003", "2001"}, - AppearsIn: []string{"NEWHOPE", "EMPIRE", "JEDI"}, - Height: 1.8, - Mass: 80, - Starships: []graphql.ID{"3000", "3003"}, - }, - { - ID: "1003", - Name: "Leia Organa", - Friends: []graphql.ID{"1000", "1002", "2000", "2001"}, - AppearsIn: []string{"NEWHOPE", "EMPIRE", "JEDI"}, - Height: 1.5, - Mass: 49, - }, - { - ID: "1004", - Name: "Wilhuff Tarkin", - Friends: []graphql.ID{"1001"}, - AppearsIn: []string{"NEWHOPE"}, - Height: 1.8, - Mass: 0, - }, -} - -var humanData = make(map[graphql.ID]*human) - -func init() { - for _, h := range humans { - humanData[h.ID] = h - } -} - -type droid struct { - ID graphql.ID - Name string - Friends []graphql.ID - AppearsIn []string - PrimaryFunction string -} - -var droids = []*droid{ - { - ID: "2000", - Name: "C-3PO", - Friends: []graphql.ID{"1000", "1002", "1003", "2001"}, - AppearsIn: []string{"NEWHOPE", "EMPIRE", "JEDI"}, - PrimaryFunction: "Protocol", - }, - { - ID: "2001", - Name: "R2-D2", - Friends: []graphql.ID{"1000", "1002", "1003"}, - AppearsIn: []string{"NEWHOPE", "EMPIRE", "JEDI"}, - PrimaryFunction: "Astromech", - }, -} - -var droidData = make(map[graphql.ID]*droid) - -func init() { - for _, d := range droids { - droidData[d.ID] = d - } -} - -type starship struct { - ID graphql.ID - Name string - Length float64 -} - -var starships = []*starship{ - { - ID: "3000", - Name: "Millennium Falcon", - Length: 34.37, - }, - { - ID: "3001", - Name: "X-Wing", - Length: 12.5, - }, - { - ID: "3002", - Name: "TIE Advanced x1", - Length: 9.2, - }, - { - ID: "3003", - Name: "Imperial shuttle", - Length: 20, - }, -} - -var starshipData = make(map[graphql.ID]*starship) - -func init() { - for _, s := range starships { - starshipData[s.ID] = s - } -} - -type review struct { - stars int32 - commentary *string -} - -var reviews = make(map[string][]*review) - -type Resolver struct{} - -func (r *Resolver) 
Hero(args struct{ Episode string }) *characterResolver { - if args.Episode == "EMPIRE" { - return &characterResolver{&humanResolver{humanData["1000"]}} - } - return &characterResolver{&droidResolver{droidData["2001"]}} -} - -func (r *Resolver) Reviews(args struct{ Episode string }) []*reviewResolver { - var l []*reviewResolver - for _, review := range reviews[args.Episode] { - l = append(l, &reviewResolver{review}) - } - return l -} - -func (r *Resolver) Search(args struct{ Text string }) []*searchResultResolver { - var l []*searchResultResolver - for _, h := range humans { - if strings.Contains(h.Name, args.Text) { - l = append(l, &searchResultResolver{&humanResolver{h}}) - } - } - for _, d := range droids { - if strings.Contains(d.Name, args.Text) { - l = append(l, &searchResultResolver{&droidResolver{d}}) - } - } - for _, s := range starships { - if strings.Contains(s.Name, args.Text) { - l = append(l, &searchResultResolver{&starshipResolver{s}}) - } - } - return l -} - -func (r *Resolver) Character(args struct{ ID graphql.ID }) *characterResolver { - if h := humanData[args.ID]; h != nil { - return &characterResolver{&humanResolver{h}} - } - if d := droidData[args.ID]; d != nil { - return &characterResolver{&droidResolver{d}} - } - return nil -} - -func (r *Resolver) Human(args struct{ ID graphql.ID }) *humanResolver { - if h := humanData[args.ID]; h != nil { - return &humanResolver{h} - } - return nil -} - -func (r *Resolver) Droid(args struct{ ID graphql.ID }) *droidResolver { - if d := droidData[args.ID]; d != nil { - return &droidResolver{d} - } - return nil -} - -func (r *Resolver) Starship(args struct{ ID graphql.ID }) *starshipResolver { - if s := starshipData[args.ID]; s != nil { - return &starshipResolver{s} - } - return nil -} - -func (r *Resolver) CreateReview(args *struct { - Episode string - Review *reviewInput -}) *reviewResolver { - review := &review{ - stars: args.Review.Stars, - commentary: args.Review.Commentary, - } - reviews[args.Episode] = append(reviews[args.Episode], review) - return &reviewResolver{review} -} - -type friendsConnectionArgs struct { - First *int32 - After *graphql.ID -} - -type character interface { - ID() graphql.ID - Name() string - Friends() *[]*characterResolver - FriendsConnection(friendsConnectionArgs) (*friendsConnectionResolver, error) - AppearsIn() []string -} - -type characterResolver struct { - character -} - -func (r *characterResolver) ToHuman() (*humanResolver, bool) { - c, ok := r.character.(*humanResolver) - return c, ok -} - -func (r *characterResolver) ToDroid() (*droidResolver, bool) { - c, ok := r.character.(*droidResolver) - return c, ok -} - -type humanResolver struct { - h *human -} - -func (r *humanResolver) ID() graphql.ID { - return r.h.ID -} - -func (r *humanResolver) Name() string { - return r.h.Name -} - -func (r *humanResolver) Height(args struct{ Unit string }) float64 { - return convertLength(r.h.Height, args.Unit) -} - -func (r *humanResolver) Mass() *float64 { - if r.h.Mass == 0 { - return nil - } - f := float64(r.h.Mass) - return &f -} - -func (r *humanResolver) Friends() *[]*characterResolver { - return resolveCharacters(r.h.Friends) -} - -func (r *humanResolver) FriendsConnection(args friendsConnectionArgs) (*friendsConnectionResolver, error) { - return newFriendsConnectionResolver(r.h.Friends, args) -} - -func (r *humanResolver) AppearsIn() []string { - return r.h.AppearsIn -} - -func (r *humanResolver) Starships() *[]*starshipResolver { - l := make([]*starshipResolver, len(r.h.Starships)) - for i, id := range 
r.h.Starships { - l[i] = &starshipResolver{starshipData[id]} - } - return &l -} - -type droidResolver struct { - d *droid -} - -func (r *droidResolver) ID() graphql.ID { - return r.d.ID -} - -func (r *droidResolver) Name() string { - return r.d.Name -} - -func (r *droidResolver) Friends() *[]*characterResolver { - return resolveCharacters(r.d.Friends) -} - -func (r *droidResolver) FriendsConnection(args friendsConnectionArgs) (*friendsConnectionResolver, error) { - return newFriendsConnectionResolver(r.d.Friends, args) -} - -func (r *droidResolver) AppearsIn() []string { - return r.d.AppearsIn -} - -func (r *droidResolver) PrimaryFunction() *string { - if r.d.PrimaryFunction == "" { - return nil - } - return &r.d.PrimaryFunction -} - -type starshipResolver struct { - s *starship -} - -func (r *starshipResolver) ID() graphql.ID { - return r.s.ID -} - -func (r *starshipResolver) Name() string { - return r.s.Name -} - -func (r *starshipResolver) Length(args struct{ Unit string }) float64 { - return convertLength(r.s.Length, args.Unit) -} - -type searchResultResolver struct { - result interface{} -} - -func (r *searchResultResolver) ToHuman() (*humanResolver, bool) { - res, ok := r.result.(*humanResolver) - return res, ok -} - -func (r *searchResultResolver) ToDroid() (*droidResolver, bool) { - res, ok := r.result.(*droidResolver) - return res, ok -} - -func (r *searchResultResolver) ToStarship() (*starshipResolver, bool) { - res, ok := r.result.(*starshipResolver) - return res, ok -} - -func convertLength(meters float64, unit string) float64 { - switch unit { - case "METER": - return meters - case "FOOT": - return meters * 3.28084 - default: - panic("invalid unit") - } -} - -func resolveCharacters(ids []graphql.ID) *[]*characterResolver { - var characters []*characterResolver - for _, id := range ids { - if c := resolveCharacter(id); c != nil { - characters = append(characters, c) - } - } - return &characters -} - -func resolveCharacter(id graphql.ID) *characterResolver { - if h, ok := humanData[id]; ok { - return &characterResolver{&humanResolver{h}} - } - if d, ok := droidData[id]; ok { - return &characterResolver{&droidResolver{d}} - } - return nil -} - -type reviewResolver struct { - r *review -} - -func (r *reviewResolver) Stars() int32 { - return r.r.stars -} - -func (r *reviewResolver) Commentary() *string { - return r.r.commentary -} - -type friendsConnectionResolver struct { - ids []graphql.ID - from int - to int -} - -func newFriendsConnectionResolver(ids []graphql.ID, args friendsConnectionArgs) (*friendsConnectionResolver, error) { - from := 0 - if args.After != nil { - b, err := base64.StdEncoding.DecodeString(string(*args.After)) - if err != nil { - return nil, err - } - i, err := strconv.Atoi(strings.TrimPrefix(string(b), "cursor")) - if err != nil { - return nil, err - } - from = i - } - - to := len(ids) - if args.First != nil { - to = from + int(*args.First) - if to > len(ids) { - to = len(ids) - } - } - - return &friendsConnectionResolver{ - ids: ids, - from: from, - to: to, - }, nil -} - -func (r *friendsConnectionResolver) TotalCount() int32 { - return int32(len(r.ids)) -} - -func (r *friendsConnectionResolver) Edges() *[]*friendsEdgeResolver { - l := make([]*friendsEdgeResolver, r.to-r.from) - for i := range l { - l[i] = &friendsEdgeResolver{ - cursor: encodeCursor(r.from + i), - id: r.ids[r.from+i], - } - } - return &l -} - -func (r *friendsConnectionResolver) Friends() *[]*characterResolver { - return resolveCharacters(r.ids[r.from:r.to]) -} - -func (r 
*friendsConnectionResolver) PageInfo() *pageInfoResolver { - return &pageInfoResolver{ - startCursor: encodeCursor(r.from), - endCursor: encodeCursor(r.to - 1), - hasNextPage: r.to < len(r.ids), - } -} - -func encodeCursor(i int) graphql.ID { - return graphql.ID(base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("cursor%d", i+1)))) -} - -type friendsEdgeResolver struct { - cursor graphql.ID - id graphql.ID -} - -func (r *friendsEdgeResolver) Cursor() graphql.ID { - return r.cursor -} - -func (r *friendsEdgeResolver) Node() *characterResolver { - return resolveCharacter(r.id) -} - -type pageInfoResolver struct { - startCursor graphql.ID - endCursor graphql.ID - hasNextPage bool -} - -func (r *pageInfoResolver) StartCursor() *graphql.ID { - return &r.startCursor -} - -func (r *pageInfoResolver) EndCursor() *graphql.ID { - return &r.endCursor -} - -func (r *pageInfoResolver) HasNextPage() bool { - return r.hasNextPage -} - -type reviewInput struct { - Stars int32 - Commentary *string -} diff --git a/vendor/github.com/neelance/graphql-go/gqltesting/testing.go b/vendor/github.com/neelance/graphql-go/gqltesting/testing.go deleted file mode 100644 index 56f90e4c..00000000 --- a/vendor/github.com/neelance/graphql-go/gqltesting/testing.go +++ /dev/null @@ -1,67 +0,0 @@ -package gqltesting - -import ( - "bytes" - "context" - "encoding/json" - "strconv" - "testing" - - graphql "github.com/neelance/graphql-go" -) - -// Test is a GraphQL test case to be used with RunTest(s). -type Test struct { - Context context.Context - Schema *graphql.Schema - Query string - OperationName string - Variables map[string]interface{} - ExpectedResult string -} - -// RunTests runs the given GraphQL test cases as subtests. -func RunTests(t *testing.T, tests []*Test) { - if len(tests) == 1 { - RunTest(t, tests[0]) - return - } - - for i, test := range tests { - t.Run(strconv.Itoa(i+1), func(t *testing.T) { - RunTest(t, test) - }) - } -} - -// RunTest runs a single GraphQL test case. 
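For orientation, here is a hypothetical table-driven test built on the `gqltesting` helpers above (the `helloSchema` string and `helloWorldResolver` type are stand-ins modeled on the README example earlier in this patch, not code from this file):

```go
package mypkg_test

import (
	"testing"

	graphql "github.com/neelance/graphql-go"
	"github.com/neelance/graphql-go/gqltesting"
)

const helloSchema = `
	schema { query: Query }
	type Query { hello: String! }
`

type helloWorldResolver struct{}

func (r *helloWorldResolver) Hello() string { return "Hello world!" }

func TestHello(t *testing.T) {
	// Each Test pairs a schema+resolver with a query and the JSON
	// result it should produce; RunTests runs them as subtests.
	gqltesting.RunTests(t, []*gqltesting.Test{
		{
			Schema:         graphql.MustParseSchema(helloSchema, &helloWorldResolver{}),
			Query:          `{ hello }`,
			ExpectedResult: `{"hello": "Hello world!"}`,
		},
	})
}
```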
-func RunTest(t *testing.T, test *Test) { - if test.Context == nil { - test.Context = context.Background() - } - result := test.Schema.Exec(test.Context, test.Query, test.OperationName, test.Variables) - if len(result.Errors) != 0 { - t.Fatal(result.Errors[0]) - } - got := formatJSON(t, result.Data) - - want := formatJSON(t, []byte(test.ExpectedResult)) - - if !bytes.Equal(got, want) { - t.Logf("got: %s", got) - t.Logf("want: %s", want) - t.Fail() - } -} - -func formatJSON(t *testing.T, data []byte) []byte { - var v interface{} - if err := json.Unmarshal(data, &v); err != nil { - t.Fatalf("invalid JSON: %s", err) - } - formatted, err := json.Marshal(v) - if err != nil { - t.Fatal(err) - } - return formatted -} diff --git a/vendor/github.com/neelance/graphql-go/graphql.go b/vendor/github.com/neelance/graphql-go/graphql.go deleted file mode 100644 index f63242fa..00000000 --- a/vendor/github.com/neelance/graphql-go/graphql.go +++ /dev/null @@ -1,185 +0,0 @@ -package graphql - -import ( - "context" - "fmt" - - "encoding/json" - - "github.com/neelance/graphql-go/errors" - "github.com/neelance/graphql-go/internal/common" - "github.com/neelance/graphql-go/internal/exec" - "github.com/neelance/graphql-go/internal/exec/resolvable" - "github.com/neelance/graphql-go/internal/exec/selected" - "github.com/neelance/graphql-go/internal/query" - "github.com/neelance/graphql-go/internal/schema" - "github.com/neelance/graphql-go/internal/validation" - "github.com/neelance/graphql-go/introspection" - "github.com/neelance/graphql-go/log" - "github.com/neelance/graphql-go/trace" -) - -// ParseSchema parses a GraphQL schema and attaches the given root resolver. It returns an error if -// the Go type signature of the resolvers does not match the schema. If nil is passed as the -// resolver, then the schema cannot be executed, but it may be inspected (e.g. with ToJSON). -func ParseSchema(schemaString string, resolver interface{}, opts ...SchemaOpt) (*Schema, error) { - s := &Schema{ - schema: schema.New(), - maxParallelism: 10, - tracer: trace.OpenTracingTracer{}, - logger: &log.DefaultLogger{}, - } - for _, opt := range opts { - opt(s) - } - - if err := s.schema.Parse(schemaString); err != nil { - return nil, err - } - - if resolver != nil { - r, err := resolvable.ApplyResolver(s.schema, resolver) - if err != nil { - return nil, err - } - s.res = r - } - - return s, nil -} - -// MustParseSchema calls ParseSchema and panics on error. -func MustParseSchema(schemaString string, resolver interface{}, opts ...SchemaOpt) *Schema { - s, err := ParseSchema(schemaString, resolver, opts...) - if err != nil { - panic(err) - } - return s -} - -// Schema represents a GraphQL schema with an optional resolver. -type Schema struct { - schema *schema.Schema - res *resolvable.Schema - - maxParallelism int - tracer trace.Tracer - logger log.Logger -} - -// SchemaOpt is an option to pass to ParseSchema or MustParseSchema. -type SchemaOpt func(*Schema) - -// MaxParallelism specifies the maximum number of resolvers per request allowed to run in parallel. The default is 10. -func MaxParallelism(n int) SchemaOpt { - return func(s *Schema) { - s.maxParallelism = n - } -} - -// Tracer is used to trace queries and fields. It defaults to trace.OpenTracingTracer. -func Tracer(tracer trace.Tracer) SchemaOpt { - return func(s *Schema) { - s.tracer = tracer - } -} - -// Logger is used to log panics during query execution. It defaults to log.DefaultLogger.
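// A minimal sketch of plugging in a custom logger; stderrLogger is a
// hypothetical type, and the only assumption is the log.Logger interface
// (LogPanic) that the executor calls elsewhere in this package:
//
//	type stderrLogger struct{}
//
//	func (stderrLogger) LogPanic(ctx context.Context, value interface{}) {
//		fmt.Fprintf(os.Stderr, "graphql panic: %v\n", value)
//	}
//
//	schema := graphql.MustParseSchema(schemaString, resolver, graphql.Logger(stderrLogger{}))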
-func Logger(logger log.Logger) SchemaOpt { - return func(s *Schema) { - s.logger = logger - } -} - -// Response represents a typical response of a GraphQL server. It may be encoded to JSON directly or -// it may be further processed to a custom response type, for example to include custom error data. -type Response struct { - Data json.RawMessage `json:"data,omitempty"` - Errors []*errors.QueryError `json:"errors,omitempty"` - Extensions map[string]interface{} `json:"extensions,omitempty"` -} - -// Validate validates the given query with the schema. -func (s *Schema) Validate(queryString string) []*errors.QueryError { - doc, qErr := query.Parse(queryString) - if qErr != nil { - return []*errors.QueryError{qErr} - } - - return validation.Validate(s.schema, doc) } - -// Exec executes the given query with the schema's resolver. It panics if the schema was created -// without a resolver. If the context gets cancelled, no further resolvers will be called and -// the context error will be returned as soon as possible (not immediately). -func (s *Schema) Exec(ctx context.Context, queryString string, operationName string, variables map[string]interface{}) *Response { - if s.res == nil { - panic("schema created without resolver, can not exec") - } - return s.exec(ctx, queryString, operationName, variables, s.res) -} - -func (s *Schema) exec(ctx context.Context, queryString string, operationName string, variables map[string]interface{}, res *resolvable.Schema) *Response { - doc, qErr := query.Parse(queryString) - if qErr != nil { - return &Response{Errors: []*errors.QueryError{qErr}} - } - - errs := validation.Validate(s.schema, doc) - if len(errs) != 0 { - return &Response{Errors: errs} - } - - op, err := getOperation(doc, operationName) - if err != nil { - return &Response{Errors: []*errors.QueryError{errors.Errorf("%s", err)}} - } - - r := &exec.Request{ - Request: selected.Request{ - Doc: doc, - Vars: variables, - Schema: s.schema, - }, - Limiter: make(chan struct{}, s.maxParallelism), - Tracer: s.tracer, - Logger: s.logger, - } - varTypes := make(map[string]*introspection.Type) - for _, v := range op.Vars { - t, err := common.ResolveType(v.Type, s.schema.Resolve) - if err != nil { - return &Response{Errors: []*errors.QueryError{err}} - } - varTypes[v.Name.Name] = introspection.WrapType(t) - } - traceCtx, finish := s.tracer.TraceQuery(ctx, queryString, operationName, variables, varTypes) - data, errs := r.Execute(traceCtx, res, op) - finish(errs) - - return &Response{ - Data: data, - Errors: errs, - } -} - -func getOperation(document *query.Document, operationName string) (*query.Operation, error) { - if len(document.Operations) == 0 { - return nil, fmt.Errorf("no operations in query document") - } - - if operationName == "" { - if len(document.Operations) > 1 { - return nil, fmt.Errorf("more than one operation in query document and no operation name given") - } - for _, op := range document.Operations { - return op, nil // return the one and only operation - } - } - - op := document.Operations.Get(operationName) - if op == nil { - return nil, fmt.Errorf("no operation with name %q", operationName) - } - return op, nil -} diff --git a/vendor/github.com/neelance/graphql-go/graphql_test.go b/vendor/github.com/neelance/graphql-go/graphql_test.go deleted file mode 100644 index 8e581afb..00000000 --- a/vendor/github.com/neelance/graphql-go/graphql_test.go +++ /dev/null @@ -1,1755 +0,0 @@ -package graphql_test - -import ( - "context" - "testing" - "time" - - "github.com/neelance/graphql-go" -
"github.com/neelance/graphql-go/example/starwars" - "github.com/neelance/graphql-go/gqltesting" -) - -type helloWorldResolver1 struct{} - -func (r *helloWorldResolver1) Hello() string { - return "Hello world!" -} - -type helloWorldResolver2 struct{} - -func (r *helloWorldResolver2) Hello(ctx context.Context) (string, error) { - return "Hello world!", nil -} - -type helloSnakeResolver1 struct{} - -func (r *helloSnakeResolver1) HelloHTML() string { - return "Hello snake!" -} - -func (r *helloSnakeResolver1) SayHello(args struct{ FullName string }) string { - return "Hello " + args.FullName + "!" -} - -type helloSnakeResolver2 struct{} - -func (r *helloSnakeResolver2) HelloHTML(ctx context.Context) (string, error) { - return "Hello snake!", nil -} - -func (r *helloSnakeResolver2) SayHello(ctx context.Context, args struct{ FullName string }) (string, error) { - return "Hello " + args.FullName + "!", nil -} - -type theNumberResolver struct { - number int32 -} - -func (r *theNumberResolver) TheNumber() int32 { - return r.number -} - -func (r *theNumberResolver) ChangeTheNumber(args struct{ NewNumber int32 }) *theNumberResolver { - r.number = args.NewNumber - return r -} - -type timeResolver struct{} - -func (r *timeResolver) AddHour(args struct{ Time graphql.Time }) graphql.Time { - return graphql.Time{Time: args.Time.Add(time.Hour)} -} - -var starwarsSchema = graphql.MustParseSchema(starwars.Schema, &starwars.Resolver{}) - -func TestHelloWorld(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: graphql.MustParseSchema(` - schema { - query: Query - } - - type Query { - hello: String! - } - `, &helloWorldResolver1{}), - Query: ` - { - hello - } - `, - ExpectedResult: ` - { - "hello": "Hello world!" - } - `, - }, - - { - Schema: graphql.MustParseSchema(` - schema { - query: Query - } - - type Query { - hello: String! - } - `, &helloWorldResolver2{}), - Query: ` - { - hello - } - `, - ExpectedResult: ` - { - "hello": "Hello world!" - } - `, - }, - }) -} - -func TestHelloSnake(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: graphql.MustParseSchema(` - schema { - query: Query - } - - type Query { - hello_html: String! - } - `, &helloSnakeResolver1{}), - Query: ` - { - hello_html - } - `, - ExpectedResult: ` - { - "hello_html": "Hello snake!" - } - `, - }, - - { - Schema: graphql.MustParseSchema(` - schema { - query: Query - } - - type Query { - hello_html: String! - } - `, &helloSnakeResolver2{}), - Query: ` - { - hello_html - } - `, - ExpectedResult: ` - { - "hello_html": "Hello snake!" - } - `, - }, - }) -} - -func TestHelloSnakeArguments(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: graphql.MustParseSchema(` - schema { - query: Query - } - - type Query { - say_hello(full_name: String!): String! - } - `, &helloSnakeResolver1{}), - Query: ` - { - say_hello(full_name: "Rob Pike") - } - `, - ExpectedResult: ` - { - "say_hello": "Hello Rob Pike!" - } - `, - }, - - { - Schema: graphql.MustParseSchema(` - schema { - query: Query - } - - type Query { - say_hello(full_name: String!): String! - } - `, &helloSnakeResolver2{}), - Query: ` - { - say_hello(full_name: "Rob Pike") - } - `, - ExpectedResult: ` - { - "say_hello": "Hello Rob Pike!" 
- } - `, - }, - }) -} - -func TestBasic(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: starwarsSchema, - Query: ` - { - hero { - id - name - friends { - name - } - } - } - `, - ExpectedResult: ` - { - "hero": { - "id": "2001", - "name": "R2-D2", - "friends": [ - { - "name": "Luke Skywalker" - }, - { - "name": "Han Solo" - }, - { - "name": "Leia Organa" - } - ] - } - } - `, - }, - }) -} - -func TestArguments(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: starwarsSchema, - Query: ` - { - human(id: "1000") { - name - height - } - } - `, - ExpectedResult: ` - { - "human": { - "name": "Luke Skywalker", - "height": 1.72 - } - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - { - human(id: "1000") { - name - height(unit: FOOT) - } - } - `, - ExpectedResult: ` - { - "human": { - "name": "Luke Skywalker", - "height": 5.6430448 - } - } - `, - }, - }) -} - -func TestAliases(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: starwarsSchema, - Query: ` - { - empireHero: hero(episode: EMPIRE) { - name - } - jediHero: hero(episode: JEDI) { - name - } - } - `, - ExpectedResult: ` - { - "empireHero": { - "name": "Luke Skywalker" - }, - "jediHero": { - "name": "R2-D2" - } - } - `, - }, - }) -} - -func TestFragments(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: starwarsSchema, - Query: ` - { - leftComparison: hero(episode: EMPIRE) { - ...comparisonFields - ...height - } - rightComparison: hero(episode: JEDI) { - ...comparisonFields - ...height - } - } - - fragment comparisonFields on Character { - name - appearsIn - friends { - name - } - } - - fragment height on Human { - height - } - `, - ExpectedResult: ` - { - "leftComparison": { - "name": "Luke Skywalker", - "appearsIn": [ - "NEWHOPE", - "EMPIRE", - "JEDI" - ], - "friends": [ - { - "name": "Han Solo" - }, - { - "name": "Leia Organa" - }, - { - "name": "C-3PO" - }, - { - "name": "R2-D2" - } - ], - "height": 1.72 - }, - "rightComparison": { - "name": "R2-D2", - "appearsIn": [ - "NEWHOPE", - "EMPIRE", - "JEDI" - ], - "friends": [ - { - "name": "Luke Skywalker" - }, - { - "name": "Han Solo" - }, - { - "name": "Leia Organa" - } - ] - } - } - `, - }, - }) -} - -func TestVariables(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: starwarsSchema, - Query: ` - query HeroNameAndFriends($episode: Episode) { - hero(episode: $episode) { - name - } - } - `, - Variables: map[string]interface{}{ - "episode": "JEDI", - }, - ExpectedResult: ` - { - "hero": { - "name": "R2-D2" - } - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - query HeroNameAndFriends($episode: Episode) { - hero(episode: $episode) { - name - } - } - `, - Variables: map[string]interface{}{ - "episode": "EMPIRE", - }, - ExpectedResult: ` - { - "hero": { - "name": "Luke Skywalker" - } - } - `, - }, - }) -} - -func TestSkipDirective(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: starwarsSchema, - Query: ` - query Hero($episode: Episode, $withoutFriends: Boolean!) { - hero(episode: $episode) { - name - friends @skip(if: $withoutFriends) { - name - } - } - } - `, - Variables: map[string]interface{}{ - "episode": "JEDI", - "withoutFriends": true, - }, - ExpectedResult: ` - { - "hero": { - "name": "R2-D2" - } - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - query Hero($episode: Episode, $withoutFriends: Boolean!) 
{ - hero(episode: $episode) { - name - friends @skip(if: $withoutFriends) { - name - } - } - } - `, - Variables: map[string]interface{}{ - "episode": "JEDI", - "withoutFriends": false, - }, - ExpectedResult: ` - { - "hero": { - "name": "R2-D2", - "friends": [ - { - "name": "Luke Skywalker" - }, - { - "name": "Han Solo" - }, - { - "name": "Leia Organa" - } - ] - } - } - `, - }, - }) -} - -func TestIncludeDirective(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: starwarsSchema, - Query: ` - query Hero($episode: Episode, $withFriends: Boolean!) { - hero(episode: $episode) { - name - ...friendsFragment @include(if: $withFriends) - } - } - - fragment friendsFragment on Character { - friends { - name - } - } - `, - Variables: map[string]interface{}{ - "episode": "JEDI", - "withFriends": false, - }, - ExpectedResult: ` - { - "hero": { - "name": "R2-D2" - } - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - query Hero($episode: Episode, $withFriends: Boolean!) { - hero(episode: $episode) { - name - ...friendsFragment @include(if: $withFriends) - } - } - - fragment friendsFragment on Character { - friends { - name - } - } - `, - Variables: map[string]interface{}{ - "episode": "JEDI", - "withFriends": true, - }, - ExpectedResult: ` - { - "hero": { - "name": "R2-D2", - "friends": [ - { - "name": "Luke Skywalker" - }, - { - "name": "Han Solo" - }, - { - "name": "Leia Organa" - } - ] - } - } - `, - }, - }) -} - -type testDeprecatedDirectiveResolver struct{} - -func (r *testDeprecatedDirectiveResolver) A() int32 { - return 0 -} - -func (r *testDeprecatedDirectiveResolver) B() int32 { - return 0 -} - -func (r *testDeprecatedDirectiveResolver) C() int32 { - return 0 -} - -func TestDeprecatedDirective(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: graphql.MustParseSchema(` - schema { - query: Query - } - - type Query { - a: Int! - b: Int! @deprecated - c: Int! @deprecated(reason: "We don't like it") - } - `, &testDeprecatedDirectiveResolver{}), - Query: ` - { - __type(name: "Query") { - fields { - name - } - allFields: fields(includeDeprecated: true) { - name - isDeprecated - deprecationReason - } - } - } - `, - ExpectedResult: ` - { - "__type": { - "fields": [ - { "name": "a" } - ], - "allFields": [ - { "name": "a", "isDeprecated": false, "deprecationReason": null }, - { "name": "b", "isDeprecated": true, "deprecationReason": "No longer supported" }, - { "name": "c", "isDeprecated": true, "deprecationReason": "We don't like it" } - ] - } - } - `, - }, - { - Schema: graphql.MustParseSchema(` - schema { - query: Query - } - - type Query { - } - - enum Test { - A - B @deprecated - C @deprecated(reason: "We don't like it") - } - `, &testDeprecatedDirectiveResolver{}), - Query: ` - { - __type(name: "Test") { - enumValues { - name - } - allEnumValues: enumValues(includeDeprecated: true) { - name - isDeprecated - deprecationReason - } - } - } - `, - ExpectedResult: ` - { - "__type": { - "enumValues": [ - { "name": "A" } - ], - "allEnumValues": [ - { "name": "A", "isDeprecated": false, "deprecationReason": null }, - { "name": "B", "isDeprecated": true, "deprecationReason": "No longer supported" }, - { "name": "C", "isDeprecated": true, "deprecationReason": "We don't like it" } - ] - } - } - `, - }, - }) -} - -func TestInlineFragments(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: starwarsSchema, - Query: ` - query HeroForEpisode($episode: Episode!) { - hero(episode: $episode) { - name - ... 
on Droid { - primaryFunction - } - ... on Human { - height - } - } - } - `, - Variables: map[string]interface{}{ - "episode": "JEDI", - }, - ExpectedResult: ` - { - "hero": { - "name": "R2-D2", - "primaryFunction": "Astromech" - } - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - query HeroForEpisode($episode: Episode!) { - hero(episode: $episode) { - name - ... on Droid { - primaryFunction - } - ... on Human { - height - } - } - } - `, - Variables: map[string]interface{}{ - "episode": "EMPIRE", - }, - ExpectedResult: ` - { - "hero": { - "name": "Luke Skywalker", - "height": 1.72 - } - } - `, - }, - }) -} - -func TestTypeName(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: starwarsSchema, - Query: ` - { - search(text: "an") { - __typename - ... on Human { - name - } - ... on Droid { - name - } - ... on Starship { - name - } - } - } - `, - ExpectedResult: ` - { - "search": [ - { - "__typename": "Human", - "name": "Han Solo" - }, - { - "__typename": "Human", - "name": "Leia Organa" - }, - { - "__typename": "Starship", - "name": "TIE Advanced x1" - } - ] - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - { - human(id: "1000") { - __typename - name - } - } - `, - ExpectedResult: ` - { - "human": { - "__typename": "Human", - "name": "Luke Skywalker" - } - } - `, - }, - }) -} - -func TestConnections(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: starwarsSchema, - Query: ` - { - hero { - name - friendsConnection { - totalCount - pageInfo { - startCursor - endCursor - hasNextPage - } - edges { - cursor - node { - name - } - } - } - } - } - `, - ExpectedResult: ` - { - "hero": { - "name": "R2-D2", - "friendsConnection": { - "totalCount": 3, - "pageInfo": { - "startCursor": "Y3Vyc29yMQ==", - "endCursor": "Y3Vyc29yMw==", - "hasNextPage": false - }, - "edges": [ - { - "cursor": "Y3Vyc29yMQ==", - "node": { - "name": "Luke Skywalker" - } - }, - { - "cursor": "Y3Vyc29yMg==", - "node": { - "name": "Han Solo" - } - }, - { - "cursor": "Y3Vyc29yMw==", - "node": { - "name": "Leia Organa" - } - } - ] - } - } - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - { - hero { - name - friendsConnection(first: 1, after: "Y3Vyc29yMQ==") { - totalCount - pageInfo { - startCursor - endCursor - hasNextPage - } - edges { - cursor - node { - name - } - } - } - }, - moreFriends: hero { - name - friendsConnection(first: 1, after: "Y3Vyc29yMg==") { - totalCount - pageInfo { - startCursor - endCursor - hasNextPage - } - edges { - cursor - node { - name - } - } - } - } - } - `, - ExpectedResult: ` - { - "hero": { - "name": "R2-D2", - "friendsConnection": { - "totalCount": 3, - "pageInfo": { - "startCursor": "Y3Vyc29yMg==", - "endCursor": "Y3Vyc29yMg==", - "hasNextPage": true - }, - "edges": [ - { - "cursor": "Y3Vyc29yMg==", - "node": { - "name": "Han Solo" - } - } - ] - } - }, - "moreFriends": { - "name": "R2-D2", - "friendsConnection": { - "totalCount": 3, - "pageInfo": { - "startCursor": "Y3Vyc29yMw==", - "endCursor": "Y3Vyc29yMw==", - "hasNextPage": false - }, - "edges": [ - { - "cursor": "Y3Vyc29yMw==", - "node": { - "name": "Leia Organa" - } - } - ] - } - } - } - `, - }, - }) -} - -func TestMutation(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: starwarsSchema, - Query: ` - { - reviews(episode: JEDI) { - stars - commentary - } - } - `, - ExpectedResult: ` - { - "reviews": [] - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - mutation CreateReviewForEpisode($ep: Episode!, $review: ReviewInput!) 
{ - createReview(episode: $ep, review: $review) { - stars - commentary - } - } - `, - Variables: map[string]interface{}{ - "ep": "JEDI", - "review": map[string]interface{}{ - "stars": 5, - "commentary": "This is a great movie!", - }, - }, - ExpectedResult: ` - { - "createReview": { - "stars": 5, - "commentary": "This is a great movie!" - } - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - mutation CreateReviewForEpisode($ep: Episode!, $review: ReviewInput!) { - createReview(episode: $ep, review: $review) { - stars - commentary - } - } - `, - Variables: map[string]interface{}{ - "ep": "EMPIRE", - "review": map[string]interface{}{ - "stars": float64(4), - }, - }, - ExpectedResult: ` - { - "createReview": { - "stars": 4, - "commentary": null - } - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - { - reviews(episode: JEDI) { - stars - commentary - } - } - `, - ExpectedResult: ` - { - "reviews": [{ - "stars": 5, - "commentary": "This is a great movie!" - }] - } - `, - }, - }) -} - -func TestIntrospection(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: starwarsSchema, - Query: ` - { - __schema { - types { - name - } - } - } - `, - ExpectedResult: ` - { - "__schema": { - "types": [ - { "name": "Boolean" }, - { "name": "Character" }, - { "name": "Droid" }, - { "name": "Episode" }, - { "name": "Float" }, - { "name": "FriendsConnection" }, - { "name": "FriendsEdge" }, - { "name": "Human" }, - { "name": "ID" }, - { "name": "Int" }, - { "name": "LengthUnit" }, - { "name": "Mutation" }, - { "name": "PageInfo" }, - { "name": "Query" }, - { "name": "Review" }, - { "name": "ReviewInput" }, - { "name": "SearchResult" }, - { "name": "Starship" }, - { "name": "String" }, - { "name": "__Directive" }, - { "name": "__DirectiveLocation" }, - { "name": "__EnumValue" }, - { "name": "__Field" }, - { "name": "__InputValue" }, - { "name": "__Schema" }, - { "name": "__Type" }, - { "name": "__TypeKind" } - ] - } - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - { - __schema { - queryType { - name - } - } - } - `, - ExpectedResult: ` - { - "__schema": { - "queryType": { - "name": "Query" - } - } - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - { - a: __type(name: "Droid") { - name - kind - interfaces { - name - } - possibleTypes { - name - } - }, - b: __type(name: "Character") { - name - kind - interfaces { - name - } - possibleTypes { - name - } - } - c: __type(name: "SearchResult") { - name - kind - interfaces { - name - } - possibleTypes { - name - } - } - } - `, - ExpectedResult: ` - { - "a": { - "name": "Droid", - "kind": "OBJECT", - "interfaces": [ - { - "name": "Character" - } - ], - "possibleTypes": null - }, - "b": { - "name": "Character", - "kind": "INTERFACE", - "interfaces": null, - "possibleTypes": [ - { - "name": "Human" - }, - { - "name": "Droid" - } - ] - }, - "c": { - "name": "SearchResult", - "kind": "UNION", - "interfaces": null, - "possibleTypes": [ - { - "name": "Human" - }, - { - "name": "Droid" - }, - { - "name": "Starship" - } - ] - } - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - { - __type(name: "Droid") { - name - fields { - name - args { - name - type { - name - } - defaultValue - } - type { - name - kind - } - } - } - } - `, - ExpectedResult: ` - { - "__type": { - "name": "Droid", - "fields": [ - { - "name": "id", - "args": [], - "type": { - "name": null, - "kind": "NON_NULL" - } - }, - { - "name": "name", - "args": [], - "type": { - "name": null, - "kind": "NON_NULL" - } - }, - { - "name": "friends", - "args": [], - 
"type": { - "name": null, - "kind": "LIST" - } - }, - { - "name": "friendsConnection", - "args": [ - { - "name": "first", - "type": { - "name": "Int" - }, - "defaultValue": null - }, - { - "name": "after", - "type": { - "name": "ID" - }, - "defaultValue": null - } - ], - "type": { - "name": null, - "kind": "NON_NULL" - } - }, - { - "name": "appearsIn", - "args": [], - "type": { - "name": null, - "kind": "NON_NULL" - } - }, - { - "name": "primaryFunction", - "args": [], - "type": { - "name": "String", - "kind": "SCALAR" - } - } - ] - } - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - { - __type(name: "Episode") { - enumValues { - name - } - } - } - `, - ExpectedResult: ` - { - "__type": { - "enumValues": [ - { - "name": "NEWHOPE" - }, - { - "name": "EMPIRE" - }, - { - "name": "JEDI" - } - ] - } - } - `, - }, - - { - Schema: starwarsSchema, - Query: ` - { - __schema { - directives { - name - description - locations - args { - name - description - type { - kind - ofType { - kind - name - } - } - } - } - } - } - `, - ExpectedResult: ` - { - "__schema": { - "directives": [ - { - "name": "deprecated", - "description": "Marks an element of a GraphQL schema as no longer supported.", - "locations": [ - "FIELD_DEFINITION", - "ENUM_VALUE" - ], - "args": [ - { - "name": "reason", - "description": "Explains why this element was deprecated, usually also including a suggestion\nfor how to access supported similar data. Formatted in\n[Markdown](https://daringfireball.net/projects/markdown/).", - "type": { - "kind": "SCALAR", - "ofType": null - } - } - ] - }, - { - "name": "include", - "description": "Directs the executor to include this field or fragment only when the ` + "`" + `if` + "`" + ` argument is true.", - "locations": [ - "FIELD", - "FRAGMENT_SPREAD", - "INLINE_FRAGMENT" - ], - "args": [ - { - "name": "if", - "description": "Included when true.", - "type": { - "kind": "NON_NULL", - "ofType": { - "kind": "SCALAR", - "name": "Boolean" - } - } - } - ] - }, - { - "name": "skip", - "description": "Directs the executor to skip this field or fragment when the ` + "`" + `if` + "`" + ` argument is true.", - "locations": [ - "FIELD", - "FRAGMENT_SPREAD", - "INLINE_FRAGMENT" - ], - "args": [ - { - "name": "if", - "description": "Skipped when true.", - "type": { - "kind": "NON_NULL", - "ofType": { - "kind": "SCALAR", - "name": "Boolean" - } - } - } - ] - } - ] - } - } - `, - }, - }) -} - -func TestMutationOrder(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: graphql.MustParseSchema(` - schema { - query: Query - mutation: Mutation - } - - type Query { - theNumber: Int! - } - - type Mutation { - changeTheNumber(newNumber: Int!): Query - } - `, &theNumberResolver{}), - Query: ` - mutation { - first: changeTheNumber(newNumber: 1) { - theNumber - } - second: changeTheNumber(newNumber: 3) { - theNumber - } - third: changeTheNumber(newNumber: 2) { - theNumber - } - } - `, - ExpectedResult: ` - { - "first": { - "theNumber": 1 - }, - "second": { - "theNumber": 3 - }, - "third": { - "theNumber": 2 - } - } - `, - }, - }) -} - -func TestTime(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: graphql.MustParseSchema(` - schema { - query: Query - } - - type Query { - addHour(time: Time = "2001-02-03T04:05:06Z"): Time! - } - - scalar Time - `, &timeResolver{}), - Query: ` - query($t: Time!) 
{ - a: addHour(time: $t) - b: addHour - } - `, - Variables: map[string]interface{}{ - "t": time.Date(2000, 2, 3, 4, 5, 6, 0, time.UTC), - }, - ExpectedResult: ` - { - "a": "2000-02-03T05:05:06Z", - "b": "2001-02-03T05:05:06Z" - } - `, - }, - }) -} - -type resolverWithUnexportedMethod struct{} - -func (r *resolverWithUnexportedMethod) changeTheNumber(args struct{ NewNumber int32 }) int32 { - return args.NewNumber -} - -func TestUnexportedMethod(t *testing.T) { - _, err := graphql.ParseSchema(` - schema { - mutation: Mutation - } - - type Mutation { - changeTheNumber(newNumber: Int!): Int! - } - `, &resolverWithUnexportedMethod{}) - if err == nil { - t.Error("error expected") - } -} - -type resolverWithUnexportedField struct{} - -func (r *resolverWithUnexportedField) ChangeTheNumber(args struct{ newNumber int32 }) int32 { - return args.newNumber -} - -func TestUnexportedField(t *testing.T) { - _, err := graphql.ParseSchema(` - schema { - mutation: Mutation - } - - type Mutation { - changeTheNumber(newNumber: Int!): Int! - } - `, &resolverWithUnexportedField{}) - if err == nil { - t.Error("error expected") - } -} - -type inputResolver struct{} - -func (r *inputResolver) Int(args struct{ Value int32 }) int32 { - return args.Value -} - -func (r *inputResolver) Float(args struct{ Value float64 }) float64 { - return args.Value -} - -func (r *inputResolver) String(args struct{ Value string }) string { - return args.Value -} - -func (r *inputResolver) Boolean(args struct{ Value bool }) bool { - return args.Value -} - -func (r *inputResolver) Nullable(args struct{ Value *int32 }) *int32 { - return args.Value -} - -func (r *inputResolver) List(args struct{ Value []*struct{ V int32 } }) []int32 { - l := make([]int32, len(args.Value)) - for i, entry := range args.Value { - l[i] = entry.V - } - return l -} - -func (r *inputResolver) NullableList(args struct{ Value *[]*struct{ V int32 } }) *[]*int32 { - if args.Value == nil { - return nil - } - l := make([]*int32, len(*args.Value)) - for i, entry := range *args.Value { - if entry != nil { - l[i] = &entry.V - } - } - return &l -} - -func (r *inputResolver) Enum(args struct{ Value string }) string { - return args.Value -} - -func (r *inputResolver) NullableEnum(args struct{ Value *string }) *string { - return args.Value -} - -type recursive struct { - Next *recursive -} - -func (r *inputResolver) Recursive(args struct{ Value *recursive }) int32 { - n := int32(0) - v := args.Value - for v != nil { - v = v.Next - n++ - } - return n -} - -func (r *inputResolver) ID(args struct{ Value graphql.ID }) graphql.ID { - return args.Value -} - -func TestInput(t *testing.T) { - coercionSchema := graphql.MustParseSchema(` - schema { - query: Query - } - - type Query { - int(value: Int!): Int! - float(value: Float!): Float! - string(value: String!): String! - boolean(value: Boolean!): Boolean! - nullable(value: Int): Int - list(value: [Input!]!): [Int!]! - nullableList(value: [Input]): [Int] - enum(value: Enum!): Enum! - nullableEnum(value: Enum): Enum - recursive(value: RecursiveInput!): Int! - id(value: ID!): ID! - } - - input Input { - v: Int! 
- } - - input RecursiveInput { - next: RecursiveInput - } - - enum Enum { - Option1 - Option2 - } - `, &inputResolver{}) - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: coercionSchema, - Query: ` - { - int(value: 42) - float1: float(value: 42) - float2: float(value: 42.5) - string(value: "foo") - boolean(value: true) - nullable1: nullable(value: 42) - nullable2: nullable(value: null) - list1: list(value: [{v: 41}, {v: 42}, {v: 43}]) - list2: list(value: {v: 42}) - nullableList1: nullableList(value: [{v: 41}, null, {v: 43}]) - nullableList2: nullableList(value: null) - enum(value: Option2) - nullableEnum1: nullableEnum(value: Option2) - nullableEnum2: nullableEnum(value: null) - recursive(value: {next: {next: {}}}) - intID: id(value: 1234) - strID: id(value: "1234") - } - `, - ExpectedResult: ` - { - "int": 42, - "float1": 42, - "float2": 42.5, - "string": "foo", - "boolean": true, - "nullable1": 42, - "nullable2": null, - "list1": [41, 42, 43], - "list2": [42], - "nullableList1": [41, null, 43], - "nullableList2": null, - "enum": "Option2", - "nullableEnum1": "Option2", - "nullableEnum2": null, - "recursive": 3, - "intID": "1234", - "strID": "1234" - } - `, - }, - }) -} - -func TestComposedFragments(t *testing.T) { - gqltesting.RunTests(t, []*gqltesting.Test{ - { - Schema: starwarsSchema, - Query: ` - { - composed: hero(episode: EMPIRE) { - name - ...friendsNames - ...friendsIds - } - } - - fragment friendsNames on Character { - name - friends { - name - } - } - - fragment friendsIds on Character { - name - friends { - id - } - } - `, - ExpectedResult: ` - { - "composed": { - "name": "Luke Skywalker", - "friends": [ - { - "id": "1002", - "name": "Han Solo" - }, - { - "id": "1003", - "name": "Leia Organa" - }, - { - "id": "2000", - "name": "C-3PO" - }, - { - "id": "2001", - "name": "R2-D2" - } - ] - } - } - `, - }, - }) -} diff --git a/vendor/github.com/neelance/graphql-go/id.go b/vendor/github.com/neelance/graphql-go/id.go deleted file mode 100644 index 3ae2c38c..00000000 --- a/vendor/github.com/neelance/graphql-go/id.go +++ /dev/null @@ -1,30 +0,0 @@ -package graphql - -import ( - "errors" - "strconv" -) - -// ID represents GraphQL's "ID" scalar type. A custom type may be used instead. 
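// A sketch of such a custom type (OrderID is a hypothetical name); any type
// that implements the same two methods, per the Unmarshaler interface in
// internal/exec/packer, can be used in its place:
//
//	type OrderID string
//
//	func (OrderID) ImplementsGraphQLType(name string) bool { return name == "ID" }
//
//	func (id *OrderID) UnmarshalGraphQL(input interface{}) error {
//		s, ok := input.(string)
//		if !ok {
//			return errors.New("wrong type")
//		}
//		*id = OrderID(s)
//		return nil
//	}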
-type ID string - -func (_ ID) ImplementsGraphQLType(name string) bool { - return name == "ID" -} - -func (id *ID) UnmarshalGraphQL(input interface{}) error { - var err error - switch input := input.(type) { - case string: - *id = ID(input) - case int32: - *id = ID(strconv.Itoa(int(input))) - default: - err = errors.New("wrong type") - } - return err -} - -func (id ID) MarshalJSON() ([]byte, error) { - return strconv.AppendQuote(nil, string(id)), nil -} diff --git a/vendor/github.com/neelance/graphql-go/internal/common/directive.go b/vendor/github.com/neelance/graphql-go/internal/common/directive.go deleted file mode 100644 index 62dca47f..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/common/directive.go +++ /dev/null @@ -1,32 +0,0 @@ -package common - -type Directive struct { - Name Ident - Args ArgumentList -} - -func ParseDirectives(l *Lexer) DirectiveList { - var directives DirectiveList - for l.Peek() == '@' { - l.ConsumeToken('@') - d := &Directive{} - d.Name = l.ConsumeIdentWithLoc() - d.Name.Loc.Column-- - if l.Peek() == '(' { - d.Args = ParseArguments(l) - } - directives = append(directives, d) - } - return directives -} - -type DirectiveList []*Directive - -func (l DirectiveList) Get(name string) *Directive { - for _, d := range l { - if d.Name.Name == name { - return d - } - } - return nil -} diff --git a/vendor/github.com/neelance/graphql-go/internal/common/lexer.go b/vendor/github.com/neelance/graphql-go/internal/common/lexer.go deleted file mode 100644 index f67dc31e..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/common/lexer.go +++ /dev/null @@ -1,122 +0,0 @@ -package common - -import ( - "fmt" - "text/scanner" - - "github.com/neelance/graphql-go/errors" -) - -type syntaxError string - -type Lexer struct { - sc *scanner.Scanner - next rune - descComment string -} - -type Ident struct { - Name string - Loc errors.Location -} - -func New(sc *scanner.Scanner) *Lexer { - l := &Lexer{sc: sc} - l.Consume() - return l -} - -func (l *Lexer) CatchSyntaxError(f func()) (errRes *errors.QueryError) { - defer func() { - if err := recover(); err != nil { - if err, ok := err.(syntaxError); ok { - errRes = errors.Errorf("syntax error: %s", err) - errRes.Locations = []errors.Location{l.Location()} - return - } - panic(err) - } - }() - - f() - return -} - -func (l *Lexer) Peek() rune { - return l.next -} - -func (l *Lexer) Consume() { - l.descComment = "" - for { - l.next = l.sc.Scan() - if l.next == ',' { - continue - } - if l.next == '#' { - if l.sc.Peek() == ' ' { - l.sc.Next() - } - if l.descComment != "" { - l.descComment += "\n" - } - for { - next := l.sc.Next() - if next == '\n' || next == scanner.EOF { - break - } - l.descComment += string(next) - } - continue - } - break - } -} - -func (l *Lexer) ConsumeIdent() string { - name := l.sc.TokenText() - l.ConsumeToken(scanner.Ident) - return name -} - -func (l *Lexer) ConsumeIdentWithLoc() Ident { - loc := l.Location() - name := l.sc.TokenText() - l.ConsumeToken(scanner.Ident) - return Ident{name, loc} -} - -func (l *Lexer) ConsumeKeyword(keyword string) { - if l.next != scanner.Ident || l.sc.TokenText() != keyword { - l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %q", l.sc.TokenText(), keyword)) - } - l.Consume() -} - -func (l *Lexer) ConsumeLiteral() *BasicLit { - lit := &BasicLit{Type: l.next, Text: l.sc.TokenText()} - l.Consume() - return lit -} - -func (l *Lexer) ConsumeToken(expected rune) { - if l.next != expected { - l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %s", l.sc.TokenText(), 
scanner.TokenString(expected))) - } - l.Consume() -} - -func (l *Lexer) DescComment() string { - return l.descComment -} - -func (l *Lexer) SyntaxError(message string) { - panic(syntaxError(message)) -} - -func (l *Lexer) Location() errors.Location { - return errors.Location{ - Line: l.sc.Line, - Column: l.sc.Column, - } -} diff --git a/vendor/github.com/neelance/graphql-go/internal/common/literals.go b/vendor/github.com/neelance/graphql-go/internal/common/literals.go deleted file mode 100644 index d1c84e3a..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/common/literals.go +++ /dev/null @@ -1,206 +0,0 @@ -package common - -import ( - "strconv" - "strings" - "text/scanner" - - "github.com/neelance/graphql-go/errors" -) - -type Literal interface { - Value(vars map[string]interface{}) interface{} - String() string - Location() errors.Location -} - -type BasicLit struct { - Type rune - Text string - Loc errors.Location -} - -func (lit *BasicLit) Value(vars map[string]interface{}) interface{} { - switch lit.Type { - case scanner.Int: - value, err := strconv.ParseInt(lit.Text, 10, 32) - if err != nil { - panic(err) - } - return int32(value) - - case scanner.Float: - value, err := strconv.ParseFloat(lit.Text, 64) - if err != nil { - panic(err) - } - return value - - case scanner.String: - value, err := strconv.Unquote(lit.Text) - if err != nil { - panic(err) - } - return value - - case scanner.Ident: - switch lit.Text { - case "true": - return true - case "false": - return false - default: - return lit.Text - } - - default: - panic("invalid literal") - } -} - -func (lit *BasicLit) String() string { - return lit.Text -} - -func (lit *BasicLit) Location() errors.Location { - return lit.Loc -} - -type ListLit struct { - Entries []Literal - Loc errors.Location -} - -func (lit *ListLit) Value(vars map[string]interface{}) interface{} { - entries := make([]interface{}, len(lit.Entries)) - for i, entry := range lit.Entries { - entries[i] = entry.Value(vars) - } - return entries -} - -func (lit *ListLit) String() string { - entries := make([]string, len(lit.Entries)) - for i, entry := range lit.Entries { - entries[i] = entry.String() - } - return "[" + strings.Join(entries, ", ") + "]" -} - -func (lit *ListLit) Location() errors.Location { - return lit.Loc -} - -type ObjectLit struct { - Fields []*ObjectLitField - Loc errors.Location -} - -type ObjectLitField struct { - Name Ident - Value Literal -} - -func (lit *ObjectLit) Value(vars map[string]interface{}) interface{} { - fields := make(map[string]interface{}, len(lit.Fields)) - for _, f := range lit.Fields { - fields[f.Name.Name] = f.Value.Value(vars) - } - return fields -} - -func (lit *ObjectLit) String() string { - entries := make([]string, 0, len(lit.Fields)) - for _, f := range lit.Fields { - entries = append(entries, f.Name.Name+": "+f.Value.String()) - } - return "{" + strings.Join(entries, ", ") + "}" -} - -func (lit *ObjectLit) Location() errors.Location { - return lit.Loc -} - -type NullLit struct { - Loc errors.Location -} - -func (lit *NullLit) Value(vars map[string]interface{}) interface{} { - return nil -} - -func (lit *NullLit) String() string { - return "null" -} - -func (lit *NullLit) Location() errors.Location { - return lit.Loc -} - -type Variable struct { - Name string - Loc errors.Location -} - -func (v Variable) Value(vars map[string]interface{}) interface{} { - return vars[v.Name] -} - -func (v Variable) String() string { - return "$" + v.Name -} - -func (v *Variable) Location() errors.Location { - return v.Loc -} 
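// ParseLiteral consumes a single GraphQL value from the lexer: a variable
// (rejected when constOnly is set), a basic scalar token (with the ident
// "null" becoming a NullLit), a negated number, a list, or an input object.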
- -func ParseLiteral(l *Lexer, constOnly bool) Literal { - loc := l.Location() - switch l.Peek() { - case '$': - if constOnly { - l.SyntaxError("variable not allowed") - panic("unreachable") - } - l.ConsumeToken('$') - return &Variable{l.ConsumeIdent(), loc} - - case scanner.Int, scanner.Float, scanner.String, scanner.Ident: - lit := l.ConsumeLiteral() - if lit.Type == scanner.Ident && lit.Text == "null" { - return &NullLit{loc} - } - lit.Loc = loc - return lit - case '-': - l.ConsumeToken('-') - lit := l.ConsumeLiteral() - lit.Text = "-" + lit.Text - lit.Loc = loc - return lit - case '[': - l.ConsumeToken('[') - var list []Literal - for l.Peek() != ']' { - list = append(list, ParseLiteral(l, constOnly)) - } - l.ConsumeToken(']') - return &ListLit{list, loc} - - case '{': - l.ConsumeToken('{') - var fields []*ObjectLitField - for l.Peek() != '}' { - name := l.ConsumeIdentWithLoc() - l.ConsumeToken(':') - value := ParseLiteral(l, constOnly) - fields = append(fields, &ObjectLitField{name, value}) - } - l.ConsumeToken('}') - return &ObjectLit{fields, loc} - - default: - l.SyntaxError("invalid value") - panic("unreachable") - } -} diff --git a/vendor/github.com/neelance/graphql-go/internal/common/types.go b/vendor/github.com/neelance/graphql-go/internal/common/types.go deleted file mode 100644 index 6a017f56..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/common/types.go +++ /dev/null @@ -1,80 +0,0 @@ -package common - -import ( - "github.com/neelance/graphql-go/errors" -) - -type Type interface { - Kind() string - String() string -} - -type List struct { - OfType Type -} - -type NonNull struct { - OfType Type -} - -type TypeName struct { - Ident -} - -func (*List) Kind() string { return "LIST" } -func (*NonNull) Kind() string { return "NON_NULL" } -func (*TypeName) Kind() string { panic("TypeName needs to be resolved to actual type") } - -func (t *List) String() string { return "[" + t.OfType.String() + "]" } -func (t *NonNull) String() string { return t.OfType.String() + "!" } -func (*TypeName) String() string { panic("TypeName needs to be resolved to actual type") } - -func ParseType(l *Lexer) Type { - t := parseNullType(l) - if l.Peek() == '!' 
{ - l.ConsumeToken('!') - return &NonNull{OfType: t} - } - return t -} - -func parseNullType(l *Lexer) Type { - if l.Peek() == '[' { - l.ConsumeToken('[') - ofType := ParseType(l) - l.ConsumeToken(']') - return &List{OfType: ofType} - } - - return &TypeName{Ident: l.ConsumeIdentWithLoc()} -} - -type Resolver func(name string) Type - -func ResolveType(t Type, resolver Resolver) (Type, *errors.QueryError) { - switch t := t.(type) { - case *List: - ofType, err := ResolveType(t.OfType, resolver) - if err != nil { - return nil, err - } - return &List{OfType: ofType}, nil - case *NonNull: - ofType, err := ResolveType(t.OfType, resolver) - if err != nil { - return nil, err - } - return &NonNull{OfType: ofType}, nil - case *TypeName: - refT := resolver(t.Name) - if refT == nil { - err := errors.Errorf("Unknown type %q.", t.Name) - err.Rule = "KnownTypeNames" - err.Locations = []errors.Location{t.Loc} - return nil, err - } - return refT, nil - default: - return t, nil - } -} diff --git a/vendor/github.com/neelance/graphql-go/internal/common/values.go b/vendor/github.com/neelance/graphql-go/internal/common/values.go deleted file mode 100644 index 794f68de..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/common/values.go +++ /dev/null @@ -1,77 +0,0 @@ -package common - -import ( - "github.com/neelance/graphql-go/errors" -) - -type InputValue struct { - Name Ident - Type Type - Default Literal - Desc string - Loc errors.Location - TypeLoc errors.Location -} - -type InputValueList []*InputValue - -func (l InputValueList) Get(name string) *InputValue { - for _, v := range l { - if v.Name.Name == name { - return v - } - } - return nil -} - -func ParseInputValue(l *Lexer) *InputValue { - p := &InputValue{} - p.Loc = l.Location() - p.Desc = l.DescComment() - p.Name = l.ConsumeIdentWithLoc() - l.ConsumeToken(':') - p.TypeLoc = l.Location() - p.Type = ParseType(l) - if l.Peek() == '=' { - l.ConsumeToken('=') - p.Default = ParseLiteral(l, true) - } - return p -} - -type Argument struct { - Name Ident - Value Literal -} - -type ArgumentList []Argument - -func (l ArgumentList) Get(name string) (Literal, bool) { - for _, arg := range l { - if arg.Name.Name == name { - return arg.Value, true - } - } - return nil, false -} - -func (l ArgumentList) MustGet(name string) Literal { - value, ok := l.Get(name) - if !ok { - panic("argument not found") - } - return value -} - -func ParseArguments(l *Lexer) ArgumentList { - var args ArgumentList - l.ConsumeToken('(') - for l.Peek() != ')' { - name := l.ConsumeIdentWithLoc() - l.ConsumeToken(':') - value := ParseLiteral(l, false) - args = append(args, Argument{Name: name, Value: value}) - } - l.ConsumeToken(')') - return args -} diff --git a/vendor/github.com/neelance/graphql-go/internal/exec/exec.go b/vendor/github.com/neelance/graphql-go/internal/exec/exec.go deleted file mode 100644 index 39b6456a..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/exec/exec.go +++ /dev/null @@ -1,313 +0,0 @@ -package exec - -import ( - "bytes" - "context" - "encoding/json" - "reflect" - "sync" - - "github.com/neelance/graphql-go/errors" - "github.com/neelance/graphql-go/internal/common" - "github.com/neelance/graphql-go/internal/exec/resolvable" - "github.com/neelance/graphql-go/internal/exec/selected" - "github.com/neelance/graphql-go/internal/query" - "github.com/neelance/graphql-go/internal/schema" - "github.com/neelance/graphql-go/log" - "github.com/neelance/graphql-go/trace" -) - -type Request struct { - selected.Request - Limiter chan struct{} - Tracer 
trace.Tracer - Logger log.Logger -} - -type fieldResult struct { - name string - value []byte -} - -func (r *Request) handlePanic(ctx context.Context) { - if value := recover(); value != nil { - r.Logger.LogPanic(ctx, value) - r.AddError(makePanicError(value)) - } -} - -func makePanicError(value interface{}) *errors.QueryError { - return errors.Errorf("graphql: panic occurred: %v", value) -} - -func (r *Request) Execute(ctx context.Context, s *resolvable.Schema, op *query.Operation) ([]byte, []*errors.QueryError) { - var out bytes.Buffer - func() { - defer r.handlePanic(ctx) - sels := selected.ApplyOperation(&r.Request, s, op) - r.execSelections(ctx, sels, nil, s.Resolver, &out, op.Type == query.Mutation) - }() - - if err := ctx.Err(); err != nil { - return nil, []*errors.QueryError{errors.Errorf("%s", err)} - } - - return out.Bytes(), r.Errs -} - -type fieldToExec struct { - field *selected.SchemaField - sels []selected.Selection - resolver reflect.Value - out *bytes.Buffer -} - -func (r *Request) execSelections(ctx context.Context, sels []selected.Selection, path *pathSegment, resolver reflect.Value, out *bytes.Buffer, serially bool) { - async := !serially && selected.HasAsyncSel(sels) - - var fields []*fieldToExec - collectFieldsToResolve(sels, resolver, &fields, make(map[string]*fieldToExec)) - - if async { - var wg sync.WaitGroup - wg.Add(len(fields)) - for _, f := range fields { - go func(f *fieldToExec) { - defer wg.Done() - defer r.handlePanic(ctx) - f.out = new(bytes.Buffer) - execFieldSelection(ctx, r, f, &pathSegment{path, f.field.Alias}, true) - }(f) - } - wg.Wait() - } - - out.WriteByte('{') - for i, f := range fields { - if i > 0 { - out.WriteByte(',') - } - out.WriteByte('"') - out.WriteString(f.field.Alias) - out.WriteByte('"') - out.WriteByte(':') - if async { - out.Write(f.out.Bytes()) - continue - } - f.out = out - execFieldSelection(ctx, r, f, &pathSegment{path, f.field.Alias}, false) - } - out.WriteByte('}') -} - -func collectFieldsToResolve(sels []selected.Selection, resolver reflect.Value, fields *[]*fieldToExec, fieldByAlias map[string]*fieldToExec) { - for _, sel := range sels { - switch sel := sel.(type) { - case *selected.SchemaField: - field, ok := fieldByAlias[sel.Alias] - if !ok { // validation already checked for conflict (TODO) - field = &fieldToExec{field: sel, resolver: resolver} - fieldByAlias[sel.Alias] = field - *fields = append(*fields, field) - } - field.sels = append(field.sels, sel.Sels...) 
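		// __typename is answered without calling into the resolver: a meta field
		// with a fixed result is queued, and typeOf (below) walks the registered
		// type assertions to pick the concrete type name.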
- - case *selected.TypenameField: - sf := &selected.SchemaField{ - Field: resolvable.MetaFieldTypename, - Alias: sel.Alias, - FixedResult: reflect.ValueOf(typeOf(sel, resolver)), - } - *fields = append(*fields, &fieldToExec{field: sf, resolver: resolver}) - - case *selected.TypeAssertion: - out := resolver.Method(sel.MethodIndex).Call(nil) - if !out[1].Bool() { - continue - } - collectFieldsToResolve(sel.Sels, out[0], fields, fieldByAlias) - - default: - panic("unreachable") - } - } -} - -func typeOf(tf *selected.TypenameField, resolver reflect.Value) string { - if len(tf.TypeAssertions) == 0 { - return tf.Name - } - for name, a := range tf.TypeAssertions { - out := resolver.Method(a.MethodIndex).Call(nil) - if out[1].Bool() { - return name - } - } - return "" -} - -func execFieldSelection(ctx context.Context, r *Request, f *fieldToExec, path *pathSegment, applyLimiter bool) { - if applyLimiter { - r.Limiter <- struct{}{} - } - - var result reflect.Value - var err *errors.QueryError - - traceCtx, finish := r.Tracer.TraceField(ctx, f.field.TraceLabel, f.field.TypeName, f.field.Name, !f.field.Async, f.field.Args) - defer func() { - finish(err) - }() - - err = func() (err *errors.QueryError) { - defer func() { - if panicValue := recover(); panicValue != nil { - r.Logger.LogPanic(ctx, panicValue) - err = makePanicError(panicValue) - err.Path = path.toSlice() - } - }() - - if f.field.FixedResult.IsValid() { - result = f.field.FixedResult - return nil - } - - if err := traceCtx.Err(); err != nil { - return errors.Errorf("%s", err) // don't execute any more resolvers if context got cancelled - } - - var in []reflect.Value - if f.field.HasContext { - in = append(in, reflect.ValueOf(traceCtx)) - } - if f.field.ArgsPacker != nil { - in = append(in, f.field.PackedArgs) - } - callOut := f.resolver.Method(f.field.MethodIndex).Call(in) - result = callOut[0] - if f.field.HasError && !callOut[1].IsNil() { - resolverErr := callOut[1].Interface().(error) - err := errors.Errorf("%s", resolverErr) - err.Path = path.toSlice() - err.ResolverError = resolverErr - return err - } - return nil - }() - - if applyLimiter { - <-r.Limiter - } - - if err != nil { - r.AddError(err) - f.out.WriteString("null") // TODO handle non-nil - return - } - - r.execSelectionSet(traceCtx, f.sels, f.field.Type, path, result, f.out) -} - -func (r *Request) execSelectionSet(ctx context.Context, sels []selected.Selection, typ common.Type, path *pathSegment, resolver reflect.Value, out *bytes.Buffer) { - t, nonNull := unwrapNonNull(typ) - switch t := t.(type) { - case *schema.Object, *schema.Interface, *schema.Union: - if resolver.Kind() == reflect.Ptr && resolver.IsNil() { - if nonNull { - panic(errors.Errorf("got nil for non-null %q", t)) - } - out.WriteString("null") - return - } - - r.execSelections(ctx, sels, path, resolver, out, false) - return - } - - if !nonNull { - if resolver.IsNil() { - out.WriteString("null") - return - } - resolver = resolver.Elem() - } - - switch t := t.(type) { - case *common.List: - l := resolver.Len() - - if selected.HasAsyncSel(sels) { - var wg sync.WaitGroup - wg.Add(l) - entryouts := make([]bytes.Buffer, l) - for i := 0; i < l; i++ { - go func(i int) { - defer wg.Done() - defer r.handlePanic(ctx) - r.execSelectionSet(ctx, sels, t.OfType, &pathSegment{path, i}, resolver.Index(i), &entryouts[i]) - }(i) - } - wg.Wait() - - out.WriteByte('[') - for i, entryout := range entryouts { - if i > 0 { - out.WriteByte(',') - } - out.Write(entryout.Bytes()) - } - out.WriteByte(']') - return - } - - 
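		// Serial path: this list has no async selections, so entries are
		// executed in order and written directly to the shared output buffer.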
out.WriteByte('[') - for i := 0; i < l; i++ { - if i > 0 { - out.WriteByte(',') - } - r.execSelectionSet(ctx, sels, t.OfType, &pathSegment{path, i}, resolver.Index(i), out) - } - out.WriteByte(']') - - case *schema.Scalar: - v := resolver.Interface() - data, err := json.Marshal(v) - if err != nil { - panic(errors.Errorf("could not marshal %v", v)) - } - out.Write(data) - - case *schema.Enum: - out.WriteByte('"') - out.WriteString(resolver.String()) - out.WriteByte('"') - - default: - panic("unreachable") - } -} - -func unwrapNonNull(t common.Type) (common.Type, bool) { - if nn, ok := t.(*common.NonNull); ok { - return nn.OfType, true - } - return t, false -} - -type marshaler interface { - MarshalJSON() ([]byte, error) -} - -type pathSegment struct { - parent *pathSegment - value interface{} -} - -func (p *pathSegment) toSlice() []interface{} { - if p == nil { - return nil - } - return append(p.parent.toSlice(), p.value) -} diff --git a/vendor/github.com/neelance/graphql-go/internal/exec/packer/packer.go b/vendor/github.com/neelance/graphql-go/internal/exec/packer/packer.go deleted file mode 100644 index 02b9d832..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/exec/packer/packer.go +++ /dev/null @@ -1,367 +0,0 @@ -package packer - -import ( - "fmt" - "math" - "reflect" - "strings" - - "github.com/neelance/graphql-go/errors" - "github.com/neelance/graphql-go/internal/common" - "github.com/neelance/graphql-go/internal/schema" -) - -type packer interface { - Pack(value interface{}) (reflect.Value, error) -} - -type Builder struct { - packerMap map[typePair]*packerMapEntry - structPackers []*StructPacker -} - -type typePair struct { - graphQLType common.Type - resolverType reflect.Type -} - -type packerMapEntry struct { - packer packer - targets []*packer -} - -func NewBuilder() *Builder { - return &Builder{ - packerMap: make(map[typePair]*packerMapEntry), - } -} - -func (b *Builder) Finish() error { - for _, entry := range b.packerMap { - for _, target := range entry.targets { - *target = entry.packer - } - } - - for _, p := range b.structPackers { - p.defaultStruct = reflect.New(p.structType).Elem() - for _, f := range p.fields { - if defaultVal := f.field.Default; defaultVal != nil { - v, err := f.fieldPacker.Pack(defaultVal.Value(nil)) - if err != nil { - return err - } - p.defaultStruct.FieldByIndex(f.fieldIndex).Set(v) - } - } - } - - return nil -} - -func (b *Builder) assignPacker(target *packer, schemaType common.Type, reflectType reflect.Type) error { - k := typePair{schemaType, reflectType} - ref, ok := b.packerMap[k] - if !ok { - ref = &packerMapEntry{} - b.packerMap[k] = ref - var err error - ref.packer, err = b.makePacker(schemaType, reflectType) - if err != nil { - return err - } - } - ref.targets = append(ref.targets, target) - return nil -} - -func (b *Builder) makePacker(schemaType common.Type, reflectType reflect.Type) (packer, error) { - t, nonNull := unwrapNonNull(schemaType) - if !nonNull { - if reflectType.Kind() != reflect.Ptr { - return nil, fmt.Errorf("%s is not a pointer", reflectType) - } - elemType := reflectType.Elem() - addPtr := true - if _, ok := t.(*schema.InputObject); ok { - elemType = reflectType // keep pointer for input objects - addPtr = false - } - elem, err := b.makeNonNullPacker(t, elemType) - if err != nil { - return nil, err - } - return &nullPacker{ - elemPacker: elem, - valueType: reflectType, - addPtr: addPtr, - }, nil - } - - return b.makeNonNullPacker(t, reflectType) -} - -func (b *Builder) makeNonNullPacker(schemaType 
common.Type, reflectType reflect.Type) (packer, error) { - if u, ok := reflect.New(reflectType).Interface().(Unmarshaler); ok { - if !u.ImplementsGraphQLType(schemaType.String()) { - return nil, fmt.Errorf("can not unmarshal %s into %s", schemaType, reflectType) - } - return &unmarshalerPacker{ - ValueType: reflectType, - }, nil - } - - switch t := schemaType.(type) { - case *schema.Scalar: - return &ValuePacker{ - ValueType: reflectType, - }, nil - - case *schema.Enum: - want := reflect.TypeOf("") - if reflectType != want { - return nil, fmt.Errorf("wrong type, expected %s", want) - } - return &ValuePacker{ - ValueType: reflectType, - }, nil - - case *schema.InputObject: - e, err := b.MakeStructPacker(t.Values, reflectType) - if err != nil { - return nil, err - } - return e, nil - - case *common.List: - if reflectType.Kind() != reflect.Slice { - return nil, fmt.Errorf("expected slice, got %s", reflectType) - } - p := &listPacker{ - sliceType: reflectType, - } - if err := b.assignPacker(&p.elem, t.OfType, reflectType.Elem()); err != nil { - return nil, err - } - return p, nil - - case *schema.Object, *schema.Interface, *schema.Union: - return nil, fmt.Errorf("type of kind %s can not be used as input", t.Kind()) - - default: - panic("unreachable") - } -} - -func (b *Builder) MakeStructPacker(values common.InputValueList, typ reflect.Type) (*StructPacker, error) { - structType := typ - usePtr := false - if typ.Kind() == reflect.Ptr { - structType = typ.Elem() - usePtr = true - } - if structType.Kind() != reflect.Struct { - return nil, fmt.Errorf("expected struct or pointer to struct, got %s", typ) - } - - var fields []*structPackerField - for _, v := range values { - fe := &structPackerField{field: v} - fx := func(n string) bool { - return strings.EqualFold(stripUnderscore(n), stripUnderscore(v.Name.Name)) - } - - sf, ok := structType.FieldByNameFunc(fx) - if !ok { - return nil, fmt.Errorf("missing argument %q", v.Name) - } - if sf.PkgPath != "" { - return nil, fmt.Errorf("field %q must be exported", sf.Name) - } - fe.fieldIndex = sf.Index - - ft := v.Type - if v.Default != nil { - ft, _ = unwrapNonNull(ft) - ft = &common.NonNull{OfType: ft} - } - - if err := b.assignPacker(&fe.fieldPacker, ft, sf.Type); err != nil { - return nil, fmt.Errorf("field %q: %s", sf.Name, err) - } - - fields = append(fields, fe) - } - - p := &StructPacker{ - structType: structType, - usePtr: usePtr, - fields: fields, - } - b.structPackers = append(b.structPackers, p) - return p, nil -} - -type StructPacker struct { - structType reflect.Type - usePtr bool - defaultStruct reflect.Value - fields []*structPackerField -} - -type structPackerField struct { - field *common.InputValue - fieldIndex []int - fieldPacker packer -} - -func (p *StructPacker) Pack(value interface{}) (reflect.Value, error) { - if value == nil { - return reflect.Value{}, errors.Errorf("got null for non-null") - } - - values := value.(map[string]interface{}) - v := reflect.New(p.structType) - v.Elem().Set(p.defaultStruct) - for _, f := range p.fields { - if value, ok := values[f.field.Name.Name]; ok { - packed, err := f.fieldPacker.Pack(value) - if err != nil { - return reflect.Value{}, err - } - v.Elem().FieldByIndex(f.fieldIndex).Set(packed) - } - } - if !p.usePtr { - return v.Elem(), nil - } - return v, nil -} - -type listPacker struct { - sliceType reflect.Type - elem packer -} - -func (e *listPacker) Pack(value interface{}) (reflect.Value, error) { - list, ok := value.([]interface{}) - if !ok { - list = []interface{}{value} - } - - v := 
reflect.MakeSlice(e.sliceType, len(list), len(list)) - for i := range list { - packed, err := e.elem.Pack(list[i]) - if err != nil { - return reflect.Value{}, err - } - v.Index(i).Set(packed) - } - return v, nil -} - -type nullPacker struct { - elemPacker packer - valueType reflect.Type - addPtr bool -} - -func (p *nullPacker) Pack(value interface{}) (reflect.Value, error) { - if value == nil { - return reflect.Zero(p.valueType), nil - } - - v, err := p.elemPacker.Pack(value) - if err != nil { - return reflect.Value{}, err - } - - if p.addPtr { - ptr := reflect.New(p.valueType.Elem()) - ptr.Elem().Set(v) - return ptr, nil - } - - return v, nil -} - -type ValuePacker struct { - ValueType reflect.Type -} - -func (p *ValuePacker) Pack(value interface{}) (reflect.Value, error) { - if value == nil { - return reflect.Value{}, errors.Errorf("got null for non-null") - } - - coerced, err := unmarshalInput(p.ValueType, value) - if err != nil { - return reflect.Value{}, fmt.Errorf("could not unmarshal %#v (%T) into %s: %s", value, value, p.ValueType, err) - } - return reflect.ValueOf(coerced), nil -} - -type unmarshalerPacker struct { - ValueType reflect.Type -} - -func (p *unmarshalerPacker) Pack(value interface{}) (reflect.Value, error) { - if value == nil { - return reflect.Value{}, errors.Errorf("got null for non-null") - } - - v := reflect.New(p.ValueType) - if err := v.Interface().(Unmarshaler).UnmarshalGraphQL(value); err != nil { - return reflect.Value{}, err - } - return v.Elem(), nil -} - -type Unmarshaler interface { - ImplementsGraphQLType(name string) bool - UnmarshalGraphQL(input interface{}) error -} - -func unmarshalInput(typ reflect.Type, input interface{}) (interface{}, error) { - if reflect.TypeOf(input) == typ { - return input, nil - } - - switch typ.Kind() { - case reflect.Int32: - switch input := input.(type) { - case int: - if input < math.MinInt32 || input > math.MaxInt32 { - return nil, fmt.Errorf("not a 32-bit integer") - } - return int32(input), nil - case float64: - coerced := int32(input) - if input < math.MinInt32 || input > math.MaxInt32 || float64(coerced) != input { - return nil, fmt.Errorf("not a 32-bit integer") - } - return coerced, nil - } - - case reflect.Float64: - switch input := input.(type) { - case int32: - return float64(input), nil - case int: - return float64(input), nil - } - } - - return nil, fmt.Errorf("incompatible type") -} - -func unwrapNonNull(t common.Type) (common.Type, bool) { - if nn, ok := t.(*common.NonNull); ok { - return nn.OfType, true - } - return t, false -} - -func stripUnderscore(s string) string { - return strings.Replace(s, "_", "", -1) -} diff --git a/vendor/github.com/neelance/graphql-go/internal/exec/resolvable/meta.go b/vendor/github.com/neelance/graphql-go/internal/exec/resolvable/meta.go deleted file mode 100644 index f9b0bb92..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/exec/resolvable/meta.go +++ /dev/null @@ -1,58 +0,0 @@ -package resolvable - -import ( - "fmt" - "reflect" - - "github.com/neelance/graphql-go/internal/common" - "github.com/neelance/graphql-go/internal/schema" - "github.com/neelance/graphql-go/introspection" -) - -var MetaSchema *Object -var MetaType *Object - -func init() { - var err error - b := newBuilder(schema.Meta) - - metaSchema := schema.Meta.Types["__Schema"].(*schema.Object) - MetaSchema, err = b.makeObjectExec(metaSchema.Name, metaSchema.Fields, nil, false, reflect.TypeOf(&introspection.Schema{})) - if err != nil { - panic(err) - } - - metaType := 
schema.Meta.Types["__Type"].(*schema.Object) - MetaType, err = b.makeObjectExec(metaType.Name, metaType.Fields, nil, false, reflect.TypeOf(&introspection.Type{})) - if err != nil { - panic(err) - } - - if err := b.finish(); err != nil { - panic(err) - } -} - -var MetaFieldTypename = Field{ - Field: schema.Field{ - Name: "__typename", - Type: &common.NonNull{OfType: schema.Meta.Types["String"]}, - }, - TraceLabel: fmt.Sprintf("GraphQL field: __typename"), -} - -var MetaFieldSchema = Field{ - Field: schema.Field{ - Name: "__schema", - Type: schema.Meta.Types["__Schema"], - }, - TraceLabel: fmt.Sprintf("GraphQL field: __schema"), -} - -var MetaFieldType = Field{ - Field: schema.Field{ - Name: "__type", - Type: schema.Meta.Types["__Type"], - }, - TraceLabel: fmt.Sprintf("GraphQL field: __type"), -} diff --git a/vendor/github.com/neelance/graphql-go/internal/exec/resolvable/resolvable.go b/vendor/github.com/neelance/graphql-go/internal/exec/resolvable/resolvable.go deleted file mode 100644 index c681cf20..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/exec/resolvable/resolvable.go +++ /dev/null @@ -1,331 +0,0 @@ -package resolvable - -import ( - "context" - "fmt" - "reflect" - "strings" - - "github.com/neelance/graphql-go/internal/common" - "github.com/neelance/graphql-go/internal/exec/packer" - "github.com/neelance/graphql-go/internal/schema" -) - -type Schema struct { - schema.Schema - Query Resolvable - Mutation Resolvable - Resolver reflect.Value -} - -type Resolvable interface { - isResolvable() -} - -type Object struct { - Name string - Fields map[string]*Field - TypeAssertions map[string]*TypeAssertion -} - -type Field struct { - schema.Field - TypeName string - MethodIndex int - HasContext bool - ArgsPacker *packer.StructPacker - HasError bool - ValueExec Resolvable - TraceLabel string -} - -type TypeAssertion struct { - MethodIndex int - TypeExec Resolvable -} - -type List struct { - Elem Resolvable -} - -type Scalar struct{} - -func (*Object) isResolvable() {} -func (*List) isResolvable() {} -func (*Scalar) isResolvable() {} - -func ApplyResolver(s *schema.Schema, resolver interface{}) (*Schema, error) { - b := newBuilder(s) - - var query, mutation Resolvable - - if t, ok := s.EntryPoints["query"]; ok { - if err := b.assignExec(&query, t, reflect.TypeOf(resolver)); err != nil { - return nil, err - } - } - - if t, ok := s.EntryPoints["mutation"]; ok { - if err := b.assignExec(&mutation, t, reflect.TypeOf(resolver)); err != nil { - return nil, err - } - } - - if err := b.finish(); err != nil { - return nil, err - } - - return &Schema{ - Schema: *s, - Resolver: reflect.ValueOf(resolver), - Query: query, - Mutation: mutation, - }, nil -} - -type execBuilder struct { - schema *schema.Schema - resMap map[typePair]*resMapEntry - packerBuilder *packer.Builder -} - -type typePair struct { - graphQLType common.Type - resolverType reflect.Type -} - -type resMapEntry struct { - exec Resolvable - targets []*Resolvable -} - -func newBuilder(s *schema.Schema) *execBuilder { - return &execBuilder{ - schema: s, - resMap: make(map[typePair]*resMapEntry), - packerBuilder: packer.NewBuilder(), - } -} - -func (b *execBuilder) finish() error { - for _, entry := range b.resMap { - for _, target := range entry.targets { - *target = entry.exec - } - } - - return b.packerBuilder.Finish() -} - -func (b *execBuilder) assignExec(target *Resolvable, t common.Type, resolverType reflect.Type) error { - k := typePair{t, resolverType} - ref, ok := b.resMap[k] - if !ok { - ref = &resMapEntry{} - 
b.resMap[k] = ref - var err error - ref.exec, err = b.makeExec(t, resolverType) - if err != nil { - return err - } - } - ref.targets = append(ref.targets, target) - return nil -} - -func (b *execBuilder) makeExec(t common.Type, resolverType reflect.Type) (Resolvable, error) { - var nonNull bool - t, nonNull = unwrapNonNull(t) - - switch t := t.(type) { - case *schema.Object: - return b.makeObjectExec(t.Name, t.Fields, nil, nonNull, resolverType) - - case *schema.Interface: - return b.makeObjectExec(t.Name, t.Fields, t.PossibleTypes, nonNull, resolverType) - - case *schema.Union: - return b.makeObjectExec(t.Name, nil, t.PossibleTypes, nonNull, resolverType) - } - - if !nonNull { - if resolverType.Kind() != reflect.Ptr { - return nil, fmt.Errorf("%s is not a pointer", resolverType) - } - resolverType = resolverType.Elem() - } - - switch t := t.(type) { - case *schema.Scalar: - return makeScalarExec(t, resolverType) - - case *schema.Enum: - return &Scalar{}, nil - - case *common.List: - if resolverType.Kind() != reflect.Slice { - return nil, fmt.Errorf("%s is not a slice", resolverType) - } - e := &List{} - if err := b.assignExec(&e.Elem, t.OfType, resolverType.Elem()); err != nil { - return nil, err - } - return e, nil - - default: - panic("invalid type") - } -} - -func makeScalarExec(t *schema.Scalar, resolverType reflect.Type) (Resolvable, error) { - implementsType := false - switch r := reflect.New(resolverType).Interface().(type) { - case *int32: - implementsType = (t.Name == "Int") - case *float64: - implementsType = (t.Name == "Float") - case *string: - implementsType = (t.Name == "String") - case *bool: - implementsType = (t.Name == "Boolean") - case packer.Unmarshaler: - implementsType = r.ImplementsGraphQLType(t.Name) - } - if !implementsType { - return nil, fmt.Errorf("can not use %s as %s", resolverType, t.Name) - } - return &Scalar{}, nil -} - -func (b *execBuilder) makeObjectExec(typeName string, fields schema.FieldList, possibleTypes []*schema.Object, nonNull bool, resolverType reflect.Type) (*Object, error) { - if !nonNull { - if resolverType.Kind() != reflect.Ptr && resolverType.Kind() != reflect.Interface { - return nil, fmt.Errorf("%s is not a pointer or interface", resolverType) - } - } - - methodHasReceiver := resolverType.Kind() != reflect.Interface - - Fields := make(map[string]*Field) - for _, f := range fields { - methodIndex := findMethod(resolverType, f.Name) - if methodIndex == -1 { - hint := "" - if findMethod(reflect.PtrTo(resolverType), f.Name) != -1 { - hint = " (hint: the method exists on the pointer type)" - } - return nil, fmt.Errorf("%s does not resolve %q: missing method for field %q%s", resolverType, typeName, f.Name, hint) - } - - m := resolverType.Method(methodIndex) - fe, err := b.makeFieldExec(typeName, f, m, methodIndex, methodHasReceiver) - if err != nil { - return nil, fmt.Errorf("%s\n\treturned by (%s).%s", err, resolverType, m.Name) - } - Fields[f.Name] = fe - } - - typeAssertions := make(map[string]*TypeAssertion) - for _, impl := range possibleTypes { - methodIndex := findMethod(resolverType, "to"+impl.Name) - if methodIndex == -1 { - return nil, fmt.Errorf("%s does not resolve %q: missing method %q to convert to %q", resolverType, typeName, "to"+impl.Name, impl.Name) - } - if resolverType.Method(methodIndex).Type.NumOut() != 2 { - return nil, fmt.Errorf("%s does not resolve %q: method %q should return a value and a bool indicating success", resolverType, typeName, "to"+impl.Name) - } - a := &TypeAssertion{ - MethodIndex: methodIndex, - } - if 
err := b.assignExec(&a.TypeExec, impl, resolverType.Method(methodIndex).Type.Out(0)); err != nil { - return nil, err - } - typeAssertions[impl.Name] = a - } - - return &Object{ - Name: typeName, - Fields: Fields, - TypeAssertions: typeAssertions, - }, nil -} - -var contextType = reflect.TypeOf((*context.Context)(nil)).Elem() -var errorType = reflect.TypeOf((*error)(nil)).Elem() - -func (b *execBuilder) makeFieldExec(typeName string, f *schema.Field, m reflect.Method, methodIndex int, methodHasReceiver bool) (*Field, error) { - in := make([]reflect.Type, m.Type.NumIn()) - for i := range in { - in[i] = m.Type.In(i) - } - if methodHasReceiver { - in = in[1:] // first parameter is receiver - } - - hasContext := len(in) > 0 && in[0] == contextType - if hasContext { - in = in[1:] - } - - var argsPacker *packer.StructPacker - if len(f.Args) > 0 { - if len(in) == 0 { - return nil, fmt.Errorf("must have parameter for field arguments") - } - var err error - argsPacker, err = b.packerBuilder.MakeStructPacker(f.Args, in[0]) - if err != nil { - return nil, err - } - in = in[1:] - } - - if len(in) > 0 { - return nil, fmt.Errorf("too many parameters") - } - - if m.Type.NumOut() > 2 { - return nil, fmt.Errorf("too many return values") - } - - hasError := m.Type.NumOut() == 2 - if hasError { - if m.Type.Out(1) != errorType { - return nil, fmt.Errorf(`must have "error" as its second return value`) - } - } - - fe := &Field{ - Field: *f, - TypeName: typeName, - MethodIndex: methodIndex, - HasContext: hasContext, - ArgsPacker: argsPacker, - HasError: hasError, - TraceLabel: fmt.Sprintf("GraphQL field: %s.%s", typeName, f.Name), - } - if err := b.assignExec(&fe.ValueExec, f.Type, m.Type.Out(0)); err != nil { - return nil, err - } - return fe, nil -} - -func findMethod(t reflect.Type, name string) int { - for i := 0; i < t.NumMethod(); i++ { - if strings.EqualFold(stripUnderscore(name), stripUnderscore(t.Method(i).Name)) { - return i - } - } - return -1 -} - -func unwrapNonNull(t common.Type) (common.Type, bool) { - if nn, ok := t.(*common.NonNull); ok { - return nn.OfType, true - } - return t, false -} - -func stripUnderscore(s string) string { - return strings.Replace(s, "_", "", -1) -} diff --git a/vendor/github.com/neelance/graphql-go/internal/exec/selected/selected.go b/vendor/github.com/neelance/graphql-go/internal/exec/selected/selected.go deleted file mode 100644 index eecdcf38..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/exec/selected/selected.go +++ /dev/null @@ -1,238 +0,0 @@ -package selected - -import ( - "fmt" - "reflect" - "sync" - - "github.com/neelance/graphql-go/errors" - "github.com/neelance/graphql-go/internal/common" - "github.com/neelance/graphql-go/internal/exec/packer" - "github.com/neelance/graphql-go/internal/exec/resolvable" - "github.com/neelance/graphql-go/internal/query" - "github.com/neelance/graphql-go/internal/schema" - "github.com/neelance/graphql-go/introspection" -) - -type Request struct { - Schema *schema.Schema - Doc *query.Document - Vars map[string]interface{} - Mu sync.Mutex - Errs []*errors.QueryError -} - -func (r *Request) AddError(err *errors.QueryError) { - r.Mu.Lock() - r.Errs = append(r.Errs, err) - r.Mu.Unlock() -} - -func ApplyOperation(r *Request, s *resolvable.Schema, op *query.Operation) []Selection { - var obj *resolvable.Object - switch op.Type { - case query.Query: - obj = s.Query.(*resolvable.Object) - case query.Mutation: - obj = s.Mutation.(*resolvable.Object) - } - return applySelectionSet(r, obj, op.Selections) -} - -type Selection 
interface { - isSelection() -} - -type SchemaField struct { - resolvable.Field - Alias string - Args map[string]interface{} - PackedArgs reflect.Value - Sels []Selection - Async bool - FixedResult reflect.Value -} - -type TypeAssertion struct { - resolvable.TypeAssertion - Sels []Selection -} - -type TypenameField struct { - resolvable.Object - Alias string -} - -func (*SchemaField) isSelection() {} -func (*TypeAssertion) isSelection() {} -func (*TypenameField) isSelection() {} - -func applySelectionSet(r *Request, e *resolvable.Object, sels []query.Selection) (flattenedSels []Selection) { - for _, sel := range sels { - switch sel := sel.(type) { - case *query.Field: - field := sel - if skipByDirective(r, field.Directives) { - continue - } - - switch field.Name.Name { - case "__typename": - flattenedSels = append(flattenedSels, &TypenameField{ - Object: *e, - Alias: field.Alias.Name, - }) - - case "__schema": - flattenedSels = append(flattenedSels, &SchemaField{ - Field: resolvable.MetaFieldSchema, - Alias: field.Alias.Name, - Sels: applySelectionSet(r, resolvable.MetaSchema, field.Selections), - Async: true, - FixedResult: reflect.ValueOf(introspection.WrapSchema(r.Schema)), - }) - - case "__type": - p := packer.ValuePacker{ValueType: reflect.TypeOf("")} - v, err := p.Pack(field.Arguments.MustGet("name").Value(r.Vars)) - if err != nil { - r.AddError(errors.Errorf("%s", err)) - return nil - } - - t, ok := r.Schema.Types[v.String()] - if !ok { - return nil - } - - flattenedSels = append(flattenedSels, &SchemaField{ - Field: resolvable.MetaFieldType, - Alias: field.Alias.Name, - Sels: applySelectionSet(r, resolvable.MetaType, field.Selections), - Async: true, - FixedResult: reflect.ValueOf(introspection.WrapType(t)), - }) - - default: - fe := e.Fields[field.Name.Name] - - var args map[string]interface{} - var packedArgs reflect.Value - if fe.ArgsPacker != nil { - args = make(map[string]interface{}) - for _, arg := range field.Arguments { - args[arg.Name.Name] = arg.Value.Value(r.Vars) - } - var err error - packedArgs, err = fe.ArgsPacker.Pack(args) - if err != nil { - r.AddError(errors.Errorf("%s", err)) - return - } - } - - fieldSels := applyField(r, fe.ValueExec, field.Selections) - flattenedSels = append(flattenedSels, &SchemaField{ - Field: *fe, - Alias: field.Alias.Name, - Args: args, - PackedArgs: packedArgs, - Sels: fieldSels, - Async: fe.HasContext || fe.ArgsPacker != nil || fe.HasError || HasAsyncSel(fieldSels), - }) - } - - case *query.InlineFragment: - frag := sel - if skipByDirective(r, frag.Directives) { - continue - } - flattenedSels = append(flattenedSels, applyFragment(r, e, &frag.Fragment)...) - - case *query.FragmentSpread: - spread := sel - if skipByDirective(r, spread.Directives) { - continue - } - flattenedSels = append(flattenedSels, applyFragment(r, e, &r.Doc.Fragments.Get(spread.Name.Name).Fragment)...) 
- - default: - panic("invalid type") - } - } - return -} - -func applyFragment(r *Request, e *resolvable.Object, frag *query.Fragment) []Selection { - if frag.On.Name != "" && frag.On.Name != e.Name { - a, ok := e.TypeAssertions[frag.On.Name] - if !ok { - panic(fmt.Errorf("%q does not implement %q", frag.On, e.Name)) // TODO proper error handling - } - - return []Selection{&TypeAssertion{ - TypeAssertion: *a, - Sels: applySelectionSet(r, a.TypeExec.(*resolvable.Object), frag.Selections), - }} - } - return applySelectionSet(r, e, frag.Selections) -} - -func applyField(r *Request, e resolvable.Resolvable, sels []query.Selection) []Selection { - switch e := e.(type) { - case *resolvable.Object: - return applySelectionSet(r, e, sels) - case *resolvable.List: - return applyField(r, e.Elem, sels) - case *resolvable.Scalar: - return nil - default: - panic("unreachable") - } -} - -func skipByDirective(r *Request, directives common.DirectiveList) bool { - if d := directives.Get("skip"); d != nil { - p := packer.ValuePacker{ValueType: reflect.TypeOf(false)} - v, err := p.Pack(d.Args.MustGet("if").Value(r.Vars)) - if err != nil { - r.AddError(errors.Errorf("%s", err)) - } - if err == nil && v.Bool() { - return true - } - } - - if d := directives.Get("include"); d != nil { - p := packer.ValuePacker{ValueType: reflect.TypeOf(false)} - v, err := p.Pack(d.Args.MustGet("if").Value(r.Vars)) - if err != nil { - r.AddError(errors.Errorf("%s", err)) - } - if err == nil && !v.Bool() { - return true - } - } - - return false -} - -func HasAsyncSel(sels []Selection) bool { - for _, sel := range sels { - switch sel := sel.(type) { - case *SchemaField: - if sel.Async { - return true - } - case *TypeAssertion: - if HasAsyncSel(sel.Sels) { - return true - } - case *TypenameField: - // sync - default: - panic("unreachable") - } - } - return false -} diff --git a/vendor/github.com/neelance/graphql-go/internal/query/query.go b/vendor/github.com/neelance/graphql-go/internal/query/query.go deleted file mode 100644 index f11b1b77..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/query/query.go +++ /dev/null @@ -1,240 +0,0 @@ -package query - -import ( - "fmt" - "strings" - "text/scanner" - - "github.com/neelance/graphql-go/errors" - "github.com/neelance/graphql-go/internal/common" -) - -type Document struct { - Operations OperationList - Fragments FragmentList -} - -type OperationList []*Operation - -func (l OperationList) Get(name string) *Operation { - for _, f := range l { - if f.Name.Name == name { - return f - } - } - return nil -} - -type FragmentList []*FragmentDecl - -func (l FragmentList) Get(name string) *FragmentDecl { - for _, f := range l { - if f.Name.Name == name { - return f - } - } - return nil -} - -type Operation struct { - Type OperationType - Name common.Ident - Vars common.InputValueList - Selections []Selection - Directives common.DirectiveList - Loc errors.Location -} - -type OperationType string - -const ( - Query OperationType = "QUERY" - Mutation = "MUTATION" - Subscription = "SUBSCRIPTION" -) - -type Fragment struct { - On common.TypeName - Selections []Selection -} - -type FragmentDecl struct { - Fragment - Name common.Ident - Directives common.DirectiveList - Loc errors.Location -} - -type Selection interface { - isSelection() -} - -type Field struct { - Alias common.Ident - Name common.Ident - Arguments common.ArgumentList - Directives common.DirectiveList - Selections []Selection - SelectionSetLoc errors.Location -} - -type InlineFragment struct { - Fragment - Directives 
common.DirectiveList - Loc errors.Location -} - -type FragmentSpread struct { - Name common.Ident - Directives common.DirectiveList - Loc errors.Location -} - -func (Field) isSelection() {} -func (InlineFragment) isSelection() {} -func (FragmentSpread) isSelection() {} - -func Parse(queryString string) (*Document, *errors.QueryError) { - sc := &scanner.Scanner{ - Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings, - } - sc.Init(strings.NewReader(queryString)) - - l := common.New(sc) - var doc *Document - err := l.CatchSyntaxError(func() { - doc = parseDocument(l) - }) - if err != nil { - return nil, err - } - - return doc, nil -} - -func parseDocument(l *common.Lexer) *Document { - d := &Document{} - for l.Peek() != scanner.EOF { - if l.Peek() == '{' { - op := &Operation{Type: Query, Loc: l.Location()} - op.Selections = parseSelectionSet(l) - d.Operations = append(d.Operations, op) - continue - } - - loc := l.Location() - switch x := l.ConsumeIdent(); x { - case "query": - op := parseOperation(l, Query) - op.Loc = loc - d.Operations = append(d.Operations, op) - - case "mutation": - d.Operations = append(d.Operations, parseOperation(l, Mutation)) - - case "subscription": - d.Operations = append(d.Operations, parseOperation(l, Subscription)) - - case "fragment": - frag := parseFragment(l) - frag.Loc = loc - d.Fragments = append(d.Fragments, frag) - - default: - l.SyntaxError(fmt.Sprintf(`unexpected %q, expecting "fragment"`, x)) - } - } - return d -} - -func parseOperation(l *common.Lexer, opType OperationType) *Operation { - op := &Operation{Type: opType} - op.Name.Loc = l.Location() - if l.Peek() == scanner.Ident { - op.Name = l.ConsumeIdentWithLoc() - } - op.Directives = common.ParseDirectives(l) - if l.Peek() == '(' { - l.ConsumeToken('(') - for l.Peek() != ')' { - loc := l.Location() - l.ConsumeToken('$') - iv := common.ParseInputValue(l) - iv.Loc = loc - op.Vars = append(op.Vars, iv) - } - l.ConsumeToken(')') - } - op.Selections = parseSelectionSet(l) - return op -} - -func parseFragment(l *common.Lexer) *FragmentDecl { - f := &FragmentDecl{} - f.Name = l.ConsumeIdentWithLoc() - l.ConsumeKeyword("on") - f.On = common.TypeName{Ident: l.ConsumeIdentWithLoc()} - f.Directives = common.ParseDirectives(l) - f.Selections = parseSelectionSet(l) - return f -} - -func parseSelectionSet(l *common.Lexer) []Selection { - var sels []Selection - l.ConsumeToken('{') - for l.Peek() != '}' { - sels = append(sels, parseSelection(l)) - } - l.ConsumeToken('}') - return sels -} - -func parseSelection(l *common.Lexer) Selection { - if l.Peek() == '.' 
{ - return parseSpread(l) - } - return parseField(l) -} - -func parseField(l *common.Lexer) *Field { - f := &Field{} - f.Alias = l.ConsumeIdentWithLoc() - f.Name = f.Alias - if l.Peek() == ':' { - l.ConsumeToken(':') - f.Name = l.ConsumeIdentWithLoc() - } - if l.Peek() == '(' { - f.Arguments = common.ParseArguments(l) - } - f.Directives = common.ParseDirectives(l) - if l.Peek() == '{' { - f.SelectionSetLoc = l.Location() - f.Selections = parseSelectionSet(l) - } - return f -} - -func parseSpread(l *common.Lexer) Selection { - loc := l.Location() - l.ConsumeToken('.') - l.ConsumeToken('.') - l.ConsumeToken('.') - - f := &InlineFragment{Loc: loc} - if l.Peek() == scanner.Ident { - ident := l.ConsumeIdentWithLoc() - if ident.Name != "on" { - fs := &FragmentSpread{ - Name: ident, - Loc: loc, - } - fs.Directives = common.ParseDirectives(l) - return fs - } - f.On = common.TypeName{Ident: l.ConsumeIdentWithLoc()} - } - f.Directives = common.ParseDirectives(l) - f.Selections = parseSelectionSet(l) - return f -} diff --git a/vendor/github.com/neelance/graphql-go/internal/schema/meta.go b/vendor/github.com/neelance/graphql-go/internal/schema/meta.go deleted file mode 100644 index b48bf7ac..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/schema/meta.go +++ /dev/null @@ -1,190 +0,0 @@ -package schema - -var Meta *Schema - -func init() { - Meta = &Schema{} // bootstrap - Meta = New() - if err := Meta.Parse(metaSrc); err != nil { - panic(err) - } -} - -var metaSrc = ` - # The ` + "`" + `Int` + "`" + ` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1. - scalar Int - - # The ` + "`" + `Float` + "`" + ` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). - scalar Float - - # The ` + "`" + `String` + "`" + ` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text. - scalar String - - # The ` + "`" + `Boolean` + "`" + ` scalar type represents ` + "`" + `true` + "`" + ` or ` + "`" + `false` + "`" + `. - scalar Boolean - - # The ` + "`" + `ID` + "`" + ` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as ` + "`" + `"4"` + "`" + `) or integer (such as ` + "`" + `4` + "`" + `) input value will be accepted as an ID. - scalar ID - - # Directs the executor to include this field or fragment only when the ` + "`" + `if` + "`" + ` argument is true. - directive @include( - # Included when true. - if: Boolean! - ) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT - - # Directs the executor to skip this field or fragment when the ` + "`" + `if` + "`" + ` argument is true. - directive @skip( - # Skipped when true. - if: Boolean! - ) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT - - # Marks an element of a GraphQL schema as no longer supported. - directive @deprecated( - # Explains why this element was deprecated, usually also including a suggestion - # for how to access supported similar data. Formatted in - # [Markdown](https://daringfireball.net/projects/markdown/). 
- reason: String = "No longer supported" - ) on FIELD_DEFINITION | ENUM_VALUE - - # A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document. - # - # In some cases, you need to provide options to alter GraphQL's execution behavior - # in ways field arguments will not suffice, such as conditionally including or - # skipping a field. Directives provide this by describing additional information - # to the executor. - type __Directive { - name: String! - description: String - locations: [__DirectiveLocation!]! - args: [__InputValue!]! - } - - # A Directive can be adjacent to many parts of the GraphQL language, a - # __DirectiveLocation describes one such possible adjacencies. - enum __DirectiveLocation { - # Location adjacent to a query operation. - QUERY - # Location adjacent to a mutation operation. - MUTATION - # Location adjacent to a subscription operation. - SUBSCRIPTION - # Location adjacent to a field. - FIELD - # Location adjacent to a fragment definition. - FRAGMENT_DEFINITION - # Location adjacent to a fragment spread. - FRAGMENT_SPREAD - # Location adjacent to an inline fragment. - INLINE_FRAGMENT - # Location adjacent to a schema definition. - SCHEMA - # Location adjacent to a scalar definition. - SCALAR - # Location adjacent to an object type definition. - OBJECT - # Location adjacent to a field definition. - FIELD_DEFINITION - # Location adjacent to an argument definition. - ARGUMENT_DEFINITION - # Location adjacent to an interface definition. - INTERFACE - # Location adjacent to a union definition. - UNION - # Location adjacent to an enum definition. - ENUM - # Location adjacent to an enum value definition. - ENUM_VALUE - # Location adjacent to an input object type definition. - INPUT_OBJECT - # Location adjacent to an input object field definition. - INPUT_FIELD_DEFINITION - } - - # One possible value for a given Enum. Enum values are unique values, not a - # placeholder for a string or numeric value. However an Enum value is returned in - # a JSON response as a string. - type __EnumValue { - name: String! - description: String - isDeprecated: Boolean! - deprecationReason: String - } - - # Object and Interface types are described by a list of Fields, each of which has - # a name, potentially a list of arguments, and a return type. - type __Field { - name: String! - description: String - args: [__InputValue!]! - type: __Type! - isDeprecated: Boolean! - deprecationReason: String - } - - # Arguments provided to Fields or Directives and the input fields of an - # InputObject are represented as Input Values which describe their type and - # optionally a default value. - type __InputValue { - name: String! - description: String - type: __Type! - # A GraphQL-formatted string representing the default value for this input value. - defaultValue: String - } - - # A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all - # available types and directives on the server, as well as the entry points for - # query, mutation, and subscription operations. - type __Schema { - # A list of all types supported by this server. - types: [__Type!]! - # The type that query operations will be rooted at. - queryType: __Type! - # If this server supports mutation, the type that mutation operations will be rooted at. - mutationType: __Type - # If this server support subscription, the type that subscription operations will be rooted at. - subscriptionType: __Type - # A list of all directives supported by this server. 
- directives: [__Directive!]! - } - - # The fundamental unit of any GraphQL Schema is the type. There are many kinds of - # types in GraphQL as represented by the ` + "`" + `__TypeKind` + "`" + ` enum. - # - # Depending on the kind of a type, certain fields describe information about that - # type. Scalar types provide no information beyond a name and description, while - # Enum types provide their values. Object and Interface types provide the fields - # they describe. Abstract types, Union and Interface, provide the Object types - # possible at runtime. List and NonNull types compose other types. - type __Type { - kind: __TypeKind! - name: String - description: String - fields(includeDeprecated: Boolean = false): [__Field!] - interfaces: [__Type!] - possibleTypes: [__Type!] - enumValues(includeDeprecated: Boolean = false): [__EnumValue!] - inputFields: [__InputValue!] - ofType: __Type - } - - # An enum describing what kind of type a given ` + "`" + `__Type` + "`" + ` is. - enum __TypeKind { - # Indicates this type is a scalar. - SCALAR - # Indicates this type is an object. ` + "`" + `fields` + "`" + ` and ` + "`" + `interfaces` + "`" + ` are valid fields. - OBJECT - # Indicates this type is an interface. ` + "`" + `fields` + "`" + ` and ` + "`" + `possibleTypes` + "`" + ` are valid fields. - INTERFACE - # Indicates this type is a union. ` + "`" + `possibleTypes` + "`" + ` is a valid field. - UNION - # Indicates this type is an enum. ` + "`" + `enumValues` + "`" + ` is a valid field. - ENUM - # Indicates this type is an input object. ` + "`" + `inputFields` + "`" + ` is a valid field. - INPUT_OBJECT - # Indicates this type is a list. ` + "`" + `ofType` + "`" + ` is a valid field. - LIST - # Indicates this type is a non-null. ` + "`" + `ofType` + "`" + ` is a valid field. 
- NON_NULL - } -` diff --git a/vendor/github.com/neelance/graphql-go/internal/schema/schema.go b/vendor/github.com/neelance/graphql-go/internal/schema/schema.go deleted file mode 100644 index 0cada3a9..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/schema/schema.go +++ /dev/null @@ -1,462 +0,0 @@ -package schema - -import ( - "fmt" - "strings" - "text/scanner" - - "github.com/neelance/graphql-go/errors" - "github.com/neelance/graphql-go/internal/common" -) - -type Schema struct { - EntryPoints map[string]NamedType - Types map[string]NamedType - Directives map[string]*DirectiveDecl - - entryPointNames map[string]string - objects []*Object - unions []*Union - enums []*Enum -} - -func (s *Schema) Resolve(name string) common.Type { - return s.Types[name] -} - -type NamedType interface { - common.Type - TypeName() string - Description() string -} - -type Scalar struct { - Name string - Desc string -} - -type Object struct { - Name string - Interfaces []*Interface - Fields FieldList - Desc string - - interfaceNames []string -} - -type Interface struct { - Name string - PossibleTypes []*Object - Fields FieldList - Desc string -} - -type Union struct { - Name string - PossibleTypes []*Object - Desc string - - typeNames []string -} - -type Enum struct { - Name string - Values []*EnumValue - Desc string -} - -type EnumValue struct { - Name string - Directives common.DirectiveList - Desc string -} - -type InputObject struct { - Name string - Desc string - Values common.InputValueList -} - -type FieldList []*Field - -func (l FieldList) Get(name string) *Field { - for _, f := range l { - if f.Name == name { - return f - } - } - return nil -} - -func (l FieldList) Names() []string { - names := make([]string, len(l)) - for i, f := range l { - names[i] = f.Name - } - return names -} - -type DirectiveDecl struct { - Name string - Desc string - Locs []string - Args common.InputValueList -} - -func (*Scalar) Kind() string { return "SCALAR" } -func (*Object) Kind() string { return "OBJECT" } -func (*Interface) Kind() string { return "INTERFACE" } -func (*Union) Kind() string { return "UNION" } -func (*Enum) Kind() string { return "ENUM" } -func (*InputObject) Kind() string { return "INPUT_OBJECT" } - -func (t *Scalar) String() string { return t.Name } -func (t *Object) String() string { return t.Name } -func (t *Interface) String() string { return t.Name } -func (t *Union) String() string { return t.Name } -func (t *Enum) String() string { return t.Name } -func (t *InputObject) String() string { return t.Name } - -func (t *Scalar) TypeName() string { return t.Name } -func (t *Object) TypeName() string { return t.Name } -func (t *Interface) TypeName() string { return t.Name } -func (t *Union) TypeName() string { return t.Name } -func (t *Enum) TypeName() string { return t.Name } -func (t *InputObject) TypeName() string { return t.Name } - -func (t *Scalar) Description() string { return t.Desc } -func (t *Object) Description() string { return t.Desc } -func (t *Interface) Description() string { return t.Desc } -func (t *Union) Description() string { return t.Desc } -func (t *Enum) Description() string { return t.Desc } -func (t *InputObject) Description() string { return t.Desc } - -type Field struct { - Name string - Args common.InputValueList - Type common.Type - Directives common.DirectiveList - Desc string -} - -func New() *Schema { - s := &Schema{ - entryPointNames: make(map[string]string), - Types: make(map[string]NamedType), - Directives: make(map[string]*DirectiveDecl), - } - for n, t := 
range Meta.Types { - s.Types[n] = t - } - for n, d := range Meta.Directives { - s.Directives[n] = d - } - return s -} - -func (s *Schema) Parse(schemaString string) error { - sc := &scanner.Scanner{ - Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings, - } - sc.Init(strings.NewReader(schemaString)) - - l := common.New(sc) - err := l.CatchSyntaxError(func() { - parseSchema(s, l) - }) - if err != nil { - return err - } - - for _, t := range s.Types { - if err := resolveNamedType(s, t); err != nil { - return err - } - } - for _, d := range s.Directives { - for _, arg := range d.Args { - t, err := common.ResolveType(arg.Type, s.Resolve) - if err != nil { - return err - } - arg.Type = t - } - } - - s.EntryPoints = make(map[string]NamedType) - for key, name := range s.entryPointNames { - t, ok := s.Types[name] - if !ok { - if !ok { - return errors.Errorf("type %q not found", name) - } - } - s.EntryPoints[key] = t - } - - for _, obj := range s.objects { - obj.Interfaces = make([]*Interface, len(obj.interfaceNames)) - for i, intfName := range obj.interfaceNames { - t, ok := s.Types[intfName] - if !ok { - return errors.Errorf("interface %q not found", intfName) - } - intf, ok := t.(*Interface) - if !ok { - return errors.Errorf("type %q is not an interface", intfName) - } - obj.Interfaces[i] = intf - intf.PossibleTypes = append(intf.PossibleTypes, obj) - } - } - - for _, union := range s.unions { - union.PossibleTypes = make([]*Object, len(union.typeNames)) - for i, name := range union.typeNames { - t, ok := s.Types[name] - if !ok { - return errors.Errorf("object type %q not found", name) - } - obj, ok := t.(*Object) - if !ok { - return errors.Errorf("type %q is not an object", name) - } - union.PossibleTypes[i] = obj - } - } - - for _, enum := range s.enums { - for _, value := range enum.Values { - if err := resolveDirectives(s, value.Directives); err != nil { - return err - } - } - } - - return nil -} - -func resolveNamedType(s *Schema, t NamedType) error { - switch t := t.(type) { - case *Object: - for _, f := range t.Fields { - if err := resolveField(s, f); err != nil { - return err - } - } - case *Interface: - for _, f := range t.Fields { - if err := resolveField(s, f); err != nil { - return err - } - } - case *InputObject: - if err := resolveInputObject(s, t.Values); err != nil { - return err - } - } - return nil -} - -func resolveField(s *Schema, f *Field) error { - t, err := common.ResolveType(f.Type, s.Resolve) - if err != nil { - return err - } - f.Type = t - if err := resolveDirectives(s, f.Directives); err != nil { - return err - } - return resolveInputObject(s, f.Args) -} - -func resolveDirectives(s *Schema, directives common.DirectiveList) error { - for _, d := range directives { - dirName := d.Name.Name - dd, ok := s.Directives[dirName] - if !ok { - return errors.Errorf("directive %q not found", dirName) - } - for _, arg := range d.Args { - if dd.Args.Get(arg.Name.Name) == nil { - return errors.Errorf("invalid argument %q for directive %q", arg.Name.Name, dirName) - } - } - for _, arg := range dd.Args { - if _, ok := d.Args.Get(arg.Name.Name); !ok { - d.Args = append(d.Args, common.Argument{Name: arg.Name, Value: arg.Default}) - } - } - } - return nil -} - -func resolveInputObject(s *Schema, values common.InputValueList) error { - for _, v := range values { - t, err := common.ResolveType(v.Type, s.Resolve) - if err != nil { - return err - } - v.Type = t - } - return nil -} - -func parseSchema(s *Schema, l *common.Lexer) { - for l.Peek() != 
scanner.EOF { - desc := l.DescComment() - switch x := l.ConsumeIdent(); x { - case "schema": - l.ConsumeToken('{') - for l.Peek() != '}' { - name := l.ConsumeIdent() - l.ConsumeToken(':') - typ := l.ConsumeIdent() - s.entryPointNames[name] = typ - } - l.ConsumeToken('}') - case "type": - obj := parseObjectDecl(l) - obj.Desc = desc - s.Types[obj.Name] = obj - s.objects = append(s.objects, obj) - case "interface": - intf := parseInterfaceDecl(l) - intf.Desc = desc - s.Types[intf.Name] = intf - case "union": - union := parseUnionDecl(l) - union.Desc = desc - s.Types[union.Name] = union - s.unions = append(s.unions, union) - case "enum": - enum := parseEnumDecl(l) - enum.Desc = desc - s.Types[enum.Name] = enum - s.enums = append(s.enums, enum) - case "input": - input := parseInputDecl(l) - input.Desc = desc - s.Types[input.Name] = input - case "scalar": - name := l.ConsumeIdent() - s.Types[name] = &Scalar{Name: name, Desc: desc} - case "directive": - directive := parseDirectiveDecl(l) - directive.Desc = desc - s.Directives[directive.Name] = directive - default: - l.SyntaxError(fmt.Sprintf(`unexpected %q, expecting "schema", "type", "enum", "interface", "union", "input", "scalar" or "directive"`, x)) - } - } -} - -func parseObjectDecl(l *common.Lexer) *Object { - o := &Object{} - o.Name = l.ConsumeIdent() - if l.Peek() == scanner.Ident { - l.ConsumeKeyword("implements") - for { - o.interfaceNames = append(o.interfaceNames, l.ConsumeIdent()) - if l.Peek() == '{' { - break - } - } - } - l.ConsumeToken('{') - o.Fields = parseFields(l) - l.ConsumeToken('}') - return o -} - -func parseInterfaceDecl(l *common.Lexer) *Interface { - i := &Interface{} - i.Name = l.ConsumeIdent() - l.ConsumeToken('{') - i.Fields = parseFields(l) - l.ConsumeToken('}') - return i -} - -func parseUnionDecl(l *common.Lexer) *Union { - union := &Union{} - union.Name = l.ConsumeIdent() - l.ConsumeToken('=') - union.typeNames = []string{l.ConsumeIdent()} - for l.Peek() == '|' { - l.ConsumeToken('|') - union.typeNames = append(union.typeNames, l.ConsumeIdent()) - } - return union -} - -func parseInputDecl(l *common.Lexer) *InputObject { - i := &InputObject{} - i.Name = l.ConsumeIdent() - l.ConsumeToken('{') - for l.Peek() != '}' { - i.Values = append(i.Values, common.ParseInputValue(l)) - } - l.ConsumeToken('}') - return i -} - -func parseEnumDecl(l *common.Lexer) *Enum { - enum := &Enum{} - enum.Name = l.ConsumeIdent() - l.ConsumeToken('{') - for l.Peek() != '}' { - v := &EnumValue{} - v.Desc = l.DescComment() - v.Name = l.ConsumeIdent() - v.Directives = common.ParseDirectives(l) - enum.Values = append(enum.Values, v) - } - l.ConsumeToken('}') - return enum -} - -func parseDirectiveDecl(l *common.Lexer) *DirectiveDecl { - d := &DirectiveDecl{} - l.ConsumeToken('@') - d.Name = l.ConsumeIdent() - if l.Peek() == '(' { - l.ConsumeToken('(') - for l.Peek() != ')' { - v := common.ParseInputValue(l) - d.Args = append(d.Args, v) - } - l.ConsumeToken(')') - } - l.ConsumeKeyword("on") - for { - loc := l.ConsumeIdent() - d.Locs = append(d.Locs, loc) - if l.Peek() != '|' { - break - } - l.ConsumeToken('|') - } - return d -} - -func parseFields(l *common.Lexer) FieldList { - var fields FieldList - for l.Peek() != '}' { - f := &Field{} - f.Desc = l.DescComment() - f.Name = l.ConsumeIdent() - if l.Peek() == '(' { - l.ConsumeToken('(') - for l.Peek() != ')' { - f.Args = append(f.Args, common.ParseInputValue(l)) - } - l.ConsumeToken(')') - } - l.ConsumeToken(':') - f.Type = common.ParseType(l) - f.Directives = common.ParseDirectives(l) - fields 
= append(fields, f) - } - return fields -} diff --git a/vendor/github.com/neelance/graphql-go/internal/tests/all_test.go b/vendor/github.com/neelance/graphql-go/internal/tests/all_test.go deleted file mode 100644 index 7d31673f..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/tests/all_test.go +++ /dev/null @@ -1,75 +0,0 @@ -package tests - -import ( - "os" - "reflect" - "sort" - "testing" - - "encoding/json" - - "github.com/neelance/graphql-go/errors" - "github.com/neelance/graphql-go/internal/query" - "github.com/neelance/graphql-go/internal/schema" - "github.com/neelance/graphql-go/internal/validation" -) - -type Test struct { - Name string - Rule string - Schema int - Query string - Errors []*errors.QueryError -} - -func TestAll(t *testing.T) { - f, err := os.Open("testdata/tests.json") - if err != nil { - t.Fatal(err) - } - - var testData struct { - Schemas []string - Tests []*Test - } - if err := json.NewDecoder(f).Decode(&testData); err != nil { - t.Fatal(err) - } - - schemas := make([]*schema.Schema, len(testData.Schemas)) - for i, schemaStr := range testData.Schemas { - schemas[i] = schema.New() - if err := schemas[i].Parse(schemaStr); err != nil { - t.Fatal(err) - } - } - - for _, test := range testData.Tests { - t.Run(test.Name, func(t *testing.T) { - d, err := query.Parse(test.Query) - if err != nil { - t.Fatal(err) - } - errs := validation.Validate(schemas[test.Schema], d) - got := []*errors.QueryError{} - for _, err := range errs { - if err.Rule == test.Rule { - err.Rule = "" - got = append(got, err) - } - } - sortLocations(test.Errors) - sortLocations(got) - if !reflect.DeepEqual(test.Errors, got) { - t.Errorf("wrong errors\nexpected: %v\ngot: %v", test.Errors, got) - } - }) - } -} - -func sortLocations(errs []*errors.QueryError) { - for _, err := range errs { - locs := err.Locations - sort.Slice(locs, func(i, j int) bool { return locs[i].Before(locs[j]) }) - } -} diff --git a/vendor/github.com/neelance/graphql-go/internal/tests/empty.go b/vendor/github.com/neelance/graphql-go/internal/tests/empty.go deleted file mode 100644 index ca8701d2..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/tests/empty.go +++ /dev/null @@ -1 +0,0 @@ -package tests diff --git a/vendor/github.com/neelance/graphql-go/internal/tests/testdata/LICENSE b/vendor/github.com/neelance/graphql-go/internal/tests/testdata/LICENSE deleted file mode 100644 index fce4519e..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/tests/testdata/LICENSE +++ /dev/null @@ -1,33 +0,0 @@ -The files in this testdata directory are derived from the graphql-js project: -https://github.com/graphql/graphql-js - -BSD License - -For GraphQL software - -Copyright (c) 2015, Facebook, Inc. All rights reserved. - -Redistribution and use in source and binary forms, with or without modification, -are permitted provided that the following conditions are met: - - * Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - - * Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - - * Neither the name Facebook nor the names of its contributors may be used to - endorse or promote products derived from this software without specific - prior written permission. 
- -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR -ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES -(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; -LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON -ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS -SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/vendor/github.com/neelance/graphql-go/internal/tests/testdata/export.js b/vendor/github.com/neelance/graphql-go/internal/tests/testdata/export.js deleted file mode 100644 index b89f5574..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/tests/testdata/export.js +++ /dev/null @@ -1,110 +0,0 @@ -import fs from 'fs'; -import Module from 'module'; -import { testSchema } from './src/validation/__tests__/harness'; -import { printSchema } from './src/utilities'; - -let schemas = []; -function registerSchema(schema) { - for (let i = 0; i < schemas.length; i++) { - if (schemas[i] == schema) { - return i; - } - } - schemas.push(schema); - return schemas.length - 1; -} - -const harness = { - expectPassesRule(rule, queryString) { - harness.expectPassesRuleWithSchema(testSchema, rule, queryString); - }, - expectPassesRuleWithSchema(schema, rule, queryString, errors) { - tests.push({ - name: names.join('/'), - rule: rule.name, - schema: registerSchema(schema), - query: queryString, - errors: [], - }); - }, - expectFailsRule(rule, queryString, errors) { - harness.expectFailsRuleWithSchema(testSchema, rule, queryString, errors); - }, - expectFailsRuleWithSchema(schema, rule, queryString, errors) { - tests.push({ - name: names.join('/'), - rule: rule.name, - schema: registerSchema(schema), - query: queryString, - errors: errors, - }); - } -}; - -let tests = []; -let names = [] -const fakeModules = { - 'mocha': { - describe(name, f) { - switch (name) { - case 'within schema language': - return; - } - names.push(name); - f(); - names.pop(); - }, - it(name, f) { - switch (name) { - case 'ignores type definitions': - case 'reports correctly when a non-exclusive follows an exclusive': - case 'disallows differing subfields': - return; - } - names.push(name); - f(); - names.pop(); - }, - }, - './harness': harness, -}; - -const originalLoader = Module._load; -Module._load = function(request, parent, isMain) { - return fakeModules[request] || originalLoader(request, parent, isMain); -}; - -require('./src/validation/__tests__/ArgumentsOfCorrectType-test'); -require('./src/validation/__tests__/DefaultValuesOfCorrectType-test'); -require('./src/validation/__tests__/FieldsOnCorrectType-test'); -require('./src/validation/__tests__/FragmentsOnCompositeTypes-test'); -require('./src/validation/__tests__/KnownArgumentNames-test'); -require('./src/validation/__tests__/KnownDirectives-test'); -require('./src/validation/__tests__/KnownFragmentNames-test'); -require('./src/validation/__tests__/KnownTypeNames-test'); -require('./src/validation/__tests__/LoneAnonymousOperation-test'); -require('./src/validation/__tests__/NoFragmentCycles-test'); -require('./src/validation/__tests__/NoUndefinedVariables-test'); 
-require('./src/validation/__tests__/NoUnusedFragments-test'); -require('./src/validation/__tests__/NoUnusedVariables-test'); -require('./src/validation/__tests__/OverlappingFieldsCanBeMerged-test'); -require('./src/validation/__tests__/PossibleFragmentSpreads-test'); -require('./src/validation/__tests__/ProvidedNonNullArguments-test'); -require('./src/validation/__tests__/ScalarLeafs-test'); -require('./src/validation/__tests__/UniqueArgumentNames-test'); -require('./src/validation/__tests__/UniqueDirectivesPerLocation-test'); -require('./src/validation/__tests__/UniqueFragmentNames-test'); -require('./src/validation/__tests__/UniqueInputFieldNames-test'); -require('./src/validation/__tests__/UniqueOperationNames-test'); -require('./src/validation/__tests__/UniqueVariableNames-test'); -require('./src/validation/__tests__/VariablesAreInputTypes-test'); -require('./src/validation/__tests__/VariablesInAllowedPosition-test'); - -let output = JSON.stringify({ - schemas: schemas.map(s => printSchema(s)), - tests: tests, -}, null, 2) -output = output.replace(' Did you mean to use an inline fragment on \\"Dog\\" or \\"Cat\\"?', ''); -output = output.replace(' Did you mean to use an inline fragment on \\"Being\\", \\"Pet\\", \\"Canine\\", \\"Dog\\", or \\"Cat\\"?', ''); -output = output.replace(' Did you mean \\"Pet\\"?', ''); -fs.writeFileSync("tests.json", output); diff --git a/vendor/github.com/neelance/graphql-go/internal/tests/testdata/gen.go b/vendor/github.com/neelance/graphql-go/internal/tests/testdata/gen.go deleted file mode 100644 index 6d5ac9e6..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/tests/testdata/gen.go +++ /dev/null @@ -1,4 +0,0 @@ -package testdata - -//go:generate cp export.js graphql-js/export.js -//go:generate babel-node graphql-js/export.js diff --git a/vendor/github.com/neelance/graphql-go/internal/tests/testdata/tests.json b/vendor/github.com/neelance/graphql-go/internal/tests/testdata/tests.json deleted file mode 100644 index 35511c6a..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/tests/testdata/tests.json +++ /dev/null @@ -1,4948 +0,0 @@ -{ - "schemas": [ - "schema {\n query: QueryRoot\n}\n\ndirective @onQuery on QUERY\n\ndirective @onMutation on MUTATION\n\ndirective @onSubscription on SUBSCRIPTION\n\ndirective @onField on FIELD\n\ndirective @onFragmentDefinition on FRAGMENT_DEFINITION\n\ndirective @onFragmentSpread on FRAGMENT_SPREAD\n\ndirective @onInlineFragment on INLINE_FRAGMENT\n\ndirective @onSchema on SCHEMA\n\ndirective @onScalar on SCALAR\n\ndirective @onObject on OBJECT\n\ndirective @onFieldDefinition on FIELD_DEFINITION\n\ndirective @onArgumentDefinition on ARGUMENT_DEFINITION\n\ndirective @onInterface on INTERFACE\n\ndirective @onUnion on UNION\n\ndirective @onEnum on ENUM\n\ndirective @onEnumValue on ENUM_VALUE\n\ndirective @onInputObject on INPUT_OBJECT\n\ndirective @onInputFieldDefinition on INPUT_FIELD_DEFINITION\n\ntype Alien implements Being, Intelligent {\n iq: Int\n name(surname: Boolean): String\n numEyes: Int\n}\n\ninterface Being {\n name(surname: Boolean): String\n}\n\ninterface Canine {\n name(surname: Boolean): String\n}\n\ntype Cat implements Being, Pet {\n furColor: FurColor\n meowVolume: Int\n meows: Boolean\n name(surname: Boolean): String\n nickname: String\n}\n\nunion CatOrDog = Dog | Cat\n\ninput ComplexInput {\n booleanField: Boolean\n intField: Int\n requiredField: Boolean!\n stringField: String\n stringListField: [String]\n}\n\ntype ComplicatedArgs {\n booleanArgField(booleanArg: Boolean): String\n 
[... remainder of this deleted file elided: the hunk removes a JSON fixture of
GraphQL validation test cases (apparently vendored graphql-go test data). The
fixture defines three test schemas (the Dog/Human/Pet schema whose tail appears
above, a SomeBox/StringBox schema, and a trivial Foo/Query schema) plus a
"tests" array in which each entry names a validation rule, an input query, and
the expected errors, e.g.:

    {
      "name": "Validate: Argument values of correct type/Valid values/Good int value",
      "rule": "ArgumentsOfCorrectType",
      "schema": 0,
      "query": "\n {\n complicatedArgs {\n intArgField(intArg: 2)\n }\n }\n ",
      "errors": []
    }

The removed cases exercise the ArgumentsOfCorrectType,
DefaultValuesOfCorrectType, FieldsOnCorrectType, FragmentsOnCompositeTypes,
KnownArgumentNames, KnownDirectives, KnownFragmentNames, KnownTypeNames,
LoneAnonymousOperation, NoFragmentCycles, NoUndefinedVariables, and
NoUnusedFragments rules; the fixture continues below. ...]
[ - { - "message": "Fragment \"foo\" is never used.", - "locations": [ - { - "line": 7, - "column": 7 - } - ] - } - ] - }, - { - "name": "Validate: No unused variables/uses all variables", - "rule": "NoUnusedVariables", - "schema": 0, - "query": "\n query ($a: String, $b: String, $c: String) {\n field(a: $a, b: $b, c: $c)\n }\n ", - "errors": [] - }, - { - "name": "Validate: No unused variables/uses all variables deeply", - "rule": "NoUnusedVariables", - "schema": 0, - "query": "\n query Foo($a: String, $b: String, $c: String) {\n field(a: $a) {\n field(b: $b) {\n field(c: $c)\n }\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: No unused variables/uses all variables deeply in inline fragments", - "rule": "NoUnusedVariables", - "schema": 0, - "query": "\n query Foo($a: String, $b: String, $c: String) {\n ... on Type {\n field(a: $a) {\n field(b: $b) {\n ... on Type {\n field(c: $c)\n }\n }\n }\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: No unused variables/uses all variables in fragments", - "rule": "NoUnusedVariables", - "schema": 0, - "query": "\n query Foo($a: String, $b: String, $c: String) {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a) {\n ...FragB\n }\n }\n fragment FragB on Type {\n field(b: $b) {\n ...FragC\n }\n }\n fragment FragC on Type {\n field(c: $c)\n }\n ", - "errors": [] - }, - { - "name": "Validate: No unused variables/variable used by fragment in multiple operations", - "rule": "NoUnusedVariables", - "schema": 0, - "query": "\n query Foo($a: String) {\n ...FragA\n }\n query Bar($b: String) {\n ...FragB\n }\n fragment FragA on Type {\n field(a: $a)\n }\n fragment FragB on Type {\n field(b: $b)\n }\n ", - "errors": [] - }, - { - "name": "Validate: No unused variables/variable used by recursive fragment", - "rule": "NoUnusedVariables", - "schema": 0, - "query": "\n query Foo($a: String) {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a) {\n ...FragA\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: No unused variables/variable not used", - "rule": "NoUnusedVariables", - "schema": 0, - "query": "\n query ($a: String, $b: String, $c: String) {\n field(a: $a, b: $b)\n }\n ", - "errors": [ - { - "message": "Variable \"$c\" is never used.", - "locations": [ - { - "line": 2, - "column": 38 - } - ] - } - ] - }, - { - "name": "Validate: No unused variables/multiple variables not used", - "rule": "NoUnusedVariables", - "schema": 0, - "query": "\n query Foo($a: String, $b: String, $c: String) {\n field(b: $b)\n }\n ", - "errors": [ - { - "message": "Variable \"$a\" is never used in operation \"Foo\".", - "locations": [ - { - "line": 2, - "column": 17 - } - ] - }, - { - "message": "Variable \"$c\" is never used in operation \"Foo\".", - "locations": [ - { - "line": 2, - "column": 41 - } - ] - } - ] - }, - { - "name": "Validate: No unused variables/variable not used in fragments", - "rule": "NoUnusedVariables", - "schema": 0, - "query": "\n query Foo($a: String, $b: String, $c: String) {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a) {\n ...FragB\n }\n }\n fragment FragB on Type {\n field(b: $b) {\n ...FragC\n }\n }\n fragment FragC on Type {\n field\n }\n ", - "errors": [ - { - "message": "Variable \"$c\" is never used in operation \"Foo\".", - "locations": [ - { - "line": 2, - "column": 41 - } - ] - } - ] - }, - { - "name": "Validate: No unused variables/multiple variables not used in fragments", - "rule": "NoUnusedVariables", - "schema": 0, - "query": "\n query Foo($a: String, $b: String, $c: String) {\n ...FragA\n }\n 
fragment FragA on Type {\n field {\n ...FragB\n }\n }\n fragment FragB on Type {\n field(b: $b) {\n ...FragC\n }\n }\n fragment FragC on Type {\n field\n }\n ", - "errors": [ - { - "message": "Variable \"$a\" is never used in operation \"Foo\".", - "locations": [ - { - "line": 2, - "column": 17 - } - ] - }, - { - "message": "Variable \"$c\" is never used in operation \"Foo\".", - "locations": [ - { - "line": 2, - "column": 41 - } - ] - } - ] - }, - { - "name": "Validate: No unused variables/variable not used by unreferenced fragment", - "rule": "NoUnusedVariables", - "schema": 0, - "query": "\n query Foo($b: String) {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a)\n }\n fragment FragB on Type {\n field(b: $b)\n }\n ", - "errors": [ - { - "message": "Variable \"$b\" is never used in operation \"Foo\".", - "locations": [ - { - "line": 2, - "column": 17 - } - ] - } - ] - }, - { - "name": "Validate: No unused variables/variable not used by fragment used by other operation", - "rule": "NoUnusedVariables", - "schema": 0, - "query": "\n query Foo($b: String) {\n ...FragA\n }\n query Bar($a: String) {\n ...FragB\n }\n fragment FragA on Type {\n field(a: $a)\n }\n fragment FragB on Type {\n field(b: $b)\n }\n ", - "errors": [ - { - "message": "Variable \"$b\" is never used in operation \"Foo\".", - "locations": [ - { - "line": 2, - "column": 17 - } - ] - }, - { - "message": "Variable \"$a\" is never used in operation \"Bar\".", - "locations": [ - { - "line": 5, - "column": 17 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/unique fields", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment uniqueFields on Dog {\n name\n nickname\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/identical fields", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment mergeIdenticalFields on Dog {\n name\n name\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/identical fields with identical args", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment mergeIdenticalFieldsWithIdenticalArgs on Dog {\n doesKnowCommand(dogCommand: SIT)\n doesKnowCommand(dogCommand: SIT)\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/identical fields with identical directives", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment mergeSameFieldsWithSameDirectives on Dog {\n name @include(if: true)\n name @include(if: true)\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/different args with different aliases", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment differentArgsWithDifferentAliases on Dog {\n knowsSit: doesKnowCommand(dogCommand: SIT)\n knowsDown: doesKnowCommand(dogCommand: DOWN)\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/different directives with different aliases", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment differentDirectivesWithDifferentAliases on Dog {\n nameIfTrue: name @include(if: true)\n nameIfFalse: name @include(if: false)\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/different skip/include directives accepted", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment differentDirectivesWithDifferentAliases on Dog {\n name @include(if: true)\n 
name @include(if: false)\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/Same aliases with different field targets", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment sameAliasesWithDifferentFieldTargets on Dog {\n fido: name\n fido: nickname\n }\n ", - "errors": [ - { - "message": "Fields \"fido\" conflict because name and nickname are different fields. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 3, - "column": 9 - }, - { - "line": 4, - "column": 9 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/Same aliases allowed on non-overlapping fields", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment sameAliasesWithDifferentFieldTargets on Pet {\n ... on Dog {\n name\n }\n ... on Cat {\n name: nickname\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/Alias masking direct field access", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment aliasMaskingDirectFieldAccess on Dog {\n name: nickname\n name\n }\n ", - "errors": [ - { - "message": "Fields \"name\" conflict because nickname and name are different fields. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 3, - "column": 9 - }, - { - "line": 4, - "column": 9 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/different args, second adds an argument", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment conflictingArgs on Dog {\n doesKnowCommand\n doesKnowCommand(dogCommand: HEEL)\n }\n ", - "errors": [ - { - "message": "Fields \"doesKnowCommand\" conflict because they have differing arguments. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 3, - "column": 9 - }, - { - "line": 4, - "column": 9 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/different args, second missing an argument", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment conflictingArgs on Dog {\n doesKnowCommand(dogCommand: SIT)\n doesKnowCommand\n }\n ", - "errors": [ - { - "message": "Fields \"doesKnowCommand\" conflict because they have differing arguments. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 3, - "column": 9 - }, - { - "line": 4, - "column": 9 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/conflicting args", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment conflictingArgs on Dog {\n doesKnowCommand(dogCommand: SIT)\n doesKnowCommand(dogCommand: HEEL)\n }\n ", - "errors": [ - { - "message": "Fields \"doesKnowCommand\" conflict because they have differing arguments. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 3, - "column": 9 - }, - { - "line": 4, - "column": 9 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/allows different args where no conflict is possible", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n fragment conflictingArgs on Pet {\n ... on Dog {\n name(surname: true)\n }\n ... 
on Cat {\n name\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/encounters conflict in fragments", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n {\n ...A\n ...B\n }\n fragment A on Type {\n x: a\n }\n fragment B on Type {\n x: b\n }\n ", - "errors": [ - { - "message": "Fields \"x\" conflict because a and b are different fields. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 7, - "column": 9 - }, - { - "line": 10, - "column": 9 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/reports each conflict once", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n {\n f1 {\n ...A\n ...B\n }\n f2 {\n ...B\n ...A\n }\n f3 {\n ...A\n ...B\n x: c\n }\n }\n fragment A on Type {\n x: a\n }\n fragment B on Type {\n x: b\n }\n ", - "errors": [ - { - "message": "Fields \"x\" conflict because a and b are different fields. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 18, - "column": 9 - }, - { - "line": 21, - "column": 9 - } - ] - }, - { - "message": "Fields \"x\" conflict because c and a are different fields. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 14, - "column": 11 - }, - { - "line": 18, - "column": 9 - } - ] - }, - { - "message": "Fields \"x\" conflict because c and b are different fields. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 14, - "column": 11 - }, - { - "line": 21, - "column": 9 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/deep conflict", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n {\n field {\n x: a\n },\n field {\n x: b\n }\n }\n ", - "errors": [ - { - "message": "Fields \"field\" conflict because subfields \"x\" conflict because a and b are different fields. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 3, - "column": 9 - }, - { - "line": 4, - "column": 11 - }, - { - "line": 6, - "column": 9 - }, - { - "line": 7, - "column": 11 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/deep conflict with multiple issues", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n {\n field {\n x: a\n y: c\n },\n field {\n x: b\n y: d\n }\n }\n ", - "errors": [ - { - "message": "Fields \"field\" conflict because subfields \"x\" conflict because a and b are different fields and subfields \"y\" conflict because c and d are different fields. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 3, - "column": 9 - }, - { - "line": 4, - "column": 11 - }, - { - "line": 5, - "column": 11 - }, - { - "line": 7, - "column": 9 - }, - { - "line": 8, - "column": 11 - }, - { - "line": 9, - "column": 11 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/very deep conflict", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n {\n field {\n deepField {\n x: a\n }\n },\n field {\n deepField {\n x: b\n }\n }\n }\n ", - "errors": [ - { - "message": "Fields \"field\" conflict because subfields \"deepField\" conflict because subfields \"x\" conflict because a and b are different fields. 
Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 3, - "column": 9 - }, - { - "line": 4, - "column": 11 - }, - { - "line": 5, - "column": 13 - }, - { - "line": 8, - "column": 9 - }, - { - "line": 9, - "column": 11 - }, - { - "line": 10, - "column": 13 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/reports deep conflict to nearest common ancestor", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n {\n field {\n deepField {\n x: a\n }\n deepField {\n x: b\n }\n },\n field {\n deepField {\n y\n }\n }\n }\n ", - "errors": [ - { - "message": "Fields \"deepField\" conflict because subfields \"x\" conflict because a and b are different fields. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 4, - "column": 11 - }, - { - "line": 5, - "column": 13 - }, - { - "line": 7, - "column": 11 - }, - { - "line": 8, - "column": 13 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/reports deep conflict to nearest common ancestor in fragments", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n {\n field {\n ...F\n }\n field {\n ...F\n }\n }\n fragment F on T {\n deepField {\n deeperField {\n x: a\n }\n deeperField {\n x: b\n }\n },\n deepField {\n deeperField {\n y\n }\n }\n }\n ", - "errors": [ - { - "message": "Fields \"deeperField\" conflict because subfields \"x\" conflict because a and b are different fields. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 12, - "column": 11 - }, - { - "line": 13, - "column": 13 - }, - { - "line": 15, - "column": 11 - }, - { - "line": 16, - "column": 13 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/reports deep conflict in nested fragments", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n {\n field {\n ...F\n }\n field {\n ...I\n }\n }\n fragment F on T {\n x: a\n ...G\n }\n fragment G on T {\n y: c\n }\n fragment I on T {\n y: d\n ...J\n }\n fragment J on T {\n x: b\n }\n ", - "errors": [ - { - "message": "Fields \"field\" conflict because subfields \"x\" conflict because a and b are different fields and subfields \"y\" conflict because c and d are different fields. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 3, - "column": 9 - }, - { - "line": 11, - "column": 9 - }, - { - "line": 15, - "column": 9 - }, - { - "line": 6, - "column": 9 - }, - { - "line": 22, - "column": 9 - }, - { - "line": 18, - "column": 9 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/ignores unknown fragments", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 0, - "query": "\n {\n field\n ...Unknown\n ...Known\n }\n\n fragment Known on T {\n field\n ...OtherUnknown\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/conflicting return types which potentially overlap", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 1, - "query": "\n {\n someBox {\n ...on IntBox {\n scalar\n }\n ...on NonNullStringBox1 {\n scalar\n }\n }\n }\n ", - "errors": [ - { - "message": "Fields \"scalar\" conflict because they return conflicting types Int and String!. 
Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 5, - "column": 15 - }, - { - "line": 8, - "column": 15 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/compatible return shapes on different return types", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 1, - "query": "\n {\n someBox {\n ... on SomeBox {\n deepBox {\n unrelatedField\n }\n }\n ... on StringBox {\n deepBox {\n unrelatedField\n }\n }\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/disallows differing return types despite no overlap", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 1, - "query": "\n {\n someBox {\n ... on IntBox {\n scalar\n }\n ... on StringBox {\n scalar\n }\n }\n }\n ", - "errors": [ - { - "message": "Fields \"scalar\" conflict because they return conflicting types Int and String. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 5, - "column": 15 - }, - { - "line": 8, - "column": 15 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/disallows differing return type nullability despite no overlap", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 1, - "query": "\n {\n someBox {\n ... on NonNullStringBox1 {\n scalar\n }\n ... on StringBox {\n scalar\n }\n }\n }\n ", - "errors": [ - { - "message": "Fields \"scalar\" conflict because they return conflicting types String! and String. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 5, - "column": 15 - }, - { - "line": 8, - "column": 15 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/disallows differing return type list despite no overlap", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 1, - "query": "\n {\n someBox {\n ... on IntBox {\n box: listStringBox {\n scalar\n }\n }\n ... on StringBox {\n box: stringBox {\n scalar\n }\n }\n }\n }\n ", - "errors": [ - { - "message": "Fields \"box\" conflict because they return conflicting types [StringBox] and StringBox. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 5, - "column": 15 - }, - { - "line": 10, - "column": 15 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/disallows differing return type list despite no overlap", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 1, - "query": "\n {\n someBox {\n ... on IntBox {\n box: stringBox {\n scalar\n }\n }\n ... on StringBox {\n box: listStringBox {\n scalar\n }\n }\n }\n }\n ", - "errors": [ - { - "message": "Fields \"box\" conflict because they return conflicting types StringBox and [StringBox]. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 5, - "column": 15 - }, - { - "line": 10, - "column": 15 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/disallows differing deep return types despite no overlap", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 1, - "query": "\n {\n someBox {\n ... on IntBox {\n box: stringBox {\n scalar\n }\n }\n ... 
on StringBox {\n box: intBox {\n scalar\n }\n }\n }\n }\n ", - "errors": [ - { - "message": "Fields \"box\" conflict because subfields \"scalar\" conflict because they return conflicting types String and Int. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 5, - "column": 15 - }, - { - "line": 6, - "column": 17 - }, - { - "line": 10, - "column": 15 - }, - { - "line": 11, - "column": 17 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/allows non-conflicting overlaping types", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 1, - "query": "\n {\n someBox {\n ... on IntBox {\n scalar: unrelatedField\n }\n ... on StringBox {\n scalar\n }\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/same wrapped scalar return types", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 1, - "query": "\n {\n someBox {\n ...on NonNullStringBox1 {\n scalar\n }\n ...on NonNullStringBox2 {\n scalar\n }\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/allows inline typeless fragments", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 1, - "query": "\n {\n a\n ... {\n a\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/compares deep types including list", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 1, - "query": "\n {\n connection {\n ...edgeID\n edges {\n node {\n id: name\n }\n }\n }\n }\n\n fragment edgeID on Connection {\n edges {\n node {\n id\n }\n }\n }\n ", - "errors": [ - { - "message": "Fields \"edges\" conflict because subfields \"node\" conflict because subfields \"id\" conflict because name and id are different fields. Use different aliases on the fields to fetch both if this was intentional.", - "locations": [ - { - "line": 5, - "column": 13 - }, - { - "line": 6, - "column": 15 - }, - { - "line": 7, - "column": 17 - }, - { - "line": 14, - "column": 11 - }, - { - "line": 15, - "column": 13 - }, - { - "line": 16, - "column": 15 - } - ] - } - ] - }, - { - "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/ignores unknown types", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 1, - "query": "\n {\n someBox {\n ...on UnknownType {\n scalar\n }\n ...on NonNullStringBox2 {\n scalar\n }\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/works for field names that are JS keywords", - "rule": "OverlappingFieldsCanBeMerged", - "schema": 2, - "query": "{\n foo {\n constructor\n }\n }", - "errors": [] - }, - { - "name": "Validate: Possible fragment spreads/of the same object", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment objectWithinObject on Dog { ...dogFragment }\n fragment dogFragment on Dog { barkVolume }\n ", - "errors": [] - }, - { - "name": "Validate: Possible fragment spreads/of the same object with inline fragment", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment objectWithinObjectAnon on Dog { ... 
on Dog { barkVolume } }\n ", - "errors": [] - }, - { - "name": "Validate: Possible fragment spreads/object into an implemented interface", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment objectWithinInterface on Pet { ...dogFragment }\n fragment dogFragment on Dog { barkVolume }\n ", - "errors": [] - }, - { - "name": "Validate: Possible fragment spreads/object into containing union", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment objectWithinUnion on CatOrDog { ...dogFragment }\n fragment dogFragment on Dog { barkVolume }\n ", - "errors": [] - }, - { - "name": "Validate: Possible fragment spreads/union into contained object", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment unionWithinObject on Dog { ...catOrDogFragment }\n fragment catOrDogFragment on CatOrDog { __typename }\n ", - "errors": [] - }, - { - "name": "Validate: Possible fragment spreads/union into overlapping interface", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment unionWithinInterface on Pet { ...catOrDogFragment }\n fragment catOrDogFragment on CatOrDog { __typename }\n ", - "errors": [] - }, - { - "name": "Validate: Possible fragment spreads/union into overlapping union", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment unionWithinUnion on DogOrHuman { ...catOrDogFragment }\n fragment catOrDogFragment on CatOrDog { __typename }\n ", - "errors": [] - }, - { - "name": "Validate: Possible fragment spreads/interface into implemented object", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment interfaceWithinObject on Dog { ...petFragment }\n fragment petFragment on Pet { name }\n ", - "errors": [] - }, - { - "name": "Validate: Possible fragment spreads/interface into overlapping interface", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment interfaceWithinInterface on Pet { ...beingFragment }\n fragment beingFragment on Being { name }\n ", - "errors": [] - }, - { - "name": "Validate: Possible fragment spreads/interface into overlapping interface in inline fragment", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment interfaceWithinInterface on Pet { ... on Being { name } }\n ", - "errors": [] - }, - { - "name": "Validate: Possible fragment spreads/interface into overlapping union", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment interfaceWithinUnion on CatOrDog { ...petFragment }\n fragment petFragment on Pet { name }\n ", - "errors": [] - }, - { - "name": "Validate: Possible fragment spreads/different object into object", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment invalidObjectWithinObject on Cat { ...dogFragment }\n fragment dogFragment on Dog { barkVolume }\n ", - "errors": [ - { - "message": "Fragment \"dogFragment\" cannot be spread here as objects of type \"Cat\" can never be of type \"Dog\".", - "locations": [ - { - "line": 2, - "column": 51 - } - ] - } - ] - }, - { - "name": "Validate: Possible fragment spreads/different object into object in inline fragment", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment invalidObjectWithinObjectAnon on Cat {\n ... 
on Dog { barkVolume }\n }\n ", - "errors": [ - { - "message": "Fragment cannot be spread here as objects of type \"Cat\" can never be of type \"Dog\".", - "locations": [ - { - "line": 3, - "column": 9 - } - ] - } - ] - }, - { - "name": "Validate: Possible fragment spreads/object into not implementing interface", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment invalidObjectWithinInterface on Pet { ...humanFragment }\n fragment humanFragment on Human { pets { name } }\n ", - "errors": [ - { - "message": "Fragment \"humanFragment\" cannot be spread here as objects of type \"Pet\" can never be of type \"Human\".", - "locations": [ - { - "line": 2, - "column": 54 - } - ] - } - ] - }, - { - "name": "Validate: Possible fragment spreads/object into not containing union", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment invalidObjectWithinUnion on CatOrDog { ...humanFragment }\n fragment humanFragment on Human { pets { name } }\n ", - "errors": [ - { - "message": "Fragment \"humanFragment\" cannot be spread here as objects of type \"CatOrDog\" can never be of type \"Human\".", - "locations": [ - { - "line": 2, - "column": 55 - } - ] - } - ] - }, - { - "name": "Validate: Possible fragment spreads/union into not contained object", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment invalidUnionWithinObject on Human { ...catOrDogFragment }\n fragment catOrDogFragment on CatOrDog { __typename }\n ", - "errors": [ - { - "message": "Fragment \"catOrDogFragment\" cannot be spread here as objects of type \"Human\" can never be of type \"CatOrDog\".", - "locations": [ - { - "line": 2, - "column": 52 - } - ] - } - ] - }, - { - "name": "Validate: Possible fragment spreads/union into non overlapping interface", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment invalidUnionWithinInterface on Pet { ...humanOrAlienFragment }\n fragment humanOrAlienFragment on HumanOrAlien { __typename }\n ", - "errors": [ - { - "message": "Fragment \"humanOrAlienFragment\" cannot be spread here as objects of type \"Pet\" can never be of type \"HumanOrAlien\".", - "locations": [ - { - "line": 2, - "column": 53 - } - ] - } - ] - }, - { - "name": "Validate: Possible fragment spreads/union into non overlapping union", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment invalidUnionWithinUnion on CatOrDog { ...humanOrAlienFragment }\n fragment humanOrAlienFragment on HumanOrAlien { __typename }\n ", - "errors": [ - { - "message": "Fragment \"humanOrAlienFragment\" cannot be spread here as objects of type \"CatOrDog\" can never be of type \"HumanOrAlien\".", - "locations": [ - { - "line": 2, - "column": 54 - } - ] - } - ] - }, - { - "name": "Validate: Possible fragment spreads/interface into non implementing object", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment invalidInterfaceWithinObject on Cat { ...intelligentFragment }\n fragment intelligentFragment on Intelligent { iq }\n ", - "errors": [ - { - "message": "Fragment \"intelligentFragment\" cannot be spread here as objects of type \"Cat\" can never be of type \"Intelligent\".", - "locations": [ - { - "line": 2, - "column": 54 - } - ] - } - ] - }, - { - "name": "Validate: Possible fragment spreads/interface into non overlapping interface", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment invalidInterfaceWithinInterface on Pet {\n ...intelligentFragment\n }\n fragment intelligentFragment on 
Intelligent { iq }\n ", - "errors": [ - { - "message": "Fragment \"intelligentFragment\" cannot be spread here as objects of type \"Pet\" can never be of type \"Intelligent\".", - "locations": [ - { - "line": 3, - "column": 9 - } - ] - } - ] - }, - { - "name": "Validate: Possible fragment spreads/interface into non overlapping interface in inline fragment", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment invalidInterfaceWithinInterfaceAnon on Pet {\n ...on Intelligent { iq }\n }\n ", - "errors": [ - { - "message": "Fragment cannot be spread here as objects of type \"Pet\" can never be of type \"Intelligent\".", - "locations": [ - { - "line": 3, - "column": 9 - } - ] - } - ] - }, - { - "name": "Validate: Possible fragment spreads/interface into non overlapping union", - "rule": "PossibleFragmentSpreads", - "schema": 0, - "query": "\n fragment invalidInterfaceWithinUnion on HumanOrAlien { ...petFragment }\n fragment petFragment on Pet { name }\n ", - "errors": [ - { - "message": "Fragment \"petFragment\" cannot be spread here as objects of type \"HumanOrAlien\" can never be of type \"Pet\".", - "locations": [ - { - "line": 2, - "column": 62 - } - ] - } - ] - }, - { - "name": "Validate: Provided required arguments/ignores unknown arguments", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n dog {\n isHousetrained(unknownArgument: true)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Provided required arguments/Valid non-nullable value/Arg on optional arg", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n dog {\n isHousetrained(atOtherHomes: true)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Provided required arguments/Valid non-nullable value/No Arg on optional arg", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n dog {\n isHousetrained\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Provided required arguments/Valid non-nullable value/Multiple args", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n complicatedArgs {\n multipleReqs(req1: 1, req2: 2)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Provided required arguments/Valid non-nullable value/Multiple args reverse order", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n complicatedArgs {\n multipleReqs(req2: 2, req1: 1)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Provided required arguments/Valid non-nullable value/No args on multiple optional", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n complicatedArgs {\n multipleOpts\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Provided required arguments/Valid non-nullable value/One arg on multiple optional", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n complicatedArgs {\n multipleOpts(opt1: 1)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Provided required arguments/Valid non-nullable value/Second arg on multiple optional", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n complicatedArgs {\n multipleOpts(opt2: 1)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Provided required arguments/Valid non-nullable value/Multiple reqs on mixedList", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n complicatedArgs {\n multipleOptAndReq(req1: 3, req2: 4)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Provided required arguments/Valid non-nullable value/Multiple reqs 
and one opt on mixedList", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n complicatedArgs {\n multipleOptAndReq(req1: 3, req2: 4, opt1: 5)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Provided required arguments/Valid non-nullable value/All reqs and opts on mixedList", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n complicatedArgs {\n multipleOptAndReq(req1: 3, req2: 4, opt1: 5, opt2: 6)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Provided required arguments/Invalid non-nullable value/Missing one non-nullable argument", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n complicatedArgs {\n multipleReqs(req2: 2)\n }\n }\n ", - "errors": [ - { - "message": "Field \"multipleReqs\" argument \"req1\" of type \"Int!\" is required but not provided.", - "locations": [ - { - "line": 4, - "column": 13 - } - ] - } - ] - }, - { - "name": "Validate: Provided required arguments/Invalid non-nullable value/Missing multiple non-nullable arguments", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n complicatedArgs {\n multipleReqs\n }\n }\n ", - "errors": [ - { - "message": "Field \"multipleReqs\" argument \"req1\" of type \"Int!\" is required but not provided.", - "locations": [ - { - "line": 4, - "column": 13 - } - ] - }, - { - "message": "Field \"multipleReqs\" argument \"req2\" of type \"Int!\" is required but not provided.", - "locations": [ - { - "line": 4, - "column": 13 - } - ] - } - ] - }, - { - "name": "Validate: Provided required arguments/Invalid non-nullable value/Incorrect value and missing argument", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n complicatedArgs {\n multipleReqs(req1: \"one\")\n }\n }\n ", - "errors": [ - { - "message": "Field \"multipleReqs\" argument \"req2\" of type \"Int!\" is required but not provided.", - "locations": [ - { - "line": 4, - "column": 13 - } - ] - } - ] - }, - { - "name": "Validate: Provided required arguments/Directive arguments/ignores unknown directives", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n dog @unknown\n }\n ", - "errors": [] - }, - { - "name": "Validate: Provided required arguments/Directive arguments/with directives of valid types", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n dog @include(if: true) {\n name\n }\n human @skip(if: false) {\n name\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Provided required arguments/Directive arguments/with directive with missing types", - "rule": "ProvidedNonNullArguments", - "schema": 0, - "query": "\n {\n dog @include {\n name @skip\n }\n }\n ", - "errors": [ - { - "message": "Directive \"@include\" argument \"if\" of type \"Boolean!\" is required but not provided.", - "locations": [ - { - "line": 3, - "column": 15 - } - ] - }, - { - "message": "Directive \"@skip\" argument \"if\" of type \"Boolean!\" is required but not provided.", - "locations": [ - { - "line": 4, - "column": 18 - } - ] - } - ] - }, - { - "name": "Validate: Scalar leafs/valid scalar selection", - "rule": "ScalarLeafs", - "schema": 0, - "query": "\n fragment scalarSelection on Dog {\n barks\n }\n ", - "errors": [] - }, - { - "name": "Validate: Scalar leafs/object type missing selection", - "rule": "ScalarLeafs", - "schema": 0, - "query": "\n query directQueryOnObjectWithoutSubFields {\n human\n }\n ", - "errors": [ - { - "message": "Field \"human\" of type \"Human\" must have a selection of subfields. Did you mean \"human { ... 
}\"?", - "locations": [ - { - "line": 3, - "column": 9 - } - ] - } - ] - }, - { - "name": "Validate: Scalar leafs/interface type missing selection", - "rule": "ScalarLeafs", - "schema": 0, - "query": "\n {\n human { pets }\n }\n ", - "errors": [ - { - "message": "Field \"pets\" of type \"[Pet]\" must have a selection of subfields. Did you mean \"pets { ... }\"?", - "locations": [ - { - "line": 3, - "column": 17 - } - ] - } - ] - }, - { - "name": "Validate: Scalar leafs/valid scalar selection with args", - "rule": "ScalarLeafs", - "schema": 0, - "query": "\n fragment scalarSelectionWithArgs on Dog {\n doesKnowCommand(dogCommand: SIT)\n }\n ", - "errors": [] - }, - { - "name": "Validate: Scalar leafs/scalar selection not allowed on Boolean", - "rule": "ScalarLeafs", - "schema": 0, - "query": "\n fragment scalarSelectionsNotAllowedOnBoolean on Dog {\n barks { sinceWhen }\n }\n ", - "errors": [ - { - "message": "Field \"barks\" must not have a selection since type \"Boolean\" has no subfields.", - "locations": [ - { - "line": 3, - "column": 15 - } - ] - } - ] - }, - { - "name": "Validate: Scalar leafs/scalar selection not allowed on Enum", - "rule": "ScalarLeafs", - "schema": 0, - "query": "\n fragment scalarSelectionsNotAllowedOnEnum on Cat {\n furColor { inHexdec }\n }\n ", - "errors": [ - { - "message": "Field \"furColor\" must not have a selection since type \"FurColor\" has no subfields.", - "locations": [ - { - "line": 3, - "column": 18 - } - ] - } - ] - }, - { - "name": "Validate: Scalar leafs/scalar selection not allowed with args", - "rule": "ScalarLeafs", - "schema": 0, - "query": "\n fragment scalarSelectionsNotAllowedWithArgs on Dog {\n doesKnowCommand(dogCommand: SIT) { sinceWhen }\n }\n ", - "errors": [ - { - "message": "Field \"doesKnowCommand\" must not have a selection since type \"Boolean\" has no subfields.", - "locations": [ - { - "line": 3, - "column": 42 - } - ] - } - ] - }, - { - "name": "Validate: Scalar leafs/Scalar selection not allowed with directives", - "rule": "ScalarLeafs", - "schema": 0, - "query": "\n fragment scalarSelectionsNotAllowedWithDirectives on Dog {\n name @include(if: true) { isAlsoHumanName }\n }\n ", - "errors": [ - { - "message": "Field \"name\" must not have a selection since type \"String\" has no subfields.", - "locations": [ - { - "line": 3, - "column": 33 - } - ] - } - ] - }, - { - "name": "Validate: Scalar leafs/Scalar selection not allowed with directives and args", - "rule": "ScalarLeafs", - "schema": 0, - "query": "\n fragment scalarSelectionsNotAllowedWithDirectivesAndArgs on Dog {\n doesKnowCommand(dogCommand: SIT) @include(if: true) { sinceWhen }\n }\n ", - "errors": [ - { - "message": "Field \"doesKnowCommand\" must not have a selection since type \"Boolean\" has no subfields.", - "locations": [ - { - "line": 3, - "column": 61 - } - ] - } - ] - }, - { - "name": "Validate: Unique argument names/no arguments on field", - "rule": "UniqueArgumentNames", - "schema": 0, - "query": "\n {\n field\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique argument names/no arguments on directive", - "rule": "UniqueArgumentNames", - "schema": 0, - "query": "\n {\n field @directive\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique argument names/argument on field", - "rule": "UniqueArgumentNames", - "schema": 0, - "query": "\n {\n field(arg: \"value\")\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique argument names/argument on directive", - "rule": "UniqueArgumentNames", - "schema": 0, - "query": "\n {\n field 
@directive(arg: \"value\")\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique argument names/same argument on two fields", - "rule": "UniqueArgumentNames", - "schema": 0, - "query": "\n {\n one: field(arg: \"value\")\n two: field(arg: \"value\")\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique argument names/same argument on field and directive", - "rule": "UniqueArgumentNames", - "schema": 0, - "query": "\n {\n field(arg: \"value\") @directive(arg: \"value\")\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique argument names/same argument on two directives", - "rule": "UniqueArgumentNames", - "schema": 0, - "query": "\n {\n field @directive1(arg: \"value\") @directive2(arg: \"value\")\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique argument names/multiple field arguments", - "rule": "UniqueArgumentNames", - "schema": 0, - "query": "\n {\n field(arg1: \"value\", arg2: \"value\", arg3: \"value\")\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique argument names/multiple directive arguments", - "rule": "UniqueArgumentNames", - "schema": 0, - "query": "\n {\n field @directive(arg1: \"value\", arg2: \"value\", arg3: \"value\")\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique argument names/duplicate field arguments", - "rule": "UniqueArgumentNames", - "schema": 0, - "query": "\n {\n field(arg1: \"value\", arg1: \"value\")\n }\n ", - "errors": [ - { - "message": "There can be only one argument named \"arg1\".", - "locations": [ - { - "line": 3, - "column": 15 - }, - { - "line": 3, - "column": 30 - } - ] - } - ] - }, - { - "name": "Validate: Unique argument names/many duplicate field arguments", - "rule": "UniqueArgumentNames", - "schema": 0, - "query": "\n {\n field(arg1: \"value\", arg1: \"value\", arg1: \"value\")\n }\n ", - "errors": [ - { - "message": "There can be only one argument named \"arg1\".", - "locations": [ - { - "line": 3, - "column": 15 - }, - { - "line": 3, - "column": 30 - } - ] - }, - { - "message": "There can be only one argument named \"arg1\".", - "locations": [ - { - "line": 3, - "column": 15 - }, - { - "line": 3, - "column": 45 - } - ] - } - ] - }, - { - "name": "Validate: Unique argument names/duplicate directive arguments", - "rule": "UniqueArgumentNames", - "schema": 0, - "query": "\n {\n field @directive(arg1: \"value\", arg1: \"value\")\n }\n ", - "errors": [ - { - "message": "There can be only one argument named \"arg1\".", - "locations": [ - { - "line": 3, - "column": 26 - }, - { - "line": 3, - "column": 41 - } - ] - } - ] - }, - { - "name": "Validate: Unique argument names/many duplicate directive arguments", - "rule": "UniqueArgumentNames", - "schema": 0, - "query": "\n {\n field @directive(arg1: \"value\", arg1: \"value\", arg1: \"value\")\n }\n ", - "errors": [ - { - "message": "There can be only one argument named \"arg1\".", - "locations": [ - { - "line": 3, - "column": 26 - }, - { - "line": 3, - "column": 41 - } - ] - }, - { - "message": "There can be only one argument named \"arg1\".", - "locations": [ - { - "line": 3, - "column": 26 - }, - { - "line": 3, - "column": 56 - } - ] - } - ] - }, - { - "name": "Validate: Directives Are Unique Per Location/no directives", - "rule": "UniqueDirectivesPerLocation", - "schema": 0, - "query": "\n fragment Test on Type {\n field\n }\n ", - "errors": [] - }, - { - "name": "Validate: Directives Are Unique Per Location/unique directives in different locations", - "rule": "UniqueDirectivesPerLocation", - "schema": 0, - "query": "\n fragment Test on Type 
@directiveA {\n field @directiveB\n }\n ", - "errors": [] - }, - { - "name": "Validate: Directives Are Unique Per Location/unique directives in same locations", - "rule": "UniqueDirectivesPerLocation", - "schema": 0, - "query": "\n fragment Test on Type @directiveA @directiveB {\n field @directiveA @directiveB\n }\n ", - "errors": [] - }, - { - "name": "Validate: Directives Are Unique Per Location/same directives in different locations", - "rule": "UniqueDirectivesPerLocation", - "schema": 0, - "query": "\n fragment Test on Type @directiveA {\n field @directiveA\n }\n ", - "errors": [] - }, - { - "name": "Validate: Directives Are Unique Per Location/same directives in similar locations", - "rule": "UniqueDirectivesPerLocation", - "schema": 0, - "query": "\n fragment Test on Type {\n field @directive\n field @directive\n }\n ", - "errors": [] - }, - { - "name": "Validate: Directives Are Unique Per Location/duplicate directives in one location", - "rule": "UniqueDirectivesPerLocation", - "schema": 0, - "query": "\n fragment Test on Type {\n field @directive @directive\n }\n ", - "errors": [ - { - "message": "The directive \"directive\" can only be used once at this location.", - "locations": [ - { - "line": 3, - "column": 15 - }, - { - "line": 3, - "column": 26 - } - ] - } - ] - }, - { - "name": "Validate: Directives Are Unique Per Location/many duplicate directives in one location", - "rule": "UniqueDirectivesPerLocation", - "schema": 0, - "query": "\n fragment Test on Type {\n field @directive @directive @directive\n }\n ", - "errors": [ - { - "message": "The directive \"directive\" can only be used once at this location.", - "locations": [ - { - "line": 3, - "column": 15 - }, - { - "line": 3, - "column": 26 - } - ] - }, - { - "message": "The directive \"directive\" can only be used once at this location.", - "locations": [ - { - "line": 3, - "column": 15 - }, - { - "line": 3, - "column": 37 - } - ] - } - ] - }, - { - "name": "Validate: Directives Are Unique Per Location/different duplicate directives in one location", - "rule": "UniqueDirectivesPerLocation", - "schema": 0, - "query": "\n fragment Test on Type {\n field @directiveA @directiveB @directiveA @directiveB\n }\n ", - "errors": [ - { - "message": "The directive \"directiveA\" can only be used once at this location.", - "locations": [ - { - "line": 3, - "column": 15 - }, - { - "line": 3, - "column": 39 - } - ] - }, - { - "message": "The directive \"directiveB\" can only be used once at this location.", - "locations": [ - { - "line": 3, - "column": 27 - }, - { - "line": 3, - "column": 51 - } - ] - } - ] - }, - { - "name": "Validate: Directives Are Unique Per Location/duplicate directives in many locations", - "rule": "UniqueDirectivesPerLocation", - "schema": 0, - "query": "\n fragment Test on Type @directive @directive {\n field @directive @directive\n }\n ", - "errors": [ - { - "message": "The directive \"directive\" can only be used once at this location.", - "locations": [ - { - "line": 2, - "column": 29 - }, - { - "line": 2, - "column": 40 - } - ] - }, - { - "message": "The directive \"directive\" can only be used once at this location.", - "locations": [ - { - "line": 3, - "column": 15 - }, - { - "line": 3, - "column": 26 - } - ] - } - ] - }, - { - "name": "Validate: Unique fragment names/no fragments", - "rule": "UniqueFragmentNames", - "schema": 0, - "query": "\n {\n field\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique fragment names/one fragment", - "rule": "UniqueFragmentNames", - "schema": 0, - "query": "\n 
{\n ...fragA\n }\n\n fragment fragA on Type {\n field\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique fragment names/many fragments", - "rule": "UniqueFragmentNames", - "schema": 0, - "query": "\n {\n ...fragA\n ...fragB\n ...fragC\n }\n fragment fragA on Type {\n fieldA\n }\n fragment fragB on Type {\n fieldB\n }\n fragment fragC on Type {\n fieldC\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique fragment names/inline fragments are always unique", - "rule": "UniqueFragmentNames", - "schema": 0, - "query": "\n {\n ...on Type {\n fieldA\n }\n ...on Type {\n fieldB\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique fragment names/fragment and operation named the same", - "rule": "UniqueFragmentNames", - "schema": 0, - "query": "\n query Foo {\n ...Foo\n }\n fragment Foo on Type {\n field\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique fragment names/fragments named the same", - "rule": "UniqueFragmentNames", - "schema": 0, - "query": "\n {\n ...fragA\n }\n fragment fragA on Type {\n fieldA\n }\n fragment fragA on Type {\n fieldB\n }\n ", - "errors": [ - { - "message": "There can be only one fragment named \"fragA\".", - "locations": [ - { - "line": 5, - "column": 16 - }, - { - "line": 8, - "column": 16 - } - ] - } - ] - }, - { - "name": "Validate: Unique fragment names/fragments named the same without being referenced", - "rule": "UniqueFragmentNames", - "schema": 0, - "query": "\n fragment fragA on Type {\n fieldA\n }\n fragment fragA on Type {\n fieldB\n }\n ", - "errors": [ - { - "message": "There can be only one fragment named \"fragA\".", - "locations": [ - { - "line": 2, - "column": 16 - }, - { - "line": 5, - "column": 16 - } - ] - } - ] - }, - { - "name": "Validate: Unique input field names/input object with fields", - "rule": "UniqueInputFieldNames", - "schema": 0, - "query": "\n {\n field(arg: { f: true })\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique input field names/same input object within two args", - "rule": "UniqueInputFieldNames", - "schema": 0, - "query": "\n {\n field(arg1: { f: true }, arg2: { f: true })\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique input field names/multiple input object fields", - "rule": "UniqueInputFieldNames", - "schema": 0, - "query": "\n {\n field(arg: { f1: \"value\", f2: \"value\", f3: \"value\" })\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique input field names/allows for nested input objects with similar fields", - "rule": "UniqueInputFieldNames", - "schema": 0, - "query": "\n {\n field(arg: {\n deep: {\n deep: {\n id: 1\n }\n id: 1\n }\n id: 1\n })\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique input field names/duplicate input object fields", - "rule": "UniqueInputFieldNames", - "schema": 0, - "query": "\n {\n field(arg: { f1: \"value\", f1: \"value\" })\n }\n ", - "errors": [ - { - "message": "There can be only one input field named \"f1\".", - "locations": [ - { - "line": 3, - "column": 22 - }, - { - "line": 3, - "column": 35 - } - ] - } - ] - }, - { - "name": "Validate: Unique input field names/many duplicate input object fields", - "rule": "UniqueInputFieldNames", - "schema": 0, - "query": "\n {\n field(arg: { f1: \"value\", f1: \"value\", f1: \"value\" })\n }\n ", - "errors": [ - { - "message": "There can be only one input field named \"f1\".", - "locations": [ - { - "line": 3, - "column": 22 - }, - { - "line": 3, - "column": 35 - } - ] - }, - { - "message": "There can be only one input field named \"f1\".", - "locations": [ - { 
- "line": 3, - "column": 22 - }, - { - "line": 3, - "column": 48 - } - ] - } - ] - }, - { - "name": "Validate: Unique operation names/no operations", - "rule": "UniqueOperationNames", - "schema": 0, - "query": "\n fragment fragA on Type {\n field\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique operation names/one anon operation", - "rule": "UniqueOperationNames", - "schema": 0, - "query": "\n {\n field\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique operation names/one named operation", - "rule": "UniqueOperationNames", - "schema": 0, - "query": "\n query Foo {\n field\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique operation names/multiple operations", - "rule": "UniqueOperationNames", - "schema": 0, - "query": "\n query Foo {\n field\n }\n\n query Bar {\n field\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique operation names/multiple operations of different types", - "rule": "UniqueOperationNames", - "schema": 0, - "query": "\n query Foo {\n field\n }\n\n mutation Bar {\n field\n }\n\n subscription Baz {\n field\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique operation names/fragment and operation named the same", - "rule": "UniqueOperationNames", - "schema": 0, - "query": "\n query Foo {\n ...Foo\n }\n fragment Foo on Type {\n field\n }\n ", - "errors": [] - }, - { - "name": "Validate: Unique operation names/multiple operations of same name", - "rule": "UniqueOperationNames", - "schema": 0, - "query": "\n query Foo {\n fieldA\n }\n query Foo {\n fieldB\n }\n ", - "errors": [ - { - "message": "There can be only one operation named \"Foo\".", - "locations": [ - { - "line": 2, - "column": 13 - }, - { - "line": 5, - "column": 13 - } - ] - } - ] - }, - { - "name": "Validate: Unique operation names/multiple ops of same name of different types (mutation)", - "rule": "UniqueOperationNames", - "schema": 0, - "query": "\n query Foo {\n fieldA\n }\n mutation Foo {\n fieldB\n }\n ", - "errors": [ - { - "message": "There can be only one operation named \"Foo\".", - "locations": [ - { - "line": 2, - "column": 13 - }, - { - "line": 5, - "column": 16 - } - ] - } - ] - }, - { - "name": "Validate: Unique operation names/multiple ops of same name of different types (subscription)", - "rule": "UniqueOperationNames", - "schema": 0, - "query": "\n query Foo {\n fieldA\n }\n subscription Foo {\n fieldB\n }\n ", - "errors": [ - { - "message": "There can be only one operation named \"Foo\".", - "locations": [ - { - "line": 2, - "column": 13 - }, - { - "line": 5, - "column": 20 - } - ] - } - ] - }, - { - "name": "Validate: Unique variable names/unique variable names", - "rule": "UniqueVariableNames", - "schema": 0, - "query": "\n query A($x: Int, $y: String) { __typename }\n query B($x: String, $y: Int) { __typename }\n ", - "errors": [] - }, - { - "name": "Validate: Unique variable names/duplicate variable names", - "rule": "UniqueVariableNames", - "schema": 0, - "query": "\n query A($x: Int, $x: Int, $x: String) { __typename }\n query B($x: String, $x: Int) { __typename }\n query C($x: Int, $x: Int) { __typename }\n ", - "errors": [ - { - "message": "There can be only one variable named \"x\".", - "locations": [ - { - "line": 2, - "column": 16 - }, - { - "line": 2, - "column": 25 - } - ] - }, - { - "message": "There can be only one variable named \"x\".", - "locations": [ - { - "line": 2, - "column": 16 - }, - { - "line": 2, - "column": 34 - } - ] - }, - { - "message": "There can be only one variable named \"x\".", - "locations": [ - { - "line": 
3, - "column": 16 - }, - { - "line": 3, - "column": 28 - } - ] - }, - { - "message": "There can be only one variable named \"x\".", - "locations": [ - { - "line": 4, - "column": 16 - }, - { - "line": 4, - "column": 25 - } - ] - } - ] - }, - { - "name": "Validate: Variables are input types/input types are valid", - "rule": "VariablesAreInputTypes", - "schema": 0, - "query": "\n query Foo($a: String, $b: [Boolean!]!, $c: ComplexInput) {\n field(a: $a, b: $b, c: $c)\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are input types/output types are invalid", - "rule": "VariablesAreInputTypes", - "schema": 0, - "query": "\n query Foo($a: Dog, $b: [[CatOrDog!]]!, $c: Pet) {\n field(a: $a, b: $b, c: $c)\n }\n ", - "errors": [ - { - "locations": [ - { - "line": 2, - "column": 21 - } - ], - "message": "Variable \"$a\" cannot be non-input type \"Dog\"." - }, - { - "locations": [ - { - "line": 2, - "column": 30 - } - ], - "message": "Variable \"$b\" cannot be non-input type \"[[CatOrDog!]]!\"." - }, - { - "locations": [ - { - "line": 2, - "column": 50 - } - ], - "message": "Variable \"$c\" cannot be non-input type \"Pet\"." - } - ] - }, - { - "name": "Validate: Variables are in allowed positions/Boolean => Boolean", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($booleanArg: Boolean)\n {\n complicatedArgs {\n booleanArgField(booleanArg: $booleanArg)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/Boolean => Boolean within fragment", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n fragment booleanArgFrag on ComplicatedArgs {\n booleanArgField(booleanArg: $booleanArg)\n }\n query Query($booleanArg: Boolean)\n {\n complicatedArgs {\n ...booleanArgFrag\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/Boolean => Boolean within fragment", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($booleanArg: Boolean)\n {\n complicatedArgs {\n ...booleanArgFrag\n }\n }\n fragment booleanArgFrag on ComplicatedArgs {\n booleanArgField(booleanArg: $booleanArg)\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/Boolean! => Boolean", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($nonNullBooleanArg: Boolean!)\n {\n complicatedArgs {\n booleanArgField(booleanArg: $nonNullBooleanArg)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/Boolean! => Boolean within fragment", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n fragment booleanArgFrag on ComplicatedArgs {\n booleanArgField(booleanArg: $nonNullBooleanArg)\n }\n\n query Query($nonNullBooleanArg: Boolean!)\n {\n complicatedArgs {\n ...booleanArgFrag\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/Int => Int! with default", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($intArg: Int = 1)\n {\n complicatedArgs {\n nonNullIntArgField(nonNullIntArg: $intArg)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/[String] => [String]", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($stringListVar: [String])\n {\n complicatedArgs {\n stringListArgField(stringListArg: $stringListVar)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/[String!] 
=> [String]", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($stringListVar: [String!])\n {\n complicatedArgs {\n stringListArgField(stringListArg: $stringListVar)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/String => [String] in item position", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($stringVar: String)\n {\n complicatedArgs {\n stringListArgField(stringListArg: [$stringVar])\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/String! => [String] in item position", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($stringVar: String!)\n {\n complicatedArgs {\n stringListArgField(stringListArg: [$stringVar])\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/ComplexInput => ComplexInput", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($complexVar: ComplexInput)\n {\n complicatedArgs {\n complexArgField(complexArg: $complexVar)\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/ComplexInput => ComplexInput in field position", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($boolVar: Boolean = false)\n {\n complicatedArgs {\n complexArgField(complexArg: {requiredArg: $boolVar})\n }\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/Boolean! => Boolean! in directive", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($boolVar: Boolean!)\n {\n dog @include(if: $boolVar)\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/Boolean => Boolean! in directive with default", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($boolVar: Boolean = false)\n {\n dog @include(if: $boolVar)\n }\n ", - "errors": [] - }, - { - "name": "Validate: Variables are in allowed positions/Int => Int!", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($intArg: Int) {\n complicatedArgs {\n nonNullIntArgField(nonNullIntArg: $intArg)\n }\n }\n ", - "errors": [ - { - "message": "Variable \"$intArg\" of type \"Int\" used in position expecting type \"Int!\".", - "locations": [ - { - "line": 2, - "column": 19 - }, - { - "line": 4, - "column": 45 - } - ] - } - ] - }, - { - "name": "Validate: Variables are in allowed positions/Int => Int! within fragment", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n fragment nonNullIntArgFieldFrag on ComplicatedArgs {\n nonNullIntArgField(nonNullIntArg: $intArg)\n }\n\n query Query($intArg: Int) {\n complicatedArgs {\n ...nonNullIntArgFieldFrag\n }\n }\n ", - "errors": [ - { - "message": "Variable \"$intArg\" of type \"Int\" used in position expecting type \"Int!\".", - "locations": [ - { - "line": 6, - "column": 19 - }, - { - "line": 3, - "column": 43 - } - ] - } - ] - }, - { - "name": "Validate: Variables are in allowed positions/Int => Int! 
within nested fragment", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n fragment outerFrag on ComplicatedArgs {\n ...nonNullIntArgFieldFrag\n }\n\n fragment nonNullIntArgFieldFrag on ComplicatedArgs {\n nonNullIntArgField(nonNullIntArg: $intArg)\n }\n\n query Query($intArg: Int) {\n complicatedArgs {\n ...outerFrag\n }\n }\n ", - "errors": [ - { - "message": "Variable \"$intArg\" of type \"Int\" used in position expecting type \"Int!\".", - "locations": [ - { - "line": 10, - "column": 19 - }, - { - "line": 7, - "column": 43 - } - ] - } - ] - }, - { - "name": "Validate: Variables are in allowed positions/String over Boolean", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($stringVar: String) {\n complicatedArgs {\n booleanArgField(booleanArg: $stringVar)\n }\n }\n ", - "errors": [ - { - "message": "Variable \"$stringVar\" of type \"String\" used in position expecting type \"Boolean\".", - "locations": [ - { - "line": 2, - "column": 19 - }, - { - "line": 4, - "column": 39 - } - ] - } - ] - }, - { - "name": "Validate: Variables are in allowed positions/String => [String]", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($stringVar: String) {\n complicatedArgs {\n stringListArgField(stringListArg: $stringVar)\n }\n }\n ", - "errors": [ - { - "message": "Variable \"$stringVar\" of type \"String\" used in position expecting type \"[String]\".", - "locations": [ - { - "line": 2, - "column": 19 - }, - { - "line": 4, - "column": 45 - } - ] - } - ] - }, - { - "name": "Validate: Variables are in allowed positions/Boolean => Boolean! in directive", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($boolVar: Boolean) {\n dog @include(if: $boolVar)\n }\n ", - "errors": [ - { - "message": "Variable \"$boolVar\" of type \"Boolean\" used in position expecting type \"Boolean!\".", - "locations": [ - { - "line": 2, - "column": 19 - }, - { - "line": 3, - "column": 26 - } - ] - } - ] - }, - { - "name": "Validate: Variables are in allowed positions/String => Boolean! 
in directive", - "rule": "VariablesInAllowedPosition", - "schema": 0, - "query": "\n query Query($stringVar: String) {\n dog @include(if: $stringVar)\n }\n ", - "errors": [ - { - "message": "Variable \"$stringVar\" of type \"String\" used in position expecting type \"Boolean!\".", - "locations": [ - { - "line": 2, - "column": 19 - }, - { - "line": 3, - "column": 26 - } - ] - } - ] - } - ] -} \ No newline at end of file diff --git a/vendor/github.com/neelance/graphql-go/internal/validation/suggestion.go b/vendor/github.com/neelance/graphql-go/internal/validation/suggestion.go deleted file mode 100644 index 9702b5f5..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/validation/suggestion.go +++ /dev/null @@ -1,71 +0,0 @@ -package validation - -import ( - "fmt" - "sort" - "strconv" - "strings" -) - -func makeSuggestion(prefix string, options []string, input string) string { - var selected []string - distances := make(map[string]int) - for _, opt := range options { - distance := levenshteinDistance(input, opt) - threshold := max(len(input)/2, max(len(opt)/2, 1)) - if distance < threshold { - selected = append(selected, opt) - distances[opt] = distance - } - } - - if len(selected) == 0 { - return "" - } - sort.Slice(selected, func(i, j int) bool { - return distances[selected[i]] < distances[selected[j]] - }) - - parts := make([]string, len(selected)) - for i, opt := range selected { - parts[i] = strconv.Quote(opt) - } - if len(parts) > 1 { - parts[len(parts)-1] = "or " + parts[len(parts)-1] - } - return fmt.Sprintf(" %s %s?", prefix, strings.Join(parts, ", ")) -} - -func levenshteinDistance(s1, s2 string) int { - column := make([]int, len(s1)+1) - for y := range s1 { - column[y+1] = y + 1 - } - for x, rx := range s2 { - column[0] = x + 1 - lastdiag := x - for y, ry := range s1 { - olddiag := column[y+1] - if rx != ry { - lastdiag++ - } - column[y+1] = min(column[y+1]+1, min(column[y]+1, lastdiag)) - lastdiag = olddiag - } - } - return column[len(s1)] -} - -func min(a, b int) int { - if a < b { - return a - } - return b -} - -func max(a, b int) int { - if a > b { - return a - } - return b -} diff --git a/vendor/github.com/neelance/graphql-go/internal/validation/validation.go b/vendor/github.com/neelance/graphql-go/internal/validation/validation.go deleted file mode 100644 index a537d458..00000000 --- a/vendor/github.com/neelance/graphql-go/internal/validation/validation.go +++ /dev/null @@ -1,860 +0,0 @@ -package validation - -import ( - "fmt" - "math" - "reflect" - "strconv" - "strings" - "text/scanner" - - "github.com/neelance/graphql-go/errors" - "github.com/neelance/graphql-go/internal/common" - "github.com/neelance/graphql-go/internal/query" - "github.com/neelance/graphql-go/internal/schema" -) - -type varSet map[*common.InputValue]struct{} - -type selectionPair struct{ a, b query.Selection } - -type fieldInfo struct { - sf *schema.Field - parent schema.NamedType -} - -type context struct { - schema *schema.Schema - doc *query.Document - errs []*errors.QueryError - opErrs map[*query.Operation][]*errors.QueryError - usedVars map[*query.Operation]varSet - fieldMap map[*query.Field]fieldInfo - overlapValidated map[selectionPair]struct{} -} - -func (c *context) addErr(loc errors.Location, rule string, format string, a ...interface{}) { - c.addErrMultiLoc([]errors.Location{loc}, rule, format, a...) 
-} - -func (c *context) addErrMultiLoc(locs []errors.Location, rule string, format string, a ...interface{}) { - c.errs = append(c.errs, &errors.QueryError{ - Message: fmt.Sprintf(format, a...), - Locations: locs, - Rule: rule, - }) -} - -type opContext struct { - *context - ops []*query.Operation -} - -func Validate(s *schema.Schema, doc *query.Document) []*errors.QueryError { - c := &context{ - schema: s, - doc: doc, - opErrs: make(map[*query.Operation][]*errors.QueryError), - usedVars: make(map[*query.Operation]varSet), - fieldMap: make(map[*query.Field]fieldInfo), - overlapValidated: make(map[selectionPair]struct{}), - } - - opNames := make(nameSet) - fragUsedBy := make(map[*query.FragmentDecl][]*query.Operation) - for _, op := range doc.Operations { - c.usedVars[op] = make(varSet) - opc := &opContext{c, []*query.Operation{op}} - - if op.Name.Name == "" && len(doc.Operations) != 1 { - c.addErr(op.Loc, "LoneAnonymousOperation", "This anonymous operation must be the only defined operation.") - } - if op.Name.Name != "" { - validateName(c, opNames, op.Name, "UniqueOperationNames", "operation") - } - - validateDirectives(opc, string(op.Type), op.Directives) - - varNames := make(nameSet) - for _, v := range op.Vars { - validateName(c, varNames, v.Name, "UniqueVariableNames", "variable") - - t := resolveType(c, v.Type) - if !canBeInput(t) { - c.addErr(v.TypeLoc, "VariablesAreInputTypes", "Variable %q cannot be non-input type %q.", "$"+v.Name.Name, t) - } - - if v.Default != nil { - validateLiteral(opc, v.Default) - - if t != nil { - if nn, ok := t.(*common.NonNull); ok { - c.addErr(v.Default.Location(), "DefaultValuesOfCorrectType", "Variable %q of type %q is required and will not use the default value. Perhaps you meant to use type %q.", "$"+v.Name.Name, t, nn.OfType) - } - - if ok, reason := validateValueType(opc, v.Default, t); !ok { - c.addErr(v.Default.Location(), "DefaultValuesOfCorrectType", "Variable %q of type %q has invalid default value %s.\n%s", "$"+v.Name.Name, t, v.Default, reason) - } - } - } - } - - var entryPoint schema.NamedType - switch op.Type { - case query.Query: - entryPoint = s.EntryPoints["query"] - case query.Mutation: - entryPoint = s.EntryPoints["mutation"] - case query.Subscription: - entryPoint = s.EntryPoints["subscription"] - default: - panic("unreachable") - } - - validateSelectionSet(opc, op.Selections, entryPoint) - - fragUsed := make(map[*query.FragmentDecl]struct{}) - markUsedFragments(c, op.Selections, fragUsed) - for frag := range fragUsed { - fragUsedBy[frag] = append(fragUsedBy[frag], op) - } - } - - fragNames := make(nameSet) - fragVisited := make(map[*query.FragmentDecl]struct{}) - for _, frag := range doc.Fragments { - opc := &opContext{c, fragUsedBy[frag]} - - validateName(c, fragNames, frag.Name, "UniqueFragmentNames", "fragment") - validateDirectives(opc, "FRAGMENT_DEFINITION", frag.Directives) - - t := unwrapType(resolveType(c, &frag.On)) - // continue even if t is nil - if t != nil && !canBeFragment(t) { - c.addErr(frag.On.Loc, "FragmentsOnCompositeTypes", "Fragment %q cannot condition on non composite type %q.", frag.Name.Name, t) - continue - } - - validateSelectionSet(opc, frag.Selections, t) - - if _, ok := fragVisited[frag]; !ok { - detectFragmentCycle(c, frag.Selections, fragVisited, nil, map[string]int{frag.Name.Name: 0}) - } - } - - for _, frag := range doc.Fragments { - if len(fragUsedBy[frag]) == 0 { - c.addErr(frag.Loc, "NoUnusedFragments", "Fragment %q is never used.", frag.Name.Name) - } - } - - for _, op := range doc.Operations { 
- c.errs = append(c.errs, c.opErrs[op]...) - - opUsedVars := c.usedVars[op] - for _, v := range op.Vars { - if _, ok := opUsedVars[v]; !ok { - opSuffix := "" - if op.Name.Name != "" { - opSuffix = fmt.Sprintf(" in operation %q", op.Name.Name) - } - c.addErr(v.Loc, "NoUnusedVariables", "Variable %q is never used%s.", "$"+v.Name.Name, opSuffix) - } - } - } - - return c.errs -} - -func validateSelectionSet(c *opContext, sels []query.Selection, t schema.NamedType) { - for _, sel := range sels { - validateSelection(c, sel, t) - } - - for i, a := range sels { - for _, b := range sels[i+1:] { - c.validateOverlap(a, b, nil, nil) - } - } -} - -func validateSelection(c *opContext, sel query.Selection, t schema.NamedType) { - switch sel := sel.(type) { - case *query.Field: - validateDirectives(c, "FIELD", sel.Directives) - - fieldName := sel.Name.Name - var f *schema.Field - switch fieldName { - case "__typename": - f = &schema.Field{ - Name: "__typename", - Type: c.schema.Types["String"], - } - case "__schema": - f = &schema.Field{ - Name: "__schema", - Type: c.schema.Types["__Schema"], - } - case "__type": - f = &schema.Field{ - Name: "__type", - Args: common.InputValueList{ - &common.InputValue{ - Name: common.Ident{Name: "name"}, - Type: &common.NonNull{OfType: c.schema.Types["String"]}, - }, - }, - Type: c.schema.Types["__Type"], - } - default: - f = fields(t).Get(fieldName) - if f == nil && t != nil { - suggestion := makeSuggestion("Did you mean", fields(t).Names(), fieldName) - c.addErr(sel.Alias.Loc, "FieldsOnCorrectType", "Cannot query field %q on type %q.%s", fieldName, t, suggestion) - } - } - c.fieldMap[sel] = fieldInfo{sf: f, parent: t} - - validateArgumentLiterals(c, sel.Arguments) - if f != nil { - validateArgumentTypes(c, sel.Arguments, f.Args, sel.Alias.Loc, - func() string { return fmt.Sprintf("field %q of type %q", fieldName, t) }, - func() string { return fmt.Sprintf("Field %q", fieldName) }, - ) - } - - var ft common.Type - if f != nil { - ft = f.Type - sf := hasSubfields(ft) - if sf && sel.Selections == nil { - c.addErr(sel.Alias.Loc, "ScalarLeafs", "Field %q of type %q must have a selection of subfields. Did you mean \"%s { ... 
}\"?", fieldName, ft, fieldName) - } - if !sf && sel.Selections != nil { - c.addErr(sel.SelectionSetLoc, "ScalarLeafs", "Field %q must not have a selection since type %q has no subfields.", fieldName, ft) - } - } - if sel.Selections != nil { - validateSelectionSet(c, sel.Selections, unwrapType(ft)) - } - - case *query.InlineFragment: - validateDirectives(c, "INLINE_FRAGMENT", sel.Directives) - if sel.On.Name != "" { - fragTyp := unwrapType(resolveType(c.context, &sel.On)) - if fragTyp != nil && !compatible(t, fragTyp) { - c.addErr(sel.Loc, "PossibleFragmentSpreads", "Fragment cannot be spread here as objects of type %q can never be of type %q.", t, fragTyp) - } - t = fragTyp - // continue even if t is nil - } - if t != nil && !canBeFragment(t) { - c.addErr(sel.On.Loc, "FragmentsOnCompositeTypes", "Fragment cannot condition on non composite type %q.", t) - return - } - validateSelectionSet(c, sel.Selections, unwrapType(t)) - - case *query.FragmentSpread: - validateDirectives(c, "FRAGMENT_SPREAD", sel.Directives) - frag := c.doc.Fragments.Get(sel.Name.Name) - if frag == nil { - c.addErr(sel.Name.Loc, "KnownFragmentNames", "Unknown fragment %q.", sel.Name.Name) - return - } - fragTyp := c.schema.Types[frag.On.Name] - if !compatible(t, fragTyp) { - c.addErr(sel.Loc, "PossibleFragmentSpreads", "Fragment %q cannot be spread here as objects of type %q can never be of type %q.", frag.Name.Name, t, fragTyp) - } - - default: - panic("unreachable") - } -} - -func compatible(a, b common.Type) bool { - for _, pta := range possibleTypes(a) { - for _, ptb := range possibleTypes(b) { - if pta == ptb { - return true - } - } - } - return false -} - -func possibleTypes(t common.Type) []*schema.Object { - switch t := t.(type) { - case *schema.Object: - return []*schema.Object{t} - case *schema.Interface: - return t.PossibleTypes - case *schema.Union: - return t.PossibleTypes - default: - return nil - } -} - -func markUsedFragments(c *context, sels []query.Selection, fragUsed map[*query.FragmentDecl]struct{}) { - for _, sel := range sels { - switch sel := sel.(type) { - case *query.Field: - if sel.Selections != nil { - markUsedFragments(c, sel.Selections, fragUsed) - } - - case *query.InlineFragment: - markUsedFragments(c, sel.Selections, fragUsed) - - case *query.FragmentSpread: - frag := c.doc.Fragments.Get(sel.Name.Name) - if frag == nil { - return - } - - if _, ok := fragUsed[frag]; ok { - return - } - fragUsed[frag] = struct{}{} - markUsedFragments(c, frag.Selections, fragUsed) - - default: - panic("unreachable") - } - } -} - -func detectFragmentCycle(c *context, sels []query.Selection, fragVisited map[*query.FragmentDecl]struct{}, spreadPath []*query.FragmentSpread, spreadPathIndex map[string]int) { - for _, sel := range sels { - detectFragmentCycleSel(c, sel, fragVisited, spreadPath, spreadPathIndex) - } -} - -func detectFragmentCycleSel(c *context, sel query.Selection, fragVisited map[*query.FragmentDecl]struct{}, spreadPath []*query.FragmentSpread, spreadPathIndex map[string]int) { - switch sel := sel.(type) { - case *query.Field: - if sel.Selections != nil { - detectFragmentCycle(c, sel.Selections, fragVisited, spreadPath, spreadPathIndex) - } - - case *query.InlineFragment: - detectFragmentCycle(c, sel.Selections, fragVisited, spreadPath, spreadPathIndex) - - case *query.FragmentSpread: - frag := c.doc.Fragments.Get(sel.Name.Name) - if frag == nil { - return - } - - spreadPath = append(spreadPath, sel) - if i, ok := spreadPathIndex[frag.Name.Name]; ok { - cyclePath := spreadPath[i:] - via := "" - if 
len(cyclePath) > 1 { - names := make([]string, len(cyclePath)-1) - for i, frag := range cyclePath[:len(cyclePath)-1] { - names[i] = frag.Name.Name - } - via = " via " + strings.Join(names, ", ") - } - - locs := make([]errors.Location, len(cyclePath)) - for i, frag := range cyclePath { - locs[i] = frag.Loc - } - c.addErrMultiLoc(locs, "NoFragmentCycles", "Cannot spread fragment %q within itself%s.", frag.Name.Name, via) - return - } - - if _, ok := fragVisited[frag]; ok { - return - } - fragVisited[frag] = struct{}{} - - spreadPathIndex[frag.Name.Name] = len(spreadPath) - detectFragmentCycle(c, frag.Selections, fragVisited, spreadPath, spreadPathIndex) - delete(spreadPathIndex, frag.Name.Name) - - default: - panic("unreachable") - } -} - -func (c *context) validateOverlap(a, b query.Selection, reasons *[]string, locs *[]errors.Location) { - if a == b { - return - } - - if _, ok := c.overlapValidated[selectionPair{a, b}]; ok { - return - } - c.overlapValidated[selectionPair{a, b}] = struct{}{} - c.overlapValidated[selectionPair{b, a}] = struct{}{} - - switch a := a.(type) { - case *query.Field: - switch b := b.(type) { - case *query.Field: - if b.Alias.Loc.Before(a.Alias.Loc) { - a, b = b, a - } - if reasons2, locs2 := c.validateFieldOverlap(a, b); len(reasons2) != 0 { - locs2 = append(locs2, a.Alias.Loc, b.Alias.Loc) - if reasons == nil { - c.addErrMultiLoc(locs2, "OverlappingFieldsCanBeMerged", "Fields %q conflict because %s. Use different aliases on the fields to fetch both if this was intentional.", a.Alias.Name, strings.Join(reasons2, " and ")) - return - } - for _, r := range reasons2 { - *reasons = append(*reasons, fmt.Sprintf("subfields %q conflict because %s", a.Alias.Name, r)) - } - *locs = append(*locs, locs2...) - } - - case *query.InlineFragment: - for _, sel := range b.Selections { - c.validateOverlap(a, sel, reasons, locs) - } - - case *query.FragmentSpread: - if frag := c.doc.Fragments.Get(b.Name.Name); frag != nil { - for _, sel := range frag.Selections { - c.validateOverlap(a, sel, reasons, locs) - } - } - - default: - panic("unreachable") - } - - case *query.InlineFragment: - for _, sel := range a.Selections { - c.validateOverlap(sel, b, reasons, locs) - } - - case *query.FragmentSpread: - if frag := c.doc.Fragments.Get(a.Name.Name); frag != nil { - for _, sel := range frag.Selections { - c.validateOverlap(sel, b, reasons, locs) - } - } - - default: - panic("unreachable") - } -} - -func (c *context) validateFieldOverlap(a, b *query.Field) ([]string, []errors.Location) { - if a.Alias.Name != b.Alias.Name { - return nil, nil - } - - if asf := c.fieldMap[a].sf; asf != nil { - if bsf := c.fieldMap[b].sf; bsf != nil { - if !typesCompatible(asf.Type, bsf.Type) { - return []string{fmt.Sprintf("they return conflicting types %s and %s", asf.Type, bsf.Type)}, nil - } - } - } - - at := c.fieldMap[a].parent - bt := c.fieldMap[b].parent - if at == nil || bt == nil || at == bt { - if a.Name.Name != b.Name.Name { - return []string{fmt.Sprintf("%s and %s are different fields", a.Name.Name, b.Name.Name)}, nil - } - - if argumentsConflict(a.Arguments, b.Arguments) { - return []string{"they have differing arguments"}, nil - } - } - - var reasons []string - var locs []errors.Location - for _, a2 := range a.Selections { - for _, b2 := range b.Selections { - c.validateOverlap(a2, b2, &reasons, &locs) - } - } - return reasons, locs -} - -func argumentsConflict(a, b common.ArgumentList) bool { - if len(a) != len(b) { - return true - } - for _, argA := range a { - valB, ok := b.Get(argA.Name.Name) 
- if !ok || !reflect.DeepEqual(argA.Value.Value(nil), valB.Value(nil)) { - return true - } - } - return false -} - -func fields(t common.Type) schema.FieldList { - switch t := t.(type) { - case *schema.Object: - return t.Fields - case *schema.Interface: - return t.Fields - default: - return nil - } -} - -func unwrapType(t common.Type) schema.NamedType { - if t == nil { - return nil - } - for { - switch t2 := t.(type) { - case schema.NamedType: - return t2 - case *common.List: - t = t2.OfType - case *common.NonNull: - t = t2.OfType - default: - panic("unreachable") - } - } -} - -func resolveType(c *context, t common.Type) common.Type { - t2, err := common.ResolveType(t, c.schema.Resolve) - if err != nil { - c.errs = append(c.errs, err) - } - return t2 -} - -func validateDirectives(c *opContext, loc string, directives common.DirectiveList) { - directiveNames := make(nameSet) - for _, d := range directives { - dirName := d.Name.Name - validateNameCustomMsg(c.context, directiveNames, d.Name, "UniqueDirectivesPerLocation", func() string { - return fmt.Sprintf("The directive %q can only be used once at this location.", dirName) - }) - - validateArgumentLiterals(c, d.Args) - - dd, ok := c.schema.Directives[dirName] - if !ok { - c.addErr(d.Name.Loc, "KnownDirectives", "Unknown directive %q.", dirName) - continue - } - - locOK := false - for _, allowedLoc := range dd.Locs { - if loc == allowedLoc { - locOK = true - break - } - } - if !locOK { - c.addErr(d.Name.Loc, "KnownDirectives", "Directive %q may not be used on %s.", dirName, loc) - } - - validateArgumentTypes(c, d.Args, dd.Args, d.Name.Loc, - func() string { return fmt.Sprintf("directive %q", "@"+dirName) }, - func() string { return fmt.Sprintf("Directive %q", "@"+dirName) }, - ) - } - return -} - -type nameSet map[string]errors.Location - -func validateName(c *context, set nameSet, name common.Ident, rule string, kind string) { - validateNameCustomMsg(c, set, name, rule, func() string { - return fmt.Sprintf("There can be only one %s named %q.", kind, name.Name) - }) -} - -func validateNameCustomMsg(c *context, set nameSet, name common.Ident, rule string, msg func() string) { - if loc, ok := set[name.Name]; ok { - c.addErrMultiLoc([]errors.Location{loc, name.Loc}, rule, msg()) - return - } - set[name.Name] = name.Loc - return -} - -func validateArgumentTypes(c *opContext, args common.ArgumentList, argDecls common.InputValueList, loc errors.Location, owner1, owner2 func() string) { - for _, selArg := range args { - arg := argDecls.Get(selArg.Name.Name) - if arg == nil { - c.addErr(selArg.Name.Loc, "KnownArgumentNames", "Unknown argument %q on %s.", selArg.Name.Name, owner1()) - continue - } - value := selArg.Value - if ok, reason := validateValueType(c, value, arg.Type); !ok { - c.addErr(value.Location(), "ArgumentsOfCorrectType", "Argument %q has invalid value %s.\n%s", arg.Name.Name, value, reason) - } - } - for _, decl := range argDecls { - if _, ok := decl.Type.(*common.NonNull); ok { - if _, ok := args.Get(decl.Name.Name); !ok { - c.addErr(loc, "ProvidedNonNullArguments", "%s argument %q of type %q is required but not provided.", owner2(), decl.Name.Name, decl.Type) - } - } - } -} - -func validateArgumentLiterals(c *opContext, args common.ArgumentList) { - argNames := make(nameSet) - for _, arg := range args { - validateName(c.context, argNames, arg.Name, "UniqueArgumentNames", "argument") - validateLiteral(c, arg.Value) - } -} - -func validateLiteral(c *opContext, l common.Literal) { - switch l := l.(type) { - case *common.ObjectLit: - 
fieldNames := make(nameSet) - for _, f := range l.Fields { - validateName(c.context, fieldNames, f.Name, "UniqueInputFieldNames", "input field") - validateLiteral(c, f.Value) - } - case *common.ListLit: - for _, entry := range l.Entries { - validateLiteral(c, entry) - } - case *common.Variable: - for _, op := range c.ops { - v := op.Vars.Get(l.Name) - if v == nil { - byOp := "" - if op.Name.Name != "" { - byOp = fmt.Sprintf(" by operation %q", op.Name.Name) - } - c.opErrs[op] = append(c.opErrs[op], &errors.QueryError{ - Message: fmt.Sprintf("Variable %q is not defined%s.", "$"+l.Name, byOp), - Locations: []errors.Location{l.Loc, op.Loc}, - Rule: "NoUndefinedVariables", - }) - continue - } - c.usedVars[op][v] = struct{}{} - } - } -} - -func validateValueType(c *opContext, v common.Literal, t common.Type) (bool, string) { - if v, ok := v.(*common.Variable); ok { - for _, op := range c.ops { - if v2 := op.Vars.Get(v.Name); v2 != nil { - t2, err := common.ResolveType(v2.Type, c.schema.Resolve) - if _, ok := t2.(*common.NonNull); !ok && v2.Default != nil { - t2 = &common.NonNull{OfType: t2} - } - if err == nil && !typeCanBeUsedAs(t2, t) { - c.addErrMultiLoc([]errors.Location{v2.Loc, v.Loc}, "VariablesInAllowedPosition", "Variable %q of type %q used in position expecting type %q.", "$"+v.Name, t2, t) - } - } - } - return true, "" - } - - if nn, ok := t.(*common.NonNull); ok { - if isNull(v) { - return false, fmt.Sprintf("Expected %q, found null.", t) - } - t = nn.OfType - } - if isNull(v) { - return true, "" - } - - switch t := t.(type) { - case *schema.Scalar, *schema.Enum: - if lit, ok := v.(*common.BasicLit); ok { - if validateBasicLit(lit, t) { - return true, "" - } - } - - case *common.List: - list, ok := v.(*common.ListLit) - if !ok { - return validateValueType(c, v, t.OfType) // single value instead of list - } - for i, entry := range list.Entries { - if ok, reason := validateValueType(c, entry, t.OfType); !ok { - return false, fmt.Sprintf("In element #%d: %s", i, reason) - } - } - return true, "" - - case *schema.InputObject: - v, ok := v.(*common.ObjectLit) - if !ok { - return false, fmt.Sprintf("Expected %q, found not an object.", t) - } - for _, f := range v.Fields { - name := f.Name.Name - iv := t.Values.Get(name) - if iv == nil { - return false, fmt.Sprintf("In field %q: Unknown field.", name) - } - if ok, reason := validateValueType(c, f.Value, iv.Type); !ok { - return false, fmt.Sprintf("In field %q: %s", name, reason) - } - } - for _, iv := range t.Values { - found := false - for _, f := range v.Fields { - if f.Name.Name == iv.Name.Name { - found = true - break - } - } - if !found { - if _, ok := iv.Type.(*common.NonNull); ok && iv.Default == nil { - return false, fmt.Sprintf("In field %q: Expected %q, found null.", iv.Name.Name, iv.Type) - } - } - } - return true, "" - } - - return false, fmt.Sprintf("Expected type %q, found %s.", t, v) -} - -func validateBasicLit(v *common.BasicLit, t common.Type) bool { - switch t := t.(type) { - case *schema.Scalar: - switch t.Name { - case "Int": - if v.Type != scanner.Int { - return false - } - f, err := strconv.ParseFloat(v.Text, 64) - if err != nil { - panic(err) - } - return f >= math.MinInt32 && f <= math.MaxInt32 - case "Float": - return v.Type == scanner.Int || v.Type == scanner.Float - case "String": - return v.Type == scanner.String - case "Boolean": - return v.Type == scanner.Ident && (v.Text == "true" || v.Text == "false") - case "ID": - return v.Type == scanner.Int || v.Type == scanner.String - default: - //TODO: Type-check 
against expected type by Unmarshalling - return true - } - - case *schema.Enum: - if v.Type != scanner.Ident { - return false - } - for _, option := range t.Values { - if option.Name == v.Text { - return true - } - } - return false - } - - return false -} - -func canBeFragment(t common.Type) bool { - switch t.(type) { - case *schema.Object, *schema.Interface, *schema.Union: - return true - default: - return false - } -} - -func canBeInput(t common.Type) bool { - switch t := t.(type) { - case *schema.InputObject, *schema.Scalar, *schema.Enum: - return true - case *common.List: - return canBeInput(t.OfType) - case *common.NonNull: - return canBeInput(t.OfType) - default: - return false - } -} - -func hasSubfields(t common.Type) bool { - switch t := t.(type) { - case *schema.Object, *schema.Interface, *schema.Union: - return true - case *common.List: - return hasSubfields(t.OfType) - case *common.NonNull: - return hasSubfields(t.OfType) - default: - return false - } -} - -func isLeaf(t common.Type) bool { - switch t.(type) { - case *schema.Scalar, *schema.Enum: - return true - default: - return false - } -} - -func isNull(lit interface{}) bool { - _, ok := lit.(*common.NullLit) - return ok -} - -func typesCompatible(a, b common.Type) bool { - al, aIsList := a.(*common.List) - bl, bIsList := b.(*common.List) - if aIsList || bIsList { - return aIsList && bIsList && typesCompatible(al.OfType, bl.OfType) - } - - ann, aIsNN := a.(*common.NonNull) - bnn, bIsNN := b.(*common.NonNull) - if aIsNN || bIsNN { - return aIsNN && bIsNN && typesCompatible(ann.OfType, bnn.OfType) - } - - if isLeaf(a) || isLeaf(b) { - return a == b - } - - return true -} - -func typeCanBeUsedAs(t, as common.Type) bool { - nnT, okT := t.(*common.NonNull) - if okT { - t = nnT.OfType - } - - nnAs, okAs := as.(*common.NonNull) - if okAs { - as = nnAs.OfType - if !okT { - return false // nullable can not be used as non-null - } - } - - if t == as { - return true - } - - if lT, ok := t.(*common.List); ok { - if lAs, ok := as.(*common.List); ok { - return typeCanBeUsedAs(lT.OfType, lAs.OfType) - } - } - return false -} diff --git a/vendor/github.com/neelance/graphql-go/introspection.go b/vendor/github.com/neelance/graphql-go/introspection.go deleted file mode 100644 index f72a7700..00000000 --- a/vendor/github.com/neelance/graphql-go/introspection.go +++ /dev/null @@ -1,117 +0,0 @@ -package graphql - -import ( - "context" - "encoding/json" - - "github.com/neelance/graphql-go/internal/exec/resolvable" - "github.com/neelance/graphql-go/introspection" -) - -// Inspect allows inspection of the given schema. -func (s *Schema) Inspect() *introspection.Schema { - return introspection.WrapSchema(s.schema) -} - -// ToJSON encodes the schema in a JSON format used by tools like Relay. 
-func (s *Schema) ToJSON() ([]byte, error) { - result := s.exec(context.Background(), introspectionQuery, "", nil, &resolvable.Schema{ - Query: &resolvable.Object{}, - Schema: *s.schema, - }) - if len(result.Errors) != 0 { - panic(result.Errors[0]) - } - return json.MarshalIndent(result.Data, "", "\t") -} - -var introspectionQuery = ` - query { - __schema { - queryType { name } - mutationType { name } - subscriptionType { name } - types { - ...FullType - } - directives { - name - description - locations - args { - ...InputValue - } - } - } - } - fragment FullType on __Type { - kind - name - description - fields(includeDeprecated: true) { - name - description - args { - ...InputValue - } - type { - ...TypeRef - } - isDeprecated - deprecationReason - } - inputFields { - ...InputValue - } - interfaces { - ...TypeRef - } - enumValues(includeDeprecated: true) { - name - description - isDeprecated - deprecationReason - } - possibleTypes { - ...TypeRef - } - } - fragment InputValue on __InputValue { - name - description - type { ...TypeRef } - defaultValue - } - fragment TypeRef on __Type { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - ofType { - kind - name - } - } - } - } - } - } - } - } -` diff --git a/vendor/github.com/neelance/graphql-go/introspection/introspection.go b/vendor/github.com/neelance/graphql-go/introspection/introspection.go deleted file mode 100644 index d2969b7a..00000000 --- a/vendor/github.com/neelance/graphql-go/introspection/introspection.go +++ /dev/null @@ -1,313 +0,0 @@ -package introspection - -import ( - "sort" - - "github.com/neelance/graphql-go/internal/common" - "github.com/neelance/graphql-go/internal/schema" -) - -type Schema struct { - schema *schema.Schema -} - -// WrapSchema is only used internally. -func WrapSchema(schema *schema.Schema) *Schema { - return &Schema{schema} -} - -func (r *Schema) Types() []*Type { - var names []string - for name := range r.schema.Types { - names = append(names, name) - } - sort.Strings(names) - - l := make([]*Type, len(names)) - for i, name := range names { - l[i] = &Type{r.schema.Types[name]} - } - return l -} - -func (r *Schema) Directives() []*Directive { - var names []string - for name := range r.schema.Directives { - names = append(names, name) - } - sort.Strings(names) - - l := make([]*Directive, len(names)) - for i, name := range names { - l[i] = &Directive{r.schema.Directives[name]} - } - return l -} - -func (r *Schema) QueryType() *Type { - t, ok := r.schema.EntryPoints["query"] - if !ok { - return nil - } - return &Type{t} -} - -func (r *Schema) MutationType() *Type { - t, ok := r.schema.EntryPoints["mutation"] - if !ok { - return nil - } - return &Type{t} -} - -func (r *Schema) SubscriptionType() *Type { - t, ok := r.schema.EntryPoints["subscription"] - if !ok { - return nil - } - return &Type{t} -} - -type Type struct { - typ common.Type -} - -// WrapType is only used internally. 
-func WrapType(typ common.Type) *Type { - return &Type{typ} -} - -func (r *Type) Kind() string { - return r.typ.Kind() -} - -func (r *Type) Name() *string { - if named, ok := r.typ.(schema.NamedType); ok { - name := named.TypeName() - return &name - } - return nil -} - -func (r *Type) Description() *string { - if named, ok := r.typ.(schema.NamedType); ok { - desc := named.Description() - if desc == "" { - return nil - } - return &desc - } - return nil -} - -func (r *Type) Fields(args *struct{ IncludeDeprecated bool }) *[]*Field { - var fields schema.FieldList - switch t := r.typ.(type) { - case *schema.Object: - fields = t.Fields - case *schema.Interface: - fields = t.Fields - default: - return nil - } - - var l []*Field - for _, f := range fields { - if d := f.Directives.Get("deprecated"); d == nil || args.IncludeDeprecated { - l = append(l, &Field{f}) - } - } - return &l -} - -func (r *Type) Interfaces() *[]*Type { - t, ok := r.typ.(*schema.Object) - if !ok { - return nil - } - - l := make([]*Type, len(t.Interfaces)) - for i, intf := range t.Interfaces { - l[i] = &Type{intf} - } - return &l -} - -func (r *Type) PossibleTypes() *[]*Type { - var possibleTypes []*schema.Object - switch t := r.typ.(type) { - case *schema.Interface: - possibleTypes = t.PossibleTypes - case *schema.Union: - possibleTypes = t.PossibleTypes - default: - return nil - } - - l := make([]*Type, len(possibleTypes)) - for i, intf := range possibleTypes { - l[i] = &Type{intf} - } - return &l -} - -func (r *Type) EnumValues(args *struct{ IncludeDeprecated bool }) *[]*EnumValue { - t, ok := r.typ.(*schema.Enum) - if !ok { - return nil - } - - var l []*EnumValue - for _, v := range t.Values { - if d := v.Directives.Get("deprecated"); d == nil || args.IncludeDeprecated { - l = append(l, &EnumValue{v}) - } - } - return &l -} - -func (r *Type) InputFields() *[]*InputValue { - t, ok := r.typ.(*schema.InputObject) - if !ok { - return nil - } - - l := make([]*InputValue, len(t.Values)) - for i, v := range t.Values { - l[i] = &InputValue{v} - } - return &l -} - -func (r *Type) OfType() *Type { - switch t := r.typ.(type) { - case *common.List: - return &Type{t.OfType} - case *common.NonNull: - return &Type{t.OfType} - default: - return nil - } -} - -type Field struct { - field *schema.Field -} - -func (r *Field) Name() string { - return r.field.Name -} - -func (r *Field) Description() *string { - if r.field.Desc == "" { - return nil - } - return &r.field.Desc -} - -func (r *Field) Args() []*InputValue { - l := make([]*InputValue, len(r.field.Args)) - for i, v := range r.field.Args { - l[i] = &InputValue{v} - } - return l -} - -func (r *Field) Type() *Type { - return &Type{r.field.Type} -} - -func (r *Field) IsDeprecated() bool { - return r.field.Directives.Get("deprecated") != nil -} - -func (r *Field) DeprecationReason() *string { - d := r.field.Directives.Get("deprecated") - if d == nil { - return nil - } - reason := d.Args.MustGet("reason").Value(nil).(string) - return &reason -} - -type InputValue struct { - value *common.InputValue -} - -func (r *InputValue) Name() string { - return r.value.Name.Name -} - -func (r *InputValue) Description() *string { - if r.value.Desc == "" { - return nil - } - return &r.value.Desc -} - -func (r *InputValue) Type() *Type { - return &Type{r.value.Type} -} - -func (r *InputValue) DefaultValue() *string { - if r.value.Default == nil { - return nil - } - s := r.value.Default.String() - return &s -} - -type EnumValue struct { - value *schema.EnumValue -} - -func (r *EnumValue) Name() string { - 
return r.value.Name -} - -func (r *EnumValue) Description() *string { - if r.value.Desc == "" { - return nil - } - return &r.value.Desc -} - -func (r *EnumValue) IsDeprecated() bool { - return r.value.Directives.Get("deprecated") != nil -} - -func (r *EnumValue) DeprecationReason() *string { - d := r.value.Directives.Get("deprecated") - if d == nil { - return nil - } - reason := d.Args.MustGet("reason").Value(nil).(string) - return &reason -} - -type Directive struct { - directive *schema.DirectiveDecl -} - -func (r *Directive) Name() string { - return r.directive.Name -} - -func (r *Directive) Description() *string { - if r.directive.Desc == "" { - return nil - } - return &r.directive.Desc -} - -func (r *Directive) Locations() []string { - return r.directive.Locs -} - -func (r *Directive) Args() []*InputValue { - l := make([]*InputValue, len(r.directive.Args)) - for i, v := range r.directive.Args { - l[i] = &InputValue{v} - } - return l -} diff --git a/vendor/github.com/neelance/graphql-go/log/log.go b/vendor/github.com/neelance/graphql-go/log/log.go deleted file mode 100644 index aaab4342..00000000 --- a/vendor/github.com/neelance/graphql-go/log/log.go +++ /dev/null @@ -1,23 +0,0 @@ -package log - -import ( - "context" - "log" - "runtime" -) - -// Logger is the interface used to log panics that occur durring query execution. It is setable via graphql.ParseSchema -type Logger interface { - LogPanic(ctx context.Context, value interface{}) -} - -// DefaultLogger is the default logger used to log panics that occur durring query execution -type DefaultLogger struct{} - -// LogPanic is used to log recovered panic values that occur durring query execution -func (l *DefaultLogger) LogPanic(_ context.Context, value interface{}) { - const size = 64 << 10 - buf := make([]byte, size) - buf = buf[:runtime.Stack(buf, false)] - log.Printf("graphql: panic occurred: %v\n%s", value, buf) -} diff --git a/vendor/github.com/neelance/graphql-go/relay/relay.go b/vendor/github.com/neelance/graphql-go/relay/relay.go deleted file mode 100644 index 61bdd93b..00000000 --- a/vendor/github.com/neelance/graphql-go/relay/relay.go +++ /dev/null @@ -1,70 +0,0 @@ -package relay - -import ( - "encoding/base64" - "encoding/json" - "errors" - "fmt" - "net/http" - "strings" - - graphql "github.com/neelance/graphql-go" -) - -func MarshalID(kind string, spec interface{}) graphql.ID { - d, err := json.Marshal(spec) - if err != nil { - panic(fmt.Errorf("relay.MarshalID: %s", err)) - } - return graphql.ID(base64.URLEncoding.EncodeToString(append([]byte(kind+":"), d...))) -} - -func UnmarshalKind(id graphql.ID) string { - s, err := base64.URLEncoding.DecodeString(string(id)) - if err != nil { - return "" - } - i := strings.IndexByte(string(s), ':') - if i == -1 { - return "" - } - return string(s[:i]) -} - -func UnmarshalSpec(id graphql.ID, v interface{}) error { - s, err := base64.URLEncoding.DecodeString(string(id)) - if err != nil { - return err - } - i := strings.IndexByte(string(s), ':') - if i == -1 { - return errors.New("invalid graphql.ID") - } - return json.Unmarshal([]byte(s[i+1:]), v) -} - -type Handler struct { - Schema *graphql.Schema -} - -func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { - var params struct { - Query string `json:"query"` - OperationName string `json:"operationName"` - Variables map[string]interface{} `json:"variables"` - } - if err := json.NewDecoder(r.Body).Decode(¶ms); err != nil { - http.Error(w, err.Error(), http.StatusBadRequest) - return - } - - response := 
h.Schema.Exec(r.Context(), params.Query, params.OperationName, params.Variables) - responseJSON, err := json.Marshal(response) - if err != nil { - http.Error(w, err.Error(), http.StatusInternalServerError) - return - } - - w.Header().Set("Content-Type", "application/json") - w.Write(responseJSON) -} diff --git a/vendor/github.com/neelance/graphql-go/relay/relay_test.go b/vendor/github.com/neelance/graphql-go/relay/relay_test.go deleted file mode 100644 index 72d8d51b..00000000 --- a/vendor/github.com/neelance/graphql-go/relay/relay_test.go +++ /dev/null @@ -1,36 +0,0 @@ -package relay_test - -import ( - "net/http/httptest" - "strings" - "testing" - - "github.com/neelance/graphql-go" - "github.com/neelance/graphql-go/example/starwars" - "github.com/neelance/graphql-go/relay" -) - -var starwarsSchema = graphql.MustParseSchema(starwars.Schema, &starwars.Resolver{}) - -func TestServeHTTP(t *testing.T) { - w := httptest.NewRecorder() - r := httptest.NewRequest("POST", "/some/path/here", strings.NewReader(`{"query":"{ hero { name } }", "operationName":"", "variables": null}`)) - h := relay.Handler{Schema: starwarsSchema} - - h.ServeHTTP(w, r) - - if w.Code != 200 { - t.Fatalf("Expected status code 200, got %d.", w.Code) - } - - contentType := w.Header().Get("Content-Type") - if contentType != "application/json" { - t.Fatalf("Invalid content-type. Expected [application/json], but instead got [%s]", contentType) - } - - expectedResponse := `{"data":{"hero":{"name":"R2-D2"}}}` - actualResponse := w.Body.String() - if expectedResponse != actualResponse { - t.Fatalf("Invalid response. Expected [%s], but instead got [%s]", expectedResponse, actualResponse) - } -} diff --git a/vendor/github.com/neelance/graphql-go/time.go b/vendor/github.com/neelance/graphql-go/time.go deleted file mode 100644 index 05c616d0..00000000 --- a/vendor/github.com/neelance/graphql-go/time.go +++ /dev/null @@ -1,36 +0,0 @@ -package graphql - -import ( - "fmt" - "time" -) - -// Time is a custom GraphQL type to represent an instant in time. It has to be added to a schema -// via "scalar Time" since it is not a predeclared GraphQL type like "ID". 
-type Time struct { - time.Time -} - -func (_ Time) ImplementsGraphQLType(name string) bool { - return name == "Time" -} - -func (t *Time) UnmarshalGraphQL(input interface{}) error { - switch input := input.(type) { - case time.Time: - t.Time = input - return nil - case string: - var err error - t.Time, err = time.Parse(time.RFC3339, input) - return err - case int: - t.Time = time.Unix(int64(input), 0) - return nil - case float64: - t.Time = time.Unix(int64(input), 0) - return nil - default: - return fmt.Errorf("wrong type") - } -} diff --git a/vendor/github.com/neelance/graphql-go/trace/trace.go b/vendor/github.com/neelance/graphql-go/trace/trace.go deleted file mode 100644 index 443f62a1..00000000 --- a/vendor/github.com/neelance/graphql-go/trace/trace.go +++ /dev/null @@ -1,80 +0,0 @@ -package trace - -import ( - "context" - "fmt" - - "github.com/neelance/graphql-go/errors" - "github.com/neelance/graphql-go/introspection" - opentracing "github.com/opentracing/opentracing-go" - "github.com/opentracing/opentracing-go/ext" - "github.com/opentracing/opentracing-go/log" -) - -type TraceQueryFinishFunc func([]*errors.QueryError) -type TraceFieldFinishFunc func(*errors.QueryError) - -type Tracer interface { - TraceQuery(ctx context.Context, queryString string, operationName string, variables map[string]interface{}, varTypes map[string]*introspection.Type) (context.Context, TraceQueryFinishFunc) - TraceField(ctx context.Context, label, typeName, fieldName string, trivial bool, args map[string]interface{}) (context.Context, TraceFieldFinishFunc) -} - -type OpenTracingTracer struct{} - -func (OpenTracingTracer) TraceQuery(ctx context.Context, queryString string, operationName string, variables map[string]interface{}, varTypes map[string]*introspection.Type) (context.Context, TraceQueryFinishFunc) { - span, spanCtx := opentracing.StartSpanFromContext(ctx, "GraphQL request") - span.SetTag("graphql.query", queryString) - - if operationName != "" { - span.SetTag("graphql.operationName", operationName) - } - - if len(variables) != 0 { - span.LogFields(log.Object("graphql.variables", variables)) - } - - return spanCtx, func(errs []*errors.QueryError) { - if len(errs) > 0 { - msg := errs[0].Error() - if len(errs) > 1 { - msg += fmt.Sprintf(" (and %d more errors)", len(errs)-1) - } - ext.Error.Set(span, true) - span.SetTag("graphql.error", msg) - } - span.Finish() - } -} - -func (OpenTracingTracer) TraceField(ctx context.Context, label, typeName, fieldName string, trivial bool, args map[string]interface{}) (context.Context, TraceFieldFinishFunc) { - if trivial { - return ctx, noop - } - - span, spanCtx := opentracing.StartSpanFromContext(ctx, label) - span.SetTag("graphql.type", typeName) - span.SetTag("graphql.field", fieldName) - for name, value := range args { - span.SetTag("graphql.args."+name, value) - } - - return spanCtx, func(err *errors.QueryError) { - if err != nil { - ext.Error.Set(span, true) - span.SetTag("graphql.error", err.Error()) - } - span.Finish() - } -} - -func noop(*errors.QueryError) {} - -type NoopTracer struct{} - -func (NoopTracer) TraceQuery(ctx context.Context, queryString string, operationName string, variables map[string]interface{}, varTypes map[string]*introspection.Type) (context.Context, TraceQueryFinishFunc) { - return ctx, func(errs []*errors.QueryError) {} -} - -func (NoopTracer) TraceField(ctx context.Context, label, typeName, fieldName string, trivial bool, args map[string]interface{}) (context.Context, TraceFieldFinishFunc) { - return ctx, func(err 
*errors.QueryError) {} -} diff --git a/vendor/github.com/opentracing/opentracing-go/.gitignore b/vendor/github.com/opentracing/opentracing-go/.gitignore deleted file mode 100644 index 565f0f73..00000000 --- a/vendor/github.com/opentracing/opentracing-go/.gitignore +++ /dev/null @@ -1,13 +0,0 @@ -# IntelliJ project files -.idea/ -opentracing-go.iml -opentracing-go.ipr -opentracing-go.iws - -# Test results -*.cov -*.html -test.log - -# Build dir -build/ diff --git a/vendor/github.com/opentracing/opentracing-go/.travis.yml b/vendor/github.com/opentracing/opentracing-go/.travis.yml deleted file mode 100644 index 0538f1bf..00000000 --- a/vendor/github.com/opentracing/opentracing-go/.travis.yml +++ /dev/null @@ -1,14 +0,0 @@ -language: go - -go: - - 1.6 - - 1.7 - - 1.8 - - tip - -install: - - go get -d -t github.com/opentracing/opentracing-go/... - - go get -u github.com/golang/lint/... -script: - - make test lint - - go build ./... diff --git a/vendor/github.com/opentracing/opentracing-go/CHANGELOG.md b/vendor/github.com/opentracing/opentracing-go/CHANGELOG.md deleted file mode 100644 index 1fc9fdf7..00000000 --- a/vendor/github.com/opentracing/opentracing-go/CHANGELOG.md +++ /dev/null @@ -1,14 +0,0 @@ -Changes by Version -================== - -1.1.0 (unreleased) -------------------- - -- Deprecate InitGlobalTracer() in favor of SetGlobalTracer() - - -1.0.0 (2016-09-26) -------------------- - -- This release implements OpenTracing Specification 1.0 (http://opentracing.io/spec) - diff --git a/vendor/github.com/opentracing/opentracing-go/LICENSE b/vendor/github.com/opentracing/opentracing-go/LICENSE deleted file mode 100644 index 148509a4..00000000 --- a/vendor/github.com/opentracing/opentracing-go/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -The MIT License (MIT) - -Copyright (c) 2016 The OpenTracing Authors - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/vendor/github.com/opentracing/opentracing-go/Makefile b/vendor/github.com/opentracing/opentracing-go/Makefile deleted file mode 100644 index 2f491f15..00000000 --- a/vendor/github.com/opentracing/opentracing-go/Makefile +++ /dev/null @@ -1,32 +0,0 @@ -PACKAGES := . ./mocktracer/... ./ext/... - -.DEFAULT_GOAL := test-and-lint - -.PHONE: test-and-lint - -test-and-lint: test lint - -.PHONY: test -test: - go test -v -cover ./... 
- -cover: - @rm -rf cover-all.out - $(foreach pkg, $(PACKAGES), $(MAKE) cover-pkg PKG=$(pkg) || true;) - @grep mode: cover.out > coverage.out - @cat cover-all.out >> coverage.out - go tool cover -html=coverage.out -o cover.html - @rm -rf cover.out cover-all.out coverage.out - -cover-pkg: - go test -coverprofile cover.out $(PKG) - @grep -v mode: cover.out >> cover-all.out - -.PHONY: lint -lint: - go fmt ./... - golint ./... - @# Run again with magic to exit non-zero if golint outputs anything. - @! (golint ./... | read dummy) - go vet ./... - diff --git a/vendor/github.com/opentracing/opentracing-go/README.md b/vendor/github.com/opentracing/opentracing-go/README.md deleted file mode 100644 index 1fb77d22..00000000 --- a/vendor/github.com/opentracing/opentracing-go/README.md +++ /dev/null @@ -1,147 +0,0 @@ -[![Gitter chat](http://img.shields.io/badge/gitter-join%20chat%20%E2%86%92-brightgreen.svg)](https://gitter.im/opentracing/public) [![Build Status](https://travis-ci.org/opentracing/opentracing-go.svg?branch=master)](https://travis-ci.org/opentracing/opentracing-go) [![GoDoc](https://godoc.org/github.com/opentracing/opentracing-go?status.svg)](http://godoc.org/github.com/opentracing/opentracing-go) - -# OpenTracing API for Go - -This package is a Go platform API for OpenTracing. - -## Required Reading - -In order to understand the Go platform API, one must first be familiar with the -[OpenTracing project](http://opentracing.io) and -[terminology](http://opentracing.io/documentation/pages/spec.html) more specifically. - -## API overview for those adding instrumentation - -Everyday consumers of this `opentracing` package really only need to worry -about a couple of key abstractions: the `StartSpan` function, the `Span` -interface, and binding a `Tracer` at `main()`-time. Here are code snippets -demonstrating some important use cases. - -#### Singleton initialization - -The simplest starting point is `./default_tracer.go`. As early as possible, call - -```go - import "github.com/opentracing/opentracing-go" - import ".../some_tracing_impl" - - func main() { - opentracing.InitGlobalTracer( - // tracing impl specific: - some_tracing_impl.New(...), - ) - ... - } -``` - -##### Non-Singleton initialization - -If you prefer direct control to singletons, manage ownership of the -`opentracing.Tracer` implementation explicitly. - -#### Creating a Span given an existing Go `context.Context` - -If you use `context.Context` in your application, OpenTracing's Go library will -happily rely on it for `Span` propagation. To start a new (blocking child) -`Span`, you can use `StartSpanFromContext`. - -```go - func xyz(ctx context.Context, ...) { - ... - span, ctx := opentracing.StartSpanFromContext(ctx, "operation_name") - defer span.Finish() - span.LogFields( - log.String("event", "soft error"), - log.String("type", "cache timeout"), - log.Int("waited.millis", 1500)) - ... - } -``` - -#### Starting an empty trace by creating a "root span" - -It's always possible to create a "root" `Span` with no parent or other causal -reference. - -```go - func xyz() { - ... - sp := opentracing.StartSpan("operation_name") - defer sp.Finish() - ... - } -``` - -#### Creating a (child) Span given an existing (parent) Span - -```go - func xyz(parentSpan opentracing.Span, ...) { - ... - sp := opentracing.StartSpan( - "operation_name", - opentracing.ChildOf(parentSpan.Context())) - defer sp.Finish() - ... - } -``` - -#### Serializing to the wire - -```go - func makeSomeRequest(ctx context.Context) ... 
{ - if span := opentracing.SpanFromContext(ctx); span != nil { - httpClient := &http.Client{} - httpReq, _ := http.NewRequest("GET", "http://myservice/", nil) - - // Transmit the span's TraceContext as HTTP headers on our - // outbound request. - opentracing.GlobalTracer().Inject( - span.Context(), - opentracing.HTTPHeaders, - opentracing.HTTPHeadersCarrier(httpReq.Header)) - - resp, err := httpClient.Do(httpReq) - ... - } - ... - } -``` - -#### Deserializing from the wire - -```go - http.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) { - var serverSpan opentracing.Span - appSpecificOperationName := ... - wireContext, err := opentracing.GlobalTracer().Extract( - opentracing.HTTPHeaders, - opentracing.HTTPHeadersCarrier(req.Header)) - if err != nil { - // Optionally record something about err here - } - - // Create the span referring to the RPC client if available. - // If wireContext == nil, a root span will be created. - serverSpan = opentracing.StartSpan( - appSpecificOperationName, - ext.RPCServerOption(wireContext)) - - defer serverSpan.Finish() - - ctx := opentracing.ContextWithSpan(context.Background(), serverSpan) - ... - } -``` - -#### Goroutine-safety - -The entire public API is goroutine-safe and does not require external -synchronization. - -## API pointers for those implementing a tracing system - -Tracing system implementors may be able to reuse or copy-paste-modify the `basictracer` package, found [here](https://github.com/opentracing/basictracer-go). In particular, see `basictracer.New(...)`. - -## API compatibility - -For the time being, "mild" backwards-incompatible changes may be made without changing the major version number. As OpenTracing and `opentracing-go` mature, backwards compatibility will become more of a priority. diff --git a/vendor/github.com/opentracing/opentracing-go/ext/tags.go b/vendor/github.com/opentracing/opentracing-go/ext/tags.go deleted file mode 100644 index c67ab5ee..00000000 --- a/vendor/github.com/opentracing/opentracing-go/ext/tags.go +++ /dev/null @@ -1,198 +0,0 @@ -package ext - -import opentracing "github.com/opentracing/opentracing-go" - -// These constants define common tag names recommended for better portability across -// tracing systems and languages/platforms. -// -// The tag names are defined as typed strings, so that in addition to the usual use -// -// span.setTag(TagName, value) -// -// they also support value type validation via this additional syntax: -// -// TagName.Set(span, value) -// -var ( - ////////////////////////////////////////////////////////////////////// - // SpanKind (client/server or producer/consumer) - ////////////////////////////////////////////////////////////////////// - - // SpanKind hints at relationship between spans, e.g. 
client/server - SpanKind = spanKindTagName("span.kind") - - // SpanKindRPCClient marks a span representing the client-side of an RPC - // or other remote call - SpanKindRPCClientEnum = SpanKindEnum("client") - SpanKindRPCClient = opentracing.Tag{Key: string(SpanKind), Value: SpanKindRPCClientEnum} - - // SpanKindRPCServer marks a span representing the server-side of an RPC - // or other remote call - SpanKindRPCServerEnum = SpanKindEnum("server") - SpanKindRPCServer = opentracing.Tag{Key: string(SpanKind), Value: SpanKindRPCServerEnum} - - // SpanKindProducer marks a span representing the producer-side of a - // message bus - SpanKindProducerEnum = SpanKindEnum("producer") - SpanKindProducer = opentracing.Tag{Key: string(SpanKind), Value: SpanKindProducerEnum} - - // SpanKindConsumer marks a span representing the consumer-side of a - // message bus - SpanKindConsumerEnum = SpanKindEnum("consumer") - SpanKindConsumer = opentracing.Tag{Key: string(SpanKind), Value: SpanKindConsumerEnum} - - ////////////////////////////////////////////////////////////////////// - // Component name - ////////////////////////////////////////////////////////////////////// - - // Component is a low-cardinality identifier of the module, library, - // or package that is generating a span. - Component = stringTagName("component") - - ////////////////////////////////////////////////////////////////////// - // Sampling hint - ////////////////////////////////////////////////////////////////////// - - // SamplingPriority determines the priority of sampling this Span. - SamplingPriority = uint16TagName("sampling.priority") - - ////////////////////////////////////////////////////////////////////// - // Peer tags. These tags can be emitted by either client-side of - // server-side to describe the other side/service in a peer-to-peer - // communications, like an RPC call. - ////////////////////////////////////////////////////////////////////// - - // PeerService records the service name of the peer. - PeerService = stringTagName("peer.service") - - // PeerAddress records the address name of the peer. This may be a "ip:port", - // a bare "hostname", a FQDN or even a database DSN substring - // like "mysql://username@127.0.0.1:3306/dbname" - PeerAddress = stringTagName("peer.address") - - // PeerHostname records the host name of the peer - PeerHostname = stringTagName("peer.hostname") - - // PeerHostIPv4 records IP v4 host address of the peer - PeerHostIPv4 = uint32TagName("peer.ipv4") - - // PeerHostIPv6 records IP v6 host address of the peer - PeerHostIPv6 = stringTagName("peer.ipv6") - - // PeerPort records port number of the peer - PeerPort = uint16TagName("peer.port") - - ////////////////////////////////////////////////////////////////////// - // HTTP Tags - ////////////////////////////////////////////////////////////////////// - - // HTTPUrl should be the URL of the request being handled in this segment - // of the trace, in standard URI format. The protocol is optional. - HTTPUrl = stringTagName("http.url") - - // HTTPMethod is the HTTP method of the request, and is case-insensitive. - HTTPMethod = stringTagName("http.method") - - // HTTPStatusCode is the numeric HTTP status code (200, 404, etc) of the - // HTTP response. - HTTPStatusCode = uint16TagName("http.status_code") - - ////////////////////////////////////////////////////////////////////// - // DB Tags - ////////////////////////////////////////////////////////////////////// - - // DBInstance is database instance name. 
- DBInstance = stringTagName("db.instance") - - // DBStatement is a database statement for the given database type. - // It can be a query or a prepared statement (i.e., before substitution). - DBStatement = stringTagName("db.statement") - - // DBType is a database type. For any SQL database, "sql". - // For others, the lower-case database category, e.g. "redis" - DBType = stringTagName("db.type") - - // DBUser is a username for accessing database. - DBUser = stringTagName("db.user") - - ////////////////////////////////////////////////////////////////////// - // Message Bus Tag - ////////////////////////////////////////////////////////////////////// - - // MessageBusDestination is an address at which messages can be exchanged - MessageBusDestination = stringTagName("message_bus.destination") - - ////////////////////////////////////////////////////////////////////// - // Error Tag - ////////////////////////////////////////////////////////////////////// - - // Error indicates that operation represented by the span resulted in an error. - Error = boolTagName("error") -) - -// --- - -// SpanKindEnum represents common span types -type SpanKindEnum string - -type spanKindTagName string - -// Set adds a string tag to the `span` -func (tag spanKindTagName) Set(span opentracing.Span, value SpanKindEnum) { - span.SetTag(string(tag), value) -} - -type rpcServerOption struct { - clientContext opentracing.SpanContext -} - -func (r rpcServerOption) Apply(o *opentracing.StartSpanOptions) { - if r.clientContext != nil { - opentracing.ChildOf(r.clientContext).Apply(o) - } - SpanKindRPCServer.Apply(o) -} - -// RPCServerOption returns a StartSpanOption appropriate for an RPC server span -// with `client` representing the metadata for the remote peer Span if available. -// In case client == nil, due to the client not being instrumented, this RPC -// server span will be a root span. 
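The typed tag names above exist so that `TagName.Set(span, value)` is type-checked at compile time, e.g. `HTTPStatusCode` only accepts a `uint16`. A short usage sketch against the `mocktracer` package bundled alongside:

```go
package main

import (
	"fmt"

	"github.com/opentracing/opentracing-go/ext"
	"github.com/opentracing/opentracing-go/mocktracer"
)

func main() {
	tracer := mocktracer.New()
	span := tracer.StartSpan("http.request")

	ext.SpanKindRPCClient.Set(span)           // span.kind=client
	ext.Component.Set(span, "example-client") // low-cardinality module name
	ext.HTTPMethod.Set(span, "GET")
	ext.HTTPStatusCode.Set(span, uint16(200)) // uint16 enforced at compile time
	ext.Error.Set(span, false)

	span.Finish()
	fmt.Println(tracer.FinishedSpans()[0].Tags())
}
```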
-func RPCServerOption(client opentracing.SpanContext) opentracing.StartSpanOption { - return rpcServerOption{client} -} - -// --- - -type stringTagName string - -// Set adds a string tag to the `span` -func (tag stringTagName) Set(span opentracing.Span, value string) { - span.SetTag(string(tag), value) -} - -// --- - -type uint32TagName string - -// Set adds a uint32 tag to the `span` -func (tag uint32TagName) Set(span opentracing.Span, value uint32) { - span.SetTag(string(tag), value) -} - -// --- - -type uint16TagName string - -// Set adds a uint16 tag to the `span` -func (tag uint16TagName) Set(span opentracing.Span, value uint16) { - span.SetTag(string(tag), value) -} - -// --- - -type boolTagName string - -// Add adds a bool tag to the `span` -func (tag boolTagName) Set(span opentracing.Span, value bool) { - span.SetTag(string(tag), value) -} diff --git a/vendor/github.com/opentracing/opentracing-go/ext/tags_test.go b/vendor/github.com/opentracing/opentracing-go/ext/tags_test.go deleted file mode 100644 index ea9af335..00000000 --- a/vendor/github.com/opentracing/opentracing-go/ext/tags_test.go +++ /dev/null @@ -1,148 +0,0 @@ -package ext_test - -import ( - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/opentracing/opentracing-go" - "github.com/opentracing/opentracing-go/ext" - "github.com/opentracing/opentracing-go/mocktracer" -) - -func TestPeerTags(t *testing.T) { - if ext.PeerService != "peer.service" { - t.Fatalf("Invalid PeerService %v", ext.PeerService) - } - tracer := mocktracer.New() - span := tracer.StartSpan("my-trace") - ext.PeerService.Set(span, "my-service") - ext.PeerAddress.Set(span, "my-hostname:8080") - ext.PeerHostname.Set(span, "my-hostname") - ext.PeerHostIPv4.Set(span, uint32(127<<24|1)) - ext.PeerHostIPv6.Set(span, "::") - ext.PeerPort.Set(span, uint16(8080)) - ext.SamplingPriority.Set(span, uint16(1)) - ext.SpanKind.Set(span, ext.SpanKindRPCServerEnum) - ext.SpanKindRPCClient.Set(span) - span.Finish() - - rawSpan := tracer.FinishedSpans()[0] - assert.Equal(t, map[string]interface{}{ - "peer.service": "my-service", - "peer.address": "my-hostname:8080", - "peer.hostname": "my-hostname", - "peer.ipv4": uint32(127<<24 | 1), - "peer.ipv6": "::", - "peer.port": uint16(8080), - "span.kind": ext.SpanKindRPCClientEnum, - }, rawSpan.Tags()) - assert.True(t, span.Context().(mocktracer.MockSpanContext).Sampled) - ext.SamplingPriority.Set(span, uint16(0)) - assert.False(t, span.Context().(mocktracer.MockSpanContext).Sampled) -} - -func TestHTTPTags(t *testing.T) { - tracer := mocktracer.New() - span := tracer.StartSpan("my-trace", ext.SpanKindRPCServer) - ext.HTTPUrl.Set(span, "test.biz/uri?protocol=false") - ext.HTTPMethod.Set(span, "GET") - ext.HTTPStatusCode.Set(span, 301) - span.Finish() - - rawSpan := tracer.FinishedSpans()[0] - assert.Equal(t, map[string]interface{}{ - "http.url": "test.biz/uri?protocol=false", - "http.method": "GET", - "http.status_code": uint16(301), - "span.kind": ext.SpanKindRPCServerEnum, - }, rawSpan.Tags()) -} - -func TestDBTags(t *testing.T) { - tracer := mocktracer.New() - span := tracer.StartSpan("my-trace", ext.SpanKindRPCClient) - ext.DBInstance.Set(span, "127.0.0.1:3306/customers") - ext.DBStatement.Set(span, "SELECT * FROM user_table") - ext.DBType.Set(span, "sql") - ext.DBUser.Set(span, "customer_user") - span.Finish() - - rawSpan := tracer.FinishedSpans()[0] - assert.Equal(t, map[string]interface{}{ - "db.instance": "127.0.0.1:3306/customers", - "db.statement": "SELECT * FROM user_table", - "db.type": "sql", - 
"db.user": "customer_user", - "span.kind": ext.SpanKindRPCClientEnum, - }, rawSpan.Tags()) -} - -func TestMiscTags(t *testing.T) { - tracer := mocktracer.New() - span := tracer.StartSpan("my-trace") - ext.Component.Set(span, "my-awesome-library") - ext.SamplingPriority.Set(span, 1) - ext.Error.Set(span, true) - - span.Finish() - - rawSpan := tracer.FinishedSpans()[0] - assert.Equal(t, map[string]interface{}{ - "component": "my-awesome-library", - "error": true, - }, rawSpan.Tags()) -} - -func TestRPCServerOption(t *testing.T) { - tracer := mocktracer.New() - parent := tracer.StartSpan("my-trace") - parent.SetBaggageItem("bag", "gage") - - carrier := opentracing.HTTPHeadersCarrier{} - err := tracer.Inject(parent.Context(), opentracing.HTTPHeaders, carrier) - if err != nil { - t.Fatal(err) - } - - parCtx, err := tracer.Extract(opentracing.HTTPHeaders, carrier) - if err != nil { - t.Fatal(err) - } - - tracer.StartSpan("my-child", ext.RPCServerOption(parCtx)).Finish() - - rawSpan := tracer.FinishedSpans()[0] - assert.Equal(t, map[string]interface{}{ - "span.kind": ext.SpanKindRPCServerEnum, - }, rawSpan.Tags()) - assert.Equal(t, map[string]string{ - "bag": "gage", - }, rawSpan.Context().(mocktracer.MockSpanContext).Baggage) -} - -func TestMessageBusProducerTags(t *testing.T) { - tracer := mocktracer.New() - span := tracer.StartSpan("my-trace", ext.SpanKindProducer) - ext.MessageBusDestination.Set(span, "topic name") - span.Finish() - - rawSpan := tracer.FinishedSpans()[0] - assert.Equal(t, map[string]interface{}{ - "message_bus.destination": "topic name", - "span.kind": ext.SpanKindProducerEnum, - }, rawSpan.Tags()) -} - -func TestMessageBusConsumerTags(t *testing.T) { - tracer := mocktracer.New() - span := tracer.StartSpan("my-trace", ext.SpanKindConsumer) - ext.MessageBusDestination.Set(span, "topic name") - span.Finish() - - rawSpan := tracer.FinishedSpans()[0] - assert.Equal(t, map[string]interface{}{ - "message_bus.destination": "topic name", - "span.kind": ext.SpanKindConsumerEnum, - }, rawSpan.Tags()) -} diff --git a/vendor/github.com/opentracing/opentracing-go/globaltracer.go b/vendor/github.com/opentracing/opentracing-go/globaltracer.go deleted file mode 100644 index 8c8e793f..00000000 --- a/vendor/github.com/opentracing/opentracing-go/globaltracer.go +++ /dev/null @@ -1,32 +0,0 @@ -package opentracing - -var ( - globalTracer Tracer = NoopTracer{} -) - -// SetGlobalTracer sets the [singleton] opentracing.Tracer returned by -// GlobalTracer(). Those who use GlobalTracer (rather than directly manage an -// opentracing.Tracer instance) should call SetGlobalTracer as early as -// possible in main(), prior to calling the `StartSpan` global func below. -// Prior to calling `SetGlobalTracer`, any Spans started via the `StartSpan` -// (etc) globals are noops. -func SetGlobalTracer(tracer Tracer) { - globalTracer = tracer -} - -// GlobalTracer returns the global singleton `Tracer` implementation. -// Before `SetGlobalTracer()` is called, the `GlobalTracer()` is a noop -// implementation that drops all data handed to it. -func GlobalTracer() Tracer { - return globalTracer -} - -// StartSpan defers to `Tracer.StartSpan`. See `GlobalTracer()`. -func StartSpan(operationName string, opts ...StartSpanOption) Span { - return globalTracer.StartSpan(operationName, opts...) -} - -// InitGlobalTracer is deprecated. Please use SetGlobalTracer. 
-func InitGlobalTracer(tracer Tracer) { - SetGlobalTracer(tracer) -} diff --git a/vendor/github.com/opentracing/opentracing-go/gocontext.go b/vendor/github.com/opentracing/opentracing-go/gocontext.go deleted file mode 100644 index 222a6520..00000000 --- a/vendor/github.com/opentracing/opentracing-go/gocontext.go +++ /dev/null @@ -1,57 +0,0 @@ -package opentracing - -import "golang.org/x/net/context" - -type contextKey struct{} - -var activeSpanKey = contextKey{} - -// ContextWithSpan returns a new `context.Context` that holds a reference to -// `span`'s SpanContext. -func ContextWithSpan(ctx context.Context, span Span) context.Context { - return context.WithValue(ctx, activeSpanKey, span) -} - -// SpanFromContext returns the `Span` previously associated with `ctx`, or -// `nil` if no such `Span` could be found. -// -// NOTE: context.Context != SpanContext: the former is Go's intra-process -// context propagation mechanism, and the latter houses OpenTracing's per-Span -// identity and baggage information. -func SpanFromContext(ctx context.Context) Span { - val := ctx.Value(activeSpanKey) - if sp, ok := val.(Span); ok { - return sp - } - return nil -} - -// StartSpanFromContext starts and returns a Span with `operationName`, using -// any Span found within `ctx` as a ChildOfRef. If no such parent could be -// found, StartSpanFromContext creates a root (parentless) Span. -// -// The second return value is a context.Context object built around the -// returned Span. -// -// Example usage: -// -// SomeFunction(ctx context.Context, ...) { -// sp, ctx := opentracing.StartSpanFromContext(ctx, "SomeFunction") -// defer sp.Finish() -// ... -// } -func StartSpanFromContext(ctx context.Context, operationName string, opts ...StartSpanOption) (Span, context.Context) { - return startSpanFromContextWithTracer(ctx, GlobalTracer(), operationName, opts...) -} - -// startSpanFromContextWithTracer is factored out for testing purposes. -func startSpanFromContextWithTracer(ctx context.Context, tracer Tracer, operationName string, opts ...StartSpanOption) (Span, context.Context) { - var span Span - if parentSpan := SpanFromContext(ctx); parentSpan != nil { - opts = append(opts, ChildOf(parentSpan.Context())) - span = tracer.StartSpan(operationName, opts...) - } else { - span = tracer.StartSpan(operationName, opts...) - } - return span, ContextWithSpan(ctx, span) -} diff --git a/vendor/github.com/opentracing/opentracing-go/gocontext_test.go b/vendor/github.com/opentracing/opentracing-go/gocontext_test.go deleted file mode 100644 index 65c01308..00000000 --- a/vendor/github.com/opentracing/opentracing-go/gocontext_test.go +++ /dev/null @@ -1,81 +0,0 @@ -package opentracing - -import ( - "testing" - "time" - - "github.com/stretchr/testify/assert" - "golang.org/x/net/context" -) - -func TestContextWithSpan(t *testing.T) { - span := &noopSpan{} - ctx := ContextWithSpan(context.Background(), span) - span2 := SpanFromContext(ctx) - if span != span2 { - t.Errorf("Not the same span returned from context, expected=%+v, actual=%+v", span, span2) - } - - ctx = context.Background() - span2 = SpanFromContext(ctx) - if span2 != nil { - t.Errorf("Expected nil span, found %+v", span2) - } - - ctx = ContextWithSpan(ctx, span) - span2 = SpanFromContext(ctx) - if span != span2 { - t.Errorf("Not the same span returned from context, expected=%+v, actual=%+v", span, span2) - } -} - -func TestStartSpanFromContext(t *testing.T) { - testTracer := testTracer{} - - // Test the case where there *is* a Span in the Context. 
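`gocontext.go` above ties span propagation to `golang.org/x/net/context`. A sketch of the intended call pattern, where each layer derives its span from the incoming context and child spans link to their parents automatically:

```go
package main

import (
	opentracing "github.com/opentracing/opentracing-go"
	"golang.org/x/net/context"
)

func handleRequest(ctx context.Context) {
	// Derives a child span if ctx already carries one, else starts a root span.
	span, ctx := opentracing.StartSpanFromContext(ctx, "handleRequest")
	defer span.Finish()
	loadData(ctx)
}

func loadData(ctx context.Context) {
	// This span becomes a ChildOf the handleRequest span via ctx.
	span, _ := opentracing.StartSpanFromContext(ctx, "loadData")
	defer span.Finish()
}

func main() {
	handleRequest(context.Background())
}
```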
- { - parentSpan := &testSpan{} - parentCtx := ContextWithSpan(context.Background(), parentSpan) - childSpan, childCtx := startSpanFromContextWithTracer(parentCtx, testTracer, "child") - if !childSpan.Context().(testSpanContext).HasParent { - t.Errorf("Failed to find parent: %v", childSpan) - } - if !childSpan.(testSpan).Equal(SpanFromContext(childCtx)) { - t.Errorf("Unable to find child span in context: %v", childCtx) - } - } - - // Test the case where there *is not* a Span in the Context. - { - emptyCtx := context.Background() - childSpan, childCtx := startSpanFromContextWithTracer(emptyCtx, testTracer, "child") - if childSpan.Context().(testSpanContext).HasParent { - t.Errorf("Should not have found parent: %v", childSpan) - } - if !childSpan.(testSpan).Equal(SpanFromContext(childCtx)) { - t.Errorf("Unable to find child span in context: %v", childCtx) - } - } -} - -func TestStartSpanFromContextOptions(t *testing.T) { - testTracer := testTracer{} - - // Test options are passed to tracer - - startTime := time.Now().Add(-10 * time.Second) // ten seconds ago - span, ctx := startSpanFromContextWithTracer( - context.Background(), testTracer, "parent", StartTime(startTime), Tag{"component", "test"}) - - assert.Equal(t, "test", span.(testSpan).Tags["component"]) - assert.Equal(t, startTime, span.(testSpan).StartTime) - - // Test it also works for a child span - - childStartTime := startTime.Add(3 * time.Second) - childSpan, _ := startSpanFromContextWithTracer( - ctx, testTracer, "child", StartTime(childStartTime)) - - assert.Equal(t, childSpan.(testSpan).Tags["component"], nil) - assert.Equal(t, childSpan.(testSpan).StartTime, childStartTime) -} diff --git a/vendor/github.com/opentracing/opentracing-go/log/field.go b/vendor/github.com/opentracing/opentracing-go/log/field.go deleted file mode 100644 index d2cd39a1..00000000 --- a/vendor/github.com/opentracing/opentracing-go/log/field.go +++ /dev/null @@ -1,245 +0,0 @@ -package log - -import ( - "fmt" - "math" -) - -type fieldType int - -const ( - stringType fieldType = iota - boolType - intType - int32Type - uint32Type - int64Type - uint64Type - float32Type - float64Type - errorType - objectType - lazyLoggerType -) - -// Field instances are constructed via LogBool, LogString, and so on. -// Tracing implementations may then handle them via the Field.Marshal -// method. 
-// -// "heavily influenced by" (i.e., partially stolen from) -// https://github.com/uber-go/zap -type Field struct { - key string - fieldType fieldType - numericVal int64 - stringVal string - interfaceVal interface{} -} - -// String adds a string-valued key:value pair to a Span.LogFields() record -func String(key, val string) Field { - return Field{ - key: key, - fieldType: stringType, - stringVal: val, - } -} - -// Bool adds a bool-valued key:value pair to a Span.LogFields() record -func Bool(key string, val bool) Field { - var numericVal int64 - if val { - numericVal = 1 - } - return Field{ - key: key, - fieldType: boolType, - numericVal: numericVal, - } -} - -// Int adds an int-valued key:value pair to a Span.LogFields() record -func Int(key string, val int) Field { - return Field{ - key: key, - fieldType: intType, - numericVal: int64(val), - } -} - -// Int32 adds an int32-valued key:value pair to a Span.LogFields() record -func Int32(key string, val int32) Field { - return Field{ - key: key, - fieldType: int32Type, - numericVal: int64(val), - } -} - -// Int64 adds an int64-valued key:value pair to a Span.LogFields() record -func Int64(key string, val int64) Field { - return Field{ - key: key, - fieldType: int64Type, - numericVal: val, - } -} - -// Uint32 adds a uint32-valued key:value pair to a Span.LogFields() record -func Uint32(key string, val uint32) Field { - return Field{ - key: key, - fieldType: uint32Type, - numericVal: int64(val), - } -} - -// Uint64 adds a uint64-valued key:value pair to a Span.LogFields() record -func Uint64(key string, val uint64) Field { - return Field{ - key: key, - fieldType: uint64Type, - numericVal: int64(val), - } -} - -// Float32 adds a float32-valued key:value pair to a Span.LogFields() record -func Float32(key string, val float32) Field { - return Field{ - key: key, - fieldType: float32Type, - numericVal: int64(math.Float32bits(val)), - } -} - -// Float64 adds a float64-valued key:value pair to a Span.LogFields() record -func Float64(key string, val float64) Field { - return Field{ - key: key, - fieldType: float64Type, - numericVal: int64(math.Float64bits(val)), - } -} - -// Error adds an error with the key "error" to a Span.LogFields() record -func Error(err error) Field { - return Field{ - key: "error", - fieldType: errorType, - interfaceVal: err, - } -} - -// Object adds an object-valued key:value pair to a Span.LogFields() record -func Object(key string, obj interface{}) Field { - return Field{ - key: key, - fieldType: objectType, - interfaceVal: obj, - } -} - -// LazyLogger allows for user-defined, late-bound logging of arbitrary data -type LazyLogger func(fv Encoder) - -// Lazy adds a LazyLogger to a Span.LogFields() record; the tracing -// implementation will call the LazyLogger function at an indefinite time in -// the future (after Lazy() returns). -func Lazy(ll LazyLogger) Field { - return Field{ - fieldType: lazyLoggerType, - interfaceVal: ll, - } -} - -// Encoder allows access to the contents of a Field (via a call to -// Field.Marshal). -// -// Tracer implementations typically provide an implementation of Encoder; -// OpenTracing callers typically do not need to concern themselves with it. 
-type Encoder interface { - EmitString(key, value string) - EmitBool(key string, value bool) - EmitInt(key string, value int) - EmitInt32(key string, value int32) - EmitInt64(key string, value int64) - EmitUint32(key string, value uint32) - EmitUint64(key string, value uint64) - EmitFloat32(key string, value float32) - EmitFloat64(key string, value float64) - EmitObject(key string, value interface{}) - EmitLazyLogger(value LazyLogger) -} - -// Marshal passes a Field instance through to the appropriate -// field-type-specific method of an Encoder. -func (lf Field) Marshal(visitor Encoder) { - switch lf.fieldType { - case stringType: - visitor.EmitString(lf.key, lf.stringVal) - case boolType: - visitor.EmitBool(lf.key, lf.numericVal != 0) - case intType: - visitor.EmitInt(lf.key, int(lf.numericVal)) - case int32Type: - visitor.EmitInt32(lf.key, int32(lf.numericVal)) - case int64Type: - visitor.EmitInt64(lf.key, int64(lf.numericVal)) - case uint32Type: - visitor.EmitUint32(lf.key, uint32(lf.numericVal)) - case uint64Type: - visitor.EmitUint64(lf.key, uint64(lf.numericVal)) - case float32Type: - visitor.EmitFloat32(lf.key, math.Float32frombits(uint32(lf.numericVal))) - case float64Type: - visitor.EmitFloat64(lf.key, math.Float64frombits(uint64(lf.numericVal))) - case errorType: - if err, ok := lf.interfaceVal.(error); ok { - visitor.EmitString(lf.key, err.Error()) - } else { - visitor.EmitString(lf.key, "") - } - case objectType: - visitor.EmitObject(lf.key, lf.interfaceVal) - case lazyLoggerType: - visitor.EmitLazyLogger(lf.interfaceVal.(LazyLogger)) - } -} - -// Key returns the field's key. -func (lf Field) Key() string { - return lf.key -} - -// Value returns the field's value as interface{}. -func (lf Field) Value() interface{} { - switch lf.fieldType { - case stringType: - return lf.stringVal - case boolType: - return lf.numericVal != 0 - case intType: - return int(lf.numericVal) - case int32Type: - return int32(lf.numericVal) - case int64Type: - return int64(lf.numericVal) - case uint32Type: - return uint32(lf.numericVal) - case uint64Type: - return uint64(lf.numericVal) - case float32Type: - return math.Float32frombits(uint32(lf.numericVal)) - case float64Type: - return math.Float64frombits(uint64(lf.numericVal)) - case errorType, objectType, lazyLoggerType: - return lf.interfaceVal - default: - return nil - } -} - -// String returns a string representation of the key and value. 
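`Field.Marshal` dispatches each field to exactly one `Emit*` method, which is how tracer implementations read field values without reflection. A toy sketch with a hypothetical `printEncoder`, assuming nothing beyond the `Encoder` interface shown above:

```go
package main

import (
	"fmt"

	"github.com/opentracing/opentracing-go/log"
)

// printEncoder is a hypothetical Encoder that just prints key/value pairs.
type printEncoder struct{}

func (printEncoder) EmitString(key, value string)             { fmt.Println(key, value) }
func (printEncoder) EmitBool(key string, value bool)          { fmt.Println(key, value) }
func (printEncoder) EmitInt(key string, value int)            { fmt.Println(key, value) }
func (printEncoder) EmitInt32(key string, value int32)        { fmt.Println(key, value) }
func (printEncoder) EmitInt64(key string, value int64)        { fmt.Println(key, value) }
func (printEncoder) EmitUint32(key string, value uint32)      { fmt.Println(key, value) }
func (printEncoder) EmitUint64(key string, value uint64)      { fmt.Println(key, value) }
func (printEncoder) EmitFloat32(key string, value float32)    { fmt.Println(key, value) }
func (printEncoder) EmitFloat64(key string, value float64)    { fmt.Println(key, value) }
func (printEncoder) EmitObject(key string, value interface{}) { fmt.Println(key, value) }
func (printEncoder) EmitLazyLogger(value log.LazyLogger)      { value(printEncoder{}) }

func main() {
	for _, f := range []log.Field{log.String("k", "v"), log.Int("n", 7)} {
		f.Marshal(printEncoder{})
	}
}
```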
-func (lf Field) String() string { - return fmt.Sprint(lf.key, ":", lf.Value()) -} diff --git a/vendor/github.com/opentracing/opentracing-go/log/field_test.go b/vendor/github.com/opentracing/opentracing-go/log/field_test.go deleted file mode 100644 index 8304f182..00000000 --- a/vendor/github.com/opentracing/opentracing-go/log/field_test.go +++ /dev/null @@ -1,39 +0,0 @@ -package log - -import ( - "fmt" - "testing" -) - -func TestFieldString(t *testing.T) { - testCases := []struct { - field Field - expected string - }{ - { - field: String("key", "value"), - expected: "key:value", - }, - { - field: Bool("key", true), - expected: "key:true", - }, - { - field: Int("key", 5), - expected: "key:5", - }, - { - field: Error(fmt.Errorf("err msg")), - expected: "error:err msg", - }, - { - field: Error(nil), - expected: "error:", - }, - } - for i, tc := range testCases { - if str := tc.field.String(); str != tc.expected { - t.Errorf("%d: expected '%s', got '%s'", i, tc.expected, str) - } - } -} diff --git a/vendor/github.com/opentracing/opentracing-go/log/util.go b/vendor/github.com/opentracing/opentracing-go/log/util.go deleted file mode 100644 index 3832feb5..00000000 --- a/vendor/github.com/opentracing/opentracing-go/log/util.go +++ /dev/null @@ -1,54 +0,0 @@ -package log - -import "fmt" - -// InterleavedKVToFields converts keyValues a la Span.LogKV() to a Field slice -// a la Span.LogFields(). -func InterleavedKVToFields(keyValues ...interface{}) ([]Field, error) { - if len(keyValues)%2 != 0 { - return nil, fmt.Errorf("non-even keyValues len: %d", len(keyValues)) - } - fields := make([]Field, len(keyValues)/2) - for i := 0; i*2 < len(keyValues); i++ { - key, ok := keyValues[i*2].(string) - if !ok { - return nil, fmt.Errorf( - "non-string key (pair #%d): %T", - i, keyValues[i*2]) - } - switch typedVal := keyValues[i*2+1].(type) { - case bool: - fields[i] = Bool(key, typedVal) - case string: - fields[i] = String(key, typedVal) - case int: - fields[i] = Int(key, typedVal) - case int8: - fields[i] = Int32(key, int32(typedVal)) - case int16: - fields[i] = Int32(key, int32(typedVal)) - case int32: - fields[i] = Int32(key, typedVal) - case int64: - fields[i] = Int64(key, typedVal) - case uint: - fields[i] = Uint64(key, uint64(typedVal)) - case uint64: - fields[i] = Uint64(key, typedVal) - case uint8: - fields[i] = Uint32(key, uint32(typedVal)) - case uint16: - fields[i] = Uint32(key, uint32(typedVal)) - case uint32: - fields[i] = Uint32(key, typedVal) - case float32: - fields[i] = Float32(key, typedVal) - case float64: - fields[i] = Float64(key, typedVal) - default: - // When in doubt, coerce to a string - fields[i] = String(key, fmt.Sprint(typedVal)) - } - } - return fields, nil -} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go deleted file mode 100644 index 2ce96d9d..00000000 --- a/vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go +++ /dev/null @@ -1,105 +0,0 @@ -package mocktracer - -import ( - "fmt" - "reflect" - "time" - - "github.com/opentracing/opentracing-go/log" -) - -// MockLogRecord represents data logged to a Span via Span.LogFields or -// Span.LogKV. -type MockLogRecord struct { - Timestamp time.Time - Fields []MockKeyValue -} - -// MockKeyValue represents a single key:value pair. -type MockKeyValue struct { - Key string - - // All MockLogRecord values are coerced to strings via fmt.Sprint(), though - // we retain their type separately. 
- ValueKind reflect.Kind - ValueString string -} - -// EmitString belongs to the log.Encoder interface -func (m *MockKeyValue) EmitString(key, value string) { - m.Key = key - m.ValueKind = reflect.TypeOf(value).Kind() - m.ValueString = fmt.Sprint(value) -} - -// EmitBool belongs to the log.Encoder interface -func (m *MockKeyValue) EmitBool(key string, value bool) { - m.Key = key - m.ValueKind = reflect.TypeOf(value).Kind() - m.ValueString = fmt.Sprint(value) -} - -// EmitInt belongs to the log.Encoder interface -func (m *MockKeyValue) EmitInt(key string, value int) { - m.Key = key - m.ValueKind = reflect.TypeOf(value).Kind() - m.ValueString = fmt.Sprint(value) -} - -// EmitInt32 belongs to the log.Encoder interface -func (m *MockKeyValue) EmitInt32(key string, value int32) { - m.Key = key - m.ValueKind = reflect.TypeOf(value).Kind() - m.ValueString = fmt.Sprint(value) -} - -// EmitInt64 belongs to the log.Encoder interface -func (m *MockKeyValue) EmitInt64(key string, value int64) { - m.Key = key - m.ValueKind = reflect.TypeOf(value).Kind() - m.ValueString = fmt.Sprint(value) -} - -// EmitUint32 belongs to the log.Encoder interface -func (m *MockKeyValue) EmitUint32(key string, value uint32) { - m.Key = key - m.ValueKind = reflect.TypeOf(value).Kind() - m.ValueString = fmt.Sprint(value) -} - -// EmitUint64 belongs to the log.Encoder interface -func (m *MockKeyValue) EmitUint64(key string, value uint64) { - m.Key = key - m.ValueKind = reflect.TypeOf(value).Kind() - m.ValueString = fmt.Sprint(value) -} - -// EmitFloat32 belongs to the log.Encoder interface -func (m *MockKeyValue) EmitFloat32(key string, value float32) { - m.Key = key - m.ValueKind = reflect.TypeOf(value).Kind() - m.ValueString = fmt.Sprint(value) -} - -// EmitFloat64 belongs to the log.Encoder interface -func (m *MockKeyValue) EmitFloat64(key string, value float64) { - m.Key = key - m.ValueKind = reflect.TypeOf(value).Kind() - m.ValueString = fmt.Sprint(value) -} - -// EmitObject belongs to the log.Encoder interface -func (m *MockKeyValue) EmitObject(key string, value interface{}) { - m.Key = key - m.ValueKind = reflect.TypeOf(value).Kind() - m.ValueString = fmt.Sprint(value) -} - -// EmitLazyLogger belongs to the log.Encoder interface -func (m *MockKeyValue) EmitLazyLogger(value log.LazyLogger) { - var meta MockKeyValue - value(&meta) - m.Key = meta.Key - m.ValueKind = meta.ValueKind - m.ValueString = meta.ValueString -} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go deleted file mode 100644 index 69defda2..00000000 --- a/vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go +++ /dev/null @@ -1,282 +0,0 @@ -package mocktracer - -import ( - "fmt" - "sync" - "sync/atomic" - "time" - - "github.com/opentracing/opentracing-go" - "github.com/opentracing/opentracing-go/ext" - "github.com/opentracing/opentracing-go/log" -) - -// MockSpanContext is an opentracing.SpanContext implementation. -// -// It is entirely unsuitable for production use, but appropriate for tests -// that want to verify tracing behavior in other frameworks/applications. -// -// By default all spans have Sampled=true flag, unless {"sampling.priority": 0} -// tag is set. 
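One consequence of the default documented above: setting `ext.SamplingPriority` to 0 flips the context's `Sampled` flag, because `MockSpan.SetTag` (below) special-cases that key. A quick sketch:

```go
package main

import (
	"fmt"

	"github.com/opentracing/opentracing-go/ext"
	"github.com/opentracing/opentracing-go/mocktracer"
)

func main() {
	tracer := mocktracer.New()
	span := tracer.StartSpan("op")

	fmt.Println(span.Context().(mocktracer.MockSpanContext).Sampled) // true by default

	ext.SamplingPriority.Set(span, 0) // special-cased by MockSpan.SetTag
	fmt.Println(span.Context().(mocktracer.MockSpanContext).Sampled) // now false

	span.Finish()
}
```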
-type MockSpanContext struct { - TraceID int - SpanID int - Sampled bool - Baggage map[string]string -} - -var mockIDSource = uint32(42) - -func nextMockID() int { - return int(atomic.AddUint32(&mockIDSource, 1)) -} - -// ForeachBaggageItem belongs to the SpanContext interface -func (c MockSpanContext) ForeachBaggageItem(handler func(k, v string) bool) { - for k, v := range c.Baggage { - if !handler(k, v) { - break - } - } -} - -// WithBaggageItem creates a new context with an extra baggage item. -func (c MockSpanContext) WithBaggageItem(key, value string) MockSpanContext { - var newBaggage map[string]string - if c.Baggage == nil { - newBaggage = map[string]string{key: value} - } else { - newBaggage = make(map[string]string, len(c.Baggage)+1) - for k, v := range c.Baggage { - newBaggage[k] = v - } - newBaggage[key] = value - } - // Use positional parameters so the compiler will help catch new fields. - return MockSpanContext{c.TraceID, c.SpanID, c.Sampled, newBaggage} -} - -// MockSpan is an opentracing.Span implementation that exports its internal -// state for testing purposes. -type MockSpan struct { - sync.RWMutex - - ParentID int - - OperationName string - StartTime time.Time - FinishTime time.Time - - // All of the below are protected by the embedded RWMutex. - SpanContext MockSpanContext - tags map[string]interface{} - logs []MockLogRecord - tracer *MockTracer -} - -func newMockSpan(t *MockTracer, name string, opts opentracing.StartSpanOptions) *MockSpan { - tags := opts.Tags - if tags == nil { - tags = map[string]interface{}{} - } - traceID := nextMockID() - parentID := int(0) - var baggage map[string]string - sampled := true - if len(opts.References) > 0 { - traceID = opts.References[0].ReferencedContext.(MockSpanContext).TraceID - parentID = opts.References[0].ReferencedContext.(MockSpanContext).SpanID - sampled = opts.References[0].ReferencedContext.(MockSpanContext).Sampled - baggage = opts.References[0].ReferencedContext.(MockSpanContext).Baggage - } - spanContext := MockSpanContext{traceID, nextMockID(), sampled, baggage} - startTime := opts.StartTime - if startTime.IsZero() { - startTime = time.Now() - } - return &MockSpan{ - ParentID: parentID, - OperationName: name, - StartTime: startTime, - tags: tags, - logs: []MockLogRecord{}, - SpanContext: spanContext, - - tracer: t, - } -} - -// Tags returns a copy of tags accumulated by the span so far -func (s *MockSpan) Tags() map[string]interface{} { - s.RLock() - defer s.RUnlock() - tags := make(map[string]interface{}) - for k, v := range s.tags { - tags[k] = v - } - return tags -} - -// Tag returns a single tag -func (s *MockSpan) Tag(k string) interface{} { - s.RLock() - defer s.RUnlock() - return s.tags[k] -} - -// Logs returns a copy of logs accumulated in the span so far -func (s *MockSpan) Logs() []MockLogRecord { - s.RLock() - defer s.RUnlock() - logs := make([]MockLogRecord, len(s.logs)) - copy(logs, s.logs) - return logs -} - -// Context belongs to the Span interface -func (s *MockSpan) Context() opentracing.SpanContext { - return s.SpanContext -} - -// SetTag belongs to the Span interface -func (s *MockSpan) SetTag(key string, value interface{}) opentracing.Span { - s.Lock() - defer s.Unlock() - if key == string(ext.SamplingPriority) { - if v, ok := value.(uint16); ok { - s.SpanContext.Sampled = v > 0 - return s - } - if v, ok := value.(int); ok { - s.SpanContext.Sampled = v > 0 - return s - } - } - s.tags[key] = value - return s -} - -// SetBaggageItem belongs to the Span interface -func (s *MockSpan) 
SetBaggageItem(key, val string) opentracing.Span { - s.Lock() - defer s.Unlock() - s.SpanContext = s.SpanContext.WithBaggageItem(key, val) - return s -} - -// BaggageItem belongs to the Span interface -func (s *MockSpan) BaggageItem(key string) string { - s.RLock() - defer s.RUnlock() - return s.SpanContext.Baggage[key] -} - -// Finish belongs to the Span interface -func (s *MockSpan) Finish() { - s.Lock() - s.FinishTime = time.Now() - s.Unlock() - s.tracer.recordSpan(s) -} - -// FinishWithOptions belongs to the Span interface -func (s *MockSpan) FinishWithOptions(opts opentracing.FinishOptions) { - s.Lock() - s.FinishTime = opts.FinishTime - s.Unlock() - - // Handle any late-bound LogRecords. - for _, lr := range opts.LogRecords { - s.logFieldsWithTimestamp(lr.Timestamp, lr.Fields...) - } - // Handle (deprecated) BulkLogData. - for _, ld := range opts.BulkLogData { - if ld.Payload != nil { - s.logFieldsWithTimestamp( - ld.Timestamp, - log.String("event", ld.Event), - log.Object("payload", ld.Payload)) - } else { - s.logFieldsWithTimestamp( - ld.Timestamp, - log.String("event", ld.Event)) - } - } - - s.tracer.recordSpan(s) -} - -// String allows printing span for debugging -func (s *MockSpan) String() string { - return fmt.Sprintf( - "traceId=%d, spanId=%d, parentId=%d, sampled=%t, name=%s", - s.SpanContext.TraceID, s.SpanContext.SpanID, s.ParentID, - s.SpanContext.Sampled, s.OperationName) -} - -// LogFields belongs to the Span interface -func (s *MockSpan) LogFields(fields ...log.Field) { - s.logFieldsWithTimestamp(time.Now(), fields...) -} - -// The caller MUST NOT hold s.Lock -func (s *MockSpan) logFieldsWithTimestamp(ts time.Time, fields ...log.Field) { - lr := MockLogRecord{ - Timestamp: ts, - Fields: make([]MockKeyValue, len(fields)), - } - for i, f := range fields { - outField := &(lr.Fields[i]) - f.Marshal(outField) - } - - s.Lock() - defer s.Unlock() - s.logs = append(s.logs, lr) -} - -// LogKV belongs to the Span interface. -// -// This implementations coerces all "values" to strings, though that is not -// something all implementations need to do. Indeed, a motivated person can and -// probably should have this do a typed switch on the values. -func (s *MockSpan) LogKV(keyValues ...interface{}) { - if len(keyValues)%2 != 0 { - s.LogFields(log.Error(fmt.Errorf("Non-even keyValues len: %v", len(keyValues)))) - return - } - fields, err := log.InterleavedKVToFields(keyValues...) - if err != nil { - s.LogFields(log.Error(err), log.String("function", "LogKV")) - return - } - s.LogFields(fields...) 
-} - -// LogEvent belongs to the Span interface -func (s *MockSpan) LogEvent(event string) { - s.LogFields(log.String("event", event)) -} - -// LogEventWithPayload belongs to the Span interface -func (s *MockSpan) LogEventWithPayload(event string, payload interface{}) { - s.LogFields(log.String("event", event), log.Object("payload", payload)) -} - -// Log belongs to the Span interface -func (s *MockSpan) Log(data opentracing.LogData) { - panic("MockSpan.Log() no longer supported") -} - -// SetOperationName belongs to the Span interface -func (s *MockSpan) SetOperationName(operationName string) opentracing.Span { - s.Lock() - defer s.Unlock() - s.OperationName = operationName - return s -} - -// Tracer belongs to the Span interface -func (s *MockSpan) Tracer() opentracing.Tracer { - return s.tracer -} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go deleted file mode 100644 index a74c1458..00000000 --- a/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go +++ /dev/null @@ -1,105 +0,0 @@ -package mocktracer - -import ( - "sync" - - "github.com/opentracing/opentracing-go" -) - -// New returns a MockTracer opentracing.Tracer implementation that's intended -// to facilitate tests of OpenTracing instrumentation. -func New() *MockTracer { - t := &MockTracer{ - finishedSpans: []*MockSpan{}, - injectors: make(map[interface{}]Injector), - extractors: make(map[interface{}]Extractor), - } - - // register default injectors/extractors - textPropagator := new(TextMapPropagator) - t.RegisterInjector(opentracing.TextMap, textPropagator) - t.RegisterExtractor(opentracing.TextMap, textPropagator) - - httpPropagator := &TextMapPropagator{HTTPHeaders: true} - t.RegisterInjector(opentracing.HTTPHeaders, httpPropagator) - t.RegisterExtractor(opentracing.HTTPHeaders, httpPropagator) - - return t -} - -// MockTracer is only intended for testing OpenTracing instrumentation. -// -// It is entirely unsuitable for production use, but appropriate for tests -// that want to verify tracing behavior in other frameworks/applications. -type MockTracer struct { - sync.RWMutex - finishedSpans []*MockSpan - injectors map[interface{}]Injector - extractors map[interface{}]Extractor -} - -// FinishedSpans returns all spans that have been Finish()'ed since the -// MockTracer was constructed or since the last call to its Reset() method. -func (t *MockTracer) FinishedSpans() []*MockSpan { - t.RLock() - defer t.RUnlock() - spans := make([]*MockSpan, len(t.finishedSpans)) - copy(spans, t.finishedSpans) - return spans -} - -// Reset clears the internally accumulated finished spans. Note that any -// extant MockSpans will still append to finishedSpans when they Finish(), -// even after a call to Reset(). -func (t *MockTracer) Reset() { - t.Lock() - defer t.Unlock() - t.finishedSpans = []*MockSpan{} -} - -// StartSpan belongs to the Tracer interface. 
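The usual test pattern with `MockTracer` is to run instrumented code and then assert on `FinishedSpans()`. A sketch of such a test, also exercising `LogKV`, whose arguments are converted through `log.InterleavedKVToFields`:

```go
package example_test

import (
	"testing"

	"github.com/opentracing/opentracing-go/mocktracer"
)

func TestInstrumentation(t *testing.T) {
	tracer := mocktracer.New()

	span := tracer.StartSpan("fetch")
	span.SetTag("retries", 1)
	span.LogKV("event", "fetched", "bytes", 1024)
	span.Finish()

	spans := tracer.FinishedSpans()
	if len(spans) != 1 {
		t.Fatalf("expected 1 finished span, got %d", len(spans))
	}
	if spans[0].OperationName != "fetch" {
		t.Errorf("unexpected operation name %q", spans[0].OperationName)
	}
	if got := spans[0].Tag("retries"); got != 1 {
		t.Errorf("unexpected retries tag: %v", got)
	}
	if len(spans[0].Logs()) != 1 {
		t.Errorf("expected 1 log record, got %d", len(spans[0].Logs()))
	}
}
```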
-func (t *MockTracer) StartSpan(operationName string, opts ...opentracing.StartSpanOption) opentracing.Span { - sso := opentracing.StartSpanOptions{} - for _, o := range opts { - o.Apply(&sso) - } - return newMockSpan(t, operationName, sso) -} - -// RegisterInjector registers injector for given format -func (t *MockTracer) RegisterInjector(format interface{}, injector Injector) { - t.injectors[format] = injector -} - -// RegisterExtractor registers extractor for given format -func (t *MockTracer) RegisterExtractor(format interface{}, extractor Extractor) { - t.extractors[format] = extractor -} - -// Inject belongs to the Tracer interface. -func (t *MockTracer) Inject(sm opentracing.SpanContext, format interface{}, carrier interface{}) error { - spanContext, ok := sm.(MockSpanContext) - if !ok { - return opentracing.ErrInvalidCarrier - } - injector, ok := t.injectors[format] - if !ok { - return opentracing.ErrUnsupportedFormat - } - return injector.Inject(spanContext, carrier) -} - -// Extract belongs to the Tracer interface. -func (t *MockTracer) Extract(format interface{}, carrier interface{}) (opentracing.SpanContext, error) { - extractor, ok := t.extractors[format] - if !ok { - return nil, opentracing.ErrUnsupportedFormat - } - return extractor.Extract(carrier) -} - -func (t *MockTracer) recordSpan(span *MockSpan) { - t.Lock() - defer t.Unlock() - t.finishedSpans = append(t.finishedSpans, span) -} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer_test.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer_test.go deleted file mode 100644 index 63d01134..00000000 --- a/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer_test.go +++ /dev/null @@ -1,268 +0,0 @@ -package mocktracer - -import ( - "net/http" - "reflect" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/stretchr/testify/require" - - "github.com/opentracing/opentracing-go" - "github.com/opentracing/opentracing-go/ext" - "github.com/opentracing/opentracing-go/log" -) - -func TestMockTracer_StartSpan(t *testing.T) { - tracer := New() - span1 := tracer.StartSpan( - "a", - opentracing.Tags(map[string]interface{}{"x": "y"})) - - span2 := span1.Tracer().StartSpan( - "", opentracing.ChildOf(span1.Context())) - span2.Finish() - span1.Finish() - spans := tracer.FinishedSpans() - assert.Equal(t, 2, len(spans)) - - parent := spans[1] - child := spans[0] - assert.Equal(t, map[string]interface{}{"x": "y"}, parent.Tags()) - assert.Equal(t, child.ParentID, parent.Context().(MockSpanContext).SpanID) -} - -func TestMockSpan_SetOperationName(t *testing.T) { - tracer := New() - span := tracer.StartSpan("") - span.SetOperationName("x") - assert.Equal(t, "x", span.(*MockSpan).OperationName) -} - -func TestMockSpanContext_Baggage(t *testing.T) { - tracer := New() - span := tracer.StartSpan("x") - span.SetBaggageItem("x", "y") - assert.Equal(t, "y", span.BaggageItem("x")) - assert.Equal(t, map[string]string{"x": "y"}, span.Context().(MockSpanContext).Baggage) - - baggage := make(map[string]string) - span.Context().ForeachBaggageItem(func(k, v string) bool { - baggage[k] = v - return true - }) - assert.Equal(t, map[string]string{"x": "y"}, baggage) - - span.SetBaggageItem("a", "b") - baggage = make(map[string]string) - span.Context().ForeachBaggageItem(func(k, v string) bool { - baggage[k] = v - return false // exit early - }) - assert.Equal(t, 2, len(span.Context().(MockSpanContext).Baggage)) - assert.Equal(t, 1, len(baggage)) -} - -func TestMockSpan_Tag(t 
*testing.T) { - tracer := New() - span := tracer.StartSpan("x") - span.SetTag("x", "y") - assert.Equal(t, "y", span.(*MockSpan).Tag("x")) -} - -func TestMockSpan_Tags(t *testing.T) { - tracer := New() - span := tracer.StartSpan("x") - span.SetTag("x", "y") - assert.Equal(t, map[string]interface{}{"x": "y"}, span.(*MockSpan).Tags()) -} - -func TestMockTracer_FinishedSpans_and_Reset(t *testing.T) { - tracer := New() - span := tracer.StartSpan("x") - span.SetTag("x", "y") - span.Finish() - spans := tracer.FinishedSpans() - assert.Equal(t, 1, len(spans)) - assert.Equal(t, map[string]interface{}{"x": "y"}, spans[0].Tags()) - - tracer.Reset() - spans = tracer.FinishedSpans() - assert.Equal(t, 0, len(spans)) -} - -func zeroOutTimestamps(recs []MockLogRecord) { - for i := range recs { - recs[i].Timestamp = time.Time{} - } -} - -func TestMockSpan_LogKV(t *testing.T) { - tracer := New() - span := tracer.StartSpan("s") - span.LogKV("key0", "string0") - span.LogKV("key1", "string1", "key2", uint32(42)) - span.Finish() - spans := tracer.FinishedSpans() - assert.Equal(t, 1, len(spans)) - actual := spans[0].Logs() - zeroOutTimestamps(actual) - assert.Equal(t, []MockLogRecord{ - MockLogRecord{ - Fields: []MockKeyValue{ - MockKeyValue{Key: "key0", ValueKind: reflect.String, ValueString: "string0"}, - }, - }, - MockLogRecord{ - Fields: []MockKeyValue{ - MockKeyValue{Key: "key1", ValueKind: reflect.String, ValueString: "string1"}, - MockKeyValue{Key: "key2", ValueKind: reflect.Uint32, ValueString: "42"}, - }, - }, - }, actual) -} - -func TestMockSpan_LogFields(t *testing.T) { - tracer := New() - span := tracer.StartSpan("s") - span.LogFields(log.String("key0", "string0")) - span.LogFields(log.String("key1", "string1"), log.Uint32("key2", uint32(42))) - span.LogFields(log.Lazy(func(fv log.Encoder) { - fv.EmitInt("key_lazy", 12) - })) - span.FinishWithOptions(opentracing.FinishOptions{ - LogRecords: []opentracing.LogRecord{ - {Timestamp: time.Now(), Fields: []log.Field{log.String("key9", "finish")}}, - }}) - spans := tracer.FinishedSpans() - assert.Equal(t, 1, len(spans)) - actual := spans[0].Logs() - zeroOutTimestamps(actual) - assert.Equal(t, []MockLogRecord{ - MockLogRecord{ - Fields: []MockKeyValue{ - MockKeyValue{Key: "key0", ValueKind: reflect.String, ValueString: "string0"}, - }, - }, - MockLogRecord{ - Fields: []MockKeyValue{ - MockKeyValue{Key: "key1", ValueKind: reflect.String, ValueString: "string1"}, - MockKeyValue{Key: "key2", ValueKind: reflect.Uint32, ValueString: "42"}, - }, - }, - MockLogRecord{ - Fields: []MockKeyValue{ - // Note that the LazyLogger gets to control the key as well as the value. 
- MockKeyValue{Key: "key_lazy", ValueKind: reflect.Int, ValueString: "12"}, - }, - }, - MockLogRecord{ - Fields: []MockKeyValue{ - MockKeyValue{Key: "key9", ValueKind: reflect.String, ValueString: "finish"}, - }, - }, - }, actual) -} - -func TestMockSpan_DeprecatedLogs(t *testing.T) { - tracer := New() - span := tracer.StartSpan("x") - span.LogEvent("x") - span.LogEventWithPayload("y", "z") - span.LogEvent("a") - span.FinishWithOptions(opentracing.FinishOptions{ - BulkLogData: []opentracing.LogData{{Event: "f"}}}) - spans := tracer.FinishedSpans() - assert.Equal(t, 1, len(spans)) - actual := spans[0].Logs() - zeroOutTimestamps(actual) - assert.Equal(t, []MockLogRecord{ - MockLogRecord{ - Fields: []MockKeyValue{ - MockKeyValue{Key: "event", ValueKind: reflect.String, ValueString: "x"}, - }, - }, - MockLogRecord{ - Fields: []MockKeyValue{ - MockKeyValue{Key: "event", ValueKind: reflect.String, ValueString: "y"}, - MockKeyValue{Key: "payload", ValueKind: reflect.String, ValueString: "z"}, - }, - }, - MockLogRecord{ - Fields: []MockKeyValue{ - MockKeyValue{Key: "event", ValueKind: reflect.String, ValueString: "a"}, - }, - }, - MockLogRecord{ - Fields: []MockKeyValue{ - MockKeyValue{Key: "event", ValueKind: reflect.String, ValueString: "f"}, - }, - }, - }, actual) -} - -func TestMockTracer_Propagation(t *testing.T) { - textCarrier := func() interface{} { - return opentracing.TextMapCarrier(make(map[string]string)) - } - textLen := func(c interface{}) int { - return len(c.(opentracing.TextMapCarrier)) - } - - httpCarrier := func() interface{} { - httpHeaders := http.Header(make(map[string][]string)) - return opentracing.HTTPHeadersCarrier(httpHeaders) - } - httpLen := func(c interface{}) int { - return len(c.(opentracing.HTTPHeadersCarrier)) - } - - tests := []struct { - sampled bool - format opentracing.BuiltinFormat - carrier func() interface{} - len func(interface{}) int - }{ - {sampled: true, format: opentracing.TextMap, carrier: textCarrier, len: textLen}, - {sampled: false, format: opentracing.TextMap, carrier: textCarrier, len: textLen}, - {sampled: true, format: opentracing.HTTPHeaders, carrier: httpCarrier, len: httpLen}, - {sampled: false, format: opentracing.HTTPHeaders, carrier: httpCarrier, len: httpLen}, - } - for _, test := range tests { - tracer := New() - span := tracer.StartSpan("x") - span.SetBaggageItem("x", "y:z") // colon should be URL encoded as %3A - if !test.sampled { - ext.SamplingPriority.Set(span, 0) - } - mSpan := span.(*MockSpan) - - assert.Equal(t, opentracing.ErrUnsupportedFormat, - tracer.Inject(span.Context(), opentracing.Binary, nil)) - assert.Equal(t, opentracing.ErrInvalidCarrier, - tracer.Inject(span.Context(), opentracing.TextMap, span)) - - carrier := test.carrier() - - err := tracer.Inject(span.Context(), test.format, carrier) - require.NoError(t, err) - assert.Equal(t, 4, test.len(carrier), "expect baggage + 2 ids + sampled") - if test.format == opentracing.HTTPHeaders { - c := carrier.(opentracing.HTTPHeadersCarrier) - assert.Equal(t, "y%3Az", c["Mockpfx-Baggage-X"][0]) - } - - _, err = tracer.Extract(opentracing.Binary, nil) - assert.Equal(t, opentracing.ErrUnsupportedFormat, err) - _, err = tracer.Extract(opentracing.TextMap, tracer) - assert.Equal(t, opentracing.ErrInvalidCarrier, err) - - extractedContext, err := tracer.Extract(test.format, carrier) - require.NoError(t, err) - assert.Equal(t, mSpan.SpanContext.TraceID, extractedContext.(MockSpanContext).TraceID) - assert.Equal(t, mSpan.SpanContext.SpanID, extractedContext.(MockSpanContext).SpanID) - 
assert.Equal(t, test.sampled, extractedContext.(MockSpanContext).Sampled) - assert.Equal(t, "y:z", extractedContext.(MockSpanContext).Baggage["x"]) - } -} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go deleted file mode 100644 index 8364f1d1..00000000 --- a/vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go +++ /dev/null @@ -1,120 +0,0 @@ -package mocktracer - -import ( - "fmt" - "net/url" - "strconv" - "strings" - - "github.com/opentracing/opentracing-go" -) - -const mockTextMapIdsPrefix = "mockpfx-ids-" -const mockTextMapBaggagePrefix = "mockpfx-baggage-" - -var emptyContext = MockSpanContext{} - -// Injector is responsible for injecting SpanContext instances in a manner suitable -// for propagation via a format-specific "carrier" object. Typically the -// injection will take place across an RPC boundary, but message queues and -// other IPC mechanisms are also reasonable places to use an Injector. -type Injector interface { - // Inject takes `SpanContext` and injects it into `carrier`. The actual type - // of `carrier` depends on the `format` passed to `Tracer.Inject()`. - // - // Implementations may return opentracing.ErrInvalidCarrier or any other - // implementation-specific error if injection fails. - Inject(ctx MockSpanContext, carrier interface{}) error -} - -// Extractor is responsible for extracting SpanContext instances from a -// format-specific "carrier" object. Typically the extraction will take place -// on the server side of an RPC boundary, but message queues and other IPC -// mechanisms are also reasonable places to use an Extractor. -type Extractor interface { - // Extract decodes a SpanContext instance from the given `carrier`, - // or (nil, opentracing.ErrSpanContextNotFound) if no context could - // be found in the `carrier`. - Extract(carrier interface{}) (MockSpanContext, error) -} - -// TextMapPropagator implements Injector/Extractor for TextMap and HTTPHeaders formats. 
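A round trip through the `TextMap` format shows what this propagator encodes: the `mockpfx-ids-*` trace/span/sampled entries plus one `mockpfx-baggage-*` entry per baggage item. A sketch:

```go
package main

import (
	"fmt"

	opentracing "github.com/opentracing/opentracing-go"
	"github.com/opentracing/opentracing-go/mocktracer"
)

func main() {
	tracer := mocktracer.New()
	span := tracer.StartSpan("client-op")
	span.SetBaggageItem("user", "alice")

	carrier := opentracing.TextMapCarrier(map[string]string{})
	if err := tracer.Inject(span.Context(), opentracing.TextMap, carrier); err != nil {
		panic(err)
	}
	fmt.Println(carrier) // trace/span ids, sampled flag, and baggage entries

	extracted, err := tracer.Extract(opentracing.TextMap, carrier)
	if err != nil {
		panic(err)
	}
	fmt.Println(extracted.(mocktracer.MockSpanContext).Baggage["user"]) // "alice"
}
```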
-type TextMapPropagator struct { - HTTPHeaders bool -} - -// Inject implements the Injector interface -func (t *TextMapPropagator) Inject(spanContext MockSpanContext, carrier interface{}) error { - writer, ok := carrier.(opentracing.TextMapWriter) - if !ok { - return opentracing.ErrInvalidCarrier - } - // Ids: - writer.Set(mockTextMapIdsPrefix+"traceid", strconv.Itoa(spanContext.TraceID)) - writer.Set(mockTextMapIdsPrefix+"spanid", strconv.Itoa(spanContext.SpanID)) - writer.Set(mockTextMapIdsPrefix+"sampled", fmt.Sprint(spanContext.Sampled)) - // Baggage: - for baggageKey, baggageVal := range spanContext.Baggage { - safeVal := baggageVal - if t.HTTPHeaders { - safeVal = url.QueryEscape(baggageVal) - } - writer.Set(mockTextMapBaggagePrefix+baggageKey, safeVal) - } - return nil -} - -// Extract implements the Extractor interface -func (t *TextMapPropagator) Extract(carrier interface{}) (MockSpanContext, error) { - reader, ok := carrier.(opentracing.TextMapReader) - if !ok { - return emptyContext, opentracing.ErrInvalidCarrier - } - rval := MockSpanContext{0, 0, true, nil} - err := reader.ForeachKey(func(key, val string) error { - lowerKey := strings.ToLower(key) - switch { - case lowerKey == mockTextMapIdsPrefix+"traceid": - // Ids: - i, err := strconv.Atoi(val) - if err != nil { - return err - } - rval.TraceID = i - case lowerKey == mockTextMapIdsPrefix+"spanid": - // Ids: - i, err := strconv.Atoi(val) - if err != nil { - return err - } - rval.SpanID = i - case lowerKey == mockTextMapIdsPrefix+"sampled": - b, err := strconv.ParseBool(val) - if err != nil { - return err - } - rval.Sampled = b - case strings.HasPrefix(lowerKey, mockTextMapBaggagePrefix): - // Baggage: - if rval.Baggage == nil { - rval.Baggage = make(map[string]string) - } - safeVal := val - if t.HTTPHeaders { - // unescape errors are ignored, nothing can be done - if rawVal, err := url.QueryUnescape(val); err == nil { - safeVal = rawVal - } - } - rval.Baggage[lowerKey[len(mockTextMapBaggagePrefix):]] = safeVal - } - return nil - }) - if rval.TraceID == 0 || rval.SpanID == 0 { - return emptyContext, opentracing.ErrSpanContextNotFound - } - if err != nil { - return emptyContext, err - } - return rval, nil -} diff --git a/vendor/github.com/opentracing/opentracing-go/noop.go b/vendor/github.com/opentracing/opentracing-go/noop.go deleted file mode 100644 index 0d32f692..00000000 --- a/vendor/github.com/opentracing/opentracing-go/noop.go +++ /dev/null @@ -1,64 +0,0 @@ -package opentracing - -import "github.com/opentracing/opentracing-go/log" - -// A NoopTracer is a trivial, minimum overhead implementation of Tracer -// for which all operations are no-ops. -// -// The primary use of this implementation is in libraries, such as RPC -// frameworks, that make tracing an optional feature controlled by the -// end user. A no-op implementation allows said libraries to use it -// as the default Tracer and to write instrumentation that does -// not need to keep checking if the tracer instance is nil. -// -// For the same reason, the NoopTracer is the default "global" tracer -// (see GlobalTracer and SetGlobalTracer functions). -// -// WARNING: NoopTracer does not support baggage propagation. 
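Because the global tracer defaults to `NoopTracer`, libraries can instrument unconditionally; until an application installs a real tracer, every span operation below is a no-op. A sketch of that pattern, with `mylib` a hypothetical library package:

```go
package mylib // hypothetical library package

import opentracing "github.com/opentracing/opentracing-go"

// DoWork is safe to call whether or not the application configured tracing:
// with the default NoopTracer, the span operations simply do nothing.
func DoWork() {
	span := opentracing.StartSpan("mylib.DoWork")
	defer span.Finish()

	span.SetTag("component", "mylib")
	// ... actual work ...
}
```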
-type NoopTracer struct{} - -type noopSpan struct{} -type noopSpanContext struct{} - -var ( - defaultNoopSpanContext = noopSpanContext{} - defaultNoopSpan = noopSpan{} - defaultNoopTracer = NoopTracer{} -) - -const ( - emptyString = "" -) - -// noopSpanContext: -func (n noopSpanContext) ForeachBaggageItem(handler func(k, v string) bool) {} - -// noopSpan: -func (n noopSpan) Context() SpanContext { return defaultNoopSpanContext } -func (n noopSpan) SetBaggageItem(key, val string) Span { return defaultNoopSpan } -func (n noopSpan) BaggageItem(key string) string { return emptyString } -func (n noopSpan) SetTag(key string, value interface{}) Span { return n } -func (n noopSpan) LogFields(fields ...log.Field) {} -func (n noopSpan) LogKV(keyVals ...interface{}) {} -func (n noopSpan) Finish() {} -func (n noopSpan) FinishWithOptions(opts FinishOptions) {} -func (n noopSpan) SetOperationName(operationName string) Span { return n } -func (n noopSpan) Tracer() Tracer { return defaultNoopTracer } -func (n noopSpan) LogEvent(event string) {} -func (n noopSpan) LogEventWithPayload(event string, payload interface{}) {} -func (n noopSpan) Log(data LogData) {} - -// StartSpan belongs to the Tracer interface. -func (n NoopTracer) StartSpan(operationName string, opts ...StartSpanOption) Span { - return defaultNoopSpan -} - -// Inject belongs to the Tracer interface. -func (n NoopTracer) Inject(sp SpanContext, format interface{}, carrier interface{}) error { - return nil -} - -// Extract belongs to the Tracer interface. -func (n NoopTracer) Extract(format interface{}, carrier interface{}) (SpanContext, error) { - return nil, ErrSpanContextNotFound -} diff --git a/vendor/github.com/opentracing/opentracing-go/options_test.go b/vendor/github.com/opentracing/opentracing-go/options_test.go deleted file mode 100644 index 56a543bf..00000000 --- a/vendor/github.com/opentracing/opentracing-go/options_test.go +++ /dev/null @@ -1,31 +0,0 @@ -package opentracing - -import ( - "testing" - - "github.com/stretchr/testify/require" -) - -func TestChildOfAndFollowsFrom(t *testing.T) { - tests := []struct { - newOpt func(SpanContext) SpanReference - refType SpanReferenceType - name string - }{ - {ChildOf, ChildOfRef, "ChildOf"}, - {FollowsFrom, FollowsFromRef, "FollowsFrom"}, - } - - for _, test := range tests { - opts := new(StartSpanOptions) - - test.newOpt(nil).Apply(opts) - require.Nil(t, opts.References, "%s(nil) must not append a reference", test.name) - - ctx := new(noopSpanContext) - test.newOpt(ctx).Apply(opts) - require.Equal(t, []SpanReference{ - SpanReference{ReferencedContext: ctx, Type: test.refType}, - }, opts.References, "%s(ctx) must append a reference", test.name) - } -} diff --git a/vendor/github.com/opentracing/opentracing-go/propagation.go b/vendor/github.com/opentracing/opentracing-go/propagation.go deleted file mode 100644 index 9583fc53..00000000 --- a/vendor/github.com/opentracing/opentracing-go/propagation.go +++ /dev/null @@ -1,176 +0,0 @@ -package opentracing - -import ( - "errors" - "net/http" -) - -/////////////////////////////////////////////////////////////////////////////// -// CORE PROPAGATION INTERFACES: -/////////////////////////////////////////////////////////////////////////////// - -var ( - // ErrUnsupportedFormat occurs when the `format` passed to Tracer.Inject() or - // Tracer.Extract() is not recognized by the Tracer implementation. 
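NoopTracer, deleted above, exists mainly so libraries can instrument unconditionally instead of nil-checking a tracer everywhere. A hypothetical sketch of that default-tracer pattern (the Client type and NewClient constructor are illustrative names, not from this repository):

package rpcclient

import opentracing "github.com/opentracing/opentracing-go"

// Client is a hypothetical instrumented RPC client.
type Client struct {
	tracer opentracing.Tracer
}

// NewClient falls back to NoopTracer so the instrumentation below never has
// to check whether a tracer was provided.
func NewClient(tracer opentracing.Tracer) *Client {
	if tracer == nil {
		tracer = opentracing.NoopTracer{}
	}
	return &Client{tracer: tracer}
}

// Call starts (and finishes) a span around every request; with NoopTracer
// these are all no-op method calls.
func (c *Client) Call(method string) {
	span := c.tracer.StartSpan(method)
	defer span.Finish()
	// ... issue the request ...
}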
-	ErrUnsupportedFormat = errors.New("opentracing: Unknown or unsupported Inject/Extract format")
-
-	// ErrSpanContextNotFound occurs when the `carrier` passed to
-	// Tracer.Extract() is valid and uncorrupted but has insufficient
-	// information to extract a SpanContext.
-	ErrSpanContextNotFound = errors.New("opentracing: SpanContext not found in Extract carrier")
-
-	// ErrInvalidSpanContext errors occur when Tracer.Inject() is asked to
-	// operate on a SpanContext which it is not prepared to handle (for
-	// example, since it was created by a different tracer implementation).
-	ErrInvalidSpanContext = errors.New("opentracing: SpanContext type incompatible with tracer")
-
-	// ErrInvalidCarrier errors occur when Tracer.Inject() or Tracer.Extract()
-	// implementations expect a different type of `carrier` than they are
-	// given.
-	ErrInvalidCarrier = errors.New("opentracing: Invalid Inject/Extract carrier")
-
-	// ErrSpanContextCorrupted occurs when the `carrier` passed to
-	// Tracer.Extract() is of the expected type but is corrupted.
-	ErrSpanContextCorrupted = errors.New("opentracing: SpanContext data corrupted in Extract carrier")
-)
-
-///////////////////////////////////////////////////////////////////////////////
-// BUILTIN PROPAGATION FORMATS:
-///////////////////////////////////////////////////////////////////////////////
-
-// BuiltinFormat is used to demarcate the values within package `opentracing`
-// that are intended for use with the Tracer.Inject() and Tracer.Extract()
-// methods.
-type BuiltinFormat byte
-
-const (
-	// Binary represents SpanContexts as opaque binary data.
-	//
-	// For Tracer.Inject(): the carrier must be an `io.Writer`.
-	//
-	// For Tracer.Extract(): the carrier must be an `io.Reader`.
-	Binary BuiltinFormat = iota
-
-	// TextMap represents SpanContexts as key:value string pairs.
-	//
-	// Unlike HTTPHeaders, the TextMap format does not restrict the key or
-	// value character sets in any way.
-	//
-	// For Tracer.Inject(): the carrier must be a `TextMapWriter`.
-	//
-	// For Tracer.Extract(): the carrier must be a `TextMapReader`.
-	TextMap
-
-	// HTTPHeaders represents SpanContexts as HTTP header string pairs.
-	//
-	// Unlike TextMap, the HTTPHeaders format requires that the keys and values
-	// be valid as HTTP headers as-is (i.e., character casing may be unstable
-	// and special characters are disallowed in keys, values should be
-	// URL-escaped, etc).
-	//
-	// For Tracer.Inject(): the carrier must be a `TextMapWriter`.
-	//
-	// For Tracer.Extract(): the carrier must be a `TextMapReader`.
-	//
-	// See HTTPHeadersCarrier for an implementation of both TextMapWriter
-	// and TextMapReader that defers to an http.Header instance for storage.
-	// For example, Inject():
-	//
-	//    carrier := opentracing.HTTPHeadersCarrier(httpReq.Header)
-	//    err := span.Tracer().Inject(
-	//        span.Context(), opentracing.HTTPHeaders, carrier)
-	//
-	// Or Extract():
-	//
-	//    carrier := opentracing.HTTPHeadersCarrier(httpReq.Header)
-	//    spanCtx, err := tracer.Extract(
-	//        opentracing.HTTPHeaders, carrier)
-	//
-	HTTPHeaders
-)
-
-// TextMapWriter is the Inject() carrier for the TextMap builtin format. With
-// it, the caller can encode a SpanContext for propagation as entries in a map
-// of unicode strings.
-type TextMapWriter interface {
-	// Set a key:value pair to the carrier. Multiple calls to Set() for the
-	// same key leads to undefined behavior.
-	//
-	// NOTE: The backing store for the TextMapWriter may contain data unrelated
-	// to SpanContext. As such, Inject() and Extract() implementations that
-	// call the TextMapWriter and TextMapReader interfaces must agree on a
-	// prefix or other convention to distinguish their own key:value pairs.
-	Set(key, val string)
-}
-
-// TextMapReader is the Extract() carrier for the TextMap builtin format. With it,
-// the caller can decode a propagated SpanContext as entries in a map of
-// unicode strings.
-type TextMapReader interface {
-	// ForeachKey returns TextMap contents via repeated calls to the `handler`
-	// function. If any call to `handler` returns a non-nil error, ForeachKey
-	// terminates and returns that error.
-	//
-	// NOTE: The backing store for the TextMapReader may contain data unrelated
-	// to SpanContext. As such, Inject() and Extract() implementations that
-	// call the TextMapWriter and TextMapReader interfaces must agree on a
-	// prefix or other convention to distinguish their own key:value pairs.
-	//
-	// The "foreach" callback pattern reduces unnecessary copying in some cases
-	// and also allows implementations to hold locks while the map is read.
-	ForeachKey(handler func(key, val string) error) error
-}
-
-// TextMapCarrier allows the use of regular map[string]string
-// as both TextMapWriter and TextMapReader.
-type TextMapCarrier map[string]string
-
-// ForeachKey conforms to the TextMapReader interface.
-func (c TextMapCarrier) ForeachKey(handler func(key, val string) error) error {
-	for k, v := range c {
-		if err := handler(k, v); err != nil {
-			return err
-		}
-	}
-	return nil
-}
-
-// Set implements Set() of opentracing.TextMapWriter
-func (c TextMapCarrier) Set(key, val string) {
-	c[key] = val
-}
-
-// HTTPHeadersCarrier satisfies both TextMapWriter and TextMapReader.
-//
-// Example usage for server side:
-//
-//     carrier := opentracing.HTTPHeadersCarrier(httpReq.Header)
-//     spanContext, err := tracer.Extract(opentracing.HTTPHeaders, carrier)
-//
-// Example usage for client side:
-//
-//     carrier := opentracing.HTTPHeadersCarrier(httpReq.Header)
-//     err := tracer.Inject(
-//         span.Context(),
-//         opentracing.HTTPHeaders,
-//         carrier)
-//
-type HTTPHeadersCarrier http.Header
-
-// Set conforms to the TextMapWriter interface.
-func (c HTTPHeadersCarrier) Set(key, val string) {
-	h := http.Header(c)
-	h.Add(key, val)
-}
-
-// ForeachKey conforms to the TextMapReader interface.
-func (c HTTPHeadersCarrier) ForeachKey(handler func(key, val string) error) error {
-	for k, vals := range c {
-		for _, v := range vals {
-			if err := handler(k, v); err != nil {
-				return err
-			}
-		}
-	}
-	return nil
-}
diff --git a/vendor/github.com/opentracing/opentracing-go/propagation_test.go b/vendor/github.com/opentracing/opentracing-go/propagation_test.go
deleted file mode 100644
index e3dad559..00000000
--- a/vendor/github.com/opentracing/opentracing-go/propagation_test.go
+++ /dev/null
@@ -1,93 +0,0 @@
-package opentracing
-
-import (
-	"net/http"
-	"strconv"
-	"testing"
-)
-
-const testHeaderPrefix = "testprefix-"
-
-func TestTextMapCarrierInject(t *testing.T) {
-	m := make(map[string]string)
-	m["NotOT"] = "blah"
-	m["opname"] = "AlsoNotOT"
-	tracer := testTracer{}
-	span := tracer.StartSpan("someSpan")
-	fakeID := span.Context().(testSpanContext).FakeID
-
-	carrier := TextMapCarrier(m)
-	if err := span.Tracer().Inject(span.Context(), TextMap, carrier); err != nil {
-		t.Fatal(err)
-	}
-
-	if len(m) != 3 {
-		t.Errorf("Unexpected header length: %v", len(m))
-	}
-	// The prefix comes from just above; the suffix comes from
-	// testTracer.Inject().
-	if m["testprefix-fakeid"] != strconv.Itoa(fakeID) {
-		t.Errorf("Could not find fakeid at expected key")
-	}
-}
-
-func TestTextMapCarrierExtract(t *testing.T) {
-	m := make(map[string]string)
-	m["NotOT"] = "blah"
-	m["opname"] = "AlsoNotOT"
-	m["testprefix-fakeid"] = "42"
-	tracer := testTracer{}
-
-	carrier := TextMapCarrier(m)
-	extractedContext, err := tracer.Extract(TextMap, carrier)
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	if extractedContext.(testSpanContext).FakeID != 42 {
-		t.Errorf("Failed to read testprefix-fakeid correctly")
-	}
-}
-
-func TestHTTPHeaderInject(t *testing.T) {
-	h := http.Header{}
-	h.Add("NotOT", "blah")
-	h.Add("opname", "AlsoNotOT")
-	tracer := testTracer{}
-	span := tracer.StartSpan("someSpan")
-	fakeID := span.Context().(testSpanContext).FakeID
-
-	// Use HTTPHeadersCarrier to wrap around `h`.
-	carrier := HTTPHeadersCarrier(h)
-	if err := span.Tracer().Inject(span.Context(), HTTPHeaders, carrier); err != nil {
-		t.Fatal(err)
-	}
-
-	if len(h) != 3 {
-		t.Errorf("Unexpected header length: %v", len(h))
-	}
-	// The prefix comes from just above; the suffix comes from
-	// testTracer.Inject().
-	if h.Get("testprefix-fakeid") != strconv.Itoa(fakeID) {
-		t.Errorf("Could not find fakeid at expected key")
-	}
-}
-
-func TestHTTPHeaderExtract(t *testing.T) {
-	h := http.Header{}
-	h.Add("NotOT", "blah")
-	h.Add("opname", "AlsoNotOT")
-	h.Add("testprefix-fakeid", "42")
-	tracer := testTracer{}
-
-	// Use HTTPHeadersCarrier to wrap around `h`.
-	carrier := HTTPHeadersCarrier(h)
-	spanContext, err := tracer.Extract(HTTPHeaders, carrier)
-	if err != nil {
-		t.Fatal(err)
-	}
-
-	if spanContext.(testSpanContext).FakeID != 42 {
-		t.Errorf("Failed to read testprefix-fakeid correctly")
-	}
-}
diff --git a/vendor/github.com/opentracing/opentracing-go/span.go b/vendor/github.com/opentracing/opentracing-go/span.go
deleted file mode 100644
index f6c3234a..00000000
--- a/vendor/github.com/opentracing/opentracing-go/span.go
+++ /dev/null
@@ -1,185 +0,0 @@
-package opentracing
-
-import (
-	"time"
-
-	"github.com/opentracing/opentracing-go/log"
-)
-
-// SpanContext represents Span state that must propagate to descendant Spans and across process
-// boundaries (e.g., a <trace_id, span_id, sampled> tuple).
-type SpanContext interface {
-	// ForeachBaggageItem grants access to all baggage items stored in the
-	// SpanContext.
-	// The handler function will be called for each baggage key/value pair.
-	// The ordering of items is not guaranteed.
-	//
-	// The bool return value indicates if the handler wants to continue iterating
-	// through the rest of the baggage items; for example if the handler is trying to
-	// find some baggage item by pattern matching the name, it can return false
-	// as soon as the item is found to stop further iterations.
-	ForeachBaggageItem(handler func(k, v string) bool)
-}
-
-// Span represents an active, un-finished span in the OpenTracing system.
-//
-// Spans are created by the Tracer interface.
-type Span interface {
-	// Sets the end timestamp and finalizes Span state.
-	//
-	// With the exception of calls to Context() (which are always allowed),
-	// Finish() must be the last call made to any span instance, and to do
-	// otherwise leads to undefined behavior.
-	Finish()
-	// FinishWithOptions is like Finish() but with explicit control over
-	// timestamps and log data.
-	FinishWithOptions(opts FinishOptions)
-
-	// Context() yields the SpanContext for this Span. Note that the return
-	// value of Context() is still valid after a call to Span.Finish(), as is
-	// a call to Span.Context() after a call to Span.Finish().
-	Context() SpanContext
-
-	// Sets or changes the operation name.
-	SetOperationName(operationName string) Span
-
-	// Adds a tag to the span.
-	//
-	// If there is a pre-existing tag set for `key`, it is overwritten.
-	//
-	// Tag values can be numeric types, strings, or bools. The behavior of
-	// other tag value types is undefined at the OpenTracing level. If a
-	// tracing system does not know how to handle a particular value type, it
-	// may ignore the tag, but shall not panic.
-	SetTag(key string, value interface{}) Span
-
-	// LogFields is an efficient and type-checked way to record key:value
-	// logging data about a Span, though the programming interface is a little
-	// more verbose than LogKV(). Here's an example:
-	//
-	//    span.LogFields(
-	//        log.String("event", "soft error"),
-	//        log.String("type", "cache timeout"),
-	//        log.Int("waited.millis", 1500))
-	//
-	// Also see Span.FinishWithOptions() and FinishOptions.BulkLogData.
-	LogFields(fields ...log.Field)
-
-	// LogKV is a concise, readable way to record key:value logging data about
-	// a Span, though unfortunately this also makes it less efficient and less
-	// type-safe than LogFields(). Here's an example:
-	//
-	//    span.LogKV(
-	//        "event", "soft error",
-	//        "type", "cache timeout",
-	//        "waited.millis", 1500)
-	//
-	// For LogKV (as opposed to LogFields()), the parameters must appear as
-	// key-value pairs, like
-	//
-	//    span.LogKV(key1, val1, key2, val2, key3, val3, ...)
-	//
-	// The keys must all be strings. The values may be strings, numeric types,
-	// bools, Go error instances, or arbitrary structs.
-	//
-	// (Note to implementors: consider the log.InterleavedKVToFields() helper)
-	LogKV(alternatingKeyValues ...interface{})
-
-	// SetBaggageItem sets a key:value pair on this Span and its SpanContext
-	// that also propagates to descendants of this Span.
-	//
-	// SetBaggageItem() enables powerful functionality given a full-stack
-	// opentracing integration (e.g., arbitrary application data from a mobile
-	// app can make it, transparently, all the way into the depths of a storage
-	// system), and with it some powerful costs: use this feature with care.
-	//
-	// IMPORTANT NOTE #1: SetBaggageItem() will only propagate baggage items to
-	// *future* causal descendants of the associated Span.
-	//
-	// IMPORTANT NOTE #2: Use this thoughtfully and with care. Every key and
-	// value is copied into every local *and remote* child of the associated
-	// Span, and that can add up to a lot of network and cpu overhead.
-	//
-	// Returns a reference to this Span for chaining.
-	SetBaggageItem(restrictedKey, value string) Span
-
-	// Gets the value for a baggage item given its key. Returns the empty string
-	// if the value isn't found in this Span.
-	BaggageItem(restrictedKey string) string
-
-	// Provides access to the Tracer that created this Span.
-	Tracer() Tracer
-
-	// Deprecated: use LogFields or LogKV
-	LogEvent(event string)
-	// Deprecated: use LogFields or LogKV
-	LogEventWithPayload(event string, payload interface{})
-	// Deprecated: use LogFields or LogKV
-	Log(data LogData)
-}
-
-// LogRecord is data associated with a single Span log. Every LogRecord
-// instance must specify at least one Field.
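The Span interface above is the whole per-span API surface. A minimal instrumentation sketch using it (the tracer argument is assumed to be any configured implementation):

package example

import (
	opentracing "github.com/opentracing/opentracing-go"
	"github.com/opentracing/opentracing-go/log"
)

func handleRequest(tracer opentracing.Tracer) {
	span := tracer.StartSpan("handle-request")
	defer span.Finish()

	// Tags describe the span as a whole; logs are timestamped events.
	span.SetTag("component", "example")
	span.LogFields(
		log.String("event", "soft error"),
		log.Int("waited.millis", 1500))
	span.LogKV("event", "retry", "attempt", 2)

	// Baggage propagates to all future causal descendants of this span.
	span.SetBaggageItem("request-id", "abc123")
}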
-type LogRecord struct { - Timestamp time.Time - Fields []log.Field -} - -// FinishOptions allows Span.FinishWithOptions callers to override the finish -// timestamp and provide log data via a bulk interface. -type FinishOptions struct { - // FinishTime overrides the Span's finish time, or implicitly becomes - // time.Now() if FinishTime.IsZero(). - // - // FinishTime must resolve to a timestamp that's >= the Span's StartTime - // (per StartSpanOptions). - FinishTime time.Time - - // LogRecords allows the caller to specify the contents of many LogFields() - // calls with a single slice. May be nil. - // - // None of the LogRecord.Timestamp values may be .IsZero() (i.e., they must - // be set explicitly). Also, they must be >= the Span's start timestamp and - // <= the FinishTime (or time.Now() if FinishTime.IsZero()). Otherwise the - // behavior of FinishWithOptions() is undefined. - // - // If specified, the caller hands off ownership of LogRecords at - // FinishWithOptions() invocation time. - // - // If specified, the (deprecated) BulkLogData must be nil or empty. - LogRecords []LogRecord - - // BulkLogData is DEPRECATED. - BulkLogData []LogData -} - -// LogData is DEPRECATED -type LogData struct { - Timestamp time.Time - Event string - Payload interface{} -} - -// ToLogRecord converts a deprecated LogData to a non-deprecated LogRecord -func (ld *LogData) ToLogRecord() LogRecord { - var literalTimestamp time.Time - if ld.Timestamp.IsZero() { - literalTimestamp = time.Now() - } else { - literalTimestamp = ld.Timestamp - } - rval := LogRecord{ - Timestamp: literalTimestamp, - } - if ld.Payload == nil { - rval.Fields = []log.Field{ - log.String("event", ld.Event), - } - } else { - rval.Fields = []log.Field{ - log.String("event", ld.Event), - log.Object("payload", ld.Payload), - } - } - return rval -} diff --git a/vendor/github.com/opentracing/opentracing-go/testtracer_test.go b/vendor/github.com/opentracing/opentracing-go/testtracer_test.go deleted file mode 100644 index dd13788c..00000000 --- a/vendor/github.com/opentracing/opentracing-go/testtracer_test.go +++ /dev/null @@ -1,138 +0,0 @@ -package opentracing - -import ( - "strconv" - "strings" - "time" - - "github.com/opentracing/opentracing-go/log" -) - -const testHTTPHeaderPrefix = "testprefix-" - -// testTracer is a most-noop Tracer implementation that makes it possible for -// unittests to verify whether certain methods were / were not called. 
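FinishOptions above pairs with LogRecord like so; a short sketch with explicit, in-range timestamps (the durations are arbitrary):

package example

import (
	"time"

	opentracing "github.com/opentracing/opentracing-go"
	"github.com/opentracing/opentracing-go/log"
)

func finishWithBulkLogs(span opentracing.Span, start time.Time) {
	span.FinishWithOptions(opentracing.FinishOptions{
		// FinishTime must resolve to a timestamp >= the span's start time.
		FinishTime: start.Add(250 * time.Millisecond),
		// Each LogRecord needs an explicit timestamp between the start and
		// finish times, and at least one field.
		LogRecords: []opentracing.LogRecord{{
			Timestamp: start.Add(100 * time.Millisecond),
			Fields:    []log.Field{log.String("event", "cache miss")},
		}},
	})
}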
-type testTracer struct{} - -var fakeIDSource = 1 - -func nextFakeID() int { - fakeIDSource++ - return fakeIDSource -} - -type testSpanContext struct { - HasParent bool - FakeID int -} - -func (n testSpanContext) ForeachBaggageItem(handler func(k, v string) bool) {} - -type testSpan struct { - spanContext testSpanContext - OperationName string - StartTime time.Time - Tags map[string]interface{} -} - -func (n testSpan) Equal(os Span) bool { - other, ok := os.(testSpan) - if !ok { - return false - } - if n.spanContext != other.spanContext { - return false - } - if n.OperationName != other.OperationName { - return false - } - if !n.StartTime.Equal(other.StartTime) { - return false - } - if len(n.Tags) != len(other.Tags) { - return false - } - - for k, v := range n.Tags { - if ov, ok := other.Tags[k]; !ok || ov != v { - return false - } - } - - return true -} - -// testSpan: -func (n testSpan) Context() SpanContext { return n.spanContext } -func (n testSpan) SetTag(key string, value interface{}) Span { return n } -func (n testSpan) Finish() {} -func (n testSpan) FinishWithOptions(opts FinishOptions) {} -func (n testSpan) LogFields(fields ...log.Field) {} -func (n testSpan) LogKV(kvs ...interface{}) {} -func (n testSpan) SetOperationName(operationName string) Span { return n } -func (n testSpan) Tracer() Tracer { return testTracer{} } -func (n testSpan) SetBaggageItem(key, val string) Span { return n } -func (n testSpan) BaggageItem(key string) string { return "" } -func (n testSpan) LogEvent(event string) {} -func (n testSpan) LogEventWithPayload(event string, payload interface{}) {} -func (n testSpan) Log(data LogData) {} - -// StartSpan belongs to the Tracer interface. -func (n testTracer) StartSpan(operationName string, opts ...StartSpanOption) Span { - sso := StartSpanOptions{} - for _, o := range opts { - o.Apply(&sso) - } - return n.startSpanWithOptions(operationName, sso) -} - -func (n testTracer) startSpanWithOptions(name string, opts StartSpanOptions) Span { - fakeID := nextFakeID() - if len(opts.References) > 0 { - fakeID = opts.References[0].ReferencedContext.(testSpanContext).FakeID - } - - return testSpan{ - OperationName: name, - StartTime: opts.StartTime, - Tags: opts.Tags, - spanContext: testSpanContext{ - HasParent: len(opts.References) > 0, - FakeID: fakeID, - }, - } -} - -// Inject belongs to the Tracer interface. -func (n testTracer) Inject(sp SpanContext, format interface{}, carrier interface{}) error { - spanContext := sp.(testSpanContext) - switch format { - case HTTPHeaders, TextMap: - carrier.(TextMapWriter).Set(testHTTPHeaderPrefix+"fakeid", strconv.Itoa(spanContext.FakeID)) - return nil - } - return ErrUnsupportedFormat -} - -// Extract belongs to the Tracer interface. -func (n testTracer) Extract(format interface{}, carrier interface{}) (SpanContext, error) { - switch format { - case HTTPHeaders, TextMap: - // Just for testing purposes... generally not a worthwhile thing to - // propagate. 
-		sm := testSpanContext{}
-		err := carrier.(TextMapReader).ForeachKey(func(key, val string) error {
-			switch strings.ToLower(key) {
-			case testHTTPHeaderPrefix + "fakeid":
-				i, err := strconv.Atoi(val)
-				if err != nil {
-					return err
-				}
-				sm.FakeID = i
-			}
-			return nil
-		})
-		return sm, err
-	}
-	return nil, ErrSpanContextNotFound
-}
diff --git a/vendor/github.com/opentracing/opentracing-go/tracer.go b/vendor/github.com/opentracing/opentracing-go/tracer.go
deleted file mode 100644
index fd77c1df..00000000
--- a/vendor/github.com/opentracing/opentracing-go/tracer.go
+++ /dev/null
@@ -1,305 +0,0 @@
-package opentracing
-
-import "time"
-
-// Tracer is a simple, thin interface for Span creation and SpanContext
-// propagation.
-type Tracer interface {
-
-	// Create, start, and return a new Span with the given `operationName` and
-	// incorporate the given StartSpanOption `opts`. (Note that `opts` borrows
-	// from the "functional options" pattern, per
-	// http://dave.cheney.net/2014/10/17/functional-options-for-friendly-apis)
-	//
-	// A Span with no SpanReference options (e.g., opentracing.ChildOf() or
-	// opentracing.FollowsFrom()) becomes the root of its own trace.
-	//
-	// Examples:
-	//
-	//     var tracer opentracing.Tracer = ...
-	//
-	//     // The root-span case:
-	//     sp := tracer.StartSpan("GetFeed")
-	//
-	//     // The vanilla child span case:
-	//     sp := tracer.StartSpan(
-	//         "GetFeed",
-	//         opentracing.ChildOf(parentSpan.Context()))
-	//
-	//     // All the bells and whistles:
-	//     sp := tracer.StartSpan(
-	//         "GetFeed",
-	//         opentracing.ChildOf(parentSpan.Context()),
-	//         opentracing.Tag{"user_agent", loggedReq.UserAgent},
-	//         opentracing.StartTime(loggedReq.Timestamp),
-	//     )
-	//
-	StartSpan(operationName string, opts ...StartSpanOption) Span
-
-	// Inject() takes the `sm` SpanContext instance and injects it for
-	// propagation within `carrier`. The actual type of `carrier` depends on
-	// the value of `format`.
-	//
-	// OpenTracing defines a common set of `format` values (see BuiltinFormat),
-	// and each has an expected carrier type.
-	//
-	// Other packages may declare their own `format` values, much like the keys
-	// used by `context.Context` (see
-	// https://godoc.org/golang.org/x/net/context#WithValue).
-	//
-	// Example usage (sans error handling):
-	//
-	//     carrier := opentracing.HTTPHeadersCarrier(httpReq.Header)
-	//     err := tracer.Inject(
-	//         span.Context(),
-	//         opentracing.HTTPHeaders,
-	//         carrier)
-	//
-	// NOTE: All opentracing.Tracer implementations MUST support all
-	// BuiltinFormats.
-	//
-	// Implementations may return opentracing.ErrUnsupportedFormat if `format`
-	// is not supported by (or not known by) the implementation.
-	//
-	// Implementations may return opentracing.ErrInvalidCarrier or any other
-	// implementation-specific error if the format is supported but injection
-	// fails anyway.
-	//
-	// See Tracer.Extract().
-	Inject(sm SpanContext, format interface{}, carrier interface{}) error
-
-	// Extract() returns a SpanContext instance given `format` and `carrier`.
-	//
-	// OpenTracing defines a common set of `format` values (see BuiltinFormat),
-	// and each has an expected carrier type.
-	//
-	// Other packages may declare their own `format` values, much like the keys
-	// used by `context.Context` (see
-	// https://godoc.org/golang.org/x/net/context#WithValue).
-	//
-	// Example usage (with StartSpan):
-	//
-	//
-	//     carrier := opentracing.HTTPHeadersCarrier(httpReq.Header)
-	//     clientContext, err := tracer.Extract(opentracing.HTTPHeaders, carrier)
-	//
-	//     // ... assuming the ultimate goal here is to resume the trace with a
-	//     // server-side Span:
-	//     var serverSpan opentracing.Span
-	//     if err == nil {
-	//         serverSpan = tracer.StartSpan(
-	//             rpcMethodName, ext.RPCServerOption(clientContext))
-	//     } else {
-	//         serverSpan = tracer.StartSpan(rpcMethodName)
-	//     }
-	//
-	//
-	// NOTE: All opentracing.Tracer implementations MUST support all
-	// BuiltinFormats.
-	//
-	// Return values:
-	//  - A successful Extract returns a SpanContext instance and a nil error
-	//  - If there was simply no SpanContext to extract in `carrier`, Extract()
-	//    returns (nil, opentracing.ErrSpanContextNotFound)
-	//  - If `format` is unsupported or unrecognized, Extract() returns (nil,
-	//    opentracing.ErrUnsupportedFormat)
-	//  - If there are more fundamental problems with the `carrier` object,
-	//    Extract() may return opentracing.ErrInvalidCarrier,
-	//    opentracing.ErrSpanContextCorrupted, or implementation-specific
-	//    errors.
-	//
-	// See Tracer.Inject().
-	Extract(format interface{}, carrier interface{}) (SpanContext, error)
-}
-
-// StartSpanOptions allows Tracer.StartSpan() callers and implementors a
-// mechanism to override the start timestamp, specify Span References, and make
-// a single Tag or multiple Tags available at Span start time.
-//
-// StartSpan() callers should look at the StartSpanOption interface and
-// implementations available in this package.
-//
-// Tracer implementations can convert a slice of `StartSpanOption` instances
-// into a `StartSpanOptions` struct like so:
-//
-//     func StartSpan(opName string, opts ...opentracing.StartSpanOption) {
-//         sso := opentracing.StartSpanOptions{}
-//         for _, o := range opts {
-//             o.Apply(&sso)
-//         }
-//         ...
-//     }
-//
-type StartSpanOptions struct {
-	// Zero or more causal references to other Spans (via their SpanContext).
-	// If empty, start a "root" Span (i.e., start a new trace).
-	References []SpanReference
-
-	// StartTime overrides the Span's start time, or implicitly becomes
-	// time.Now() if StartTime.IsZero().
-	StartTime time.Time
-
-	// Tags may have zero or more entries; the restrictions on map values are
-	// identical to those for Span.SetTag(). May be nil.
-	//
-	// If specified, the caller hands off ownership of Tags at
-	// StartSpan() invocation time.
-	Tags map[string]interface{}
-}
-
-// StartSpanOption instances (zero or more) may be passed to Tracer.StartSpan.
-//
-// StartSpanOption borrows from the "functional options" pattern, per
-// http://dave.cheney.net/2014/10/17/functional-options-for-friendly-apis
-type StartSpanOption interface {
-	Apply(*StartSpanOptions)
-}
-
-// SpanReferenceType is an enum type describing different categories of
-// relationships between two Spans. If Span-2 refers to Span-1, the
-// SpanReferenceType describes Span-1 from Span-2's perspective. For example,
-// ChildOfRef means that Span-1 created Span-2.
-//
-// NOTE: Span-1 and Span-2 do *not* necessarily depend on each other for
-// completion; e.g., Span-2 may be part of a background job enqueued by Span-1,
-// or Span-2 may be sitting in a distributed queue behind Span-1.
-type SpanReferenceType int
-
-const (
-	// ChildOfRef refers to a parent Span that caused *and* somehow depends
-	// upon the new child Span. Often (but not always), the parent Span cannot
-	// finish until the child Span does.
-	//
-	// A timing diagram for a ChildOfRef that's blocked on the new Span:
-	//
-	//     [-Parent Span---------]
-	//          [-Child Span----]
-	//
-	// See http://opentracing.io/spec/
-	//
-	// See opentracing.ChildOf()
-	ChildOfRef SpanReferenceType = iota
-
-	// FollowsFromRef refers to a parent Span that does not depend in any way
-	// on the result of the new child Span. For instance, one might use
-	// FollowsFromRefs to describe pipeline stages separated by queues,
-	// or a fire-and-forget cache insert at the tail end of a web request.
-	//
-	// A FollowsFromRef Span is part of the same logical trace as the new Span:
-	// i.e., the new Span is somehow caused by the work of its FollowsFromRef.
-	//
-	// All of the following could be valid timing diagrams for children that
-	// "FollowFrom" a parent.
-	//
-	//     [-Parent Span-]  [-Child Span-]
-	//
-	//
-	//     [-Parent Span--]
-	//      [-Child Span-]
-	//
-	//
-	//     [-Parent Span-]
-	//                 [-Child Span-]
-	//
-	// See http://opentracing.io/spec/
-	//
-	// See opentracing.FollowsFrom()
-	FollowsFromRef
-)
-
-// SpanReference is a StartSpanOption that pairs a SpanReferenceType and a
-// referenced SpanContext. See the SpanReferenceType documentation for
-// supported relationships. If SpanReference is created with
-// ReferencedContext==nil, it has no effect. Thus it allows for a more concise
-// syntax for starting spans:
-//
-//     sc, _ := tracer.Extract(someFormat, someCarrier)
-//     span := tracer.StartSpan("operation", opentracing.ChildOf(sc))
-//
-// The `ChildOf(sc)` option above will not panic if sc == nil, it will just
-// not add the parent span reference to the options.
-type SpanReference struct {
-	Type              SpanReferenceType
-	ReferencedContext SpanContext
-}
-
-// Apply satisfies the StartSpanOption interface.
-func (r SpanReference) Apply(o *StartSpanOptions) {
-	if r.ReferencedContext != nil {
-		o.References = append(o.References, r)
-	}
-}
-
-// ChildOf returns a StartSpanOption pointing to a dependent parent span.
-// If sc == nil, the option has no effect.
-//
-// See ChildOfRef, SpanReference
-func ChildOf(sc SpanContext) SpanReference {
-	return SpanReference{
-		Type:              ChildOfRef,
-		ReferencedContext: sc,
-	}
-}
-
-// FollowsFrom returns a StartSpanOption pointing to a parent Span that caused
-// the child Span but does not directly depend on its result in any way.
-// If sc == nil, the option has no effect.
-//
-// See FollowsFromRef, SpanReference
-func FollowsFrom(sc SpanContext) SpanReference {
	return SpanReference{
-		Type:              FollowsFromRef,
-		ReferencedContext: sc,
-	}
-}
-
-// StartTime is a StartSpanOption that sets an explicit start timestamp for the
-// new Span.
-type StartTime time.Time
-
-// Apply satisfies the StartSpanOption interface.
-func (t StartTime) Apply(o *StartSpanOptions) {
-	o.StartTime = time.Time(t)
-}
-
-// Tags are a generic map from an arbitrary string key to an opaque value type.
-// The underlying tracing system is responsible for interpreting and
-// serializing the values.
-type Tags map[string]interface{}
-
-// Apply satisfies the StartSpanOption interface.
-func (t Tags) Apply(o *StartSpanOptions) {
-	if o.Tags == nil {
-		o.Tags = make(map[string]interface{})
-	}
-	for k, v := range t {
-		o.Tags[k] = v
-	}
-}
-
-// Tag may be passed as a StartSpanOption to add a tag to new spans,
-// or its Set method may be used to apply the tag to an existing Span,
-// for example:
-//
-//     tracer.StartSpan("opName", Tag{"key", value})
-//
-// or
-//
-//     Tag{"key", value}.Set(span)
-type Tag struct {
-	Key   string
-	Value interface{}
-}
-
-// Apply satisfies the StartSpanOption interface.
-func (t Tag) Apply(o *StartSpanOptions) {
-	if o.Tags == nil {
-		o.Tags = make(map[string]interface{})
-	}
-	o.Tags[t.Key] = t.Value
-}
-
-// Set applies the tag to an existing Span.
-func (t Tag) Set(s Span) {
-	s.SetTag(t.Key, t.Value)
-}
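All of the StartSpanOption implementations above (SpanReference via ChildOf/FollowsFrom, StartTime, Tags, and Tag) compose freely at StartSpan time. A minimal sketch (parentCtx stands in for a previously extracted SpanContext; nil is safe):

package example

import (
	"time"

	opentracing "github.com/opentracing/opentracing-go"
)

func startChild(tracer opentracing.Tracer, parentCtx opentracing.SpanContext) opentracing.Span {
	return tracer.StartSpan(
		"fetch-feed",
		// ChildOf(nil) is simply dropped by SpanReference.Apply, so the
		// span becomes a trace root when no parent context was extracted.
		opentracing.ChildOf(parentCtx),
		opentracing.Tags{"user_agent": "example/1.0"},
		opentracing.StartTime(time.Now().Add(-10*time.Millisecond)),
		opentracing.Tag{Key: "retry", Value: false},
	)
}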