From 605b0a96aec319ef0af522eca05530cd147414ae Mon Sep 17 00:00:00 2001 From: Matt K <1036969+mkrump@users.noreply.github.com> Date: Thu, 8 Feb 2018 10:12:08 -0600 Subject: [PATCH] Add graphql server (#27) * Add graphql server * Update Makefile * Update log_filters constraint * Add GetLogFilter to repo * Update travis (use Makefile, go fmt, go vet) * Add logFilter schema and resolvers * Add GetWatchedEvent to watched_events_repo * Add watchedEventLog schema and resolvers --- .travis.yml | 15 +- Gopkg.lock | 33 +- Gopkg.toml | 4 + Makefile | 97 +- cmd/addFilter.go | 4 +- cmd/graphql.go | 96 + cmd/root.go | 10 +- ...e_log_filters_to_block_constraint.down.sql | 7 + ...ate_log_filters_to_block_constraint.up.sql | 7 + db/schema.sql | 4 +- filters/example-filter.json | 1 - integration_test/block_rewards_test.go | 4 +- integration_test/contract_test.go | 4 +- pkg/config/config_test.go | 2 +- pkg/contract_summary/console_presenter.go | 2 +- pkg/contract_summary/summary.go | 2 +- pkg/core/watched_event_log.go | 14 + pkg/filters/filter_query.go | 6 +- pkg/filters/filter_test.go | 4 +- pkg/geth/abi_test.go | 4 +- pkg/geth/block_rewards.go | 2 +- pkg/geth/block_to_core_block_test.go | 2 +- pkg/geth/blockchain.go | 4 +- pkg/geth/contract.go | 4 +- pkg/geth/log_to_core_log.go | 2 +- pkg/geth/log_to_core_log_test.go | 4 +- pkg/geth/node/node.go | 2 +- pkg/geth/receipt_to_core_receipt.go | 2 +- pkg/geth/receipt_to_core_receipt_test.go | 4 +- .../graphql_server_suite_test.go | 13 + pkg/graphql_server/schema.go | 162 + pkg/graphql_server/schema_test.go | 168 + pkg/history/populate_blocks_test.go | 12 +- pkg/history/validate_blocks_test.go | 8 +- pkg/repositories/inmemory/in_memory.go | 20 +- pkg/repositories/postgres/block_repository.go | 2 +- .../postgres/block_repository_test.go | 22 +- .../postgres/contract_repository.go | 2 +- .../postgres/contract_repository_test.go | 12 +- .../postgres/log_filter_repository.go | 62 +- .../postgres/log_filter_repository_test.go | 48 +- 
pkg/repositories/postgres/logs.go | 2 +- pkg/repositories/postgres/logs_test.go | 8 +- pkg/repositories/postgres/postgres_test.go | 6 +- .../postgres/receipt_repository.go | 2 +- .../postgres/receipts_repository_test.go | 6 +- pkg/repositories/postgres/watched_events.go | 27 +- .../postgres/watched_events_test.go | 54 +- pkg/repositories/repository.go | 20 +- .../github.com/neelance/graphql-go/.gitignore | 1 + vendor/github.com/neelance/graphql-go/LICENSE | 24 + .../github.com/neelance/graphql-go/README.md | 57 + .../neelance/graphql-go/errors/errors.go | 41 + .../example/starwars/server/server.go | 64 + .../graphql-go/example/starwars/starwars.go | 647 +++ .../neelance/graphql-go/gqltesting/testing.go | 67 + .../github.com/neelance/graphql-go/graphql.go | 185 + .../neelance/graphql-go/graphql_test.go | 1755 ++++++ vendor/github.com/neelance/graphql-go/id.go | 30 + .../graphql-go/internal/common/directive.go | 32 + .../graphql-go/internal/common/lexer.go | 122 + .../graphql-go/internal/common/literals.go | 206 + .../graphql-go/internal/common/types.go | 80 + .../graphql-go/internal/common/values.go | 77 + .../neelance/graphql-go/internal/exec/exec.go | 313 ++ .../graphql-go/internal/exec/packer/packer.go | 367 ++ .../internal/exec/resolvable/meta.go | 58 + .../internal/exec/resolvable/resolvable.go | 331 ++ .../internal/exec/selected/selected.go | 238 + .../graphql-go/internal/query/query.go | 240 + .../graphql-go/internal/schema/meta.go | 190 + .../graphql-go/internal/schema/schema.go | 462 ++ .../graphql-go/internal/tests/all_test.go | 75 + .../graphql-go/internal/tests/empty.go | 1 + .../internal/tests/testdata/LICENSE | 33 + .../internal/tests/testdata/export.js | 110 + .../graphql-go/internal/tests/testdata/gen.go | 4 + .../internal/tests/testdata/tests.json | 4948 +++++++++++++++++ .../internal/validation/suggestion.go | 71 + .../internal/validation/validation.go | 860 +++ .../neelance/graphql-go/introspection.go | 117 + 
.../graphql-go/introspection/introspection.go | 313 ++ .../github.com/neelance/graphql-go/log/log.go | 23 + .../neelance/graphql-go/relay/relay.go | 70 + .../neelance/graphql-go/relay/relay_test.go | 36 + vendor/github.com/neelance/graphql-go/time.go | 36 + .../neelance/graphql-go/trace/trace.go | 80 + .../opentracing/opentracing-go/.gitignore | 13 + .../opentracing/opentracing-go/.travis.yml | 14 + .../opentracing/opentracing-go/CHANGELOG.md | 14 + .../opentracing/opentracing-go/LICENSE | 21 + .../opentracing/opentracing-go/Makefile | 32 + .../opentracing/opentracing-go/README.md | 147 + .../opentracing/opentracing-go/ext/tags.go | 198 + .../opentracing-go/ext/tags_test.go | 148 + .../opentracing-go/globaltracer.go | 32 + .../opentracing/opentracing-go/gocontext.go | 57 + .../opentracing-go/gocontext_test.go | 81 + .../opentracing/opentracing-go/log/field.go | 245 + .../opentracing-go/log/field_test.go | 39 + .../opentracing/opentracing-go/log/util.go | 54 + .../mocktracer/mocklogrecord.go | 105 + .../opentracing-go/mocktracer/mockspan.go | 282 + .../opentracing-go/mocktracer/mocktracer.go | 105 + .../mocktracer/mocktracer_test.go | 268 + .../opentracing-go/mocktracer/propagation.go | 120 + .../opentracing/opentracing-go/noop.go | 64 + .../opentracing-go/options_test.go | 31 + .../opentracing/opentracing-go/propagation.go | 176 + .../opentracing-go/propagation_test.go | 93 + .../opentracing/opentracing-go/span.go | 185 + .../opentracing-go/testtracer_test.go | 138 + .../opentracing/opentracing-go/tracer.go | 305 + 113 files changed, 16180 insertions(+), 153 deletions(-) create mode 100644 cmd/graphql.go create mode 100644 db/migrations/1517854395_update_log_filters_to_block_constraint.down.sql create mode 100644 db/migrations/1517854395_update_log_filters_to_block_constraint.up.sql create mode 100644 pkg/core/watched_event_log.go create mode 100644 pkg/graphql_server/graphql_server_suite_test.go create mode 100644 pkg/graphql_server/schema.go create mode 100644 
pkg/graphql_server/schema_test.go create mode 100644 vendor/github.com/neelance/graphql-go/.gitignore create mode 100644 vendor/github.com/neelance/graphql-go/LICENSE create mode 100644 vendor/github.com/neelance/graphql-go/README.md create mode 100644 vendor/github.com/neelance/graphql-go/errors/errors.go create mode 100644 vendor/github.com/neelance/graphql-go/example/starwars/server/server.go create mode 100644 vendor/github.com/neelance/graphql-go/example/starwars/starwars.go create mode 100644 vendor/github.com/neelance/graphql-go/gqltesting/testing.go create mode 100644 vendor/github.com/neelance/graphql-go/graphql.go create mode 100644 vendor/github.com/neelance/graphql-go/graphql_test.go create mode 100644 vendor/github.com/neelance/graphql-go/id.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/common/directive.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/common/lexer.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/common/literals.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/common/types.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/common/values.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/exec/exec.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/exec/packer/packer.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/exec/resolvable/meta.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/exec/resolvable/resolvable.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/exec/selected/selected.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/query/query.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/schema/meta.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/schema/schema.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/tests/all_test.go create mode 100644 
vendor/github.com/neelance/graphql-go/internal/tests/empty.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/tests/testdata/LICENSE create mode 100644 vendor/github.com/neelance/graphql-go/internal/tests/testdata/export.js create mode 100644 vendor/github.com/neelance/graphql-go/internal/tests/testdata/gen.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/tests/testdata/tests.json create mode 100644 vendor/github.com/neelance/graphql-go/internal/validation/suggestion.go create mode 100644 vendor/github.com/neelance/graphql-go/internal/validation/validation.go create mode 100644 vendor/github.com/neelance/graphql-go/introspection.go create mode 100644 vendor/github.com/neelance/graphql-go/introspection/introspection.go create mode 100644 vendor/github.com/neelance/graphql-go/log/log.go create mode 100644 vendor/github.com/neelance/graphql-go/relay/relay.go create mode 100644 vendor/github.com/neelance/graphql-go/relay/relay_test.go create mode 100644 vendor/github.com/neelance/graphql-go/time.go create mode 100644 vendor/github.com/neelance/graphql-go/trace/trace.go create mode 100644 vendor/github.com/opentracing/opentracing-go/.gitignore create mode 100644 vendor/github.com/opentracing/opentracing-go/.travis.yml create mode 100644 vendor/github.com/opentracing/opentracing-go/CHANGELOG.md create mode 100644 vendor/github.com/opentracing/opentracing-go/LICENSE create mode 100644 vendor/github.com/opentracing/opentracing-go/Makefile create mode 100644 vendor/github.com/opentracing/opentracing-go/README.md create mode 100644 vendor/github.com/opentracing/opentracing-go/ext/tags.go create mode 100644 vendor/github.com/opentracing/opentracing-go/ext/tags_test.go create mode 100644 vendor/github.com/opentracing/opentracing-go/globaltracer.go create mode 100644 vendor/github.com/opentracing/opentracing-go/gocontext.go create mode 100644 vendor/github.com/opentracing/opentracing-go/gocontext_test.go create mode 100644 
vendor/github.com/opentracing/opentracing-go/log/field.go create mode 100644 vendor/github.com/opentracing/opentracing-go/log/field_test.go create mode 100644 vendor/github.com/opentracing/opentracing-go/log/util.go create mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go create mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go create mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go create mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer_test.go create mode 100644 vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go create mode 100644 vendor/github.com/opentracing/opentracing-go/noop.go create mode 100644 vendor/github.com/opentracing/opentracing-go/options_test.go create mode 100644 vendor/github.com/opentracing/opentracing-go/propagation.go create mode 100644 vendor/github.com/opentracing/opentracing-go/propagation_test.go create mode 100644 vendor/github.com/opentracing/opentracing-go/span.go create mode 100644 vendor/github.com/opentracing/opentracing-go/testtracer_test.go create mode 100644 vendor/github.com/opentracing/opentracing-go/tracer.go diff --git a/.travis.yml b/.travis.yml index 559f948d..f29a0415 100644 --- a/.travis.yml +++ b/.travis.yml @@ -10,24 +10,21 @@ addons: go_import_path: github.com/vulcanize/vulcanizedb before_install: - # ginkgo - - go get -u github.com/onsi/ginkgo/ginkgo - # migrate - - go get -u -d github.com/mattes/migrate/cli github.com/lib/pq - - sudo $(which go) build -tags 'postgres' -o /usr/local/bin/migrate github.com/mattes/migrate/cli - - sudo -u postgres createdb vulcanize_private + # ginkgo golint dep migrate + - make installtools # geth - wget https://gethstore.blob.core.windows.net/builds/geth-linux-amd64-1.7.2-1db4ecdc.tar.gz - tar -xzf geth-linux-amd64-1.7.2-1db4ecdc.tar.gz - sudo cp geth-linux-amd64-1.7.2-1db4ecdc/geth /usr/local/bin before_script: - - ./scripts/setup - - 
nohup ./scripts/start_private_blockchain db/schema.sql -migrate: $(MATTESMIGRATE) checkdbvars - migrate -database $(CONNECT_STRING) -path ./db/migrations up +.PHONY: migrate +migrate: $(MIGRATE) checkdbvars + $(MIGRATE) -database $(CONNECT_STRING) -path ./db/migrations up pg_dump -O -s $(CONNECT_STRING) > db/schema.sql +.PHONY: import import: test -n "$(NAME)" # $$NAME psql $(NAME) < db/schema.sql -dep: $(DEP) - dep ensure - -build: dep - go build - -test: $(GINKGO) - ginkgo -r - +#Ethereum createprivate: #!/bin/bash - echo "Deleting test blockchain" + echo "Deleting test node" rm -rf test_data_dir - echo "Creating test blockchain with a new account" + echo "Creating test node with a new account" mkdir test_data_dir geth --dev --datadir test_data_dir --password .private_blockchain_password account new diff --git a/cmd/addFilter.go b/cmd/addFilter.go index 2b83c5a6..a6abf908 100644 --- a/cmd/addFilter.go +++ b/cmd/addFilter.go @@ -5,10 +5,10 @@ import ( "io/ioutil" "log" + "github.com/spf13/cobra" "github.com/vulcanize/vulcanizedb/pkg/filters" "github.com/vulcanize/vulcanizedb/pkg/geth" "github.com/vulcanize/vulcanizedb/utils" - "github.com/spf13/cobra" ) // addFilterCmd represents the addFilter command @@ -67,7 +67,7 @@ func addFilter() { log.Fatal(err) } for _, filter := range logFilters { - err = repository.AddFilter(filter) + err = repository.CreateFilter(filter) if err != nil { log.Fatal(err) } diff --git a/cmd/graphql.go b/cmd/graphql.go new file mode 100644 index 00000000..fc2697f3 --- /dev/null +++ b/cmd/graphql.go @@ -0,0 +1,96 @@ +package cmd + +import ( + "net/http" + _ "net/http/pprof" + + "log" + + "github.com/neelance/graphql-go" + "github.com/neelance/graphql-go/relay" + "github.com/spf13/cobra" + "github.com/vulcanize/vulcanizedb/pkg/geth" + "github.com/vulcanize/vulcanizedb/pkg/graphql_server" + "github.com/vulcanize/vulcanizedb/utils" +) + +var graphqlCmd = &cobra.Command{ + Use: "graphql", + Short: "Starts Vulcanize graphql server", + Long: 
`Starts vulcanize graphql server +and usage of using your command. For example: + +vulcanizedb graphql --port 9090 --host localhost +`, + Run: func(cmd *cobra.Command, args []string) { + schema := parseSchema() + serve(schema) + }, +} + +func init() { + var ( + port int + host string + ) + rootCmd.AddCommand(graphqlCmd) + + syncCmd.Flags().IntVar(&port, "port", 9090, "graphql: port") + syncCmd.Flags().StringVar(&host, "host", "localhost", "graphql: host") + +} + +func parseSchema() *graphql.Schema { + + blockchain := geth.NewBlockchain(ipc) + repository := utils.LoadPostgres(databaseConfig, blockchain.Node()) + schema := graphql.MustParseSchema(graphql_server.Schema, graphql_server.NewResolver(repository)) + return schema + +} + +func serve(schema *graphql.Schema) { + http.Handle("/", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Write(page) + })) + http.Handle("/query", &relay.Handler{Schema: schema}) + + log.Fatal(http.ListenAndServe(":9090", nil)) +} + +var page = []byte(` + + + + + + + + + + +
Loading...
+ + + +`) diff --git a/cmd/root.go b/cmd/root.go index 5fdec651..b30b4f40 100644 --- a/cmd/root.go +++ b/cmd/root.go @@ -4,15 +4,17 @@ import ( "fmt" "os" - "github.com/vulcanize/vulcanizedb/pkg/config" "github.com/mitchellh/go-homedir" "github.com/spf13/cobra" "github.com/spf13/viper" + "github.com/vulcanize/vulcanizedb/pkg/config" ) -var cfgFile string -var databaseConfig config.Database -var ipc string +var ( + cfgFile string + databaseConfig config.Database + ipc string +) var rootCmd = &cobra.Command{ Use: "vulcanizedb", diff --git a/db/migrations/1517854395_update_log_filters_to_block_constraint.down.sql b/db/migrations/1517854395_update_log_filters_to_block_constraint.down.sql new file mode 100644 index 00000000..253d5849 --- /dev/null +++ b/db/migrations/1517854395_update_log_filters_to_block_constraint.down.sql @@ -0,0 +1,7 @@ +BEGIN; +ALTER TABLE log_filters + DROP CONSTRAINT log_filters_to_block_check; + +ALTER TABLE log_filters + ADD CONSTRAINT log_filters_from_block_check1 CHECK (to_block >= 0); +COMMIT; \ No newline at end of file diff --git a/db/migrations/1517854395_update_log_filters_to_block_constraint.up.sql b/db/migrations/1517854395_update_log_filters_to_block_constraint.up.sql new file mode 100644 index 00000000..32fa00cb --- /dev/null +++ b/db/migrations/1517854395_update_log_filters_to_block_constraint.up.sql @@ -0,0 +1,7 @@ +BEGIN; +ALTER TABLE log_filters + DROP CONSTRAINT log_filters_from_block_check1; + +ALTER TABLE log_filters + ADD CONSTRAINT log_filters_to_block_check CHECK (to_block >= 0); +COMMIT; diff --git a/db/schema.sql b/db/schema.sql index 9c9735ae..23a1d533 100644 --- a/db/schema.sql +++ b/db/schema.sql @@ -123,8 +123,8 @@ CREATE TABLE log_filters ( topic2 character varying(66), topic3 character varying(66), CONSTRAINT log_filters_from_block_check CHECK ((from_block >= 0)), - CONSTRAINT log_filters_from_block_check1 CHECK ((from_block >= 0)), - CONSTRAINT log_filters_name_check CHECK (((name)::text <> ''::text)) + CONSTRAINT 
log_filters_name_check CHECK (((name)::text <> ''::text)), + CONSTRAINT log_filters_to_block_check CHECK ((to_block >= 0)) ); diff --git a/filters/example-filter.json b/filters/example-filter.json index b123b0eb..f29e9a95 100644 --- a/filters/example-filter.json +++ b/filters/example-filter.json @@ -7,7 +7,6 @@ }, { "name": "NewFilter", - "toBlock": "0x4B34AA", "fromBlock": "0x4B34AD", "address": "0x06012c8cf97bead5deae237070f9587f8e7a266d", "topics": ["0x241ea03ca20251805084d27d4440371c34a0b85ff108f6bb5611248f73818b80"] diff --git a/integration_test/block_rewards_test.go b/integration_test/block_rewards_test.go index 5cc18efe..1fc9c206 100644 --- a/integration_test/block_rewards_test.go +++ b/integration_test/block_rewards_test.go @@ -3,10 +3,10 @@ package integration import ( "log" - cfg "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/geth" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" + cfg "github.com/vulcanize/vulcanizedb/pkg/config" + "github.com/vulcanize/vulcanizedb/pkg/geth" ) var _ = Describe("Rewards calculations", func() { diff --git a/integration_test/contract_test.go b/integration_test/contract_test.go index d6deb549..c710a7be 100644 --- a/integration_test/contract_test.go +++ b/integration_test/contract_test.go @@ -5,12 +5,12 @@ import ( "log" + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" cfg "github.com/vulcanize/vulcanizedb/pkg/config" "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/vulcanize/vulcanizedb/pkg/geth" "github.com/vulcanize/vulcanizedb/pkg/geth/testing" - . "github.com/onsi/ginkgo" - . "github.com/onsi/gomega" ) var _ = Describe("Reading contracts", func() { diff --git a/pkg/config/config_test.go b/pkg/config/config_test.go index 89936906..61af9ab7 100644 --- a/pkg/config/config_test.go +++ b/pkg/config/config_test.go @@ -3,9 +3,9 @@ package config_test import ( "path/filepath" - cfg "github.com/vulcanize/vulcanizedb/pkg/config" . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" + cfg "github.com/vulcanize/vulcanizedb/pkg/config" ) var _ = Describe("Loading the config", func() { diff --git a/pkg/contract_summary/console_presenter.go b/pkg/contract_summary/console_presenter.go index 6197ed3d..b475450a 100644 --- a/pkg/contract_summary/console_presenter.go +++ b/pkg/contract_summary/console_presenter.go @@ -3,8 +3,8 @@ package contract_summary import ( "fmt" - "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/ethereum/go-ethereum/common" + "github.com/vulcanize/vulcanizedb/pkg/core" ) func GenerateConsoleOutput(summary ContractSummary) string { diff --git a/pkg/contract_summary/summary.go b/pkg/contract_summary/summary.go index fc15aea0..58908e24 100644 --- a/pkg/contract_summary/summary.go +++ b/pkg/contract_summary/summary.go @@ -18,7 +18,7 @@ type ContractSummary struct { } func NewSummary(blockchain core.Blockchain, repository repositories.Repository, contractHash string, blockNumber *big.Int) (ContractSummary, error) { - contract, err := repository.FindContract(contractHash) + contract, err := repository.GetContract(contractHash) if err != nil { return ContractSummary{}, err } else { diff --git a/pkg/core/watched_event_log.go b/pkg/core/watched_event_log.go new file mode 100644 index 00000000..9bf23a18 --- /dev/null +++ b/pkg/core/watched_event_log.go @@ -0,0 +1,14 @@ +package core + +type WatchedEvent struct { + Name string `json:"name"` // name + BlockNumber int64 `json:"block_number" db:"block_number"` // block_number + Address string `json:"address"` // address + TxHash string `json:"tx_hash" db:"tx_hash"` // tx_hash + Index int64 `json:"index"` // index + Topic0 string `json:"topic0"` // topic0 + Topic1 string `json:"topic1"` // topic1 + Topic2 string `json:"topic2"` // topic2 + Topic3 string `json:"topic3"` // topic3 + Data string `json:"data"` // data +} diff --git a/pkg/filters/filter_query.go b/pkg/filters/filter_query.go index 7070edee..cd02712a 100644 --- a/pkg/filters/filter_query.go +++ 
b/pkg/filters/filter_query.go @@ -5,17 +5,17 @@ import ( "errors" - "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" + "github.com/vulcanize/vulcanizedb/pkg/core" ) type LogFilters []LogFilter type LogFilter struct { Name string `json:"name"` - FromBlock int64 `json:"fromBlock"` - ToBlock int64 `json:"toBlock"` + FromBlock int64 `json:"fromBlock" db:"from_block"` + ToBlock int64 `json:"toBlock" db:"to_block"` Address string `json:"address"` core.Topics `json:"topics"` } diff --git a/pkg/filters/filter_test.go b/pkg/filters/filter_test.go index 3112afd6..474f0311 100644 --- a/pkg/filters/filter_test.go +++ b/pkg/filters/filter_test.go @@ -3,10 +3,10 @@ package filters_test import ( "encoding/json" - "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/filters" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/filters" ) var _ = Describe("Log filters", func() { diff --git a/pkg/geth/abi_test.go b/pkg/geth/abi_test.go index f930661d..436f1d6b 100644 --- a/pkg/geth/abi_test.go +++ b/pkg/geth/abi_test.go @@ -9,12 +9,12 @@ import ( "log" - cfg "github.com/vulcanize/vulcanizedb/pkg/config" - "github.com/vulcanize/vulcanizedb/pkg/geth" "github.com/ethereum/go-ethereum/accounts/abi" . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" "github.com/onsi/gomega/ghttp" + cfg "github.com/vulcanize/vulcanizedb/pkg/config" + "github.com/vulcanize/vulcanizedb/pkg/geth" ) var _ = Describe("ABI files", func() { diff --git a/pkg/geth/block_rewards.go b/pkg/geth/block_rewards.go index 9d9a439d..c26c67c4 100644 --- a/pkg/geth/block_rewards.go +++ b/pkg/geth/block_rewards.go @@ -1,9 +1,9 @@ package geth import ( - "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/params" + "github.com/vulcanize/vulcanizedb/pkg/core" ) func CalcUnclesReward(block core.Block, uncles []*types.Header) float64 { diff --git a/pkg/geth/block_to_core_block_test.go b/pkg/geth/block_to_core_block_test.go index aae8e2c8..06fbbcff 100644 --- a/pkg/geth/block_to_core_block_test.go +++ b/pkg/geth/block_to_core_block_test.go @@ -5,12 +5,12 @@ import ( "context" - "github.com/vulcanize/vulcanizedb/pkg/geth" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/core/types" . "github.com/onsi/ginkgo" . 
"github.com/onsi/gomega" + "github.com/vulcanize/vulcanizedb/pkg/geth" ) type FakeGethClient struct { diff --git a/pkg/geth/blockchain.go b/pkg/geth/blockchain.go index 169698a9..8c040539 100644 --- a/pkg/geth/blockchain.go +++ b/pkg/geth/blockchain.go @@ -7,13 +7,13 @@ import ( "log" - "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/geth/node" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/ethclient" "github.com/ethereum/go-ethereum/rpc" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/geth/node" "golang.org/x/net/context" ) diff --git a/pkg/geth/contract.go b/pkg/geth/contract.go index 4900ee71..b7db4500 100644 --- a/pkg/geth/contract.go +++ b/pkg/geth/contract.go @@ -8,9 +8,9 @@ import ( "context" "math/big" - "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/ethereum/go-ethereum" "github.com/ethereum/go-ethereum/common" + "github.com/vulcanize/vulcanizedb/pkg/core" ) var ( @@ -50,7 +50,7 @@ func (blockchain *Blockchain) GetAttributes(contract core.Contract) (core.Contra for _, abiElement := range parsed.Methods { if (len(abiElement.Outputs) > 0) && (len(abiElement.Inputs) == 0) && abiElement.Const { attributeType := abiElement.Outputs[0].Type.String() - contractAttributes = append(contractAttributes, core.ContractAttribute{abiElement.Name, attributeType}) + contractAttributes = append(contractAttributes, core.ContractAttribute{Name: abiElement.Name, Type: attributeType}) } } sort.Sort(contractAttributes) diff --git a/pkg/geth/log_to_core_log.go b/pkg/geth/log_to_core_log.go index 380ffc85..bc83bf14 100644 --- a/pkg/geth/log_to_core_log.go +++ b/pkg/geth/log_to_core_log.go @@ -3,10 +3,10 @@ package geth import ( "strings" - "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" 
"github.com/ethereum/go-ethereum/core/types" + "github.com/vulcanize/vulcanizedb/pkg/core" ) func ToCoreLogs(gethLogs []types.Log) []core.Log { diff --git a/pkg/geth/log_to_core_log_test.go b/pkg/geth/log_to_core_log_test.go index 3601672c..6da8fb29 100644 --- a/pkg/geth/log_to_core_log_test.go +++ b/pkg/geth/log_to_core_log_test.go @@ -3,13 +3,13 @@ package geth_test import ( "strings" - "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/geth" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/core/types" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/geth" ) var _ = Describe("Conversion of GethLog to core.Log", func() { diff --git a/pkg/geth/node/node.go b/pkg/geth/node/node.go index c47e34bb..e5102cc7 100644 --- a/pkg/geth/node/node.go +++ b/pkg/geth/node/node.go @@ -5,10 +5,10 @@ import ( "strconv" - "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/ethereum/go-ethereum/core/types" "github.com/ethereum/go-ethereum/p2p" "github.com/ethereum/go-ethereum/rpc" + "github.com/vulcanize/vulcanizedb/pkg/core" ) func Info(client *rpc.Client) core.Node { diff --git a/pkg/geth/receipt_to_core_receipt.go b/pkg/geth/receipt_to_core_receipt.go index b94ddb75..57b0ed92 100644 --- a/pkg/geth/receipt_to_core_receipt.go +++ b/pkg/geth/receipt_to_core_receipt.go @@ -5,10 +5,10 @@ import ( "bytes" - "github.com/vulcanize/vulcanizedb/pkg/core" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/core/types" + "github.com/vulcanize/vulcanizedb/pkg/core" ) func BigTo64(n *big.Int) int64 { diff --git a/pkg/geth/receipt_to_core_receipt_test.go b/pkg/geth/receipt_to_core_receipt_test.go index 596e4c9d..4c87dad3 100644 --- a/pkg/geth/receipt_to_core_receipt_test.go +++ b/pkg/geth/receipt_to_core_receipt_test.go @@ 
-3,13 +3,13 @@ package geth_test import ( "math/big" - "github.com/vulcanize/vulcanizedb/pkg/core" - "github.com/vulcanize/vulcanizedb/pkg/geth" "github.com/ethereum/go-ethereum/common" "github.com/ethereum/go-ethereum/common/hexutil" "github.com/ethereum/go-ethereum/core/types" . "github.com/onsi/ginkgo" . "github.com/onsi/gomega" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/geth" ) var _ = Describe("Conversion of GethReceipt to core.Receipt", func() { diff --git a/pkg/graphql_server/graphql_server_suite_test.go b/pkg/graphql_server/graphql_server_suite_test.go new file mode 100644 index 00000000..a7a3b735 --- /dev/null +++ b/pkg/graphql_server/graphql_server_suite_test.go @@ -0,0 +1,13 @@ +package graphql_server_test + +import ( + "testing" + + . "github.com/onsi/ginkgo" + . "github.com/onsi/gomega" +) + +func TestGraphqlServer(t *testing.T) { + RegisterFailHandler(Fail) + RunSpecs(t, "GraphqlServer Suite") +} diff --git a/pkg/graphql_server/schema.go b/pkg/graphql_server/schema.go new file mode 100644 index 00000000..3bd9f9fe --- /dev/null +++ b/pkg/graphql_server/schema.go @@ -0,0 +1,162 @@ +package graphql_server + +import ( + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/filters" + "github.com/vulcanize/vulcanizedb/pkg/repositories" +) + +var Schema = ` + schema { + query: Query + } + type Query { + logFilter(name: String!): LogFilter + watchedEvents(name: String!): WatchedEventList + } + + type LogFilter { + name: String! + fromBlock: Int + toBlock: Int + address: String! + topics: [String]! + } + + type WatchedEventList{ + total: Int! + watchedEvents: [WatchedEvent]! + } + + type WatchedEvent { + name: String! + blockNumber: Int! + address: String! + tx_hash: String! + topic0: String! + topic1: String! + topic2: String! + topic3: String! + data: String! 
+ } +` + +type Resolver struct { + repository repositories.Repository +} + +func NewResolver(repository repositories.Repository) *Resolver { + return &Resolver{repository: repository} +} + +func (r *Resolver) LogFilter(args struct { + Name string +}) (*logFilterResolver, error) { + logFilter, err := r.repository.GetFilter(args.Name) + if err != nil { + return &logFilterResolver{}, err + } + return &logFilterResolver{&logFilter}, nil +} + +type logFilterResolver struct { + lf *filters.LogFilter +} + +func (lfr *logFilterResolver) Name() string { + return lfr.lf.Name +} + +func (lfr *logFilterResolver) FromBlock() *int32 { + fromBlock := int32(lfr.lf.FromBlock) + return &fromBlock +} + +func (lfr *logFilterResolver) ToBlock() *int32 { + toBlock := int32(lfr.lf.ToBlock) + return &toBlock +} + +func (lfr *logFilterResolver) Address() string { + return lfr.lf.Address +} + +func (lfr *logFilterResolver) Topics() []*string { + var topics = make([]*string, 4) + for i := range topics { + if lfr.lf.Topics[i] != "" { + topics[i] = &lfr.lf.Topics[i] + } + } + return topics +} + +func (r *Resolver) WatchedEvents(args struct { + Name string +}) (*watchedEventsResolver, error) { + watchedEvents, err := r.repository.GetWatchedEvents(args.Name) + if err != nil { + return &watchedEventsResolver{}, err + } + return &watchedEventsResolver{watchedEvents: watchedEvents}, err +} + +type watchedEventsResolver struct { + watchedEvents []*core.WatchedEvent +} + +func (wesr watchedEventsResolver) WatchedEvents() []*watchedEventResolver { + return resolveWatchedEvents(wesr.watchedEvents) +} + +func (wesr watchedEventsResolver) Total() int32 { + return int32(len(wesr.watchedEvents)) +} + +func resolveWatchedEvents(watchedEvents []*core.WatchedEvent) []*watchedEventResolver { + watchedEventResolvers := make([]*watchedEventResolver, 0) + for _, watchedEvent := range watchedEvents { + watchedEventResolvers = append(watchedEventResolvers, &watchedEventResolver{watchedEvent}) + } + return 
watchedEventResolvers +} + +type watchedEventResolver struct { + we *core.WatchedEvent +} + +func (wer watchedEventResolver) Name() string { + return wer.we.Name +} + +func (wer watchedEventResolver) BlockNumber() int32 { + return int32(wer.we.BlockNumber) +} + +func (wer watchedEventResolver) Address() string { + return wer.we.Address +} + +func (wer watchedEventResolver) TxHash() string { + return wer.we.TxHash +} + +func (wer watchedEventResolver) Topic0() string { + return wer.we.Topic0 +} + +func (wer watchedEventResolver) Topic1() string { + return wer.we.Topic1 +} + +func (wer watchedEventResolver) Topic2() string { + return wer.we.Topic2 +} + +func (wer watchedEventResolver) Topic3() string { + return wer.we.Topic3 +} + +func (wer watchedEventResolver) Data() string { + return wer.we.Data +} diff --git a/pkg/graphql_server/schema_test.go b/pkg/graphql_server/schema_test.go new file mode 100644 index 00000000..dff3eb29 --- /dev/null +++ b/pkg/graphql_server/schema_test.go @@ -0,0 +1,168 @@ +package graphql_server_test + +import ( + "log" + + "encoding/json" + + "context" + + "github.com/neelance/graphql-go" + . "github.com/onsi/ginkgo" + . 
"github.com/onsi/gomega" + "github.com/vulcanize/vulcanizedb/pkg/config" + "github.com/vulcanize/vulcanizedb/pkg/core" + "github.com/vulcanize/vulcanizedb/pkg/filters" + "github.com/vulcanize/vulcanizedb/pkg/graphql_server" + "github.com/vulcanize/vulcanizedb/pkg/repositories" + "github.com/vulcanize/vulcanizedb/pkg/repositories/postgres" +) + +func formatJSON(data []byte) []byte { + var v interface{} + if err := json.Unmarshal(data, &v); err != nil { + log.Fatalf("invalid JSON: %s", err) + } + formatted, err := json.Marshal(v) + if err != nil { + log.Fatal(err) + } + return formatted +} + +var _ = Describe("GraphQL", func() { + var cfg config.Config + var repository repositories.Repository + + BeforeEach(func() { + + cfg, _ = config.NewConfig("private") + node := core.Node{GenesisBlock: "GENESIS", NetworkId: 1, Id: "x123", ClientName: "geth"} + repository = postgres.BuildRepository(node) + e := repository.CreateFilter(filters.LogFilter{ + Name: "TestFilter1", + FromBlock: 1, + ToBlock: 10, + Address: "0x123456789", + Topics: core.Topics{0: "topic=1", 2: "topic=2"}, + }) + if e != nil { + log.Fatal(e) + } + f, e := repository.GetFilter("TestFilter1") + if e != nil { + log.Println(f) + log.Fatal(e) + } + + matchingEvent := core.Log{ + BlockNumber: 5, + TxHash: "0xTX1", + Address: "0x123456789", + Topics: core.Topics{0: "topic=1", 2: "topic=2"}, + Index: 0, + Data: "0xDATADATADATA", + } + nonMatchingEvent := core.Log{ + BlockNumber: 5, + TxHash: "0xTX2", + Address: "0xOTHERADDRESS", + Topics: core.Topics{0: "topic=1", 2: "topic=2"}, + Index: 0, + Data: "0xDATADATADATA", + } + e = repository.CreateLogs([]core.Log{matchingEvent, nonMatchingEvent}) + if e != nil { + log.Fatal(e) + } + }) + + It("Queries example schema for specific log filter", func() { + var variables map[string]interface{} + r := graphql_server.NewResolver(repository) + var schema = graphql.MustParseSchema(graphql_server.Schema, r) + response := schema.Exec(context.Background(), + `{ + logFilter(name: 
"TestFilter1") { + name + fromBlock + toBlock + address + topics + } + }`, + "", + variables) + expected := `{ + "logFilter": { + "name": "TestFilter1", + "fromBlock": 1, + "toBlock": 10, + "address": "0x123456789", + "topics": ["topic=1", null, "topic=2", null] + } + }` + var v interface{} + if len(response.Errors) != 0 { + log.Fatal(response.Errors) + } + err := json.Unmarshal(response.Data, &v) + Expect(err).ToNot(HaveOccurred()) + a := formatJSON(response.Data) + e := formatJSON([]byte(expected)) + Expect(a).To(Equal(e)) + }) + + It("Queries example schema for specific watched event log", func() { + var variables map[string]interface{} + + r := graphql_server.NewResolver(repository) + var schema = graphql.MustParseSchema(graphql_server.Schema, r) + response := schema.Exec(context.Background(), + `{ + watchedEvents(name: "TestFilter1") { + total + watchedEvents{ + name + blockNumber + address + tx_hash + topic0 + topic1 + topic2 + topic3 + data + } + } + }`, + "", + variables) + expected := `{ + "watchedEvents": + { + "total": 1, + "watchedEvents": [ + {"name":"TestFilter1", + "blockNumber": 5, + "address": "0x123456789", + "tx_hash": "0xTX1", + "topic0": "topic=1", + "topic1": "", + "topic2": "topic=2", + "topic3": "", + "data": "0xDATADATADATA" + } + ] + } + }` + var v interface{} + if len(response.Errors) != 0 { + log.Fatal(response.Errors) + } + err := json.Unmarshal(response.Data, &v) + Expect(err).ToNot(HaveOccurred()) + a := formatJSON(response.Data) + e := formatJSON([]byte(expected)) + Expect(a).To(Equal(e)) + }) +}) diff --git a/pkg/history/populate_blocks_test.go b/pkg/history/populate_blocks_test.go index 79b729bb..fb4670c2 100644 --- a/pkg/history/populate_blocks_test.go +++ b/pkg/history/populate_blocks_test.go @@ -21,7 +21,7 @@ var _ = Describe("Populating blocks", func() { repository.CreateOrUpdateBlock(core.Block{Number: 2}) blocksAdded := history.PopulateMissingBlocks(blockchain, repository, 1) - _, err := repository.FindBlockByNumber(1) + _, 
err := repository.GetBlock(1) Expect(blocksAdded).To(Equal(1)) Expect(err).ToNot(HaveOccurred()) @@ -54,15 +54,15 @@ var _ = Describe("Populating blocks", func() { Expect(blocksAdded).To(Equal(3)) Expect(repository.BlockCount()).To(Equal(11)) - _, err := repository.FindBlockByNumber(4) + _, err := repository.GetBlock(4) Expect(err).To(HaveOccurred()) - _, err = repository.FindBlockByNumber(5) + _, err = repository.GetBlock(5) Expect(err).ToNot(HaveOccurred()) - _, err = repository.FindBlockByNumber(8) + _, err = repository.GetBlock(8) Expect(err).ToNot(HaveOccurred()) - _, err = repository.FindBlockByNumber(10) + _, err = repository.GetBlock(10) Expect(err).ToNot(HaveOccurred()) - _, err = repository.FindBlockByNumber(13) + _, err = repository.GetBlock(13) Expect(err).To(HaveOccurred()) }) diff --git a/pkg/history/validate_blocks_test.go b/pkg/history/validate_blocks_test.go index d517cb0f..2ff781ab 100644 --- a/pkg/history/validate_blocks_test.go +++ b/pkg/history/validate_blocks_test.go @@ -36,7 +36,7 @@ var _ = Describe("Blocks validator", func() { }) It("returns the window size", func() { - window := history.ValidationWindow{1, 3} + window := history.ValidationWindow{LowerBound: 1, UpperBound: 3} Expect(window.Size()).To(Equal(2)) }) @@ -51,15 +51,15 @@ var _ = Describe("Blocks validator", func() { validator := history.NewBlockValidator(blockchain, repository, 2) window := validator.ValidateBlocks() - Expect(window).To(Equal(history.ValidationWindow{5, 7})) + Expect(window).To(Equal(history.ValidationWindow{LowerBound: 5, UpperBound: 7})) Expect(repository.BlockCount()).To(Equal(2)) Expect(repository.CreateOrUpdateBlockCallCount).To(Equal(2)) }) It("logs window message", func() { expectedMessage := &bytes.Buffer{} - window := history.ValidationWindow{5, 7} - history.ParsedWindowTemplate.Execute(expectedMessage, history.ValidationWindow{5, 7}) + window := history.ValidationWindow{LowerBound: 5, UpperBound: 7} + 
history.ParsedWindowTemplate.Execute(expectedMessage, history.ValidationWindow{LowerBound: 5, UpperBound: 7}) blockchain := fakes.NewBlockchainWithBlocks([]core.Block{}) repository := inmemory.NewInMemory() diff --git a/pkg/repositories/inmemory/in_memory.go b/pkg/repositories/inmemory/in_memory.go index 74c8e37a..9b3f5b33 100644 --- a/pkg/repositories/inmemory/in_memory.go +++ b/pkg/repositories/inmemory/in_memory.go @@ -23,7 +23,15 @@ type InMemory struct { CreateOrUpdateBlockCallCount int } -func (repository *InMemory) AddFilter(filter filters.LogFilter) error { +func (repository *InMemory) GetWatchedEvents(name string) ([]*core.WatchedEvent, error) { + panic("implement me") +} + +func (repository *InMemory) GetFilter(name string) (filters.LogFilter, error) { + panic("implement me") +} + +func (repository *InMemory) CreateFilter(filter filters.LogFilter) error { key := filter.Name if _, ok := repository.logFilters[key]; ok || key == "" { return errors.New("filter name not unique") @@ -43,7 +51,7 @@ func NewInMemory() *InMemory { } } -func (repository *InMemory) FindReceipt(txHash string) (core.Receipt, error) { +func (repository *InMemory) GetReceipt(txHash string) (core.Receipt, error) { if receipt, ok := repository.receipts[txHash]; ok { return receipt, nil } @@ -62,14 +70,14 @@ func (repository *InMemory) SetBlocksStatus(chainHead int64) { func (repository *InMemory) CreateLogs(logs []core.Log) error { for _, log := range logs { - key := fmt.Sprintf("%s%s", log.BlockNumber, log.Index) + key := fmt.Sprintf("%d%d", log.BlockNumber, log.Index) var logs []core.Log repository.logs[key] = append(logs, log) } return nil } -func (repository *InMemory) FindLogs(address string, blockNumber int64) []core.Log { +func (repository *InMemory) GetLogs(address string, blockNumber int64) []core.Log { var matchingLogs []core.Log for _, logs := range repository.logs { for _, log := range logs { @@ -91,7 +99,7 @@ func (repository *InMemory) ContractExists(contractHash string) 
bool { return present } -func (repository *InMemory) FindContract(contractHash string) (core.Contract, error) { +func (repository *InMemory) GetContract(contractHash string) (core.Contract, error) { contract, ok := repository.contracts[contractHash] if !ok { return core.Contract{}, repositories.ErrContractDoesNotExist(contractHash) @@ -130,7 +138,7 @@ func (repository *InMemory) BlockCount() int { return len(repository.blocks) } -func (repository *InMemory) FindBlockByNumber(blockNumber int64) (core.Block, error) { +func (repository *InMemory) GetBlock(blockNumber int64) (core.Block, error) { if block, ok := repository.blocks[blockNumber]; ok { return block, nil } diff --git a/pkg/repositories/postgres/block_repository.go b/pkg/repositories/postgres/block_repository.go index 448283fe..d7b7eb13 100644 --- a/pkg/repositories/postgres/block_repository.go +++ b/pkg/repositories/postgres/block_repository.go @@ -56,7 +56,7 @@ func (db DB) MissingBlockNumbers(startingBlockNumber int64, highestBlockNumber i return numbers } -func (db DB) FindBlockByNumber(blockNumber int64) (core.Block, error) { +func (db DB) GetBlock(blockNumber int64) (core.Block, error) { blockRows := db.DB.QueryRowx( `SELECT id, number, diff --git a/pkg/repositories/postgres/block_repository_test.go b/pkg/repositories/postgres/block_repository_test.go index 2d7f7327..0254586f 100644 --- a/pkg/repositories/postgres/block_repository_test.go +++ b/pkg/repositories/postgres/block_repository_test.go @@ -36,7 +36,7 @@ var _ = Describe("Saving blocks", func() { } repositoryTwo := postgres.BuildRepository(nodeTwo) - _, err := repositoryTwo.FindBlockByNumber(123) + _, err := repositoryTwo.GetBlock(123) Expect(err).To(HaveOccurred()) }) @@ -74,7 +74,7 @@ var _ = Describe("Saving blocks", func() { repository.CreateOrUpdateBlock(block) - savedBlock, err := repository.FindBlockByNumber(blockNumber) + savedBlock, err := repository.GetBlock(blockNumber) Expect(err).NotTo(HaveOccurred()) 
Expect(savedBlock.Reward).To(Equal(blockReward)) Expect(savedBlock.Difficulty).To(Equal(difficulty)) @@ -93,7 +93,7 @@ var _ = Describe("Saving blocks", func() { }) It("does not find a block when searching for a number that does not exist", func() { - _, err := repository.FindBlockByNumber(111) + _, err := repository.GetBlock(111) Expect(err).To(HaveOccurred()) }) @@ -106,7 +106,7 @@ var _ = Describe("Saving blocks", func() { repository.CreateOrUpdateBlock(block) - savedBlock, _ := repository.FindBlockByNumber(123) + savedBlock, _ := repository.GetBlock(123) Expect(len(savedBlock.Transactions)).To(Equal(1)) }) @@ -118,7 +118,7 @@ var _ = Describe("Saving blocks", func() { repository.CreateOrUpdateBlock(block) - savedBlock, _ := repository.FindBlockByNumber(123) + savedBlock, _ := repository.GetBlock(123) Expect(len(savedBlock.Transactions)).To(Equal(2)) }) @@ -138,7 +138,7 @@ var _ = Describe("Saving blocks", func() { repository.CreateOrUpdateBlock(blockOne) repository.CreateOrUpdateBlock(blockTwo) - savedBlock, _ := repository.FindBlockByNumber(123) + savedBlock, _ := repository.GetBlock(123) Expect(len(savedBlock.Transactions)).To(Equal(2)) Expect(savedBlock.Transactions[0].Hash).To(Equal("x678")) Expect(savedBlock.Transactions[1].Hash).To(Equal("x9ab")) @@ -163,8 +163,8 @@ var _ = Describe("Saving blocks", func() { repository.CreateOrUpdateBlock(blockOne) repositoryTwo.CreateOrUpdateBlock(blockTwo) - retrievedBlockOne, _ := repository.FindBlockByNumber(123) - retrievedBlockTwo, _ := repositoryTwo.FindBlockByNumber(123) + retrievedBlockOne, _ := repository.GetBlock(123) + retrievedBlockTwo, _ := repositoryTwo.GetBlock(123) Expect(retrievedBlockOne.Transactions[0].Hash).To(Equal("x123")) Expect(retrievedBlockTwo.Transactions[0].Hash).To(Equal("x678")) @@ -196,7 +196,7 @@ var _ = Describe("Saving blocks", func() { repository.CreateOrUpdateBlock(block) - savedBlock, _ := repository.FindBlockByNumber(123) + savedBlock, _ := repository.GetBlock(123) 
Expect(len(savedBlock.Transactions)).To(Equal(1)) savedTransaction := savedBlock.Transactions[0] Expect(savedTransaction.Data).To(Equal(transaction.Data)) @@ -271,10 +271,10 @@ var _ = Describe("Saving blocks", func() { repository.SetBlocksStatus(int64(blockNumberOfChainHead)) - blockOne, err := repository.FindBlockByNumber(1) + blockOne, err := repository.GetBlock(1) Expect(err).ToNot(HaveOccurred()) Expect(blockOne.IsFinal).To(Equal(true)) - blockTwo, err := repository.FindBlockByNumber(24) + blockTwo, err := repository.GetBlock(24) Expect(err).ToNot(HaveOccurred()) Expect(blockTwo.IsFinal).To(BeFalse()) }) diff --git a/pkg/repositories/postgres/contract_repository.go b/pkg/repositories/postgres/contract_repository.go index a6b9ea96..c5069524 100644 --- a/pkg/repositories/postgres/contract_repository.go +++ b/pkg/repositories/postgres/contract_repository.go @@ -36,7 +36,7 @@ func (db DB) ContractExists(contractHash string) bool { return exists } -func (db DB) FindContract(contractHash string) (core.Contract, error) { +func (db DB) GetContract(contractHash string) (core.Contract, error) { var hash string var abi string contract := db.DB.QueryRow( diff --git a/pkg/repositories/postgres/contract_repository_test.go b/pkg/repositories/postgres/contract_repository_test.go index 0d23d2a9..66b87f02 100644 --- a/pkg/repositories/postgres/contract_repository_test.go +++ b/pkg/repositories/postgres/contract_repository_test.go @@ -25,7 +25,7 @@ var _ = Describe("Creating contracts", func() { It("returns the contract when it exists", func() { repository.CreateContract(core.Contract{Hash: "x123"}) - contract, err := repository.FindContract("x123") + contract, err := repository.GetContract("x123") Expect(err).NotTo(HaveOccurred()) Expect(contract.Hash).To(Equal("x123")) @@ -34,13 +34,13 @@ var _ = Describe("Creating contracts", func() { }) It("returns err if contract does not exist", func() { - _, err := repository.FindContract("x123") + _, err := repository.GetContract("x123") 
Expect(err).To(HaveOccurred()) }) It("returns empty array when no transactions 'To' a contract", func() { repository.CreateContract(core.Contract{Hash: "x123"}) - contract, err := repository.FindContract("x123") + contract, err := repository.GetContract("x123") Expect(err).ToNot(HaveOccurred()) Expect(contract.Transactions).To(BeEmpty()) }) @@ -59,7 +59,7 @@ var _ = Describe("Creating contracts", func() { blockRepository.CreateOrUpdateBlock(block) repository.CreateContract(core.Contract{Hash: "x123"}) - contract, err := repository.FindContract("x123") + contract, err := repository.GetContract("x123") Expect(err).ToNot(HaveOccurred()) sort.Slice(contract.Transactions, func(i, j int) bool { return contract.Transactions[i].Hash < contract.Transactions[j].Hash @@ -76,7 +76,7 @@ var _ = Describe("Creating contracts", func() { Abi: "{\"some\": \"json\"}", Hash: "x123", }) - contract, err := repository.FindContract("x123") + contract, err := repository.GetContract("x123") Expect(err).ToNot(HaveOccurred()) Expect(contract.Abi).To(Equal("{\"some\": \"json\"}")) }) @@ -90,7 +90,7 @@ var _ = Describe("Creating contracts", func() { Abi: "{\"some\": \"different json\"}", Hash: "x123", }) - contract, err := repository.FindContract("x123") + contract, err := repository.GetContract("x123") Expect(err).ToNot(HaveOccurred()) Expect(contract.Abi).To(Equal("{\"some\": \"different json\"}")) }) diff --git a/pkg/repositories/postgres/log_filter_repository.go b/pkg/repositories/postgres/log_filter_repository.go index 46bc18c7..c2b6c99f 100644 --- a/pkg/repositories/postgres/log_filter_repository.go +++ b/pkg/repositories/postgres/log_filter_repository.go @@ -1,8 +1,16 @@ package postgres -import "github.com/vulcanize/vulcanizedb/pkg/filters" +import ( + "database/sql" -func (db DB) AddFilter(query filters.LogFilter) error { + "encoding/json" + "errors" + + "github.com/vulcanize/vulcanizedb/pkg/filters" + "github.com/vulcanize/vulcanizedb/pkg/repositories" +) + +func (db DB) 
CreateFilter(query filters.LogFilter) error { _, err := db.DB.Exec( `INSERT INTO log_filters (name, from_block, to_block, address, topic0, topic1, topic2, topic3) @@ -13,3 +21,53 @@ func (db DB) AddFilter(query filters.LogFilter) error { } return nil } + +func (db DB) GetFilter(name string) (filters.LogFilter, error) { + lf := DBLogFilter{} + err := db.DB.Get(&lf, + `SELECT + id, + name, + from_block, + to_block, + address, + json_build_array(topic0, topic1, topic2, topic3) AS topics + FROM log_filters + WHERE name = $1`, name) + if err != nil { + switch err { + case sql.ErrNoRows: + return filters.LogFilter{}, repositories.ErrFilterDoesNotExist(name) + default: + return filters.LogFilter{}, err + } + } + dbLogFilterToCoreLogFilter(lf) + return *lf.LogFilter, nil +} + +type DBTopics []*string + +func (t *DBTopics) Scan(src interface{}) error { + asBytes, ok := src.([]byte) + if !ok { + return errors.New("scan source was not []byte") + } + err := json.Unmarshal(asBytes, &t) + + return err +} + +type DBLogFilter struct { + ID int + *filters.LogFilter + Topics DBTopics +} + +func dbLogFilterToCoreLogFilter(lf DBLogFilter) { + for i, v := range lf.Topics { + if v != nil { + lf.LogFilter.Topics[i] = *v + } + } +} diff --git a/pkg/repositories/postgres/log_filter_repository_test.go b/pkg/repositories/postgres/log_filter_repository_test.go index 12f927f7..51692d4f 100644 --- a/pkg/repositories/postgres/log_filter_repository_test.go +++ b/pkg/repositories/postgres/log_filter_repository_test.go @@ -37,7 +37,7 @@ var _ = Describe("Logs Repository", func() { "", }, } - err := repository.AddFilter(logFilter) + err := repository.CreateFilter(logFilter) Expect(err).ToNot(HaveOccurred()) }) @@ -54,8 +54,52 @@ var _ = Describe("Logs Repository", func() { "", }, } - err := repository.AddFilter(logFilter) + err := repository.CreateFilter(logFilter) Expect(err).To(HaveOccurred()) }) + + It("gets a log filter", func() { + + logFilter1 := filters.LogFilter{ + Name: "TestFilter1", + 
FromBlock: 1, + ToBlock: 2, + Address: "0x8888f1f195afa192cfee860698584c030f4c9db1", + Topics: core.Topics{ + "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", + "", + "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", + "", + }, + } + err := repository.CreateFilter(logFilter1) + Expect(err).ToNot(HaveOccurred()) + logFilter2 := filters.LogFilter{ + Name: "TestFilter2", + FromBlock: 10, + ToBlock: 20, + Address: "0x8888f1f195afa192cfee860698584c030f4c9db1", + Topics: core.Topics{ + "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", + "", + "0x000000000000000000000000a94f5374fce5edbc8e2a8697c15331677e6ebf0b", + "", + }, + } + err = repository.CreateFilter(logFilter2) + Expect(err).ToNot(HaveOccurred()) + + retrievedFilter1, err := repository.GetFilter("TestFilter1") + Expect(err).ToNot(HaveOccurred()) + Expect(retrievedFilter1).To(Equal(logFilter1)) + retrievedFilter2, err := repository.GetFilter("TestFilter2") + Expect(err).ToNot(HaveOccurred()) + Expect(retrievedFilter2).To(Equal(logFilter2)) + }) + + It("returns ErrFilterDoesNotExist error when filter does not exist", func() { + _, err := repository.GetFilter("TestFilter1") + Expect(err).To(Equal(repositories.ErrFilterDoesNotExist("TestFilter1"))) + }) }) }) diff --git a/pkg/repositories/postgres/logs.go b/pkg/repositories/postgres/logs.go index 8a56bef5..9d125a74 100644 --- a/pkg/repositories/postgres/logs.go +++ b/pkg/repositories/postgres/logs.go @@ -26,7 +26,7 @@ func (db DB) CreateLogs(logs []core.Log) error { return nil } -func (db DB) FindLogs(address string, blockNumber int64) []core.Log { +func (db DB) GetLogs(address string, blockNumber int64) []core.Log { logRows, _ := db.DB.Query( `SELECT block_number, address, diff --git a/pkg/repositories/postgres/logs_test.go b/pkg/repositories/postgres/logs_test.go index e932af21..85f451bd 100644 --- a/pkg/repositories/postgres/logs_test.go +++ b/pkg/repositories/postgres/logs_test.go @@ -35,7 +35,7 @@ var _ = Describe("Logs 
Repository", func() { }}, ) - log := repository.FindLogs("x123", 1) + log := repository.GetLogs("x123", 1) Expect(log).NotTo(BeNil()) Expect(log[0].BlockNumber).To(Equal(int64(1))) @@ -49,7 +49,7 @@ var _ = Describe("Logs Repository", func() { }) It("returns nil if log does not exist", func() { - log := repository.FindLogs("x123", 1) + log := repository.GetLogs("x123", 1) Expect(log).To(BeNil()) }) @@ -82,7 +82,7 @@ var _ = Describe("Logs Repository", func() { }}, ) - log := repository.FindLogs("x123", 1) + log := repository.GetLogs("x123", 1) type logIndex struct { blockNumber int64 @@ -168,7 +168,7 @@ var _ = Describe("Logs Repository", func() { block := core.Block{Transactions: []core.Transaction{transaction}} err := blockRepository.CreateOrUpdateBlock(block) Expect(err).To(Not(HaveOccurred())) - retrievedLogs := repository.FindLogs("0x99041f808d598b782d5a3e498681c2452a31da08", 4745407) + retrievedLogs := repository.GetLogs("0x99041f808d598b782d5a3e498681c2452a31da08", 4745407) expected := logs[1:] Expect(retrievedLogs).To(Equal(expected)) diff --git a/pkg/repositories/postgres/postgres_test.go b/pkg/repositories/postgres/postgres_test.go index bf08f050..670cea0c 100644 --- a/pkg/repositories/postgres/postgres_test.go +++ b/pkg/repositories/postgres/postgres_test.go @@ -94,7 +94,7 @@ var _ = Describe("Postgres repository", func() { repository, _ := postgres.NewDB(cfg.Database, node) err1 := repository.CreateOrUpdateBlock(badBlock) - savedBlock, err2 := repository.FindBlockByNumber(123) + savedBlock, err2 := repository.GetBlock(123) Expect(err1).To(HaveOccurred()) Expect(err2).To(HaveOccurred()) @@ -129,7 +129,7 @@ var _ = Describe("Postgres repository", func() { repository, _ := postgres.NewDB(cfg.Database, node) err := repository.CreateLogs([]core.Log{badLog}) - savedBlock := repository.FindLogs("x123", 1) + savedBlock := repository.GetLogs("x123", 1) Expect(err).ToNot(BeNil()) Expect(savedBlock).To(BeNil()) @@ -148,7 +148,7 @@ var _ = Describe("Postgres 
repository", func() { repository, _ := postgres.NewDB(cfg.Database, node) err1 := repository.CreateOrUpdateBlock(block) - savedBlock, err2 := repository.FindBlockByNumber(123) + savedBlock, err2 := repository.GetBlock(123) Expect(err1).To(HaveOccurred()) Expect(err2).To(HaveOccurred()) diff --git a/pkg/repositories/postgres/receipt_repository.go b/pkg/repositories/postgres/receipt_repository.go index 1fba0803..341ffb31 100644 --- a/pkg/repositories/postgres/receipt_repository.go +++ b/pkg/repositories/postgres/receipt_repository.go @@ -7,7 +7,7 @@ import ( "github.com/vulcanize/vulcanizedb/pkg/repositories" ) -func (db DB) FindReceipt(txHash string) (core.Receipt, error) { +func (db DB) GetReceipt(txHash string) (core.Receipt, error) { row := db.DB.QueryRow( `SELECT contract_address, tx_hash, diff --git a/pkg/repositories/postgres/receipts_repository_test.go b/pkg/repositories/postgres/receipts_repository_test.go index 660cad62..1e0a68e4 100644 --- a/pkg/repositories/postgres/receipts_repository_test.go +++ b/pkg/repositories/postgres/receipts_repository_test.go @@ -42,7 +42,7 @@ var _ = Describe("Logs Repository", func() { block := core.Block{Transactions: []core.Transaction{transaction}} blockRepository.CreateOrUpdateBlock(block) - receipt, err := repository.FindReceipt("0xe340558980f89d5f86045ac11e5cc34e4bcec20f9f1e2a427aa39d87114e8223") + receipt, err := repository.GetReceipt("0xe340558980f89d5f86045ac11e5cc34e4bcec20f9f1e2a427aa39d87114e8223") Expect(err).ToNot(HaveOccurred()) //Not currently serializing bloom logs @@ -55,7 +55,7 @@ var _ = Describe("Logs Repository", func() { }) It("returns ErrReceiptDoesNotExist when receipt does not exist", func() { - receipt, err := repository.FindReceipt("DOES NOT EXIST") + receipt, err := repository.GetReceipt("DOES NOT EXIST") Expect(err).To(HaveOccurred()) Expect(receipt).To(BeZero()) }) @@ -76,7 +76,7 @@ var _ = Describe("Logs Repository", func() { } blockRepository.CreateOrUpdateBlock(block) - _, err := 
repository.FindReceipt(receipt.TxHash) + _, err := repository.GetReceipt(receipt.TxHash) Expect(err).To(Not(HaveOccurred())) }) diff --git a/pkg/repositories/postgres/watched_events.go b/pkg/repositories/postgres/watched_events.go index d7cd16a6..29df2faf 100644 --- a/pkg/repositories/postgres/watched_events.go +++ b/pkg/repositories/postgres/watched_events.go @@ -1,32 +1,19 @@ package postgres -type WatchedEventLog struct { - Name string `json:"name"` // name - BlockNumber int64 `json:"block_number" db:"block_number"` // block_number - Address string `json:"address"` // address - TxHash string `json:"tx_hash" db:"tx_hash"` // tx_hash - Index int64 `json:"index"` // index - Topic0 string `json:"topic0"` // topic0 - Topic1 string `json:"topic1"` // topic1 - Topic2 string `json:"topic2"` // topic2 - Topic3 string `json:"topic3"` // topic3 - Data string `json:"data"` // data -} +import ( + "github.com/vulcanize/vulcanizedb/pkg/core" +) -type WatchedEventLogs interface { - AllWatchedEventLogs() ([]*WatchedEventLog, error) -} - -func (db *DB) AllWatchedEventLogs() ([]*WatchedEventLog, error) { - rows, err := db.DB.Queryx(`SELECT name, block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data FROM watched_event_logs`) +func (db DB) GetWatchedEvents(name string) ([]*core.WatchedEvent, error) { + rows, err := db.DB.Queryx(`SELECT name, block_number, address, tx_hash, index, topic0, topic1, topic2, topic3, data FROM watched_event_logs where name=$1`, name) if err != nil { return nil, err } defer rows.Close() - lgs := make([]*WatchedEventLog, 0) + lgs := make([]*core.WatchedEvent, 0) for rows.Next() { - lg := new(WatchedEventLog) + lg := new(core.WatchedEvent) err := rows.StructScan(lg) if err != nil { return nil, err diff --git a/pkg/repositories/postgres/watched_events_test.go b/pkg/repositories/postgres/watched_events_test.go index c9bd64f6..3399e79d 100644 --- a/pkg/repositories/postgres/watched_events_test.go +++ 
b/pkg/repositories/postgres/watched_events_test.go @@ -27,7 +27,7 @@ var _ = Describe("Watched Events Repository", func() { postgres.ClearData(repository) }) - It("retrieves watched logs that match the event filter", func() { + It("retrieves watched event logs that match the event filter", func() { filter := filters.LogFilter{ Name: "Filter1", FromBlock: 0, @@ -45,7 +45,7 @@ var _ = Describe("Watched Events Repository", func() { Data: "", }, } - expectedWatchedEventLog := []*postgres.WatchedEventLog{ + expectedWatchedEventLog := []*core.WatchedEvent{ { Name: "Filter1", BlockNumber: 0, @@ -57,11 +57,57 @@ var _ = Describe("Watched Events Repository", func() { Data: "", }, } - err := repository.AddFilter(filter) + err := repository.CreateFilter(filter) Expect(err).ToNot(HaveOccurred()) err = repository.CreateLogs(logs) Expect(err).ToNot(HaveOccurred()) - matchingLogs, err := repository.AllWatchedEventLogs() + matchingLogs, err := repository.GetWatchedEvents("Filter1") + Expect(err).ToNot(HaveOccurred()) + Expect(matchingLogs).To(Equal(expectedWatchedEventLog)) + + }) + + It("retrieves a watched event log by name", func() { + filter := filters.LogFilter{ + Name: "Filter1", + FromBlock: 0, + ToBlock: 10, + Address: "0x123", + Topics: core.Topics{0: "event1=10", 2: "event3=hello"}, + } + logs := []core.Log{ + { + BlockNumber: 0, + TxHash: "0x1", + Address: "0x123", + Topics: core.Topics{0: "event1=10", 2: "event3=hello"}, + Index: 0, + Data: "", + }, + { + BlockNumber: 100, + TxHash: "", + Address: "", + Topics: core.Topics{}, + Index: 0, + Data: "", + }, + } + expectedWatchedEventLog := []*core.WatchedEvent{{ + Name: "Filter1", + BlockNumber: 0, + TxHash: "0x1", + Address: "0x123", + Topic0: "event1=10", + Topic2: "event3=hello", + Index: 0, + Data: "", + }} + err := repository.CreateFilter(filter) + Expect(err).ToNot(HaveOccurred()) + err = repository.CreateLogs(logs) + Expect(err).ToNot(HaveOccurred()) + matchingLogs, err := repository.GetWatchedEvents("Filter1") 
Expect(err).ToNot(HaveOccurred()) Expect(matchingLogs).To(Equal(expectedWatchedEventLog)) diff --git a/pkg/repositories/repository.go b/pkg/repositories/repository.go index a8915d32..fd740b97 100644 --- a/pkg/repositories/repository.go +++ b/pkg/repositories/repository.go @@ -14,6 +14,7 @@ type Repository interface { LogsRepository ReceiptRepository FilterRepository + WatchedEventsRepository } var ErrBlockDoesNotExist = func(blockNumber int64) error { @@ -22,7 +23,7 @@ var ErrBlockDoesNotExist = func(blockNumber int64) error { type BlockRepository interface { CreateOrUpdateBlock(block core.Block) error - FindBlockByNumber(blockNumber int64) (core.Block, error) + GetBlock(blockNumber int64) (core.Block, error) MissingBlockNumbers(startingBlockNumber int64, endingBlockNumber int64) []int64 SetBlocksStatus(chainHead int64) } @@ -33,17 +34,22 @@ var ErrContractDoesNotExist = func(contractHash string) error { type ContractRepository interface { CreateContract(contract core.Contract) error + GetContract(contractHash string) (core.Contract, error) ContractExists(contractHash string) bool - FindContract(contractHash string) (core.Contract, error) +} + +var ErrFilterDoesNotExist = func(name string) error { + return errors.New(fmt.Sprintf("filter %s does not exist", name)) } type FilterRepository interface { - AddFilter(filter filters.LogFilter) error + CreateFilter(filter filters.LogFilter) error + GetFilter(name string) (filters.LogFilter, error) } type LogsRepository interface { - FindLogs(address string, blockNumber int64) []core.Log CreateLogs(logs []core.Log) error + GetLogs(address string, blockNumber int64) []core.Log } var ErrReceiptDoesNotExist = func(txHash string) error { @@ -51,5 +57,9 @@ var ErrReceiptDoesNotExist = func(txHash string) error { } type ReceiptRepository interface { - FindReceipt(txHash string) (core.Receipt, error) + GetReceipt(txHash string) (core.Receipt, error) +} + +type WatchedEventsRepository interface { + GetWatchedEvents(name string) 
([]*core.WatchedEvent, error) } diff --git a/vendor/github.com/neelance/graphql-go/.gitignore b/vendor/github.com/neelance/graphql-go/.gitignore new file mode 100644 index 00000000..32b9e0f1 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/.gitignore @@ -0,0 +1 @@ +/internal/tests/testdata/graphql-js diff --git a/vendor/github.com/neelance/graphql-go/LICENSE b/vendor/github.com/neelance/graphql-go/LICENSE new file mode 100644 index 00000000..3907ceca --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/LICENSE @@ -0,0 +1,24 @@ +Copyright (c) 2016 Richard Musiol. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
diff --git a/vendor/github.com/neelance/graphql-go/README.md b/vendor/github.com/neelance/graphql-go/README.md new file mode 100644 index 00000000..eead9b08 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/README.md @@ -0,0 +1,57 @@ +# graphql-go + +[![Sourcegraph](https://sourcegraph.com/github.com/neelance/graphql-go/-/badge.svg)](https://sourcegraph.com/github.com/neelance/graphql-go?badge) +[![Build Status](https://semaphoreci.com/api/v1/neelance/graphql-go/branches/master/badge.svg)](https://semaphoreci.com/neelance/graphql-go) +[![GoDoc](https://godoc.org/github.com/neelance/graphql-go?status.svg)](https://godoc.org/github.com/neelance/graphql-go) + +## Status + +The project is under heavy development. It is stable enough so we use it in production at [Sourcegraph](https://sourcegraph.com), but expect changes. + +## Goals + +* [ ] full support of [GraphQL spec (October 2016)](https://facebook.github.io/graphql/) + * [ ] propagation of `null` on resolver errors + * [x] everything else +* [x] minimal API +* [x] support for context.Context and OpenTracing +* [x] early error detection at application startup by type-checking if the given resolver matches the schema +* [x] resolvers are purely based on method sets (e.g. it's up to you if you want to resolve a GraphQL interface with a Go interface or a Go struct) +* [ ] nice error messages (no internal panics, even with an invalid schema or resolver; please file a bug if you see an internal panic) + * [x] nice errors on resolver validation + * [ ] nice errors on all invalid schemas + * [ ] nice errors on all invalid queries +* [x] panic handling (a panic in a resolver should not take down the whole app) +* [x] parallel execution of resolvers + +## (Some) Documentation + +### Resolvers + +A resolver must have one method for each field of the GraphQL type it resolves. The method name has to be [exported](https://golang.org/ref/spec#Exported_identifiers) and match the field's name in a non-case-sensitive way. 
+ +The method has up to two arguments: + +- Optional `context.Context` argument. +- Mandatory `*struct { ... }` argument if the corresponding GraphQL field has arguments. The names of the struct fields have to be [exported](https://golang.org/ref/spec#Exported_identifiers) and have to match the names of the GraphQL arguments in a non-case-sensitive way. + +The method has up to two results: + +- The GraphQL field's value as determined by the resolver. +- Optional `error` result. + +Example for a simple resolver method: + +```go +func (r *helloWorldResolver) Hello() string { + return "Hello world!" +} +``` + +The following signature is also allowed: + +```go +func (r *helloWorldResolver) Hello(ctx context.Context) (string, error) { + return "Hello world!", nil +} +``` diff --git a/vendor/github.com/neelance/graphql-go/errors/errors.go b/vendor/github.com/neelance/graphql-go/errors/errors.go new file mode 100644 index 00000000..fdfa6202 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/errors/errors.go @@ -0,0 +1,41 @@ +package errors + +import ( + "fmt" +) + +type QueryError struct { + Message string `json:"message"` + Locations []Location `json:"locations,omitempty"` + Path []interface{} `json:"path,omitempty"` + Rule string `json:"-"` + ResolverError error `json:"-"` +} + +type Location struct { + Line int `json:"line"` + Column int `json:"column"` +} + +func (a Location) Before(b Location) bool { + return a.Line < b.Line || (a.Line == b.Line && a.Column < b.Column) +} + +func Errorf(format string, a ...interface{}) *QueryError { + return &QueryError{ + Message: fmt.Sprintf(format, a...), + } +} + +func (err *QueryError) Error() string { + if err == nil { + return "" + } + str := fmt.Sprintf("graphql: %s", err.Message) + for _, loc := range err.Locations { + str += fmt.Sprintf(" (line %d, column %d)", loc.Line, loc.Column) + } + return str +} + +var _ error = &QueryError{} diff --git a/vendor/github.com/neelance/graphql-go/example/starwars/server/server.go 
b/vendor/github.com/neelance/graphql-go/example/starwars/server/server.go new file mode 100644 index 00000000..ce1a30fe --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/example/starwars/server/server.go @@ -0,0 +1,64 @@ +package main + +import ( + "log" + "net/http" + + "github.com/neelance/graphql-go" + "github.com/neelance/graphql-go/example/starwars" + "github.com/neelance/graphql-go/relay" +) + +var schema *graphql.Schema + +func init() { + schema = graphql.MustParseSchema(starwars.Schema, &starwars.Resolver{}) +} + +func main() { + http.Handle("/", http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Write(page) + })) + + http.Handle("/query", &relay.Handler{Schema: schema}) + + log.Fatal(http.ListenAndServe(":8080", nil)) +} + +var page = []byte(` + + + + + + + + + + +
Loading...
+ + + +`) diff --git a/vendor/github.com/neelance/graphql-go/example/starwars/starwars.go b/vendor/github.com/neelance/graphql-go/example/starwars/starwars.go new file mode 100644 index 00000000..0c559373 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/example/starwars/starwars.go @@ -0,0 +1,647 @@ +// Package starwars provides a example schema and resolver based on Star Wars characters. +// +// Source: https://github.com/graphql/graphql.github.io/blob/source/site/_core/swapiSchema.js +package starwars + +import ( + "encoding/base64" + "fmt" + "strconv" + "strings" + + graphql "github.com/neelance/graphql-go" +) + +var Schema = ` + schema { + query: Query + mutation: Mutation + } + # The query type, represents all of the entry points into our object graph + type Query { + hero(episode: Episode = NEWHOPE): Character + reviews(episode: Episode!): [Review]! + search(text: String!): [SearchResult]! + character(id: ID!): Character + droid(id: ID!): Droid + human(id: ID!): Human + starship(id: ID!): Starship + } + # The mutation type, represents all updates we can make to our data + type Mutation { + createReview(episode: Episode!, review: ReviewInput!): Review + } + # The episodes in the Star Wars trilogy + enum Episode { + # Star Wars Episode IV: A New Hope, released in 1977. + NEWHOPE + # Star Wars Episode V: The Empire Strikes Back, released in 1980. + EMPIRE + # Star Wars Episode VI: Return of the Jedi, released in 1983. + JEDI + } + # A character from the Star Wars universe + interface Character { + # The ID of the character + id: ID! + # The name of the character + name: String! + # The friends of the character, or an empty list if they have none + friends: [Character] + # The friends of the character exposed as a connection with edges + friendsConnection(first: Int, after: ID): FriendsConnection! + # The movies this character appears in + appearsIn: [Episode!]! 
+ } + # Units of height + enum LengthUnit { + # The standard unit around the world + METER + # Primarily used in the United States + FOOT + } + # A humanoid creature from the Star Wars universe + type Human implements Character { + # The ID of the human + id: ID! + # What this human calls themselves + name: String! + # Height in the preferred unit, default is meters + height(unit: LengthUnit = METER): Float! + # Mass in kilograms, or null if unknown + mass: Float + # This human's friends, or an empty list if they have none + friends: [Character] + # The friends of the human exposed as a connection with edges + friendsConnection(first: Int, after: ID): FriendsConnection! + # The movies this human appears in + appearsIn: [Episode!]! + # A list of starships this person has piloted, or an empty list if none + starships: [Starship] + } + # An autonomous mechanical character in the Star Wars universe + type Droid implements Character { + # The ID of the droid + id: ID! + # What others call this droid + name: String! + # This droid's friends, or an empty list if they have none + friends: [Character] + # The friends of the droid exposed as a connection with edges + friendsConnection(first: Int, after: ID): FriendsConnection! + # The movies this droid appears in + appearsIn: [Episode!]! + # This droid's primary function + primaryFunction: String + } + # A connection object for a character's friends + type FriendsConnection { + # The total number of friends + totalCount: Int! + # The edges for each of the character's friends. + edges: [FriendsEdge] + # A list of the friends, as a convenience when edges are not needed. + friends: [Character] + # Information for paginating this connection + pageInfo: PageInfo! + } + # An edge object for a character's friends + type FriendsEdge { + # A cursor used for pagination + cursor: ID! 
+ # The character represented by this friendship edge + node: Character + } + # Information for paginating this connection + type PageInfo { + startCursor: ID + endCursor: ID + hasNextPage: Boolean! + } + # Represents a review for a movie + type Review { + # The number of stars this review gave, 1-5 + stars: Int! + # Comment about the movie + commentary: String + } + # The input object sent when someone is creating a new review + input ReviewInput { + # 0-5 stars + stars: Int! + # Comment about the movie, optional + commentary: String + } + type Starship { + # The ID of the starship + id: ID! + # The name of the starship + name: String! + # Length of the starship, along the longest axis + length(unit: LengthUnit = METER): Float! + } + union SearchResult = Human | Droid | Starship +` + +type human struct { + ID graphql.ID + Name string + Friends []graphql.ID + AppearsIn []string + Height float64 + Mass int + Starships []graphql.ID +} + +var humans = []*human{ + { + ID: "1000", + Name: "Luke Skywalker", + Friends: []graphql.ID{"1002", "1003", "2000", "2001"}, + AppearsIn: []string{"NEWHOPE", "EMPIRE", "JEDI"}, + Height: 1.72, + Mass: 77, + Starships: []graphql.ID{"3001", "3003"}, + }, + { + ID: "1001", + Name: "Darth Vader", + Friends: []graphql.ID{"1004"}, + AppearsIn: []string{"NEWHOPE", "EMPIRE", "JEDI"}, + Height: 2.02, + Mass: 136, + Starships: []graphql.ID{"3002"}, + }, + { + ID: "1002", + Name: "Han Solo", + Friends: []graphql.ID{"1000", "1003", "2001"}, + AppearsIn: []string{"NEWHOPE", "EMPIRE", "JEDI"}, + Height: 1.8, + Mass: 80, + Starships: []graphql.ID{"3000", "3003"}, + }, + { + ID: "1003", + Name: "Leia Organa", + Friends: []graphql.ID{"1000", "1002", "2000", "2001"}, + AppearsIn: []string{"NEWHOPE", "EMPIRE", "JEDI"}, + Height: 1.5, + Mass: 49, + }, + { + ID: "1004", + Name: "Wilhuff Tarkin", + Friends: []graphql.ID{"1001"}, + AppearsIn: []string{"NEWHOPE"}, + Height: 1.8, + Mass: 0, + }, +} + +var humanData = make(map[graphql.ID]*human) + +func init() 
{ + for _, h := range humans { + humanData[h.ID] = h + } +} + +type droid struct { + ID graphql.ID + Name string + Friends []graphql.ID + AppearsIn []string + PrimaryFunction string +} + +var droids = []*droid{ + { + ID: "2000", + Name: "C-3PO", + Friends: []graphql.ID{"1000", "1002", "1003", "2001"}, + AppearsIn: []string{"NEWHOPE", "EMPIRE", "JEDI"}, + PrimaryFunction: "Protocol", + }, + { + ID: "2001", + Name: "R2-D2", + Friends: []graphql.ID{"1000", "1002", "1003"}, + AppearsIn: []string{"NEWHOPE", "EMPIRE", "JEDI"}, + PrimaryFunction: "Astromech", + }, +} + +var droidData = make(map[graphql.ID]*droid) + +func init() { + for _, d := range droids { + droidData[d.ID] = d + } +} + +type starship struct { + ID graphql.ID + Name string + Length float64 +} + +var starships = []*starship{ + { + ID: "3000", + Name: "Millennium Falcon", + Length: 34.37, + }, + { + ID: "3001", + Name: "X-Wing", + Length: 12.5, + }, + { + ID: "3002", + Name: "TIE Advanced x1", + Length: 9.2, + }, + { + ID: "3003", + Name: "Imperial shuttle", + Length: 20, + }, +} + +var starshipData = make(map[graphql.ID]*starship) + +func init() { + for _, s := range starships { + starshipData[s.ID] = s + } +} + +type review struct { + stars int32 + commentary *string +} + +var reviews = make(map[string][]*review) + +type Resolver struct{} + +func (r *Resolver) Hero(args struct{ Episode string }) *characterResolver { + if args.Episode == "EMPIRE" { + return &characterResolver{&humanResolver{humanData["1000"]}} + } + return &characterResolver{&droidResolver{droidData["2001"]}} +} + +func (r *Resolver) Reviews(args struct{ Episode string }) []*reviewResolver { + var l []*reviewResolver + for _, review := range reviews[args.Episode] { + l = append(l, &reviewResolver{review}) + } + return l +} + +func (r *Resolver) Search(args struct{ Text string }) []*searchResultResolver { + var l []*searchResultResolver + for _, h := range humans { + if strings.Contains(h.Name, args.Text) { + l = append(l, 
&searchResultResolver{&humanResolver{h}}) + } + } + for _, d := range droids { + if strings.Contains(d.Name, args.Text) { + l = append(l, &searchResultResolver{&droidResolver{d}}) + } + } + for _, s := range starships { + if strings.Contains(s.Name, args.Text) { + l = append(l, &searchResultResolver{&starshipResolver{s}}) + } + } + return l +} + +func (r *Resolver) Character(args struct{ ID graphql.ID }) *characterResolver { + if h := humanData[args.ID]; h != nil { + return &characterResolver{&humanResolver{h}} + } + if d := droidData[args.ID]; d != nil { + return &characterResolver{&droidResolver{d}} + } + return nil +} + +func (r *Resolver) Human(args struct{ ID graphql.ID }) *humanResolver { + if h := humanData[args.ID]; h != nil { + return &humanResolver{h} + } + return nil +} + +func (r *Resolver) Droid(args struct{ ID graphql.ID }) *droidResolver { + if d := droidData[args.ID]; d != nil { + return &droidResolver{d} + } + return nil +} + +func (r *Resolver) Starship(args struct{ ID graphql.ID }) *starshipResolver { + if s := starshipData[args.ID]; s != nil { + return &starshipResolver{s} + } + return nil +} + +func (r *Resolver) CreateReview(args *struct { + Episode string + Review *reviewInput +}) *reviewResolver { + review := &review{ + stars: args.Review.Stars, + commentary: args.Review.Commentary, + } + reviews[args.Episode] = append(reviews[args.Episode], review) + return &reviewResolver{review} +} + +type friendsConnectionArgs struct { + First *int32 + After *graphql.ID +} + +type character interface { + ID() graphql.ID + Name() string + Friends() *[]*characterResolver + FriendsConnection(friendsConnectionArgs) (*friendsConnectionResolver, error) + AppearsIn() []string +} + +type characterResolver struct { + character +} + +func (r *characterResolver) ToHuman() (*humanResolver, bool) { + c, ok := r.character.(*humanResolver) + return c, ok +} + +func (r *characterResolver) ToDroid() (*droidResolver, bool) { + c, ok := r.character.(*droidResolver) + 
return c, ok +} + +type humanResolver struct { + h *human +} + +func (r *humanResolver) ID() graphql.ID { + return r.h.ID +} + +func (r *humanResolver) Name() string { + return r.h.Name +} + +func (r *humanResolver) Height(args struct{ Unit string }) float64 { + return convertLength(r.h.Height, args.Unit) +} + +func (r *humanResolver) Mass() *float64 { + if r.h.Mass == 0 { + return nil + } + f := float64(r.h.Mass) + return &f +} + +func (r *humanResolver) Friends() *[]*characterResolver { + return resolveCharacters(r.h.Friends) +} + +func (r *humanResolver) FriendsConnection(args friendsConnectionArgs) (*friendsConnectionResolver, error) { + return newFriendsConnectionResolver(r.h.Friends, args) +} + +func (r *humanResolver) AppearsIn() []string { + return r.h.AppearsIn +} + +func (r *humanResolver) Starships() *[]*starshipResolver { + l := make([]*starshipResolver, len(r.h.Starships)) + for i, id := range r.h.Starships { + l[i] = &starshipResolver{starshipData[id]} + } + return &l +} + +type droidResolver struct { + d *droid +} + +func (r *droidResolver) ID() graphql.ID { + return r.d.ID +} + +func (r *droidResolver) Name() string { + return r.d.Name +} + +func (r *droidResolver) Friends() *[]*characterResolver { + return resolveCharacters(r.d.Friends) +} + +func (r *droidResolver) FriendsConnection(args friendsConnectionArgs) (*friendsConnectionResolver, error) { + return newFriendsConnectionResolver(r.d.Friends, args) +} + +func (r *droidResolver) AppearsIn() []string { + return r.d.AppearsIn +} + +func (r *droidResolver) PrimaryFunction() *string { + if r.d.PrimaryFunction == "" { + return nil + } + return &r.d.PrimaryFunction +} + +type starshipResolver struct { + s *starship +} + +func (r *starshipResolver) ID() graphql.ID { + return r.s.ID +} + +func (r *starshipResolver) Name() string { + return r.s.Name +} + +func (r *starshipResolver) Length(args struct{ Unit string }) float64 { + return convertLength(r.s.Length, args.Unit) +} + +type searchResultResolver 
struct { + result interface{} +} + +func (r *searchResultResolver) ToHuman() (*humanResolver, bool) { + res, ok := r.result.(*humanResolver) + return res, ok +} + +func (r *searchResultResolver) ToDroid() (*droidResolver, bool) { + res, ok := r.result.(*droidResolver) + return res, ok +} + +func (r *searchResultResolver) ToStarship() (*starshipResolver, bool) { + res, ok := r.result.(*starshipResolver) + return res, ok +} + +func convertLength(meters float64, unit string) float64 { + switch unit { + case "METER": + return meters + case "FOOT": + return meters * 3.28084 + default: + panic("invalid unit") + } +} + +func resolveCharacters(ids []graphql.ID) *[]*characterResolver { + var characters []*characterResolver + for _, id := range ids { + if c := resolveCharacter(id); c != nil { + characters = append(characters, c) + } + } + return &characters +} + +func resolveCharacter(id graphql.ID) *characterResolver { + if h, ok := humanData[id]; ok { + return &characterResolver{&humanResolver{h}} + } + if d, ok := droidData[id]; ok { + return &characterResolver{&droidResolver{d}} + } + return nil +} + +type reviewResolver struct { + r *review +} + +func (r *reviewResolver) Stars() int32 { + return r.r.stars +} + +func (r *reviewResolver) Commentary() *string { + return r.r.commentary +} + +type friendsConnectionResolver struct { + ids []graphql.ID + from int + to int +} + +func newFriendsConnectionResolver(ids []graphql.ID, args friendsConnectionArgs) (*friendsConnectionResolver, error) { + from := 0 + if args.After != nil { + b, err := base64.StdEncoding.DecodeString(string(*args.After)) + if err != nil { + return nil, err + } + i, err := strconv.Atoi(strings.TrimPrefix(string(b), "cursor")) + if err != nil { + return nil, err + } + from = i + } + + to := len(ids) + if args.First != nil { + to = from + int(*args.First) + if to > len(ids) { + to = len(ids) + } + } + + return &friendsConnectionResolver{ + ids: ids, + from: from, + to: to, + }, nil +} + +func (r 
*friendsConnectionResolver) TotalCount() int32 { + return int32(len(r.ids)) +} + +func (r *friendsConnectionResolver) Edges() *[]*friendsEdgeResolver { + l := make([]*friendsEdgeResolver, r.to-r.from) + for i := range l { + l[i] = &friendsEdgeResolver{ + cursor: encodeCursor(r.from + i), + id: r.ids[r.from+i], + } + } + return &l +} + +func (r *friendsConnectionResolver) Friends() *[]*characterResolver { + return resolveCharacters(r.ids[r.from:r.to]) +} + +func (r *friendsConnectionResolver) PageInfo() *pageInfoResolver { + return &pageInfoResolver{ + startCursor: encodeCursor(r.from), + endCursor: encodeCursor(r.to - 1), + hasNextPage: r.to < len(r.ids), + } +} + +func encodeCursor(i int) graphql.ID { + return graphql.ID(base64.StdEncoding.EncodeToString([]byte(fmt.Sprintf("cursor%d", i+1)))) +} + +type friendsEdgeResolver struct { + cursor graphql.ID + id graphql.ID +} + +func (r *friendsEdgeResolver) Cursor() graphql.ID { + return r.cursor +} + +func (r *friendsEdgeResolver) Node() *characterResolver { + return resolveCharacter(r.id) +} + +type pageInfoResolver struct { + startCursor graphql.ID + endCursor graphql.ID + hasNextPage bool +} + +func (r *pageInfoResolver) StartCursor() *graphql.ID { + return &r.startCursor +} + +func (r *pageInfoResolver) EndCursor() *graphql.ID { + return &r.endCursor +} + +func (r *pageInfoResolver) HasNextPage() bool { + return r.hasNextPage +} + +type reviewInput struct { + Stars int32 + Commentary *string +} diff --git a/vendor/github.com/neelance/graphql-go/gqltesting/testing.go b/vendor/github.com/neelance/graphql-go/gqltesting/testing.go new file mode 100644 index 00000000..56f90e4c --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/gqltesting/testing.go @@ -0,0 +1,67 @@ +package gqltesting + +import ( + "bytes" + "context" + "encoding/json" + "strconv" + "testing" + + graphql "github.com/neelance/graphql-go" +) + +// Test is a GraphQL test case to be used with RunTest(s). 
+type Test struct { + Context context.Context + Schema *graphql.Schema + Query string + OperationName string + Variables map[string]interface{} + ExpectedResult string +} + +// RunTests runs the given GraphQL test cases as subtests. +func RunTests(t *testing.T, tests []*Test) { + if len(tests) == 1 { + RunTest(t, tests[0]) + return + } + + for i, test := range tests { + t.Run(strconv.Itoa(i+1), func(t *testing.T) { + RunTest(t, test) + }) + } +} + +// RunTest runs a single GraphQL test case. +func RunTest(t *testing.T, test *Test) { + if test.Context == nil { + test.Context = context.Background() + } + result := test.Schema.Exec(test.Context, test.Query, test.OperationName, test.Variables) + if len(result.Errors) != 0 { + t.Fatal(result.Errors[0]) + } + got := formatJSON(t, result.Data) + + want := formatJSON(t, []byte(test.ExpectedResult)) + + if !bytes.Equal(got, want) { + t.Logf("got: %s", got) + t.Logf("want: %s", want) + t.Fail() + } +} + +func formatJSON(t *testing.T, data []byte) []byte { + var v interface{} + if err := json.Unmarshal(data, &v); err != nil { + t.Fatalf("invalid JSON: %s", err) + } + formatted, err := json.Marshal(v) + if err != nil { + t.Fatal(err) + } + return formatted +} diff --git a/vendor/github.com/neelance/graphql-go/graphql.go b/vendor/github.com/neelance/graphql-go/graphql.go new file mode 100644 index 00000000..f63242fa --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/graphql.go @@ -0,0 +1,185 @@ +package graphql + +import ( + "context" + "fmt" + + "encoding/json" + + "github.com/neelance/graphql-go/errors" + "github.com/neelance/graphql-go/internal/common" + "github.com/neelance/graphql-go/internal/exec" + "github.com/neelance/graphql-go/internal/exec/resolvable" + "github.com/neelance/graphql-go/internal/exec/selected" + "github.com/neelance/graphql-go/internal/query" + "github.com/neelance/graphql-go/internal/schema" + "github.com/neelance/graphql-go/internal/validation" + "github.com/neelance/graphql-go/introspection" 
+ "github.com/neelance/graphql-go/log" + "github.com/neelance/graphql-go/trace" +) + +// ParseSchema parses a GraphQL schema and attaches the given root resolver. It returns an error if +// the Go type signature of the resolvers does not match the schema. If nil is passed as the +// resolver, then the schema can not be executed, but it may be inspected (e.g. with ToJSON). +func ParseSchema(schemaString string, resolver interface{}, opts ...SchemaOpt) (*Schema, error) { + s := &Schema{ + schema: schema.New(), + maxParallelism: 10, + tracer: trace.OpenTracingTracer{}, + logger: &log.DefaultLogger{}, + } + for _, opt := range opts { + opt(s) + } + + if err := s.schema.Parse(schemaString); err != nil { + return nil, err + } + + if resolver != nil { + r, err := resolvable.ApplyResolver(s.schema, resolver) + if err != nil { + return nil, err + } + s.res = r + } + + return s, nil +} + +// MustParseSchema calls ParseSchema and panics on error. +func MustParseSchema(schemaString string, resolver interface{}, opts ...SchemaOpt) *Schema { + s, err := ParseSchema(schemaString, resolver, opts...) + if err != nil { + panic(err) + } + return s +} + +// Schema represents a GraphQL schema with an optional resolver. +type Schema struct { + schema *schema.Schema + res *resolvable.Schema + + maxParallelism int + tracer trace.Tracer + logger log.Logger +} + +// SchemaOpt is an option to pass to ParseSchema or MustParseSchema. +type SchemaOpt func(*Schema) + +// MaxParallelism specifies the maximum number of resolvers per request allowed to run in parallel. The default is 10. +func MaxParallelism(n int) SchemaOpt { + return func(s *Schema) { + s.maxParallelism = n + } +} + +// Tracer is used to trace queries and fields. It defaults to trace.OpenTracingTracer. +func Tracer(tracer trace.Tracer) SchemaOpt { + return func(s *Schema) { + s.tracer = tracer + } +} + +// Logger is used to log panics durring query execution. It defaults to exec.DefaultLogger. 
+func Logger(logger log.Logger) SchemaOpt { + return func(s *Schema) { + s.logger = logger + } +} + +// Response represents a typical response of a GraphQL server. It may be encoded to JSON directly or +// it may be further processed to a custom response type, for example to include custom error data. +type Response struct { + Data json.RawMessage `json:"data,omitempty"` + Errors []*errors.QueryError `json:"errors,omitempty"` + Extensions map[string]interface{} `json:"extensions,omitempty"` +} + +// Validate validates the given query with the schema. +func (s *Schema) Validate(queryString string) []*errors.QueryError { + doc, qErr := query.Parse(queryString) + if qErr != nil { + return []*errors.QueryError{qErr} + } + + return validation.Validate(s.schema, doc) +} + +// Exec executes the given query with the schema's resolver. It panics if the schema was created +// without a resolver. If the context get cancelled, no further resolvers will be called and a +// the context error will be returned as soon as possible (not immediately). 
+func (s *Schema) Exec(ctx context.Context, queryString string, operationName string, variables map[string]interface{}) *Response { + if s.res == nil { + panic("schema created without resolver, can not exec") + } + return s.exec(ctx, queryString, operationName, variables, s.res) +} + +func (s *Schema) exec(ctx context.Context, queryString string, operationName string, variables map[string]interface{}, res *resolvable.Schema) *Response { + doc, qErr := query.Parse(queryString) + if qErr != nil { + return &Response{Errors: []*errors.QueryError{qErr}} + } + + errs := validation.Validate(s.schema, doc) + if len(errs) != 0 { + return &Response{Errors: errs} + } + + op, err := getOperation(doc, operationName) + if err != nil { + return &Response{Errors: []*errors.QueryError{errors.Errorf("%s", err)}} + } + + r := &exec.Request{ + Request: selected.Request{ + Doc: doc, + Vars: variables, + Schema: s.schema, + }, + Limiter: make(chan struct{}, s.maxParallelism), + Tracer: s.tracer, + Logger: s.logger, + } + varTypes := make(map[string]*introspection.Type) + for _, v := range op.Vars { + t, err := common.ResolveType(v.Type, s.schema.Resolve) + if err != nil { + return &Response{Errors: []*errors.QueryError{err}} + } + varTypes[v.Name.Name] = introspection.WrapType(t) + } + traceCtx, finish := s.tracer.TraceQuery(ctx, queryString, operationName, variables, varTypes) + data, errs := r.Execute(traceCtx, res, op) + finish(errs) + + return &Response{ + Data: data, + Errors: errs, + } +} + +func getOperation(document *query.Document, operationName string) (*query.Operation, error) { + if len(document.Operations) == 0 { + return nil, fmt.Errorf("no operations in query document") + } + + if operationName == "" { + if len(document.Operations) > 1 { + return nil, fmt.Errorf("more than one operation in query document and no operation name given") + } + for _, op := range document.Operations { + return op, nil // return the one and only operation + } + } + + op := 
document.Operations.Get(operationName) + if op == nil { + return nil, fmt.Errorf("no operation with name %q", operationName) + } + return op, nil +} diff --git a/vendor/github.com/neelance/graphql-go/graphql_test.go b/vendor/github.com/neelance/graphql-go/graphql_test.go new file mode 100644 index 00000000..8e581afb --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/graphql_test.go @@ -0,0 +1,1755 @@ +package graphql_test + +import ( + "context" + "testing" + "time" + + "github.com/neelance/graphql-go" + "github.com/neelance/graphql-go/example/starwars" + "github.com/neelance/graphql-go/gqltesting" +) + +type helloWorldResolver1 struct{} + +func (r *helloWorldResolver1) Hello() string { + return "Hello world!" +} + +type helloWorldResolver2 struct{} + +func (r *helloWorldResolver2) Hello(ctx context.Context) (string, error) { + return "Hello world!", nil +} + +type helloSnakeResolver1 struct{} + +func (r *helloSnakeResolver1) HelloHTML() string { + return "Hello snake!" +} + +func (r *helloSnakeResolver1) SayHello(args struct{ FullName string }) string { + return "Hello " + args.FullName + "!" 
+} + +type helloSnakeResolver2 struct{} + +func (r *helloSnakeResolver2) HelloHTML(ctx context.Context) (string, error) { + return "Hello snake!", nil +} + +func (r *helloSnakeResolver2) SayHello(ctx context.Context, args struct{ FullName string }) (string, error) { + return "Hello " + args.FullName + "!", nil +} + +type theNumberResolver struct { + number int32 +} + +func (r *theNumberResolver) TheNumber() int32 { + return r.number +} + +func (r *theNumberResolver) ChangeTheNumber(args struct{ NewNumber int32 }) *theNumberResolver { + r.number = args.NewNumber + return r +} + +type timeResolver struct{} + +func (r *timeResolver) AddHour(args struct{ Time graphql.Time }) graphql.Time { + return graphql.Time{Time: args.Time.Add(time.Hour)} +} + +var starwarsSchema = graphql.MustParseSchema(starwars.Schema, &starwars.Resolver{}) + +func TestHelloWorld(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: graphql.MustParseSchema(` + schema { + query: Query + } + + type Query { + hello: String! + } + `, &helloWorldResolver1{}), + Query: ` + { + hello + } + `, + ExpectedResult: ` + { + "hello": "Hello world!" + } + `, + }, + + { + Schema: graphql.MustParseSchema(` + schema { + query: Query + } + + type Query { + hello: String! + } + `, &helloWorldResolver2{}), + Query: ` + { + hello + } + `, + ExpectedResult: ` + { + "hello": "Hello world!" + } + `, + }, + }) +} + +func TestHelloSnake(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: graphql.MustParseSchema(` + schema { + query: Query + } + + type Query { + hello_html: String! + } + `, &helloSnakeResolver1{}), + Query: ` + { + hello_html + } + `, + ExpectedResult: ` + { + "hello_html": "Hello snake!" + } + `, + }, + + { + Schema: graphql.MustParseSchema(` + schema { + query: Query + } + + type Query { + hello_html: String! + } + `, &helloSnakeResolver2{}), + Query: ` + { + hello_html + } + `, + ExpectedResult: ` + { + "hello_html": "Hello snake!" 
+ } + `, + }, + }) +} + +func TestHelloSnakeArguments(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: graphql.MustParseSchema(` + schema { + query: Query + } + + type Query { + say_hello(full_name: String!): String! + } + `, &helloSnakeResolver1{}), + Query: ` + { + say_hello(full_name: "Rob Pike") + } + `, + ExpectedResult: ` + { + "say_hello": "Hello Rob Pike!" + } + `, + }, + + { + Schema: graphql.MustParseSchema(` + schema { + query: Query + } + + type Query { + say_hello(full_name: String!): String! + } + `, &helloSnakeResolver2{}), + Query: ` + { + say_hello(full_name: "Rob Pike") + } + `, + ExpectedResult: ` + { + "say_hello": "Hello Rob Pike!" + } + `, + }, + }) +} + +func TestBasic(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: starwarsSchema, + Query: ` + { + hero { + id + name + friends { + name + } + } + } + `, + ExpectedResult: ` + { + "hero": { + "id": "2001", + "name": "R2-D2", + "friends": [ + { + "name": "Luke Skywalker" + }, + { + "name": "Han Solo" + }, + { + "name": "Leia Organa" + } + ] + } + } + `, + }, + }) +} + +func TestArguments(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: starwarsSchema, + Query: ` + { + human(id: "1000") { + name + height + } + } + `, + ExpectedResult: ` + { + "human": { + "name": "Luke Skywalker", + "height": 1.72 + } + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + { + human(id: "1000") { + name + height(unit: FOOT) + } + } + `, + ExpectedResult: ` + { + "human": { + "name": "Luke Skywalker", + "height": 5.6430448 + } + } + `, + }, + }) +} + +func TestAliases(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: starwarsSchema, + Query: ` + { + empireHero: hero(episode: EMPIRE) { + name + } + jediHero: hero(episode: JEDI) { + name + } + } + `, + ExpectedResult: ` + { + "empireHero": { + "name": "Luke Skywalker" + }, + "jediHero": { + "name": "R2-D2" + } + } + `, + }, + }) +} + +func TestFragments(t 
*testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: starwarsSchema, + Query: ` + { + leftComparison: hero(episode: EMPIRE) { + ...comparisonFields + ...height + } + rightComparison: hero(episode: JEDI) { + ...comparisonFields + ...height + } + } + + fragment comparisonFields on Character { + name + appearsIn + friends { + name + } + } + + fragment height on Human { + height + } + `, + ExpectedResult: ` + { + "leftComparison": { + "name": "Luke Skywalker", + "appearsIn": [ + "NEWHOPE", + "EMPIRE", + "JEDI" + ], + "friends": [ + { + "name": "Han Solo" + }, + { + "name": "Leia Organa" + }, + { + "name": "C-3PO" + }, + { + "name": "R2-D2" + } + ], + "height": 1.72 + }, + "rightComparison": { + "name": "R2-D2", + "appearsIn": [ + "NEWHOPE", + "EMPIRE", + "JEDI" + ], + "friends": [ + { + "name": "Luke Skywalker" + }, + { + "name": "Han Solo" + }, + { + "name": "Leia Organa" + } + ] + } + } + `, + }, + }) +} + +func TestVariables(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: starwarsSchema, + Query: ` + query HeroNameAndFriends($episode: Episode) { + hero(episode: $episode) { + name + } + } + `, + Variables: map[string]interface{}{ + "episode": "JEDI", + }, + ExpectedResult: ` + { + "hero": { + "name": "R2-D2" + } + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + query HeroNameAndFriends($episode: Episode) { + hero(episode: $episode) { + name + } + } + `, + Variables: map[string]interface{}{ + "episode": "EMPIRE", + }, + ExpectedResult: ` + { + "hero": { + "name": "Luke Skywalker" + } + } + `, + }, + }) +} + +func TestSkipDirective(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: starwarsSchema, + Query: ` + query Hero($episode: Episode, $withoutFriends: Boolean!) 
{ + hero(episode: $episode) { + name + friends @skip(if: $withoutFriends) { + name + } + } + } + `, + Variables: map[string]interface{}{ + "episode": "JEDI", + "withoutFriends": true, + }, + ExpectedResult: ` + { + "hero": { + "name": "R2-D2" + } + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + query Hero($episode: Episode, $withoutFriends: Boolean!) { + hero(episode: $episode) { + name + friends @skip(if: $withoutFriends) { + name + } + } + } + `, + Variables: map[string]interface{}{ + "episode": "JEDI", + "withoutFriends": false, + }, + ExpectedResult: ` + { + "hero": { + "name": "R2-D2", + "friends": [ + { + "name": "Luke Skywalker" + }, + { + "name": "Han Solo" + }, + { + "name": "Leia Organa" + } + ] + } + } + `, + }, + }) +} + +func TestIncludeDirective(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: starwarsSchema, + Query: ` + query Hero($episode: Episode, $withFriends: Boolean!) { + hero(episode: $episode) { + name + ...friendsFragment @include(if: $withFriends) + } + } + + fragment friendsFragment on Character { + friends { + name + } + } + `, + Variables: map[string]interface{}{ + "episode": "JEDI", + "withFriends": false, + }, + ExpectedResult: ` + { + "hero": { + "name": "R2-D2" + } + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + query Hero($episode: Episode, $withFriends: Boolean!) 
{ + hero(episode: $episode) { + name + ...friendsFragment @include(if: $withFriends) + } + } + + fragment friendsFragment on Character { + friends { + name + } + } + `, + Variables: map[string]interface{}{ + "episode": "JEDI", + "withFriends": true, + }, + ExpectedResult: ` + { + "hero": { + "name": "R2-D2", + "friends": [ + { + "name": "Luke Skywalker" + }, + { + "name": "Han Solo" + }, + { + "name": "Leia Organa" + } + ] + } + } + `, + }, + }) +} + +type testDeprecatedDirectiveResolver struct{} + +func (r *testDeprecatedDirectiveResolver) A() int32 { + return 0 +} + +func (r *testDeprecatedDirectiveResolver) B() int32 { + return 0 +} + +func (r *testDeprecatedDirectiveResolver) C() int32 { + return 0 +} + +func TestDeprecatedDirective(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: graphql.MustParseSchema(` + schema { + query: Query + } + + type Query { + a: Int! + b: Int! @deprecated + c: Int! @deprecated(reason: "We don't like it") + } + `, &testDeprecatedDirectiveResolver{}), + Query: ` + { + __type(name: "Query") { + fields { + name + } + allFields: fields(includeDeprecated: true) { + name + isDeprecated + deprecationReason + } + } + } + `, + ExpectedResult: ` + { + "__type": { + "fields": [ + { "name": "a" } + ], + "allFields": [ + { "name": "a", "isDeprecated": false, "deprecationReason": null }, + { "name": "b", "isDeprecated": true, "deprecationReason": "No longer supported" }, + { "name": "c", "isDeprecated": true, "deprecationReason": "We don't like it" } + ] + } + } + `, + }, + { + Schema: graphql.MustParseSchema(` + schema { + query: Query + } + + type Query { + } + + enum Test { + A + B @deprecated + C @deprecated(reason: "We don't like it") + } + `, &testDeprecatedDirectiveResolver{}), + Query: ` + { + __type(name: "Test") { + enumValues { + name + } + allEnumValues: enumValues(includeDeprecated: true) { + name + isDeprecated + deprecationReason + } + } + } + `, + ExpectedResult: ` + { + "__type": { + "enumValues": [ + { 
"name": "A" } + ], + "allEnumValues": [ + { "name": "A", "isDeprecated": false, "deprecationReason": null }, + { "name": "B", "isDeprecated": true, "deprecationReason": "No longer supported" }, + { "name": "C", "isDeprecated": true, "deprecationReason": "We don't like it" } + ] + } + } + `, + }, + }) +} + +func TestInlineFragments(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: starwarsSchema, + Query: ` + query HeroForEpisode($episode: Episode!) { + hero(episode: $episode) { + name + ... on Droid { + primaryFunction + } + ... on Human { + height + } + } + } + `, + Variables: map[string]interface{}{ + "episode": "JEDI", + }, + ExpectedResult: ` + { + "hero": { + "name": "R2-D2", + "primaryFunction": "Astromech" + } + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + query HeroForEpisode($episode: Episode!) { + hero(episode: $episode) { + name + ... on Droid { + primaryFunction + } + ... on Human { + height + } + } + } + `, + Variables: map[string]interface{}{ + "episode": "EMPIRE", + }, + ExpectedResult: ` + { + "hero": { + "name": "Luke Skywalker", + "height": 1.72 + } + } + `, + }, + }) +} + +func TestTypeName(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: starwarsSchema, + Query: ` + { + search(text: "an") { + __typename + ... on Human { + name + } + ... on Droid { + name + } + ... 
on Starship { + name + } + } + } + `, + ExpectedResult: ` + { + "search": [ + { + "__typename": "Human", + "name": "Han Solo" + }, + { + "__typename": "Human", + "name": "Leia Organa" + }, + { + "__typename": "Starship", + "name": "TIE Advanced x1" + } + ] + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + { + human(id: "1000") { + __typename + name + } + } + `, + ExpectedResult: ` + { + "human": { + "__typename": "Human", + "name": "Luke Skywalker" + } + } + `, + }, + }) +} + +func TestConnections(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: starwarsSchema, + Query: ` + { + hero { + name + friendsConnection { + totalCount + pageInfo { + startCursor + endCursor + hasNextPage + } + edges { + cursor + node { + name + } + } + } + } + } + `, + ExpectedResult: ` + { + "hero": { + "name": "R2-D2", + "friendsConnection": { + "totalCount": 3, + "pageInfo": { + "startCursor": "Y3Vyc29yMQ==", + "endCursor": "Y3Vyc29yMw==", + "hasNextPage": false + }, + "edges": [ + { + "cursor": "Y3Vyc29yMQ==", + "node": { + "name": "Luke Skywalker" + } + }, + { + "cursor": "Y3Vyc29yMg==", + "node": { + "name": "Han Solo" + } + }, + { + "cursor": "Y3Vyc29yMw==", + "node": { + "name": "Leia Organa" + } + } + ] + } + } + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + { + hero { + name + friendsConnection(first: 1, after: "Y3Vyc29yMQ==") { + totalCount + pageInfo { + startCursor + endCursor + hasNextPage + } + edges { + cursor + node { + name + } + } + } + }, + moreFriends: hero { + name + friendsConnection(first: 1, after: "Y3Vyc29yMg==") { + totalCount + pageInfo { + startCursor + endCursor + hasNextPage + } + edges { + cursor + node { + name + } + } + } + } + } + `, + ExpectedResult: ` + { + "hero": { + "name": "R2-D2", + "friendsConnection": { + "totalCount": 3, + "pageInfo": { + "startCursor": "Y3Vyc29yMg==", + "endCursor": "Y3Vyc29yMg==", + "hasNextPage": true + }, + "edges": [ + { + "cursor": "Y3Vyc29yMg==", + "node": { + "name": "Han Solo" 
+ } + } + ] + } + }, + "moreFriends": { + "name": "R2-D2", + "friendsConnection": { + "totalCount": 3, + "pageInfo": { + "startCursor": "Y3Vyc29yMw==", + "endCursor": "Y3Vyc29yMw==", + "hasNextPage": false + }, + "edges": [ + { + "cursor": "Y3Vyc29yMw==", + "node": { + "name": "Leia Organa" + } + } + ] + } + } + } + `, + }, + }) +} + +func TestMutation(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: starwarsSchema, + Query: ` + { + reviews(episode: JEDI) { + stars + commentary + } + } + `, + ExpectedResult: ` + { + "reviews": [] + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + mutation CreateReviewForEpisode($ep: Episode!, $review: ReviewInput!) { + createReview(episode: $ep, review: $review) { + stars + commentary + } + } + `, + Variables: map[string]interface{}{ + "ep": "JEDI", + "review": map[string]interface{}{ + "stars": 5, + "commentary": "This is a great movie!", + }, + }, + ExpectedResult: ` + { + "createReview": { + "stars": 5, + "commentary": "This is a great movie!" + } + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + mutation CreateReviewForEpisode($ep: Episode!, $review: ReviewInput!) { + createReview(episode: $ep, review: $review) { + stars + commentary + } + } + `, + Variables: map[string]interface{}{ + "ep": "EMPIRE", + "review": map[string]interface{}{ + "stars": float64(4), + }, + }, + ExpectedResult: ` + { + "createReview": { + "stars": 4, + "commentary": null + } + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + { + reviews(episode: JEDI) { + stars + commentary + } + } + `, + ExpectedResult: ` + { + "reviews": [{ + "stars": 5, + "commentary": "This is a great movie!" 
+ }] + } + `, + }, + }) +} + +func TestIntrospection(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: starwarsSchema, + Query: ` + { + __schema { + types { + name + } + } + } + `, + ExpectedResult: ` + { + "__schema": { + "types": [ + { "name": "Boolean" }, + { "name": "Character" }, + { "name": "Droid" }, + { "name": "Episode" }, + { "name": "Float" }, + { "name": "FriendsConnection" }, + { "name": "FriendsEdge" }, + { "name": "Human" }, + { "name": "ID" }, + { "name": "Int" }, + { "name": "LengthUnit" }, + { "name": "Mutation" }, + { "name": "PageInfo" }, + { "name": "Query" }, + { "name": "Review" }, + { "name": "ReviewInput" }, + { "name": "SearchResult" }, + { "name": "Starship" }, + { "name": "String" }, + { "name": "__Directive" }, + { "name": "__DirectiveLocation" }, + { "name": "__EnumValue" }, + { "name": "__Field" }, + { "name": "__InputValue" }, + { "name": "__Schema" }, + { "name": "__Type" }, + { "name": "__TypeKind" } + ] + } + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + { + __schema { + queryType { + name + } + } + } + `, + ExpectedResult: ` + { + "__schema": { + "queryType": { + "name": "Query" + } + } + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + { + a: __type(name: "Droid") { + name + kind + interfaces { + name + } + possibleTypes { + name + } + }, + b: __type(name: "Character") { + name + kind + interfaces { + name + } + possibleTypes { + name + } + } + c: __type(name: "SearchResult") { + name + kind + interfaces { + name + } + possibleTypes { + name + } + } + } + `, + ExpectedResult: ` + { + "a": { + "name": "Droid", + "kind": "OBJECT", + "interfaces": [ + { + "name": "Character" + } + ], + "possibleTypes": null + }, + "b": { + "name": "Character", + "kind": "INTERFACE", + "interfaces": null, + "possibleTypes": [ + { + "name": "Human" + }, + { + "name": "Droid" + } + ] + }, + "c": { + "name": "SearchResult", + "kind": "UNION", + "interfaces": null, + "possibleTypes": [ + { + "name": "Human" + 
}, + { + "name": "Droid" + }, + { + "name": "Starship" + } + ] + } + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + { + __type(name: "Droid") { + name + fields { + name + args { + name + type { + name + } + defaultValue + } + type { + name + kind + } + } + } + } + `, + ExpectedResult: ` + { + "__type": { + "name": "Droid", + "fields": [ + { + "name": "id", + "args": [], + "type": { + "name": null, + "kind": "NON_NULL" + } + }, + { + "name": "name", + "args": [], + "type": { + "name": null, + "kind": "NON_NULL" + } + }, + { + "name": "friends", + "args": [], + "type": { + "name": null, + "kind": "LIST" + } + }, + { + "name": "friendsConnection", + "args": [ + { + "name": "first", + "type": { + "name": "Int" + }, + "defaultValue": null + }, + { + "name": "after", + "type": { + "name": "ID" + }, + "defaultValue": null + } + ], + "type": { + "name": null, + "kind": "NON_NULL" + } + }, + { + "name": "appearsIn", + "args": [], + "type": { + "name": null, + "kind": "NON_NULL" + } + }, + { + "name": "primaryFunction", + "args": [], + "type": { + "name": "String", + "kind": "SCALAR" + } + } + ] + } + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + { + __type(name: "Episode") { + enumValues { + name + } + } + } + `, + ExpectedResult: ` + { + "__type": { + "enumValues": [ + { + "name": "NEWHOPE" + }, + { + "name": "EMPIRE" + }, + { + "name": "JEDI" + } + ] + } + } + `, + }, + + { + Schema: starwarsSchema, + Query: ` + { + __schema { + directives { + name + description + locations + args { + name + description + type { + kind + ofType { + kind + name + } + } + } + } + } + } + `, + ExpectedResult: ` + { + "__schema": { + "directives": [ + { + "name": "deprecated", + "description": "Marks an element of a GraphQL schema as no longer supported.", + "locations": [ + "FIELD_DEFINITION", + "ENUM_VALUE" + ], + "args": [ + { + "name": "reason", + "description": "Explains why this element was deprecated, usually also including a suggestion\nfor how to access supported 
similar data. Formatted in\n[Markdown](https://daringfireball.net/projects/markdown/).", + "type": { + "kind": "SCALAR", + "ofType": null + } + } + ] + }, + { + "name": "include", + "description": "Directs the executor to include this field or fragment only when the ` + "`" + `if` + "`" + ` argument is true.", + "locations": [ + "FIELD", + "FRAGMENT_SPREAD", + "INLINE_FRAGMENT" + ], + "args": [ + { + "name": "if", + "description": "Included when true.", + "type": { + "kind": "NON_NULL", + "ofType": { + "kind": "SCALAR", + "name": "Boolean" + } + } + } + ] + }, + { + "name": "skip", + "description": "Directs the executor to skip this field or fragment when the ` + "`" + `if` + "`" + ` argument is true.", + "locations": [ + "FIELD", + "FRAGMENT_SPREAD", + "INLINE_FRAGMENT" + ], + "args": [ + { + "name": "if", + "description": "Skipped when true.", + "type": { + "kind": "NON_NULL", + "ofType": { + "kind": "SCALAR", + "name": "Boolean" + } + } + } + ] + } + ] + } + } + `, + }, + }) +} + +func TestMutationOrder(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: graphql.MustParseSchema(` + schema { + query: Query + mutation: Mutation + } + + type Query { + theNumber: Int! + } + + type Mutation { + changeTheNumber(newNumber: Int!): Query + } + `, &theNumberResolver{}), + Query: ` + mutation { + first: changeTheNumber(newNumber: 1) { + theNumber + } + second: changeTheNumber(newNumber: 3) { + theNumber + } + third: changeTheNumber(newNumber: 2) { + theNumber + } + } + `, + ExpectedResult: ` + { + "first": { + "theNumber": 1 + }, + "second": { + "theNumber": 3 + }, + "third": { + "theNumber": 2 + } + } + `, + }, + }) +} + +func TestTime(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: graphql.MustParseSchema(` + schema { + query: Query + } + + type Query { + addHour(time: Time = "2001-02-03T04:05:06Z"): Time! + } + + scalar Time + `, &timeResolver{}), + Query: ` + query($t: Time!) 
{ + a: addHour(time: $t) + b: addHour + } + `, + Variables: map[string]interface{}{ + "t": time.Date(2000, 2, 3, 4, 5, 6, 0, time.UTC), + }, + ExpectedResult: ` + { + "a": "2000-02-03T05:05:06Z", + "b": "2001-02-03T05:05:06Z" + } + `, + }, + }) +} + +type resolverWithUnexportedMethod struct{} + +func (r *resolverWithUnexportedMethod) changeTheNumber(args struct{ NewNumber int32 }) int32 { + return args.NewNumber +} + +func TestUnexportedMethod(t *testing.T) { + _, err := graphql.ParseSchema(` + schema { + mutation: Mutation + } + + type Mutation { + changeTheNumber(newNumber: Int!): Int! + } + `, &resolverWithUnexportedMethod{}) + if err == nil { + t.Error("error expected") + } +} + +type resolverWithUnexportedField struct{} + +func (r *resolverWithUnexportedField) ChangeTheNumber(args struct{ newNumber int32 }) int32 { + return args.newNumber +} + +func TestUnexportedField(t *testing.T) { + _, err := graphql.ParseSchema(` + schema { + mutation: Mutation + } + + type Mutation { + changeTheNumber(newNumber: Int!): Int! 
+ } + `, &resolverWithUnexportedField{}) + if err == nil { + t.Error("error expected") + } +} + +type inputResolver struct{} + +func (r *inputResolver) Int(args struct{ Value int32 }) int32 { + return args.Value +} + +func (r *inputResolver) Float(args struct{ Value float64 }) float64 { + return args.Value +} + +func (r *inputResolver) String(args struct{ Value string }) string { + return args.Value +} + +func (r *inputResolver) Boolean(args struct{ Value bool }) bool { + return args.Value +} + +func (r *inputResolver) Nullable(args struct{ Value *int32 }) *int32 { + return args.Value +} + +func (r *inputResolver) List(args struct{ Value []*struct{ V int32 } }) []int32 { + l := make([]int32, len(args.Value)) + for i, entry := range args.Value { + l[i] = entry.V + } + return l +} + +func (r *inputResolver) NullableList(args struct{ Value *[]*struct{ V int32 } }) *[]*int32 { + if args.Value == nil { + return nil + } + l := make([]*int32, len(*args.Value)) + for i, entry := range *args.Value { + if entry != nil { + l[i] = &entry.V + } + } + return &l +} + +func (r *inputResolver) Enum(args struct{ Value string }) string { + return args.Value +} + +func (r *inputResolver) NullableEnum(args struct{ Value *string }) *string { + return args.Value +} + +type recursive struct { + Next *recursive +} + +func (r *inputResolver) Recursive(args struct{ Value *recursive }) int32 { + n := int32(0) + v := args.Value + for v != nil { + v = v.Next + n++ + } + return n +} + +func (r *inputResolver) ID(args struct{ Value graphql.ID }) graphql.ID { + return args.Value +} + +func TestInput(t *testing.T) { + coercionSchema := graphql.MustParseSchema(` + schema { + query: Query + } + + type Query { + int(value: Int!): Int! + float(value: Float!): Float! + string(value: String!): String! + boolean(value: Boolean!): Boolean! + nullable(value: Int): Int + list(value: [Input!]!): [Int!]! + nullableList(value: [Input]): [Int] + enum(value: Enum!): Enum! 
+ nullableEnum(value: Enum): Enum + recursive(value: RecursiveInput!): Int! + id(value: ID!): ID! + } + + input Input { + v: Int! + } + + input RecursiveInput { + next: RecursiveInput + } + + enum Enum { + Option1 + Option2 + } + `, &inputResolver{}) + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: coercionSchema, + Query: ` + { + int(value: 42) + float1: float(value: 42) + float2: float(value: 42.5) + string(value: "foo") + boolean(value: true) + nullable1: nullable(value: 42) + nullable2: nullable(value: null) + list1: list(value: [{v: 41}, {v: 42}, {v: 43}]) + list2: list(value: {v: 42}) + nullableList1: nullableList(value: [{v: 41}, null, {v: 43}]) + nullableList2: nullableList(value: null) + enum(value: Option2) + nullableEnum1: nullableEnum(value: Option2) + nullableEnum2: nullableEnum(value: null) + recursive(value: {next: {next: {}}}) + intID: id(value: 1234) + strID: id(value: "1234") + } + `, + ExpectedResult: ` + { + "int": 42, + "float1": 42, + "float2": 42.5, + "string": "foo", + "boolean": true, + "nullable1": 42, + "nullable2": null, + "list1": [41, 42, 43], + "list2": [42], + "nullableList1": [41, null, 43], + "nullableList2": null, + "enum": "Option2", + "nullableEnum1": "Option2", + "nullableEnum2": null, + "recursive": 3, + "intID": "1234", + "strID": "1234" + } + `, + }, + }) +} + +func TestComposedFragments(t *testing.T) { + gqltesting.RunTests(t, []*gqltesting.Test{ + { + Schema: starwarsSchema, + Query: ` + { + composed: hero(episode: EMPIRE) { + name + ...friendsNames + ...friendsIds + } + } + + fragment friendsNames on Character { + name + friends { + name + } + } + + fragment friendsIds on Character { + name + friends { + id + } + } + `, + ExpectedResult: ` + { + "composed": { + "name": "Luke Skywalker", + "friends": [ + { + "id": "1002", + "name": "Han Solo" + }, + { + "id": "1003", + "name": "Leia Organa" + }, + { + "id": "2000", + "name": "C-3PO" + }, + { + "id": "2001", + "name": "R2-D2" + } + ] + } + } + `, + }, + }) +} diff 
--git a/vendor/github.com/neelance/graphql-go/id.go b/vendor/github.com/neelance/graphql-go/id.go new file mode 100644 index 00000000..3ae2c38c --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/id.go @@ -0,0 +1,30 @@ +package graphql + +import ( + "errors" + "strconv" +) + +// ID represents GraphQL's "ID" scalar type. A custom type may be used instead. +type ID string + +func (_ ID) ImplementsGraphQLType(name string) bool { + return name == "ID" +} + +func (id *ID) UnmarshalGraphQL(input interface{}) error { + var err error + switch input := input.(type) { + case string: + *id = ID(input) + case int32: + *id = ID(strconv.Itoa(int(input))) + default: + err = errors.New("wrong type") + } + return err +} + +func (id ID) MarshalJSON() ([]byte, error) { + return strconv.AppendQuote(nil, string(id)), nil +} diff --git a/vendor/github.com/neelance/graphql-go/internal/common/directive.go b/vendor/github.com/neelance/graphql-go/internal/common/directive.go new file mode 100644 index 00000000..62dca47f --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/common/directive.go @@ -0,0 +1,32 @@ +package common + +type Directive struct { + Name Ident + Args ArgumentList +} + +func ParseDirectives(l *Lexer) DirectiveList { + var directives DirectiveList + for l.Peek() == '@' { + l.ConsumeToken('@') + d := &Directive{} + d.Name = l.ConsumeIdentWithLoc() + d.Name.Loc.Column-- + if l.Peek() == '(' { + d.Args = ParseArguments(l) + } + directives = append(directives, d) + } + return directives +} + +type DirectiveList []*Directive + +func (l DirectiveList) Get(name string) *Directive { + for _, d := range l { + if d.Name.Name == name { + return d + } + } + return nil +} diff --git a/vendor/github.com/neelance/graphql-go/internal/common/lexer.go b/vendor/github.com/neelance/graphql-go/internal/common/lexer.go new file mode 100644 index 00000000..f67dc31e --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/common/lexer.go @@ -0,0 +1,122 @@ +package 
common + +import ( + "fmt" + "text/scanner" + + "github.com/neelance/graphql-go/errors" +) + +type syntaxError string + +type Lexer struct { + sc *scanner.Scanner + next rune + descComment string +} + +type Ident struct { + Name string + Loc errors.Location +} + +func New(sc *scanner.Scanner) *Lexer { + l := &Lexer{sc: sc} + l.Consume() + return l +} + +func (l *Lexer) CatchSyntaxError(f func()) (errRes *errors.QueryError) { + defer func() { + if err := recover(); err != nil { + if err, ok := err.(syntaxError); ok { + errRes = errors.Errorf("syntax error: %s", err) + errRes.Locations = []errors.Location{l.Location()} + return + } + panic(err) + } + }() + + f() + return +} + +func (l *Lexer) Peek() rune { + return l.next +} + +func (l *Lexer) Consume() { + l.descComment = "" + for { + l.next = l.sc.Scan() + if l.next == ',' { + continue + } + if l.next == '#' { + if l.sc.Peek() == ' ' { + l.sc.Next() + } + if l.descComment != "" { + l.descComment += "\n" + } + for { + next := l.sc.Next() + if next == '\n' || next == scanner.EOF { + break + } + l.descComment += string(next) + } + continue + } + break + } +} + +func (l *Lexer) ConsumeIdent() string { + name := l.sc.TokenText() + l.ConsumeToken(scanner.Ident) + return name +} + +func (l *Lexer) ConsumeIdentWithLoc() Ident { + loc := l.Location() + name := l.sc.TokenText() + l.ConsumeToken(scanner.Ident) + return Ident{name, loc} +} + +func (l *Lexer) ConsumeKeyword(keyword string) { + if l.next != scanner.Ident || l.sc.TokenText() != keyword { + l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %q", l.sc.TokenText(), keyword)) + } + l.Consume() +} + +func (l *Lexer) ConsumeLiteral() *BasicLit { + lit := &BasicLit{Type: l.next, Text: l.sc.TokenText()} + l.Consume() + return lit +} + +func (l *Lexer) ConsumeToken(expected rune) { + if l.next != expected { + l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %s", l.sc.TokenText(), scanner.TokenString(expected))) + } + l.Consume() +} + +func (l *Lexer) DescComment() 
string { + return l.descComment +} + +func (l *Lexer) SyntaxError(message string) { + panic(syntaxError(message)) +} + +func (l *Lexer) Location() errors.Location { + return errors.Location{ + Line: l.sc.Line, + Column: l.sc.Column, + } +} diff --git a/vendor/github.com/neelance/graphql-go/internal/common/literals.go b/vendor/github.com/neelance/graphql-go/internal/common/literals.go new file mode 100644 index 00000000..d1c84e3a --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/common/literals.go @@ -0,0 +1,206 @@ +package common + +import ( + "strconv" + "strings" + "text/scanner" + + "github.com/neelance/graphql-go/errors" +) + +type Literal interface { + Value(vars map[string]interface{}) interface{} + String() string + Location() errors.Location +} + +type BasicLit struct { + Type rune + Text string + Loc errors.Location +} + +func (lit *BasicLit) Value(vars map[string]interface{}) interface{} { + switch lit.Type { + case scanner.Int: + value, err := strconv.ParseInt(lit.Text, 10, 32) + if err != nil { + panic(err) + } + return int32(value) + + case scanner.Float: + value, err := strconv.ParseFloat(lit.Text, 64) + if err != nil { + panic(err) + } + return value + + case scanner.String: + value, err := strconv.Unquote(lit.Text) + if err != nil { + panic(err) + } + return value + + case scanner.Ident: + switch lit.Text { + case "true": + return true + case "false": + return false + default: + return lit.Text + } + + default: + panic("invalid literal") + } +} + +func (lit *BasicLit) String() string { + return lit.Text +} + +func (lit *BasicLit) Location() errors.Location { + return lit.Loc +} + +type ListLit struct { + Entries []Literal + Loc errors.Location +} + +func (lit *ListLit) Value(vars map[string]interface{}) interface{} { + entries := make([]interface{}, len(lit.Entries)) + for i, entry := range lit.Entries { + entries[i] = entry.Value(vars) + } + return entries +} + +func (lit *ListLit) String() string { + entries := make([]string, 
len(lit.Entries)) + for i, entry := range lit.Entries { + entries[i] = entry.String() + } + return "[" + strings.Join(entries, ", ") + "]" +} + +func (lit *ListLit) Location() errors.Location { + return lit.Loc +} + +type ObjectLit struct { + Fields []*ObjectLitField + Loc errors.Location +} + +type ObjectLitField struct { + Name Ident + Value Literal +} + +func (lit *ObjectLit) Value(vars map[string]interface{}) interface{} { + fields := make(map[string]interface{}, len(lit.Fields)) + for _, f := range lit.Fields { + fields[f.Name.Name] = f.Value.Value(vars) + } + return fields +} + +func (lit *ObjectLit) String() string { + entries := make([]string, 0, len(lit.Fields)) + for _, f := range lit.Fields { + entries = append(entries, f.Name.Name+": "+f.Value.String()) + } + return "{" + strings.Join(entries, ", ") + "}" +} + +func (lit *ObjectLit) Location() errors.Location { + return lit.Loc +} + +type NullLit struct { + Loc errors.Location +} + +func (lit *NullLit) Value(vars map[string]interface{}) interface{} { + return nil +} + +func (lit *NullLit) String() string { + return "null" +} + +func (lit *NullLit) Location() errors.Location { + return lit.Loc +} + +type Variable struct { + Name string + Loc errors.Location +} + +func (v Variable) Value(vars map[string]interface{}) interface{} { + return vars[v.Name] +} + +func (v Variable) String() string { + return "$" + v.Name +} + +func (v *Variable) Location() errors.Location { + return v.Loc +} + +func ParseLiteral(l *Lexer, constOnly bool) Literal { + loc := l.Location() + switch l.Peek() { + case '$': + if constOnly { + l.SyntaxError("variable not allowed") + panic("unreachable") + } + l.ConsumeToken('$') + return &Variable{l.ConsumeIdent(), loc} + + case scanner.Int, scanner.Float, scanner.String, scanner.Ident: + lit := l.ConsumeLiteral() + if lit.Type == scanner.Ident && lit.Text == "null" { + return &NullLit{loc} + } + lit.Loc = loc + return lit + case '-': + l.ConsumeToken('-') + lit := l.ConsumeLiteral() + 
lit.Text = "-" + lit.Text + lit.Loc = loc + return lit + case '[': + l.ConsumeToken('[') + var list []Literal + for l.Peek() != ']' { + list = append(list, ParseLiteral(l, constOnly)) + } + l.ConsumeToken(']') + return &ListLit{list, loc} + + case '{': + l.ConsumeToken('{') + var fields []*ObjectLitField + for l.Peek() != '}' { + name := l.ConsumeIdentWithLoc() + l.ConsumeToken(':') + value := ParseLiteral(l, constOnly) + fields = append(fields, &ObjectLitField{name, value}) + } + l.ConsumeToken('}') + return &ObjectLit{fields, loc} + + default: + l.SyntaxError("invalid value") + panic("unreachable") + } +} diff --git a/vendor/github.com/neelance/graphql-go/internal/common/types.go b/vendor/github.com/neelance/graphql-go/internal/common/types.go new file mode 100644 index 00000000..6a017f56 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/common/types.go @@ -0,0 +1,80 @@ +package common + +import ( + "github.com/neelance/graphql-go/errors" +) + +type Type interface { + Kind() string + String() string +} + +type List struct { + OfType Type +} + +type NonNull struct { + OfType Type +} + +type TypeName struct { + Ident +} + +func (*List) Kind() string { return "LIST" } +func (*NonNull) Kind() string { return "NON_NULL" } +func (*TypeName) Kind() string { panic("TypeName needs to be resolved to actual type") } + +func (t *List) String() string { return "[" + t.OfType.String() + "]" } +func (t *NonNull) String() string { return t.OfType.String() + "!" } +func (*TypeName) String() string { panic("TypeName needs to be resolved to actual type") } + +func ParseType(l *Lexer) Type { + t := parseNullType(l) + if l.Peek() == '!' 
{ + l.ConsumeToken('!') + return &NonNull{OfType: t} + } + return t +} + +func parseNullType(l *Lexer) Type { + if l.Peek() == '[' { + l.ConsumeToken('[') + ofType := ParseType(l) + l.ConsumeToken(']') + return &List{OfType: ofType} + } + + return &TypeName{Ident: l.ConsumeIdentWithLoc()} +} + +type Resolver func(name string) Type + +func ResolveType(t Type, resolver Resolver) (Type, *errors.QueryError) { + switch t := t.(type) { + case *List: + ofType, err := ResolveType(t.OfType, resolver) + if err != nil { + return nil, err + } + return &List{OfType: ofType}, nil + case *NonNull: + ofType, err := ResolveType(t.OfType, resolver) + if err != nil { + return nil, err + } + return &NonNull{OfType: ofType}, nil + case *TypeName: + refT := resolver(t.Name) + if refT == nil { + err := errors.Errorf("Unknown type %q.", t.Name) + err.Rule = "KnownTypeNames" + err.Locations = []errors.Location{t.Loc} + return nil, err + } + return refT, nil + default: + return t, nil + } +} diff --git a/vendor/github.com/neelance/graphql-go/internal/common/values.go b/vendor/github.com/neelance/graphql-go/internal/common/values.go new file mode 100644 index 00000000..794f68de --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/common/values.go @@ -0,0 +1,77 @@ +package common + +import ( + "github.com/neelance/graphql-go/errors" +) + +type InputValue struct { + Name Ident + Type Type + Default Literal + Desc string + Loc errors.Location + TypeLoc errors.Location +} + +type InputValueList []*InputValue + +func (l InputValueList) Get(name string) *InputValue { + for _, v := range l { + if v.Name.Name == name { + return v + } + } + return nil +} + +func ParseInputValue(l *Lexer) *InputValue { + p := &InputValue{} + p.Loc = l.Location() + p.Desc = l.DescComment() + p.Name = l.ConsumeIdentWithLoc() + l.ConsumeToken(':') + p.TypeLoc = l.Location() + p.Type = ParseType(l) + if l.Peek() == '=' { + l.ConsumeToken('=') + p.Default = ParseLiteral(l, true) + } + return p +} + +type 
Argument struct { + Name Ident + Value Literal +} + +type ArgumentList []Argument + +func (l ArgumentList) Get(name string) (Literal, bool) { + for _, arg := range l { + if arg.Name.Name == name { + return arg.Value, true + } + } + return nil, false +} + +func (l ArgumentList) MustGet(name string) Literal { + value, ok := l.Get(name) + if !ok { + panic("argument not found") + } + return value +} + +func ParseArguments(l *Lexer) ArgumentList { + var args ArgumentList + l.ConsumeToken('(') + for l.Peek() != ')' { + name := l.ConsumeIdentWithLoc() + l.ConsumeToken(':') + value := ParseLiteral(l, false) + args = append(args, Argument{Name: name, Value: value}) + } + l.ConsumeToken(')') + return args +} diff --git a/vendor/github.com/neelance/graphql-go/internal/exec/exec.go b/vendor/github.com/neelance/graphql-go/internal/exec/exec.go new file mode 100644 index 00000000..39b6456a --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/exec/exec.go @@ -0,0 +1,313 @@ +package exec + +import ( + "bytes" + "context" + "encoding/json" + "reflect" + "sync" + + "github.com/neelance/graphql-go/errors" + "github.com/neelance/graphql-go/internal/common" + "github.com/neelance/graphql-go/internal/exec/resolvable" + "github.com/neelance/graphql-go/internal/exec/selected" + "github.com/neelance/graphql-go/internal/query" + "github.com/neelance/graphql-go/internal/schema" + "github.com/neelance/graphql-go/log" + "github.com/neelance/graphql-go/trace" +) + +type Request struct { + selected.Request + Limiter chan struct{} + Tracer trace.Tracer + Logger log.Logger +} + +type fieldResult struct { + name string + value []byte +} + +func (r *Request) handlePanic(ctx context.Context) { + if value := recover(); value != nil { + r.Logger.LogPanic(ctx, value) + r.AddError(makePanicError(value)) + } +} + +func makePanicError(value interface{}) *errors.QueryError { + return errors.Errorf("graphql: panic occurred: %v", value) +} + +func (r *Request) Execute(ctx context.Context, s 
*resolvable.Schema, op *query.Operation) ([]byte, []*errors.QueryError) { + var out bytes.Buffer + func() { + defer r.handlePanic(ctx) + sels := selected.ApplyOperation(&r.Request, s, op) + r.execSelections(ctx, sels, nil, s.Resolver, &out, op.Type == query.Mutation) + }() + + if err := ctx.Err(); err != nil { + return nil, []*errors.QueryError{errors.Errorf("%s", err)} + } + + return out.Bytes(), r.Errs +} + +type fieldToExec struct { + field *selected.SchemaField + sels []selected.Selection + resolver reflect.Value + out *bytes.Buffer +} + +func (r *Request) execSelections(ctx context.Context, sels []selected.Selection, path *pathSegment, resolver reflect.Value, out *bytes.Buffer, serially bool) { + async := !serially && selected.HasAsyncSel(sels) + + var fields []*fieldToExec + collectFieldsToResolve(sels, resolver, &fields, make(map[string]*fieldToExec)) + + if async { + var wg sync.WaitGroup + wg.Add(len(fields)) + for _, f := range fields { + go func(f *fieldToExec) { + defer wg.Done() + defer r.handlePanic(ctx) + f.out = new(bytes.Buffer) + execFieldSelection(ctx, r, f, &pathSegment{path, f.field.Alias}, true) + }(f) + } + wg.Wait() + } + + out.WriteByte('{') + for i, f := range fields { + if i > 0 { + out.WriteByte(',') + } + out.WriteByte('"') + out.WriteString(f.field.Alias) + out.WriteByte('"') + out.WriteByte(':') + if async { + out.Write(f.out.Bytes()) + continue + } + f.out = out + execFieldSelection(ctx, r, f, &pathSegment{path, f.field.Alias}, false) + } + out.WriteByte('}') +} + +func collectFieldsToResolve(sels []selected.Selection, resolver reflect.Value, fields *[]*fieldToExec, fieldByAlias map[string]*fieldToExec) { + for _, sel := range sels { + switch sel := sel.(type) { + case *selected.SchemaField: + field, ok := fieldByAlias[sel.Alias] + if !ok { // validation already checked for conflict (TODO) + field = &fieldToExec{field: sel, resolver: resolver} + fieldByAlias[sel.Alias] = field + *fields = append(*fields, field) + } + field.sels = 
append(field.sels, sel.Sels...) + + case *selected.TypenameField: + sf := &selected.SchemaField{ + Field: resolvable.MetaFieldTypename, + Alias: sel.Alias, + FixedResult: reflect.ValueOf(typeOf(sel, resolver)), + } + *fields = append(*fields, &fieldToExec{field: sf, resolver: resolver}) + + case *selected.TypeAssertion: + out := resolver.Method(sel.MethodIndex).Call(nil) + if !out[1].Bool() { + continue + } + collectFieldsToResolve(sel.Sels, out[0], fields, fieldByAlias) + + default: + panic("unreachable") + } + } +} + +func typeOf(tf *selected.TypenameField, resolver reflect.Value) string { + if len(tf.TypeAssertions) == 0 { + return tf.Name + } + for name, a := range tf.TypeAssertions { + out := resolver.Method(a.MethodIndex).Call(nil) + if out[1].Bool() { + return name + } + } + return "" +} + +func execFieldSelection(ctx context.Context, r *Request, f *fieldToExec, path *pathSegment, applyLimiter bool) { + if applyLimiter { + r.Limiter <- struct{}{} + } + + var result reflect.Value + var err *errors.QueryError + + traceCtx, finish := r.Tracer.TraceField(ctx, f.field.TraceLabel, f.field.TypeName, f.field.Name, !f.field.Async, f.field.Args) + defer func() { + finish(err) + }() + + err = func() (err *errors.QueryError) { + defer func() { + if panicValue := recover(); panicValue != nil { + r.Logger.LogPanic(ctx, panicValue) + err = makePanicError(panicValue) + err.Path = path.toSlice() + } + }() + + if f.field.FixedResult.IsValid() { + result = f.field.FixedResult + return nil + } + + if err := traceCtx.Err(); err != nil { + return errors.Errorf("%s", err) // don't execute any more resolvers if context got cancelled + } + + var in []reflect.Value + if f.field.HasContext { + in = append(in, reflect.ValueOf(traceCtx)) + } + if f.field.ArgsPacker != nil { + in = append(in, f.field.PackedArgs) + } + callOut := f.resolver.Method(f.field.MethodIndex).Call(in) + result = callOut[0] + if f.field.HasError && !callOut[1].IsNil() { + resolverErr := 
callOut[1].Interface().(error) + err := errors.Errorf("%s", resolverErr) + err.Path = path.toSlice() + err.ResolverError = resolverErr + return err + } + return nil + }() + + if applyLimiter { + <-r.Limiter + } + + if err != nil { + r.AddError(err) + f.out.WriteString("null") // TODO handle non-nil + return + } + + r.execSelectionSet(traceCtx, f.sels, f.field.Type, path, result, f.out) +} + +func (r *Request) execSelectionSet(ctx context.Context, sels []selected.Selection, typ common.Type, path *pathSegment, resolver reflect.Value, out *bytes.Buffer) { + t, nonNull := unwrapNonNull(typ) + switch t := t.(type) { + case *schema.Object, *schema.Interface, *schema.Union: + if resolver.Kind() == reflect.Ptr && resolver.IsNil() { + if nonNull { + panic(errors.Errorf("got nil for non-null %q", t)) + } + out.WriteString("null") + return + } + + r.execSelections(ctx, sels, path, resolver, out, false) + return + } + + if !nonNull { + if resolver.IsNil() { + out.WriteString("null") + return + } + resolver = resolver.Elem() + } + + switch t := t.(type) { + case *common.List: + l := resolver.Len() + + if selected.HasAsyncSel(sels) { + var wg sync.WaitGroup + wg.Add(l) + entryouts := make([]bytes.Buffer, l) + for i := 0; i < l; i++ { + go func(i int) { + defer wg.Done() + defer r.handlePanic(ctx) + r.execSelectionSet(ctx, sels, t.OfType, &pathSegment{path, i}, resolver.Index(i), &entryouts[i]) + }(i) + } + wg.Wait() + + out.WriteByte('[') + for i, entryout := range entryouts { + if i > 0 { + out.WriteByte(',') + } + out.Write(entryout.Bytes()) + } + out.WriteByte(']') + return + } + + out.WriteByte('[') + for i := 0; i < l; i++ { + if i > 0 { + out.WriteByte(',') + } + r.execSelectionSet(ctx, sels, t.OfType, &pathSegment{path, i}, resolver.Index(i), out) + } + out.WriteByte(']') + + case *schema.Scalar: + v := resolver.Interface() + data, err := json.Marshal(v) + if err != nil { + panic(errors.Errorf("could not marshal %v", v)) + } + out.Write(data) + + case *schema.Enum: + 
out.WriteByte('"') + out.WriteString(resolver.String()) + out.WriteByte('"') + + default: + panic("unreachable") + } +} + +func unwrapNonNull(t common.Type) (common.Type, bool) { + if nn, ok := t.(*common.NonNull); ok { + return nn.OfType, true + } + return t, false +} + +type marshaler interface { + MarshalJSON() ([]byte, error) +} + +type pathSegment struct { + parent *pathSegment + value interface{} +} + +func (p *pathSegment) toSlice() []interface{} { + if p == nil { + return nil + } + return append(p.parent.toSlice(), p.value) +} diff --git a/vendor/github.com/neelance/graphql-go/internal/exec/packer/packer.go b/vendor/github.com/neelance/graphql-go/internal/exec/packer/packer.go new file mode 100644 index 00000000..02b9d832 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/exec/packer/packer.go @@ -0,0 +1,367 @@ +package packer + +import ( + "fmt" + "math" + "reflect" + "strings" + + "github.com/neelance/graphql-go/errors" + "github.com/neelance/graphql-go/internal/common" + "github.com/neelance/graphql-go/internal/schema" +) + +type packer interface { + Pack(value interface{}) (reflect.Value, error) +} + +type Builder struct { + packerMap map[typePair]*packerMapEntry + structPackers []*StructPacker +} + +type typePair struct { + graphQLType common.Type + resolverType reflect.Type +} + +type packerMapEntry struct { + packer packer + targets []*packer +} + +func NewBuilder() *Builder { + return &Builder{ + packerMap: make(map[typePair]*packerMapEntry), + } +} + +func (b *Builder) Finish() error { + for _, entry := range b.packerMap { + for _, target := range entry.targets { + *target = entry.packer + } + } + + for _, p := range b.structPackers { + p.defaultStruct = reflect.New(p.structType).Elem() + for _, f := range p.fields { + if defaultVal := f.field.Default; defaultVal != nil { + v, err := f.fieldPacker.Pack(defaultVal.Value(nil)) + if err != nil { + return err + } + p.defaultStruct.FieldByIndex(f.fieldIndex).Set(v) + } + } + } + + return 
nil +} + +func (b *Builder) assignPacker(target *packer, schemaType common.Type, reflectType reflect.Type) error { + k := typePair{schemaType, reflectType} + ref, ok := b.packerMap[k] + if !ok { + ref = &packerMapEntry{} + b.packerMap[k] = ref + var err error + ref.packer, err = b.makePacker(schemaType, reflectType) + if err != nil { + return err + } + } + ref.targets = append(ref.targets, target) + return nil +} + +func (b *Builder) makePacker(schemaType common.Type, reflectType reflect.Type) (packer, error) { + t, nonNull := unwrapNonNull(schemaType) + if !nonNull { + if reflectType.Kind() != reflect.Ptr { + return nil, fmt.Errorf("%s is not a pointer", reflectType) + } + elemType := reflectType.Elem() + addPtr := true + if _, ok := t.(*schema.InputObject); ok { + elemType = reflectType // keep pointer for input objects + addPtr = false + } + elem, err := b.makeNonNullPacker(t, elemType) + if err != nil { + return nil, err + } + return &nullPacker{ + elemPacker: elem, + valueType: reflectType, + addPtr: addPtr, + }, nil + } + + return b.makeNonNullPacker(t, reflectType) +} + +func (b *Builder) makeNonNullPacker(schemaType common.Type, reflectType reflect.Type) (packer, error) { + if u, ok := reflect.New(reflectType).Interface().(Unmarshaler); ok { + if !u.ImplementsGraphQLType(schemaType.String()) { + return nil, fmt.Errorf("can not unmarshal %s into %s", schemaType, reflectType) + } + return &unmarshalerPacker{ + ValueType: reflectType, + }, nil + } + + switch t := schemaType.(type) { + case *schema.Scalar: + return &ValuePacker{ + ValueType: reflectType, + }, nil + + case *schema.Enum: + want := reflect.TypeOf("") + if reflectType != want { + return nil, fmt.Errorf("wrong type, expected %s", want) + } + return &ValuePacker{ + ValueType: reflectType, + }, nil + + case *schema.InputObject: + e, err := b.MakeStructPacker(t.Values, reflectType) + if err != nil { + return nil, err + } + return e, nil + + case *common.List: + if reflectType.Kind() != reflect.Slice { 
+ return nil, fmt.Errorf("expected slice, got %s", reflectType) + } + p := &listPacker{ + sliceType: reflectType, + } + if err := b.assignPacker(&p.elem, t.OfType, reflectType.Elem()); err != nil { + return nil, err + } + return p, nil + + case *schema.Object, *schema.Interface, *schema.Union: + return nil, fmt.Errorf("type of kind %s can not be used as input", t.Kind()) + + default: + panic("unreachable") + } +} + +func (b *Builder) MakeStructPacker(values common.InputValueList, typ reflect.Type) (*StructPacker, error) { + structType := typ + usePtr := false + if typ.Kind() == reflect.Ptr { + structType = typ.Elem() + usePtr = true + } + if structType.Kind() != reflect.Struct { + return nil, fmt.Errorf("expected struct or pointer to struct, got %s", typ) + } + + var fields []*structPackerField + for _, v := range values { + fe := &structPackerField{field: v} + fx := func(n string) bool { + return strings.EqualFold(stripUnderscore(n), stripUnderscore(v.Name.Name)) + } + + sf, ok := structType.FieldByNameFunc(fx) + if !ok { + return nil, fmt.Errorf("missing argument %q", v.Name) + } + if sf.PkgPath != "" { + return nil, fmt.Errorf("field %q must be exported", sf.Name) + } + fe.fieldIndex = sf.Index + + ft := v.Type + if v.Default != nil { + ft, _ = unwrapNonNull(ft) + ft = &common.NonNull{OfType: ft} + } + + if err := b.assignPacker(&fe.fieldPacker, ft, sf.Type); err != nil { + return nil, fmt.Errorf("field %q: %s", sf.Name, err) + } + + fields = append(fields, fe) + } + + p := &StructPacker{ + structType: structType, + usePtr: usePtr, + fields: fields, + } + b.structPackers = append(b.structPackers, p) + return p, nil +} + +type StructPacker struct { + structType reflect.Type + usePtr bool + defaultStruct reflect.Value + fields []*structPackerField +} + +type structPackerField struct { + field *common.InputValue + fieldIndex []int + fieldPacker packer +} + +func (p *StructPacker) Pack(value interface{}) (reflect.Value, error) { + if value == nil { + return 
reflect.Value{}, errors.Errorf("got null for non-null") + } + + values := value.(map[string]interface{}) + v := reflect.New(p.structType) + v.Elem().Set(p.defaultStruct) + for _, f := range p.fields { + if value, ok := values[f.field.Name.Name]; ok { + packed, err := f.fieldPacker.Pack(value) + if err != nil { + return reflect.Value{}, err + } + v.Elem().FieldByIndex(f.fieldIndex).Set(packed) + } + } + if !p.usePtr { + return v.Elem(), nil + } + return v, nil +} + +type listPacker struct { + sliceType reflect.Type + elem packer +} + +func (e *listPacker) Pack(value interface{}) (reflect.Value, error) { + list, ok := value.([]interface{}) + if !ok { + list = []interface{}{value} + } + + v := reflect.MakeSlice(e.sliceType, len(list), len(list)) + for i := range list { + packed, err := e.elem.Pack(list[i]) + if err != nil { + return reflect.Value{}, err + } + v.Index(i).Set(packed) + } + return v, nil +} + +type nullPacker struct { + elemPacker packer + valueType reflect.Type + addPtr bool +} + +func (p *nullPacker) Pack(value interface{}) (reflect.Value, error) { + if value == nil { + return reflect.Zero(p.valueType), nil + } + + v, err := p.elemPacker.Pack(value) + if err != nil { + return reflect.Value{}, err + } + + if p.addPtr { + ptr := reflect.New(p.valueType.Elem()) + ptr.Elem().Set(v) + return ptr, nil + } + + return v, nil +} + +type ValuePacker struct { + ValueType reflect.Type +} + +func (p *ValuePacker) Pack(value interface{}) (reflect.Value, error) { + if value == nil { + return reflect.Value{}, errors.Errorf("got null for non-null") + } + + coerced, err := unmarshalInput(p.ValueType, value) + if err != nil { + return reflect.Value{}, fmt.Errorf("could not unmarshal %#v (%T) into %s: %s", value, value, p.ValueType, err) + } + return reflect.ValueOf(coerced), nil +} + +type unmarshalerPacker struct { + ValueType reflect.Type +} + +func (p *unmarshalerPacker) Pack(value interface{}) (reflect.Value, error) { + if value == nil { + return reflect.Value{}, 
errors.Errorf("got null for non-null") + } + + v := reflect.New(p.ValueType) + if err := v.Interface().(Unmarshaler).UnmarshalGraphQL(value); err != nil { + return reflect.Value{}, err + } + return v.Elem(), nil +} + +type Unmarshaler interface { + ImplementsGraphQLType(name string) bool + UnmarshalGraphQL(input interface{}) error +} + +func unmarshalInput(typ reflect.Type, input interface{}) (interface{}, error) { + if reflect.TypeOf(input) == typ { + return input, nil + } + + switch typ.Kind() { + case reflect.Int32: + switch input := input.(type) { + case int: + if input < math.MinInt32 || input > math.MaxInt32 { + return nil, fmt.Errorf("not a 32-bit integer") + } + return int32(input), nil + case float64: + coerced := int32(input) + if input < math.MinInt32 || input > math.MaxInt32 || float64(coerced) != input { + return nil, fmt.Errorf("not a 32-bit integer") + } + return coerced, nil + } + + case reflect.Float64: + switch input := input.(type) { + case int32: + return float64(input), nil + case int: + return float64(input), nil + } + } + + return nil, fmt.Errorf("incompatible type") +} + +func unwrapNonNull(t common.Type) (common.Type, bool) { + if nn, ok := t.(*common.NonNull); ok { + return nn.OfType, true + } + return t, false +} + +func stripUnderscore(s string) string { + return strings.Replace(s, "_", "", -1) +} diff --git a/vendor/github.com/neelance/graphql-go/internal/exec/resolvable/meta.go b/vendor/github.com/neelance/graphql-go/internal/exec/resolvable/meta.go new file mode 100644 index 00000000..f9b0bb92 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/exec/resolvable/meta.go @@ -0,0 +1,58 @@ +package resolvable + +import ( + "fmt" + "reflect" + + "github.com/neelance/graphql-go/internal/common" + "github.com/neelance/graphql-go/internal/schema" + "github.com/neelance/graphql-go/introspection" +) + +var MetaSchema *Object +var MetaType *Object + +func init() { + var err error + b := newBuilder(schema.Meta) + + metaSchema := 
schema.Meta.Types["__Schema"].(*schema.Object) + MetaSchema, err = b.makeObjectExec(metaSchema.Name, metaSchema.Fields, nil, false, reflect.TypeOf(&introspection.Schema{})) + if err != nil { + panic(err) + } + + metaType := schema.Meta.Types["__Type"].(*schema.Object) + MetaType, err = b.makeObjectExec(metaType.Name, metaType.Fields, nil, false, reflect.TypeOf(&introspection.Type{})) + if err != nil { + panic(err) + } + + if err := b.finish(); err != nil { + panic(err) + } +} + +var MetaFieldTypename = Field{ + Field: schema.Field{ + Name: "__typename", + Type: &common.NonNull{OfType: schema.Meta.Types["String"]}, + }, + TraceLabel: fmt.Sprintf("GraphQL field: __typename"), +} + +var MetaFieldSchema = Field{ + Field: schema.Field{ + Name: "__schema", + Type: schema.Meta.Types["__Schema"], + }, + TraceLabel: fmt.Sprintf("GraphQL field: __schema"), +} + +var MetaFieldType = Field{ + Field: schema.Field{ + Name: "__type", + Type: schema.Meta.Types["__Type"], + }, + TraceLabel: fmt.Sprintf("GraphQL field: __type"), +} diff --git a/vendor/github.com/neelance/graphql-go/internal/exec/resolvable/resolvable.go b/vendor/github.com/neelance/graphql-go/internal/exec/resolvable/resolvable.go new file mode 100644 index 00000000..c681cf20 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/exec/resolvable/resolvable.go @@ -0,0 +1,331 @@ +package resolvable + +import ( + "context" + "fmt" + "reflect" + "strings" + + "github.com/neelance/graphql-go/internal/common" + "github.com/neelance/graphql-go/internal/exec/packer" + "github.com/neelance/graphql-go/internal/schema" +) + +type Schema struct { + schema.Schema + Query Resolvable + Mutation Resolvable + Resolver reflect.Value +} + +type Resolvable interface { + isResolvable() +} + +type Object struct { + Name string + Fields map[string]*Field + TypeAssertions map[string]*TypeAssertion +} + +type Field struct { + schema.Field + TypeName string + MethodIndex int + HasContext bool + ArgsPacker *packer.StructPacker + 
HasError bool + ValueExec Resolvable + TraceLabel string +} + +type TypeAssertion struct { + MethodIndex int + TypeExec Resolvable +} + +type List struct { + Elem Resolvable +} + +type Scalar struct{} + +func (*Object) isResolvable() {} +func (*List) isResolvable() {} +func (*Scalar) isResolvable() {} + +func ApplyResolver(s *schema.Schema, resolver interface{}) (*Schema, error) { + b := newBuilder(s) + + var query, mutation Resolvable + + if t, ok := s.EntryPoints["query"]; ok { + if err := b.assignExec(&query, t, reflect.TypeOf(resolver)); err != nil { + return nil, err + } + } + + if t, ok := s.EntryPoints["mutation"]; ok { + if err := b.assignExec(&mutation, t, reflect.TypeOf(resolver)); err != nil { + return nil, err + } + } + + if err := b.finish(); err != nil { + return nil, err + } + + return &Schema{ + Schema: *s, + Resolver: reflect.ValueOf(resolver), + Query: query, + Mutation: mutation, + }, nil +} + +type execBuilder struct { + schema *schema.Schema + resMap map[typePair]*resMapEntry + packerBuilder *packer.Builder +} + +type typePair struct { + graphQLType common.Type + resolverType reflect.Type +} + +type resMapEntry struct { + exec Resolvable + targets []*Resolvable +} + +func newBuilder(s *schema.Schema) *execBuilder { + return &execBuilder{ + schema: s, + resMap: make(map[typePair]*resMapEntry), + packerBuilder: packer.NewBuilder(), + } +} + +func (b *execBuilder) finish() error { + for _, entry := range b.resMap { + for _, target := range entry.targets { + *target = entry.exec + } + } + + return b.packerBuilder.Finish() +} + +func (b *execBuilder) assignExec(target *Resolvable, t common.Type, resolverType reflect.Type) error { + k := typePair{t, resolverType} + ref, ok := b.resMap[k] + if !ok { + ref = &resMapEntry{} + b.resMap[k] = ref + var err error + ref.exec, err = b.makeExec(t, resolverType) + if err != nil { + return err + } + } + ref.targets = append(ref.targets, target) + return nil +} + +func (b *execBuilder) makeExec(t common.Type, 
resolverType reflect.Type) (Resolvable, error) { + var nonNull bool + t, nonNull = unwrapNonNull(t) + + switch t := t.(type) { + case *schema.Object: + return b.makeObjectExec(t.Name, t.Fields, nil, nonNull, resolverType) + + case *schema.Interface: + return b.makeObjectExec(t.Name, t.Fields, t.PossibleTypes, nonNull, resolverType) + + case *schema.Union: + return b.makeObjectExec(t.Name, nil, t.PossibleTypes, nonNull, resolverType) + } + + if !nonNull { + if resolverType.Kind() != reflect.Ptr { + return nil, fmt.Errorf("%s is not a pointer", resolverType) + } + resolverType = resolverType.Elem() + } + + switch t := t.(type) { + case *schema.Scalar: + return makeScalarExec(t, resolverType) + + case *schema.Enum: + return &Scalar{}, nil + + case *common.List: + if resolverType.Kind() != reflect.Slice { + return nil, fmt.Errorf("%s is not a slice", resolverType) + } + e := &List{} + if err := b.assignExec(&e.Elem, t.OfType, resolverType.Elem()); err != nil { + return nil, err + } + return e, nil + + default: + panic("invalid type") + } +} + +func makeScalarExec(t *schema.Scalar, resolverType reflect.Type) (Resolvable, error) { + implementsType := false + switch r := reflect.New(resolverType).Interface().(type) { + case *int32: + implementsType = (t.Name == "Int") + case *float64: + implementsType = (t.Name == "Float") + case *string: + implementsType = (t.Name == "String") + case *bool: + implementsType = (t.Name == "Boolean") + case packer.Unmarshaler: + implementsType = r.ImplementsGraphQLType(t.Name) + } + if !implementsType { + return nil, fmt.Errorf("can not use %s as %s", resolverType, t.Name) + } + return &Scalar{}, nil +} + +func (b *execBuilder) makeObjectExec(typeName string, fields schema.FieldList, possibleTypes []*schema.Object, nonNull bool, resolverType reflect.Type) (*Object, error) { + if !nonNull { + if resolverType.Kind() != reflect.Ptr && resolverType.Kind() != reflect.Interface { + return nil, fmt.Errorf("%s is not a pointer or interface", 
resolverType) + } + } + + methodHasReceiver := resolverType.Kind() != reflect.Interface + + Fields := make(map[string]*Field) + for _, f := range fields { + methodIndex := findMethod(resolverType, f.Name) + if methodIndex == -1 { + hint := "" + if findMethod(reflect.PtrTo(resolverType), f.Name) != -1 { + hint = " (hint: the method exists on the pointer type)" + } + return nil, fmt.Errorf("%s does not resolve %q: missing method for field %q%s", resolverType, typeName, f.Name, hint) + } + + m := resolverType.Method(methodIndex) + fe, err := b.makeFieldExec(typeName, f, m, methodIndex, methodHasReceiver) + if err != nil { + return nil, fmt.Errorf("%s\n\treturned by (%s).%s", err, resolverType, m.Name) + } + Fields[f.Name] = fe + } + + typeAssertions := make(map[string]*TypeAssertion) + for _, impl := range possibleTypes { + methodIndex := findMethod(resolverType, "to"+impl.Name) + if methodIndex == -1 { + return nil, fmt.Errorf("%s does not resolve %q: missing method %q to convert to %q", resolverType, typeName, "to"+impl.Name, impl.Name) + } + if resolverType.Method(methodIndex).Type.NumOut() != 2 { + return nil, fmt.Errorf("%s does not resolve %q: method %q should return a value and a bool indicating success", resolverType, typeName, "to"+impl.Name) + } + a := &TypeAssertion{ + MethodIndex: methodIndex, + } + if err := b.assignExec(&a.TypeExec, impl, resolverType.Method(methodIndex).Type.Out(0)); err != nil { + return nil, err + } + typeAssertions[impl.Name] = a + } + + return &Object{ + Name: typeName, + Fields: Fields, + TypeAssertions: typeAssertions, + }, nil +} + +var contextType = reflect.TypeOf((*context.Context)(nil)).Elem() +var errorType = reflect.TypeOf((*error)(nil)).Elem() + +func (b *execBuilder) makeFieldExec(typeName string, f *schema.Field, m reflect.Method, methodIndex int, methodHasReceiver bool) (*Field, error) { + in := make([]reflect.Type, m.Type.NumIn()) + for i := range in { + in[i] = m.Type.In(i) + } + if methodHasReceiver { + in = in[1:] // 
first parameter is receiver + } + + hasContext := len(in) > 0 && in[0] == contextType + if hasContext { + in = in[1:] + } + + var argsPacker *packer.StructPacker + if len(f.Args) > 0 { + if len(in) == 0 { + return nil, fmt.Errorf("must have parameter for field arguments") + } + var err error + argsPacker, err = b.packerBuilder.MakeStructPacker(f.Args, in[0]) + if err != nil { + return nil, err + } + in = in[1:] + } + + if len(in) > 0 { + return nil, fmt.Errorf("too many parameters") + } + + if m.Type.NumOut() > 2 { + return nil, fmt.Errorf("too many return values") + } + + hasError := m.Type.NumOut() == 2 + if hasError { + if m.Type.Out(1) != errorType { + return nil, fmt.Errorf(`must have "error" as its second return value`) + } + } + + fe := &Field{ + Field: *f, + TypeName: typeName, + MethodIndex: methodIndex, + HasContext: hasContext, + ArgsPacker: argsPacker, + HasError: hasError, + TraceLabel: fmt.Sprintf("GraphQL field: %s.%s", typeName, f.Name), + } + if err := b.assignExec(&fe.ValueExec, f.Type, m.Type.Out(0)); err != nil { + return nil, err + } + return fe, nil +} + +func findMethod(t reflect.Type, name string) int { + for i := 0; i < t.NumMethod(); i++ { + if strings.EqualFold(stripUnderscore(name), stripUnderscore(t.Method(i).Name)) { + return i + } + } + return -1 +} + +func unwrapNonNull(t common.Type) (common.Type, bool) { + if nn, ok := t.(*common.NonNull); ok { + return nn.OfType, true + } + return t, false +} + +func stripUnderscore(s string) string { + return strings.Replace(s, "_", "", -1) +} diff --git a/vendor/github.com/neelance/graphql-go/internal/exec/selected/selected.go b/vendor/github.com/neelance/graphql-go/internal/exec/selected/selected.go new file mode 100644 index 00000000..eecdcf38 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/exec/selected/selected.go @@ -0,0 +1,238 @@ +package selected + +import ( + "fmt" + "reflect" + "sync" + + "github.com/neelance/graphql-go/errors" + 
"github.com/neelance/graphql-go/internal/common" + "github.com/neelance/graphql-go/internal/exec/packer" + "github.com/neelance/graphql-go/internal/exec/resolvable" + "github.com/neelance/graphql-go/internal/query" + "github.com/neelance/graphql-go/internal/schema" + "github.com/neelance/graphql-go/introspection" +) + +type Request struct { + Schema *schema.Schema + Doc *query.Document + Vars map[string]interface{} + Mu sync.Mutex + Errs []*errors.QueryError +} + +func (r *Request) AddError(err *errors.QueryError) { + r.Mu.Lock() + r.Errs = append(r.Errs, err) + r.Mu.Unlock() +} + +func ApplyOperation(r *Request, s *resolvable.Schema, op *query.Operation) []Selection { + var obj *resolvable.Object + switch op.Type { + case query.Query: + obj = s.Query.(*resolvable.Object) + case query.Mutation: + obj = s.Mutation.(*resolvable.Object) + } + return applySelectionSet(r, obj, op.Selections) +} + +type Selection interface { + isSelection() +} + +type SchemaField struct { + resolvable.Field + Alias string + Args map[string]interface{} + PackedArgs reflect.Value + Sels []Selection + Async bool + FixedResult reflect.Value +} + +type TypeAssertion struct { + resolvable.TypeAssertion + Sels []Selection +} + +type TypenameField struct { + resolvable.Object + Alias string +} + +func (*SchemaField) isSelection() {} +func (*TypeAssertion) isSelection() {} +func (*TypenameField) isSelection() {} + +func applySelectionSet(r *Request, e *resolvable.Object, sels []query.Selection) (flattenedSels []Selection) { + for _, sel := range sels { + switch sel := sel.(type) { + case *query.Field: + field := sel + if skipByDirective(r, field.Directives) { + continue + } + + switch field.Name.Name { + case "__typename": + flattenedSels = append(flattenedSels, &TypenameField{ + Object: *e, + Alias: field.Alias.Name, + }) + + case "__schema": + flattenedSels = append(flattenedSels, &SchemaField{ + Field: resolvable.MetaFieldSchema, + Alias: field.Alias.Name, + Sels: applySelectionSet(r, 
resolvable.MetaSchema, field.Selections), + Async: true, + FixedResult: reflect.ValueOf(introspection.WrapSchema(r.Schema)), + }) + + case "__type": + p := packer.ValuePacker{ValueType: reflect.TypeOf("")} + v, err := p.Pack(field.Arguments.MustGet("name").Value(r.Vars)) + if err != nil { + r.AddError(errors.Errorf("%s", err)) + return nil + } + + t, ok := r.Schema.Types[v.String()] + if !ok { + return nil + } + + flattenedSels = append(flattenedSels, &SchemaField{ + Field: resolvable.MetaFieldType, + Alias: field.Alias.Name, + Sels: applySelectionSet(r, resolvable.MetaType, field.Selections), + Async: true, + FixedResult: reflect.ValueOf(introspection.WrapType(t)), + }) + + default: + fe := e.Fields[field.Name.Name] + + var args map[string]interface{} + var packedArgs reflect.Value + if fe.ArgsPacker != nil { + args = make(map[string]interface{}) + for _, arg := range field.Arguments { + args[arg.Name.Name] = arg.Value.Value(r.Vars) + } + var err error + packedArgs, err = fe.ArgsPacker.Pack(args) + if err != nil { + r.AddError(errors.Errorf("%s", err)) + return + } + } + + fieldSels := applyField(r, fe.ValueExec, field.Selections) + flattenedSels = append(flattenedSels, &SchemaField{ + Field: *fe, + Alias: field.Alias.Name, + Args: args, + PackedArgs: packedArgs, + Sels: fieldSels, + Async: fe.HasContext || fe.ArgsPacker != nil || fe.HasError || HasAsyncSel(fieldSels), + }) + } + + case *query.InlineFragment: + frag := sel + if skipByDirective(r, frag.Directives) { + continue + } + flattenedSels = append(flattenedSels, applyFragment(r, e, &frag.Fragment)...) + + case *query.FragmentSpread: + spread := sel + if skipByDirective(r, spread.Directives) { + continue + } + flattenedSels = append(flattenedSels, applyFragment(r, e, &r.Doc.Fragments.Get(spread.Name.Name).Fragment)...) 
+ + default: + panic("invalid type") + } + } + return +} + +func applyFragment(r *Request, e *resolvable.Object, frag *query.Fragment) []Selection { + if frag.On.Name != "" && frag.On.Name != e.Name { + a, ok := e.TypeAssertions[frag.On.Name] + if !ok { + panic(fmt.Errorf("%q does not implement %q", frag.On, e.Name)) // TODO proper error handling + } + + return []Selection{&TypeAssertion{ + TypeAssertion: *a, + Sels: applySelectionSet(r, a.TypeExec.(*resolvable.Object), frag.Selections), + }} + } + return applySelectionSet(r, e, frag.Selections) +} + +func applyField(r *Request, e resolvable.Resolvable, sels []query.Selection) []Selection { + switch e := e.(type) { + case *resolvable.Object: + return applySelectionSet(r, e, sels) + case *resolvable.List: + return applyField(r, e.Elem, sels) + case *resolvable.Scalar: + return nil + default: + panic("unreachable") + } +} + +func skipByDirective(r *Request, directives common.DirectiveList) bool { + if d := directives.Get("skip"); d != nil { + p := packer.ValuePacker{ValueType: reflect.TypeOf(false)} + v, err := p.Pack(d.Args.MustGet("if").Value(r.Vars)) + if err != nil { + r.AddError(errors.Errorf("%s", err)) + } + if err == nil && v.Bool() { + return true + } + } + + if d := directives.Get("include"); d != nil { + p := packer.ValuePacker{ValueType: reflect.TypeOf(false)} + v, err := p.Pack(d.Args.MustGet("if").Value(r.Vars)) + if err != nil { + r.AddError(errors.Errorf("%s", err)) + } + if err == nil && !v.Bool() { + return true + } + } + + return false +} + +func HasAsyncSel(sels []Selection) bool { + for _, sel := range sels { + switch sel := sel.(type) { + case *SchemaField: + if sel.Async { + return true + } + case *TypeAssertion: + if HasAsyncSel(sel.Sels) { + return true + } + case *TypenameField: + // sync + default: + panic("unreachable") + } + } + return false +} diff --git a/vendor/github.com/neelance/graphql-go/internal/query/query.go b/vendor/github.com/neelance/graphql-go/internal/query/query.go new 
file mode 100644 index 00000000..f11b1b77 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/query/query.go @@ -0,0 +1,240 @@ +package query + +import ( + "fmt" + "strings" + "text/scanner" + + "github.com/neelance/graphql-go/errors" + "github.com/neelance/graphql-go/internal/common" +) + +type Document struct { + Operations OperationList + Fragments FragmentList +} + +type OperationList []*Operation + +func (l OperationList) Get(name string) *Operation { + for _, f := range l { + if f.Name.Name == name { + return f + } + } + return nil +} + +type FragmentList []*FragmentDecl + +func (l FragmentList) Get(name string) *FragmentDecl { + for _, f := range l { + if f.Name.Name == name { + return f + } + } + return nil +} + +type Operation struct { + Type OperationType + Name common.Ident + Vars common.InputValueList + Selections []Selection + Directives common.DirectiveList + Loc errors.Location +} + +type OperationType string + +const ( + Query OperationType = "QUERY" + Mutation = "MUTATION" + Subscription = "SUBSCRIPTION" +) + +type Fragment struct { + On common.TypeName + Selections []Selection +} + +type FragmentDecl struct { + Fragment + Name common.Ident + Directives common.DirectiveList + Loc errors.Location +} + +type Selection interface { + isSelection() +} + +type Field struct { + Alias common.Ident + Name common.Ident + Arguments common.ArgumentList + Directives common.DirectiveList + Selections []Selection + SelectionSetLoc errors.Location +} + +type InlineFragment struct { + Fragment + Directives common.DirectiveList + Loc errors.Location +} + +type FragmentSpread struct { + Name common.Ident + Directives common.DirectiveList + Loc errors.Location +} + +func (Field) isSelection() {} +func (InlineFragment) isSelection() {} +func (FragmentSpread) isSelection() {} + +func Parse(queryString string) (*Document, *errors.QueryError) { + sc := &scanner.Scanner{ + Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | 
scanner.ScanStrings, + } + sc.Init(strings.NewReader(queryString)) + + l := common.New(sc) + var doc *Document + err := l.CatchSyntaxError(func() { + doc = parseDocument(l) + }) + if err != nil { + return nil, err + } + + return doc, nil +} + +func parseDocument(l *common.Lexer) *Document { + d := &Document{} + for l.Peek() != scanner.EOF { + if l.Peek() == '{' { + op := &Operation{Type: Query, Loc: l.Location()} + op.Selections = parseSelectionSet(l) + d.Operations = append(d.Operations, op) + continue + } + + loc := l.Location() + switch x := l.ConsumeIdent(); x { + case "query": + op := parseOperation(l, Query) + op.Loc = loc + d.Operations = append(d.Operations, op) + + case "mutation": + d.Operations = append(d.Operations, parseOperation(l, Mutation)) + + case "subscription": + d.Operations = append(d.Operations, parseOperation(l, Subscription)) + + case "fragment": + frag := parseFragment(l) + frag.Loc = loc + d.Fragments = append(d.Fragments, frag) + + default: + l.SyntaxError(fmt.Sprintf(`unexpected %q, expecting "fragment"`, x)) + } + } + return d +} + +func parseOperation(l *common.Lexer, opType OperationType) *Operation { + op := &Operation{Type: opType} + op.Name.Loc = l.Location() + if l.Peek() == scanner.Ident { + op.Name = l.ConsumeIdentWithLoc() + } + op.Directives = common.ParseDirectives(l) + if l.Peek() == '(' { + l.ConsumeToken('(') + for l.Peek() != ')' { + loc := l.Location() + l.ConsumeToken('$') + iv := common.ParseInputValue(l) + iv.Loc = loc + op.Vars = append(op.Vars, iv) + } + l.ConsumeToken(')') + } + op.Selections = parseSelectionSet(l) + return op +} + +func parseFragment(l *common.Lexer) *FragmentDecl { + f := &FragmentDecl{} + f.Name = l.ConsumeIdentWithLoc() + l.ConsumeKeyword("on") + f.On = common.TypeName{Ident: l.ConsumeIdentWithLoc()} + f.Directives = common.ParseDirectives(l) + f.Selections = parseSelectionSet(l) + return f +} + +func parseSelectionSet(l *common.Lexer) []Selection { + var sels []Selection + l.ConsumeToken('{') 
+ for l.Peek() != '}' { + sels = append(sels, parseSelection(l)) + } + l.ConsumeToken('}') + return sels +} + +func parseSelection(l *common.Lexer) Selection { + if l.Peek() == '.' { + return parseSpread(l) + } + return parseField(l) +} + +func parseField(l *common.Lexer) *Field { + f := &Field{} + f.Alias = l.ConsumeIdentWithLoc() + f.Name = f.Alias + if l.Peek() == ':' { + l.ConsumeToken(':') + f.Name = l.ConsumeIdentWithLoc() + } + if l.Peek() == '(' { + f.Arguments = common.ParseArguments(l) + } + f.Directives = common.ParseDirectives(l) + if l.Peek() == '{' { + f.SelectionSetLoc = l.Location() + f.Selections = parseSelectionSet(l) + } + return f +} + +func parseSpread(l *common.Lexer) Selection { + loc := l.Location() + l.ConsumeToken('.') + l.ConsumeToken('.') + l.ConsumeToken('.') + + f := &InlineFragment{Loc: loc} + if l.Peek() == scanner.Ident { + ident := l.ConsumeIdentWithLoc() + if ident.Name != "on" { + fs := &FragmentSpread{ + Name: ident, + Loc: loc, + } + fs.Directives = common.ParseDirectives(l) + return fs + } + f.On = common.TypeName{Ident: l.ConsumeIdentWithLoc()} + } + f.Directives = common.ParseDirectives(l) + f.Selections = parseSelectionSet(l) + return f +} diff --git a/vendor/github.com/neelance/graphql-go/internal/schema/meta.go b/vendor/github.com/neelance/graphql-go/internal/schema/meta.go new file mode 100644 index 00000000..b48bf7ac --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/schema/meta.go @@ -0,0 +1,190 @@ +package schema + +var Meta *Schema + +func init() { + Meta = &Schema{} // bootstrap + Meta = New() + if err := Meta.Parse(metaSrc); err != nil { + panic(err) + } +} + +var metaSrc = ` + # The ` + "`" + `Int` + "`" + ` scalar type represents non-fractional signed whole numeric values. Int can represent values between -(2^31) and 2^31 - 1. 
+ scalar Int + + # The ` + "`" + `Float` + "`" + ` scalar type represents signed double-precision fractional values as specified by [IEEE 754](http://en.wikipedia.org/wiki/IEEE_floating_point). + scalar Float + + # The ` + "`" + `String` + "`" + ` scalar type represents textual data, represented as UTF-8 character sequences. The String type is most often used by GraphQL to represent free-form human-readable text. + scalar String + + # The ` + "`" + `Boolean` + "`" + ` scalar type represents ` + "`" + `true` + "`" + ` or ` + "`" + `false` + "`" + `. + scalar Boolean + + # The ` + "`" + `ID` + "`" + ` scalar type represents a unique identifier, often used to refetch an object or as key for a cache. The ID type appears in a JSON response as a String; however, it is not intended to be human-readable. When expected as an input type, any string (such as ` + "`" + `"4"` + "`" + `) or integer (such as ` + "`" + `4` + "`" + `) input value will be accepted as an ID. + scalar ID + + # Directs the executor to include this field or fragment only when the ` + "`" + `if` + "`" + ` argument is true. + directive @include( + # Included when true. + if: Boolean! + ) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + + # Directs the executor to skip this field or fragment when the ` + "`" + `if` + "`" + ` argument is true. + directive @skip( + # Skipped when true. + if: Boolean! + ) on FIELD | FRAGMENT_SPREAD | INLINE_FRAGMENT + + # Marks an element of a GraphQL schema as no longer supported. + directive @deprecated( + # Explains why this element was deprecated, usually also including a suggestion + # for how to access supported similar data. Formatted in + # [Markdown](https://daringfireball.net/projects/markdown/). + reason: String = "No longer supported" + ) on FIELD_DEFINITION | ENUM_VALUE + + # A Directive provides a way to describe alternate runtime execution and type validation behavior in a GraphQL document. 
+ # + # In some cases, you need to provide options to alter GraphQL's execution behavior + # in ways field arguments will not suffice, such as conditionally including or + # skipping a field. Directives provide this by describing additional information + # to the executor. + type __Directive { + name: String! + description: String + locations: [__DirectiveLocation!]! + args: [__InputValue!]! + } + + # A Directive can be adjacent to many parts of the GraphQL language, a + # __DirectiveLocation describes one such possible adjacencies. + enum __DirectiveLocation { + # Location adjacent to a query operation. + QUERY + # Location adjacent to a mutation operation. + MUTATION + # Location adjacent to a subscription operation. + SUBSCRIPTION + # Location adjacent to a field. + FIELD + # Location adjacent to a fragment definition. + FRAGMENT_DEFINITION + # Location adjacent to a fragment spread. + FRAGMENT_SPREAD + # Location adjacent to an inline fragment. + INLINE_FRAGMENT + # Location adjacent to a schema definition. + SCHEMA + # Location adjacent to a scalar definition. + SCALAR + # Location adjacent to an object type definition. + OBJECT + # Location adjacent to a field definition. + FIELD_DEFINITION + # Location adjacent to an argument definition. + ARGUMENT_DEFINITION + # Location adjacent to an interface definition. + INTERFACE + # Location adjacent to a union definition. + UNION + # Location adjacent to an enum definition. + ENUM + # Location adjacent to an enum value definition. + ENUM_VALUE + # Location adjacent to an input object type definition. + INPUT_OBJECT + # Location adjacent to an input object field definition. + INPUT_FIELD_DEFINITION + } + + # One possible value for a given Enum. Enum values are unique values, not a + # placeholder for a string or numeric value. However an Enum value is returned in + # a JSON response as a string. + type __EnumValue { + name: String! + description: String + isDeprecated: Boolean! 
+ deprecationReason: String + } + + # Object and Interface types are described by a list of Fields, each of which has + # a name, potentially a list of arguments, and a return type. + type __Field { + name: String! + description: String + args: [__InputValue!]! + type: __Type! + isDeprecated: Boolean! + deprecationReason: String + } + + # Arguments provided to Fields or Directives and the input fields of an + # InputObject are represented as Input Values which describe their type and + # optionally a default value. + type __InputValue { + name: String! + description: String + type: __Type! + # A GraphQL-formatted string representing the default value for this input value. + defaultValue: String + } + + # A GraphQL Schema defines the capabilities of a GraphQL server. It exposes all + # available types and directives on the server, as well as the entry points for + # query, mutation, and subscription operations. + type __Schema { + # A list of all types supported by this server. + types: [__Type!]! + # The type that query operations will be rooted at. + queryType: __Type! + # If this server supports mutation, the type that mutation operations will be rooted at. + mutationType: __Type + # If this server support subscription, the type that subscription operations will be rooted at. + subscriptionType: __Type + # A list of all directives supported by this server. + directives: [__Directive!]! + } + + # The fundamental unit of any GraphQL Schema is the type. There are many kinds of + # types in GraphQL as represented by the ` + "`" + `__TypeKind` + "`" + ` enum. + # + # Depending on the kind of a type, certain fields describe information about that + # type. Scalar types provide no information beyond a name and description, while + # Enum types provide their values. Object and Interface types provide the fields + # they describe. Abstract types, Union and Interface, provide the Object types + # possible at runtime. List and NonNull types compose other types. 
+ type __Type { + kind: __TypeKind! + name: String + description: String + fields(includeDeprecated: Boolean = false): [__Field!] + interfaces: [__Type!] + possibleTypes: [__Type!] + enumValues(includeDeprecated: Boolean = false): [__EnumValue!] + inputFields: [__InputValue!] + ofType: __Type + } + + # An enum describing what kind of type a given ` + "`" + `__Type` + "`" + ` is. + enum __TypeKind { + # Indicates this type is a scalar. + SCALAR + # Indicates this type is an object. ` + "`" + `fields` + "`" + ` and ` + "`" + `interfaces` + "`" + ` are valid fields. + OBJECT + # Indicates this type is an interface. ` + "`" + `fields` + "`" + ` and ` + "`" + `possibleTypes` + "`" + ` are valid fields. + INTERFACE + # Indicates this type is a union. ` + "`" + `possibleTypes` + "`" + ` is a valid field. + UNION + # Indicates this type is an enum. ` + "`" + `enumValues` + "`" + ` is a valid field. + ENUM + # Indicates this type is an input object. ` + "`" + `inputFields` + "`" + ` is a valid field. + INPUT_OBJECT + # Indicates this type is a list. ` + "`" + `ofType` + "`" + ` is a valid field. + LIST + # Indicates this type is a non-null. ` + "`" + `ofType` + "`" + ` is a valid field. 
+ NON_NULL + } +` diff --git a/vendor/github.com/neelance/graphql-go/internal/schema/schema.go b/vendor/github.com/neelance/graphql-go/internal/schema/schema.go new file mode 100644 index 00000000..0cada3a9 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/schema/schema.go @@ -0,0 +1,462 @@ +package schema + +import ( + "fmt" + "strings" + "text/scanner" + + "github.com/neelance/graphql-go/errors" + "github.com/neelance/graphql-go/internal/common" +) + +type Schema struct { + EntryPoints map[string]NamedType + Types map[string]NamedType + Directives map[string]*DirectiveDecl + + entryPointNames map[string]string + objects []*Object + unions []*Union + enums []*Enum +} + +func (s *Schema) Resolve(name string) common.Type { + return s.Types[name] +} + +type NamedType interface { + common.Type + TypeName() string + Description() string +} + +type Scalar struct { + Name string + Desc string +} + +type Object struct { + Name string + Interfaces []*Interface + Fields FieldList + Desc string + + interfaceNames []string +} + +type Interface struct { + Name string + PossibleTypes []*Object + Fields FieldList + Desc string +} + +type Union struct { + Name string + PossibleTypes []*Object + Desc string + + typeNames []string +} + +type Enum struct { + Name string + Values []*EnumValue + Desc string +} + +type EnumValue struct { + Name string + Directives common.DirectiveList + Desc string +} + +type InputObject struct { + Name string + Desc string + Values common.InputValueList +} + +type FieldList []*Field + +func (l FieldList) Get(name string) *Field { + for _, f := range l { + if f.Name == name { + return f + } + } + return nil +} + +func (l FieldList) Names() []string { + names := make([]string, len(l)) + for i, f := range l { + names[i] = f.Name + } + return names +} + +type DirectiveDecl struct { + Name string + Desc string + Locs []string + Args common.InputValueList +} + +func (*Scalar) Kind() string { return "SCALAR" } +func (*Object) Kind() string { 
return "OBJECT" } +func (*Interface) Kind() string { return "INTERFACE" } +func (*Union) Kind() string { return "UNION" } +func (*Enum) Kind() string { return "ENUM" } +func (*InputObject) Kind() string { return "INPUT_OBJECT" } + +func (t *Scalar) String() string { return t.Name } +func (t *Object) String() string { return t.Name } +func (t *Interface) String() string { return t.Name } +func (t *Union) String() string { return t.Name } +func (t *Enum) String() string { return t.Name } +func (t *InputObject) String() string { return t.Name } + +func (t *Scalar) TypeName() string { return t.Name } +func (t *Object) TypeName() string { return t.Name } +func (t *Interface) TypeName() string { return t.Name } +func (t *Union) TypeName() string { return t.Name } +func (t *Enum) TypeName() string { return t.Name } +func (t *InputObject) TypeName() string { return t.Name } + +func (t *Scalar) Description() string { return t.Desc } +func (t *Object) Description() string { return t.Desc } +func (t *Interface) Description() string { return t.Desc } +func (t *Union) Description() string { return t.Desc } +func (t *Enum) Description() string { return t.Desc } +func (t *InputObject) Description() string { return t.Desc } + +type Field struct { + Name string + Args common.InputValueList + Type common.Type + Directives common.DirectiveList + Desc string +} + +func New() *Schema { + s := &Schema{ + entryPointNames: make(map[string]string), + Types: make(map[string]NamedType), + Directives: make(map[string]*DirectiveDecl), + } + for n, t := range Meta.Types { + s.Types[n] = t + } + for n, d := range Meta.Directives { + s.Directives[n] = d + } + return s +} + +func (s *Schema) Parse(schemaString string) error { + sc := &scanner.Scanner{ + Mode: scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings, + } + sc.Init(strings.NewReader(schemaString)) + + l := common.New(sc) + err := l.CatchSyntaxError(func() { + parseSchema(s, l) + }) + if err != nil { + return 
err + } + + for _, t := range s.Types { + if err := resolveNamedType(s, t); err != nil { + return err + } + } + for _, d := range s.Directives { + for _, arg := range d.Args { + t, err := common.ResolveType(arg.Type, s.Resolve) + if err != nil { + return err + } + arg.Type = t + } + } + + s.EntryPoints = make(map[string]NamedType) + for key, name := range s.entryPointNames { + t, ok := s.Types[name] + if !ok { + if !ok { + return errors.Errorf("type %q not found", name) + } + } + s.EntryPoints[key] = t + } + + for _, obj := range s.objects { + obj.Interfaces = make([]*Interface, len(obj.interfaceNames)) + for i, intfName := range obj.interfaceNames { + t, ok := s.Types[intfName] + if !ok { + return errors.Errorf("interface %q not found", intfName) + } + intf, ok := t.(*Interface) + if !ok { + return errors.Errorf("type %q is not an interface", intfName) + } + obj.Interfaces[i] = intf + intf.PossibleTypes = append(intf.PossibleTypes, obj) + } + } + + for _, union := range s.unions { + union.PossibleTypes = make([]*Object, len(union.typeNames)) + for i, name := range union.typeNames { + t, ok := s.Types[name] + if !ok { + return errors.Errorf("object type %q not found", name) + } + obj, ok := t.(*Object) + if !ok { + return errors.Errorf("type %q is not an object", name) + } + union.PossibleTypes[i] = obj + } + } + + for _, enum := range s.enums { + for _, value := range enum.Values { + if err := resolveDirectives(s, value.Directives); err != nil { + return err + } + } + } + + return nil +} + +func resolveNamedType(s *Schema, t NamedType) error { + switch t := t.(type) { + case *Object: + for _, f := range t.Fields { + if err := resolveField(s, f); err != nil { + return err + } + } + case *Interface: + for _, f := range t.Fields { + if err := resolveField(s, f); err != nil { + return err + } + } + case *InputObject: + if err := resolveInputObject(s, t.Values); err != nil { + return err + } + } + return nil +} + +func resolveField(s *Schema, f *Field) error { + t, err 
:= common.ResolveType(f.Type, s.Resolve) + if err != nil { + return err + } + f.Type = t + if err := resolveDirectives(s, f.Directives); err != nil { + return err + } + return resolveInputObject(s, f.Args) +} + +func resolveDirectives(s *Schema, directives common.DirectiveList) error { + for _, d := range directives { + dirName := d.Name.Name + dd, ok := s.Directives[dirName] + if !ok { + return errors.Errorf("directive %q not found", dirName) + } + for _, arg := range d.Args { + if dd.Args.Get(arg.Name.Name) == nil { + return errors.Errorf("invalid argument %q for directive %q", arg.Name.Name, dirName) + } + } + for _, arg := range dd.Args { + if _, ok := d.Args.Get(arg.Name.Name); !ok { + d.Args = append(d.Args, common.Argument{Name: arg.Name, Value: arg.Default}) + } + } + } + return nil +} + +func resolveInputObject(s *Schema, values common.InputValueList) error { + for _, v := range values { + t, err := common.ResolveType(v.Type, s.Resolve) + if err != nil { + return err + } + v.Type = t + } + return nil +} + +func parseSchema(s *Schema, l *common.Lexer) { + for l.Peek() != scanner.EOF { + desc := l.DescComment() + switch x := l.ConsumeIdent(); x { + case "schema": + l.ConsumeToken('{') + for l.Peek() != '}' { + name := l.ConsumeIdent() + l.ConsumeToken(':') + typ := l.ConsumeIdent() + s.entryPointNames[name] = typ + } + l.ConsumeToken('}') + case "type": + obj := parseObjectDecl(l) + obj.Desc = desc + s.Types[obj.Name] = obj + s.objects = append(s.objects, obj) + case "interface": + intf := parseInterfaceDecl(l) + intf.Desc = desc + s.Types[intf.Name] = intf + case "union": + union := parseUnionDecl(l) + union.Desc = desc + s.Types[union.Name] = union + s.unions = append(s.unions, union) + case "enum": + enum := parseEnumDecl(l) + enum.Desc = desc + s.Types[enum.Name] = enum + s.enums = append(s.enums, enum) + case "input": + input := parseInputDecl(l) + input.Desc = desc + s.Types[input.Name] = input + case "scalar": + name := l.ConsumeIdent() + 
s.Types[name] = &Scalar{Name: name, Desc: desc} + case "directive": + directive := parseDirectiveDecl(l) + directive.Desc = desc + s.Directives[directive.Name] = directive + default: + l.SyntaxError(fmt.Sprintf(`unexpected %q, expecting "schema", "type", "enum", "interface", "union", "input", "scalar" or "directive"`, x)) + } + } +} + +func parseObjectDecl(l *common.Lexer) *Object { + o := &Object{} + o.Name = l.ConsumeIdent() + if l.Peek() == scanner.Ident { + l.ConsumeKeyword("implements") + for { + o.interfaceNames = append(o.interfaceNames, l.ConsumeIdent()) + if l.Peek() == '{' { + break + } + } + } + l.ConsumeToken('{') + o.Fields = parseFields(l) + l.ConsumeToken('}') + return o +} + +func parseInterfaceDecl(l *common.Lexer) *Interface { + i := &Interface{} + i.Name = l.ConsumeIdent() + l.ConsumeToken('{') + i.Fields = parseFields(l) + l.ConsumeToken('}') + return i +} + +func parseUnionDecl(l *common.Lexer) *Union { + union := &Union{} + union.Name = l.ConsumeIdent() + l.ConsumeToken('=') + union.typeNames = []string{l.ConsumeIdent()} + for l.Peek() == '|' { + l.ConsumeToken('|') + union.typeNames = append(union.typeNames, l.ConsumeIdent()) + } + return union +} + +func parseInputDecl(l *common.Lexer) *InputObject { + i := &InputObject{} + i.Name = l.ConsumeIdent() + l.ConsumeToken('{') + for l.Peek() != '}' { + i.Values = append(i.Values, common.ParseInputValue(l)) + } + l.ConsumeToken('}') + return i +} + +func parseEnumDecl(l *common.Lexer) *Enum { + enum := &Enum{} + enum.Name = l.ConsumeIdent() + l.ConsumeToken('{') + for l.Peek() != '}' { + v := &EnumValue{} + v.Desc = l.DescComment() + v.Name = l.ConsumeIdent() + v.Directives = common.ParseDirectives(l) + enum.Values = append(enum.Values, v) + } + l.ConsumeToken('}') + return enum +} + +func parseDirectiveDecl(l *common.Lexer) *DirectiveDecl { + d := &DirectiveDecl{} + l.ConsumeToken('@') + d.Name = l.ConsumeIdent() + if l.Peek() == '(' { + l.ConsumeToken('(') + for l.Peek() != ')' { + v := 
common.ParseInputValue(l) + d.Args = append(d.Args, v) + } + l.ConsumeToken(')') + } + l.ConsumeKeyword("on") + for { + loc := l.ConsumeIdent() + d.Locs = append(d.Locs, loc) + if l.Peek() != '|' { + break + } + l.ConsumeToken('|') + } + return d +} + +func parseFields(l *common.Lexer) FieldList { + var fields FieldList + for l.Peek() != '}' { + f := &Field{} + f.Desc = l.DescComment() + f.Name = l.ConsumeIdent() + if l.Peek() == '(' { + l.ConsumeToken('(') + for l.Peek() != ')' { + f.Args = append(f.Args, common.ParseInputValue(l)) + } + l.ConsumeToken(')') + } + l.ConsumeToken(':') + f.Type = common.ParseType(l) + f.Directives = common.ParseDirectives(l) + fields = append(fields, f) + } + return fields +} diff --git a/vendor/github.com/neelance/graphql-go/internal/tests/all_test.go b/vendor/github.com/neelance/graphql-go/internal/tests/all_test.go new file mode 100644 index 00000000..7d31673f --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/tests/all_test.go @@ -0,0 +1,75 @@ +package tests + +import ( + "os" + "reflect" + "sort" + "testing" + + "encoding/json" + + "github.com/neelance/graphql-go/errors" + "github.com/neelance/graphql-go/internal/query" + "github.com/neelance/graphql-go/internal/schema" + "github.com/neelance/graphql-go/internal/validation" +) + +type Test struct { + Name string + Rule string + Schema int + Query string + Errors []*errors.QueryError +} + +func TestAll(t *testing.T) { + f, err := os.Open("testdata/tests.json") + if err != nil { + t.Fatal(err) + } + + var testData struct { + Schemas []string + Tests []*Test + } + if err := json.NewDecoder(f).Decode(&testData); err != nil { + t.Fatal(err) + } + + schemas := make([]*schema.Schema, len(testData.Schemas)) + for i, schemaStr := range testData.Schemas { + schemas[i] = schema.New() + if err := schemas[i].Parse(schemaStr); err != nil { + t.Fatal(err) + } + } + + for _, test := range testData.Tests { + t.Run(test.Name, func(t *testing.T) { + d, err := 
query.Parse(test.Query) + if err != nil { + t.Fatal(err) + } + errs := validation.Validate(schemas[test.Schema], d) + got := []*errors.QueryError{} + for _, err := range errs { + if err.Rule == test.Rule { + err.Rule = "" + got = append(got, err) + } + } + sortLocations(test.Errors) + sortLocations(got) + if !reflect.DeepEqual(test.Errors, got) { + t.Errorf("wrong errors\nexpected: %v\ngot: %v", test.Errors, got) + } + }) + } +} + +func sortLocations(errs []*errors.QueryError) { + for _, err := range errs { + locs := err.Locations + sort.Slice(locs, func(i, j int) bool { return locs[i].Before(locs[j]) }) + } +} diff --git a/vendor/github.com/neelance/graphql-go/internal/tests/empty.go b/vendor/github.com/neelance/graphql-go/internal/tests/empty.go new file mode 100644 index 00000000..ca8701d2 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/tests/empty.go @@ -0,0 +1 @@ +package tests diff --git a/vendor/github.com/neelance/graphql-go/internal/tests/testdata/LICENSE b/vendor/github.com/neelance/graphql-go/internal/tests/testdata/LICENSE new file mode 100644 index 00000000..fce4519e --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/tests/testdata/LICENSE @@ -0,0 +1,33 @@ +The files in this testdata directory are derived from the graphql-js project: +https://github.com/graphql/graphql-js + +BSD License + +For GraphQL software + +Copyright (c) 2015, Facebook, Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + + * Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + + * Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. 
+ + * Neither the name Facebook nor the names of its contributors may be used to + endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. \ No newline at end of file diff --git a/vendor/github.com/neelance/graphql-go/internal/tests/testdata/export.js b/vendor/github.com/neelance/graphql-go/internal/tests/testdata/export.js new file mode 100644 index 00000000..b89f5574 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/tests/testdata/export.js @@ -0,0 +1,110 @@ +import fs from 'fs'; +import Module from 'module'; +import { testSchema } from './src/validation/__tests__/harness'; +import { printSchema } from './src/utilities'; + +let schemas = []; +function registerSchema(schema) { + for (let i = 0; i < schemas.length; i++) { + if (schemas[i] == schema) { + return i; + } + } + schemas.push(schema); + return schemas.length - 1; +} + +const harness = { + expectPassesRule(rule, queryString) { + harness.expectPassesRuleWithSchema(testSchema, rule, queryString); + }, + expectPassesRuleWithSchema(schema, rule, queryString, errors) { + tests.push({ + name: names.join('/'), + rule: rule.name, + schema: registerSchema(schema), + query: queryString, + errors: [], + 
}); + }, + expectFailsRule(rule, queryString, errors) { + harness.expectFailsRuleWithSchema(testSchema, rule, queryString, errors); + }, + expectFailsRuleWithSchema(schema, rule, queryString, errors) { + tests.push({ + name: names.join('/'), + rule: rule.name, + schema: registerSchema(schema), + query: queryString, + errors: errors, + }); + } +}; + +let tests = []; +let names = [] +const fakeModules = { + 'mocha': { + describe(name, f) { + switch (name) { + case 'within schema language': + return; + } + names.push(name); + f(); + names.pop(); + }, + it(name, f) { + switch (name) { + case 'ignores type definitions': + case 'reports correctly when a non-exclusive follows an exclusive': + case 'disallows differing subfields': + return; + } + names.push(name); + f(); + names.pop(); + }, + }, + './harness': harness, +}; + +const originalLoader = Module._load; +Module._load = function(request, parent, isMain) { + return fakeModules[request] || originalLoader(request, parent, isMain); +}; + +require('./src/validation/__tests__/ArgumentsOfCorrectType-test'); +require('./src/validation/__tests__/DefaultValuesOfCorrectType-test'); +require('./src/validation/__tests__/FieldsOnCorrectType-test'); +require('./src/validation/__tests__/FragmentsOnCompositeTypes-test'); +require('./src/validation/__tests__/KnownArgumentNames-test'); +require('./src/validation/__tests__/KnownDirectives-test'); +require('./src/validation/__tests__/KnownFragmentNames-test'); +require('./src/validation/__tests__/KnownTypeNames-test'); +require('./src/validation/__tests__/LoneAnonymousOperation-test'); +require('./src/validation/__tests__/NoFragmentCycles-test'); +require('./src/validation/__tests__/NoUndefinedVariables-test'); +require('./src/validation/__tests__/NoUnusedFragments-test'); +require('./src/validation/__tests__/NoUnusedVariables-test'); +require('./src/validation/__tests__/OverlappingFieldsCanBeMerged-test'); +require('./src/validation/__tests__/PossibleFragmentSpreads-test'); 
+require('./src/validation/__tests__/ProvidedNonNullArguments-test'); +require('./src/validation/__tests__/ScalarLeafs-test'); +require('./src/validation/__tests__/UniqueArgumentNames-test'); +require('./src/validation/__tests__/UniqueDirectivesPerLocation-test'); +require('./src/validation/__tests__/UniqueFragmentNames-test'); +require('./src/validation/__tests__/UniqueInputFieldNames-test'); +require('./src/validation/__tests__/UniqueOperationNames-test'); +require('./src/validation/__tests__/UniqueVariableNames-test'); +require('./src/validation/__tests__/VariablesAreInputTypes-test'); +require('./src/validation/__tests__/VariablesInAllowedPosition-test'); + +let output = JSON.stringify({ + schemas: schemas.map(s => printSchema(s)), + tests: tests, +}, null, 2) +output = output.replace(' Did you mean to use an inline fragment on \\"Dog\\" or \\"Cat\\"?', ''); +output = output.replace(' Did you mean to use an inline fragment on \\"Being\\", \\"Pet\\", \\"Canine\\", \\"Dog\\", or \\"Cat\\"?', ''); +output = output.replace(' Did you mean \\"Pet\\"?', ''); +fs.writeFileSync("tests.json", output); diff --git a/vendor/github.com/neelance/graphql-go/internal/tests/testdata/gen.go b/vendor/github.com/neelance/graphql-go/internal/tests/testdata/gen.go new file mode 100644 index 00000000..6d5ac9e6 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/tests/testdata/gen.go @@ -0,0 +1,4 @@ +package testdata + +//go:generate cp export.js graphql-js/export.js +//go:generate babel-node graphql-js/export.js diff --git a/vendor/github.com/neelance/graphql-go/internal/tests/testdata/tests.json b/vendor/github.com/neelance/graphql-go/internal/tests/testdata/tests.json new file mode 100644 index 00000000..35511c6a --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/tests/testdata/tests.json @@ -0,0 +1,4948 @@ +{ + "schemas": [ + "schema {\n query: QueryRoot\n}\n\ndirective @onQuery on QUERY\n\ndirective @onMutation on MUTATION\n\ndirective 
@onSubscription on SUBSCRIPTION\n\ndirective @onField on FIELD\n\ndirective @onFragmentDefinition on FRAGMENT_DEFINITION\n\ndirective @onFragmentSpread on FRAGMENT_SPREAD\n\ndirective @onInlineFragment on INLINE_FRAGMENT\n\ndirective @onSchema on SCHEMA\n\ndirective @onScalar on SCALAR\n\ndirective @onObject on OBJECT\n\ndirective @onFieldDefinition on FIELD_DEFINITION\n\ndirective @onArgumentDefinition on ARGUMENT_DEFINITION\n\ndirective @onInterface on INTERFACE\n\ndirective @onUnion on UNION\n\ndirective @onEnum on ENUM\n\ndirective @onEnumValue on ENUM_VALUE\n\ndirective @onInputObject on INPUT_OBJECT\n\ndirective @onInputFieldDefinition on INPUT_FIELD_DEFINITION\n\ntype Alien implements Being, Intelligent {\n iq: Int\n name(surname: Boolean): String\n numEyes: Int\n}\n\ninterface Being {\n name(surname: Boolean): String\n}\n\ninterface Canine {\n name(surname: Boolean): String\n}\n\ntype Cat implements Being, Pet {\n furColor: FurColor\n meowVolume: Int\n meows: Boolean\n name(surname: Boolean): String\n nickname: String\n}\n\nunion CatOrDog = Dog | Cat\n\ninput ComplexInput {\n booleanField: Boolean\n intField: Int\n requiredField: Boolean!\n stringField: String\n stringListField: [String]\n}\n\ntype ComplicatedArgs {\n booleanArgField(booleanArg: Boolean): String\n complexArgField(complexArg: ComplexInput): String\n enumArgField(enumArg: FurColor): String\n floatArgField(floatArg: Float): String\n idArgField(idArg: ID): String\n intArgField(intArg: Int): String\n multipleOptAndReq(req1: Int!, req2: Int!, opt1: Int = 0, opt2: Int = 0): String\n multipleOpts(opt1: Int = 0, opt2: Int = 0): String\n multipleReqs(req1: Int!, req2: Int!): String\n nonNullIntArgField(nonNullIntArg: Int!): String\n stringArgField(stringArg: String): String\n stringListArgField(stringListArg: [String]): String\n}\n\ntype Dog implements Being, Pet, Canine {\n barkVolume: Int\n barks: Boolean\n doesKnowCommand(dogCommand: DogCommand): Boolean\n isAtLocation(x: Int, y: Int): Boolean\n 
isHousetrained(atOtherHomes: Boolean = true): Boolean\n name(surname: Boolean): String\n nickname: String\n}\n\nenum DogCommand {\n SIT\n HEEL\n DOWN\n}\n\nunion DogOrHuman = Dog | Human\n\nenum FurColor {\n BROWN\n BLACK\n TAN\n SPOTTED\n NO_FUR\n UNKNOWN\n}\n\ntype Human implements Being, Intelligent {\n iq: Int\n name(surname: Boolean): String\n pets: [Pet]\n relatives: [Human]\n}\n\nunion HumanOrAlien = Human | Alien\n\ninterface Intelligent {\n iq: Int\n}\n\ninterface Pet {\n name(surname: Boolean): String\n}\n\ntype QueryRoot {\n alien: Alien\n cat: Cat\n catOrDog: CatOrDog\n complicatedArgs: ComplicatedArgs\n dog: Dog\n dogOrHuman: DogOrHuman\n human(id: ID): Human\n humanOrAlien: HumanOrAlien\n pet: Pet\n}\n", + "schema {\n query: QueryRoot\n}\n\ntype Connection {\n edges: [Edge]\n}\n\ntype Edge {\n node: Node\n}\n\ntype IntBox implements SomeBox {\n deepBox: IntBox\n intBox: IntBox\n listStringBox: [StringBox]\n scalar: Int\n stringBox: StringBox\n unrelatedField: String\n}\n\ntype Node {\n id: ID\n name: String\n}\n\ninterface NonNullStringBox1 {\n scalar: String!\n}\n\ntype NonNullStringBox1Impl implements SomeBox, NonNullStringBox1 {\n deepBox: SomeBox\n scalar: String!\n unrelatedField: String\n}\n\ninterface NonNullStringBox2 {\n scalar: String!\n}\n\ntype NonNullStringBox2Impl implements SomeBox, NonNullStringBox2 {\n deepBox: SomeBox\n scalar: String!\n unrelatedField: String\n}\n\ntype QueryRoot {\n connection: Connection\n someBox: SomeBox\n}\n\ninterface SomeBox {\n deepBox: SomeBox\n unrelatedField: String\n}\n\ntype StringBox implements SomeBox {\n deepBox: StringBox\n intBox: IntBox\n listStringBox: [StringBox]\n scalar: String\n stringBox: StringBox\n unrelatedField: String\n}\n", + "type Foo {\n constructor: String\n}\n\ntype Query {\n foo: Foo\n}\n" + ], + "tests": [ + { + "name": "Validate: Argument values of correct type/Valid values/Good int value", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n 
intArgField(intArg: 2)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid values/Good negative int value", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n intArgField(intArg: -2)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid values/Good boolean value", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n booleanArgField(booleanArg: true)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid values/Good string value", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n stringArgField(stringArg: \"foo\")\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid values/Good float value", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n floatArgField(floatArg: 1.1)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid values/Good negative float value", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n floatArgField(floatArg: -1.1)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid values/Int into Float", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n floatArgField(floatArg: 1)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid values/Int into ID", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n idArgField(idArg: 1)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid values/String into ID", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n idArgField(idArg: \"someIdString\")\n }\n }\n ", + "errors": [] + }, + 
{ + "name": "Validate: Argument values of correct type/Valid values/Good enum value", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n dog {\n doesKnowCommand(dogCommand: SIT)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid values/Enum with undefined value", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n enumArgField(enumArg: UNKNOWN)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid values/Enum with null value", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n enumArgField(enumArg: NO_FUR)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid values/null into nullable type", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n intArgField(intArg: null)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid values/null into nullable type", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n dog(a: null, b: null, c:{ requiredField: true, intField: null }) {\n name\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Invalid String values/Int into String", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n stringArgField(stringArg: 1)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"stringArg\" has invalid value 1.\nExpected type \"String\", found 1.", + "locations": [ + { + "line": 4, + "column": 39 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid String values/Float into String", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n stringArgField(stringArg: 1.0)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"stringArg\" has invalid value 1.0.\nExpected type \"String\", 
found 1.0.", + "locations": [ + { + "line": 4, + "column": 39 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid String values/Boolean into String", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n stringArgField(stringArg: true)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"stringArg\" has invalid value true.\nExpected type \"String\", found true.", + "locations": [ + { + "line": 4, + "column": 39 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid String values/Unquoted String into String", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n stringArgField(stringArg: BAR)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"stringArg\" has invalid value BAR.\nExpected type \"String\", found BAR.", + "locations": [ + { + "line": 4, + "column": 39 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Int values/String into Int", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n intArgField(intArg: \"3\")\n }\n }\n ", + "errors": [ + { + "message": "Argument \"intArg\" has invalid value \"3\".\nExpected type \"Int\", found \"3\".", + "locations": [ + { + "line": 4, + "column": 33 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Int values/Big Int into Int", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n intArgField(intArg: 829384293849283498239482938)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"intArg\" has invalid value 829384293849283498239482938.\nExpected type \"Int\", found 829384293849283498239482938.", + "locations": [ + { + "line": 4, + "column": 33 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Int values/Unquoted String into Int", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n 
complicatedArgs {\n intArgField(intArg: FOO)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"intArg\" has invalid value FOO.\nExpected type \"Int\", found FOO.", + "locations": [ + { + "line": 4, + "column": 33 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Int values/Simple Float into Int", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n intArgField(intArg: 3.0)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"intArg\" has invalid value 3.0.\nExpected type \"Int\", found 3.0.", + "locations": [ + { + "line": 4, + "column": 33 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Int values/Float into Int", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n intArgField(intArg: 3.333)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"intArg\" has invalid value 3.333.\nExpected type \"Int\", found 3.333.", + "locations": [ + { + "line": 4, + "column": 33 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Float values/String into Float", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n floatArgField(floatArg: \"3.333\")\n }\n }\n ", + "errors": [ + { + "message": "Argument \"floatArg\" has invalid value \"3.333\".\nExpected type \"Float\", found \"3.333\".", + "locations": [ + { + "line": 4, + "column": 37 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Float values/Boolean into Float", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n floatArgField(floatArg: true)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"floatArg\" has invalid value true.\nExpected type \"Float\", found true.", + "locations": [ + { + "line": 4, + "column": 37 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Float values/Unquoted 
into Float", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n floatArgField(floatArg: FOO)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"floatArg\" has invalid value FOO.\nExpected type \"Float\", found FOO.", + "locations": [ + { + "line": 4, + "column": 37 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Boolean value/Int into Boolean", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n booleanArgField(booleanArg: 2)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"booleanArg\" has invalid value 2.\nExpected type \"Boolean\", found 2.", + "locations": [ + { + "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Boolean value/Float into Boolean", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n booleanArgField(booleanArg: 1.0)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"booleanArg\" has invalid value 1.0.\nExpected type \"Boolean\", found 1.0.", + "locations": [ + { + "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Boolean value/String into Boolean", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n booleanArgField(booleanArg: \"true\")\n }\n }\n ", + "errors": [ + { + "message": "Argument \"booleanArg\" has invalid value \"true\".\nExpected type \"Boolean\", found \"true\".", + "locations": [ + { + "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Boolean value/Unquoted into Boolean", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n booleanArgField(booleanArg: TRUE)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"booleanArg\" has invalid value TRUE.\nExpected type \"Boolean\", found TRUE.", + "locations": [ + { 
+ "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid ID value/Float into ID", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n idArgField(idArg: 1.0)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"idArg\" has invalid value 1.0.\nExpected type \"ID\", found 1.0.", + "locations": [ + { + "line": 4, + "column": 31 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid ID value/Boolean into ID", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n idArgField(idArg: true)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"idArg\" has invalid value true.\nExpected type \"ID\", found true.", + "locations": [ + { + "line": 4, + "column": 31 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid ID value/Unquoted into ID", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n idArgField(idArg: SOMETHING)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"idArg\" has invalid value SOMETHING.\nExpected type \"ID\", found SOMETHING.", + "locations": [ + { + "line": 4, + "column": 31 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Enum value/Int into Enum", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n dog {\n doesKnowCommand(dogCommand: 2)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"dogCommand\" has invalid value 2.\nExpected type \"DogCommand\", found 2.", + "locations": [ + { + "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Enum value/Float into Enum", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n dog {\n doesKnowCommand(dogCommand: 1.0)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"dogCommand\" has invalid value 1.0.\nExpected type \"DogCommand\", found 
1.0.", + "locations": [ + { + "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Enum value/String into Enum", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n dog {\n doesKnowCommand(dogCommand: \"SIT\")\n }\n }\n ", + "errors": [ + { + "message": "Argument \"dogCommand\" has invalid value \"SIT\".\nExpected type \"DogCommand\", found \"SIT\".", + "locations": [ + { + "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Enum value/Boolean into Enum", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n dog {\n doesKnowCommand(dogCommand: true)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"dogCommand\" has invalid value true.\nExpected type \"DogCommand\", found true.", + "locations": [ + { + "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Enum value/Unknown Enum Value into Enum", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n dog {\n doesKnowCommand(dogCommand: JUGGLE)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"dogCommand\" has invalid value JUGGLE.\nExpected type \"DogCommand\", found JUGGLE.", + "locations": [ + { + "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid Enum value/Different case Enum Value into Enum", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n dog {\n doesKnowCommand(dogCommand: sit)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"dogCommand\" has invalid value sit.\nExpected type \"DogCommand\", found sit.", + "locations": [ + { + "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Valid List value/Good list value", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n stringListArgField(stringListArg: 
[\"one\", null, \"two\"])\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid List value/Empty list value", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n stringListArgField(stringListArg: [])\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid List value/Null value", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n stringListArgField(stringListArg: null)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid List value/Single value into List", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n stringListArgField(stringListArg: \"one\")\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Invalid List value/Incorrect item type", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n stringListArgField(stringListArg: [\"one\", 2])\n }\n }\n ", + "errors": [ + { + "message": "Argument \"stringListArg\" has invalid value [\"one\", 2].\nIn element #1: Expected type \"String\", found 2.", + "locations": [ + { + "line": 4, + "column": 47 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid List value/Single value of incorrect type", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n stringListArgField(stringListArg: 1)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"stringListArg\" has invalid value 1.\nExpected type \"String\", found 1.", + "locations": [ + { + "line": 4, + "column": 47 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Valid non-nullable value/Arg on optional arg", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n dog {\n isHousetrained(atOtherHomes: true)\n }\n }\n ", + "errors": [] + }, + { + "name": 
"Validate: Argument values of correct type/Valid non-nullable value/No Arg on optional arg", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n dog {\n isHousetrained\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid non-nullable value/Multiple args", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleReqs(req1: 1, req2: 2)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid non-nullable value/Multiple args reverse order", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleReqs(req2: 2, req1: 1)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid non-nullable value/No args on multiple optional", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleOpts\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid non-nullable value/One arg on multiple optional", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleOpts(opt1: 1)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid non-nullable value/Second arg on multiple optional", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleOpts(opt2: 1)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid non-nullable value/Multiple reqs on mixedList", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleOptAndReq(req1: 3, req2: 4)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid non-nullable value/Multiple reqs and one opt on mixedList", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n 
multipleOptAndReq(req1: 3, req2: 4, opt1: 5)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid non-nullable value/All reqs and opts on mixedList", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleOptAndReq(req1: 3, req2: 4, opt1: 5, opt2: 6)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Invalid non-nullable value/Incorrect value type", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleReqs(req2: \"two\", req1: \"one\")\n }\n }\n ", + "errors": [ + { + "message": "Argument \"req2\" has invalid value \"two\".\nExpected type \"Int\", found \"two\".", + "locations": [ + { + "line": 4, + "column": 32 + } + ] + }, + { + "message": "Argument \"req1\" has invalid value \"one\".\nExpected type \"Int\", found \"one\".", + "locations": [ + { + "line": 4, + "column": 45 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid non-nullable value/Incorrect value and missing argument", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleReqs(req1: \"one\")\n }\n }\n ", + "errors": [ + { + "message": "Argument \"req1\" has invalid value \"one\".\nExpected type \"Int\", found \"one\".", + "locations": [ + { + "line": 4, + "column": 32 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid non-nullable value/Null value", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleReqs(req1: null)\n }\n }\n ", + "errors": [ + { + "message": "Argument \"req1\" has invalid value null.\nExpected \"Int!\", found null.", + "locations": [ + { + "line": 4, + "column": 32 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Valid input object value/Optional arg, despite required field in type", + "rule": "ArgumentsOfCorrectType", + "schema": 
0, + "query": "\n {\n complicatedArgs {\n complexArgField\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid input object value/Partial object, only required", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n complexArgField(complexArg: { requiredField: true })\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid input object value/Partial object, required field can be falsey", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n complexArgField(complexArg: { requiredField: false })\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid input object value/Partial object, including required", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n complexArgField(complexArg: { requiredField: true, intField: 4 })\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid input object value/Full object", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n complexArgField(complexArg: {\n requiredField: true,\n intField: 4,\n stringField: \"foo\",\n booleanField: false,\n stringListField: [\"one\", \"two\"]\n })\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Valid input object value/Full object with fields in different order", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n complexArgField(complexArg: {\n stringListField: [\"one\", \"two\"],\n booleanField: false,\n requiredField: true,\n stringField: \"foo\",\n intField: 4,\n })\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Invalid input object value/Partial object, missing required", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n 
complexArgField(complexArg: { intField: 4 })\n }\n }\n ", + "errors": [ + { + "message": "Argument \"complexArg\" has invalid value {intField: 4}.\nIn field \"requiredField\": Expected \"Boolean!\", found null.", + "locations": [ + { + "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid input object value/Partial object, invalid field type", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n complexArgField(complexArg: {\n stringListField: [\"one\", 2],\n requiredField: true,\n })\n }\n }\n ", + "errors": [ + { + "message": "Argument \"complexArg\" has invalid value {stringListField: [\"one\", 2], requiredField: true}.\nIn field \"stringListField\": In element #1: Expected type \"String\", found 2.", + "locations": [ + { + "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Invalid input object value/Partial object, unknown field arg", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n complicatedArgs {\n complexArgField(complexArg: {\n requiredField: true,\n unknownField: \"value\"\n })\n }\n }\n ", + "errors": [ + { + "message": "Argument \"complexArg\" has invalid value {requiredField: true, unknownField: \"value\"}.\nIn field \"unknownField\": Unknown field.", + "locations": [ + { + "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Argument values of correct type/Directive arguments/with directives of valid types", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n dog @include(if: true) {\n name\n }\n human @skip(if: false) {\n name\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Argument values of correct type/Directive arguments/with directive with incorrect types", + "rule": "ArgumentsOfCorrectType", + "schema": 0, + "query": "\n {\n dog @include(if: \"yes\") {\n name @skip(if: ENUM)\n }\n }\n ", + "errors": [ + { + "message": "Argument 
\"if\" has invalid value \"yes\".\nExpected type \"Boolean\", found \"yes\".", + "locations": [ + { + "line": 3, + "column": 28 + } + ] + }, + { + "message": "Argument \"if\" has invalid value ENUM.\nExpected type \"Boolean\", found ENUM.", + "locations": [ + { + "line": 4, + "column": 28 + } + ] + } + ] + }, + { + "name": "Validate: Variable default values of correct type/variables with no default values", + "rule": "DefaultValuesOfCorrectType", + "schema": 0, + "query": "\n query NullableValues($a: Int, $b: String, $c: ComplexInput) {\n dog { name }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variable default values of correct type/required variables without default values", + "rule": "DefaultValuesOfCorrectType", + "schema": 0, + "query": "\n query RequiredValues($a: Int!, $b: String!) {\n dog { name }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variable default values of correct type/variables with valid default values", + "rule": "DefaultValuesOfCorrectType", + "schema": 0, + "query": "\n query WithDefaultValues(\n $a: Int = 1,\n $b: String = \"ok\",\n $c: ComplexInput = { requiredField: true, intField: 3 }\n ) {\n dog { name }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variable default values of correct type/variables with valid default null values", + "rule": "DefaultValuesOfCorrectType", + "schema": 0, + "query": "\n query WithDefaultValues(\n $a: Int = null,\n $b: String = null,\n $c: ComplexInput = { requiredField: true, intField: null }\n ) {\n dog { name }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variable default values of correct type/variables with invalid default null values", + "rule": "DefaultValuesOfCorrectType", + "schema": 0, + "query": "\n query WithDefaultValues(\n $a: Int! = null,\n $b: String! 
= null,\n $c: ComplexInput = { requiredField: null, intField: null }\n ) {\n dog { name }\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" of type \"Int!\" is required and will not use the default value. Perhaps you meant to use type \"Int\".", + "locations": [ + { + "line": 3, + "column": 20 + } + ] + }, + { + "message": "Variable \"$a\" of type \"Int!\" has invalid default value null.\nExpected \"Int!\", found null.", + "locations": [ + { + "line": 3, + "column": 20 + } + ] + }, + { + "message": "Variable \"$b\" of type \"String!\" is required and will not use the default value. Perhaps you meant to use type \"String\".", + "locations": [ + { + "line": 4, + "column": 23 + } + ] + }, + { + "message": "Variable \"$b\" of type \"String!\" has invalid default value null.\nExpected \"String!\", found null.", + "locations": [ + { + "line": 4, + "column": 23 + } + ] + }, + { + "message": "Variable \"$c\" of type \"ComplexInput\" has invalid default value {requiredField: null, intField: null}.\nIn field \"requiredField\": Expected \"Boolean!\", found null.", + "locations": [ + { + "line": 5, + "column": 28 + } + ] + } + ] + }, + { + "name": "Validate: Variable default values of correct type/no required variables with default values", + "rule": "DefaultValuesOfCorrectType", + "schema": 0, + "query": "\n query UnreachableDefaultValues($a: Int! = 3, $b: String! = \"default\") {\n dog { name }\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" of type \"Int!\" is required and will not use the default value. Perhaps you meant to use type \"Int\".", + "locations": [ + { + "line": 2, + "column": 49 + } + ] + }, + { + "message": "Variable \"$b\" of type \"String!\" is required and will not use the default value. 
Perhaps you meant to use type \"String\".", + "locations": [ + { + "line": 2, + "column": 66 + } + ] + } + ] + }, + { + "name": "Validate: Variable default values of correct type/variables with invalid default values", + "rule": "DefaultValuesOfCorrectType", + "schema": 0, + "query": "\n query InvalidDefaultValues(\n $a: Int = \"one\",\n $b: String = 4,\n $c: ComplexInput = \"notverycomplex\"\n ) {\n dog { name }\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" of type \"Int\" has invalid default value \"one\".\nExpected type \"Int\", found \"one\".", + "locations": [ + { + "line": 3, + "column": 19 + } + ] + }, + { + "message": "Variable \"$b\" of type \"String\" has invalid default value 4.\nExpected type \"String\", found 4.", + "locations": [ + { + "line": 4, + "column": 22 + } + ] + }, + { + "message": "Variable \"$c\" of type \"ComplexInput\" has invalid default value \"notverycomplex\".\nExpected \"ComplexInput\", found not an object.", + "locations": [ + { + "line": 5, + "column": 28 + } + ] + } + ] + }, + { + "name": "Validate: Variable default values of correct type/complex variables missing required field", + "rule": "DefaultValuesOfCorrectType", + "schema": 0, + "query": "\n query MissingRequiredField($a: ComplexInput = {intField: 3}) {\n dog { name }\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" of type \"ComplexInput\" has invalid default value {intField: 3}.\nIn field \"requiredField\": Expected \"Boolean!\", found null.", + "locations": [ + { + "line": 2, + "column": 53 + } + ] + } + ] + }, + { + "name": "Validate: Variable default values of correct type/list variables with invalid item", + "rule": "DefaultValuesOfCorrectType", + "schema": 0, + "query": "\n query InvalidItem($a: [String] = [\"one\", 2]) {\n dog { name }\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" of type \"[String]\" has invalid default value [\"one\", 2].\nIn element #1: Expected type \"String\", found 2.", + "locations": [ + { + "line": 2, + 
"column": 40 + } + ] + } + ] + }, + { + "name": "Validate: Fields on correct type/Object field selection", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment objectFieldSelection on Dog {\n __typename\n name\n }\n ", + "errors": [] + }, + { + "name": "Validate: Fields on correct type/Aliased object field selection", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment aliasedObjectFieldSelection on Dog {\n tn : __typename\n otherName : name\n }\n ", + "errors": [] + }, + { + "name": "Validate: Fields on correct type/Interface field selection", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment interfaceFieldSelection on Pet {\n __typename\n name\n }\n ", + "errors": [] + }, + { + "name": "Validate: Fields on correct type/Aliased interface field selection", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment interfaceFieldSelection on Pet {\n otherName : name\n }\n ", + "errors": [] + }, + { + "name": "Validate: Fields on correct type/Lying alias selection", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment lyingAliasSelection on Dog {\n name : nickname\n }\n ", + "errors": [] + }, + { + "name": "Validate: Fields on correct type/Ignores fields on unknown type", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment unknownSelection on UnknownType {\n unknownField\n }\n ", + "errors": [] + }, + { + "name": "Validate: Fields on correct type/reports errors when type is known again", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment typeKnownAgain on Pet {\n unknown_pet_field {\n ... 
on Cat {\n unknown_cat_field\n }\n }\n }", + "errors": [ + { + "message": "Cannot query field \"unknown_pet_field\" on type \"Pet\".", + "locations": [ + { + "line": 3, + "column": 9 + } + ] + }, + { + "message": "Cannot query field \"unknown_cat_field\" on type \"Cat\".", + "locations": [ + { + "line": 5, + "column": 13 + } + ] + } + ] + }, + { + "name": "Validate: Fields on correct type/Field not defined on fragment", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment fieldNotDefined on Dog {\n meowVolume\n }", + "errors": [ + { + "message": "Cannot query field \"meowVolume\" on type \"Dog\". Did you mean \"barkVolume\"?", + "locations": [ + { + "line": 3, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Fields on correct type/Ignores deeply unknown field", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment deepFieldNotDefined on Dog {\n unknown_field {\n deeper_unknown_field\n }\n }", + "errors": [ + { + "message": "Cannot query field \"unknown_field\" on type \"Dog\".", + "locations": [ + { + "line": 3, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Fields on correct type/Sub-field not defined", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment subFieldNotDefined on Human {\n pets {\n unknown_field\n }\n }", + "errors": [ + { + "message": "Cannot query field \"unknown_field\" on type \"Pet\".", + "locations": [ + { + "line": 4, + "column": 11 + } + ] + } + ] + }, + { + "name": "Validate: Fields on correct type/Field not defined on inline fragment", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment fieldNotDefined on Pet {\n ... on Dog {\n meowVolume\n }\n }", + "errors": [ + { + "message": "Cannot query field \"meowVolume\" on type \"Dog\". 
Did you mean \"barkVolume\"?", + "locations": [ + { + "line": 4, + "column": 11 + } + ] + } + ] + }, + { + "name": "Validate: Fields on correct type/Aliased field target not defined", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment aliasedFieldTargetNotDefined on Dog {\n volume : mooVolume\n }", + "errors": [ + { + "message": "Cannot query field \"mooVolume\" on type \"Dog\". Did you mean \"barkVolume\"?", + "locations": [ + { + "line": 3, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Fields on correct type/Aliased lying field target not defined", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment aliasedLyingFieldTargetNotDefined on Dog {\n barkVolume : kawVolume\n }", + "errors": [ + { + "message": "Cannot query field \"kawVolume\" on type \"Dog\". Did you mean \"barkVolume\"?", + "locations": [ + { + "line": 3, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Fields on correct type/Not defined on interface", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment notDefinedOnInterface on Pet {\n tailLength\n }", + "errors": [ + { + "message": "Cannot query field \"tailLength\" on type \"Pet\".", + "locations": [ + { + "line": 3, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Fields on correct type/Defined on implementors but not on interface", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment definedOnImplementorsButNotInterface on Pet {\n nickname\n }", + "errors": [ + { + "message": "Cannot query field \"nickname\" on type \"Pet\".", + "locations": [ + { + "line": 3, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Fields on correct type/Meta field selection on union", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment directFieldSelectionOnUnion on CatOrDog {\n __typename\n }", + "errors": [] + }, + { + "name": "Validate: Fields on correct type/Direct field selection on union", + "rule": 
"FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment directFieldSelectionOnUnion on CatOrDog {\n directField\n }", + "errors": [ + { + "message": "Cannot query field \"directField\" on type \"CatOrDog\".", + "locations": [ + { + "line": 3, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Fields on correct type/Defined on implementors queried on union", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment definedOnImplementorsQueriedOnUnion on CatOrDog {\n name\n }", + "errors": [ + { + "message": "Cannot query field \"name\" on type \"CatOrDog\".", + "locations": [ + { + "line": 3, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Fields on correct type/valid field in inline fragment", + "rule": "FieldsOnCorrectType", + "schema": 0, + "query": "\n fragment objectFieldSelection on Pet {\n ... on Dog {\n name\n }\n ... {\n name\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Fragments on composite types/object is valid fragment type", + "rule": "FragmentsOnCompositeTypes", + "schema": 0, + "query": "\n fragment validFragment on Dog {\n barks\n }\n ", + "errors": [] + }, + { + "name": "Validate: Fragments on composite types/interface is valid fragment type", + "rule": "FragmentsOnCompositeTypes", + "schema": 0, + "query": "\n fragment validFragment on Pet {\n name\n }\n ", + "errors": [] + }, + { + "name": "Validate: Fragments on composite types/object is valid inline fragment type", + "rule": "FragmentsOnCompositeTypes", + "schema": 0, + "query": "\n fragment validFragment on Pet {\n ... on Dog {\n barks\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Fragments on composite types/inline fragment without type is valid", + "rule": "FragmentsOnCompositeTypes", + "schema": 0, + "query": "\n fragment validFragment on Pet {\n ... 
{\n name\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Fragments on composite types/union is valid fragment type", + "rule": "FragmentsOnCompositeTypes", + "schema": 0, + "query": "\n fragment validFragment on CatOrDog {\n __typename\n }\n ", + "errors": [] + }, + { + "name": "Validate: Fragments on composite types/scalar is invalid fragment type", + "rule": "FragmentsOnCompositeTypes", + "schema": 0, + "query": "\n fragment scalarFragment on Boolean {\n bad\n }\n ", + "errors": [ + { + "message": "Fragment \"scalarFragment\" cannot condition on non composite type \"Boolean\".", + "locations": [ + { + "line": 2, + "column": 34 + } + ] + } + ] + }, + { + "name": "Validate: Fragments on composite types/enum is invalid fragment type", + "rule": "FragmentsOnCompositeTypes", + "schema": 0, + "query": "\n fragment scalarFragment on FurColor {\n bad\n }\n ", + "errors": [ + { + "message": "Fragment \"scalarFragment\" cannot condition on non composite type \"FurColor\".", + "locations": [ + { + "line": 2, + "column": 34 + } + ] + } + ] + }, + { + "name": "Validate: Fragments on composite types/input object is invalid fragment type", + "rule": "FragmentsOnCompositeTypes", + "schema": 0, + "query": "\n fragment inputFragment on ComplexInput {\n stringField\n }\n ", + "errors": [ + { + "message": "Fragment \"inputFragment\" cannot condition on non composite type \"ComplexInput\".", + "locations": [ + { + "line": 2, + "column": 33 + } + ] + } + ] + }, + { + "name": "Validate: Fragments on composite types/scalar is invalid inline fragment type", + "rule": "FragmentsOnCompositeTypes", + "schema": 0, + "query": "\n fragment invalidFragment on Pet {\n ... 
on String {\n barks\n }\n }\n ", + "errors": [ + { + "message": "Fragment cannot condition on non composite type \"String\".", + "locations": [ + { + "line": 3, + "column": 16 + } + ] + } + ] + }, + { + "name": "Validate: Known argument names/single arg is known", + "rule": "KnownArgumentNames", + "schema": 0, + "query": "\n fragment argOnRequiredArg on Dog {\n doesKnowCommand(dogCommand: SIT)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Known argument names/multiple args are known", + "rule": "KnownArgumentNames", + "schema": 0, + "query": "\n fragment multipleArgs on ComplicatedArgs {\n multipleReqs(req1: 1, req2: 2)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Known argument names/ignores args of unknown fields", + "rule": "KnownArgumentNames", + "schema": 0, + "query": "\n fragment argOnUnknownField on Dog {\n unknownField(unknownArg: SIT)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Known argument names/multiple args in reverse order are known", + "rule": "KnownArgumentNames", + "schema": 0, + "query": "\n fragment multipleArgsReverseOrder on ComplicatedArgs {\n multipleReqs(req2: 2, req1: 1)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Known argument names/no args on optional arg", + "rule": "KnownArgumentNames", + "schema": 0, + "query": "\n fragment noArgOnOptionalArg on Dog {\n isHousetrained\n }\n ", + "errors": [] + }, + { + "name": "Validate: Known argument names/args are known deeply", + "rule": "KnownArgumentNames", + "schema": 0, + "query": "\n {\n dog {\n doesKnowCommand(dogCommand: SIT)\n }\n human {\n pet {\n ... 
on Dog {\n doesKnowCommand(dogCommand: SIT)\n }\n }\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Known argument names/directive args are known", + "rule": "KnownArgumentNames", + "schema": 0, + "query": "\n {\n dog @skip(if: true)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Known argument names/undirective args are invalid", + "rule": "KnownArgumentNames", + "schema": 0, + "query": "\n {\n dog @skip(unless: true)\n }\n ", + "errors": [ + { + "message": "Unknown argument \"unless\" on directive \"@skip\".", + "locations": [ + { + "line": 3, + "column": 19 + } + ] + } + ] + }, + { + "name": "Validate: Known argument names/invalid arg name", + "rule": "KnownArgumentNames", + "schema": 0, + "query": "\n fragment invalidArgName on Dog {\n doesKnowCommand(unknown: true)\n }\n ", + "errors": [ + { + "message": "Unknown argument \"unknown\" on field \"doesKnowCommand\" of type \"Dog\".", + "locations": [ + { + "line": 3, + "column": 25 + } + ] + } + ] + }, + { + "name": "Validate: Known argument names/unknown args amongst known args", + "rule": "KnownArgumentNames", + "schema": 0, + "query": "\n fragment oneGoodArgOneInvalidArg on Dog {\n doesKnowCommand(whoknows: 1, dogCommand: SIT, unknown: true)\n }\n ", + "errors": [ + { + "message": "Unknown argument \"whoknows\" on field \"doesKnowCommand\" of type \"Dog\".", + "locations": [ + { + "line": 3, + "column": 25 + } + ] + }, + { + "message": "Unknown argument \"unknown\" on field \"doesKnowCommand\" of type \"Dog\".", + "locations": [ + { + "line": 3, + "column": 55 + } + ] + } + ] + }, + { + "name": "Validate: Known argument names/unknown args deeply", + "rule": "KnownArgumentNames", + "schema": 0, + "query": "\n {\n dog {\n doesKnowCommand(unknown: true)\n }\n human {\n pet {\n ... 
on Dog {\n doesKnowCommand(unknown: true)\n }\n }\n }\n }\n ", + "errors": [ + { + "message": "Unknown argument \"unknown\" on field \"doesKnowCommand\" of type \"Dog\".", + "locations": [ + { + "line": 4, + "column": 27 + } + ] + }, + { + "message": "Unknown argument \"unknown\" on field \"doesKnowCommand\" of type \"Dog\".", + "locations": [ + { + "line": 9, + "column": 31 + } + ] + } + ] + }, + { + "name": "Validate: Known directives/with no directives", + "rule": "KnownDirectives", + "schema": 0, + "query": "\n query Foo {\n name\n ...Frag\n }\n\n fragment Frag on Dog {\n name\n }\n ", + "errors": [] + }, + { + "name": "Validate: Known directives/with known directives", + "rule": "KnownDirectives", + "schema": 0, + "query": "\n {\n dog @include(if: true) {\n name\n }\n human @skip(if: false) {\n name\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Known directives/with unknown directive", + "rule": "KnownDirectives", + "schema": 0, + "query": "\n {\n dog @unknown(directive: \"value\") {\n name\n }\n }\n ", + "errors": [ + { + "message": "Unknown directive \"unknown\".", + "locations": [ + { + "line": 3, + "column": 13 + } + ] + } + ] + }, + { + "name": "Validate: Known directives/with many unknown directives", + "rule": "KnownDirectives", + "schema": 0, + "query": "\n {\n dog @unknown(directive: \"value\") {\n name\n }\n human @unknown(directive: \"value\") {\n name\n pets @unknown(directive: \"value\") {\n name\n }\n }\n }\n ", + "errors": [ + { + "message": "Unknown directive \"unknown\".", + "locations": [ + { + "line": 3, + "column": 13 + } + ] + }, + { + "message": "Unknown directive \"unknown\".", + "locations": [ + { + "line": 6, + "column": 15 + } + ] + }, + { + "message": "Unknown directive \"unknown\".", + "locations": [ + { + "line": 8, + "column": 16 + } + ] + } + ] + }, + { + "name": "Validate: Known directives/with well placed directives", + "rule": "KnownDirectives", + "schema": 0, + "query": "\n query Foo @onQuery {\n name @include(if: 
true)\n ...Frag @include(if: true)\n skippedField @skip(if: true)\n ...SkippedFrag @skip(if: true)\n }\n\n mutation Bar @onMutation {\n someField\n }\n ", + "errors": [] + }, + { + "name": "Validate: Known directives/with misplaced directives", + "rule": "KnownDirectives", + "schema": 0, + "query": "\n query Foo @include(if: true) {\n name @onQuery\n ...Frag @onQuery\n }\n\n mutation Bar @onQuery {\n someField\n }\n ", + "errors": [ + { + "message": "Directive \"include\" may not be used on QUERY.", + "locations": [ + { + "line": 2, + "column": 17 + } + ] + }, + { + "message": "Directive \"onQuery\" may not be used on FIELD.", + "locations": [ + { + "line": 3, + "column": 14 + } + ] + }, + { + "message": "Directive \"onQuery\" may not be used on FRAGMENT_SPREAD.", + "locations": [ + { + "line": 4, + "column": 17 + } + ] + }, + { + "message": "Directive \"onQuery\" may not be used on MUTATION.", + "locations": [ + { + "line": 7, + "column": 20 + } + ] + } + ] + }, + { + "name": "Validate: Known fragment names/known fragment names are valid", + "rule": "KnownFragmentNames", + "schema": 0, + "query": "\n {\n human(id: 4) {\n ...HumanFields1\n ... on Human {\n ...HumanFields2\n }\n ... {\n name\n }\n }\n }\n fragment HumanFields1 on Human {\n name\n ...HumanFields3\n }\n fragment HumanFields2 on Human {\n name\n }\n fragment HumanFields3 on Human {\n name\n }\n ", + "errors": [] + }, + { + "name": "Validate: Known fragment names/unknown fragment names are invalid", + "rule": "KnownFragmentNames", + "schema": 0, + "query": "\n {\n human(id: 4) {\n ...UnknownFragment1\n ... 
on Human {\n ...UnknownFragment2\n }\n }\n }\n fragment HumanFields on Human {\n name\n ...UnknownFragment3\n }\n ", + "errors": [ + { + "message": "Unknown fragment \"UnknownFragment1\".", + "locations": [ + { + "line": 4, + "column": 14 + } + ] + }, + { + "message": "Unknown fragment \"UnknownFragment2\".", + "locations": [ + { + "line": 6, + "column": 16 + } + ] + }, + { + "message": "Unknown fragment \"UnknownFragment3\".", + "locations": [ + { + "line": 12, + "column": 12 + } + ] + } + ] + }, + { + "name": "Validate: Known type names/known type names are valid", + "rule": "KnownTypeNames", + "schema": 0, + "query": "\n query Foo($var: String, $required: [String!]!) {\n user(id: 4) {\n pets { ... on Pet { name }, ...PetFields, ... { name } }\n }\n }\n fragment PetFields on Pet {\n name\n }\n ", + "errors": [] + }, + { + "name": "Validate: Known type names/unknown type names are invalid", + "rule": "KnownTypeNames", + "schema": 0, + "query": "\n query Foo($var: JumbledUpLetters) {\n user(id: 4) {\n name\n pets { ... 
on Badger { name }, ...PetFields }\n }\n }\n fragment PetFields on Peettt {\n name\n }\n ", + "errors": [ + { + "message": "Unknown type \"JumbledUpLetters\".", + "locations": [ + { + "line": 2, + "column": 23 + } + ] + }, + { + "message": "Unknown type \"Badger\".", + "locations": [ + { + "line": 5, + "column": 25 + } + ] + }, + { + "message": "Unknown type \"Peettt\".", + "locations": [ + { + "line": 8, + "column": 29 + } + ] + } + ] + }, + { + "name": "Validate: Anonymous operation must be alone/no operations", + "rule": "LoneAnonymousOperation", + "schema": 0, + "query": "\n fragment fragA on Type {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Anonymous operation must be alone/one anon operation", + "rule": "LoneAnonymousOperation", + "schema": 0, + "query": "\n {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Anonymous operation must be alone/multiple named operations", + "rule": "LoneAnonymousOperation", + "schema": 0, + "query": "\n query Foo {\n field\n }\n\n query Bar {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Anonymous operation must be alone/anon operation with fragment", + "rule": "LoneAnonymousOperation", + "schema": 0, + "query": "\n {\n ...Foo\n }\n fragment Foo on Type {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Anonymous operation must be alone/multiple anon operations", + "rule": "LoneAnonymousOperation", + "schema": 0, + "query": "\n {\n fieldA\n }\n {\n fieldB\n }\n ", + "errors": [ + { + "message": "This anonymous operation must be the only defined operation.", + "locations": [ + { + "line": 2, + "column": 7 + } + ] + }, + { + "message": "This anonymous operation must be the only defined operation.", + "locations": [ + { + "line": 5, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: Anonymous operation must be alone/anon operation with a mutation", + "rule": "LoneAnonymousOperation", + "schema": 0, + "query": "\n {\n fieldA\n }\n mutation Foo {\n fieldB\n }\n ", 
+ "errors": [ + { + "message": "This anonymous operation must be the only defined operation.", + "locations": [ + { + "line": 2, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: Anonymous operation must be alone/anon operation with a subscription", + "rule": "LoneAnonymousOperation", + "schema": 0, + "query": "\n {\n fieldA\n }\n subscription Foo {\n fieldB\n }\n ", + "errors": [ + { + "message": "This anonymous operation must be the only defined operation.", + "locations": [ + { + "line": 2, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No circular fragment spreads/single reference is valid", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment fragA on Dog { ...fragB }\n fragment fragB on Dog { name }\n ", + "errors": [] + }, + { + "name": "Validate: No circular fragment spreads/spreading twice is not circular", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment fragA on Dog { ...fragB, ...fragB }\n fragment fragB on Dog { name }\n ", + "errors": [] + }, + { + "name": "Validate: No circular fragment spreads/spreading twice indirectly is not circular", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment fragA on Dog { ...fragB, ...fragC }\n fragment fragB on Dog { ...fragC }\n fragment fragC on Dog { name }\n ", + "errors": [] + }, + { + "name": "Validate: No circular fragment spreads/double spread within abstract types", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment nameFragment on Pet {\n ... on Dog { name }\n ... on Cat { name }\n }\n\n fragment spreadsInAnon on Pet {\n ... on Dog { ...nameFragment }\n ... 
on Cat { ...nameFragment }\n }\n ", + "errors": [] + }, + { + "name": "Validate: No circular fragment spreads/does not false positive on unknown fragment", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment nameFragment on Pet {\n ...UnknownFragment\n }\n ", + "errors": [] + }, + { + "name": "Validate: No circular fragment spreads/spreading recursively within field fails", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment fragA on Human { relatives { ...fragA } },\n ", + "errors": [ + { + "message": "Cannot spread fragment \"fragA\" within itself.", + "locations": [ + { + "line": 2, + "column": 45 + } + ] + } + ] + }, + { + "name": "Validate: No circular fragment spreads/no spreading itself directly", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment fragA on Dog { ...fragA }\n ", + "errors": [ + { + "message": "Cannot spread fragment \"fragA\" within itself.", + "locations": [ + { + "line": 2, + "column": 31 + } + ] + } + ] + }, + { + "name": "Validate: No circular fragment spreads/no spreading itself directly within inline fragment", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment fragA on Pet {\n ... 
on Dog {\n ...fragA\n }\n }\n ", + "errors": [ + { + "message": "Cannot spread fragment \"fragA\" within itself.", + "locations": [ + { + "line": 4, + "column": 11 + } + ] + } + ] + }, + { + "name": "Validate: No circular fragment spreads/no spreading itself indirectly", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment fragA on Dog { ...fragB }\n fragment fragB on Dog { ...fragA }\n ", + "errors": [ + { + "message": "Cannot spread fragment \"fragA\" within itself via fragB.", + "locations": [ + { + "line": 2, + "column": 31 + }, + { + "line": 3, + "column": 31 + } + ] + } + ] + }, + { + "name": "Validate: No circular fragment spreads/no spreading itself indirectly reports opposite order", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment fragB on Dog { ...fragA }\n fragment fragA on Dog { ...fragB }\n ", + "errors": [ + { + "message": "Cannot spread fragment \"fragB\" within itself via fragA.", + "locations": [ + { + "line": 2, + "column": 31 + }, + { + "line": 3, + "column": 31 + } + ] + } + ] + }, + { + "name": "Validate: No circular fragment spreads/no spreading itself indirectly within inline fragment", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment fragA on Pet {\n ... on Dog {\n ...fragB\n }\n }\n fragment fragB on Pet {\n ... 
on Dog {\n ...fragA\n }\n }\n ", + "errors": [ + { + "message": "Cannot spread fragment \"fragA\" within itself via fragB.", + "locations": [ + { + "line": 4, + "column": 11 + }, + { + "line": 9, + "column": 11 + } + ] + } + ] + }, + { + "name": "Validate: No circular fragment spreads/no spreading itself deeply", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment fragA on Dog { ...fragB }\n fragment fragB on Dog { ...fragC }\n fragment fragC on Dog { ...fragO }\n fragment fragX on Dog { ...fragY }\n fragment fragY on Dog { ...fragZ }\n fragment fragZ on Dog { ...fragO }\n fragment fragO on Dog { ...fragP }\n fragment fragP on Dog { ...fragA, ...fragX }\n ", + "errors": [ + { + "message": "Cannot spread fragment \"fragA\" within itself via fragB, fragC, fragO, fragP.", + "locations": [ + { + "line": 2, + "column": 31 + }, + { + "line": 3, + "column": 31 + }, + { + "line": 4, + "column": 31 + }, + { + "line": 8, + "column": 31 + }, + { + "line": 9, + "column": 31 + } + ] + }, + { + "message": "Cannot spread fragment \"fragO\" within itself via fragP, fragX, fragY, fragZ.", + "locations": [ + { + "line": 8, + "column": 31 + }, + { + "line": 9, + "column": 41 + }, + { + "line": 5, + "column": 31 + }, + { + "line": 6, + "column": 31 + }, + { + "line": 7, + "column": 31 + } + ] + } + ] + }, + { + "name": "Validate: No circular fragment spreads/no spreading itself deeply two paths", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment fragA on Dog { ...fragB, ...fragC }\n fragment fragB on Dog { ...fragA }\n fragment fragC on Dog { ...fragA }\n ", + "errors": [ + { + "message": "Cannot spread fragment \"fragA\" within itself via fragB.", + "locations": [ + { + "line": 2, + "column": 31 + }, + { + "line": 3, + "column": 31 + } + ] + }, + { + "message": "Cannot spread fragment \"fragA\" within itself via fragC.", + "locations": [ + { + "line": 2, + "column": 41 + }, + { + "line": 4, + "column": 31 + } + ] + } + ] + }, + { + "name": 
"Validate: No circular fragment spreads/no spreading itself deeply two paths -- alt traverse order", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment fragA on Dog { ...fragC }\n fragment fragB on Dog { ...fragC }\n fragment fragC on Dog { ...fragA, ...fragB }\n ", + "errors": [ + { + "message": "Cannot spread fragment \"fragA\" within itself via fragC.", + "locations": [ + { + "line": 2, + "column": 31 + }, + { + "line": 4, + "column": 31 + } + ] + }, + { + "message": "Cannot spread fragment \"fragC\" within itself via fragB.", + "locations": [ + { + "line": 4, + "column": 41 + }, + { + "line": 3, + "column": 31 + } + ] + } + ] + }, + { + "name": "Validate: No circular fragment spreads/no spreading itself deeply and immediately", + "rule": "NoFragmentCycles", + "schema": 0, + "query": "\n fragment fragA on Dog { ...fragB }\n fragment fragB on Dog { ...fragB, ...fragC }\n fragment fragC on Dog { ...fragA, ...fragB }\n ", + "errors": [ + { + "message": "Cannot spread fragment \"fragB\" within itself.", + "locations": [ + { + "line": 3, + "column": 31 + } + ] + }, + { + "message": "Cannot spread fragment \"fragA\" within itself via fragB, fragC.", + "locations": [ + { + "line": 2, + "column": 31 + }, + { + "line": 3, + "column": 41 + }, + { + "line": 4, + "column": 31 + } + ] + }, + { + "message": "Cannot spread fragment \"fragB\" within itself via fragC.", + "locations": [ + { + "line": 3, + "column": 41 + }, + { + "line": 4, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: No undefined variables/all variables defined", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($a: String, $b: String, $c: String) {\n field(a: $a, b: $b, c: $c)\n }\n ", + "errors": [] + }, + { + "name": "Validate: No undefined variables/all variables deeply defined", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($a: String, $b: String, $c: String) {\n field(a: $a) {\n field(b: $b) {\n field(c: $c)\n }\n }\n }\n 
", + "errors": [] + }, + { + "name": "Validate: No undefined variables/all variables deeply in inline fragments defined", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($a: String, $b: String, $c: String) {\n ... on Type {\n field(a: $a) {\n field(b: $b) {\n ... on Type {\n field(c: $c)\n }\n }\n }\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: No undefined variables/all variables in fragments deeply defined", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($a: String, $b: String, $c: String) {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a) {\n ...FragB\n }\n }\n fragment FragB on Type {\n field(b: $b) {\n ...FragC\n }\n }\n fragment FragC on Type {\n field(c: $c)\n }\n ", + "errors": [] + }, + { + "name": "Validate: No undefined variables/variable within single fragment defined in multiple operations", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($a: String) {\n ...FragA\n }\n query Bar($a: String) {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a)\n }\n ", + "errors": [] + }, + { + "name": "Validate: No undefined variables/variable within fragments defined in operations", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($a: String) {\n ...FragA\n }\n query Bar($b: String) {\n ...FragB\n }\n fragment FragA on Type {\n field(a: $a)\n }\n fragment FragB on Type {\n field(b: $b)\n }\n ", + "errors": [] + }, + { + "name": "Validate: No undefined variables/variable within recursive fragment defined", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($a: String) {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a) {\n ...FragA\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: No undefined variables/variable not defined", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($a: String, $b: String, $c: String) {\n field(a: $a, b: $b, c: $c, d: $d)\n }\n ", + "errors": [ + { + 
"message": "Variable \"$d\" is not defined by operation \"Foo\".", + "locations": [ + { + "line": 3, + "column": 39 + }, + { + "line": 2, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No undefined variables/variable not defined by un-named query", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n {\n field(a: $a)\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" is not defined.", + "locations": [ + { + "line": 3, + "column": 18 + }, + { + "line": 2, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No undefined variables/multiple variables not defined", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($b: String) {\n field(a: $a, b: $b, c: $c)\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" is not defined by operation \"Foo\".", + "locations": [ + { + "line": 3, + "column": 18 + }, + { + "line": 2, + "column": 7 + } + ] + }, + { + "message": "Variable \"$c\" is not defined by operation \"Foo\".", + "locations": [ + { + "line": 3, + "column": 32 + }, + { + "line": 2, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No undefined variables/variable in fragment not defined by un-named query", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a)\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" is not defined.", + "locations": [ + { + "line": 6, + "column": 18 + }, + { + "line": 2, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No undefined variables/variable in fragment not defined by operation", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($a: String, $b: String) {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a) {\n ...FragB\n }\n }\n fragment FragB on Type {\n field(b: $b) {\n ...FragC\n }\n }\n fragment FragC on Type {\n field(c: $c)\n }\n ", + "errors": [ + { + "message": "Variable \"$c\" is not defined by operation \"Foo\".", + "locations": [ + { + 
"line": 16, + "column": 18 + }, + { + "line": 2, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No undefined variables/multiple variables in fragments not defined", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($b: String) {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a) {\n ...FragB\n }\n }\n fragment FragB on Type {\n field(b: $b) {\n ...FragC\n }\n }\n fragment FragC on Type {\n field(c: $c)\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" is not defined by operation \"Foo\".", + "locations": [ + { + "line": 6, + "column": 18 + }, + { + "line": 2, + "column": 7 + } + ] + }, + { + "message": "Variable \"$c\" is not defined by operation \"Foo\".", + "locations": [ + { + "line": 16, + "column": 18 + }, + { + "line": 2, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No undefined variables/single variable in fragment not defined by multiple operations", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($a: String) {\n ...FragAB\n }\n query Bar($a: String) {\n ...FragAB\n }\n fragment FragAB on Type {\n field(a: $a, b: $b)\n }\n ", + "errors": [ + { + "message": "Variable \"$b\" is not defined by operation \"Foo\".", + "locations": [ + { + "line": 9, + "column": 25 + }, + { + "line": 2, + "column": 7 + } + ] + }, + { + "message": "Variable \"$b\" is not defined by operation \"Bar\".", + "locations": [ + { + "line": 9, + "column": 25 + }, + { + "line": 5, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No undefined variables/variables in fragment not defined by multiple operations", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($b: String) {\n ...FragAB\n }\n query Bar($a: String) {\n ...FragAB\n }\n fragment FragAB on Type {\n field(a: $a, b: $b)\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" is not defined by operation \"Foo\".", + "locations": [ + { + "line": 9, + "column": 18 + }, + { + "line": 2, + "column": 7 + } + ] + 
}, + { + "message": "Variable \"$b\" is not defined by operation \"Bar\".", + "locations": [ + { + "line": 9, + "column": 25 + }, + { + "line": 5, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No undefined variables/variable in fragment used by other operation", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($b: String) {\n ...FragA\n }\n query Bar($a: String) {\n ...FragB\n }\n fragment FragA on Type {\n field(a: $a)\n }\n fragment FragB on Type {\n field(b: $b)\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" is not defined by operation \"Foo\".", + "locations": [ + { + "line": 9, + "column": 18 + }, + { + "line": 2, + "column": 7 + } + ] + }, + { + "message": "Variable \"$b\" is not defined by operation \"Bar\".", + "locations": [ + { + "line": 12, + "column": 18 + }, + { + "line": 5, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No undefined variables/multiple undefined variables produce multiple errors", + "rule": "NoUndefinedVariables", + "schema": 0, + "query": "\n query Foo($b: String) {\n ...FragAB\n }\n query Bar($a: String) {\n ...FragAB\n }\n fragment FragAB on Type {\n field1(a: $a, b: $b)\n ...FragC\n field3(a: $a, b: $b)\n }\n fragment FragC on Type {\n field2(c: $c)\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" is not defined by operation \"Foo\".", + "locations": [ + { + "line": 9, + "column": 19 + }, + { + "line": 2, + "column": 7 + } + ] + }, + { + "message": "Variable \"$a\" is not defined by operation \"Foo\".", + "locations": [ + { + "line": 11, + "column": 19 + }, + { + "line": 2, + "column": 7 + } + ] + }, + { + "message": "Variable \"$c\" is not defined by operation \"Foo\".", + "locations": [ + { + "line": 14, + "column": 19 + }, + { + "line": 2, + "column": 7 + } + ] + }, + { + "message": "Variable \"$b\" is not defined by operation \"Bar\".", + "locations": [ + { + "line": 9, + "column": 26 + }, + { + "line": 5, + "column": 7 + } + ] + }, + { + "message": "Variable 
\"$b\" is not defined by operation \"Bar\".", + "locations": [ + { + "line": 11, + "column": 26 + }, + { + "line": 5, + "column": 7 + } + ] + }, + { + "message": "Variable \"$c\" is not defined by operation \"Bar\".", + "locations": [ + { + "line": 14, + "column": 19 + }, + { + "line": 5, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No unused fragments/all fragment names are used", + "rule": "NoUnusedFragments", + "schema": 0, + "query": "\n {\n human(id: 4) {\n ...HumanFields1\n ... on Human {\n ...HumanFields2\n }\n }\n }\n fragment HumanFields1 on Human {\n name\n ...HumanFields3\n }\n fragment HumanFields2 on Human {\n name\n }\n fragment HumanFields3 on Human {\n name\n }\n ", + "errors": [] + }, + { + "name": "Validate: No unused fragments/all fragment names are used by multiple operations", + "rule": "NoUnusedFragments", + "schema": 0, + "query": "\n query Foo {\n human(id: 4) {\n ...HumanFields1\n }\n }\n query Bar {\n human(id: 4) {\n ...HumanFields2\n }\n }\n fragment HumanFields1 on Human {\n name\n ...HumanFields3\n }\n fragment HumanFields2 on Human {\n name\n }\n fragment HumanFields3 on Human {\n name\n }\n ", + "errors": [] + }, + { + "name": "Validate: No unused fragments/contains unknown fragments", + "rule": "NoUnusedFragments", + "schema": 0, + "query": "\n query Foo {\n human(id: 4) {\n ...HumanFields1\n }\n }\n query Bar {\n human(id: 4) {\n ...HumanFields2\n }\n }\n fragment HumanFields1 on Human {\n name\n ...HumanFields3\n }\n fragment HumanFields2 on Human {\n name\n }\n fragment HumanFields3 on Human {\n name\n }\n fragment Unused1 on Human {\n name\n }\n fragment Unused2 on Human {\n name\n }\n ", + "errors": [ + { + "message": "Fragment \"Unused1\" is never used.", + "locations": [ + { + "line": 22, + "column": 7 + } + ] + }, + { + "message": "Fragment \"Unused2\" is never used.", + "locations": [ + { + "line": 25, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No unused fragments/contains unknown fragments 
with ref cycle", + "rule": "NoUnusedFragments", + "schema": 0, + "query": "\n query Foo {\n human(id: 4) {\n ...HumanFields1\n }\n }\n query Bar {\n human(id: 4) {\n ...HumanFields2\n }\n }\n fragment HumanFields1 on Human {\n name\n ...HumanFields3\n }\n fragment HumanFields2 on Human {\n name\n }\n fragment HumanFields3 on Human {\n name\n }\n fragment Unused1 on Human {\n name\n ...Unused2\n }\n fragment Unused2 on Human {\n name\n ...Unused1\n }\n ", + "errors": [ + { + "message": "Fragment \"Unused1\" is never used.", + "locations": [ + { + "line": 22, + "column": 7 + } + ] + }, + { + "message": "Fragment \"Unused2\" is never used.", + "locations": [ + { + "line": 26, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No unused fragments/contains unknown and undef fragments", + "rule": "NoUnusedFragments", + "schema": 0, + "query": "\n query Foo {\n human(id: 4) {\n ...bar\n }\n }\n fragment foo on Human {\n name\n }\n ", + "errors": [ + { + "message": "Fragment \"foo\" is never used.", + "locations": [ + { + "line": 7, + "column": 7 + } + ] + } + ] + }, + { + "name": "Validate: No unused variables/uses all variables", + "rule": "NoUnusedVariables", + "schema": 0, + "query": "\n query ($a: String, $b: String, $c: String) {\n field(a: $a, b: $b, c: $c)\n }\n ", + "errors": [] + }, + { + "name": "Validate: No unused variables/uses all variables deeply", + "rule": "NoUnusedVariables", + "schema": 0, + "query": "\n query Foo($a: String, $b: String, $c: String) {\n field(a: $a) {\n field(b: $b) {\n field(c: $c)\n }\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: No unused variables/uses all variables deeply in inline fragments", + "rule": "NoUnusedVariables", + "schema": 0, + "query": "\n query Foo($a: String, $b: String, $c: String) {\n ... on Type {\n field(a: $a) {\n field(b: $b) {\n ... 
on Type {\n field(c: $c)\n }\n }\n }\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: No unused variables/uses all variables in fragments", + "rule": "NoUnusedVariables", + "schema": 0, + "query": "\n query Foo($a: String, $b: String, $c: String) {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a) {\n ...FragB\n }\n }\n fragment FragB on Type {\n field(b: $b) {\n ...FragC\n }\n }\n fragment FragC on Type {\n field(c: $c)\n }\n ", + "errors": [] + }, + { + "name": "Validate: No unused variables/variable used by fragment in multiple operations", + "rule": "NoUnusedVariables", + "schema": 0, + "query": "\n query Foo($a: String) {\n ...FragA\n }\n query Bar($b: String) {\n ...FragB\n }\n fragment FragA on Type {\n field(a: $a)\n }\n fragment FragB on Type {\n field(b: $b)\n }\n ", + "errors": [] + }, + { + "name": "Validate: No unused variables/variable used by recursive fragment", + "rule": "NoUnusedVariables", + "schema": 0, + "query": "\n query Foo($a: String) {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a) {\n ...FragA\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: No unused variables/variable not used", + "rule": "NoUnusedVariables", + "schema": 0, + "query": "\n query ($a: String, $b: String, $c: String) {\n field(a: $a, b: $b)\n }\n ", + "errors": [ + { + "message": "Variable \"$c\" is never used.", + "locations": [ + { + "line": 2, + "column": 38 + } + ] + } + ] + }, + { + "name": "Validate: No unused variables/multiple variables not used", + "rule": "NoUnusedVariables", + "schema": 0, + "query": "\n query Foo($a: String, $b: String, $c: String) {\n field(b: $b)\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" is never used in operation \"Foo\".", + "locations": [ + { + "line": 2, + "column": 17 + } + ] + }, + { + "message": "Variable \"$c\" is never used in operation \"Foo\".", + "locations": [ + { + "line": 2, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: No unused variables/variable not used in 
fragments", + "rule": "NoUnusedVariables", + "schema": 0, + "query": "\n query Foo($a: String, $b: String, $c: String) {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a) {\n ...FragB\n }\n }\n fragment FragB on Type {\n field(b: $b) {\n ...FragC\n }\n }\n fragment FragC on Type {\n field\n }\n ", + "errors": [ + { + "message": "Variable \"$c\" is never used in operation \"Foo\".", + "locations": [ + { + "line": 2, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: No unused variables/multiple variables not used in fragments", + "rule": "NoUnusedVariables", + "schema": 0, + "query": "\n query Foo($a: String, $b: String, $c: String) {\n ...FragA\n }\n fragment FragA on Type {\n field {\n ...FragB\n }\n }\n fragment FragB on Type {\n field(b: $b) {\n ...FragC\n }\n }\n fragment FragC on Type {\n field\n }\n ", + "errors": [ + { + "message": "Variable \"$a\" is never used in operation \"Foo\".", + "locations": [ + { + "line": 2, + "column": 17 + } + ] + }, + { + "message": "Variable \"$c\" is never used in operation \"Foo\".", + "locations": [ + { + "line": 2, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: No unused variables/variable not used by unreferenced fragment", + "rule": "NoUnusedVariables", + "schema": 0, + "query": "\n query Foo($b: String) {\n ...FragA\n }\n fragment FragA on Type {\n field(a: $a)\n }\n fragment FragB on Type {\n field(b: $b)\n }\n ", + "errors": [ + { + "message": "Variable \"$b\" is never used in operation \"Foo\".", + "locations": [ + { + "line": 2, + "column": 17 + } + ] + } + ] + }, + { + "name": "Validate: No unused variables/variable not used by fragment used by other operation", + "rule": "NoUnusedVariables", + "schema": 0, + "query": "\n query Foo($b: String) {\n ...FragA\n }\n query Bar($a: String) {\n ...FragB\n }\n fragment FragA on Type {\n field(a: $a)\n }\n fragment FragB on Type {\n field(b: $b)\n }\n ", + "errors": [ + { + "message": "Variable \"$b\" is never used in operation \"Foo\".", + 
"locations": [ + { + "line": 2, + "column": 17 + } + ] + }, + { + "message": "Variable \"$a\" is never used in operation \"Bar\".", + "locations": [ + { + "line": 5, + "column": 17 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/unique fields", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment uniqueFields on Dog {\n name\n nickname\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/identical fields", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment mergeIdenticalFields on Dog {\n name\n name\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/identical fields with identical args", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment mergeIdenticalFieldsWithIdenticalArgs on Dog {\n doesKnowCommand(dogCommand: SIT)\n doesKnowCommand(dogCommand: SIT)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/identical fields with identical directives", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment mergeSameFieldsWithSameDirectives on Dog {\n name @include(if: true)\n name @include(if: true)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/different args with different aliases", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment differentArgsWithDifferentAliases on Dog {\n knowsSit: doesKnowCommand(dogCommand: SIT)\n knowsDown: doesKnowCommand(dogCommand: DOWN)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/different directives with different aliases", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment differentDirectivesWithDifferentAliases on Dog {\n nameIfTrue: name @include(if: true)\n nameIfFalse: name @include(if: false)\n }\n ", + "errors": [] + }, + { + "name": "Validate: 
Overlapping fields can be merged/different skip/include directives accepted", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment differentDirectivesWithDifferentAliases on Dog {\n name @include(if: true)\n name @include(if: false)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/Same aliases with different field targets", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment sameAliasesWithDifferentFieldTargets on Dog {\n fido: name\n fido: nickname\n }\n ", + "errors": [ + { + "message": "Fields \"fido\" conflict because name and nickname are different fields. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 3, + "column": 9 + }, + { + "line": 4, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/Same aliases allowed on non-overlapping fields", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment sameAliasesWithDifferentFieldTargets on Pet {\n ... on Dog {\n name\n }\n ... on Cat {\n name: nickname\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/Alias masking direct field access", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment aliasMaskingDirectFieldAccess on Dog {\n name: nickname\n name\n }\n ", + "errors": [ + { + "message": "Fields \"name\" conflict because nickname and name are different fields. 
Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 3, + "column": 9 + }, + { + "line": 4, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/different args, second adds an argument", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment conflictingArgs on Dog {\n doesKnowCommand\n doesKnowCommand(dogCommand: HEEL)\n }\n ", + "errors": [ + { + "message": "Fields \"doesKnowCommand\" conflict because they have differing arguments. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 3, + "column": 9 + }, + { + "line": 4, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/different args, second missing an argument", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment conflictingArgs on Dog {\n doesKnowCommand(dogCommand: SIT)\n doesKnowCommand\n }\n ", + "errors": [ + { + "message": "Fields \"doesKnowCommand\" conflict because they have differing arguments. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 3, + "column": 9 + }, + { + "line": 4, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/conflicting args", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment conflictingArgs on Dog {\n doesKnowCommand(dogCommand: SIT)\n doesKnowCommand(dogCommand: HEEL)\n }\n ", + "errors": [ + { + "message": "Fields \"doesKnowCommand\" conflict because they have differing arguments. 
Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 3, + "column": 9 + }, + { + "line": 4, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/allows different args where no conflict is possible", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n fragment conflictingArgs on Pet {\n ... on Dog {\n name(surname: true)\n }\n ... on Cat {\n name\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/encounters conflict in fragments", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n {\n ...A\n ...B\n }\n fragment A on Type {\n x: a\n }\n fragment B on Type {\n x: b\n }\n ", + "errors": [ + { + "message": "Fields \"x\" conflict because a and b are different fields. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 7, + "column": 9 + }, + { + "line": 10, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/reports each conflict once", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n {\n f1 {\n ...A\n ...B\n }\n f2 {\n ...B\n ...A\n }\n f3 {\n ...A\n ...B\n x: c\n }\n }\n fragment A on Type {\n x: a\n }\n fragment B on Type {\n x: b\n }\n ", + "errors": [ + { + "message": "Fields \"x\" conflict because a and b are different fields. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 18, + "column": 9 + }, + { + "line": 21, + "column": 9 + } + ] + }, + { + "message": "Fields \"x\" conflict because c and a are different fields. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 14, + "column": 11 + }, + { + "line": 18, + "column": 9 + } + ] + }, + { + "message": "Fields \"x\" conflict because c and b are different fields. 
Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 14, + "column": 11 + }, + { + "line": 21, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/deep conflict", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n {\n field {\n x: a\n },\n field {\n x: b\n }\n }\n ", + "errors": [ + { + "message": "Fields \"field\" conflict because subfields \"x\" conflict because a and b are different fields. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 3, + "column": 9 + }, + { + "line": 4, + "column": 11 + }, + { + "line": 6, + "column": 9 + }, + { + "line": 7, + "column": 11 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/deep conflict with multiple issues", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n {\n field {\n x: a\n y: c\n },\n field {\n x: b\n y: d\n }\n }\n ", + "errors": [ + { + "message": "Fields \"field\" conflict because subfields \"x\" conflict because a and b are different fields and subfields \"y\" conflict because c and d are different fields. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 3, + "column": 9 + }, + { + "line": 4, + "column": 11 + }, + { + "line": 5, + "column": 11 + }, + { + "line": 7, + "column": 9 + }, + { + "line": 8, + "column": 11 + }, + { + "line": 9, + "column": 11 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/very deep conflict", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n {\n field {\n deepField {\n x: a\n }\n },\n field {\n deepField {\n x: b\n }\n }\n }\n ", + "errors": [ + { + "message": "Fields \"field\" conflict because subfields \"deepField\" conflict because subfields \"x\" conflict because a and b are different fields. 
Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 3, + "column": 9 + }, + { + "line": 4, + "column": 11 + }, + { + "line": 5, + "column": 13 + }, + { + "line": 8, + "column": 9 + }, + { + "line": 9, + "column": 11 + }, + { + "line": 10, + "column": 13 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/reports deep conflict to nearest common ancestor", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n {\n field {\n deepField {\n x: a\n }\n deepField {\n x: b\n }\n },\n field {\n deepField {\n y\n }\n }\n }\n ", + "errors": [ + { + "message": "Fields \"deepField\" conflict because subfields \"x\" conflict because a and b are different fields. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 4, + "column": 11 + }, + { + "line": 5, + "column": 13 + }, + { + "line": 7, + "column": 11 + }, + { + "line": 8, + "column": 13 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/reports deep conflict to nearest common ancestor in fragments", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n {\n field {\n ...F\n }\n field {\n ...F\n }\n }\n fragment F on T {\n deepField {\n deeperField {\n x: a\n }\n deeperField {\n x: b\n }\n },\n deepField {\n deeperField {\n y\n }\n }\n }\n ", + "errors": [ + { + "message": "Fields \"deeperField\" conflict because subfields \"x\" conflict because a and b are different fields. 
Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 12, + "column": 11 + }, + { + "line": 13, + "column": 13 + }, + { + "line": 15, + "column": 11 + }, + { + "line": 16, + "column": 13 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/reports deep conflict in nested fragments", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n {\n field {\n ...F\n }\n field {\n ...I\n }\n }\n fragment F on T {\n x: a\n ...G\n }\n fragment G on T {\n y: c\n }\n fragment I on T {\n y: d\n ...J\n }\n fragment J on T {\n x: b\n }\n ", + "errors": [ + { + "message": "Fields \"field\" conflict because subfields \"x\" conflict because a and b are different fields and subfields \"y\" conflict because c and d are different fields. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 3, + "column": 9 + }, + { + "line": 11, + "column": 9 + }, + { + "line": 15, + "column": 9 + }, + { + "line": 6, + "column": 9 + }, + { + "line": 22, + "column": 9 + }, + { + "line": 18, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/ignores unknown fragments", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 0, + "query": "\n {\n field\n ...Unknown\n ...Known\n }\n\n fragment Known on T {\n field\n ...OtherUnknown\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/conflicting return types which potentially overlap", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 1, + "query": "\n {\n someBox {\n ...on IntBox {\n scalar\n }\n ...on NonNullStringBox1 {\n scalar\n }\n }\n }\n ", + "errors": [ + { + "message": "Fields \"scalar\" conflict because they return conflicting types Int and String!. 
Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 5, + "column": 15 + }, + { + "line": 8, + "column": 15 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/compatible return shapes on different return types", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 1, + "query": "\n {\n someBox {\n ... on SomeBox {\n deepBox {\n unrelatedField\n }\n }\n ... on StringBox {\n deepBox {\n unrelatedField\n }\n }\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/disallows differing return types despite no overlap", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 1, + "query": "\n {\n someBox {\n ... on IntBox {\n scalar\n }\n ... on StringBox {\n scalar\n }\n }\n }\n ", + "errors": [ + { + "message": "Fields \"scalar\" conflict because they return conflicting types Int and String. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 5, + "column": 15 + }, + { + "line": 8, + "column": 15 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/disallows differing return type nullability despite no overlap", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 1, + "query": "\n {\n someBox {\n ... on NonNullStringBox1 {\n scalar\n }\n ... on StringBox {\n scalar\n }\n }\n }\n ", + "errors": [ + { + "message": "Fields \"scalar\" conflict because they return conflicting types String! and String. 
Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 5, + "column": 15 + }, + { + "line": 8, + "column": 15 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/disallows differing return type list despite no overlap", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 1, + "query": "\n {\n someBox {\n ... on IntBox {\n box: listStringBox {\n scalar\n }\n }\n ... on StringBox {\n box: stringBox {\n scalar\n }\n }\n }\n }\n ", + "errors": [ + { + "message": "Fields \"box\" conflict because they return conflicting types [StringBox] and StringBox. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 5, + "column": 15 + }, + { + "line": 10, + "column": 15 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/disallows differing return type list despite no overlap", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 1, + "query": "\n {\n someBox {\n ... on IntBox {\n box: stringBox {\n scalar\n }\n }\n ... on StringBox {\n box: listStringBox {\n scalar\n }\n }\n }\n }\n ", + "errors": [ + { + "message": "Fields \"box\" conflict because they return conflicting types StringBox and [StringBox]. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 5, + "column": 15 + }, + { + "line": 10, + "column": 15 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/disallows differing deep return types despite no overlap", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 1, + "query": "\n {\n someBox {\n ... on IntBox {\n box: stringBox {\n scalar\n }\n }\n ... 
on StringBox {\n box: intBox {\n scalar\n }\n }\n }\n }\n ", + "errors": [ + { + "message": "Fields \"box\" conflict because subfields \"scalar\" conflict because they return conflicting types String and Int. Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 5, + "column": 15 + }, + { + "line": 6, + "column": 17 + }, + { + "line": 10, + "column": 15 + }, + { + "line": 11, + "column": 17 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/allows non-conflicting overlaping types", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 1, + "query": "\n {\n someBox {\n ... on IntBox {\n scalar: unrelatedField\n }\n ... on StringBox {\n scalar\n }\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/same wrapped scalar return types", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 1, + "query": "\n {\n someBox {\n ...on NonNullStringBox1 {\n scalar\n }\n ...on NonNullStringBox2 {\n scalar\n }\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/allows inline typeless fragments", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 1, + "query": "\n {\n a\n ... {\n a\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/compares deep types including list", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 1, + "query": "\n {\n connection {\n ...edgeID\n edges {\n node {\n id: name\n }\n }\n }\n }\n\n fragment edgeID on Connection {\n edges {\n node {\n id\n }\n }\n }\n ", + "errors": [ + { + "message": "Fields \"edges\" conflict because subfields \"node\" conflict because subfields \"id\" conflict because name and id are different fields. 
Use different aliases on the fields to fetch both if this was intentional.", + "locations": [ + { + "line": 5, + "column": 13 + }, + { + "line": 6, + "column": 15 + }, + { + "line": 7, + "column": 17 + }, + { + "line": 14, + "column": 11 + }, + { + "line": 15, + "column": 13 + }, + { + "line": 16, + "column": 15 + } + ] + } + ] + }, + { + "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/ignores unknown types", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 1, + "query": "\n {\n someBox {\n ...on UnknownType {\n scalar\n }\n ...on NonNullStringBox2 {\n scalar\n }\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Overlapping fields can be merged/return types must be unambiguous/works for field names that are JS keywords", + "rule": "OverlappingFieldsCanBeMerged", + "schema": 2, + "query": "{\n foo {\n constructor\n }\n }", + "errors": [] + }, + { + "name": "Validate: Possible fragment spreads/of the same object", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment objectWithinObject on Dog { ...dogFragment }\n fragment dogFragment on Dog { barkVolume }\n ", + "errors": [] + }, + { + "name": "Validate: Possible fragment spreads/of the same object with inline fragment", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment objectWithinObjectAnon on Dog { ... 
on Dog { barkVolume } }\n ", + "errors": [] + }, + { + "name": "Validate: Possible fragment spreads/object into an implemented interface", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment objectWithinInterface on Pet { ...dogFragment }\n fragment dogFragment on Dog { barkVolume }\n ", + "errors": [] + }, + { + "name": "Validate: Possible fragment spreads/object into containing union", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment objectWithinUnion on CatOrDog { ...dogFragment }\n fragment dogFragment on Dog { barkVolume }\n ", + "errors": [] + }, + { + "name": "Validate: Possible fragment spreads/union into contained object", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment unionWithinObject on Dog { ...catOrDogFragment }\n fragment catOrDogFragment on CatOrDog { __typename }\n ", + "errors": [] + }, + { + "name": "Validate: Possible fragment spreads/union into overlapping interface", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment unionWithinInterface on Pet { ...catOrDogFragment }\n fragment catOrDogFragment on CatOrDog { __typename }\n ", + "errors": [] + }, + { + "name": "Validate: Possible fragment spreads/union into overlapping union", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment unionWithinUnion on DogOrHuman { ...catOrDogFragment }\n fragment catOrDogFragment on CatOrDog { __typename }\n ", + "errors": [] + }, + { + "name": "Validate: Possible fragment spreads/interface into implemented object", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment interfaceWithinObject on Dog { ...petFragment }\n fragment petFragment on Pet { name }\n ", + "errors": [] + }, + { + "name": "Validate: Possible fragment spreads/interface into overlapping interface", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment interfaceWithinInterface on Pet { ...beingFragment }\n fragment 
beingFragment on Being { name }\n ", + "errors": [] + }, + { + "name": "Validate: Possible fragment spreads/interface into overlapping interface in inline fragment", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment interfaceWithinInterface on Pet { ... on Being { name } }\n ", + "errors": [] + }, + { + "name": "Validate: Possible fragment spreads/interface into overlapping union", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment interfaceWithinUnion on CatOrDog { ...petFragment }\n fragment petFragment on Pet { name }\n ", + "errors": [] + }, + { + "name": "Validate: Possible fragment spreads/different object into object", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment invalidObjectWithinObject on Cat { ...dogFragment }\n fragment dogFragment on Dog { barkVolume }\n ", + "errors": [ + { + "message": "Fragment \"dogFragment\" cannot be spread here as objects of type \"Cat\" can never be of type \"Dog\".", + "locations": [ + { + "line": 2, + "column": 51 + } + ] + } + ] + }, + { + "name": "Validate: Possible fragment spreads/different object into object in inline fragment", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment invalidObjectWithinObjectAnon on Cat {\n ... 
on Dog { barkVolume }\n }\n ", + "errors": [ + { + "message": "Fragment cannot be spread here as objects of type \"Cat\" can never be of type \"Dog\".", + "locations": [ + { + "line": 3, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Possible fragment spreads/object into not implementing interface", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment invalidObjectWithinInterface on Pet { ...humanFragment }\n fragment humanFragment on Human { pets { name } }\n ", + "errors": [ + { + "message": "Fragment \"humanFragment\" cannot be spread here as objects of type \"Pet\" can never be of type \"Human\".", + "locations": [ + { + "line": 2, + "column": 54 + } + ] + } + ] + }, + { + "name": "Validate: Possible fragment spreads/object into not containing union", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment invalidObjectWithinUnion on CatOrDog { ...humanFragment }\n fragment humanFragment on Human { pets { name } }\n ", + "errors": [ + { + "message": "Fragment \"humanFragment\" cannot be spread here as objects of type \"CatOrDog\" can never be of type \"Human\".", + "locations": [ + { + "line": 2, + "column": 55 + } + ] + } + ] + }, + { + "name": "Validate: Possible fragment spreads/union into not contained object", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment invalidUnionWithinObject on Human { ...catOrDogFragment }\n fragment catOrDogFragment on CatOrDog { __typename }\n ", + "errors": [ + { + "message": "Fragment \"catOrDogFragment\" cannot be spread here as objects of type \"Human\" can never be of type \"CatOrDog\".", + "locations": [ + { + "line": 2, + "column": 52 + } + ] + } + ] + }, + { + "name": "Validate: Possible fragment spreads/union into non overlapping interface", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment invalidUnionWithinInterface on Pet { ...humanOrAlienFragment }\n fragment humanOrAlienFragment on HumanOrAlien { __typename 
}\n ", + "errors": [ + { + "message": "Fragment \"humanOrAlienFragment\" cannot be spread here as objects of type \"Pet\" can never be of type \"HumanOrAlien\".", + "locations": [ + { + "line": 2, + "column": 53 + } + ] + } + ] + }, + { + "name": "Validate: Possible fragment spreads/union into non overlapping union", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment invalidUnionWithinUnion on CatOrDog { ...humanOrAlienFragment }\n fragment humanOrAlienFragment on HumanOrAlien { __typename }\n ", + "errors": [ + { + "message": "Fragment \"humanOrAlienFragment\" cannot be spread here as objects of type \"CatOrDog\" can never be of type \"HumanOrAlien\".", + "locations": [ + { + "line": 2, + "column": 54 + } + ] + } + ] + }, + { + "name": "Validate: Possible fragment spreads/interface into non implementing object", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment invalidInterfaceWithinObject on Cat { ...intelligentFragment }\n fragment intelligentFragment on Intelligent { iq }\n ", + "errors": [ + { + "message": "Fragment \"intelligentFragment\" cannot be spread here as objects of type \"Cat\" can never be of type \"Intelligent\".", + "locations": [ + { + "line": 2, + "column": 54 + } + ] + } + ] + }, + { + "name": "Validate: Possible fragment spreads/interface into non overlapping interface", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment invalidInterfaceWithinInterface on Pet {\n ...intelligentFragment\n }\n fragment intelligentFragment on Intelligent { iq }\n ", + "errors": [ + { + "message": "Fragment \"intelligentFragment\" cannot be spread here as objects of type \"Pet\" can never be of type \"Intelligent\".", + "locations": [ + { + "line": 3, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Possible fragment spreads/interface into non overlapping interface in inline fragment", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment 
invalidInterfaceWithinInterfaceAnon on Pet {\n ...on Intelligent { iq }\n }\n ", + "errors": [ + { + "message": "Fragment cannot be spread here as objects of type \"Pet\" can never be of type \"Intelligent\".", + "locations": [ + { + "line": 3, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Possible fragment spreads/interface into non overlapping union", + "rule": "PossibleFragmentSpreads", + "schema": 0, + "query": "\n fragment invalidInterfaceWithinUnion on HumanOrAlien { ...petFragment }\n fragment petFragment on Pet { name }\n ", + "errors": [ + { + "message": "Fragment \"petFragment\" cannot be spread here as objects of type \"HumanOrAlien\" can never be of type \"Pet\".", + "locations": [ + { + "line": 2, + "column": 62 + } + ] + } + ] + }, + { + "name": "Validate: Provided required arguments/ignores unknown arguments", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n dog {\n isHousetrained(unknownArgument: true)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Provided required arguments/Valid non-nullable value/Arg on optional arg", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n dog {\n isHousetrained(atOtherHomes: true)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Provided required arguments/Valid non-nullable value/No Arg on optional arg", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n dog {\n isHousetrained\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Provided required arguments/Valid non-nullable value/Multiple args", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleReqs(req1: 1, req2: 2)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Provided required arguments/Valid non-nullable value/Multiple args reverse order", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleReqs(req2: 2, req1: 1)\n }\n }\n ", + "errors": [] + }, + { 
+ "name": "Validate: Provided required arguments/Valid non-nullable value/No args on multiple optional", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleOpts\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Provided required arguments/Valid non-nullable value/One arg on multiple optional", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleOpts(opt1: 1)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Provided required arguments/Valid non-nullable value/Second arg on multiple optional", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleOpts(opt2: 1)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Provided required arguments/Valid non-nullable value/Multiple reqs on mixedList", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleOptAndReq(req1: 3, req2: 4)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Provided required arguments/Valid non-nullable value/Multiple reqs and one opt on mixedList", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleOptAndReq(req1: 3, req2: 4, opt1: 5)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Provided required arguments/Valid non-nullable value/All reqs and opts on mixedList", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleOptAndReq(req1: 3, req2: 4, opt1: 5, opt2: 6)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Provided required arguments/Invalid non-nullable value/Missing one non-nullable argument", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleReqs(req2: 2)\n }\n }\n ", + "errors": [ + { + "message": "Field \"multipleReqs\" argument \"req1\" of type \"Int!\" is required but not provided.", + "locations": [ + { + 
"line": 4, + "column": 13 + } + ] + } + ] + }, + { + "name": "Validate: Provided required arguments/Invalid non-nullable value/Missing multiple non-nullable arguments", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleReqs\n }\n }\n ", + "errors": [ + { + "message": "Field \"multipleReqs\" argument \"req1\" of type \"Int!\" is required but not provided.", + "locations": [ + { + "line": 4, + "column": 13 + } + ] + }, + { + "message": "Field \"multipleReqs\" argument \"req2\" of type \"Int!\" is required but not provided.", + "locations": [ + { + "line": 4, + "column": 13 + } + ] + } + ] + }, + { + "name": "Validate: Provided required arguments/Invalid non-nullable value/Incorrect value and missing argument", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n complicatedArgs {\n multipleReqs(req1: \"one\")\n }\n }\n ", + "errors": [ + { + "message": "Field \"multipleReqs\" argument \"req2\" of type \"Int!\" is required but not provided.", + "locations": [ + { + "line": 4, + "column": 13 + } + ] + } + ] + }, + { + "name": "Validate: Provided required arguments/Directive arguments/ignores unknown directives", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n dog @unknown\n }\n ", + "errors": [] + }, + { + "name": "Validate: Provided required arguments/Directive arguments/with directives of valid types", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n dog @include(if: true) {\n name\n }\n human @skip(if: false) {\n name\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Provided required arguments/Directive arguments/with directive with missing types", + "rule": "ProvidedNonNullArguments", + "schema": 0, + "query": "\n {\n dog @include {\n name @skip\n }\n }\n ", + "errors": [ + { + "message": "Directive \"@include\" argument \"if\" of type \"Boolean!\" is required but not provided.", + "locations": [ + { + "line": 3, + "column": 15 + } + ] + }, + 
{ + "message": "Directive \"@skip\" argument \"if\" of type \"Boolean!\" is required but not provided.", + "locations": [ + { + "line": 4, + "column": 18 + } + ] + } + ] + }, + { + "name": "Validate: Scalar leafs/valid scalar selection", + "rule": "ScalarLeafs", + "schema": 0, + "query": "\n fragment scalarSelection on Dog {\n barks\n }\n ", + "errors": [] + }, + { + "name": "Validate: Scalar leafs/object type missing selection", + "rule": "ScalarLeafs", + "schema": 0, + "query": "\n query directQueryOnObjectWithoutSubFields {\n human\n }\n ", + "errors": [ + { + "message": "Field \"human\" of type \"Human\" must have a selection of subfields. Did you mean \"human { ... }\"?", + "locations": [ + { + "line": 3, + "column": 9 + } + ] + } + ] + }, + { + "name": "Validate: Scalar leafs/interface type missing selection", + "rule": "ScalarLeafs", + "schema": 0, + "query": "\n {\n human { pets }\n }\n ", + "errors": [ + { + "message": "Field \"pets\" of type \"[Pet]\" must have a selection of subfields. Did you mean \"pets { ... 
}\"?", + "locations": [ + { + "line": 3, + "column": 17 + } + ] + } + ] + }, + { + "name": "Validate: Scalar leafs/valid scalar selection with args", + "rule": "ScalarLeafs", + "schema": 0, + "query": "\n fragment scalarSelectionWithArgs on Dog {\n doesKnowCommand(dogCommand: SIT)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Scalar leafs/scalar selection not allowed on Boolean", + "rule": "ScalarLeafs", + "schema": 0, + "query": "\n fragment scalarSelectionsNotAllowedOnBoolean on Dog {\n barks { sinceWhen }\n }\n ", + "errors": [ + { + "message": "Field \"barks\" must not have a selection since type \"Boolean\" has no subfields.", + "locations": [ + { + "line": 3, + "column": 15 + } + ] + } + ] + }, + { + "name": "Validate: Scalar leafs/scalar selection not allowed on Enum", + "rule": "ScalarLeafs", + "schema": 0, + "query": "\n fragment scalarSelectionsNotAllowedOnEnum on Cat {\n furColor { inHexdec }\n }\n ", + "errors": [ + { + "message": "Field \"furColor\" must not have a selection since type \"FurColor\" has no subfields.", + "locations": [ + { + "line": 3, + "column": 18 + } + ] + } + ] + }, + { + "name": "Validate: Scalar leafs/scalar selection not allowed with args", + "rule": "ScalarLeafs", + "schema": 0, + "query": "\n fragment scalarSelectionsNotAllowedWithArgs on Dog {\n doesKnowCommand(dogCommand: SIT) { sinceWhen }\n }\n ", + "errors": [ + { + "message": "Field \"doesKnowCommand\" must not have a selection since type \"Boolean\" has no subfields.", + "locations": [ + { + "line": 3, + "column": 42 + } + ] + } + ] + }, + { + "name": "Validate: Scalar leafs/Scalar selection not allowed with directives", + "rule": "ScalarLeafs", + "schema": 0, + "query": "\n fragment scalarSelectionsNotAllowedWithDirectives on Dog {\n name @include(if: true) { isAlsoHumanName }\n }\n ", + "errors": [ + { + "message": "Field \"name\" must not have a selection since type \"String\" has no subfields.", + "locations": [ + { + "line": 3, + "column": 33 + } + ] + } + 
] + }, + { + "name": "Validate: Scalar leafs/Scalar selection not allowed with directives and args", + "rule": "ScalarLeafs", + "schema": 0, + "query": "\n fragment scalarSelectionsNotAllowedWithDirectivesAndArgs on Dog {\n doesKnowCommand(dogCommand: SIT) @include(if: true) { sinceWhen }\n }\n ", + "errors": [ + { + "message": "Field \"doesKnowCommand\" must not have a selection since type \"Boolean\" has no subfields.", + "locations": [ + { + "line": 3, + "column": 61 + } + ] + } + ] + }, + { + "name": "Validate: Unique argument names/no arguments on field", + "rule": "UniqueArgumentNames", + "schema": 0, + "query": "\n {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique argument names/no arguments on directive", + "rule": "UniqueArgumentNames", + "schema": 0, + "query": "\n {\n field @directive\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique argument names/argument on field", + "rule": "UniqueArgumentNames", + "schema": 0, + "query": "\n {\n field(arg: \"value\")\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique argument names/argument on directive", + "rule": "UniqueArgumentNames", + "schema": 0, + "query": "\n {\n field @directive(arg: \"value\")\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique argument names/same argument on two fields", + "rule": "UniqueArgumentNames", + "schema": 0, + "query": "\n {\n one: field(arg: \"value\")\n two: field(arg: \"value\")\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique argument names/same argument on field and directive", + "rule": "UniqueArgumentNames", + "schema": 0, + "query": "\n {\n field(arg: \"value\") @directive(arg: \"value\")\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique argument names/same argument on two directives", + "rule": "UniqueArgumentNames", + "schema": 0, + "query": "\n {\n field @directive1(arg: \"value\") @directive2(arg: \"value\")\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique argument names/multiple 
field arguments", + "rule": "UniqueArgumentNames", + "schema": 0, + "query": "\n {\n field(arg1: \"value\", arg2: \"value\", arg3: \"value\")\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique argument names/multiple directive arguments", + "rule": "UniqueArgumentNames", + "schema": 0, + "query": "\n {\n field @directive(arg1: \"value\", arg2: \"value\", arg3: \"value\")\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique argument names/duplicate field arguments", + "rule": "UniqueArgumentNames", + "schema": 0, + "query": "\n {\n field(arg1: \"value\", arg1: \"value\")\n }\n ", + "errors": [ + { + "message": "There can be only one argument named \"arg1\".", + "locations": [ + { + "line": 3, + "column": 15 + }, + { + "line": 3, + "column": 30 + } + ] + } + ] + }, + { + "name": "Validate: Unique argument names/many duplicate field arguments", + "rule": "UniqueArgumentNames", + "schema": 0, + "query": "\n {\n field(arg1: \"value\", arg1: \"value\", arg1: \"value\")\n }\n ", + "errors": [ + { + "message": "There can be only one argument named \"arg1\".", + "locations": [ + { + "line": 3, + "column": 15 + }, + { + "line": 3, + "column": 30 + } + ] + }, + { + "message": "There can be only one argument named \"arg1\".", + "locations": [ + { + "line": 3, + "column": 15 + }, + { + "line": 3, + "column": 45 + } + ] + } + ] + }, + { + "name": "Validate: Unique argument names/duplicate directive arguments", + "rule": "UniqueArgumentNames", + "schema": 0, + "query": "\n {\n field @directive(arg1: \"value\", arg1: \"value\")\n }\n ", + "errors": [ + { + "message": "There can be only one argument named \"arg1\".", + "locations": [ + { + "line": 3, + "column": 26 + }, + { + "line": 3, + "column": 41 + } + ] + } + ] + }, + { + "name": "Validate: Unique argument names/many duplicate directive arguments", + "rule": "UniqueArgumentNames", + "schema": 0, + "query": "\n {\n field @directive(arg1: \"value\", arg1: \"value\", arg1: \"value\")\n }\n ", + "errors": [ + { 
+ "message": "There can be only one argument named \"arg1\".", + "locations": [ + { + "line": 3, + "column": 26 + }, + { + "line": 3, + "column": 41 + } + ] + }, + { + "message": "There can be only one argument named \"arg1\".", + "locations": [ + { + "line": 3, + "column": 26 + }, + { + "line": 3, + "column": 56 + } + ] + } + ] + }, + { + "name": "Validate: Directives Are Unique Per Location/no directives", + "rule": "UniqueDirectivesPerLocation", + "schema": 0, + "query": "\n fragment Test on Type {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Directives Are Unique Per Location/unique directives in different locations", + "rule": "UniqueDirectivesPerLocation", + "schema": 0, + "query": "\n fragment Test on Type @directiveA {\n field @directiveB\n }\n ", + "errors": [] + }, + { + "name": "Validate: Directives Are Unique Per Location/unique directives in same locations", + "rule": "UniqueDirectivesPerLocation", + "schema": 0, + "query": "\n fragment Test on Type @directiveA @directiveB {\n field @directiveA @directiveB\n }\n ", + "errors": [] + }, + { + "name": "Validate: Directives Are Unique Per Location/same directives in different locations", + "rule": "UniqueDirectivesPerLocation", + "schema": 0, + "query": "\n fragment Test on Type @directiveA {\n field @directiveA\n }\n ", + "errors": [] + }, + { + "name": "Validate: Directives Are Unique Per Location/same directives in similar locations", + "rule": "UniqueDirectivesPerLocation", + "schema": 0, + "query": "\n fragment Test on Type {\n field @directive\n field @directive\n }\n ", + "errors": [] + }, + { + "name": "Validate: Directives Are Unique Per Location/duplicate directives in one location", + "rule": "UniqueDirectivesPerLocation", + "schema": 0, + "query": "\n fragment Test on Type {\n field @directive @directive\n }\n ", + "errors": [ + { + "message": "The directive \"directive\" can only be used once at this location.", + "locations": [ + { + "line": 3, + "column": 15 + }, + { + 
"line": 3, + "column": 26 + } + ] + } + ] + }, + { + "name": "Validate: Directives Are Unique Per Location/many duplicate directives in one location", + "rule": "UniqueDirectivesPerLocation", + "schema": 0, + "query": "\n fragment Test on Type {\n field @directive @directive @directive\n }\n ", + "errors": [ + { + "message": "The directive \"directive\" can only be used once at this location.", + "locations": [ + { + "line": 3, + "column": 15 + }, + { + "line": 3, + "column": 26 + } + ] + }, + { + "message": "The directive \"directive\" can only be used once at this location.", + "locations": [ + { + "line": 3, + "column": 15 + }, + { + "line": 3, + "column": 37 + } + ] + } + ] + }, + { + "name": "Validate: Directives Are Unique Per Location/different duplicate directives in one location", + "rule": "UniqueDirectivesPerLocation", + "schema": 0, + "query": "\n fragment Test on Type {\n field @directiveA @directiveB @directiveA @directiveB\n }\n ", + "errors": [ + { + "message": "The directive \"directiveA\" can only be used once at this location.", + "locations": [ + { + "line": 3, + "column": 15 + }, + { + "line": 3, + "column": 39 + } + ] + }, + { + "message": "The directive \"directiveB\" can only be used once at this location.", + "locations": [ + { + "line": 3, + "column": 27 + }, + { + "line": 3, + "column": 51 + } + ] + } + ] + }, + { + "name": "Validate: Directives Are Unique Per Location/duplicate directives in many locations", + "rule": "UniqueDirectivesPerLocation", + "schema": 0, + "query": "\n fragment Test on Type @directive @directive {\n field @directive @directive\n }\n ", + "errors": [ + { + "message": "The directive \"directive\" can only be used once at this location.", + "locations": [ + { + "line": 2, + "column": 29 + }, + { + "line": 2, + "column": 40 + } + ] + }, + { + "message": "The directive \"directive\" can only be used once at this location.", + "locations": [ + { + "line": 3, + "column": 15 + }, + { + "line": 3, + "column": 26 + } + ] 
+ } + ] + }, + { + "name": "Validate: Unique fragment names/no fragments", + "rule": "UniqueFragmentNames", + "schema": 0, + "query": "\n {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique fragment names/one fragment", + "rule": "UniqueFragmentNames", + "schema": 0, + "query": "\n {\n ...fragA\n }\n\n fragment fragA on Type {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique fragment names/many fragments", + "rule": "UniqueFragmentNames", + "schema": 0, + "query": "\n {\n ...fragA\n ...fragB\n ...fragC\n }\n fragment fragA on Type {\n fieldA\n }\n fragment fragB on Type {\n fieldB\n }\n fragment fragC on Type {\n fieldC\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique fragment names/inline fragments are always unique", + "rule": "UniqueFragmentNames", + "schema": 0, + "query": "\n {\n ...on Type {\n fieldA\n }\n ...on Type {\n fieldB\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique fragment names/fragment and operation named the same", + "rule": "UniqueFragmentNames", + "schema": 0, + "query": "\n query Foo {\n ...Foo\n }\n fragment Foo on Type {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique fragment names/fragments named the same", + "rule": "UniqueFragmentNames", + "schema": 0, + "query": "\n {\n ...fragA\n }\n fragment fragA on Type {\n fieldA\n }\n fragment fragA on Type {\n fieldB\n }\n ", + "errors": [ + { + "message": "There can be only one fragment named \"fragA\".", + "locations": [ + { + "line": 5, + "column": 16 + }, + { + "line": 8, + "column": 16 + } + ] + } + ] + }, + { + "name": "Validate: Unique fragment names/fragments named the same without being referenced", + "rule": "UniqueFragmentNames", + "schema": 0, + "query": "\n fragment fragA on Type {\n fieldA\n }\n fragment fragA on Type {\n fieldB\n }\n ", + "errors": [ + { + "message": "There can be only one fragment named \"fragA\".", + "locations": [ + { + "line": 2, + "column": 16 + }, + { + "line": 5, + 
"column": 16 + } + ] + } + ] + }, + { + "name": "Validate: Unique input field names/input object with fields", + "rule": "UniqueInputFieldNames", + "schema": 0, + "query": "\n {\n field(arg: { f: true })\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique input field names/same input object within two args", + "rule": "UniqueInputFieldNames", + "schema": 0, + "query": "\n {\n field(arg1: { f: true }, arg2: { f: true })\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique input field names/multiple input object fields", + "rule": "UniqueInputFieldNames", + "schema": 0, + "query": "\n {\n field(arg: { f1: \"value\", f2: \"value\", f3: \"value\" })\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique input field names/allows for nested input objects with similar fields", + "rule": "UniqueInputFieldNames", + "schema": 0, + "query": "\n {\n field(arg: {\n deep: {\n deep: {\n id: 1\n }\n id: 1\n }\n id: 1\n })\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique input field names/duplicate input object fields", + "rule": "UniqueInputFieldNames", + "schema": 0, + "query": "\n {\n field(arg: { f1: \"value\", f1: \"value\" })\n }\n ", + "errors": [ + { + "message": "There can be only one input field named \"f1\".", + "locations": [ + { + "line": 3, + "column": 22 + }, + { + "line": 3, + "column": 35 + } + ] + } + ] + }, + { + "name": "Validate: Unique input field names/many duplicate input object fields", + "rule": "UniqueInputFieldNames", + "schema": 0, + "query": "\n {\n field(arg: { f1: \"value\", f1: \"value\", f1: \"value\" })\n }\n ", + "errors": [ + { + "message": "There can be only one input field named \"f1\".", + "locations": [ + { + "line": 3, + "column": 22 + }, + { + "line": 3, + "column": 35 + } + ] + }, + { + "message": "There can be only one input field named \"f1\".", + "locations": [ + { + "line": 3, + "column": 22 + }, + { + "line": 3, + "column": 48 + } + ] + } + ] + }, + { + "name": "Validate: Unique operation names/no 
operations", + "rule": "UniqueOperationNames", + "schema": 0, + "query": "\n fragment fragA on Type {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique operation names/one anon operation", + "rule": "UniqueOperationNames", + "schema": 0, + "query": "\n {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique operation names/one named operation", + "rule": "UniqueOperationNames", + "schema": 0, + "query": "\n query Foo {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique operation names/multiple operations", + "rule": "UniqueOperationNames", + "schema": 0, + "query": "\n query Foo {\n field\n }\n\n query Bar {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique operation names/multiple operations of different types", + "rule": "UniqueOperationNames", + "schema": 0, + "query": "\n query Foo {\n field\n }\n\n mutation Bar {\n field\n }\n\n subscription Baz {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique operation names/fragment and operation named the same", + "rule": "UniqueOperationNames", + "schema": 0, + "query": "\n query Foo {\n ...Foo\n }\n fragment Foo on Type {\n field\n }\n ", + "errors": [] + }, + { + "name": "Validate: Unique operation names/multiple operations of same name", + "rule": "UniqueOperationNames", + "schema": 0, + "query": "\n query Foo {\n fieldA\n }\n query Foo {\n fieldB\n }\n ", + "errors": [ + { + "message": "There can be only one operation named \"Foo\".", + "locations": [ + { + "line": 2, + "column": 13 + }, + { + "line": 5, + "column": 13 + } + ] + } + ] + }, + { + "name": "Validate: Unique operation names/multiple ops of same name of different types (mutation)", + "rule": "UniqueOperationNames", + "schema": 0, + "query": "\n query Foo {\n fieldA\n }\n mutation Foo {\n fieldB\n }\n ", + "errors": [ + { + "message": "There can be only one operation named \"Foo\".", + "locations": [ + { + "line": 2, + "column": 13 + }, + { + "line": 5, + "column": 
16 + } + ] + } + ] + }, + { + "name": "Validate: Unique operation names/multiple ops of same name of different types (subscription)", + "rule": "UniqueOperationNames", + "schema": 0, + "query": "\n query Foo {\n fieldA\n }\n subscription Foo {\n fieldB\n }\n ", + "errors": [ + { + "message": "There can be only one operation named \"Foo\".", + "locations": [ + { + "line": 2, + "column": 13 + }, + { + "line": 5, + "column": 20 + } + ] + } + ] + }, + { + "name": "Validate: Unique variable names/unique variable names", + "rule": "UniqueVariableNames", + "schema": 0, + "query": "\n query A($x: Int, $y: String) { __typename }\n query B($x: String, $y: Int) { __typename }\n ", + "errors": [] + }, + { + "name": "Validate: Unique variable names/duplicate variable names", + "rule": "UniqueVariableNames", + "schema": 0, + "query": "\n query A($x: Int, $x: Int, $x: String) { __typename }\n query B($x: String, $x: Int) { __typename }\n query C($x: Int, $x: Int) { __typename }\n ", + "errors": [ + { + "message": "There can be only one variable named \"x\".", + "locations": [ + { + "line": 2, + "column": 16 + }, + { + "line": 2, + "column": 25 + } + ] + }, + { + "message": "There can be only one variable named \"x\".", + "locations": [ + { + "line": 2, + "column": 16 + }, + { + "line": 2, + "column": 34 + } + ] + }, + { + "message": "There can be only one variable named \"x\".", + "locations": [ + { + "line": 3, + "column": 16 + }, + { + "line": 3, + "column": 28 + } + ] + }, + { + "message": "There can be only one variable named \"x\".", + "locations": [ + { + "line": 4, + "column": 16 + }, + { + "line": 4, + "column": 25 + } + ] + } + ] + }, + { + "name": "Validate: Variables are input types/input types are valid", + "rule": "VariablesAreInputTypes", + "schema": 0, + "query": "\n query Foo($a: String, $b: [Boolean!]!, $c: ComplexInput) {\n field(a: $a, b: $b, c: $c)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are input types/output types are invalid", + 
"rule": "VariablesAreInputTypes", + "schema": 0, + "query": "\n query Foo($a: Dog, $b: [[CatOrDog!]]!, $c: Pet) {\n field(a: $a, b: $b, c: $c)\n }\n ", + "errors": [ + { + "locations": [ + { + "line": 2, + "column": 21 + } + ], + "message": "Variable \"$a\" cannot be non-input type \"Dog\"." + }, + { + "locations": [ + { + "line": 2, + "column": 30 + } + ], + "message": "Variable \"$b\" cannot be non-input type \"[[CatOrDog!]]!\"." + }, + { + "locations": [ + { + "line": 2, + "column": 50 + } + ], + "message": "Variable \"$c\" cannot be non-input type \"Pet\"." + } + ] + }, + { + "name": "Validate: Variables are in allowed positions/Boolean => Boolean", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($booleanArg: Boolean)\n {\n complicatedArgs {\n booleanArgField(booleanArg: $booleanArg)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/Boolean => Boolean within fragment", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n fragment booleanArgFrag on ComplicatedArgs {\n booleanArgField(booleanArg: $booleanArg)\n }\n query Query($booleanArg: Boolean)\n {\n complicatedArgs {\n ...booleanArgFrag\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/Boolean => Boolean within fragment", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($booleanArg: Boolean)\n {\n complicatedArgs {\n ...booleanArgFrag\n }\n }\n fragment booleanArgFrag on ComplicatedArgs {\n booleanArgField(booleanArg: $booleanArg)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/Boolean! => Boolean", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($nonNullBooleanArg: Boolean!)\n {\n complicatedArgs {\n booleanArgField(booleanArg: $nonNullBooleanArg)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/Boolean! 
=> Boolean within fragment", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n fragment booleanArgFrag on ComplicatedArgs {\n booleanArgField(booleanArg: $nonNullBooleanArg)\n }\n\n query Query($nonNullBooleanArg: Boolean!)\n {\n complicatedArgs {\n ...booleanArgFrag\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/Int => Int! with default", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($intArg: Int = 1)\n {\n complicatedArgs {\n nonNullIntArgField(nonNullIntArg: $intArg)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/[String] => [String]", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($stringListVar: [String])\n {\n complicatedArgs {\n stringListArgField(stringListArg: $stringListVar)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/[String!] => [String]", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($stringListVar: [String!])\n {\n complicatedArgs {\n stringListArgField(stringListArg: $stringListVar)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/String => [String] in item position", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($stringVar: String)\n {\n complicatedArgs {\n stringListArgField(stringListArg: [$stringVar])\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/String! 
=> [String] in item position", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($stringVar: String!)\n {\n complicatedArgs {\n stringListArgField(stringListArg: [$stringVar])\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/ComplexInput => ComplexInput", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($complexVar: ComplexInput)\n {\n complicatedArgs {\n complexArgField(complexArg: $complexVar)\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/ComplexInput => ComplexInput in field position", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($boolVar: Boolean = false)\n {\n complicatedArgs {\n complexArgField(complexArg: {requiredArg: $boolVar})\n }\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/Boolean! => Boolean! in directive", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($boolVar: Boolean!)\n {\n dog @include(if: $boolVar)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/Boolean => Boolean! in directive with default", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($boolVar: Boolean = false)\n {\n dog @include(if: $boolVar)\n }\n ", + "errors": [] + }, + { + "name": "Validate: Variables are in allowed positions/Int => Int!", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($intArg: Int) {\n complicatedArgs {\n nonNullIntArgField(nonNullIntArg: $intArg)\n }\n }\n ", + "errors": [ + { + "message": "Variable \"$intArg\" of type \"Int\" used in position expecting type \"Int!\".", + "locations": [ + { + "line": 2, + "column": 19 + }, + { + "line": 4, + "column": 45 + } + ] + } + ] + }, + { + "name": "Validate: Variables are in allowed positions/Int => Int! 
within fragment", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n fragment nonNullIntArgFieldFrag on ComplicatedArgs {\n nonNullIntArgField(nonNullIntArg: $intArg)\n }\n\n query Query($intArg: Int) {\n complicatedArgs {\n ...nonNullIntArgFieldFrag\n }\n }\n ", + "errors": [ + { + "message": "Variable \"$intArg\" of type \"Int\" used in position expecting type \"Int!\".", + "locations": [ + { + "line": 6, + "column": 19 + }, + { + "line": 3, + "column": 43 + } + ] + } + ] + }, + { + "name": "Validate: Variables are in allowed positions/Int => Int! within nested fragment", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n fragment outerFrag on ComplicatedArgs {\n ...nonNullIntArgFieldFrag\n }\n\n fragment nonNullIntArgFieldFrag on ComplicatedArgs {\n nonNullIntArgField(nonNullIntArg: $intArg)\n }\n\n query Query($intArg: Int) {\n complicatedArgs {\n ...outerFrag\n }\n }\n ", + "errors": [ + { + "message": "Variable \"$intArg\" of type \"Int\" used in position expecting type \"Int!\".", + "locations": [ + { + "line": 10, + "column": 19 + }, + { + "line": 7, + "column": 43 + } + ] + } + ] + }, + { + "name": "Validate: Variables are in allowed positions/String over Boolean", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($stringVar: String) {\n complicatedArgs {\n booleanArgField(booleanArg: $stringVar)\n }\n }\n ", + "errors": [ + { + "message": "Variable \"$stringVar\" of type \"String\" used in position expecting type \"Boolean\".", + "locations": [ + { + "line": 2, + "column": 19 + }, + { + "line": 4, + "column": 39 + } + ] + } + ] + }, + { + "name": "Validate: Variables are in allowed positions/String => [String]", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($stringVar: String) {\n complicatedArgs {\n stringListArgField(stringListArg: $stringVar)\n }\n }\n ", + "errors": [ + { + "message": "Variable \"$stringVar\" of type \"String\" used in position 
expecting type \"[String]\".", + "locations": [ + { + "line": 2, + "column": 19 + }, + { + "line": 4, + "column": 45 + } + ] + } + ] + }, + { + "name": "Validate: Variables are in allowed positions/Boolean => Boolean! in directive", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($boolVar: Boolean) {\n dog @include(if: $boolVar)\n }\n ", + "errors": [ + { + "message": "Variable \"$boolVar\" of type \"Boolean\" used in position expecting type \"Boolean!\".", + "locations": [ + { + "line": 2, + "column": 19 + }, + { + "line": 3, + "column": 26 + } + ] + } + ] + }, + { + "name": "Validate: Variables are in allowed positions/String => Boolean! in directive", + "rule": "VariablesInAllowedPosition", + "schema": 0, + "query": "\n query Query($stringVar: String) {\n dog @include(if: $stringVar)\n }\n ", + "errors": [ + { + "message": "Variable \"$stringVar\" of type \"String\" used in position expecting type \"Boolean!\".", + "locations": [ + { + "line": 2, + "column": 19 + }, + { + "line": 3, + "column": 26 + } + ] + } + ] + } + ] +} \ No newline at end of file diff --git a/vendor/github.com/neelance/graphql-go/internal/validation/suggestion.go b/vendor/github.com/neelance/graphql-go/internal/validation/suggestion.go new file mode 100644 index 00000000..9702b5f5 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/validation/suggestion.go @@ -0,0 +1,71 @@ +package validation + +import ( + "fmt" + "sort" + "strconv" + "strings" +) + +func makeSuggestion(prefix string, options []string, input string) string { + var selected []string + distances := make(map[string]int) + for _, opt := range options { + distance := levenshteinDistance(input, opt) + threshold := max(len(input)/2, max(len(opt)/2, 1)) + if distance < threshold { + selected = append(selected, opt) + distances[opt] = distance + } + } + + if len(selected) == 0 { + return "" + } + sort.Slice(selected, func(i, j int) bool { + return distances[selected[i]] < 
distances[selected[j]] + }) + + parts := make([]string, len(selected)) + for i, opt := range selected { + parts[i] = strconv.Quote(opt) + } + if len(parts) > 1 { + parts[len(parts)-1] = "or " + parts[len(parts)-1] + } + return fmt.Sprintf(" %s %s?", prefix, strings.Join(parts, ", ")) +} + +func levenshteinDistance(s1, s2 string) int { + column := make([]int, len(s1)+1) + for y := range s1 { + column[y+1] = y + 1 + } + for x, rx := range s2 { + column[0] = x + 1 + lastdiag := x + for y, ry := range s1 { + olddiag := column[y+1] + if rx != ry { + lastdiag++ + } + column[y+1] = min(column[y+1]+1, min(column[y]+1, lastdiag)) + lastdiag = olddiag + } + } + return column[len(s1)] +} + +func min(a, b int) int { + if a < b { + return a + } + return b +} + +func max(a, b int) int { + if a > b { + return a + } + return b +} diff --git a/vendor/github.com/neelance/graphql-go/internal/validation/validation.go b/vendor/github.com/neelance/graphql-go/internal/validation/validation.go new file mode 100644 index 00000000..a537d458 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/internal/validation/validation.go @@ -0,0 +1,860 @@ +package validation + +import ( + "fmt" + "math" + "reflect" + "strconv" + "strings" + "text/scanner" + + "github.com/neelance/graphql-go/errors" + "github.com/neelance/graphql-go/internal/common" + "github.com/neelance/graphql-go/internal/query" + "github.com/neelance/graphql-go/internal/schema" +) + +type varSet map[*common.InputValue]struct{} + +type selectionPair struct{ a, b query.Selection } + +type fieldInfo struct { + sf *schema.Field + parent schema.NamedType +} + +type context struct { + schema *schema.Schema + doc *query.Document + errs []*errors.QueryError + opErrs map[*query.Operation][]*errors.QueryError + usedVars map[*query.Operation]varSet + fieldMap map[*query.Field]fieldInfo + overlapValidated map[selectionPair]struct{} +} + +func (c *context) addErr(loc errors.Location, rule string, format string, a ...interface{}) { + 
c.addErrMultiLoc([]errors.Location{loc}, rule, format, a...) +} + +func (c *context) addErrMultiLoc(locs []errors.Location, rule string, format string, a ...interface{}) { + c.errs = append(c.errs, &errors.QueryError{ + Message: fmt.Sprintf(format, a...), + Locations: locs, + Rule: rule, + }) +} + +type opContext struct { + *context + ops []*query.Operation +} + +func Validate(s *schema.Schema, doc *query.Document) []*errors.QueryError { + c := &context{ + schema: s, + doc: doc, + opErrs: make(map[*query.Operation][]*errors.QueryError), + usedVars: make(map[*query.Operation]varSet), + fieldMap: make(map[*query.Field]fieldInfo), + overlapValidated: make(map[selectionPair]struct{}), + } + + opNames := make(nameSet) + fragUsedBy := make(map[*query.FragmentDecl][]*query.Operation) + for _, op := range doc.Operations { + c.usedVars[op] = make(varSet) + opc := &opContext{c, []*query.Operation{op}} + + if op.Name.Name == "" && len(doc.Operations) != 1 { + c.addErr(op.Loc, "LoneAnonymousOperation", "This anonymous operation must be the only defined operation.") + } + if op.Name.Name != "" { + validateName(c, opNames, op.Name, "UniqueOperationNames", "operation") + } + + validateDirectives(opc, string(op.Type), op.Directives) + + varNames := make(nameSet) + for _, v := range op.Vars { + validateName(c, varNames, v.Name, "UniqueVariableNames", "variable") + + t := resolveType(c, v.Type) + if !canBeInput(t) { + c.addErr(v.TypeLoc, "VariablesAreInputTypes", "Variable %q cannot be non-input type %q.", "$"+v.Name.Name, t) + } + + if v.Default != nil { + validateLiteral(opc, v.Default) + + if t != nil { + if nn, ok := t.(*common.NonNull); ok { + c.addErr(v.Default.Location(), "DefaultValuesOfCorrectType", "Variable %q of type %q is required and will not use the default value. 
Perhaps you meant to use type %q.", "$"+v.Name.Name, t, nn.OfType) + } + + if ok, reason := validateValueType(opc, v.Default, t); !ok { + c.addErr(v.Default.Location(), "DefaultValuesOfCorrectType", "Variable %q of type %q has invalid default value %s.\n%s", "$"+v.Name.Name, t, v.Default, reason) + } + } + } + } + + var entryPoint schema.NamedType + switch op.Type { + case query.Query: + entryPoint = s.EntryPoints["query"] + case query.Mutation: + entryPoint = s.EntryPoints["mutation"] + case query.Subscription: + entryPoint = s.EntryPoints["subscription"] + default: + panic("unreachable") + } + + validateSelectionSet(opc, op.Selections, entryPoint) + + fragUsed := make(map[*query.FragmentDecl]struct{}) + markUsedFragments(c, op.Selections, fragUsed) + for frag := range fragUsed { + fragUsedBy[frag] = append(fragUsedBy[frag], op) + } + } + + fragNames := make(nameSet) + fragVisited := make(map[*query.FragmentDecl]struct{}) + for _, frag := range doc.Fragments { + opc := &opContext{c, fragUsedBy[frag]} + + validateName(c, fragNames, frag.Name, "UniqueFragmentNames", "fragment") + validateDirectives(opc, "FRAGMENT_DEFINITION", frag.Directives) + + t := unwrapType(resolveType(c, &frag.On)) + // continue even if t is nil + if t != nil && !canBeFragment(t) { + c.addErr(frag.On.Loc, "FragmentsOnCompositeTypes", "Fragment %q cannot condition on non composite type %q.", frag.Name.Name, t) + continue + } + + validateSelectionSet(opc, frag.Selections, t) + + if _, ok := fragVisited[frag]; !ok { + detectFragmentCycle(c, frag.Selections, fragVisited, nil, map[string]int{frag.Name.Name: 0}) + } + } + + for _, frag := range doc.Fragments { + if len(fragUsedBy[frag]) == 0 { + c.addErr(frag.Loc, "NoUnusedFragments", "Fragment %q is never used.", frag.Name.Name) + } + } + + for _, op := range doc.Operations { + c.errs = append(c.errs, c.opErrs[op]...) 
+ + opUsedVars := c.usedVars[op] + for _, v := range op.Vars { + if _, ok := opUsedVars[v]; !ok { + opSuffix := "" + if op.Name.Name != "" { + opSuffix = fmt.Sprintf(" in operation %q", op.Name.Name) + } + c.addErr(v.Loc, "NoUnusedVariables", "Variable %q is never used%s.", "$"+v.Name.Name, opSuffix) + } + } + } + + return c.errs +} + +func validateSelectionSet(c *opContext, sels []query.Selection, t schema.NamedType) { + for _, sel := range sels { + validateSelection(c, sel, t) + } + + for i, a := range sels { + for _, b := range sels[i+1:] { + c.validateOverlap(a, b, nil, nil) + } + } +} + +func validateSelection(c *opContext, sel query.Selection, t schema.NamedType) { + switch sel := sel.(type) { + case *query.Field: + validateDirectives(c, "FIELD", sel.Directives) + + fieldName := sel.Name.Name + var f *schema.Field + switch fieldName { + case "__typename": + f = &schema.Field{ + Name: "__typename", + Type: c.schema.Types["String"], + } + case "__schema": + f = &schema.Field{ + Name: "__schema", + Type: c.schema.Types["__Schema"], + } + case "__type": + f = &schema.Field{ + Name: "__type", + Args: common.InputValueList{ + &common.InputValue{ + Name: common.Ident{Name: "name"}, + Type: &common.NonNull{OfType: c.schema.Types["String"]}, + }, + }, + Type: c.schema.Types["__Type"], + } + default: + f = fields(t).Get(fieldName) + if f == nil && t != nil { + suggestion := makeSuggestion("Did you mean", fields(t).Names(), fieldName) + c.addErr(sel.Alias.Loc, "FieldsOnCorrectType", "Cannot query field %q on type %q.%s", fieldName, t, suggestion) + } + } + c.fieldMap[sel] = fieldInfo{sf: f, parent: t} + + validateArgumentLiterals(c, sel.Arguments) + if f != nil { + validateArgumentTypes(c, sel.Arguments, f.Args, sel.Alias.Loc, + func() string { return fmt.Sprintf("field %q of type %q", fieldName, t) }, + func() string { return fmt.Sprintf("Field %q", fieldName) }, + ) + } + + var ft common.Type + if f != nil { + ft = f.Type + sf := hasSubfields(ft) + if sf && 
sel.Selections == nil { + c.addErr(sel.Alias.Loc, "ScalarLeafs", "Field %q of type %q must have a selection of subfields. Did you mean \"%s { ... }\"?", fieldName, ft, fieldName) + } + if !sf && sel.Selections != nil { + c.addErr(sel.SelectionSetLoc, "ScalarLeafs", "Field %q must not have a selection since type %q has no subfields.", fieldName, ft) + } + } + if sel.Selections != nil { + validateSelectionSet(c, sel.Selections, unwrapType(ft)) + } + + case *query.InlineFragment: + validateDirectives(c, "INLINE_FRAGMENT", sel.Directives) + if sel.On.Name != "" { + fragTyp := unwrapType(resolveType(c.context, &sel.On)) + if fragTyp != nil && !compatible(t, fragTyp) { + c.addErr(sel.Loc, "PossibleFragmentSpreads", "Fragment cannot be spread here as objects of type %q can never be of type %q.", t, fragTyp) + } + t = fragTyp + // continue even if t is nil + } + if t != nil && !canBeFragment(t) { + c.addErr(sel.On.Loc, "FragmentsOnCompositeTypes", "Fragment cannot condition on non composite type %q.", t) + return + } + validateSelectionSet(c, sel.Selections, unwrapType(t)) + + case *query.FragmentSpread: + validateDirectives(c, "FRAGMENT_SPREAD", sel.Directives) + frag := c.doc.Fragments.Get(sel.Name.Name) + if frag == nil { + c.addErr(sel.Name.Loc, "KnownFragmentNames", "Unknown fragment %q.", sel.Name.Name) + return + } + fragTyp := c.schema.Types[frag.On.Name] + if !compatible(t, fragTyp) { + c.addErr(sel.Loc, "PossibleFragmentSpreads", "Fragment %q cannot be spread here as objects of type %q can never be of type %q.", frag.Name.Name, t, fragTyp) + } + + default: + panic("unreachable") + } +} + +func compatible(a, b common.Type) bool { + for _, pta := range possibleTypes(a) { + for _, ptb := range possibleTypes(b) { + if pta == ptb { + return true + } + } + } + return false +} + +func possibleTypes(t common.Type) []*schema.Object { + switch t := t.(type) { + case *schema.Object: + return []*schema.Object{t} + case *schema.Interface: + return t.PossibleTypes + case 
*schema.Union: + return t.PossibleTypes + default: + return nil + } +} + +func markUsedFragments(c *context, sels []query.Selection, fragUsed map[*query.FragmentDecl]struct{}) { + for _, sel := range sels { + switch sel := sel.(type) { + case *query.Field: + if sel.Selections != nil { + markUsedFragments(c, sel.Selections, fragUsed) + } + + case *query.InlineFragment: + markUsedFragments(c, sel.Selections, fragUsed) + + case *query.FragmentSpread: + frag := c.doc.Fragments.Get(sel.Name.Name) + if frag == nil { + return + } + + if _, ok := fragUsed[frag]; ok { + return + } + fragUsed[frag] = struct{}{} + markUsedFragments(c, frag.Selections, fragUsed) + + default: + panic("unreachable") + } + } +} + +func detectFragmentCycle(c *context, sels []query.Selection, fragVisited map[*query.FragmentDecl]struct{}, spreadPath []*query.FragmentSpread, spreadPathIndex map[string]int) { + for _, sel := range sels { + detectFragmentCycleSel(c, sel, fragVisited, spreadPath, spreadPathIndex) + } +} + +func detectFragmentCycleSel(c *context, sel query.Selection, fragVisited map[*query.FragmentDecl]struct{}, spreadPath []*query.FragmentSpread, spreadPathIndex map[string]int) { + switch sel := sel.(type) { + case *query.Field: + if sel.Selections != nil { + detectFragmentCycle(c, sel.Selections, fragVisited, spreadPath, spreadPathIndex) + } + + case *query.InlineFragment: + detectFragmentCycle(c, sel.Selections, fragVisited, spreadPath, spreadPathIndex) + + case *query.FragmentSpread: + frag := c.doc.Fragments.Get(sel.Name.Name) + if frag == nil { + return + } + + spreadPath = append(spreadPath, sel) + if i, ok := spreadPathIndex[frag.Name.Name]; ok { + cyclePath := spreadPath[i:] + via := "" + if len(cyclePath) > 1 { + names := make([]string, len(cyclePath)-1) + for i, frag := range cyclePath[:len(cyclePath)-1] { + names[i] = frag.Name.Name + } + via = " via " + strings.Join(names, ", ") + } + + locs := make([]errors.Location, len(cyclePath)) + for i, frag := range cyclePath { + 
locs[i] = frag.Loc + } + c.addErrMultiLoc(locs, "NoFragmentCycles", "Cannot spread fragment %q within itself%s.", frag.Name.Name, via) + return + } + + if _, ok := fragVisited[frag]; ok { + return + } + fragVisited[frag] = struct{}{} + + spreadPathIndex[frag.Name.Name] = len(spreadPath) + detectFragmentCycle(c, frag.Selections, fragVisited, spreadPath, spreadPathIndex) + delete(spreadPathIndex, frag.Name.Name) + + default: + panic("unreachable") + } +} + +func (c *context) validateOverlap(a, b query.Selection, reasons *[]string, locs *[]errors.Location) { + if a == b { + return + } + + if _, ok := c.overlapValidated[selectionPair{a, b}]; ok { + return + } + c.overlapValidated[selectionPair{a, b}] = struct{}{} + c.overlapValidated[selectionPair{b, a}] = struct{}{} + + switch a := a.(type) { + case *query.Field: + switch b := b.(type) { + case *query.Field: + if b.Alias.Loc.Before(a.Alias.Loc) { + a, b = b, a + } + if reasons2, locs2 := c.validateFieldOverlap(a, b); len(reasons2) != 0 { + locs2 = append(locs2, a.Alias.Loc, b.Alias.Loc) + if reasons == nil { + c.addErrMultiLoc(locs2, "OverlappingFieldsCanBeMerged", "Fields %q conflict because %s. Use different aliases on the fields to fetch both if this was intentional.", a.Alias.Name, strings.Join(reasons2, " and ")) + return + } + for _, r := range reasons2 { + *reasons = append(*reasons, fmt.Sprintf("subfields %q conflict because %s", a.Alias.Name, r)) + } + *locs = append(*locs, locs2...) 
+ } + + case *query.InlineFragment: + for _, sel := range b.Selections { + c.validateOverlap(a, sel, reasons, locs) + } + + case *query.FragmentSpread: + if frag := c.doc.Fragments.Get(b.Name.Name); frag != nil { + for _, sel := range frag.Selections { + c.validateOverlap(a, sel, reasons, locs) + } + } + + default: + panic("unreachable") + } + + case *query.InlineFragment: + for _, sel := range a.Selections { + c.validateOverlap(sel, b, reasons, locs) + } + + case *query.FragmentSpread: + if frag := c.doc.Fragments.Get(a.Name.Name); frag != nil { + for _, sel := range frag.Selections { + c.validateOverlap(sel, b, reasons, locs) + } + } + + default: + panic("unreachable") + } +} + +func (c *context) validateFieldOverlap(a, b *query.Field) ([]string, []errors.Location) { + if a.Alias.Name != b.Alias.Name { + return nil, nil + } + + if asf := c.fieldMap[a].sf; asf != nil { + if bsf := c.fieldMap[b].sf; bsf != nil { + if !typesCompatible(asf.Type, bsf.Type) { + return []string{fmt.Sprintf("they return conflicting types %s and %s", asf.Type, bsf.Type)}, nil + } + } + } + + at := c.fieldMap[a].parent + bt := c.fieldMap[b].parent + if at == nil || bt == nil || at == bt { + if a.Name.Name != b.Name.Name { + return []string{fmt.Sprintf("%s and %s are different fields", a.Name.Name, b.Name.Name)}, nil + } + + if argumentsConflict(a.Arguments, b.Arguments) { + return []string{"they have differing arguments"}, nil + } + } + + var reasons []string + var locs []errors.Location + for _, a2 := range a.Selections { + for _, b2 := range b.Selections { + c.validateOverlap(a2, b2, &reasons, &locs) + } + } + return reasons, locs +} + +func argumentsConflict(a, b common.ArgumentList) bool { + if len(a) != len(b) { + return true + } + for _, argA := range a { + valB, ok := b.Get(argA.Name.Name) + if !ok || !reflect.DeepEqual(argA.Value.Value(nil), valB.Value(nil)) { + return true + } + } + return false +} + +func fields(t common.Type) schema.FieldList { + switch t := t.(type) { + case 
*schema.Object: + return t.Fields + case *schema.Interface: + return t.Fields + default: + return nil + } +} + +func unwrapType(t common.Type) schema.NamedType { + if t == nil { + return nil + } + for { + switch t2 := t.(type) { + case schema.NamedType: + return t2 + case *common.List: + t = t2.OfType + case *common.NonNull: + t = t2.OfType + default: + panic("unreachable") + } + } +} + +func resolveType(c *context, t common.Type) common.Type { + t2, err := common.ResolveType(t, c.schema.Resolve) + if err != nil { + c.errs = append(c.errs, err) + } + return t2 +} + +func validateDirectives(c *opContext, loc string, directives common.DirectiveList) { + directiveNames := make(nameSet) + for _, d := range directives { + dirName := d.Name.Name + validateNameCustomMsg(c.context, directiveNames, d.Name, "UniqueDirectivesPerLocation", func() string { + return fmt.Sprintf("The directive %q can only be used once at this location.", dirName) + }) + + validateArgumentLiterals(c, d.Args) + + dd, ok := c.schema.Directives[dirName] + if !ok { + c.addErr(d.Name.Loc, "KnownDirectives", "Unknown directive %q.", dirName) + continue + } + + locOK := false + for _, allowedLoc := range dd.Locs { + if loc == allowedLoc { + locOK = true + break + } + } + if !locOK { + c.addErr(d.Name.Loc, "KnownDirectives", "Directive %q may not be used on %s.", dirName, loc) + } + + validateArgumentTypes(c, d.Args, dd.Args, d.Name.Loc, + func() string { return fmt.Sprintf("directive %q", "@"+dirName) }, + func() string { return fmt.Sprintf("Directive %q", "@"+dirName) }, + ) + } + return +} + +type nameSet map[string]errors.Location + +func validateName(c *context, set nameSet, name common.Ident, rule string, kind string) { + validateNameCustomMsg(c, set, name, rule, func() string { + return fmt.Sprintf("There can be only one %s named %q.", kind, name.Name) + }) +} + +func validateNameCustomMsg(c *context, set nameSet, name common.Ident, rule string, msg func() string) { + if loc, ok := set[name.Name]; 
ok { + c.addErrMultiLoc([]errors.Location{loc, name.Loc}, rule, msg()) + return + } + set[name.Name] = name.Loc + return +} + +func validateArgumentTypes(c *opContext, args common.ArgumentList, argDecls common.InputValueList, loc errors.Location, owner1, owner2 func() string) { + for _, selArg := range args { + arg := argDecls.Get(selArg.Name.Name) + if arg == nil { + c.addErr(selArg.Name.Loc, "KnownArgumentNames", "Unknown argument %q on %s.", selArg.Name.Name, owner1()) + continue + } + value := selArg.Value + if ok, reason := validateValueType(c, value, arg.Type); !ok { + c.addErr(value.Location(), "ArgumentsOfCorrectType", "Argument %q has invalid value %s.\n%s", arg.Name.Name, value, reason) + } + } + for _, decl := range argDecls { + if _, ok := decl.Type.(*common.NonNull); ok { + if _, ok := args.Get(decl.Name.Name); !ok { + c.addErr(loc, "ProvidedNonNullArguments", "%s argument %q of type %q is required but not provided.", owner2(), decl.Name.Name, decl.Type) + } + } + } +} + +func validateArgumentLiterals(c *opContext, args common.ArgumentList) { + argNames := make(nameSet) + for _, arg := range args { + validateName(c.context, argNames, arg.Name, "UniqueArgumentNames", "argument") + validateLiteral(c, arg.Value) + } +} + +func validateLiteral(c *opContext, l common.Literal) { + switch l := l.(type) { + case *common.ObjectLit: + fieldNames := make(nameSet) + for _, f := range l.Fields { + validateName(c.context, fieldNames, f.Name, "UniqueInputFieldNames", "input field") + validateLiteral(c, f.Value) + } + case *common.ListLit: + for _, entry := range l.Entries { + validateLiteral(c, entry) + } + case *common.Variable: + for _, op := range c.ops { + v := op.Vars.Get(l.Name) + if v == nil { + byOp := "" + if op.Name.Name != "" { + byOp = fmt.Sprintf(" by operation %q", op.Name.Name) + } + c.opErrs[op] = append(c.opErrs[op], &errors.QueryError{ + Message: fmt.Sprintf("Variable %q is not defined%s.", "$"+l.Name, byOp), + Locations: []errors.Location{l.Loc, 
op.Loc}, + Rule: "NoUndefinedVariables", + }) + continue + } + c.usedVars[op][v] = struct{}{} + } + } +} + +func validateValueType(c *opContext, v common.Literal, t common.Type) (bool, string) { + if v, ok := v.(*common.Variable); ok { + for _, op := range c.ops { + if v2 := op.Vars.Get(v.Name); v2 != nil { + t2, err := common.ResolveType(v2.Type, c.schema.Resolve) + if _, ok := t2.(*common.NonNull); !ok && v2.Default != nil { + t2 = &common.NonNull{OfType: t2} + } + if err == nil && !typeCanBeUsedAs(t2, t) { + c.addErrMultiLoc([]errors.Location{v2.Loc, v.Loc}, "VariablesInAllowedPosition", "Variable %q of type %q used in position expecting type %q.", "$"+v.Name, t2, t) + } + } + } + return true, "" + } + + if nn, ok := t.(*common.NonNull); ok { + if isNull(v) { + return false, fmt.Sprintf("Expected %q, found null.", t) + } + t = nn.OfType + } + if isNull(v) { + return true, "" + } + + switch t := t.(type) { + case *schema.Scalar, *schema.Enum: + if lit, ok := v.(*common.BasicLit); ok { + if validateBasicLit(lit, t) { + return true, "" + } + } + + case *common.List: + list, ok := v.(*common.ListLit) + if !ok { + return validateValueType(c, v, t.OfType) // single value instead of list + } + for i, entry := range list.Entries { + if ok, reason := validateValueType(c, entry, t.OfType); !ok { + return false, fmt.Sprintf("In element #%d: %s", i, reason) + } + } + return true, "" + + case *schema.InputObject: + v, ok := v.(*common.ObjectLit) + if !ok { + return false, fmt.Sprintf("Expected %q, found not an object.", t) + } + for _, f := range v.Fields { + name := f.Name.Name + iv := t.Values.Get(name) + if iv == nil { + return false, fmt.Sprintf("In field %q: Unknown field.", name) + } + if ok, reason := validateValueType(c, f.Value, iv.Type); !ok { + return false, fmt.Sprintf("In field %q: %s", name, reason) + } + } + for _, iv := range t.Values { + found := false + for _, f := range v.Fields { + if f.Name.Name == iv.Name.Name { + found = true + break + } + } + if 
!found { + if _, ok := iv.Type.(*common.NonNull); ok && iv.Default == nil { + return false, fmt.Sprintf("In field %q: Expected %q, found null.", iv.Name.Name, iv.Type) + } + } + } + return true, "" + } + + return false, fmt.Sprintf("Expected type %q, found %s.", t, v) +} + +func validateBasicLit(v *common.BasicLit, t common.Type) bool { + switch t := t.(type) { + case *schema.Scalar: + switch t.Name { + case "Int": + if v.Type != scanner.Int { + return false + } + f, err := strconv.ParseFloat(v.Text, 64) + if err != nil { + panic(err) + } + return f >= math.MinInt32 && f <= math.MaxInt32 + case "Float": + return v.Type == scanner.Int || v.Type == scanner.Float + case "String": + return v.Type == scanner.String + case "Boolean": + return v.Type == scanner.Ident && (v.Text == "true" || v.Text == "false") + case "ID": + return v.Type == scanner.Int || v.Type == scanner.String + default: + //TODO: Type-check against expected type by Unmarshalling + return true + } + + case *schema.Enum: + if v.Type != scanner.Ident { + return false + } + for _, option := range t.Values { + if option.Name == v.Text { + return true + } + } + return false + } + + return false +} + +func canBeFragment(t common.Type) bool { + switch t.(type) { + case *schema.Object, *schema.Interface, *schema.Union: + return true + default: + return false + } +} + +func canBeInput(t common.Type) bool { + switch t := t.(type) { + case *schema.InputObject, *schema.Scalar, *schema.Enum: + return true + case *common.List: + return canBeInput(t.OfType) + case *common.NonNull: + return canBeInput(t.OfType) + default: + return false + } +} + +func hasSubfields(t common.Type) bool { + switch t := t.(type) { + case *schema.Object, *schema.Interface, *schema.Union: + return true + case *common.List: + return hasSubfields(t.OfType) + case *common.NonNull: + return hasSubfields(t.OfType) + default: + return false + } +} + +func isLeaf(t common.Type) bool { + switch t.(type) { + case *schema.Scalar, *schema.Enum: + 
return true + default: + return false + } +} + +func isNull(lit interface{}) bool { + _, ok := lit.(*common.NullLit) + return ok +} + +func typesCompatible(a, b common.Type) bool { + al, aIsList := a.(*common.List) + bl, bIsList := b.(*common.List) + if aIsList || bIsList { + return aIsList && bIsList && typesCompatible(al.OfType, bl.OfType) + } + + ann, aIsNN := a.(*common.NonNull) + bnn, bIsNN := b.(*common.NonNull) + if aIsNN || bIsNN { + return aIsNN && bIsNN && typesCompatible(ann.OfType, bnn.OfType) + } + + if isLeaf(a) || isLeaf(b) { + return a == b + } + + return true +} + +func typeCanBeUsedAs(t, as common.Type) bool { + nnT, okT := t.(*common.NonNull) + if okT { + t = nnT.OfType + } + + nnAs, okAs := as.(*common.NonNull) + if okAs { + as = nnAs.OfType + if !okT { + return false // nullable can not be used as non-null + } + } + + if t == as { + return true + } + + if lT, ok := t.(*common.List); ok { + if lAs, ok := as.(*common.List); ok { + return typeCanBeUsedAs(lT.OfType, lAs.OfType) + } + } + return false +} diff --git a/vendor/github.com/neelance/graphql-go/introspection.go b/vendor/github.com/neelance/graphql-go/introspection.go new file mode 100644 index 00000000..f72a7700 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/introspection.go @@ -0,0 +1,117 @@ +package graphql + +import ( + "context" + "encoding/json" + + "github.com/neelance/graphql-go/internal/exec/resolvable" + "github.com/neelance/graphql-go/introspection" +) + +// Inspect allows inspection of the given schema. +func (s *Schema) Inspect() *introspection.Schema { + return introspection.WrapSchema(s.schema) +} + +// ToJSON encodes the schema in a JSON format used by tools like Relay. 
+func (s *Schema) ToJSON() ([]byte, error) { + result := s.exec(context.Background(), introspectionQuery, "", nil, &resolvable.Schema{ + Query: &resolvable.Object{}, + Schema: *s.schema, + }) + if len(result.Errors) != 0 { + panic(result.Errors[0]) + } + return json.MarshalIndent(result.Data, "", "\t") +} + +var introspectionQuery = ` + query { + __schema { + queryType { name } + mutationType { name } + subscriptionType { name } + types { + ...FullType + } + directives { + name + description + locations + args { + ...InputValue + } + } + } + } + fragment FullType on __Type { + kind + name + description + fields(includeDeprecated: true) { + name + description + args { + ...InputValue + } + type { + ...TypeRef + } + isDeprecated + deprecationReason + } + inputFields { + ...InputValue + } + interfaces { + ...TypeRef + } + enumValues(includeDeprecated: true) { + name + description + isDeprecated + deprecationReason + } + possibleTypes { + ...TypeRef + } + } + fragment InputValue on __InputValue { + name + description + type { ...TypeRef } + defaultValue + } + fragment TypeRef on __Type { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + ofType { + kind + name + } + } + } + } + } + } + } + } +` diff --git a/vendor/github.com/neelance/graphql-go/introspection/introspection.go b/vendor/github.com/neelance/graphql-go/introspection/introspection.go new file mode 100644 index 00000000..d2969b7a --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/introspection/introspection.go @@ -0,0 +1,313 @@ +package introspection + +import ( + "sort" + + "github.com/neelance/graphql-go/internal/common" + "github.com/neelance/graphql-go/internal/schema" +) + +type Schema struct { + schema *schema.Schema +} + +// WrapSchema is only used internally. 
+func WrapSchema(schema *schema.Schema) *Schema { + return &Schema{schema} +} + +func (r *Schema) Types() []*Type { + var names []string + for name := range r.schema.Types { + names = append(names, name) + } + sort.Strings(names) + + l := make([]*Type, len(names)) + for i, name := range names { + l[i] = &Type{r.schema.Types[name]} + } + return l +} + +func (r *Schema) Directives() []*Directive { + var names []string + for name := range r.schema.Directives { + names = append(names, name) + } + sort.Strings(names) + + l := make([]*Directive, len(names)) + for i, name := range names { + l[i] = &Directive{r.schema.Directives[name]} + } + return l +} + +func (r *Schema) QueryType() *Type { + t, ok := r.schema.EntryPoints["query"] + if !ok { + return nil + } + return &Type{t} +} + +func (r *Schema) MutationType() *Type { + t, ok := r.schema.EntryPoints["mutation"] + if !ok { + return nil + } + return &Type{t} +} + +func (r *Schema) SubscriptionType() *Type { + t, ok := r.schema.EntryPoints["subscription"] + if !ok { + return nil + } + return &Type{t} +} + +type Type struct { + typ common.Type +} + +// WrapType is only used internally. 
+func WrapType(typ common.Type) *Type { + return &Type{typ} +} + +func (r *Type) Kind() string { + return r.typ.Kind() +} + +func (r *Type) Name() *string { + if named, ok := r.typ.(schema.NamedType); ok { + name := named.TypeName() + return &name + } + return nil +} + +func (r *Type) Description() *string { + if named, ok := r.typ.(schema.NamedType); ok { + desc := named.Description() + if desc == "" { + return nil + } + return &desc + } + return nil +} + +func (r *Type) Fields(args *struct{ IncludeDeprecated bool }) *[]*Field { + var fields schema.FieldList + switch t := r.typ.(type) { + case *schema.Object: + fields = t.Fields + case *schema.Interface: + fields = t.Fields + default: + return nil + } + + var l []*Field + for _, f := range fields { + if d := f.Directives.Get("deprecated"); d == nil || args.IncludeDeprecated { + l = append(l, &Field{f}) + } + } + return &l +} + +func (r *Type) Interfaces() *[]*Type { + t, ok := r.typ.(*schema.Object) + if !ok { + return nil + } + + l := make([]*Type, len(t.Interfaces)) + for i, intf := range t.Interfaces { + l[i] = &Type{intf} + } + return &l +} + +func (r *Type) PossibleTypes() *[]*Type { + var possibleTypes []*schema.Object + switch t := r.typ.(type) { + case *schema.Interface: + possibleTypes = t.PossibleTypes + case *schema.Union: + possibleTypes = t.PossibleTypes + default: + return nil + } + + l := make([]*Type, len(possibleTypes)) + for i, intf := range possibleTypes { + l[i] = &Type{intf} + } + return &l +} + +func (r *Type) EnumValues(args *struct{ IncludeDeprecated bool }) *[]*EnumValue { + t, ok := r.typ.(*schema.Enum) + if !ok { + return nil + } + + var l []*EnumValue + for _, v := range t.Values { + if d := v.Directives.Get("deprecated"); d == nil || args.IncludeDeprecated { + l = append(l, &EnumValue{v}) + } + } + return &l +} + +func (r *Type) InputFields() *[]*InputValue { + t, ok := r.typ.(*schema.InputObject) + if !ok { + return nil + } + + l := make([]*InputValue, len(t.Values)) + for i, v := 
range t.Values { + l[i] = &InputValue{v} + } + return &l +} + +func (r *Type) OfType() *Type { + switch t := r.typ.(type) { + case *common.List: + return &Type{t.OfType} + case *common.NonNull: + return &Type{t.OfType} + default: + return nil + } +} + +type Field struct { + field *schema.Field +} + +func (r *Field) Name() string { + return r.field.Name +} + +func (r *Field) Description() *string { + if r.field.Desc == "" { + return nil + } + return &r.field.Desc +} + +func (r *Field) Args() []*InputValue { + l := make([]*InputValue, len(r.field.Args)) + for i, v := range r.field.Args { + l[i] = &InputValue{v} + } + return l +} + +func (r *Field) Type() *Type { + return &Type{r.field.Type} +} + +func (r *Field) IsDeprecated() bool { + return r.field.Directives.Get("deprecated") != nil +} + +func (r *Field) DeprecationReason() *string { + d := r.field.Directives.Get("deprecated") + if d == nil { + return nil + } + reason := d.Args.MustGet("reason").Value(nil).(string) + return &reason +} + +type InputValue struct { + value *common.InputValue +} + +func (r *InputValue) Name() string { + return r.value.Name.Name +} + +func (r *InputValue) Description() *string { + if r.value.Desc == "" { + return nil + } + return &r.value.Desc +} + +func (r *InputValue) Type() *Type { + return &Type{r.value.Type} +} + +func (r *InputValue) DefaultValue() *string { + if r.value.Default == nil { + return nil + } + s := r.value.Default.String() + return &s +} + +type EnumValue struct { + value *schema.EnumValue +} + +func (r *EnumValue) Name() string { + return r.value.Name +} + +func (r *EnumValue) Description() *string { + if r.value.Desc == "" { + return nil + } + return &r.value.Desc +} + +func (r *EnumValue) IsDeprecated() bool { + return r.value.Directives.Get("deprecated") != nil +} + +func (r *EnumValue) DeprecationReason() *string { + d := r.value.Directives.Get("deprecated") + if d == nil { + return nil + } + reason := d.Args.MustGet("reason").Value(nil).(string) + return 
&reason +} + +type Directive struct { + directive *schema.DirectiveDecl +} + +func (r *Directive) Name() string { + return r.directive.Name +} + +func (r *Directive) Description() *string { + if r.directive.Desc == "" { + return nil + } + return &r.directive.Desc +} + +func (r *Directive) Locations() []string { + return r.directive.Locs +} + +func (r *Directive) Args() []*InputValue { + l := make([]*InputValue, len(r.directive.Args)) + for i, v := range r.directive.Args { + l[i] = &InputValue{v} + } + return l +} diff --git a/vendor/github.com/neelance/graphql-go/log/log.go b/vendor/github.com/neelance/graphql-go/log/log.go new file mode 100644 index 00000000..aaab4342 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/log/log.go @@ -0,0 +1,23 @@ +package log + +import ( + "context" + "log" + "runtime" +) + +// Logger is the interface used to log panics that occur durring query execution. It is setable via graphql.ParseSchema +type Logger interface { + LogPanic(ctx context.Context, value interface{}) +} + +// DefaultLogger is the default logger used to log panics that occur durring query execution +type DefaultLogger struct{} + +// LogPanic is used to log recovered panic values that occur durring query execution +func (l *DefaultLogger) LogPanic(_ context.Context, value interface{}) { + const size = 64 << 10 + buf := make([]byte, size) + buf = buf[:runtime.Stack(buf, false)] + log.Printf("graphql: panic occurred: %v\n%s", value, buf) +} diff --git a/vendor/github.com/neelance/graphql-go/relay/relay.go b/vendor/github.com/neelance/graphql-go/relay/relay.go new file mode 100644 index 00000000..61bdd93b --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/relay/relay.go @@ -0,0 +1,70 @@ +package relay + +import ( + "encoding/base64" + "encoding/json" + "errors" + "fmt" + "net/http" + "strings" + + graphql "github.com/neelance/graphql-go" +) + +func MarshalID(kind string, spec interface{}) graphql.ID { + d, err := json.Marshal(spec) + if err != nil { + 
panic(fmt.Errorf("relay.MarshalID: %s", err)) + } + return graphql.ID(base64.URLEncoding.EncodeToString(append([]byte(kind+":"), d...))) +} + +func UnmarshalKind(id graphql.ID) string { + s, err := base64.URLEncoding.DecodeString(string(id)) + if err != nil { + return "" + } + i := strings.IndexByte(string(s), ':') + if i == -1 { + return "" + } + return string(s[:i]) +} + +func UnmarshalSpec(id graphql.ID, v interface{}) error { + s, err := base64.URLEncoding.DecodeString(string(id)) + if err != nil { + return err + } + i := strings.IndexByte(string(s), ':') + if i == -1 { + return errors.New("invalid graphql.ID") + } + return json.Unmarshal([]byte(s[i+1:]), v) +} + +type Handler struct { + Schema *graphql.Schema +} + +func (h *Handler) ServeHTTP(w http.ResponseWriter, r *http.Request) { + var params struct { + Query string `json:"query"` + OperationName string `json:"operationName"` + Variables map[string]interface{} `json:"variables"` + } + if err := json.NewDecoder(r.Body).Decode(¶ms); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + + response := h.Schema.Exec(r.Context(), params.Query, params.OperationName, params.Variables) + responseJSON, err := json.Marshal(response) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + w.Header().Set("Content-Type", "application/json") + w.Write(responseJSON) +} diff --git a/vendor/github.com/neelance/graphql-go/relay/relay_test.go b/vendor/github.com/neelance/graphql-go/relay/relay_test.go new file mode 100644 index 00000000..72d8d51b --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/relay/relay_test.go @@ -0,0 +1,36 @@ +package relay_test + +import ( + "net/http/httptest" + "strings" + "testing" + + "github.com/neelance/graphql-go" + "github.com/neelance/graphql-go/example/starwars" + "github.com/neelance/graphql-go/relay" +) + +var starwarsSchema = graphql.MustParseSchema(starwars.Schema, &starwars.Resolver{}) + +func TestServeHTTP(t 
*testing.T) { + w := httptest.NewRecorder() + r := httptest.NewRequest("POST", "/some/path/here", strings.NewReader(`{"query":"{ hero { name } }", "operationName":"", "variables": null}`)) + h := relay.Handler{Schema: starwarsSchema} + + h.ServeHTTP(w, r) + + if w.Code != 200 { + t.Fatalf("Expected status code 200, got %d.", w.Code) + } + + contentType := w.Header().Get("Content-Type") + if contentType != "application/json" { + t.Fatalf("Invalid content-type. Expected [application/json], but instead got [%s]", contentType) + } + + expectedResponse := `{"data":{"hero":{"name":"R2-D2"}}}` + actualResponse := w.Body.String() + if expectedResponse != actualResponse { + t.Fatalf("Invalid response. Expected [%s], but instead got [%s]", expectedResponse, actualResponse) + } +} diff --git a/vendor/github.com/neelance/graphql-go/time.go b/vendor/github.com/neelance/graphql-go/time.go new file mode 100644 index 00000000..05c616d0 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/time.go @@ -0,0 +1,36 @@ +package graphql + +import ( + "fmt" + "time" +) + +// Time is a custom GraphQL type to represent an instant in time. It has to be added to a schema +// via "scalar Time" since it is not a predeclared GraphQL type like "ID". 
+type Time struct { + time.Time +} + +func (_ Time) ImplementsGraphQLType(name string) bool { + return name == "Time" +} + +func (t *Time) UnmarshalGraphQL(input interface{}) error { + switch input := input.(type) { + case time.Time: + t.Time = input + return nil + case string: + var err error + t.Time, err = time.Parse(time.RFC3339, input) + return err + case int: + t.Time = time.Unix(int64(input), 0) + return nil + case float64: + t.Time = time.Unix(int64(input), 0) + return nil + default: + return fmt.Errorf("wrong type") + } +} diff --git a/vendor/github.com/neelance/graphql-go/trace/trace.go b/vendor/github.com/neelance/graphql-go/trace/trace.go new file mode 100644 index 00000000..443f62a1 --- /dev/null +++ b/vendor/github.com/neelance/graphql-go/trace/trace.go @@ -0,0 +1,80 @@ +package trace + +import ( + "context" + "fmt" + + "github.com/neelance/graphql-go/errors" + "github.com/neelance/graphql-go/introspection" + opentracing "github.com/opentracing/opentracing-go" + "github.com/opentracing/opentracing-go/ext" + "github.com/opentracing/opentracing-go/log" +) + +type TraceQueryFinishFunc func([]*errors.QueryError) +type TraceFieldFinishFunc func(*errors.QueryError) + +type Tracer interface { + TraceQuery(ctx context.Context, queryString string, operationName string, variables map[string]interface{}, varTypes map[string]*introspection.Type) (context.Context, TraceQueryFinishFunc) + TraceField(ctx context.Context, label, typeName, fieldName string, trivial bool, args map[string]interface{}) (context.Context, TraceFieldFinishFunc) +} + +type OpenTracingTracer struct{} + +func (OpenTracingTracer) TraceQuery(ctx context.Context, queryString string, operationName string, variables map[string]interface{}, varTypes map[string]*introspection.Type) (context.Context, TraceQueryFinishFunc) { + span, spanCtx := opentracing.StartSpanFromContext(ctx, "GraphQL request") + span.SetTag("graphql.query", queryString) + + if operationName != "" { + 
span.SetTag("graphql.operationName", operationName) + } + + if len(variables) != 0 { + span.LogFields(log.Object("graphql.variables", variables)) + } + + return spanCtx, func(errs []*errors.QueryError) { + if len(errs) > 0 { + msg := errs[0].Error() + if len(errs) > 1 { + msg += fmt.Sprintf(" (and %d more errors)", len(errs)-1) + } + ext.Error.Set(span, true) + span.SetTag("graphql.error", msg) + } + span.Finish() + } +} + +func (OpenTracingTracer) TraceField(ctx context.Context, label, typeName, fieldName string, trivial bool, args map[string]interface{}) (context.Context, TraceFieldFinishFunc) { + if trivial { + return ctx, noop + } + + span, spanCtx := opentracing.StartSpanFromContext(ctx, label) + span.SetTag("graphql.type", typeName) + span.SetTag("graphql.field", fieldName) + for name, value := range args { + span.SetTag("graphql.args."+name, value) + } + + return spanCtx, func(err *errors.QueryError) { + if err != nil { + ext.Error.Set(span, true) + span.SetTag("graphql.error", err.Error()) + } + span.Finish() + } +} + +func noop(*errors.QueryError) {} + +type NoopTracer struct{} + +func (NoopTracer) TraceQuery(ctx context.Context, queryString string, operationName string, variables map[string]interface{}, varTypes map[string]*introspection.Type) (context.Context, TraceQueryFinishFunc) { + return ctx, func(errs []*errors.QueryError) {} +} + +func (NoopTracer) TraceField(ctx context.Context, label, typeName, fieldName string, trivial bool, args map[string]interface{}) (context.Context, TraceFieldFinishFunc) { + return ctx, func(err *errors.QueryError) {} +} diff --git a/vendor/github.com/opentracing/opentracing-go/.gitignore b/vendor/github.com/opentracing/opentracing-go/.gitignore new file mode 100644 index 00000000..565f0f73 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/.gitignore @@ -0,0 +1,13 @@ +# IntelliJ project files +.idea/ +opentracing-go.iml +opentracing-go.ipr +opentracing-go.iws + +# Test results +*.cov +*.html +test.log + +# 
Build dir +build/ diff --git a/vendor/github.com/opentracing/opentracing-go/.travis.yml b/vendor/github.com/opentracing/opentracing-go/.travis.yml new file mode 100644 index 00000000..0538f1bf --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/.travis.yml @@ -0,0 +1,14 @@ +language: go + +go: + - 1.6 + - 1.7 + - 1.8 + - tip + +install: + - go get -d -t github.com/opentracing/opentracing-go/... + - go get -u github.com/golang/lint/... +script: + - make test lint + - go build ./... diff --git a/vendor/github.com/opentracing/opentracing-go/CHANGELOG.md b/vendor/github.com/opentracing/opentracing-go/CHANGELOG.md new file mode 100644 index 00000000..1fc9fdf7 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/CHANGELOG.md @@ -0,0 +1,14 @@ +Changes by Version +================== + +1.1.0 (unreleased) +------------------- + +- Deprecate InitGlobalTracer() in favor of SetGlobalTracer() + + +1.0.0 (2016-09-26) +------------------- + +- This release implements OpenTracing Specification 1.0 (http://opentracing.io/spec) + diff --git a/vendor/github.com/opentracing/opentracing-go/LICENSE b/vendor/github.com/opentracing/opentracing-go/LICENSE new file mode 100644 index 00000000..148509a4 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2016 The OpenTracing Authors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/opentracing/opentracing-go/Makefile b/vendor/github.com/opentracing/opentracing-go/Makefile new file mode 100644 index 00000000..2f491f15 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/Makefile @@ -0,0 +1,32 @@ +PACKAGES := . ./mocktracer/... ./ext/... + +.DEFAULT_GOAL := test-and-lint + +.PHONE: test-and-lint + +test-and-lint: test lint + +.PHONY: test +test: + go test -v -cover ./... + +cover: + @rm -rf cover-all.out + $(foreach pkg, $(PACKAGES), $(MAKE) cover-pkg PKG=$(pkg) || true;) + @grep mode: cover.out > coverage.out + @cat cover-all.out >> coverage.out + go tool cover -html=coverage.out -o cover.html + @rm -rf cover.out cover-all.out coverage.out + +cover-pkg: + go test -coverprofile cover.out $(PKG) + @grep -v mode: cover.out >> cover-all.out + +.PHONY: lint +lint: + go fmt ./... + golint ./... + @# Run again with magic to exit non-zero if golint outputs anything. + @! (golint ./... | read dummy) + go vet ./... 
+ diff --git a/vendor/github.com/opentracing/opentracing-go/README.md b/vendor/github.com/opentracing/opentracing-go/README.md new file mode 100644 index 00000000..1fb77d22 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/README.md @@ -0,0 +1,147 @@ +[![Gitter chat](http://img.shields.io/badge/gitter-join%20chat%20%E2%86%92-brightgreen.svg)](https://gitter.im/opentracing/public) [![Build Status](https://travis-ci.org/opentracing/opentracing-go.svg?branch=master)](https://travis-ci.org/opentracing/opentracing-go) [![GoDoc](https://godoc.org/github.com/opentracing/opentracing-go?status.svg)](http://godoc.org/github.com/opentracing/opentracing-go) + +# OpenTracing API for Go + +This package is a Go platform API for OpenTracing. + +## Required Reading + +In order to understand the Go platform API, one must first be familiar with the +[OpenTracing project](http://opentracing.io) and +[terminology](http://opentracing.io/documentation/pages/spec.html) more specifically. + +## API overview for those adding instrumentation + +Everyday consumers of this `opentracing` package really only need to worry +about a couple of key abstractions: the `StartSpan` function, the `Span` +interface, and binding a `Tracer` at `main()`-time. Here are code snippets +demonstrating some important use cases. + +#### Singleton initialization + +The simplest starting point is `./default_tracer.go`. As early as possible, call + +```go + import "github.com/opentracing/opentracing-go" + import ".../some_tracing_impl" + + func main() { + opentracing.InitGlobalTracer( + // tracing impl specific: + some_tracing_impl.New(...), + ) + ... + } +``` + +##### Non-Singleton initialization + +If you prefer direct control to singletons, manage ownership of the +`opentracing.Tracer` implementation explicitly. + +#### Creating a Span given an existing Go `context.Context` + +If you use `context.Context` in your application, OpenTracing's Go library will +happily rely on it for `Span` propagation. 
To start a new (blocking child) +`Span`, you can use `StartSpanFromContext`. + +```go + func xyz(ctx context.Context, ...) { + ... + span, ctx := opentracing.StartSpanFromContext(ctx, "operation_name") + defer span.Finish() + span.LogFields( + log.String("event", "soft error"), + log.String("type", "cache timeout"), + log.Int("waited.millis", 1500)) + ... + } +``` + +#### Starting an empty trace by creating a "root span" + +It's always possible to create a "root" `Span` with no parent or other causal +reference. + +```go + func xyz() { + ... + sp := opentracing.StartSpan("operation_name") + defer sp.Finish() + ... + } +``` + +#### Creating a (child) Span given an existing (parent) Span + +```go + func xyz(parentSpan opentracing.Span, ...) { + ... + sp := opentracing.StartSpan( + "operation_name", + opentracing.ChildOf(parentSpan.Context())) + defer sp.Finish() + ... + } +``` + +#### Serializing to the wire + +```go + func makeSomeRequest(ctx context.Context) ... { + if span := opentracing.SpanFromContext(ctx); span != nil { + httpClient := &http.Client{} + httpReq, _ := http.NewRequest("GET", "http://myservice/", nil) + + // Transmit the span's TraceContext as HTTP headers on our + // outbound request. + opentracing.GlobalTracer().Inject( + span.Context(), + opentracing.HTTPHeaders, + opentracing.HTTPHeadersCarrier(httpReq.Header)) + + resp, err := httpClient.Do(httpReq) + ... + } + ... + } +``` + +#### Deserializing from the wire + +```go + http.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) { + var serverSpan opentracing.Span + appSpecificOperationName := ... + wireContext, err := opentracing.GlobalTracer().Extract( + opentracing.HTTPHeaders, + opentracing.HTTPHeadersCarrier(req.Header)) + if err != nil { + // Optionally record something about err here + } + + // Create the span referring to the RPC client if available. + // If wireContext == nil, a root span will be created. 
+ serverSpan = opentracing.StartSpan( + appSpecificOperationName, + ext.RPCServerOption(wireContext)) + + defer serverSpan.Finish() + + ctx := opentracing.ContextWithSpan(context.Background(), serverSpan) + ... + } +``` + +#### Goroutine-safety + +The entire public API is goroutine-safe and does not require external +synchronization. + +## API pointers for those implementing a tracing system + +Tracing system implementors may be able to reuse or copy-paste-modify the `basictracer` package, found [here](https://github.com/opentracing/basictracer-go). In particular, see `basictracer.New(...)`. + +## API compatibility + +For the time being, "mild" backwards-incompatible changes may be made without changing the major version number. As OpenTracing and `opentracing-go` mature, backwards compatibility will become more of a priority. diff --git a/vendor/github.com/opentracing/opentracing-go/ext/tags.go b/vendor/github.com/opentracing/opentracing-go/ext/tags.go new file mode 100644 index 00000000..c67ab5ee --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/ext/tags.go @@ -0,0 +1,198 @@ +package ext + +import opentracing "github.com/opentracing/opentracing-go" + +// These constants define common tag names recommended for better portability across +// tracing systems and languages/platforms. +// +// The tag names are defined as typed strings, so that in addition to the usual use +// +// span.setTag(TagName, value) +// +// they also support value type validation via this additional syntax: +// +// TagName.Set(span, value) +// +var ( + ////////////////////////////////////////////////////////////////////// + // SpanKind (client/server or producer/consumer) + ////////////////////////////////////////////////////////////////////// + + // SpanKind hints at relationship between spans, e.g. 
client/server + SpanKind = spanKindTagName("span.kind") + + // SpanKindRPCClient marks a span representing the client-side of an RPC + // or other remote call + SpanKindRPCClientEnum = SpanKindEnum("client") + SpanKindRPCClient = opentracing.Tag{Key: string(SpanKind), Value: SpanKindRPCClientEnum} + + // SpanKindRPCServer marks a span representing the server-side of an RPC + // or other remote call + SpanKindRPCServerEnum = SpanKindEnum("server") + SpanKindRPCServer = opentracing.Tag{Key: string(SpanKind), Value: SpanKindRPCServerEnum} + + // SpanKindProducer marks a span representing the producer-side of a + // message bus + SpanKindProducerEnum = SpanKindEnum("producer") + SpanKindProducer = opentracing.Tag{Key: string(SpanKind), Value: SpanKindProducerEnum} + + // SpanKindConsumer marks a span representing the consumer-side of a + // message bus + SpanKindConsumerEnum = SpanKindEnum("consumer") + SpanKindConsumer = opentracing.Tag{Key: string(SpanKind), Value: SpanKindConsumerEnum} + + ////////////////////////////////////////////////////////////////////// + // Component name + ////////////////////////////////////////////////////////////////////// + + // Component is a low-cardinality identifier of the module, library, + // or package that is generating a span. + Component = stringTagName("component") + + ////////////////////////////////////////////////////////////////////// + // Sampling hint + ////////////////////////////////////////////////////////////////////// + + // SamplingPriority determines the priority of sampling this Span. + SamplingPriority = uint16TagName("sampling.priority") + + ////////////////////////////////////////////////////////////////////// + // Peer tags. These tags can be emitted by either client-side of + // server-side to describe the other side/service in a peer-to-peer + // communications, like an RPC call. + ////////////////////////////////////////////////////////////////////// + + // PeerService records the service name of the peer. 
+ PeerService = stringTagName("peer.service") + + // PeerAddress records the address name of the peer. This may be a "ip:port", + // a bare "hostname", a FQDN or even a database DSN substring + // like "mysql://username@127.0.0.1:3306/dbname" + PeerAddress = stringTagName("peer.address") + + // PeerHostname records the host name of the peer + PeerHostname = stringTagName("peer.hostname") + + // PeerHostIPv4 records IP v4 host address of the peer + PeerHostIPv4 = uint32TagName("peer.ipv4") + + // PeerHostIPv6 records IP v6 host address of the peer + PeerHostIPv6 = stringTagName("peer.ipv6") + + // PeerPort records port number of the peer + PeerPort = uint16TagName("peer.port") + + ////////////////////////////////////////////////////////////////////// + // HTTP Tags + ////////////////////////////////////////////////////////////////////// + + // HTTPUrl should be the URL of the request being handled in this segment + // of the trace, in standard URI format. The protocol is optional. + HTTPUrl = stringTagName("http.url") + + // HTTPMethod is the HTTP method of the request, and is case-insensitive. + HTTPMethod = stringTagName("http.method") + + // HTTPStatusCode is the numeric HTTP status code (200, 404, etc) of the + // HTTP response. + HTTPStatusCode = uint16TagName("http.status_code") + + ////////////////////////////////////////////////////////////////////// + // DB Tags + ////////////////////////////////////////////////////////////////////// + + // DBInstance is database instance name. + DBInstance = stringTagName("db.instance") + + // DBStatement is a database statement for the given database type. + // It can be a query or a prepared statement (i.e., before substitution). + DBStatement = stringTagName("db.statement") + + // DBType is a database type. For any SQL database, "sql". + // For others, the lower-case database category, e.g. "redis" + DBType = stringTagName("db.type") + + // DBUser is a username for accessing database. 
+ DBUser = stringTagName("db.user") + + ////////////////////////////////////////////////////////////////////// + // Message Bus Tag + ////////////////////////////////////////////////////////////////////// + + // MessageBusDestination is an address at which messages can be exchanged + MessageBusDestination = stringTagName("message_bus.destination") + + ////////////////////////////////////////////////////////////////////// + // Error Tag + ////////////////////////////////////////////////////////////////////// + + // Error indicates that operation represented by the span resulted in an error. + Error = boolTagName("error") +) + +// --- + +// SpanKindEnum represents common span types +type SpanKindEnum string + +type spanKindTagName string + +// Set adds a string tag to the `span` +func (tag spanKindTagName) Set(span opentracing.Span, value SpanKindEnum) { + span.SetTag(string(tag), value) +} + +type rpcServerOption struct { + clientContext opentracing.SpanContext +} + +func (r rpcServerOption) Apply(o *opentracing.StartSpanOptions) { + if r.clientContext != nil { + opentracing.ChildOf(r.clientContext).Apply(o) + } + SpanKindRPCServer.Apply(o) +} + +// RPCServerOption returns a StartSpanOption appropriate for an RPC server span +// with `client` representing the metadata for the remote peer Span if available. +// In case client == nil, due to the client not being instrumented, this RPC +// server span will be a root span. 
+func RPCServerOption(client opentracing.SpanContext) opentracing.StartSpanOption { + return rpcServerOption{client} +} + +// --- + +type stringTagName string + +// Set adds a string tag to the `span` +func (tag stringTagName) Set(span opentracing.Span, value string) { + span.SetTag(string(tag), value) +} + +// --- + +type uint32TagName string + +// Set adds a uint32 tag to the `span` +func (tag uint32TagName) Set(span opentracing.Span, value uint32) { + span.SetTag(string(tag), value) +} + +// --- + +type uint16TagName string + +// Set adds a uint16 tag to the `span` +func (tag uint16TagName) Set(span opentracing.Span, value uint16) { + span.SetTag(string(tag), value) +} + +// --- + +type boolTagName string + +// Add adds a bool tag to the `span` +func (tag boolTagName) Set(span opentracing.Span, value bool) { + span.SetTag(string(tag), value) +} diff --git a/vendor/github.com/opentracing/opentracing-go/ext/tags_test.go b/vendor/github.com/opentracing/opentracing-go/ext/tags_test.go new file mode 100644 index 00000000..ea9af335 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/ext/tags_test.go @@ -0,0 +1,148 @@ +package ext_test + +import ( + "testing" + + "github.com/stretchr/testify/assert" + + "github.com/opentracing/opentracing-go" + "github.com/opentracing/opentracing-go/ext" + "github.com/opentracing/opentracing-go/mocktracer" +) + +func TestPeerTags(t *testing.T) { + if ext.PeerService != "peer.service" { + t.Fatalf("Invalid PeerService %v", ext.PeerService) + } + tracer := mocktracer.New() + span := tracer.StartSpan("my-trace") + ext.PeerService.Set(span, "my-service") + ext.PeerAddress.Set(span, "my-hostname:8080") + ext.PeerHostname.Set(span, "my-hostname") + ext.PeerHostIPv4.Set(span, uint32(127<<24|1)) + ext.PeerHostIPv6.Set(span, "::") + ext.PeerPort.Set(span, uint16(8080)) + ext.SamplingPriority.Set(span, uint16(1)) + ext.SpanKind.Set(span, ext.SpanKindRPCServerEnum) + ext.SpanKindRPCClient.Set(span) + span.Finish() + + rawSpan := 
tracer.FinishedSpans()[0] + assert.Equal(t, map[string]interface{}{ + "peer.service": "my-service", + "peer.address": "my-hostname:8080", + "peer.hostname": "my-hostname", + "peer.ipv4": uint32(127<<24 | 1), + "peer.ipv6": "::", + "peer.port": uint16(8080), + "span.kind": ext.SpanKindRPCClientEnum, + }, rawSpan.Tags()) + assert.True(t, span.Context().(mocktracer.MockSpanContext).Sampled) + ext.SamplingPriority.Set(span, uint16(0)) + assert.False(t, span.Context().(mocktracer.MockSpanContext).Sampled) +} + +func TestHTTPTags(t *testing.T) { + tracer := mocktracer.New() + span := tracer.StartSpan("my-trace", ext.SpanKindRPCServer) + ext.HTTPUrl.Set(span, "test.biz/uri?protocol=false") + ext.HTTPMethod.Set(span, "GET") + ext.HTTPStatusCode.Set(span, 301) + span.Finish() + + rawSpan := tracer.FinishedSpans()[0] + assert.Equal(t, map[string]interface{}{ + "http.url": "test.biz/uri?protocol=false", + "http.method": "GET", + "http.status_code": uint16(301), + "span.kind": ext.SpanKindRPCServerEnum, + }, rawSpan.Tags()) +} + +func TestDBTags(t *testing.T) { + tracer := mocktracer.New() + span := tracer.StartSpan("my-trace", ext.SpanKindRPCClient) + ext.DBInstance.Set(span, "127.0.0.1:3306/customers") + ext.DBStatement.Set(span, "SELECT * FROM user_table") + ext.DBType.Set(span, "sql") + ext.DBUser.Set(span, "customer_user") + span.Finish() + + rawSpan := tracer.FinishedSpans()[0] + assert.Equal(t, map[string]interface{}{ + "db.instance": "127.0.0.1:3306/customers", + "db.statement": "SELECT * FROM user_table", + "db.type": "sql", + "db.user": "customer_user", + "span.kind": ext.SpanKindRPCClientEnum, + }, rawSpan.Tags()) +} + +func TestMiscTags(t *testing.T) { + tracer := mocktracer.New() + span := tracer.StartSpan("my-trace") + ext.Component.Set(span, "my-awesome-library") + ext.SamplingPriority.Set(span, 1) + ext.Error.Set(span, true) + + span.Finish() + + rawSpan := tracer.FinishedSpans()[0] + assert.Equal(t, map[string]interface{}{ + "component": "my-awesome-library", 
+ "error": true, + }, rawSpan.Tags()) +} + +func TestRPCServerOption(t *testing.T) { + tracer := mocktracer.New() + parent := tracer.StartSpan("my-trace") + parent.SetBaggageItem("bag", "gage") + + carrier := opentracing.HTTPHeadersCarrier{} + err := tracer.Inject(parent.Context(), opentracing.HTTPHeaders, carrier) + if err != nil { + t.Fatal(err) + } + + parCtx, err := tracer.Extract(opentracing.HTTPHeaders, carrier) + if err != nil { + t.Fatal(err) + } + + tracer.StartSpan("my-child", ext.RPCServerOption(parCtx)).Finish() + + rawSpan := tracer.FinishedSpans()[0] + assert.Equal(t, map[string]interface{}{ + "span.kind": ext.SpanKindRPCServerEnum, + }, rawSpan.Tags()) + assert.Equal(t, map[string]string{ + "bag": "gage", + }, rawSpan.Context().(mocktracer.MockSpanContext).Baggage) +} + +func TestMessageBusProducerTags(t *testing.T) { + tracer := mocktracer.New() + span := tracer.StartSpan("my-trace", ext.SpanKindProducer) + ext.MessageBusDestination.Set(span, "topic name") + span.Finish() + + rawSpan := tracer.FinishedSpans()[0] + assert.Equal(t, map[string]interface{}{ + "message_bus.destination": "topic name", + "span.kind": ext.SpanKindProducerEnum, + }, rawSpan.Tags()) +} + +func TestMessageBusConsumerTags(t *testing.T) { + tracer := mocktracer.New() + span := tracer.StartSpan("my-trace", ext.SpanKindConsumer) + ext.MessageBusDestination.Set(span, "topic name") + span.Finish() + + rawSpan := tracer.FinishedSpans()[0] + assert.Equal(t, map[string]interface{}{ + "message_bus.destination": "topic name", + "span.kind": ext.SpanKindConsumerEnum, + }, rawSpan.Tags()) +} diff --git a/vendor/github.com/opentracing/opentracing-go/globaltracer.go b/vendor/github.com/opentracing/opentracing-go/globaltracer.go new file mode 100644 index 00000000..8c8e793f --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/globaltracer.go @@ -0,0 +1,32 @@ +package opentracing + +var ( + globalTracer Tracer = NoopTracer{} +) + +// SetGlobalTracer sets the [singleton] 
opentracing.Tracer returned by +// GlobalTracer(). Those who use GlobalTracer (rather than directly manage an +// opentracing.Tracer instance) should call SetGlobalTracer as early as +// possible in main(), prior to calling the `StartSpan` global func below. +// Prior to calling `SetGlobalTracer`, any Spans started via the `StartSpan` +// (etc) globals are noops. +func SetGlobalTracer(tracer Tracer) { + globalTracer = tracer +} + +// GlobalTracer returns the global singleton `Tracer` implementation. +// Before `SetGlobalTracer()` is called, the `GlobalTracer()` is a noop +// implementation that drops all data handed to it. +func GlobalTracer() Tracer { + return globalTracer +} + +// StartSpan defers to `Tracer.StartSpan`. See `GlobalTracer()`. +func StartSpan(operationName string, opts ...StartSpanOption) Span { + return globalTracer.StartSpan(operationName, opts...) +} + +// InitGlobalTracer is deprecated. Please use SetGlobalTracer. +func InitGlobalTracer(tracer Tracer) { + SetGlobalTracer(tracer) +} diff --git a/vendor/github.com/opentracing/opentracing-go/gocontext.go b/vendor/github.com/opentracing/opentracing-go/gocontext.go new file mode 100644 index 00000000..222a6520 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/gocontext.go @@ -0,0 +1,57 @@ +package opentracing + +import "golang.org/x/net/context" + +type contextKey struct{} + +var activeSpanKey = contextKey{} + +// ContextWithSpan returns a new `context.Context` that holds a reference to +// `span`'s SpanContext. +func ContextWithSpan(ctx context.Context, span Span) context.Context { + return context.WithValue(ctx, activeSpanKey, span) +} + +// SpanFromContext returns the `Span` previously associated with `ctx`, or +// `nil` if no such `Span` could be found. +// +// NOTE: context.Context != SpanContext: the former is Go's intra-process +// context propagation mechanism, and the latter houses OpenTracing's per-Span +// identity and baggage information. 
+func SpanFromContext(ctx context.Context) Span { + val := ctx.Value(activeSpanKey) + if sp, ok := val.(Span); ok { + return sp + } + return nil +} + +// StartSpanFromContext starts and returns a Span with `operationName`, using +// any Span found within `ctx` as a ChildOfRef. If no such parent could be +// found, StartSpanFromContext creates a root (parentless) Span. +// +// The second return value is a context.Context object built around the +// returned Span. +// +// Example usage: +// +// SomeFunction(ctx context.Context, ...) { +// sp, ctx := opentracing.StartSpanFromContext(ctx, "SomeFunction") +// defer sp.Finish() +// ... +// } +func StartSpanFromContext(ctx context.Context, operationName string, opts ...StartSpanOption) (Span, context.Context) { + return startSpanFromContextWithTracer(ctx, GlobalTracer(), operationName, opts...) +} + +// startSpanFromContextWithTracer is factored out for testing purposes. +func startSpanFromContextWithTracer(ctx context.Context, tracer Tracer, operationName string, opts ...StartSpanOption) (Span, context.Context) { + var span Span + if parentSpan := SpanFromContext(ctx); parentSpan != nil { + opts = append(opts, ChildOf(parentSpan.Context())) + span = tracer.StartSpan(operationName, opts...) + } else { + span = tracer.StartSpan(operationName, opts...) 
+ } + return span, ContextWithSpan(ctx, span) +} diff --git a/vendor/github.com/opentracing/opentracing-go/gocontext_test.go b/vendor/github.com/opentracing/opentracing-go/gocontext_test.go new file mode 100644 index 00000000..65c01308 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/gocontext_test.go @@ -0,0 +1,81 @@ +package opentracing + +import ( + "testing" + "time" + + "github.com/stretchr/testify/assert" + "golang.org/x/net/context" +) + +func TestContextWithSpan(t *testing.T) { + span := &noopSpan{} + ctx := ContextWithSpan(context.Background(), span) + span2 := SpanFromContext(ctx) + if span != span2 { + t.Errorf("Not the same span returned from context, expected=%+v, actual=%+v", span, span2) + } + + ctx = context.Background() + span2 = SpanFromContext(ctx) + if span2 != nil { + t.Errorf("Expected nil span, found %+v", span2) + } + + ctx = ContextWithSpan(ctx, span) + span2 = SpanFromContext(ctx) + if span != span2 { + t.Errorf("Not the same span returned from context, expected=%+v, actual=%+v", span, span2) + } +} + +func TestStartSpanFromContext(t *testing.T) { + testTracer := testTracer{} + + // Test the case where there *is* a Span in the Context. + { + parentSpan := &testSpan{} + parentCtx := ContextWithSpan(context.Background(), parentSpan) + childSpan, childCtx := startSpanFromContextWithTracer(parentCtx, testTracer, "child") + if !childSpan.Context().(testSpanContext).HasParent { + t.Errorf("Failed to find parent: %v", childSpan) + } + if !childSpan.(testSpan).Equal(SpanFromContext(childCtx)) { + t.Errorf("Unable to find child span in context: %v", childCtx) + } + } + + // Test the case where there *is not* a Span in the Context. 
+ { + emptyCtx := context.Background() + childSpan, childCtx := startSpanFromContextWithTracer(emptyCtx, testTracer, "child") + if childSpan.Context().(testSpanContext).HasParent { + t.Errorf("Should not have found parent: %v", childSpan) + } + if !childSpan.(testSpan).Equal(SpanFromContext(childCtx)) { + t.Errorf("Unable to find child span in context: %v", childCtx) + } + } +} + +func TestStartSpanFromContextOptions(t *testing.T) { + testTracer := testTracer{} + + // Test options are passed to tracer + + startTime := time.Now().Add(-10 * time.Second) // ten seconds ago + span, ctx := startSpanFromContextWithTracer( + context.Background(), testTracer, "parent", StartTime(startTime), Tag{"component", "test"}) + + assert.Equal(t, "test", span.(testSpan).Tags["component"]) + assert.Equal(t, startTime, span.(testSpan).StartTime) + + // Test it also works for a child span + + childStartTime := startTime.Add(3 * time.Second) + childSpan, _ := startSpanFromContextWithTracer( + ctx, testTracer, "child", StartTime(childStartTime)) + + assert.Equal(t, childSpan.(testSpan).Tags["component"], nil) + assert.Equal(t, childSpan.(testSpan).StartTime, childStartTime) +} diff --git a/vendor/github.com/opentracing/opentracing-go/log/field.go b/vendor/github.com/opentracing/opentracing-go/log/field.go new file mode 100644 index 00000000..d2cd39a1 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/log/field.go @@ -0,0 +1,245 @@ +package log + +import ( + "fmt" + "math" +) + +type fieldType int + +const ( + stringType fieldType = iota + boolType + intType + int32Type + uint32Type + int64Type + uint64Type + float32Type + float64Type + errorType + objectType + lazyLoggerType +) + +// Field instances are constructed via LogBool, LogString, and so on. +// Tracing implementations may then handle them via the Field.Marshal +// method. 
+// +// "heavily influenced by" (i.e., partially stolen from) +// https://github.com/uber-go/zap +type Field struct { + key string + fieldType fieldType + numericVal int64 + stringVal string + interfaceVal interface{} +} + +// String adds a string-valued key:value pair to a Span.LogFields() record +func String(key, val string) Field { + return Field{ + key: key, + fieldType: stringType, + stringVal: val, + } +} + +// Bool adds a bool-valued key:value pair to a Span.LogFields() record +func Bool(key string, val bool) Field { + var numericVal int64 + if val { + numericVal = 1 + } + return Field{ + key: key, + fieldType: boolType, + numericVal: numericVal, + } +} + +// Int adds an int-valued key:value pair to a Span.LogFields() record +func Int(key string, val int) Field { + return Field{ + key: key, + fieldType: intType, + numericVal: int64(val), + } +} + +// Int32 adds an int32-valued key:value pair to a Span.LogFields() record +func Int32(key string, val int32) Field { + return Field{ + key: key, + fieldType: int32Type, + numericVal: int64(val), + } +} + +// Int64 adds an int64-valued key:value pair to a Span.LogFields() record +func Int64(key string, val int64) Field { + return Field{ + key: key, + fieldType: int64Type, + numericVal: val, + } +} + +// Uint32 adds a uint32-valued key:value pair to a Span.LogFields() record +func Uint32(key string, val uint32) Field { + return Field{ + key: key, + fieldType: uint32Type, + numericVal: int64(val), + } +} + +// Uint64 adds a uint64-valued key:value pair to a Span.LogFields() record +func Uint64(key string, val uint64) Field { + return Field{ + key: key, + fieldType: uint64Type, + numericVal: int64(val), + } +} + +// Float32 adds a float32-valued key:value pair to a Span.LogFields() record +func Float32(key string, val float32) Field { + return Field{ + key: key, + fieldType: float32Type, + numericVal: int64(math.Float32bits(val)), + } +} + +// Float64 adds a float64-valued key:value pair to a Span.LogFields() record 
+func Float64(key string, val float64) Field { + return Field{ + key: key, + fieldType: float64Type, + numericVal: int64(math.Float64bits(val)), + } +} + +// Error adds an error with the key "error" to a Span.LogFields() record +func Error(err error) Field { + return Field{ + key: "error", + fieldType: errorType, + interfaceVal: err, + } +} + +// Object adds an object-valued key:value pair to a Span.LogFields() record +func Object(key string, obj interface{}) Field { + return Field{ + key: key, + fieldType: objectType, + interfaceVal: obj, + } +} + +// LazyLogger allows for user-defined, late-bound logging of arbitrary data +type LazyLogger func(fv Encoder) + +// Lazy adds a LazyLogger to a Span.LogFields() record; the tracing +// implementation will call the LazyLogger function at an indefinite time in +// the future (after Lazy() returns). +func Lazy(ll LazyLogger) Field { + return Field{ + fieldType: lazyLoggerType, + interfaceVal: ll, + } +} + +// Encoder allows access to the contents of a Field (via a call to +// Field.Marshal). +// +// Tracer implementations typically provide an implementation of Encoder; +// OpenTracing callers typically do not need to concern themselves with it. +type Encoder interface { + EmitString(key, value string) + EmitBool(key string, value bool) + EmitInt(key string, value int) + EmitInt32(key string, value int32) + EmitInt64(key string, value int64) + EmitUint32(key string, value uint32) + EmitUint64(key string, value uint64) + EmitFloat32(key string, value float32) + EmitFloat64(key string, value float64) + EmitObject(key string, value interface{}) + EmitLazyLogger(value LazyLogger) +} + +// Marshal passes a Field instance through to the appropriate +// field-type-specific method of an Encoder. 
+func (lf Field) Marshal(visitor Encoder) { + switch lf.fieldType { + case stringType: + visitor.EmitString(lf.key, lf.stringVal) + case boolType: + visitor.EmitBool(lf.key, lf.numericVal != 0) + case intType: + visitor.EmitInt(lf.key, int(lf.numericVal)) + case int32Type: + visitor.EmitInt32(lf.key, int32(lf.numericVal)) + case int64Type: + visitor.EmitInt64(lf.key, int64(lf.numericVal)) + case uint32Type: + visitor.EmitUint32(lf.key, uint32(lf.numericVal)) + case uint64Type: + visitor.EmitUint64(lf.key, uint64(lf.numericVal)) + case float32Type: + visitor.EmitFloat32(lf.key, math.Float32frombits(uint32(lf.numericVal))) + case float64Type: + visitor.EmitFloat64(lf.key, math.Float64frombits(uint64(lf.numericVal))) + case errorType: + if err, ok := lf.interfaceVal.(error); ok { + visitor.EmitString(lf.key, err.Error()) + } else { + visitor.EmitString(lf.key, "") + } + case objectType: + visitor.EmitObject(lf.key, lf.interfaceVal) + case lazyLoggerType: + visitor.EmitLazyLogger(lf.interfaceVal.(LazyLogger)) + } +} + +// Key returns the field's key. +func (lf Field) Key() string { + return lf.key +} + +// Value returns the field's value as interface{}. +func (lf Field) Value() interface{} { + switch lf.fieldType { + case stringType: + return lf.stringVal + case boolType: + return lf.numericVal != 0 + case intType: + return int(lf.numericVal) + case int32Type: + return int32(lf.numericVal) + case int64Type: + return int64(lf.numericVal) + case uint32Type: + return uint32(lf.numericVal) + case uint64Type: + return uint64(lf.numericVal) + case float32Type: + return math.Float32frombits(uint32(lf.numericVal)) + case float64Type: + return math.Float64frombits(uint64(lf.numericVal)) + case errorType, objectType, lazyLoggerType: + return lf.interfaceVal + default: + return nil + } +} + +// String returns a string representation of the key and value. 
+func (lf Field) String() string { + return fmt.Sprint(lf.key, ":", lf.Value()) +} diff --git a/vendor/github.com/opentracing/opentracing-go/log/field_test.go b/vendor/github.com/opentracing/opentracing-go/log/field_test.go new file mode 100644 index 00000000..8304f182 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/log/field_test.go @@ -0,0 +1,39 @@ +package log + +import ( + "fmt" + "testing" +) + +func TestFieldString(t *testing.T) { + testCases := []struct { + field Field + expected string + }{ + { + field: String("key", "value"), + expected: "key:value", + }, + { + field: Bool("key", true), + expected: "key:true", + }, + { + field: Int("key", 5), + expected: "key:5", + }, + { + field: Error(fmt.Errorf("err msg")), + expected: "error:err msg", + }, + { + field: Error(nil), + expected: "error:", + }, + } + for i, tc := range testCases { + if str := tc.field.String(); str != tc.expected { + t.Errorf("%d: expected '%s', got '%s'", i, tc.expected, str) + } + } +} diff --git a/vendor/github.com/opentracing/opentracing-go/log/util.go b/vendor/github.com/opentracing/opentracing-go/log/util.go new file mode 100644 index 00000000..3832feb5 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/log/util.go @@ -0,0 +1,54 @@ +package log + +import "fmt" + +// InterleavedKVToFields converts keyValues a la Span.LogKV() to a Field slice +// a la Span.LogFields(). 
+func InterleavedKVToFields(keyValues ...interface{}) ([]Field, error) { + if len(keyValues)%2 != 0 { + return nil, fmt.Errorf("non-even keyValues len: %d", len(keyValues)) + } + fields := make([]Field, len(keyValues)/2) + for i := 0; i*2 < len(keyValues); i++ { + key, ok := keyValues[i*2].(string) + if !ok { + return nil, fmt.Errorf( + "non-string key (pair #%d): %T", + i, keyValues[i*2]) + } + switch typedVal := keyValues[i*2+1].(type) { + case bool: + fields[i] = Bool(key, typedVal) + case string: + fields[i] = String(key, typedVal) + case int: + fields[i] = Int(key, typedVal) + case int8: + fields[i] = Int32(key, int32(typedVal)) + case int16: + fields[i] = Int32(key, int32(typedVal)) + case int32: + fields[i] = Int32(key, typedVal) + case int64: + fields[i] = Int64(key, typedVal) + case uint: + fields[i] = Uint64(key, uint64(typedVal)) + case uint64: + fields[i] = Uint64(key, typedVal) + case uint8: + fields[i] = Uint32(key, uint32(typedVal)) + case uint16: + fields[i] = Uint32(key, uint32(typedVal)) + case uint32: + fields[i] = Uint32(key, typedVal) + case float32: + fields[i] = Float32(key, typedVal) + case float64: + fields[i] = Float64(key, typedVal) + default: + // When in doubt, coerce to a string + fields[i] = String(key, fmt.Sprint(typedVal)) + } + } + return fields, nil +} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go new file mode 100644 index 00000000..2ce96d9d --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocklogrecord.go @@ -0,0 +1,105 @@ +package mocktracer + +import ( + "fmt" + "reflect" + "time" + + "github.com/opentracing/opentracing-go/log" +) + +// MockLogRecord represents data logged to a Span via Span.LogFields or +// Span.LogKV. +type MockLogRecord struct { + Timestamp time.Time + Fields []MockKeyValue +} + +// MockKeyValue represents a single key:value pair. 
+type MockKeyValue struct { + Key string + + // All MockLogRecord values are coerced to strings via fmt.Sprint(), though + // we retain their type separately. + ValueKind reflect.Kind + ValueString string +} + +// EmitString belongs to the log.Encoder interface +func (m *MockKeyValue) EmitString(key, value string) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitBool belongs to the log.Encoder interface +func (m *MockKeyValue) EmitBool(key string, value bool) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitInt belongs to the log.Encoder interface +func (m *MockKeyValue) EmitInt(key string, value int) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitInt32 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitInt32(key string, value int32) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitInt64 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitInt64(key string, value int64) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitUint32 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitUint32(key string, value uint32) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitUint64 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitUint64(key string, value uint64) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitFloat32 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitFloat32(key string, value float32) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitFloat64 belongs to the log.Encoder interface +func (m *MockKeyValue) EmitFloat64(key 
string, value float64) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitObject belongs to the log.Encoder interface +func (m *MockKeyValue) EmitObject(key string, value interface{}) { + m.Key = key + m.ValueKind = reflect.TypeOf(value).Kind() + m.ValueString = fmt.Sprint(value) +} + +// EmitLazyLogger belongs to the log.Encoder interface +func (m *MockKeyValue) EmitLazyLogger(value log.LazyLogger) { + var meta MockKeyValue + value(&meta) + m.Key = meta.Key + m.ValueKind = meta.ValueKind + m.ValueString = meta.ValueString +} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go new file mode 100644 index 00000000..69defda2 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/mocktracer/mockspan.go @@ -0,0 +1,282 @@ +package mocktracer + +import ( + "fmt" + "sync" + "sync/atomic" + "time" + + "github.com/opentracing/opentracing-go" + "github.com/opentracing/opentracing-go/ext" + "github.com/opentracing/opentracing-go/log" +) + +// MockSpanContext is an opentracing.SpanContext implementation. +// +// It is entirely unsuitable for production use, but appropriate for tests +// that want to verify tracing behavior in other frameworks/applications. +// +// By default all spans have Sampled=true flag, unless {"sampling.priority": 0} +// tag is set. +type MockSpanContext struct { + TraceID int + SpanID int + Sampled bool + Baggage map[string]string +} + +var mockIDSource = uint32(42) + +func nextMockID() int { + return int(atomic.AddUint32(&mockIDSource, 1)) +} + +// ForeachBaggageItem belongs to the SpanContext interface +func (c MockSpanContext) ForeachBaggageItem(handler func(k, v string) bool) { + for k, v := range c.Baggage { + if !handler(k, v) { + break + } + } +} + +// WithBaggageItem creates a new context with an extra baggage item. 
+func (c MockSpanContext) WithBaggageItem(key, value string) MockSpanContext { + var newBaggage map[string]string + if c.Baggage == nil { + newBaggage = map[string]string{key: value} + } else { + newBaggage = make(map[string]string, len(c.Baggage)+1) + for k, v := range c.Baggage { + newBaggage[k] = v + } + newBaggage[key] = value + } + // Use positional parameters so the compiler will help catch new fields. + return MockSpanContext{c.TraceID, c.SpanID, c.Sampled, newBaggage} +} + +// MockSpan is an opentracing.Span implementation that exports its internal +// state for testing purposes. +type MockSpan struct { + sync.RWMutex + + ParentID int + + OperationName string + StartTime time.Time + FinishTime time.Time + + // All of the below are protected by the embedded RWMutex. + SpanContext MockSpanContext + tags map[string]interface{} + logs []MockLogRecord + tracer *MockTracer +} + +func newMockSpan(t *MockTracer, name string, opts opentracing.StartSpanOptions) *MockSpan { + tags := opts.Tags + if tags == nil { + tags = map[string]interface{}{} + } + traceID := nextMockID() + parentID := int(0) + var baggage map[string]string + sampled := true + if len(opts.References) > 0 { + traceID = opts.References[0].ReferencedContext.(MockSpanContext).TraceID + parentID = opts.References[0].ReferencedContext.(MockSpanContext).SpanID + sampled = opts.References[0].ReferencedContext.(MockSpanContext).Sampled + baggage = opts.References[0].ReferencedContext.(MockSpanContext).Baggage + } + spanContext := MockSpanContext{traceID, nextMockID(), sampled, baggage} + startTime := opts.StartTime + if startTime.IsZero() { + startTime = time.Now() + } + return &MockSpan{ + ParentID: parentID, + OperationName: name, + StartTime: startTime, + tags: tags, + logs: []MockLogRecord{}, + SpanContext: spanContext, + + tracer: t, + } +} + +// Tags returns a copy of tags accumulated by the span so far +func (s *MockSpan) Tags() map[string]interface{} { + s.RLock() + defer s.RUnlock() + tags := 
make(map[string]interface{}) + for k, v := range s.tags { + tags[k] = v + } + return tags +} + +// Tag returns a single tag +func (s *MockSpan) Tag(k string) interface{} { + s.RLock() + defer s.RUnlock() + return s.tags[k] +} + +// Logs returns a copy of logs accumulated in the span so far +func (s *MockSpan) Logs() []MockLogRecord { + s.RLock() + defer s.RUnlock() + logs := make([]MockLogRecord, len(s.logs)) + copy(logs, s.logs) + return logs +} + +// Context belongs to the Span interface +func (s *MockSpan) Context() opentracing.SpanContext { + return s.SpanContext +} + +// SetTag belongs to the Span interface +func (s *MockSpan) SetTag(key string, value interface{}) opentracing.Span { + s.Lock() + defer s.Unlock() + if key == string(ext.SamplingPriority) { + if v, ok := value.(uint16); ok { + s.SpanContext.Sampled = v > 0 + return s + } + if v, ok := value.(int); ok { + s.SpanContext.Sampled = v > 0 + return s + } + } + s.tags[key] = value + return s +} + +// SetBaggageItem belongs to the Span interface +func (s *MockSpan) SetBaggageItem(key, val string) opentracing.Span { + s.Lock() + defer s.Unlock() + s.SpanContext = s.SpanContext.WithBaggageItem(key, val) + return s +} + +// BaggageItem belongs to the Span interface +func (s *MockSpan) BaggageItem(key string) string { + s.RLock() + defer s.RUnlock() + return s.SpanContext.Baggage[key] +} + +// Finish belongs to the Span interface +func (s *MockSpan) Finish() { + s.Lock() + s.FinishTime = time.Now() + s.Unlock() + s.tracer.recordSpan(s) +} + +// FinishWithOptions belongs to the Span interface +func (s *MockSpan) FinishWithOptions(opts opentracing.FinishOptions) { + s.Lock() + s.FinishTime = opts.FinishTime + s.Unlock() + + // Handle any late-bound LogRecords. + for _, lr := range opts.LogRecords { + s.logFieldsWithTimestamp(lr.Timestamp, lr.Fields...) + } + // Handle (deprecated) BulkLogData. 
+ for _, ld := range opts.BulkLogData { + if ld.Payload != nil { + s.logFieldsWithTimestamp( + ld.Timestamp, + log.String("event", ld.Event), + log.Object("payload", ld.Payload)) + } else { + s.logFieldsWithTimestamp( + ld.Timestamp, + log.String("event", ld.Event)) + } + } + + s.tracer.recordSpan(s) +} + +// String allows printing span for debugging +func (s *MockSpan) String() string { + return fmt.Sprintf( + "traceId=%d, spanId=%d, parentId=%d, sampled=%t, name=%s", + s.SpanContext.TraceID, s.SpanContext.SpanID, s.ParentID, + s.SpanContext.Sampled, s.OperationName) +} + +// LogFields belongs to the Span interface +func (s *MockSpan) LogFields(fields ...log.Field) { + s.logFieldsWithTimestamp(time.Now(), fields...) +} + +// The caller MUST NOT hold s.Lock +func (s *MockSpan) logFieldsWithTimestamp(ts time.Time, fields ...log.Field) { + lr := MockLogRecord{ + Timestamp: ts, + Fields: make([]MockKeyValue, len(fields)), + } + for i, f := range fields { + outField := &(lr.Fields[i]) + f.Marshal(outField) + } + + s.Lock() + defer s.Unlock() + s.logs = append(s.logs, lr) +} + +// LogKV belongs to the Span interface. +// +// This implementations coerces all "values" to strings, though that is not +// something all implementations need to do. Indeed, a motivated person can and +// probably should have this do a typed switch on the values. +func (s *MockSpan) LogKV(keyValues ...interface{}) { + if len(keyValues)%2 != 0 { + s.LogFields(log.Error(fmt.Errorf("Non-even keyValues len: %v", len(keyValues)))) + return + } + fields, err := log.InterleavedKVToFields(keyValues...) + if err != nil { + s.LogFields(log.Error(err), log.String("function", "LogKV")) + return + } + s.LogFields(fields...) 
+} + +// LogEvent belongs to the Span interface +func (s *MockSpan) LogEvent(event string) { + s.LogFields(log.String("event", event)) +} + +// LogEventWithPayload belongs to the Span interface +func (s *MockSpan) LogEventWithPayload(event string, payload interface{}) { + s.LogFields(log.String("event", event), log.Object("payload", payload)) +} + +// Log belongs to the Span interface +func (s *MockSpan) Log(data opentracing.LogData) { + panic("MockSpan.Log() no longer supported") +} + +// SetOperationName belongs to the Span interface +func (s *MockSpan) SetOperationName(operationName string) opentracing.Span { + s.Lock() + defer s.Unlock() + s.OperationName = operationName + return s +} + +// Tracer belongs to the Span interface +func (s *MockSpan) Tracer() opentracing.Tracer { + return s.tracer +} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go new file mode 100644 index 00000000..a74c1458 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer.go @@ -0,0 +1,105 @@ +package mocktracer + +import ( + "sync" + + "github.com/opentracing/opentracing-go" +) + +// New returns a MockTracer opentracing.Tracer implementation that's intended +// to facilitate tests of OpenTracing instrumentation. 
+func New() *MockTracer { + t := &MockTracer{ + finishedSpans: []*MockSpan{}, + injectors: make(map[interface{}]Injector), + extractors: make(map[interface{}]Extractor), + } + + // register default injectors/extractors + textPropagator := new(TextMapPropagator) + t.RegisterInjector(opentracing.TextMap, textPropagator) + t.RegisterExtractor(opentracing.TextMap, textPropagator) + + httpPropagator := &TextMapPropagator{HTTPHeaders: true} + t.RegisterInjector(opentracing.HTTPHeaders, httpPropagator) + t.RegisterExtractor(opentracing.HTTPHeaders, httpPropagator) + + return t +} + +// MockTracer is only intended for testing OpenTracing instrumentation. +// +// It is entirely unsuitable for production use, but appropriate for tests +// that want to verify tracing behavior in other frameworks/applications. +type MockTracer struct { + sync.RWMutex + finishedSpans []*MockSpan + injectors map[interface{}]Injector + extractors map[interface{}]Extractor +} + +// FinishedSpans returns all spans that have been Finish()'ed since the +// MockTracer was constructed or since the last call to its Reset() method. +func (t *MockTracer) FinishedSpans() []*MockSpan { + t.RLock() + defer t.RUnlock() + spans := make([]*MockSpan, len(t.finishedSpans)) + copy(spans, t.finishedSpans) + return spans +} + +// Reset clears the internally accumulated finished spans. Note that any +// extant MockSpans will still append to finishedSpans when they Finish(), +// even after a call to Reset(). +func (t *MockTracer) Reset() { + t.Lock() + defer t.Unlock() + t.finishedSpans = []*MockSpan{} +} + +// StartSpan belongs to the Tracer interface. 
+func (t *MockTracer) StartSpan(operationName string, opts ...opentracing.StartSpanOption) opentracing.Span { + sso := opentracing.StartSpanOptions{} + for _, o := range opts { + o.Apply(&sso) + } + return newMockSpan(t, operationName, sso) +} + +// RegisterInjector registers injector for given format +func (t *MockTracer) RegisterInjector(format interface{}, injector Injector) { + t.injectors[format] = injector +} + +// RegisterExtractor registers extractor for given format +func (t *MockTracer) RegisterExtractor(format interface{}, extractor Extractor) { + t.extractors[format] = extractor +} + +// Inject belongs to the Tracer interface. +func (t *MockTracer) Inject(sm opentracing.SpanContext, format interface{}, carrier interface{}) error { + spanContext, ok := sm.(MockSpanContext) + if !ok { + return opentracing.ErrInvalidCarrier + } + injector, ok := t.injectors[format] + if !ok { + return opentracing.ErrUnsupportedFormat + } + return injector.Inject(spanContext, carrier) +} + +// Extract belongs to the Tracer interface. 
+func (t *MockTracer) Extract(format interface{}, carrier interface{}) (opentracing.SpanContext, error) { + extractor, ok := t.extractors[format] + if !ok { + return nil, opentracing.ErrUnsupportedFormat + } + return extractor.Extract(carrier) +} + +func (t *MockTracer) recordSpan(span *MockSpan) { + t.Lock() + defer t.Unlock() + t.finishedSpans = append(t.finishedSpans, span) +} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer_test.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer_test.go new file mode 100644 index 00000000..63d01134 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/mocktracer/mocktracer_test.go @@ -0,0 +1,268 @@ +package mocktracer + +import ( + "net/http" + "reflect" + "testing" + "time" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" + + "github.com/opentracing/opentracing-go" + "github.com/opentracing/opentracing-go/ext" + "github.com/opentracing/opentracing-go/log" +) + +func TestMockTracer_StartSpan(t *testing.T) { + tracer := New() + span1 := tracer.StartSpan( + "a", + opentracing.Tags(map[string]interface{}{"x": "y"})) + + span2 := span1.Tracer().StartSpan( + "", opentracing.ChildOf(span1.Context())) + span2.Finish() + span1.Finish() + spans := tracer.FinishedSpans() + assert.Equal(t, 2, len(spans)) + + parent := spans[1] + child := spans[0] + assert.Equal(t, map[string]interface{}{"x": "y"}, parent.Tags()) + assert.Equal(t, child.ParentID, parent.Context().(MockSpanContext).SpanID) +} + +func TestMockSpan_SetOperationName(t *testing.T) { + tracer := New() + span := tracer.StartSpan("") + span.SetOperationName("x") + assert.Equal(t, "x", span.(*MockSpan).OperationName) +} + +func TestMockSpanContext_Baggage(t *testing.T) { + tracer := New() + span := tracer.StartSpan("x") + span.SetBaggageItem("x", "y") + assert.Equal(t, "y", span.BaggageItem("x")) + assert.Equal(t, map[string]string{"x": "y"}, span.Context().(MockSpanContext).Baggage) + + 
baggage := make(map[string]string) + span.Context().ForeachBaggageItem(func(k, v string) bool { + baggage[k] = v + return true + }) + assert.Equal(t, map[string]string{"x": "y"}, baggage) + + span.SetBaggageItem("a", "b") + baggage = make(map[string]string) + span.Context().ForeachBaggageItem(func(k, v string) bool { + baggage[k] = v + return false // exit early + }) + assert.Equal(t, 2, len(span.Context().(MockSpanContext).Baggage)) + assert.Equal(t, 1, len(baggage)) +} + +func TestMockSpan_Tag(t *testing.T) { + tracer := New() + span := tracer.StartSpan("x") + span.SetTag("x", "y") + assert.Equal(t, "y", span.(*MockSpan).Tag("x")) +} + +func TestMockSpan_Tags(t *testing.T) { + tracer := New() + span := tracer.StartSpan("x") + span.SetTag("x", "y") + assert.Equal(t, map[string]interface{}{"x": "y"}, span.(*MockSpan).Tags()) +} + +func TestMockTracer_FinishedSpans_and_Reset(t *testing.T) { + tracer := New() + span := tracer.StartSpan("x") + span.SetTag("x", "y") + span.Finish() + spans := tracer.FinishedSpans() + assert.Equal(t, 1, len(spans)) + assert.Equal(t, map[string]interface{}{"x": "y"}, spans[0].Tags()) + + tracer.Reset() + spans = tracer.FinishedSpans() + assert.Equal(t, 0, len(spans)) +} + +func zeroOutTimestamps(recs []MockLogRecord) { + for i := range recs { + recs[i].Timestamp = time.Time{} + } +} + +func TestMockSpan_LogKV(t *testing.T) { + tracer := New() + span := tracer.StartSpan("s") + span.LogKV("key0", "string0") + span.LogKV("key1", "string1", "key2", uint32(42)) + span.Finish() + spans := tracer.FinishedSpans() + assert.Equal(t, 1, len(spans)) + actual := spans[0].Logs() + zeroOutTimestamps(actual) + assert.Equal(t, []MockLogRecord{ + MockLogRecord{ + Fields: []MockKeyValue{ + MockKeyValue{Key: "key0", ValueKind: reflect.String, ValueString: "string0"}, + }, + }, + MockLogRecord{ + Fields: []MockKeyValue{ + MockKeyValue{Key: "key1", ValueKind: reflect.String, ValueString: "string1"}, + MockKeyValue{Key: "key2", ValueKind: reflect.Uint32, 
ValueString: "42"}, + }, + }, + }, actual) +} + +func TestMockSpan_LogFields(t *testing.T) { + tracer := New() + span := tracer.StartSpan("s") + span.LogFields(log.String("key0", "string0")) + span.LogFields(log.String("key1", "string1"), log.Uint32("key2", uint32(42))) + span.LogFields(log.Lazy(func(fv log.Encoder) { + fv.EmitInt("key_lazy", 12) + })) + span.FinishWithOptions(opentracing.FinishOptions{ + LogRecords: []opentracing.LogRecord{ + {Timestamp: time.Now(), Fields: []log.Field{log.String("key9", "finish")}}, + }}) + spans := tracer.FinishedSpans() + assert.Equal(t, 1, len(spans)) + actual := spans[0].Logs() + zeroOutTimestamps(actual) + assert.Equal(t, []MockLogRecord{ + MockLogRecord{ + Fields: []MockKeyValue{ + MockKeyValue{Key: "key0", ValueKind: reflect.String, ValueString: "string0"}, + }, + }, + MockLogRecord{ + Fields: []MockKeyValue{ + MockKeyValue{Key: "key1", ValueKind: reflect.String, ValueString: "string1"}, + MockKeyValue{Key: "key2", ValueKind: reflect.Uint32, ValueString: "42"}, + }, + }, + MockLogRecord{ + Fields: []MockKeyValue{ + // Note that the LazyLogger gets to control the key as well as the value. 
+ MockKeyValue{Key: "key_lazy", ValueKind: reflect.Int, ValueString: "12"}, + }, + }, + MockLogRecord{ + Fields: []MockKeyValue{ + MockKeyValue{Key: "key9", ValueKind: reflect.String, ValueString: "finish"}, + }, + }, + }, actual) +} + +func TestMockSpan_DeprecatedLogs(t *testing.T) { + tracer := New() + span := tracer.StartSpan("x") + span.LogEvent("x") + span.LogEventWithPayload("y", "z") + span.LogEvent("a") + span.FinishWithOptions(opentracing.FinishOptions{ + BulkLogData: []opentracing.LogData{{Event: "f"}}}) + spans := tracer.FinishedSpans() + assert.Equal(t, 1, len(spans)) + actual := spans[0].Logs() + zeroOutTimestamps(actual) + assert.Equal(t, []MockLogRecord{ + MockLogRecord{ + Fields: []MockKeyValue{ + MockKeyValue{Key: "event", ValueKind: reflect.String, ValueString: "x"}, + }, + }, + MockLogRecord{ + Fields: []MockKeyValue{ + MockKeyValue{Key: "event", ValueKind: reflect.String, ValueString: "y"}, + MockKeyValue{Key: "payload", ValueKind: reflect.String, ValueString: "z"}, + }, + }, + MockLogRecord{ + Fields: []MockKeyValue{ + MockKeyValue{Key: "event", ValueKind: reflect.String, ValueString: "a"}, + }, + }, + MockLogRecord{ + Fields: []MockKeyValue{ + MockKeyValue{Key: "event", ValueKind: reflect.String, ValueString: "f"}, + }, + }, + }, actual) +} + +func TestMockTracer_Propagation(t *testing.T) { + textCarrier := func() interface{} { + return opentracing.TextMapCarrier(make(map[string]string)) + } + textLen := func(c interface{}) int { + return len(c.(opentracing.TextMapCarrier)) + } + + httpCarrier := func() interface{} { + httpHeaders := http.Header(make(map[string][]string)) + return opentracing.HTTPHeadersCarrier(httpHeaders) + } + httpLen := func(c interface{}) int { + return len(c.(opentracing.HTTPHeadersCarrier)) + } + + tests := []struct { + sampled bool + format opentracing.BuiltinFormat + carrier func() interface{} + len func(interface{}) int + }{ + {sampled: true, format: opentracing.TextMap, carrier: textCarrier, len: textLen}, + 
{sampled: false, format: opentracing.TextMap, carrier: textCarrier, len: textLen}, + {sampled: true, format: opentracing.HTTPHeaders, carrier: httpCarrier, len: httpLen}, + {sampled: false, format: opentracing.HTTPHeaders, carrier: httpCarrier, len: httpLen}, + } + for _, test := range tests { + tracer := New() + span := tracer.StartSpan("x") + span.SetBaggageItem("x", "y:z") // colon should be URL encoded as %3A + if !test.sampled { + ext.SamplingPriority.Set(span, 0) + } + mSpan := span.(*MockSpan) + + assert.Equal(t, opentracing.ErrUnsupportedFormat, + tracer.Inject(span.Context(), opentracing.Binary, nil)) + assert.Equal(t, opentracing.ErrInvalidCarrier, + tracer.Inject(span.Context(), opentracing.TextMap, span)) + + carrier := test.carrier() + + err := tracer.Inject(span.Context(), test.format, carrier) + require.NoError(t, err) + assert.Equal(t, 4, test.len(carrier), "expect baggage + 2 ids + sampled") + if test.format == opentracing.HTTPHeaders { + c := carrier.(opentracing.HTTPHeadersCarrier) + assert.Equal(t, "y%3Az", c["Mockpfx-Baggage-X"][0]) + } + + _, err = tracer.Extract(opentracing.Binary, nil) + assert.Equal(t, opentracing.ErrUnsupportedFormat, err) + _, err = tracer.Extract(opentracing.TextMap, tracer) + assert.Equal(t, opentracing.ErrInvalidCarrier, err) + + extractedContext, err := tracer.Extract(test.format, carrier) + require.NoError(t, err) + assert.Equal(t, mSpan.SpanContext.TraceID, extractedContext.(MockSpanContext).TraceID) + assert.Equal(t, mSpan.SpanContext.SpanID, extractedContext.(MockSpanContext).SpanID) + assert.Equal(t, test.sampled, extractedContext.(MockSpanContext).Sampled) + assert.Equal(t, "y:z", extractedContext.(MockSpanContext).Baggage["x"]) + } +} diff --git a/vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go b/vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go new file mode 100644 index 00000000..8364f1d1 --- /dev/null +++ 
b/vendor/github.com/opentracing/opentracing-go/mocktracer/propagation.go @@ -0,0 +1,120 @@ +package mocktracer + +import ( + "fmt" + "net/url" + "strconv" + "strings" + + "github.com/opentracing/opentracing-go" +) + +const mockTextMapIdsPrefix = "mockpfx-ids-" +const mockTextMapBaggagePrefix = "mockpfx-baggage-" + +var emptyContext = MockSpanContext{} + +// Injector is responsible for injecting SpanContext instances in a manner suitable +// for propagation via a format-specific "carrier" object. Typically the +// injection will take place across an RPC boundary, but message queues and +// other IPC mechanisms are also reasonable places to use an Injector. +type Injector interface { + // Inject takes `SpanContext` and injects it into `carrier`. The actual type + // of `carrier` depends on the `format` passed to `Tracer.Inject()`. + // + // Implementations may return opentracing.ErrInvalidCarrier or any other + // implementation-specific error if injection fails. + Inject(ctx MockSpanContext, carrier interface{}) error +} + +// Extractor is responsible for extracting SpanContext instances from a +// format-specific "carrier" object. Typically the extraction will take place +// on the server side of an RPC boundary, but message queues and other IPC +// mechanisms are also reasonable places to use an Extractor. +type Extractor interface { + // Extract decodes a SpanContext instance from the given `carrier`, + // or (nil, opentracing.ErrSpanContextNotFound) if no context could + // be found in the `carrier`. + Extract(carrier interface{}) (MockSpanContext, error) +} + +// TextMapPropagator implements Injector/Extractor for TextMap and HTTPHeaders formats. 
+type TextMapPropagator struct { + HTTPHeaders bool +} + +// Inject implements the Injector interface +func (t *TextMapPropagator) Inject(spanContext MockSpanContext, carrier interface{}) error { + writer, ok := carrier.(opentracing.TextMapWriter) + if !ok { + return opentracing.ErrInvalidCarrier + } + // Ids: + writer.Set(mockTextMapIdsPrefix+"traceid", strconv.Itoa(spanContext.TraceID)) + writer.Set(mockTextMapIdsPrefix+"spanid", strconv.Itoa(spanContext.SpanID)) + writer.Set(mockTextMapIdsPrefix+"sampled", fmt.Sprint(spanContext.Sampled)) + // Baggage: + for baggageKey, baggageVal := range spanContext.Baggage { + safeVal := baggageVal + if t.HTTPHeaders { + safeVal = url.QueryEscape(baggageVal) + } + writer.Set(mockTextMapBaggagePrefix+baggageKey, safeVal) + } + return nil +} + +// Extract implements the Extractor interface +func (t *TextMapPropagator) Extract(carrier interface{}) (MockSpanContext, error) { + reader, ok := carrier.(opentracing.TextMapReader) + if !ok { + return emptyContext, opentracing.ErrInvalidCarrier + } + rval := MockSpanContext{0, 0, true, nil} + err := reader.ForeachKey(func(key, val string) error { + lowerKey := strings.ToLower(key) + switch { + case lowerKey == mockTextMapIdsPrefix+"traceid": + // Ids: + i, err := strconv.Atoi(val) + if err != nil { + return err + } + rval.TraceID = i + case lowerKey == mockTextMapIdsPrefix+"spanid": + // Ids: + i, err := strconv.Atoi(val) + if err != nil { + return err + } + rval.SpanID = i + case lowerKey == mockTextMapIdsPrefix+"sampled": + b, err := strconv.ParseBool(val) + if err != nil { + return err + } + rval.Sampled = b + case strings.HasPrefix(lowerKey, mockTextMapBaggagePrefix): + // Baggage: + if rval.Baggage == nil { + rval.Baggage = make(map[string]string) + } + safeVal := val + if t.HTTPHeaders { + // unescape errors are ignored, nothing can be done + if rawVal, err := url.QueryUnescape(val); err == nil { + safeVal = rawVal + } + } + rval.Baggage[lowerKey[len(mockTextMapBaggagePrefix):]] 
= safeVal + } + return nil + }) + if rval.TraceID == 0 || rval.SpanID == 0 { + return emptyContext, opentracing.ErrSpanContextNotFound + } + if err != nil { + return emptyContext, err + } + return rval, nil +} diff --git a/vendor/github.com/opentracing/opentracing-go/noop.go b/vendor/github.com/opentracing/opentracing-go/noop.go new file mode 100644 index 00000000..0d32f692 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/noop.go @@ -0,0 +1,64 @@ +package opentracing + +import "github.com/opentracing/opentracing-go/log" + +// A NoopTracer is a trivial, minimum overhead implementation of Tracer +// for which all operations are no-ops. +// +// The primary use of this implementation is in libraries, such as RPC +// frameworks, that make tracing an optional feature controlled by the +// end user. A no-op implementation allows said libraries to use it +// as the default Tracer and to write instrumentation that does +// not need to keep checking if the tracer instance is nil. +// +// For the same reason, the NoopTracer is the default "global" tracer +// (see GlobalTracer and SetGlobalTracer functions). +// +// WARNING: NoopTracer does not support baggage propagation. 
+type NoopTracer struct{} + +type noopSpan struct{} +type noopSpanContext struct{} + +var ( + defaultNoopSpanContext = noopSpanContext{} + defaultNoopSpan = noopSpan{} + defaultNoopTracer = NoopTracer{} +) + +const ( + emptyString = "" +) + +// noopSpanContext: +func (n noopSpanContext) ForeachBaggageItem(handler func(k, v string) bool) {} + +// noopSpan: +func (n noopSpan) Context() SpanContext { return defaultNoopSpanContext } +func (n noopSpan) SetBaggageItem(key, val string) Span { return defaultNoopSpan } +func (n noopSpan) BaggageItem(key string) string { return emptyString } +func (n noopSpan) SetTag(key string, value interface{}) Span { return n } +func (n noopSpan) LogFields(fields ...log.Field) {} +func (n noopSpan) LogKV(keyVals ...interface{}) {} +func (n noopSpan) Finish() {} +func (n noopSpan) FinishWithOptions(opts FinishOptions) {} +func (n noopSpan) SetOperationName(operationName string) Span { return n } +func (n noopSpan) Tracer() Tracer { return defaultNoopTracer } +func (n noopSpan) LogEvent(event string) {} +func (n noopSpan) LogEventWithPayload(event string, payload interface{}) {} +func (n noopSpan) Log(data LogData) {} + +// StartSpan belongs to the Tracer interface. +func (n NoopTracer) StartSpan(operationName string, opts ...StartSpanOption) Span { + return defaultNoopSpan +} + +// Inject belongs to the Tracer interface. +func (n NoopTracer) Inject(sp SpanContext, format interface{}, carrier interface{}) error { + return nil +} + +// Extract belongs to the Tracer interface. 
+func (n NoopTracer) Extract(format interface{}, carrier interface{}) (SpanContext, error) { + return nil, ErrSpanContextNotFound +} diff --git a/vendor/github.com/opentracing/opentracing-go/options_test.go b/vendor/github.com/opentracing/opentracing-go/options_test.go new file mode 100644 index 00000000..56a543bf --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/options_test.go @@ -0,0 +1,31 @@ +package opentracing + +import ( + "testing" + + "github.com/stretchr/testify/require" +) + +func TestChildOfAndFollowsFrom(t *testing.T) { + tests := []struct { + newOpt func(SpanContext) SpanReference + refType SpanReferenceType + name string + }{ + {ChildOf, ChildOfRef, "ChildOf"}, + {FollowsFrom, FollowsFromRef, "FollowsFrom"}, + } + + for _, test := range tests { + opts := new(StartSpanOptions) + + test.newOpt(nil).Apply(opts) + require.Nil(t, opts.References, "%s(nil) must not append a reference", test.name) + + ctx := new(noopSpanContext) + test.newOpt(ctx).Apply(opts) + require.Equal(t, []SpanReference{ + SpanReference{ReferencedContext: ctx, Type: test.refType}, + }, opts.References, "%s(ctx) must append a reference", test.name) + } +} diff --git a/vendor/github.com/opentracing/opentracing-go/propagation.go b/vendor/github.com/opentracing/opentracing-go/propagation.go new file mode 100644 index 00000000..9583fc53 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/propagation.go @@ -0,0 +1,176 @@ +package opentracing + +import ( + "errors" + "net/http" +) + +/////////////////////////////////////////////////////////////////////////////// +// CORE PROPAGATION INTERFACES: +/////////////////////////////////////////////////////////////////////////////// + +var ( + // ErrUnsupportedFormat occurs when the `format` passed to Tracer.Inject() or + // Tracer.Extract() is not recognized by the Tracer implementation. 
+ ErrUnsupportedFormat = errors.New("opentracing: Unknown or unsupported Inject/Extract format") + + // ErrSpanContextNotFound occurs when the `carrier` passed to + // Tracer.Extract() is valid and uncorrupted but has insufficient + // information to extract a SpanContext. + ErrSpanContextNotFound = errors.New("opentracing: SpanContext not found in Extract carrier") + + // ErrInvalidSpanContext errors occur when Tracer.Inject() is asked to + // operate on a SpanContext which it is not prepared to handle (for + // example, since it was created by a different tracer implementation). + ErrInvalidSpanContext = errors.New("opentracing: SpanContext type incompatible with tracer") + + // ErrInvalidCarrier errors occur when Tracer.Inject() or Tracer.Extract() + // implementations expect a different type of `carrier` than they are + // given. + ErrInvalidCarrier = errors.New("opentracing: Invalid Inject/Extract carrier") + + // ErrSpanContextCorrupted occurs when the `carrier` passed to + // Tracer.Extract() is of the expected type but is corrupted. + ErrSpanContextCorrupted = errors.New("opentracing: SpanContext data corrupted in Extract carrier") +) + +/////////////////////////////////////////////////////////////////////////////// +// BUILTIN PROPAGATION FORMATS: +/////////////////////////////////////////////////////////////////////////////// + +// BuiltinFormat is used to demarcate the values within package `opentracing` +// that are intended for use with the Tracer.Inject() and Tracer.Extract() +// methods. +type BuiltinFormat byte + +const ( + // Binary represents SpanContexts as opaque binary data. + // + // For Tracer.Inject(): the carrier must be an `io.Writer`. + // + // For Tracer.Extract(): the carrier must be an `io.Reader`. + Binary BuiltinFormat = iota + + // TextMap represents SpanContexts as key:value string pairs. + // + // Unlike HTTPHeaders, the TextMap format does not restrict the key or + // value character sets in any way. 
+ // + // For Tracer.Inject(): the carrier must be a `TextMapWriter`. + // + // For Tracer.Extract(): the carrier must be a `TextMapReader`. + TextMap + + // HTTPHeaders represents SpanContexts as HTTP header string pairs. + // + // Unlike TextMap, the HTTPHeaders format requires that the keys and values + // be valid as HTTP headers as-is (i.e., character casing may be unstable + // and special characters are disallowed in keys, values should be + // URL-escaped, etc). + // + // For Tracer.Inject(): the carrier must be a `TextMapWriter`. + // + // For Tracer.Extract(): the carrier must be a `TextMapReader`. + // + // See HTTPHeaderCarrier for an implementation of both TextMapWriter + // and TextMapReader that defers to an http.Header instance for storage. + // For example, Inject(): + // + // carrier := opentracing.HTTPHeadersCarrier(httpReq.Header) + // err := span.Tracer().Inject( + // span, opentracing.HTTPHeaders, carrier) + // + // Or Extract(): + // + // carrier := opentracing.HTTPHeadersCarrier(httpReq.Header) + // span, err := tracer.Extract( + // opentracing.HTTPHeaders, carrier) + // + HTTPHeaders +) + +// TextMapWriter is the Inject() carrier for the TextMap builtin format. With +// it, the caller can encode a SpanContext for propagation as entries in a map +// of unicode strings. +type TextMapWriter interface { + // Set a key:value pair to the carrier. Multiple calls to Set() for the + // same key leads to undefined behavior. + // + // NOTE: The backing store for the TextMapWriter may contain data unrelated + // to SpanContext. As such, Inject() and Extract() implementations that + // call the TextMapWriter and TextMapReader interfaces must agree on a + // prefix or other convention to distinguish their own key:value pairs. + Set(key, val string) +} + +// TextMapReader is the Extract() carrier for the TextMap builtin format. With it, +// the caller can decode a propagated SpanContext as entries in a map of +// unicode strings. 
+type TextMapReader interface { + // ForeachKey returns TextMap contents via repeated calls to the `handler` + // function. If any call to `handler` returns a non-nil error, ForeachKey + // terminates and returns that error. + // + // NOTE: The backing store for the TextMapReader may contain data unrelated + // to SpanContext. As such, Inject() and Extract() implementations that + // call the TextMapWriter and TextMapReader interfaces must agree on a + // prefix or other convention to distinguish their own key:value pairs. + // + // The "foreach" callback pattern reduces unnecessary copying in some cases + // and also allows implementations to hold locks while the map is read. + ForeachKey(handler func(key, val string) error) error +} + +// TextMapCarrier allows the use of regular map[string]string +// as both TextMapWriter and TextMapReader. +type TextMapCarrier map[string]string + +// ForeachKey conforms to the TextMapReader interface. +func (c TextMapCarrier) ForeachKey(handler func(key, val string) error) error { + for k, v := range c { + if err := handler(k, v); err != nil { + return err + } + } + return nil +} + +// Set implements Set() of opentracing.TextMapWriter +func (c TextMapCarrier) Set(key, val string) { + c[key] = val +} + +// HTTPHeadersCarrier satisfies both TextMapWriter and TextMapReader. +// +// Example usage for server side: +// +// carrier := opentracing.HttpHeadersCarrier(httpReq.Header) +// spanContext, err := tracer.Extract(opentracing.HttpHeaders, carrier) +// +// Example usage for client side: +// +// carrier := opentracing.HTTPHeadersCarrier(httpReq.Header) +// err := tracer.Inject( +// span.Context(), +// opentracing.HttpHeaders, +// carrier) +// +type HTTPHeadersCarrier http.Header + +// Set conforms to the TextMapWriter interface. +func (c HTTPHeadersCarrier) Set(key, val string) { + h := http.Header(c) + h.Add(key, val) +} + +// ForeachKey conforms to the TextMapReader interface. 
+func (c HTTPHeadersCarrier) ForeachKey(handler func(key, val string) error) error { + for k, vals := range c { + for _, v := range vals { + if err := handler(k, v); err != nil { + return err + } + } + } + return nil +} diff --git a/vendor/github.com/opentracing/opentracing-go/propagation_test.go b/vendor/github.com/opentracing/opentracing-go/propagation_test.go new file mode 100644 index 00000000..e3dad559 --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/propagation_test.go @@ -0,0 +1,93 @@ +package opentracing + +import ( + "net/http" + "strconv" + "testing" +) + +const testHeaderPrefix = "testprefix-" + +func TestTextMapCarrierInject(t *testing.T) { + m := make(map[string]string) + m["NotOT"] = "blah" + m["opname"] = "AlsoNotOT" + tracer := testTracer{} + span := tracer.StartSpan("someSpan") + fakeID := span.Context().(testSpanContext).FakeID + + carrier := TextMapCarrier(m) + if err := span.Tracer().Inject(span.Context(), TextMap, carrier); err != nil { + t.Fatal(err) + } + + if len(m) != 3 { + t.Errorf("Unexpected header length: %v", len(m)) + } + // The prefix comes from just above; the suffix comes from + // testTracer.Inject(). 
+ if m["testprefix-fakeid"] != strconv.Itoa(fakeID) { + t.Errorf("Could not find fakeid at expected key") + } +} + +func TestTextMapCarrierExtract(t *testing.T) { + m := make(map[string]string) + m["NotOT"] = "blah" + m["opname"] = "AlsoNotOT" + m["testprefix-fakeid"] = "42" + tracer := testTracer{} + + carrier := TextMapCarrier(m) + extractedContext, err := tracer.Extract(TextMap, carrier) + if err != nil { + t.Fatal(err) + } + + if extractedContext.(testSpanContext).FakeID != 42 { + t.Errorf("Failed to read testprefix-fakeid correctly") + } +} + +func TestHTTPHeaderInject(t *testing.T) { + h := http.Header{} + h.Add("NotOT", "blah") + h.Add("opname", "AlsoNotOT") + tracer := testTracer{} + span := tracer.StartSpan("someSpan") + fakeID := span.Context().(testSpanContext).FakeID + + // Use HTTPHeadersCarrier to wrap around `h`. + carrier := HTTPHeadersCarrier(h) + if err := span.Tracer().Inject(span.Context(), HTTPHeaders, carrier); err != nil { + t.Fatal(err) + } + + if len(h) != 3 { + t.Errorf("Unexpected header length: %v", len(h)) + } + // The prefix comes from just above; the suffix comes from + // testTracer.Inject(). + if h.Get("testprefix-fakeid") != strconv.Itoa(fakeID) { + t.Errorf("Could not find fakeid at expected key") + } +} + +func TestHTTPHeaderExtract(t *testing.T) { + h := http.Header{} + h.Add("NotOT", "blah") + h.Add("opname", "AlsoNotOT") + h.Add("testprefix-fakeid", "42") + tracer := testTracer{} + + // Use HTTPHeadersCarrier to wrap around `h`. 
+ carrier := HTTPHeadersCarrier(h) + spanContext, err := tracer.Extract(HTTPHeaders, carrier) + if err != nil { + t.Fatal(err) + } + + if spanContext.(testSpanContext).FakeID != 42 { + t.Errorf("Failed to read testprefix-fakeid correctly") + } +} diff --git a/vendor/github.com/opentracing/opentracing-go/span.go b/vendor/github.com/opentracing/opentracing-go/span.go new file mode 100644 index 00000000..f6c3234a --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/span.go @@ -0,0 +1,185 @@ +package opentracing + +import ( + "time" + + "github.com/opentracing/opentracing-go/log" +) + +// SpanContext represents Span state that must propagate to descendant Spans and across process +// boundaries (e.g., a tuple). +type SpanContext interface { + // ForeachBaggageItem grants access to all baggage items stored in the + // SpanContext. + // The handler function will be called for each baggage key/value pair. + // The ordering of items is not guaranteed. + // + // The bool return value indicates if the handler wants to continue iterating + // through the rest of the baggage items; for example if the handler is trying to + // find some baggage item by pattern matching the name, it can return false + // as soon as the item is found to stop further iterations. + ForeachBaggageItem(handler func(k, v string) bool) +} + +// Span represents an active, un-finished span in the OpenTracing system. +// +// Spans are created by the Tracer interface. +type Span interface { + // Sets the end timestamp and finalizes Span state. + // + // With the exception of calls to Context() (which are always allowed), + // Finish() must be the last call made to any span instance, and to do + // otherwise leads to undefined behavior. + Finish() + // FinishWithOptions is like Finish() but with explicit control over + // timestamps and log data. + FinishWithOptions(opts FinishOptions) + + // Context() yields the SpanContext for this Span. 
Note that the return + // value of Context() is still valid after a call to Span.Finish(), as is + // a call to Span.Context() after a call to Span.Finish(). + Context() SpanContext + + // Sets or changes the operation name. + SetOperationName(operationName string) Span + + // Adds a tag to the span. + // + // If there is a pre-existing tag set for `key`, it is overwritten. + // + // Tag values can be numeric types, strings, or bools. The behavior of + // other tag value types is undefined at the OpenTracing level. If a + // tracing system does not know how to handle a particular value type, it + // may ignore the tag, but shall not panic. + SetTag(key string, value interface{}) Span + + // LogFields is an efficient and type-checked way to record key:value + // logging data about a Span, though the programming interface is a little + // more verbose than LogKV(). Here's an example: + // + // span.LogFields( + // log.String("event", "soft error"), + // log.String("type", "cache timeout"), + // log.Int("waited.millis", 1500)) + // + // Also see Span.FinishWithOptions() and FinishOptions.BulkLogData. + LogFields(fields ...log.Field) + + // LogKV is a concise, readable way to record key:value logging data about + // a Span, though unfortunately this also makes it less efficient and less + // type-safe than LogFields(). Here's an example: + // + // span.LogKV( + // "event", "soft error", + // "type", "cache timeout", + // "waited.millis", 1500) + // + // For LogKV (as opposed to LogFields()), the parameters must appear as + // key-value pairs, like + // + // span.LogKV(key1, val1, key2, val2, key3, val3, ...) + // + // The keys must all be strings. The values may be strings, numeric types, + // bools, Go error instances, or arbitrary structs. 
+ // + // (Note to implementors: consider the log.InterleavedKVToFields() helper) + LogKV(alternatingKeyValues ...interface{}) + + // SetBaggageItem sets a key:value pair on this Span and its SpanContext + // that also propagates to descendants of this Span. + // + // SetBaggageItem() enables powerful functionality given a full-stack + // opentracing integration (e.g., arbitrary application data from a mobile + // app can make it, transparently, all the way into the depths of a storage + // system), and with it some powerful costs: use this feature with care. + // + // IMPORTANT NOTE #1: SetBaggageItem() will only propagate baggage items to + // *future* causal descendants of the associated Span. + // + // IMPORTANT NOTE #2: Use this thoughtfully and with care. Every key and + // value is copied into every local *and remote* child of the associated + // Span, and that can add up to a lot of network and cpu overhead. + // + // Returns a reference to this Span for chaining. + SetBaggageItem(restrictedKey, value string) Span + + // Gets the value for a baggage item given its key. Returns the empty string + // if the value isn't found in this Span. + BaggageItem(restrictedKey string) string + + // Provides access to the Tracer that created this Span. + Tracer() Tracer + + // Deprecated: use LogFields or LogKV + LogEvent(event string) + // Deprecated: use LogFields or LogKV + LogEventWithPayload(event string, payload interface{}) + // Deprecated: use LogFields or LogKV + Log(data LogData) +} + +// LogRecord is data associated with a single Span log. Every LogRecord +// instance must specify at least one Field. +type LogRecord struct { + Timestamp time.Time + Fields []log.Field +} + +// FinishOptions allows Span.FinishWithOptions callers to override the finish +// timestamp and provide log data via a bulk interface. +type FinishOptions struct { + // FinishTime overrides the Span's finish time, or implicitly becomes + // time.Now() if FinishTime.IsZero(). 
+ // + // FinishTime must resolve to a timestamp that's >= the Span's StartTime + // (per StartSpanOptions). + FinishTime time.Time + + // LogRecords allows the caller to specify the contents of many LogFields() + // calls with a single slice. May be nil. + // + // None of the LogRecord.Timestamp values may be .IsZero() (i.e., they must + // be set explicitly). Also, they must be >= the Span's start timestamp and + // <= the FinishTime (or time.Now() if FinishTime.IsZero()). Otherwise the + // behavior of FinishWithOptions() is undefined. + // + // If specified, the caller hands off ownership of LogRecords at + // FinishWithOptions() invocation time. + // + // If specified, the (deprecated) BulkLogData must be nil or empty. + LogRecords []LogRecord + + // BulkLogData is DEPRECATED. + BulkLogData []LogData +} + +// LogData is DEPRECATED +type LogData struct { + Timestamp time.Time + Event string + Payload interface{} +} + +// ToLogRecord converts a deprecated LogData to a non-deprecated LogRecord +func (ld *LogData) ToLogRecord() LogRecord { + var literalTimestamp time.Time + if ld.Timestamp.IsZero() { + literalTimestamp = time.Now() + } else { + literalTimestamp = ld.Timestamp + } + rval := LogRecord{ + Timestamp: literalTimestamp, + } + if ld.Payload == nil { + rval.Fields = []log.Field{ + log.String("event", ld.Event), + } + } else { + rval.Fields = []log.Field{ + log.String("event", ld.Event), + log.Object("payload", ld.Payload), + } + } + return rval +} diff --git a/vendor/github.com/opentracing/opentracing-go/testtracer_test.go b/vendor/github.com/opentracing/opentracing-go/testtracer_test.go new file mode 100644 index 00000000..dd13788c --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/testtracer_test.go @@ -0,0 +1,138 @@ +package opentracing + +import ( + "strconv" + "strings" + "time" + + "github.com/opentracing/opentracing-go/log" +) + +const testHTTPHeaderPrefix = "testprefix-" + +// testTracer is a most-noop Tracer implementation that 
makes it possible for +// unittests to verify whether certain methods were / were not called. +type testTracer struct{} + +var fakeIDSource = 1 + +func nextFakeID() int { + fakeIDSource++ + return fakeIDSource +} + +type testSpanContext struct { + HasParent bool + FakeID int +} + +func (n testSpanContext) ForeachBaggageItem(handler func(k, v string) bool) {} + +type testSpan struct { + spanContext testSpanContext + OperationName string + StartTime time.Time + Tags map[string]interface{} +} + +func (n testSpan) Equal(os Span) bool { + other, ok := os.(testSpan) + if !ok { + return false + } + if n.spanContext != other.spanContext { + return false + } + if n.OperationName != other.OperationName { + return false + } + if !n.StartTime.Equal(other.StartTime) { + return false + } + if len(n.Tags) != len(other.Tags) { + return false + } + + for k, v := range n.Tags { + if ov, ok := other.Tags[k]; !ok || ov != v { + return false + } + } + + return true +} + +// testSpan: +func (n testSpan) Context() SpanContext { return n.spanContext } +func (n testSpan) SetTag(key string, value interface{}) Span { return n } +func (n testSpan) Finish() {} +func (n testSpan) FinishWithOptions(opts FinishOptions) {} +func (n testSpan) LogFields(fields ...log.Field) {} +func (n testSpan) LogKV(kvs ...interface{}) {} +func (n testSpan) SetOperationName(operationName string) Span { return n } +func (n testSpan) Tracer() Tracer { return testTracer{} } +func (n testSpan) SetBaggageItem(key, val string) Span { return n } +func (n testSpan) BaggageItem(key string) string { return "" } +func (n testSpan) LogEvent(event string) {} +func (n testSpan) LogEventWithPayload(event string, payload interface{}) {} +func (n testSpan) Log(data LogData) {} + +// StartSpan belongs to the Tracer interface. 
+func (n testTracer) StartSpan(operationName string, opts ...StartSpanOption) Span { + sso := StartSpanOptions{} + for _, o := range opts { + o.Apply(&sso) + } + return n.startSpanWithOptions(operationName, sso) +} + +func (n testTracer) startSpanWithOptions(name string, opts StartSpanOptions) Span { + fakeID := nextFakeID() + if len(opts.References) > 0 { + fakeID = opts.References[0].ReferencedContext.(testSpanContext).FakeID + } + + return testSpan{ + OperationName: name, + StartTime: opts.StartTime, + Tags: opts.Tags, + spanContext: testSpanContext{ + HasParent: len(opts.References) > 0, + FakeID: fakeID, + }, + } +} + +// Inject belongs to the Tracer interface. +func (n testTracer) Inject(sp SpanContext, format interface{}, carrier interface{}) error { + spanContext := sp.(testSpanContext) + switch format { + case HTTPHeaders, TextMap: + carrier.(TextMapWriter).Set(testHTTPHeaderPrefix+"fakeid", strconv.Itoa(spanContext.FakeID)) + return nil + } + return ErrUnsupportedFormat +} + +// Extract belongs to the Tracer interface. +func (n testTracer) Extract(format interface{}, carrier interface{}) (SpanContext, error) { + switch format { + case HTTPHeaders, TextMap: + // Just for testing purposes... generally not a worthwhile thing to + // propagate. 
+ sm := testSpanContext{} + err := carrier.(TextMapReader).ForeachKey(func(key, val string) error { + switch strings.ToLower(key) { + case testHTTPHeaderPrefix + "fakeid": + i, err := strconv.Atoi(val) + if err != nil { + return err + } + sm.FakeID = i + } + return nil + }) + return sm, err + } + return nil, ErrSpanContextNotFound +} diff --git a/vendor/github.com/opentracing/opentracing-go/tracer.go b/vendor/github.com/opentracing/opentracing-go/tracer.go new file mode 100644 index 00000000..fd77c1df --- /dev/null +++ b/vendor/github.com/opentracing/opentracing-go/tracer.go @@ -0,0 +1,305 @@ +package opentracing + +import "time" + +// Tracer is a simple, thin interface for Span creation and SpanContext +// propagation. +type Tracer interface { + + // Create, start, and return a new Span with the given `operationName` and + // incorporate the given StartSpanOption `opts`. (Note that `opts` borrows + // from the "functional options" pattern, per + // http://dave.cheney.net/2014/10/17/functional-options-for-friendly-apis) + // + // A Span with no SpanReference options (e.g., opentracing.ChildOf() or + // opentracing.FollowsFrom()) becomes the root of its own trace. + // + // Examples: + // + // var tracer opentracing.Tracer = ... + // + // // The root-span case: + // sp := tracer.StartSpan("GetFeed") + // + // // The vanilla child span case: + // sp := tracer.StartSpan( + // "GetFeed", + // opentracing.ChildOf(parentSpan.Context())) + // + // // All the bells and whistles: + // sp := tracer.StartSpan( + // "GetFeed", + // opentracing.ChildOf(parentSpan.Context()), + // opentracing.Tag("user_agent", loggedReq.UserAgent), + // opentracing.StartTime(loggedReq.Timestamp), + // ) + // + StartSpan(operationName string, opts ...StartSpanOption) Span + + // Inject() takes the `sm` SpanContext instance and injects it for + // propagation within `carrier`. The actual type of `carrier` depends on + // the value of `format`. 
+ // + // OpenTracing defines a common set of `format` values (see BuiltinFormat), + // and each has an expected carrier type. + // + // Other packages may declare their own `format` values, much like the keys + // used by `context.Context` (see + // https://godoc.org/golang.org/x/net/context#WithValue). + // + // Example usage (sans error handling): + // + // carrier := opentracing.HTTPHeadersCarrier(httpReq.Header) + // err := tracer.Inject( + // span.Context(), + // opentracing.HTTPHeaders, + // carrier) + // + // NOTE: All opentracing.Tracer implementations MUST support all + // BuiltinFormats. + // + // Implementations may return opentracing.ErrUnsupportedFormat if `format` + // is not supported by (or not known by) the implementation. + // + // Implementations may return opentracing.ErrInvalidCarrier or any other + // implementation-specific error if the format is supported but injection + // fails anyway. + // + // See Tracer.Extract(). + Inject(sm SpanContext, format interface{}, carrier interface{}) error + + // Extract() returns a SpanContext instance given `format` and `carrier`. + // + // OpenTracing defines a common set of `format` values (see BuiltinFormat), + // and each has an expected carrier type. + // + // Other packages may declare their own `format` values, much like the keys + // used by `context.Context` (see + // https://godoc.org/golang.org/x/net/context#WithValue). + // + // Example usage (with StartSpan): + // + // + // carrier := opentracing.HTTPHeadersCarrier(httpReq.Header) + // clientContext, err := tracer.Extract(opentracing.HTTPHeaders, carrier) + // + // // ... 
assuming the ultimate goal here is to resume the trace with a + // // server-side Span: + // var serverSpan opentracing.Span + // if err == nil { + // span = tracer.StartSpan( + // rpcMethodName, ext.RPCServerOption(clientContext)) + // } else { + // span = tracer.StartSpan(rpcMethodName) + // } + // + // + // NOTE: All opentracing.Tracer implementations MUST support all + // BuiltinFormats. + // + // Return values: + // - A successful Extract returns a SpanContext instance and a nil error + // - If there was simply no SpanContext to extract in `carrier`, Extract() + // returns (nil, opentracing.ErrSpanContextNotFound) + // - If `format` is unsupported or unrecognized, Extract() returns (nil, + // opentracing.ErrUnsupportedFormat) + // - If there are more fundamental problems with the `carrier` object, + // Extract() may return opentracing.ErrInvalidCarrier, + // opentracing.ErrSpanContextCorrupted, or implementation-specific + // errors. + // + // See Tracer.Inject(). + Extract(format interface{}, carrier interface{}) (SpanContext, error) +} + +// StartSpanOptions allows Tracer.StartSpan() callers and implementors a +// mechanism to override the start timestamp, specify Span References, and make +// a single Tag or multiple Tags available at Span start time. +// +// StartSpan() callers should look at the StartSpanOption interface and +// implementations available in this package. +// +// Tracer implementations can convert a slice of `StartSpanOption` instances +// into a `StartSpanOptions` struct like so: +// +// func StartSpan(opName string, opts ...opentracing.StartSpanOption) { +// sso := opentracing.StartSpanOptions{} +// for _, o := range opts { +// o.Apply(&sso) +// } +// ... +// } +// +type StartSpanOptions struct { + // Zero or more causal references to other Spans (via their SpanContext). + // If empty, start a "root" Span (i.e., start a new trace). 
+ References []SpanReference + + // StartTime overrides the Span's start time, or implicitly becomes + // time.Now() if StartTime.IsZero(). + StartTime time.Time + + // Tags may have zero or more entries; the restrictions on map values are + // identical to those for Span.SetTag(). May be nil. + // + // If specified, the caller hands off ownership of Tags at + // StartSpan() invocation time. + Tags map[string]interface{} +} + +// StartSpanOption instances (zero or more) may be passed to Tracer.StartSpan. +// +// StartSpanOption borrows from the "functional options" pattern, per +// http://dave.cheney.net/2014/10/17/functional-options-for-friendly-apis +type StartSpanOption interface { + Apply(*StartSpanOptions) +} + +// SpanReferenceType is an enum type describing different categories of +// relationships between two Spans. If Span-2 refers to Span-1, the +// SpanReferenceType describes Span-1 from Span-2's perspective. For example, +// ChildOfRef means that Span-1 created Span-2. +// +// NOTE: Span-1 and Span-2 do *not* necessarily depend on each other for +// completion; e.g., Span-2 may be part of a background job enqueued by Span-1, +// or Span-2 may be sitting in a distributed queue behind Span-1. +type SpanReferenceType int + +const ( + // ChildOfRef refers to a parent Span that caused *and* somehow depends + // upon the new child Span. Often (but not always), the parent Span cannot + // finish until the child Span does. + // + // An timing diagram for a ChildOfRef that's blocked on the new Span: + // + // [-Parent Span---------] + // [-Child Span----] + // + // See http://opentracing.io/spec/ + // + // See opentracing.ChildOf() + ChildOfRef SpanReferenceType = iota + + // FollowsFromRef refers to a parent Span that does not depend in any way + // on the result of the new child Span. For instance, one might use + // FollowsFromRefs to describe pipeline stages separated by queues, + // or a fire-and-forget cache insert at the tail end of a web request. 
+ // + // A FollowsFromRef Span is part of the same logical trace as the new Span: + // i.e., the new Span is somehow caused by the work of its FollowsFromRef. + // + // All of the following could be valid timing diagrams for children that + // "FollowFrom" a parent. + // + // [-Parent Span-] [-Child Span-] + // + // + // [-Parent Span--] + // [-Child Span-] + // + // + // [-Parent Span-] + // [-Child Span-] + // + // See http://opentracing.io/spec/ + // + // See opentracing.FollowsFrom() + FollowsFromRef +) + +// SpanReference is a StartSpanOption that pairs a SpanReferenceType and a +// referenced SpanContext. See the SpanReferenceType documentation for +// supported relationships. If SpanReference is created with +// ReferencedContext==nil, it has no effect. Thus it allows for a more concise +// syntax for starting spans: +// +// sc, _ := tracer.Extract(someFormat, someCarrier) +// span := tracer.StartSpan("operation", opentracing.ChildOf(sc)) +// +// The `ChildOf(sc)` option above will not panic if sc == nil, it will just +// not add the parent span reference to the options. +type SpanReference struct { + Type SpanReferenceType + ReferencedContext SpanContext +} + +// Apply satisfies the StartSpanOption interface. +func (r SpanReference) Apply(o *StartSpanOptions) { + if r.ReferencedContext != nil { + o.References = append(o.References, r) + } +} + +// ChildOf returns a StartSpanOption pointing to a dependent parent span. +// If sc == nil, the option has no effect. +// +// See ChildOfRef, SpanReference +func ChildOf(sc SpanContext) SpanReference { + return SpanReference{ + Type: ChildOfRef, + ReferencedContext: sc, + } +} + +// FollowsFrom returns a StartSpanOption pointing to a parent Span that caused +// the child Span but does not directly depend on its result in any way. +// If sc == nil, the option has no effect. 
+// +// See FollowsFromRef, SpanReference +func FollowsFrom(sc SpanContext) SpanReference { + return SpanReference{ + Type: FollowsFromRef, + ReferencedContext: sc, + } +} + +// StartTime is a StartSpanOption that sets an explicit start timestamp for the +// new Span. +type StartTime time.Time + +// Apply satisfies the StartSpanOption interface. +func (t StartTime) Apply(o *StartSpanOptions) { + o.StartTime = time.Time(t) +} + +// Tags are a generic map from an arbitrary string key to an opaque value type. +// The underlying tracing system is responsible for interpreting and +// serializing the values. +type Tags map[string]interface{} + +// Apply satisfies the StartSpanOption interface. +func (t Tags) Apply(o *StartSpanOptions) { + if o.Tags == nil { + o.Tags = make(map[string]interface{}) + } + for k, v := range t { + o.Tags[k] = v + } +} + +// Tag may be passed as a StartSpanOption to add a tag to new spans, +// or its Set method may be used to apply the tag to an existing Span, +// for example: +// +// tracer.StartSpan("opName", Tag{"Key", value}) +// +// or +// +// Tag{"key", value}.Set(span) +type Tag struct { + Key string + Value interface{} +} + +// Apply satisfies the StartSpanOption interface. +func (t Tag) Apply(o *StartSpanOptions) { + if o.Tags == nil { + o.Tags = make(map[string]interface{}) + } + o.Tags[t.Key] = t.Value +} + +// Set applies the tag to an existing Span. +func (t Tag) Set(s Span) { + s.SetTag(t.Key, t.Value) +}