Graphql schema for DAG-JSON objects #1

Merged
ashwin merged 13 commits from ng-gql-ipld into ng-rm-record-types 2024-01-11 09:46:41 +00:00
3 changed files with 80 additions and 25 deletions
Showing only changes of commit 69cf8f7b56 - Show all commits

View File

@ -3,12 +3,13 @@ package keeper_test
import ( import (
"context" "context"
"fmt" "fmt"
"os"
"reflect"
"github.com/cerc-io/laconicd/x/registry/client/cli" "github.com/cerc-io/laconicd/x/registry/client/cli"
"github.com/cerc-io/laconicd/x/registry/helpers" "github.com/cerc-io/laconicd/x/registry/helpers"
"github.com/cerc-io/laconicd/x/registry/keeper" "github.com/cerc-io/laconicd/x/registry/keeper"
registrytypes "github.com/cerc-io/laconicd/x/registry/types" registrytypes "github.com/cerc-io/laconicd/x/registry/types"
"os"
"reflect"
) )
func (suite *KeeperTestSuite) TestGrpcQueryParams() { func (suite *KeeperTestSuite) TestGrpcQueryParams() {

View File

@ -22,7 +22,10 @@ import (
paramtypes "github.com/cosmos/cosmos-sdk/x/params/types" paramtypes "github.com/cosmos/cosmos-sdk/x/params/types"
"github.com/tendermint/tendermint/libs/log" "github.com/tendermint/tendermint/libs/log"
cid "github.com/ipfs/go-cid"
"github.com/ipld/go-ipld-prime"
"github.com/ipld/go-ipld-prime/codec/dagjson" "github.com/ipld/go-ipld-prime/codec/dagjson"
cidlink "github.com/ipld/go-ipld-prime/linking/cid"
basicnode "github.com/ipld/go-ipld-prime/node/basic" basicnode "github.com/ipld/go-ipld-prime/node/basic"
) )
@ -147,10 +150,20 @@ func (k Keeper) ListRecords(ctx sdk.Context) []types.Record {
// RecordsFromAttributes gets a list of records whose attributes match all provided values // RecordsFromAttributes gets a list of records whose attributes match all provided values
func (k Keeper) RecordsFromAttributes(ctx sdk.Context, attributes []*types.QueryListRecordsRequest_KeyValueInput, all bool) ([]types.Record, error) { func (k Keeper) RecordsFromAttributes(ctx sdk.Context, attributes []*types.QueryListRecordsRequest_KeyValueInput, all bool) ([]types.Record, error) {
log := logger(ctx).With("function", "RecordsFromAttributes")
resultRecordIds := []string{} resultRecordIds := []string{}
for i, attr := range attributes { for i, attr := range attributes {
val := GetAttributeValue(attr.Value) val, err := EncodeAttributeValue2(attr.Value)
if err != nil {
return nil, err
}
attributeIndex := GetAttributesIndexKey(attr.Key, val) attributeIndex := GetAttributesIndexKey(attr.Key, val)
log.Debug("attribute index",
"key", attr.Key,
"value", val,
"value_type", fmt.Sprintf("%T", val),
"index", attributeIndex)
recordIds, err := k.GetAttributeMapping(ctx, attributeIndex) recordIds, err := k.GetAttributeMapping(ctx, attributeIndex)
if err != nil { if err != nil {
return nil, err return nil, err
@ -178,27 +191,38 @@ func (k Keeper) RecordsFromAttributes(ctx sdk.Context, attributes []*types.Query
return records, nil return records, nil
} }
func GetAttributeValue(input *types.QueryListRecordsRequest_ValueInput) interface{} { // TODO non recursive
func EncodeAttributeValue2(input *types.QueryListRecordsRequest_ValueInput) ([]byte, error) {
np := basicnode.Prototype.Any
nb := np.NewBuilder()
switch value := input.GetValue().(type) { switch value := input.GetValue().(type) {
case *types.QueryListRecordsRequest_ValueInput_String_: case *types.QueryListRecordsRequest_ValueInput_String_:
return value.String_ nb.AssignString(value.String_)
case *types.QueryListRecordsRequest_ValueInput_Int: case *types.QueryListRecordsRequest_ValueInput_Int:
return value.Int nb.AssignInt(value.Int)
case *types.QueryListRecordsRequest_ValueInput_Float: case *types.QueryListRecordsRequest_ValueInput_Float:
return value.Float nb.AssignFloat(value.Float)
case *types.QueryListRecordsRequest_ValueInput_Boolean: case *types.QueryListRecordsRequest_ValueInput_Boolean:
return value.Boolean nb.AssignBool(value.Boolean)
case *types.QueryListRecordsRequest_ValueInput_Link: case *types.QueryListRecordsRequest_ValueInput_Link:
return value.Link link := cidlink.Link{Cid: cid.MustParse(value.Link)}
nb.AssignLink(link)
case *types.QueryListRecordsRequest_ValueInput_Array: case *types.QueryListRecordsRequest_ValueInput_Array:
return value.Array // TODO
case *types.QueryListRecordsRequest_ValueInput_Map: case *types.QueryListRecordsRequest_ValueInput_Map:
return value.Map // TODO
case nil:
return nil
default: default:
return fmt.Errorf("Value has unexpected type %T", value) return nil, fmt.Errorf("Value has unexpected type %T", value)
} }
n := nb.Build()
var buf bytes.Buffer
if err := dagjson.Encode(n, &buf); err != nil {
return nil, err
}
value := buf.Bytes()
return value, nil
} }
func getIntersection(a []string, b []string) []string { func getIntersection(a []string, b []string) []string {
@ -341,22 +365,43 @@ func (k Keeper) PutRecord(ctx sdk.Context, record types.Record) {
k.updateBlockChangeSetForRecord(ctx, record.Id) k.updateBlockChangeSetForRecord(ctx, record.Id)
} }
func (k Keeper) processAttributes(ctx sdk.Context, attrs map[string]any, id string, prefix string) error { func (k Keeper) processAttributes(ctx sdk.Context, attrs []byte, id string, prefix string) error {
np := basicnode.Prototype.Any // Pick a style for the in-memory data. np := basicnode.Prototype.Map
nb := np.NewBuilder() // Create a builder. nb := np.NewBuilder()
err := dagjson.Decode(nb, bytes.NewReader(content)) // Hand the builder to decoding -- decoding will fill it in! err := dagjson.Decode(nb, bytes.NewReader(attrs))
if err != nil { if err != nil {
return "", err return err
}
n := nb.Build()
if n.Kind() != ipld.Kind_Map {
return fmt.Errorf("Record attributes must be a map, not %T", n.Kind())
}
return k.processAttributeMap(ctx, n, id, prefix)
} }
n := nb.Build() // Call 'Build' to get the resulting Node. (It's immutable!)
for key, value := range attrs { func (k Keeper) processAttributeMap(ctx sdk.Context, n ipld.Node, id string, prefix string) error {
if subRecord, ok := value.(map[string]any); ok { for it := n.MapIterator(); !it.Done(); {
err := k.processAttributes(ctx, subRecord, id, key) keynode, valuenode, err := it.Next()
if err != nil {
return err
}
key, err := keynode.AsString()
if err != nil {
return err
}
// for key, value := range attrs {
if valuenode.Kind() == ipld.Kind_Map {
err := k.processAttributeMap(ctx, valuenode, id, key)
if err != nil { if err != nil {
return err return err
} }
} else { } else {
var buf bytes.Buffer
if err := dagjson.Encode(valuenode, &buf); err != nil {
return err
}
value := buf.Bytes()
indexKey := GetAttributesIndexKey(prefix+key, value) indexKey := GetAttributesIndexKey(prefix+key, value)
if err := k.SetAttributeMapping(ctx, indexKey, id); err != nil { if err := k.SetAttributeMapping(ctx, indexKey, id); err != nil {
return err return err

View File

@ -13,6 +13,16 @@ const (
AuthorityUnderAuction = "auction" AuthorityUnderAuction = "auction"
) )
// TODO if schema records are to be more permissive than allowing a map of fields, this type will
// become specific to content records. schema records will either occupy a new message or have new
// more general purpose helper types.
type DagJsonBlob []byte
func (b DagJsonBlob) MarshalJSON() ([]byte, error) {
return b, nil
}
// ReadablePayload represents a signed record payload that can be serialized from/to YAML. // ReadablePayload represents a signed record payload that can be serialized from/to YAML.
type ReadablePayload struct { type ReadablePayload struct {
Record map[string]interface{} `json:"record"` Record map[string]interface{} `json:"record"`
@ -44,7 +54,6 @@ func (payloadObj *ReadablePayload) ToPayload() Payload {
}, },
Signatures: payloadObj.Signatures, Signatures: payloadObj.Signatures,
} }
// TODO rm error
return payload return payload
} }
@ -96,7 +105,7 @@ func (r *ReadableRecord) ToRecordObj() (Record, error) {
func (r *ReadableRecord) CanonicalJSON() []byte { func (r *ReadableRecord) CanonicalJSON() []byte {
bytes, err := canonicalJson.Marshal(r.Attributes) bytes, err := canonicalJson.Marshal(r.Attributes)
if err != nil { if err != nil {
panic("Record marshal error.") panic("Record marshal error: " + err.Error())
} }
return bytes return bytes