Compare commits

...

11 Commits

SHA1 Message Date
84035ba91e Enable CI on test-ci branch 2024-01-17 18:14:23 +05:30
bd128d090b More debug logs 2024-01-17 17:27:15 +05:30
f791ce828d Add debug comments 2024-01-17 17:06:23 +05:30
972b878674 Update naming test timeout 2024-01-16 17:35:23 +05:30
603fe5916a Update naming test timeout 2024-01-16 16:19:49 +05:30
b7223c1c43 Update container build script 2024-01-16 15:27:33 +05:30
e5900b2e4a Add container build script and update workflow 2024-01-16 14:34:37 +05:30
913389ab9f dockerfile cleanup 2024-01-15 11:55:25 +05:30
7e18b5a8cf refactor tests to run independently 2024-01-15 11:53:54 +05:30
7eb37f894d formatting 2024-01-15 11:31:21 +05:30
65001568c8 Remove hard-coded record types (#52)
Refactors the `Record.Attributes` from Any into a byte string.
Companion to cerc-io/laconicd#132.

Resolves https://github.com/cerc-io/laconicd/issues/107

Co-authored-by: Roy Crihfield <roy@manteia.ltd>
Co-authored-by: Prathamesh Musale <prathamesh.musale0@gmail.com>
Reviewed-on: cerc-io/laconic-sdk#52
Reviewed-by: Thomas E Lackey <telackey@noreply.git.vdb.to>
Co-authored-by: Nabarun <nabarun@deepstacksoft.com>
Co-committed-by: Nabarun <nabarun@deepstacksoft.com>
2024-01-15 04:58:55 +00:00
31 changed files with 1926 additions and 4185 deletions
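The central change in this range is the `Record.Attributes` refactor described in the commit message above: record attributes now travel as a byte string instead of a `google.protobuf.Any`. As a rough illustration of what that means on the SDK side (the JSON encoding and names below are assumptions for the sketch, not the SDK's actual helpers):

```typescript
// Sketch only: record attributes carried as a byte string rather than an Any wrapper.
// A plain JSON encoding is assumed purely for illustration; the SDK's real
// serialization may differ.
const attributes = {
  type: 'WebsiteRegistrationRecord',
  url: 'https://example.org',
  version: '0.0.1',
};

// Old shape (removed): { type_url: '...', value: Uint8Array } via google.protobuf.Any.
// New shape: one opaque byte string in the record's `attributes` field.
const attributeBytes: Uint8Array = new TextEncoder().encode(JSON.stringify(attributes));

// Reading a record back is symmetric: decode the bytes, then parse.
const decoded = JSON.parse(new TextDecoder().decode(attributeBytes));
console.log(decoded.url); // https://example.org
```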

.dockerignore Normal file

@@ -0,0 +1,2 @@
Dockerfile
node_modules


@@ -6,6 +6,7 @@ on:
   push:
     branches:
       - main
+      - test-ci
       - release/**
 
 env:
@@ -17,65 +18,51 @@ jobs:
     steps:
       - uses: actions/checkout@v3
 
      - name: Checkout laconicd
-        uses: actions/checkout@v3
-        with:
-          path: "./laconicd/"
-          repository: cerc-io/laconicd
-          fetch-depth: 0
-          ref: main
+        # uses: actions/checkout@v3
+        # with:
+        #   path: "./laconicd/"
+        #   repository: cerc-io/laconicd
+        #   fetch-depth: 0
+        #   ref: main
+        run: |
+          git clone https://git.vdb.to/deep-stack/laconicd.git
+          cd laconicd
+          git checkout pm-qol-improvements
 
       - name: Environment
         run: ls -tlh && env
 
       - name: Start dockerd
         run: |
           dockerd -H $DOCKER_HOST --userland-proxy=false &
           sleep 5
 
-      - name: build containers scripts
+      - name: Build laconicd container
         working-directory: laconicd/tests/sdk_tests
         run: ./build-laconicd-container.sh
 
-      - name: build test-container
-        run: docker build -t cerc-io/laconic-sdk-tester:local-test -f laconicd/tests/sdk_tests/Dockerfile-sdk .
+      - name: Build laconic-sdk container
+        run: ./scripts/build-sdk-test-container.sh
 
-      - name: start containers
+      - name: Start containers
         working-directory: laconicd/tests/sdk_tests
         run: docker compose up -d
 
-      - name: run basic tests
+      - name: Run tests
         working-directory: laconicd/tests/sdk_tests
-        run: |
-          laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
-          cosmos_chain_id=laconic_9000-1
-          laconicd_rest_endpoint=http://laconicd:1317
-          laconicd_gql_endpoint=http://laconicd:9473/api
-          sleep 30s
-          docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test"
-
-      - name: stop containers
-        working-directory: laconicd/tests/sdk_tests
-        run: docker compose down
-
-      - name: start auction containers
-        working-directory: laconicd/tests/sdk_tests
-        run: docker compose -f docker-compose-auctions.yml up -d
-
-      - name: run auction tests
-        working-directory: laconicd/tests/sdk_tests
-        run: |
-          laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
-          cosmos_chain_id=laconic_9000-1
-          laconicd_rest_endpoint=http://laconicd:1317
-          laconicd_gql_endpoint=http://laconicd:9473/api
-          sleep 30s
-          docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test:auctions"
-
-      - name: start containers
-        working-directory: laconicd/tests/sdk_tests
-        run: docker compose down
-
-      - name: start containers
-        working-directory: laconicd/tests/sdk_tests
-        run: docker compose -f docker-compose-nameservice.yml up -d
-
-      - name: run nameservice expiry tests
-        working-directory: laconicd/tests/sdk_tests
-        run: |
-          laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
-          cosmos_chain_id=laconic_9000-1
-          laconicd_rest_endpoint=http://laconicd:1317
-          laconicd_gql_endpoint=http://laconicd:9473/api
-          sleep 30s
-          docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test:nameservice-expiry"
-
-      - name: stop nameservice containers
-        working-directory: laconicd/tests/sdk_tests
-        run: docker compose down
+        run: ./run-tests.sh
+
+      - name: Start containers (auctions enabled)
+        working-directory: laconicd/tests/sdk_tests
+        env:
+          TEST_AUCTION_ENABLED: true
+        run: docker compose up -d
+
+      - name: Run auction tests
+        working-directory: laconicd/tests/sdk_tests
+        run: ./run-tests.sh test:auctions
+
+      - name: Start containers (expiry enabled)
+        working-directory: laconicd/tests/sdk_tests
+        env:
+          TEST_REGISTRY_EXPIRY: true
+        run: docker compose up -d
+
+      - name: Run nameservice expiry tests
+        working-directory: laconicd/tests/sdk_tests
+        run: ./run-tests.sh test:nameservice-expiry


@@ -20,53 +20,34 @@ jobs:
           ref: main
 
       - name: Environment
         run: ls -tlh && env
 
-      - name: build containers scripts
+      - name: Build laconicd container
         working-directory: laconicd/tests/sdk_tests
         run: ./build-laconicd-container.sh
 
-      - name: build test-container
-        run: docker build -t cerc-io/laconic-sdk-tester:local-test -f laconicd/tests/sdk_tests/Dockerfile-sdk .
+      - name: Build laconic-sdk container
+        run: ./scripts/build-sdk-test-container.sh
 
-      - name: start containers
+      - name: Start containers
         working-directory: laconicd/tests/sdk_tests
         run: docker compose up -d
 
-      - name: run basic tests
+      - name: Run tests
         working-directory: laconicd/tests/sdk_tests
-        run: |
-          laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
-          cosmos_chain_id=laconic_9000-1
-          laconicd_rest_endpoint=http://laconicd:1317
-          laconicd_gql_endpoint=http://laconicd:9473/api
-          sleep 30s
-          docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test"
-
-      - name: stop containers
-        working-directory: laconicd/tests/sdk_tests
-        run: docker compose down
-
-      - name: start auction containers
-        working-directory: laconicd/tests/sdk_tests
-        run: docker compose -f docker-compose-auctions.yml up -d
-
-      - name: run auction tests
-        working-directory: laconicd/tests/sdk_tests
-        run: |
-          laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
-          cosmos_chain_id=laconic_9000-1
-          laconicd_rest_endpoint=http://laconicd:1317
-          laconicd_gql_endpoint=http://laconicd:9473/api
-          sleep 30s
-          docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test:auctions"
-
-      - name: start containers
-        working-directory: laconicd/tests/sdk_tests
-        run: docker compose down
-
-      - name: start containers
-        working-directory: laconicd/tests/sdk_tests
-        run: docker compose -f docker-compose-nameservice.yml up -d
-
-      - name: run nameservice expiry tests
-        working-directory: laconicd/tests/sdk_tests
-        run: |
-          laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
-          cosmos_chain_id=laconic_9000-1
-          laconicd_rest_endpoint=http://laconicd:1317
-          laconicd_gql_endpoint=http://laconicd:9473/api
-          sleep 30s
-          docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test:nameservice-expiry"
-
-      - name: stop nameservice containers
-        working-directory: laconicd/tests/sdk_tests
-        run: docker compose down
+        run: ./run-tests.sh
+
+      - name: Start containers (auctions enabled)
+        working-directory: laconicd/tests/sdk_tests
+        env:
+          TEST_AUCTION_ENABLED: true
+        run: docker compose up -d
+
+      - name: Run auction tests
+        working-directory: laconicd/tests/sdk_tests
+        run: ./run-tests.sh test:auctions
+
+      - name: Start containers (expiry enabled)
+        working-directory: laconicd/tests/sdk_tests
+        env:
+          TEST_REGISTRY_EXPIRY: true
+        run: docker compose up -d
+
+      - name: Run nameservice expiry tests
+        working-directory: laconicd/tests/sdk_tests
+        run: ./run-tests.sh test:nameservice-expiry
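Both workflow variants now start the compose stack with a feature flag in the step environment (`TEST_AUCTION_ENABLED`, `TEST_REGISTRY_EXPIRY`) and delegate the test run to `./run-tests.sh` with an optional yarn target, replacing the inline key-export scripts. The SDK tests already gate suites on env flags (see `process.env.TEST_AUCTIONS_ENABLED` near the end of the naming tests below); a minimal sketch of that pattern, with illustrative suite names rather than the repo's actual layout, would be:

```typescript
// Sketch only: gate Jest suites on the env flags the workflow sets above.
const auctionsEnabled = process.env.TEST_AUCTION_ENABLED === 'true';
const expiryEnabled = process.env.TEST_REGISTRY_EXPIRY === 'true';

// describe.skip keeps skipped suites visible in the report when the
// corresponding laconicd feature was not enabled in docker compose.
(auctionsEnabled ? describe : describe.skip)('auction tests', () => {
  test('runs only when TEST_AUCTION_ENABLED=true', () => {
    expect(auctionsEnabled).toBe(true);
  });
});

(expiryEnabled ? describe : describe.skip)('nameservice expiry tests', () => {
  test('runs only when TEST_REGISTRY_EXPIRY=true', () => {
    expect(expiryEnabled).toBe(true);
  });
});
```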


@@ -12,13 +12,5 @@ Run following scripts when [proto files](./proto/) are updated.
 2. Generate typescript code for the proto files
 
    ```bash
-   ./scripts/create-proto-files.sh
+   ./scripts/proto-gen.sh
    ```
-
-3. Remove GRPC code from generated code
-
-   ```bash
-   ./scripts/remove-grpc.sh
-   ```
-
-   Reference: https://github.com/tharsis/evmosjs/tree/main/packages/proto#note


@@ -21,34 +21,18 @@ RUN \
     && npm config -g set prefix ${NPM_GLOBAL} \
     && su ${USERNAME} -c "npm config -g set prefix ${NPM_GLOBAL}" \
     # Install eslint
-    && su ${USERNAME} -c "umask 0002 && npm install -g eslint lerna jest" \
+    && su ${USERNAME} -c "umask 0002 && npm install -g eslint" \
     && npm cache clean --force > /dev/null 2>&1
 
-# [Optional] Uncomment this section to install additional OS packages.
-# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
-#     && apt-get -y install --no-install-recommends <your-package-list-here>
-
-# [Optional] Uncomment if you want to install an additional version of node using nvm
-# ARG EXTRA_NODE_VERSION=10
-# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}"
-
-# [Optional] Uncomment if you want to install more global node modules
-# RUN su node -c "npm install -g <your-package-list-here>"
-
 WORKDIR /
-RUN mkdir node_modules && mkdir proto && mkdir scripts && mkdir src
-COPY node_modules ./node_modules/
-COPY proto ./proto/
-COPY scripts ./scripts/
-COPY src ./src/
 COPY entrypoint.sh .
 ENTRYPOINT ["/entrypoint.sh"]
 
-# Placeholder CMD : generally this will be overridden at run time like :
-# docker run -it -v /home/builder/cerc/laconic-sdk:/workspace cerc/builder-js sh -c 'cd /workspace && yarn && yarn build'
 CMD node --version
 
-# Temp hack, clone the laconic-sdk repo here
-WORKDIR /app
+WORKDIR /app/laconic-sdk
+COPY package*.json .
 RUN yarn install
+COPY . .
 
 WORKDIR /app/laconic-sdk


@@ -34,6 +34,12 @@ Follow these steps to run the tests:
 - Run the tests with auctions enabled
 
+  - Remove laconicd data from previous run
+
+    ```bash
+    rm -rf ~/.laconicd
+    ```
+
   - In laconicd repo run:
 
     ```bash
@@ -54,6 +60,12 @@ Follow these steps to run the tests:
 - Run the tests for record and authority expiry
 
+  - Remove laconicd data from previous run
+
+    ```bash
+    rm -rf ~/.laconicd
+    ```
+
   - In laconicd repo run:
 
     ```bash


@@ -1,6 +1,6 @@
 {
   "name": "@cerc-io/laconic-sdk",
-  "version": "0.1.13",
+  "version": "0.1.14",
   "main": "dist/index.js",
   "types": "dist/index.d.ts",
   "repository": "git@github.com:cerc-io/laconic-sdk.git",


@@ -0,0 +1,115 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
syntax = "proto3";
package google.protobuf;
option cc_enable_arenas = true;
option go_package = "google.golang.org/protobuf/types/known/durationpb";
option java_package = "com.google.protobuf";
option java_outer_classname = "DurationProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
// A Duration represents a signed, fixed-length span of time represented
// as a count of seconds and fractions of seconds at nanosecond
// resolution. It is independent of any calendar and concepts like "day"
// or "month". It is related to Timestamp in that the difference between
// two Timestamp values is a Duration and it can be added or subtracted
// from a Timestamp. Range is approximately +-10,000 years.
//
// # Examples
//
// Example 1: Compute Duration from two Timestamps in pseudo code.
//
// Timestamp start = ...;
// Timestamp end = ...;
// Duration duration = ...;
//
// duration.seconds = end.seconds - start.seconds;
// duration.nanos = end.nanos - start.nanos;
//
// if (duration.seconds < 0 && duration.nanos > 0) {
// duration.seconds += 1;
// duration.nanos -= 1000000000;
// } else if (duration.seconds > 0 && duration.nanos < 0) {
// duration.seconds -= 1;
// duration.nanos += 1000000000;
// }
//
// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
//
// Timestamp start = ...;
// Duration duration = ...;
// Timestamp end = ...;
//
// end.seconds = start.seconds + duration.seconds;
// end.nanos = start.nanos + duration.nanos;
//
// if (end.nanos < 0) {
// end.seconds -= 1;
// end.nanos += 1000000000;
// } else if (end.nanos >= 1000000000) {
// end.seconds += 1;
// end.nanos -= 1000000000;
// }
//
// Example 3: Compute Duration from datetime.timedelta in Python.
//
// td = datetime.timedelta(days=3, minutes=10)
// duration = Duration()
// duration.FromTimedelta(td)
//
// # JSON Mapping
//
// In JSON format, the Duration type is encoded as a string rather than an
// object, where the string ends in the suffix "s" (indicating seconds) and
// is preceded by the number of seconds, with nanoseconds expressed as
// fractional seconds. For example, 3 seconds with 0 nanoseconds should be
// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
// be expressed in JSON format as "3.000000001s", and 3 seconds and 1
// microsecond should be expressed in JSON format as "3.000001s".
//
message Duration {
// Signed seconds of the span of time. Must be from -315,576,000,000
// to +315,576,000,000 inclusive. Note: these bounds are computed from:
// 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
int64 seconds = 1;
// Signed fractions of a second at nanosecond resolution of the span
// of time. Durations less than one second are represented with a 0
// `seconds` field and a positive or negative `nanos` field. For durations
// of one second or more, a non-zero value for the `nanos` field must be
// of the same sign as the `seconds` field. Must be from -999,999,999
// to +999,999,999 inclusive.
int32 nanos = 2;
}


@@ -0,0 +1,144 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
syntax = "proto3";
package google.protobuf;
option cc_enable_arenas = true;
option go_package = "google.golang.org/protobuf/types/known/timestamppb";
option java_package = "com.google.protobuf";
option java_outer_classname = "TimestampProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
// A Timestamp represents a point in time independent of any time zone or local
// calendar, encoded as a count of seconds and fractions of seconds at
// nanosecond resolution. The count is relative to an epoch at UTC midnight on
// January 1, 1970, in the proleptic Gregorian calendar which extends the
// Gregorian calendar backwards to year one.
//
// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
// second table is needed for interpretation, using a [24-hour linear
// smear](https://developers.google.com/time/smear).
//
// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
// restricting to that range, we ensure that we can convert to and from [RFC
// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
//
// # Examples
//
// Example 1: Compute Timestamp from POSIX `time()`.
//
// Timestamp timestamp;
// timestamp.set_seconds(time(NULL));
// timestamp.set_nanos(0);
//
// Example 2: Compute Timestamp from POSIX `gettimeofday()`.
//
// struct timeval tv;
// gettimeofday(&tv, NULL);
//
// Timestamp timestamp;
// timestamp.set_seconds(tv.tv_sec);
// timestamp.set_nanos(tv.tv_usec * 1000);
//
// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
//
// FILETIME ft;
// GetSystemTimeAsFileTime(&ft);
// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
//
// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
// Timestamp timestamp;
// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
//
// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
//
// long millis = System.currentTimeMillis();
//
// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
// .setNanos((int) ((millis % 1000) * 1000000)).build();
//
// Example 5: Compute Timestamp from Java `Instant.now()`.
//
// Instant now = Instant.now();
//
// Timestamp timestamp =
// Timestamp.newBuilder().setSeconds(now.getEpochSecond())
// .setNanos(now.getNano()).build();
//
// Example 6: Compute Timestamp from current time in Python.
//
// timestamp = Timestamp()
// timestamp.GetCurrentTime()
//
// # JSON Mapping
//
// In JSON format, the Timestamp type is encoded as a string in the
// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
// where {year} is always expressed using four digits while {month}, {day},
// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
// is required. A proto3 JSON serializer should always use UTC (as indicated by
// "Z") when printing the Timestamp type and a proto3 JSON parser should be
// able to accept both UTC and other timezones (as indicated by an offset).
//
// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
// 01:30 UTC on January 15, 2017.
//
// In JavaScript, one can convert a Date object to this format using the
// standard
// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
// method. In Python, a standard `datetime.datetime` object can be converted
// to this format using
// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
// the Joda Time's [`ISODateTimeFormat.dateTime()`](
// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()
// ) to obtain a formatter capable of generating timestamps in this format.
//
message Timestamp {
// Represents seconds of UTC time since Unix epoch
// 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
// 9999-12-31T23:59:59Z inclusive.
int64 seconds = 1;
// Non-negative fractions of a second at nanosecond resolution. Negative
// second values with fractions must still have non-negative nanos values
// that count forward in time. Must be from 0 to 999,999,999
// inclusive.
int32 nanos = 2;
}


@@ -1,131 +0,0 @@
syntax = "proto3";
package vulcanize.registry.v1beta1;
import "gogoproto/gogo.proto";
option go_package = "github.com/cerc-io/laconicd/x/registry/types";
message ServiceProviderRegistration {
string bond_id = 1 [(gogoproto.moretags) = "json:\"bondId\" yaml:\"bondId\""];
string laconic_id = 2 [(gogoproto.moretags) = "json:\"laconicId\" yaml:\"laconicId\""];
X500 x500 = 3 [(gogoproto.moretags) = "json:\"x500\" yaml:\"x500\""];
string type = 4 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string version = 6 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
}
message X500 {
string common_name = 1 [(gogoproto.moretags) = "json:\"commonName\" yaml:\"commonName\""];
string organization_unit = 2 [(gogoproto.moretags) = "json:\"organizationUnit\" yaml:\"organizationUnit\""];
string organization_name = 3 [(gogoproto.moretags) = "json:\"organizationName\" yaml:\"organizationName\""];
string locality_name = 4 [(gogoproto.moretags) = "json:\"localityName\" yaml:\"localityName\""];
string state_name = 5 [(gogoproto.moretags) = "json:\"stateName\" yaml:\"stateName\""];
string country = 6 [(gogoproto.moretags) = "json:\"country\" yaml:\"country\""];
}
message WebsiteRegistrationRecord {
string url = 1 [(gogoproto.moretags) = "json:\"url\" yaml:\"url\""];
string repo_registration_record_cid = 2
[(gogoproto.moretags) = "json:\"repoRegistrationRecordCID\" yaml:\"repoRegistrationRecordCID\""];
string build_artifact_cid = 3 [(gogoproto.moretags) = "json:\"buildArtifactCID\" yaml:\"buildArtifactCID\""];
string tls_cert_cid = 4 [(gogoproto.moretags) = "json:\"TLSCertCID\" yaml:\"TLSCertCID\""];
string type = 5 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string version = 6 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
}
message ApplicationRecord {
string type = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string name = 2 [(gogoproto.moretags) = "json:\"name\" yaml:\"name\""];
string description = 3 [(gogoproto.moretags) = "json:\"description\" yaml:\"description\""];
string version = 4 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string homepage = 5 [(gogoproto.moretags) = "json:\"homepage\" yaml:\"homepage\""];
string license = 6 [(gogoproto.moretags) = "json:\"license\" yaml:\"license\""];
string author = 7 [(gogoproto.moretags) = "json:\"author\" yaml:\"author\""];
repeated string repository = 8 [(gogoproto.moretags) = "json:\"repository\" yaml:\"repository\""];
string repository_ref = 9 [(gogoproto.moretags) = "json:\"repositoryRef\" yaml:\"repositoryRef\""];
string app_version = 10 [(gogoproto.moretags) = "json:\"appVersion\" yaml:\"appVersion\""];
string app_type = 11 [(gogoproto.moretags) = "json:\"appType\" yaml:\"appType\""];
string engines = 12 [(gogoproto.moretags) = "json:\"engines\" yaml:\"engines\""];
repeated string os = 13 [(gogoproto.moretags) = "json:\"os\" yaml:\"os\""];
repeated string cpu = 14 [(gogoproto.moretags) = "json:\"cpu\" yaml:\"cpu\""];
string meta = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
repeated string tags = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}
message ApplicationArtifact {
string type = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string name = 2 [(gogoproto.moretags) = "json:\"name\" yaml:\"name\""];
string description = 4 [(gogoproto.moretags) = "json:\"description\" yaml:\"description\""];
string version = 5 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string application = 6 [(gogoproto.moretags) = "json:\"application\" yaml:\"application\""];
string content_type = 7 [(gogoproto.moretags) = "json:\"contentType\" yaml:\"contentType\""];
string os = 8 [(gogoproto.moretags) = "json:\"os\" yaml:\"os\""];
string cpu = 9 [(gogoproto.moretags) = "json:\"cpu\" yaml:\"cpu\""];
repeated string uri = 10 [(gogoproto.moretags) = "json:\"uri\" yaml:\"uri\""];
string meta = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
repeated string tags = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}
message DnsRecord {
string type = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string name = 2 [(gogoproto.moretags) = "json:\"name\" yaml:\"name\""];
string version = 3 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string resource_type = 4 [(gogoproto.moretags) = "json:\"resourceType\" yaml:\"resourceType\""];
string value = 5 [(gogoproto.moretags) = "json:\"value\" yaml:\"value\""];
string request = 6 [(gogoproto.moretags) = "json:\"request\" yaml:\"request\""];
string meta = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
repeated string tags = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}
message ApplicationDeploymentRequest {
string type = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string name = 2 [(gogoproto.moretags) = "json:\"name\" yaml:\"name\""];
string version = 3 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string application = 4 [(gogoproto.moretags) = "json:\"application\" yaml:\"application\""];
string dns = 5 [(gogoproto.moretags) = "json:\"dns\" yaml:\"dns\""];
string config = 6 [(gogoproto.moretags) = "json:\"config\" yaml:\"config\""];
string deployment = 7 [(gogoproto.moretags) = "json:\"deployment\" yaml:\"deployment\""];
string meta = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
repeated string tags = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}
message ApplicationDeploymentRecord {
string type = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string name = 2 [(gogoproto.moretags) = "json:\"name\" yaml:\"name\""];
string description = 3 [(gogoproto.moretags) = "json:\"description\" yaml:\"description\""];
string version = 4 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string application = 5 [(gogoproto.moretags) = "json:\"application\" yaml:\"application\""];
string url = 6 [(gogoproto.moretags) = "json:\"url\" yaml:\"url\""];
string dns = 7 [(gogoproto.moretags) = "json:\"dns\" yaml:\"dns\""];
string request = 8 [(gogoproto.moretags) = "json:\"request\" yaml:\"request\""];
string meta = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
repeated string tags = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}
message ApplicationDeploymentRemovalRequest {
string type = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string version = 2 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string deployment = 3 [(gogoproto.moretags) = "json:\"deployment\" yaml:\"deployment\""];
string request = 4 [(gogoproto.moretags) = "json:\"request\" yaml:\"request\""];
string meta = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
repeated string tags = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}
message ApplicationDeploymentRemovalRecord {
string type = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string version = 2 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string deployment = 3 [(gogoproto.moretags) = "json:\"deployment\" yaml:\"deployment\""];
string request = 4 [(gogoproto.moretags) = "json:\"request\" yaml:\"request\""];
string meta = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
repeated string tags = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}
message GeneralRecord {
string type = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string name = 2 [(gogoproto.moretags) = "json:\"name\" yaml:\"name\""];
string description = 3 [(gogoproto.moretags) = "json:\"description\" yaml:\"description\""];
string version = 4 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string category = 5 [(gogoproto.moretags) = "json:\"category\" yaml:\"category\""];
string value = 6 [(gogoproto.moretags) = "json:\"value\" yaml:\"value\""];
string meta = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
repeated string tags = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}


@@ -67,17 +67,25 @@ message QueryParamsResponse {
 // QueryListRecordsRequest is request type for registry records list
 message QueryListRecordsRequest {
-  message ReferenceInput {
+  message LinkInput {
     string id = 1;
   }
 
+  message ArrayInput {
+    repeated ValueInput values = 1;
+  }
+
+  message MapInput {
+    map<string, ValueInput> values = 1;
+  }
+
   message ValueInput {
-    string type = 1;
-    string string = 2;
-    int64 int = 3;
-    double float = 4;
-    bool boolean = 5;
-    ReferenceInput reference = 6;
-    repeated ValueInput values = 7;
+    oneof value {
+      string string = 1;
+      int64 int = 2;
+      double float = 3;
+      bool boolean = 4;
+      string link = 5;
+      ArrayInput array = 6;
+      MapInput map = 7;
+    }
   }
 
   message KeyValueInput {
     string key = 1;
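For reference, the new `ValueInput` is a tagged union: exactly one member of the `oneof` is set, record references become plain `link` strings, and nesting goes through `ArrayInput`/`MapInput`. A TypeScript mirror of that shape (ad hoc type names for illustration, not the SDK's generated code) could look like:

```typescript
// Illustrative mirror of the new ValueInput oneof; not generated code.
type ValueInput =
  | { string: string }
  | { int: number }        // int64 in the proto
  | { float: number }
  | { boolean: boolean }
  | { link: string }       // replaces the old ReferenceInput message
  | { array: { values: ValueInput[] } }                  // ArrayInput
  | { map: { values: { [key: string]: ValueInput } } };  // MapInput

// Example: a KeyValueInput-style attribute filter for `tags: ["test"]`.
const tagsFilter: { key: string; value: ValueInput } = {
  key: 'tags',
  value: { array: { values: [{ string: 'test' }] } },
};
console.log(JSON.stringify(tagsFilter));
```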


@@ -5,7 +5,6 @@ import "google/protobuf/duration.proto";
 import "google/protobuf/timestamp.proto";
 import "gogoproto/gogo.proto";
 import "cosmos/base/v1beta1/coin.proto";
-import "google/protobuf/any.proto";
 
 option go_package = "github.com/cerc-io/laconicd/x/registry/types";
@@ -64,7 +63,7 @@ message Record {
   string expiry_time = 4 [(gogoproto.moretags) = "json:\"expiryTime\" yaml:\"expiryTime\""];
   bool deleted = 5;
   repeated string owners = 6 [(gogoproto.moretags) = "json:\"owners\" yaml:\"owners\""];
-  google.protobuf.Any attributes = 7 [(gogoproto.moretags) = "json:\"attributes\" yaml:\"attributes\""];
+  bytes attributes = 7 [(gogoproto.moretags) = "json:\"attributes\" yaml:\"attributes\""];
   repeated string names = 8 [(gogoproto.moretags) = "json:\"names\" yaml:\"names\""];
   string type = 9 [(gogoproto.moretags) = "json:\"types\" yaml:\"types\""];
 }


@@ -0,0 +1,2 @@
#!/usr/bin/env bash
docker build -t cerc/laconic-sdk-tester:local -f Dockerfile .


@@ -1,12 +0,0 @@
#!/bin/bash
# NOTE: protoc is required
I=$(pwd)/proto
DEST_TS=$(pwd)/src/proto/
mkdir -p $DEST_TS
protoc \
--plugin=protoc-gen-ts=./node_modules/.bin/protoc-gen-ts \
--ts_out=$DEST_TS \
--proto_path=$I \
$(find $(pwd)/proto/vulcanize -iname "*.proto")

scripts/proto-gen.sh Executable file

@@ -0,0 +1,40 @@
#!/bin/bash
# NOTE: protoc is required
set -e
REPO_ROOT=$(pwd)
I=$REPO_ROOT/proto
DEST_TS=$REPO_ROOT/src/proto/
echo "Generating protobuf files"
mkdir -p $DEST_TS
protoc \
--plugin=protoc-gen-ts=$REPO_ROOT/node_modules/.bin/protoc-gen-ts \
--ts_out=$DEST_TS \
--proto_path=$I \
$(find $REPO_ROOT/proto/vulcanize -iname "*.proto")
SED='sed -i'
if [[ "$OSTYPE" == "darwin"* ]]; then
SED='sed -i ""'
fi
echo "Removing gRPC references..."
# https://github.com/tharsis/evmosjs/tree/main/packages/proto#note
for file in $(find $REPO_ROOT/src/proto -type f)
do
line=$(grep -n '@grpc/grpc-js' $file | cut -f1 -d':')
if [[ -n "$line" ]] && [[ "$line" -gt 0 ]]; then
echo "Processing file: $file"
$SED "${line}d" ${file}
functions=$(grep -n 'interface GrpcUnaryServiceInterface' $file | cut -f1 -d':')
$SED "${functions},\$d" ${file}
echo '}' >> $file
fi
$SED '1s#^#/* eslint-disable */\n#' $file
$SED '1s#^#// @ts-nocheck\n#' $file
done


@@ -1,16 +0,0 @@
#!/bin/bash
echo $PWD
for file in $(find src/proto -type f)
do
line=$(grep -n '@grpc/grpc-js' $file | cut -f1 -d':')
if [[ $line -gt 0 ]];
then
echo "Processing file... $file"
sed -i "${line}d" ${file}
functions=$(grep -n 'interface GrpcUnaryServiceInterface' $file | cut -f1 -d':')
sed -i "${functions},\$d" ${file}
echo '}' >> $file
fi
sed -i '1s#^#/* eslint-disable */\n#' $file
sed -i '1s#^#// @ts-nocheck\n#' $file
done


@@ -166,10 +166,6 @@ export class Account {
     assert(message);
     const eipMessageDomain: any = message.eipToSign.domain;
 
-    if(message.eipToSign.message.msgs[0].value.payload!=null){
-      message.eipToSign.message.msgs[0].value.payload.record.attributes.value=Array.from(message.eipToSign.message.msgs[0].value.payload.record.attributes.value)
-    }
-
     const signature = signTypedData({
       data: {
         types: message.eipToSign.types as MessageTypes,


@@ -12,20 +12,10 @@ jest.setTimeout(90 * 1000);
 const bondTests = () => {
   let registry: Registry;
 
-  let watcher: any;
-  let version1: string;
-  let version2: string;
-
-  let bondId1: string;
-  let bondId2: string;
-  let bondOwner: string;
-
   const publishNewWatcherVersion = async (bondId: string) => {
-    watcher = await ensureUpdatedConfig(WATCHER_YML_PATH);
+    let watcher = await ensureUpdatedConfig(WATCHER_YML_PATH);
     await registry.setRecord({ privateKey, record: watcher.record, bondId }, privateKey, fee);
-    return watcher.record.version;
+    return watcher;
   };
 
   beforeAll(async () => {
@@ -33,93 +23,119 @@ const bondTests = () => {
   });
 
   test('Create bond.', async () => {
-    bondId1 = await registry.getNextBondId(privateKey);
-    expect(bondId1).toBeDefined();
+    let bondId = await registry.getNextBondId(privateKey);
+    expect(bondId).toBeDefined();
 
     await registry.createBond({ denom: 'aphoton', amount: '1000000000' }, privateKey, fee);
   });
 
+  describe('With bond created', () => {
+    let bond1: any
+
+    beforeAll(async () => {
+      let bondId1 = await registry.getNextBondId(privateKey);
+      expect(bondId1).toBeDefined();
+      await registry.createBond({ denom: 'aphoton', amount: '1000000000' }, privateKey, fee);
+
+      [bond1] = await registry.getBondsByIds([bondId1]);
+      expect(bond1).toBeDefined();
+      expect(bond1.id).toEqual(bondId1);
+    })
+
     test('Get bond by ID.', async () => {
-      const [bond] = await registry.getBondsByIds([bondId1]);
+      const [bond] = await registry.getBondsByIds([bond1.id]);
       expect(bond).toBeDefined();
-      expect(bond.id).toBe(bondId1);
+      expect(bond.id).toBe(bond1.id);
       expect(bond.balance).toHaveLength(1);
       expect(bond.balance[0]).toEqual({ type: 'aphoton', quantity: '1000000000' });
-      bondOwner = bond.owner;
     });
 
     test('Query bonds.', async () => {
       const bonds = await registry.queryBonds();
       expect(bonds).toBeDefined();
-      const bond = bonds.filter((bond: any) => bond.id === bondId1);
+      const bond = bonds.filter((bond: any) => bond.id === bond1.id);
       expect(bond).toBeDefined();
     });
 
     test('Query bonds by owner.', async () => {
-      const bonds = await registry.queryBonds({ owner: bondOwner });
+      const bonds = await registry.queryBonds({ owner: bond1.owner });
       expect(bonds).toBeDefined();
-      const bond = bonds.filter((bond: any) => bond.id === bondId1);
+      const bond = bonds.filter((bond: any) => bond.id === bond1.id);
       expect(bond).toBeDefined();
     });
 
     test('Refill bond.', async () => {
-      await registry.refillBond({ id: bondId1, denom: 'aphoton', amount: '500' }, privateKey, fee);
-      const [bond] = await registry.getBondsByIds([bondId1]);
+      await registry.refillBond({ id: bond1.id, denom: 'aphoton', amount: '500' }, privateKey, fee);
+      const [bond] = await registry.getBondsByIds([bond1.id]);
       expect(bond).toBeDefined();
-      expect(bond.id).toBe(bondId1);
+      expect(bond.id).toBe(bond1.id);
       expect(bond.balance).toHaveLength(1);
       expect(bond.balance[0]).toEqual({ type: 'aphoton', quantity: '1000000500' });
     });
 
     test('Withdraw bond.', async () => {
-      await registry.withdrawBond({ id: bondId1, denom: 'aphoton', amount: '500' }, privateKey, fee);
-      const [bond] = await registry.getBondsByIds([bondId1]);
+      await registry.withdrawBond({ id: bond1.id, denom: 'aphoton', amount: '500' }, privateKey, fee);
+      const [bond] = await registry.getBondsByIds([bond1.id]);
      expect(bond).toBeDefined();
-      expect(bond.id).toBe(bondId1);
+      expect(bond.id).toBe(bond1.id);
      expect(bond.balance).toHaveLength(1);
      expect(bond.balance[0]).toEqual({ type: 'aphoton', quantity: '1000000000' });
    });
 
     test('Cancel bond.', async () => {
-      await registry.cancelBond({ id: bondId1 }, privateKey, fee);
-      const [bond] = await registry.getBondsByIds([bondId1]);
+      await registry.cancelBond({ id: bond1.id }, privateKey, fee);
+      const [bond] = await registry.getBondsByIds([bond1.id]);
       expect(bond.id).toBe("");
       expect(bond.owner).toBe("");
       expect(bond.balance).toHaveLength(0);
     });
+  });
 
   test('Associate/Dissociate bond.', async () => {
+    let bondId1: string;
     bondId1 = await registry.getNextBondId(privateKey);
     expect(bondId1).toBeDefined();
     await registry.createBond({ denom: 'aphoton', amount: '1000000000' }, privateKey, fee);
 
     // Create a new record.
-    version1 = await publishNewWatcherVersion(bondId1);
-    let [record1] = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version1 }, true);
+    let watcher = await publishNewWatcherVersion(bondId1);
+    let query = { type: watcher.record.type, url: watcher.record.url, version: watcher.record.version };
+    let [record1] = await registry.queryRecords(query, true);
     expect(record1.bondId).toBe(bondId1);
 
     // Dissociate record, query and confirm.
     await registry.dissociateBond({ recordId: record1.id }, privateKey, fee);
-    [record1] = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version1 }, true);
+    [record1] = await registry.queryRecords(query, true);
     expect(record1.bondId).toBe('');
 
     // Associate record with bond, query and confirm.
     await registry.associateBond({ recordId: record1.id, bondId: bondId1 }, privateKey, fee);
-    [record1] = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version1 }, true);
+    [record1] = await registry.queryRecords(query, true);
     expect(record1.bondId).toBe(bondId1);
   });
 
   test('Reassociate/Dissociate records.', async () => {
+    let bondId1: string;
+    let bondId2: string;
+
+    bondId1 = await registry.getNextBondId(privateKey);
+    expect(bondId1).toBeDefined();
+    await registry.createBond({ denom: 'aphoton', amount: '1000000000' }, privateKey, fee);
+
     // Create a new record version.
-    version2 = await publishNewWatcherVersion(bondId1);
+    let watcher = await publishNewWatcherVersion(bondId1);
+    let queryv1 = { type: watcher.record.type, url: watcher.record.url, version: watcher.record.version };
+    let queryv2 = { type: watcher.record.type, url: watcher.record.url, version: watcher.record.version };
 
     // Check version1, version2 as associated with bondId1.
     let records;
-    records = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version1 }, true);
+    records = await registry.queryRecords(queryv1, true);
     expect(records[0].bondId).toBe(bondId1);
-    records = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version2 }, true);
+    records = await registry.queryRecords(queryv2, true);
     expect(records[0].bondId).toBe(bondId1);
 
     // Create another bond.
@@ -131,16 +147,16 @@ const bondTests = () => {
     // Reassociate records from bondId1 to bondId2, verify change.
     await registry.reassociateRecords({ oldBondId: bondId1, newBondId: bondId2 }, privateKey, fee);
-    records = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version1 }, true);
+    records = await registry.queryRecords(queryv1, true);
     expect(records[0].bondId).toBe(bondId2);
-    records = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version2 }, true);
+    records = await registry.queryRecords(queryv2, true);
     expect(records[0].bondId).toBe(bondId2);
 
     // Dissociate all records from bond, verify change.
     await registry.dissociateRecords({ bondId: bondId2 }, privateKey, fee);
-    records = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version1 }, true);
+    records = await registry.queryRecords(queryv1, true);
     expect(records[0].bondId).toBe('');
-    records = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version2 }, true);
+    records = await registry.queryRecords(queryv2, true);
     expect(records[0].bondId).toBe('');
   });
 };


@@ -44,11 +44,7 @@ const MSG_SET_RECORD_TYPES = {
     { name: 'create_time', type: 'string' },
     { name: 'expiry_time', type: 'string' },
     { name: 'deleted', type: 'bool' },
-    { name: 'attributes', type: 'TypePayloadRecordAttributes' },
-  ],
-  TypePayloadRecordAttributes: [
-    { name: 'type_url', type: 'string' },
-    { name: 'value', type: 'uint8[]' },
+    { name: 'attributes', type: 'bytes' },
   ],
   TypePayloadSignatures: [
     { name: 'sig', type: 'string' },


@ -7,7 +7,7 @@ import { ensureUpdatedConfig, getConfig } from './testing/helper';
const WATCHER_YML_PATH = path.join(__dirname, './testing/data/watcher.yml'); const WATCHER_YML_PATH = path.join(__dirname, './testing/data/watcher.yml');
jest.setTimeout(120 * 1000); jest.setTimeout(5 * 60 * 1000);
const { chainId, restEndpoint, gqlEndpoint, privateKey, fee } = getConfig(); const { chainId, restEndpoint, gqlEndpoint, privateKey, fee } = getConfig();
@ -18,21 +18,21 @@ const namingTests = () => {
let watcher: any; let watcher: any;
let watcherId: string; let watcherId: string;
let authorityName: string;
let otherAuthorityName: string;
let otherPrivateKey: string;
let crn: string;
beforeAll(async () => { beforeAll(async () => {
console.log('running parent beforeAll')
registry = new Registry(gqlEndpoint, restEndpoint, chainId); registry = new Registry(gqlEndpoint, restEndpoint, chainId);
console.log('registry = new Registry')
// Create bond. // Create bond.
bondId = await registry.getNextBondId(privateKey); bondId = await registry.getNextBondId(privateKey);
console.log('bondId', bondId)
await registry.createBond({ denom: 'aphoton', amount: '2000000000' }, privateKey, fee); await registry.createBond({ denom: 'aphoton', amount: '2000000000' }, privateKey, fee);
console.log('done registry.createBond')
// Create watcher. // Create watcher.
watcher = await ensureUpdatedConfig(WATCHER_YML_PATH); watcher = await ensureUpdatedConfig(WATCHER_YML_PATH);
console.log('done await ensureUpdatedConfig')
const result = await registry.setRecord( const result = await registry.setRecord(
{ {
privateKey, privateKey,
@ -44,13 +44,33 @@ const namingTests = () => {
) )
watcherId = result.data.id; watcherId = result.data.id;
console.log('watcherId', watcherId)
console.log('done running parent beforeAll')
}); });
describe('Authority tests', () => {
test('Reserve authority.', async () => { test('Reserve authority.', async () => {
authorityName = `laconic-${Date.now()}`; const authorityName = `laconic-${Date.now()}`;
await registry.reserveAuthority({ name: authorityName }, privateKey, fee); await registry.reserveAuthority({ name: authorityName }, privateKey, fee);
console.log('done Reserve authority')
}); });
describe('With authority reserved', () => {
let authorityName: string;
let crn: string;
beforeAll(async () => {
console.log('running beforeAll')
authorityName = `laconic-${Date.now()}`;
crn = `crn://${authorityName}/app/test`;
await registry.reserveAuthority({ name: authorityName }, privateKey, fee);
console.log('done running beforeAll')
})
test('Lookup authority.', async () => { test('Lookup authority.', async () => {
const [record] = await registry.lookupAuthorities([authorityName]); const [record] = await registry.lookupAuthorities([authorityName]);
@ -58,6 +78,7 @@ const namingTests = () => {
expect(record.ownerAddress).not.toBe(''); expect(record.ownerAddress).not.toBe('');
expect(record.ownerPublicKey).not.toBe(''); expect(record.ownerPublicKey).not.toBe('');
expect(Number(record.height)).toBeGreaterThan(0); expect(Number(record.height)).toBeGreaterThan(0);
console.log('done Lookup authority')
}); });
test('Lookup non existing authority', async () => { test('Lookup non existing authority', async () => {
@ -66,10 +87,13 @@ const namingTests = () => {
expect(record.ownerAddress).toBe(''); expect(record.ownerAddress).toBe('');
expect(record.ownerPublicKey).toBe(''); expect(record.ownerPublicKey).toBe('');
expect(Number(record.height)).toBe(0); expect(Number(record.height)).toBe(0);
console.log('done Lookup non existing authority')
}); });
test('Reserve already reserved authority', async () => { test('Reserve already reserved authority', async () => {
await expect(registry.reserveAuthority({ name: authorityName }, privateKey, fee)).rejects.toThrow('Name already reserved.'); await expect(registry.reserveAuthority({ name: authorityName }, privateKey, fee)).
rejects.toThrow('Name already reserved.');
console.log('done Reserve already reserved authority')
}); });
test('Reserve sub-authority.', async () => { test('Reserve sub-authority.', async () => {
@ -81,6 +105,7 @@ const namingTests = () => {
expect(record.ownerAddress).not.toBe(''); expect(record.ownerAddress).not.toBe('');
expect(record.ownerPublicKey).not.toBe(''); expect(record.ownerPublicKey).not.toBe('');
expect(Number(record.height)).toBeGreaterThan(0); expect(Number(record.height)).toBeGreaterThan(0);
console.log('done Reserve sub-authority')
}); });
test('Reserve sub-authority with different owner.', async () => { test('Reserve sub-authority with different owner.', async () => {
@ -105,23 +130,61 @@ const namingTests = () => {
}); });
test('Set name for unbonded authority', async () => { test('Set name for unbonded authority', async () => {
crn = `crn://${authorityName}/app/test`;
assert(watcherId) assert(watcherId)
await expect(registry.setName({ crn, cid: watcherId }, privateKey, fee)).rejects.toThrow('Authority bond not found.'); await expect(registry.setName({ crn, cid: watcherId }, privateKey, fee)).
rejects.toThrow('Authority bond not found.');
}); });
test('Set authority bond', async () => { test('Set authority bond', async () => {
await registry.setAuthorityBond({ name: authorityName, bondId }, privateKey, fee); await registry.setAuthorityBond({ name: authorityName, bondId }, privateKey, fee);
}); });
});
});
describe('Naming tests', () => {
let authorityName: string;
let otherAuthorityName: string;
let otherPrivateKey: string;
let otherAccount: Account;
beforeAll(async () => {
authorityName = `laconic-${Date.now()}`;
await registry.reserveAuthority({ name: authorityName }, privateKey, fee);
await registry.setAuthorityBond({ name: authorityName, bondId }, privateKey, fee);
// Create another account.
const mnenonic = Account.generateMnemonic();
otherAccount = await Account.generateFromMnemonic(mnenonic);
await registry.sendCoins({ denom: 'aphoton', amount: '1000000000', destinationAddress: otherAccount.formattedCosmosAddress }, privateKey, fee);
otherAuthorityName = `other-${Date.now()}`;
otherPrivateKey = otherAccount.privateKey.toString('hex');
});
test('Set name', async () => { test('Set name', async () => {
crn = `crn://${authorityName}/app/test`; const crn = `crn://${authorityName}/app/test1`;
await registry.setName({ crn, cid: watcherId }, privateKey, fee); await registry.setName({ crn, cid: watcherId }, privateKey, fee);
// Query records should return it (some CRN points to it). // Query records should return it (some CRN points to it).
const records = await registry.queryRecords({ type: 'WebsiteRegistrationRecord', version: watcher.record.version }); const [record] = await registry.queryRecords({ type: 'WebsiteRegistrationRecord', version: watcher.record.version });
expect(records).toBeDefined(); expect(record).toBeDefined();
expect(records).toHaveLength(1); expect(record.names).toHaveLength(1);
await registry.deleteName({ crn }, privateKey, fee);
});
describe('With name set', () => {
let crn: string;
beforeAll(async () => {
crn = `crn://${authorityName}/app/test2`;
await registry.setName({ crn, cid: watcherId }, privateKey, fee);
});
afterAll(async () => {
await registry.deleteName({ crn }, privateKey, fee);
}); });
test('Lookup name', async () => { test('Lookup name', async () => {
@ -179,42 +242,6 @@ const namingTests = () => {
expect(oldRecord.height).toBeDefined(); expect(oldRecord.height).toBeDefined();
}); });
test('Set name without reserving authority', async () => {
await expect(registry.setName({ crn: 'crn://not-reserved/app/test', cid: watcherId }, privateKey, fee))
.rejects.toThrow('Name authority not found.');
});
test('Set name for non-owned authority', async () => {
// Create another account.
const mnenonic = Account.generateMnemonic();
const otherAccount = await Account.generateFromMnemonic(mnenonic);
await registry.sendCoins({ denom: 'aphoton', amount: '1000000000', destinationAddress: otherAccount.formattedCosmosAddress }, privateKey, fee);
// Other account reserves an authority.
otherAuthorityName = `other-${Date.now()}`;
otherPrivateKey = otherAccount.privateKey.toString('hex');
await registry.reserveAuthority({ name: otherAuthorityName }, otherPrivateKey, fee);
// Try setting name under other authority.
await expect(registry.setName({ crn: `crn://${otherAuthorityName}/app/test`, cid: watcherId }, privateKey, fee)).rejects.toThrow('Access denied.');
});
test('Lookup non existing name', async () => {
const records = await registry.lookupNames(['crn://not-reserved/app/test']);
expect(records).toBeDefined();
expect(records).toHaveLength(1);
const [record] = records;
expect(record).toBeNull();
});
test('Resolve non existing name', async () => {
const records = await registry.resolveNames(['crn://not-reserved/app/test']);
expect(records).toBeDefined();
expect(records).toHaveLength(1);
const [record] = records;
expect(record).toBeNull();
});
test('Delete name', async () => { test('Delete name', async () => {
await registry.deleteName({ crn }, privateKey, fee); await registry.deleteName({ crn }, privateKey, fee);
@ -241,6 +268,7 @@ const namingTests = () => {
test('Delete already deleted name', async () => { test('Delete already deleted name', async () => {
await registry.deleteName({ crn }, privateKey, fee); await registry.deleteName({ crn }, privateKey, fee);
await registry.deleteName({ crn }, privateKey, fee);
const records = await registry.lookupNames([crn], true); const records = await registry.lookupNames([crn], true);
expect(records).toBeDefined(); expect(records).toBeDefined();
@ -252,6 +280,22 @@ const namingTests = () => {
expect(latest.id).toBe(''); expect(latest.id).toBe('');
expect(latest.height).toBeDefined(); expect(latest.height).toBeDefined();
}); });
});
test('Set name without reserving authority', async () => {
await expect(registry.setName({ crn: 'crn://not-reserved/app/test', cid: watcherId }, privateKey, fee))
.rejects.toThrow('Name authority not found.');
});
test('Set name for non-owned authority', async () => {
await registry.sendCoins({ denom: 'aphoton', amount: '1000000000', destinationAddress: otherAccount.formattedCosmosAddress }, privateKey, fee);
// Other account reserves an authority.
await registry.reserveAuthority({ name: otherAuthorityName }, otherPrivateKey, fee);
// Try setting name under other authority.
await expect(registry.setName({ crn: `crn://${otherAuthorityName}/app/test`, cid: watcherId }, privateKey, fee)).rejects.toThrow('Access denied.');
});
test('Delete name for non-owned authority.', async () => { test('Delete name for non-owned authority.', async () => {
const otherBondId = await registry.getNextBondId(otherPrivateKey); const otherBondId = await registry.getNextBondId(otherPrivateKey);
@ -262,6 +306,23 @@ const namingTests = () => {
// Try deleting name under other authority. // Try deleting name under other authority.
await expect(registry.deleteName({ crn: `crn://${otherAuthorityName}/app/test` }, privateKey, fee)).rejects.toThrow('Access denied.'); await expect(registry.deleteName({ crn: `crn://${otherAuthorityName}/app/test` }, privateKey, fee)).rejects.toThrow('Access denied.');
}); });
test('Lookup non existing name', async () => {
const records = await registry.lookupNames(['crn://not-reserved/app/test']);
expect(records).toBeDefined();
expect(records).toHaveLength(1);
const [record] = records;
expect(record).toBeNull();
});
test('Resolve non existing name', async () => {
const records = await registry.resolveNames(['crn://not-reserved/app/test']);
expect(records).toBeDefined();
expect(records).toHaveLength(1);
const [record] = records;
expect(record).toBeNull();
});
});
}; };
if (process.env.TEST_AUCTIONS_ENABLED) { if (process.env.TEST_AUCTIONS_ENABLED) {
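The restructured tests above walk one name through its full lifecycle. A minimal sketch of that flow, assuming a `registry` client, `privateKey`, `fee`, `watcherId`, and reserved `authorityName` configured as in the surrounding suite (result shapes in the comments follow the assertions above and are otherwise assumed):

```typescript
// Hypothetical helper, not part of the test suite.
async function nameLifecycle(
  registry: any,
  privateKey: string,
  fee: any,
  watcherId: string,
  authorityName: string
) {
  const crn = `crn://${authorityName}/app/example`;

  // Point the CRN at an existing record.
  await registry.setName({ crn, cid: watcherId }, privateKey, fee);

  // Lookup returns name records; `latest.id` is the CID the name points to (assumed shape).
  const [nameRecord] = await registry.lookupNames([crn], true);
  console.log(nameRecord.latest.id);

  // Resolve returns the underlying record (or null for unknown names).
  const [resolved] = await registry.resolveNames([crn]);
  console.log(resolved);

  // Delete clears the pointer; lookups afterwards report an empty `latest.id`.
  await registry.deleteName({ crn }, privateKey, fee);
}
```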

View File

@ -1,101 +0,0 @@
// @ts-nocheck
/* eslint-disable */
/**
* Generated by the protoc-gen-ts. DO NOT EDIT!
* compiler version: 4.25.1
* source: google/protobuf/any.proto
* git: https://github.com/thesayyn/protoc-gen-ts */
import * as dependency_1 from "./../../gogoproto/gogo";
import * as pb_1 from "google-protobuf";
export namespace google.protobuf {
export class Any extends pb_1.Message {
#one_of_decls: number[][] = [];
constructor(data?: any[] | {
type_url?: string;
value?: Uint8Array;
}) {
super();
pb_1.Message.initialize(this, Array.isArray(data) ? data : [], 0, -1, [], this.#one_of_decls);
if (!Array.isArray(data) && typeof data == "object") {
if ("type_url" in data && data.type_url != undefined) {
this.type_url = data.type_url;
}
if ("value" in data && data.value != undefined) {
this.value = data.value;
}
}
}
get type_url() {
return pb_1.Message.getFieldWithDefault(this, 1, "") as string;
}
set type_url(value: string) {
pb_1.Message.setField(this, 1, value);
}
get value() {
return pb_1.Message.getFieldWithDefault(this, 2, new Uint8Array(0)) as Uint8Array;
}
set value(value: Uint8Array) {
pb_1.Message.setField(this, 2, value);
}
static fromObject(data: {
type_url?: string;
value?: Uint8Array;
}): Any {
const message = new Any({});
if (data.type_url != null) {
message.type_url = data.type_url;
}
if (data.value != null) {
message.value = data.value;
}
return message;
}
toObject() {
const data: {
type_url?: string;
value?: Uint8Array;
} = {};
if (this.type_url != null) {
data.type_url = this.type_url;
}
if (this.value != null) {
data.value = this.value;
}
return data;
}
serialize(): Uint8Array;
serialize(w: pb_1.BinaryWriter): void;
serialize(w?: pb_1.BinaryWriter): Uint8Array | void {
const writer = w || new pb_1.BinaryWriter();
if (this.type_url.length)
writer.writeString(1, this.type_url);
if (this.value.length)
writer.writeBytes(2, this.value);
if (!w)
return writer.getResultBuffer();
}
static deserialize(bytes: Uint8Array | pb_1.BinaryReader): Any {
const reader = bytes instanceof pb_1.BinaryReader ? bytes : new pb_1.BinaryReader(bytes), message = new Any();
while (reader.nextField()) {
if (reader.isEndGroup())
break;
switch (reader.getFieldNumber()) {
case 1:
message.type_url = reader.readString();
break;
case 2:
message.value = reader.readBytes();
break;
default: reader.skipField();
}
}
return message;
}
serializeBinary(): Uint8Array {
return this.serialize();
}
static deserializeBinary(bytes: Uint8Array): Any {
return Any.deserialize(bytes);
}
}
}
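For context, the generated wrapper deleted above carried an arbitrary serialized payload next to its `type_url`. Roughly how it was used before this change (a sketch against the pre-change tree; the import path is assumed and the class is removed by this diff):

```typescript
import * as any_pb from './proto/google/protobuf/any'; // removed by this change

const wrapped = new any_pb.google.protobuf.Any({
  type_url: '/vulcanize.registry.v1beta1.WebsiteRegistrationRecord',
  value: new TextEncoder().encode('<serialized record bytes>'),
});

// Round trip through the binary encoding.
const decoded = any_pb.google.protobuf.Any.deserializeBinary(wrapped.serializeBinary());
console.log(decoded.type_url); // '/vulcanize.registry.v1beta1.WebsiteRegistrationRecord'
```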

File diff suppressed because it is too large


View File

@ -239,7 +239,7 @@ export namespace vulcanize.registry.v1beta1 {
} }
} }
export namespace QueryListRecordsRequest { export namespace QueryListRecordsRequest {
export class ReferenceInput extends pb_1.Message { export class LinkInput extends pb_1.Message {
#one_of_decls: number[][] = []; #one_of_decls: number[][] = [];
constructor(data?: any[] | { constructor(data?: any[] | {
id?: string; id?: string;
@ -260,8 +260,8 @@ export namespace vulcanize.registry.v1beta1 {
} }
static fromObject(data: { static fromObject(data: {
id?: string; id?: string;
}): ReferenceInput { }): LinkInput {
const message = new ReferenceInput({}); const message = new LinkInput({});
if (data.id != null) { if (data.id != null) {
message.id = data.id; message.id = data.id;
} }
@ -285,8 +285,8 @@ export namespace vulcanize.registry.v1beta1 {
if (!w) if (!w)
return writer.getResultBuffer(); return writer.getResultBuffer();
} }
static deserialize(bytes: Uint8Array | pb_1.BinaryReader): ReferenceInput { static deserialize(bytes: Uint8Array | pb_1.BinaryReader): LinkInput {
const reader = bytes instanceof pb_1.BinaryReader ? bytes : new pb_1.BinaryReader(bytes), message = new ReferenceInput(); const reader = bytes instanceof pb_1.BinaryReader ? bytes : new pb_1.BinaryReader(bytes), message = new LinkInput();
while (reader.nextField()) { while (reader.nextField()) {
if (reader.isEndGroup()) if (reader.isEndGroup())
break; break;
@ -302,27 +302,220 @@ export namespace vulcanize.registry.v1beta1 {
serializeBinary(): Uint8Array { serializeBinary(): Uint8Array {
return this.serialize(); return this.serialize();
} }
static deserializeBinary(bytes: Uint8Array): ReferenceInput { static deserializeBinary(bytes: Uint8Array): LinkInput {
return ReferenceInput.deserialize(bytes); return LinkInput.deserialize(bytes);
} }
} }
export class ValueInput extends pb_1.Message { export class ArrayInput extends pb_1.Message {
#one_of_decls: number[][] = []; #one_of_decls: number[][] = [];
constructor(data?: any[] | { constructor(data?: any[] | {
type?: string;
string?: string;
int?: number;
float?: number;
boolean?: boolean;
reference?: QueryListRecordsRequest.ReferenceInput;
values?: QueryListRecordsRequest.ValueInput[]; values?: QueryListRecordsRequest.ValueInput[];
}) { }) {
super(); super();
pb_1.Message.initialize(this, Array.isArray(data) ? data : [], 0, -1, [7], this.#one_of_decls); pb_1.Message.initialize(this, Array.isArray(data) ? data : [], 0, -1, [1], this.#one_of_decls);
if (!Array.isArray(data) && typeof data == "object") { if (!Array.isArray(data) && typeof data == "object") {
if ("type" in data && data.type != undefined) { if ("values" in data && data.values != undefined) {
this.type = data.type; this.values = data.values;
} }
}
}
get values() {
return pb_1.Message.getRepeatedWrapperField(this, QueryListRecordsRequest.ValueInput, 1) as QueryListRecordsRequest.ValueInput[];
}
set values(value: QueryListRecordsRequest.ValueInput[]) {
pb_1.Message.setRepeatedWrapperField(this, 1, value);
}
static fromObject(data: {
values?: ReturnType<typeof QueryListRecordsRequest.ValueInput.prototype.toObject>[];
}): ArrayInput {
const message = new ArrayInput({});
if (data.values != null) {
message.values = data.values.map(item => QueryListRecordsRequest.ValueInput.fromObject(item));
}
return message;
}
toObject() {
const data: {
values?: ReturnType<typeof QueryListRecordsRequest.ValueInput.prototype.toObject>[];
} = {};
if (this.values != null) {
data.values = this.values.map((item: QueryListRecordsRequest.ValueInput) => item.toObject());
}
return data;
}
serialize(): Uint8Array;
serialize(w: pb_1.BinaryWriter): void;
serialize(w?: pb_1.BinaryWriter): Uint8Array | void {
const writer = w || new pb_1.BinaryWriter();
if (this.values.length)
writer.writeRepeatedMessage(1, this.values, (item: QueryListRecordsRequest.ValueInput) => item.serialize(writer));
if (!w)
return writer.getResultBuffer();
}
static deserialize(bytes: Uint8Array | pb_1.BinaryReader): ArrayInput {
const reader = bytes instanceof pb_1.BinaryReader ? bytes : new pb_1.BinaryReader(bytes), message = new ArrayInput();
while (reader.nextField()) {
if (reader.isEndGroup())
break;
switch (reader.getFieldNumber()) {
case 1:
reader.readMessage(message.values, () => pb_1.Message.addToRepeatedWrapperField(message, 1, QueryListRecordsRequest.ValueInput.deserialize(reader), QueryListRecordsRequest.ValueInput));
break;
default: reader.skipField();
}
}
return message;
}
serializeBinary(): Uint8Array {
return this.serialize();
}
static deserializeBinary(bytes: Uint8Array): ArrayInput {
return ArrayInput.deserialize(bytes);
}
}
export class MapInput extends pb_1.Message {
#one_of_decls: number[][] = [];
constructor(data?: any[] | {
values?: Map<string, QueryListRecordsRequest.ValueInput>;
}) {
super();
pb_1.Message.initialize(this, Array.isArray(data) ? data : [], 0, -1, [], this.#one_of_decls);
if (!Array.isArray(data) && typeof data == "object") {
if ("values" in data && data.values != undefined) {
this.values = data.values;
}
}
if (!this.values)
this.values = new Map();
}
get values() {
return pb_1.Message.getField(this, 1) as any as Map<string, QueryListRecordsRequest.ValueInput>;
}
set values(value: Map<string, QueryListRecordsRequest.ValueInput>) {
pb_1.Message.setField(this, 1, value as any);
}
static fromObject(data: {
values?: {
[key: string]: ReturnType<typeof QueryListRecordsRequest.ValueInput.prototype.toObject>;
};
}): MapInput {
const message = new MapInput({});
if (typeof data.values == "object") {
message.values = new Map(Object.entries(data.values).map(([key, value]) => [key, QueryListRecordsRequest.ValueInput.fromObject(value)]));
}
return message;
}
toObject() {
const data: {
values?: {
[key: string]: ReturnType<typeof QueryListRecordsRequest.ValueInput.prototype.toObject>;
};
} = {};
if (this.values != null) {
data.values = (Object.fromEntries)((Array.from)(this.values).map(([key, value]) => [key, value.toObject()]));
}
return data;
}
serialize(): Uint8Array;
serialize(w: pb_1.BinaryWriter): void;
serialize(w?: pb_1.BinaryWriter): Uint8Array | void {
const writer = w || new pb_1.BinaryWriter();
for (const [key, value] of this.values) {
writer.writeMessage(1, this.values, () => {
writer.writeString(1, key);
writer.writeMessage(2, value, () => value.serialize(writer));
});
}
if (!w)
return writer.getResultBuffer();
}
static deserialize(bytes: Uint8Array | pb_1.BinaryReader): MapInput {
const reader = bytes instanceof pb_1.BinaryReader ? bytes : new pb_1.BinaryReader(bytes), message = new MapInput();
while (reader.nextField()) {
if (reader.isEndGroup())
break;
switch (reader.getFieldNumber()) {
case 1:
reader.readMessage(message, () => pb_1.Map.deserializeBinary(message.values as any, reader, reader.readString, () => {
let value;
reader.readMessage(message, () => value = QueryListRecordsRequest.ValueInput.deserialize(reader));
return value;
}));
break;
default: reader.skipField();
}
}
return message;
}
serializeBinary(): Uint8Array {
return this.serialize();
}
static deserializeBinary(bytes: Uint8Array): MapInput {
return MapInput.deserialize(bytes);
}
}
export class ValueInput extends pb_1.Message {
#one_of_decls: number[][] = [[1, 2, 3, 4, 5, 6, 7]];
constructor(data?: any[] | ({} & (({
string?: string;
int?: never;
float?: never;
boolean?: never;
link?: never;
array?: never;
map?: never;
} | {
string?: never;
int?: number;
float?: never;
boolean?: never;
link?: never;
array?: never;
map?: never;
} | {
string?: never;
int?: never;
float?: number;
boolean?: never;
link?: never;
array?: never;
map?: never;
} | {
string?: never;
int?: never;
float?: never;
boolean?: boolean;
link?: never;
array?: never;
map?: never;
} | {
string?: never;
int?: never;
float?: never;
boolean?: never;
link?: string;
array?: never;
map?: never;
} | {
string?: never;
int?: never;
float?: never;
boolean?: never;
link?: never;
array?: QueryListRecordsRequest.ArrayInput;
map?: never;
} | {
string?: never;
int?: never;
float?: never;
boolean?: never;
link?: never;
array?: never;
map?: QueryListRecordsRequest.MapInput;
})))) {
super();
pb_1.Message.initialize(this, Array.isArray(data) ? data : [], 0, -1, [], this.#one_of_decls);
if (!Array.isArray(data) && typeof data == "object") {
if ("string" in data && data.string != undefined) { if ("string" in data && data.string != undefined) {
this.string = data.string; this.string = data.string;
} }
@ -335,72 +528,105 @@ export namespace vulcanize.registry.v1beta1 {
if ("boolean" in data && data.boolean != undefined) { if ("boolean" in data && data.boolean != undefined) {
this.boolean = data.boolean; this.boolean = data.boolean;
} }
if ("reference" in data && data.reference != undefined) { if ("link" in data && data.link != undefined) {
this.reference = data.reference; this.link = data.link;
} }
if ("values" in data && data.values != undefined) { if ("array" in data && data.array != undefined) {
this.values = data.values; this.array = data.array;
}
if ("map" in data && data.map != undefined) {
this.map = data.map;
} }
} }
} }
get type() {
return pb_1.Message.getFieldWithDefault(this, 1, "") as string;
}
set type(value: string) {
pb_1.Message.setField(this, 1, value);
}
get string() { get string() {
return pb_1.Message.getFieldWithDefault(this, 2, "") as string; return pb_1.Message.getFieldWithDefault(this, 1, "") as string;
} }
set string(value: string) { set string(value: string) {
pb_1.Message.setField(this, 2, value); pb_1.Message.setOneofField(this, 1, this.#one_of_decls[0], value);
}
get has_string() {
return pb_1.Message.getField(this, 1) != null;
} }
get int() { get int() {
return pb_1.Message.getFieldWithDefault(this, 3, 0) as number; return pb_1.Message.getFieldWithDefault(this, 2, 0) as number;
} }
set int(value: number) { set int(value: number) {
pb_1.Message.setField(this, 3, value); pb_1.Message.setOneofField(this, 2, this.#one_of_decls[0], value);
}
get has_int() {
return pb_1.Message.getField(this, 2) != null;
} }
get float() { get float() {
return pb_1.Message.getFieldWithDefault(this, 4, 0) as number; return pb_1.Message.getFieldWithDefault(this, 3, 0) as number;
} }
set float(value: number) { set float(value: number) {
pb_1.Message.setField(this, 4, value); pb_1.Message.setOneofField(this, 3, this.#one_of_decls[0], value);
}
get has_float() {
return pb_1.Message.getField(this, 3) != null;
} }
get boolean() { get boolean() {
return pb_1.Message.getFieldWithDefault(this, 5, false) as boolean; return pb_1.Message.getFieldWithDefault(this, 4, false) as boolean;
} }
set boolean(value: boolean) { set boolean(value: boolean) {
pb_1.Message.setField(this, 5, value); pb_1.Message.setOneofField(this, 4, this.#one_of_decls[0], value);
} }
get reference() { get has_boolean() {
return pb_1.Message.getWrapperField(this, QueryListRecordsRequest.ReferenceInput, 6) as QueryListRecordsRequest.ReferenceInput; return pb_1.Message.getField(this, 4) != null;
} }
set reference(value: QueryListRecordsRequest.ReferenceInput) { get link() {
pb_1.Message.setWrapperField(this, 6, value); return pb_1.Message.getFieldWithDefault(this, 5, "") as string;
} }
get has_reference() { set link(value: string) {
pb_1.Message.setOneofField(this, 5, this.#one_of_decls[0], value);
}
get has_link() {
return pb_1.Message.getField(this, 5) != null;
}
get array() {
return pb_1.Message.getWrapperField(this, QueryListRecordsRequest.ArrayInput, 6) as QueryListRecordsRequest.ArrayInput;
}
set array(value: QueryListRecordsRequest.ArrayInput) {
pb_1.Message.setOneofWrapperField(this, 6, this.#one_of_decls[0], value);
}
get has_array() {
return pb_1.Message.getField(this, 6) != null; return pb_1.Message.getField(this, 6) != null;
} }
get values() { get map() {
return pb_1.Message.getRepeatedWrapperField(this, QueryListRecordsRequest.ValueInput, 7) as QueryListRecordsRequest.ValueInput[]; return pb_1.Message.getWrapperField(this, QueryListRecordsRequest.MapInput, 7) as QueryListRecordsRequest.MapInput;
} }
set values(value: QueryListRecordsRequest.ValueInput[]) { set map(value: QueryListRecordsRequest.MapInput) {
pb_1.Message.setRepeatedWrapperField(this, 7, value); pb_1.Message.setOneofWrapperField(this, 7, this.#one_of_decls[0], value);
}
get has_map() {
return pb_1.Message.getField(this, 7) != null;
}
get value() {
const cases: {
[index: number]: "none" | "string" | "int" | "float" | "boolean" | "link" | "array" | "map";
} = {
0: "none",
1: "string",
2: "int",
3: "float",
4: "boolean",
5: "link",
6: "array",
7: "map"
};
return cases[pb_1.Message.computeOneofCase(this, [1, 2, 3, 4, 5, 6, 7])];
} }
static fromObject(data: { static fromObject(data: {
type?: string;
string?: string; string?: string;
int?: number; int?: number;
float?: number; float?: number;
boolean?: boolean; boolean?: boolean;
reference?: ReturnType<typeof QueryListRecordsRequest.ReferenceInput.prototype.toObject>; link?: string;
values?: ReturnType<typeof QueryListRecordsRequest.ValueInput.prototype.toObject>[]; array?: ReturnType<typeof QueryListRecordsRequest.ArrayInput.prototype.toObject>;
map?: ReturnType<typeof QueryListRecordsRequest.MapInput.prototype.toObject>;
}): ValueInput { }): ValueInput {
const message = new ValueInput({}); const message = new ValueInput({});
if (data.type != null) {
message.type = data.type;
}
if (data.string != null) { if (data.string != null) {
message.string = data.string; message.string = data.string;
} }
@ -413,27 +639,27 @@ export namespace vulcanize.registry.v1beta1 {
if (data.boolean != null) { if (data.boolean != null) {
message.boolean = data.boolean; message.boolean = data.boolean;
} }
if (data.reference != null) { if (data.link != null) {
message.reference = QueryListRecordsRequest.ReferenceInput.fromObject(data.reference); message.link = data.link;
} }
if (data.values != null) { if (data.array != null) {
message.values = data.values.map(item => QueryListRecordsRequest.ValueInput.fromObject(item)); message.array = QueryListRecordsRequest.ArrayInput.fromObject(data.array);
}
if (data.map != null) {
message.map = QueryListRecordsRequest.MapInput.fromObject(data.map);
} }
return message; return message;
} }
toObject() { toObject() {
const data: { const data: {
type?: string;
string?: string; string?: string;
int?: number; int?: number;
float?: number; float?: number;
boolean?: boolean; boolean?: boolean;
reference?: ReturnType<typeof QueryListRecordsRequest.ReferenceInput.prototype.toObject>; link?: string;
values?: ReturnType<typeof QueryListRecordsRequest.ValueInput.prototype.toObject>[]; array?: ReturnType<typeof QueryListRecordsRequest.ArrayInput.prototype.toObject>;
map?: ReturnType<typeof QueryListRecordsRequest.MapInput.prototype.toObject>;
} = {}; } = {};
if (this.type != null) {
data.type = this.type;
}
if (this.string != null) { if (this.string != null) {
data.string = this.string; data.string = this.string;
} }
@ -446,11 +672,14 @@ export namespace vulcanize.registry.v1beta1 {
if (this.boolean != null) { if (this.boolean != null) {
data.boolean = this.boolean; data.boolean = this.boolean;
} }
if (this.reference != null) { if (this.link != null) {
data.reference = this.reference.toObject(); data.link = this.link;
} }
if (this.values != null) { if (this.array != null) {
data.values = this.values.map((item: QueryListRecordsRequest.ValueInput) => item.toObject()); data.array = this.array.toObject();
}
if (this.map != null) {
data.map = this.map.toObject();
} }
return data; return data;
} }
@ -458,20 +687,20 @@ export namespace vulcanize.registry.v1beta1 {
serialize(w: pb_1.BinaryWriter): void; serialize(w: pb_1.BinaryWriter): void;
serialize(w?: pb_1.BinaryWriter): Uint8Array | void { serialize(w?: pb_1.BinaryWriter): Uint8Array | void {
const writer = w || new pb_1.BinaryWriter(); const writer = w || new pb_1.BinaryWriter();
if (this.type.length) if (this.has_string)
writer.writeString(1, this.type); writer.writeString(1, this.string);
if (this.string.length) if (this.has_int)
writer.writeString(2, this.string); writer.writeInt64(2, this.int);
if (this.int != 0) if (this.has_float)
writer.writeInt64(3, this.int); writer.writeDouble(3, this.float);
if (this.float != 0) if (this.has_boolean)
writer.writeDouble(4, this.float); writer.writeBool(4, this.boolean);
if (this.boolean != false) if (this.has_link)
writer.writeBool(5, this.boolean); writer.writeString(5, this.link);
if (this.has_reference) if (this.has_array)
writer.writeMessage(6, this.reference, () => this.reference.serialize(writer)); writer.writeMessage(6, this.array, () => this.array.serialize(writer));
if (this.values.length) if (this.has_map)
writer.writeRepeatedMessage(7, this.values, (item: QueryListRecordsRequest.ValueInput) => item.serialize(writer)); writer.writeMessage(7, this.map, () => this.map.serialize(writer));
if (!w) if (!w)
return writer.getResultBuffer(); return writer.getResultBuffer();
} }
@ -482,25 +711,25 @@ export namespace vulcanize.registry.v1beta1 {
break; break;
switch (reader.getFieldNumber()) { switch (reader.getFieldNumber()) {
case 1: case 1:
message.type = reader.readString();
break;
case 2:
message.string = reader.readString(); message.string = reader.readString();
break; break;
case 3: case 2:
message.int = reader.readInt64(); message.int = reader.readInt64();
break; break;
case 4: case 3:
message.float = reader.readDouble(); message.float = reader.readDouble();
break; break;
case 5: case 4:
message.boolean = reader.readBool(); message.boolean = reader.readBool();
break; break;
case 5:
message.link = reader.readString();
break;
case 6: case 6:
reader.readMessage(message.reference, () => message.reference = QueryListRecordsRequest.ReferenceInput.deserialize(reader)); reader.readMessage(message.array, () => message.array = QueryListRecordsRequest.ArrayInput.deserialize(reader));
break; break;
case 7: case 7:
reader.readMessage(message.values, () => pb_1.Message.addToRepeatedWrapperField(message, 7, QueryListRecordsRequest.ValueInput.deserialize(reader), QueryListRecordsRequest.ValueInput)); reader.readMessage(message.map, () => message.map = QueryListRecordsRequest.MapInput.deserialize(reader));
break; break;
default: reader.skipField(); default: reader.skipField();
} }
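`ValueInput` is now a genuine oneof over `string`, `int`, `float`, `boolean`, `link`, `array`, and `map` (fields 1–7), with `ArrayInput` and `MapInput` wrapping nested values and `LinkInput` replacing `ReferenceInput`. A sketch of building typed query values with the regenerated classes (the module path is assumed; `serializeBinary`/`deserializeBinary` follow the generator's usual pattern):

```typescript
import * as registry_pb from './proto/vulcanize/registry/v1beta1/registry'; // path assumed

const { QueryListRecordsRequest } = registry_pb.vulcanize.registry.v1beta1;

// Scalar: exactly one oneof member may be set.
const version = new QueryListRecordsRequest.ValueInput({ string: '1.0.23' });
console.log(version.value);      // 'string' — the active oneof case
console.log(version.has_string); // true

// Link: a CID reference is now a plain string field, not a nested ReferenceInput message.
const repo = new QueryListRecordsRequest.ValueInput({
  link: 'QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D',
});

// Array: nested values are wrapped in an ArrayInput message.
const flags = new QueryListRecordsRequest.ValueInput({
  array: new QueryListRecordsRequest.ArrayInput({
    values: [new QueryListRecordsRequest.ValueInput({ boolean: true })],
  }),
});

// Round trip through the binary encoding.
const decoded = QueryListRecordsRequest.ValueInput.deserializeBinary(repo.serializeBinary());
console.log(decoded.link, flags.value); // '<cid>', 'array'
```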

View File

@ -9,7 +9,6 @@ import * as dependency_1 from "./../../../google/protobuf/duration";
import * as dependency_2 from "./../../../google/protobuf/timestamp"; import * as dependency_2 from "./../../../google/protobuf/timestamp";
import * as dependency_3 from "./../../../gogoproto/gogo"; import * as dependency_3 from "./../../../gogoproto/gogo";
import * as dependency_4 from "./../../../cosmos/base/v1beta1/coin"; import * as dependency_4 from "./../../../cosmos/base/v1beta1/coin";
import * as dependency_5 from "./../../../google/protobuf/any";
import * as pb_1 from "google-protobuf"; import * as pb_1 from "google-protobuf";
export namespace vulcanize.registry.v1beta1 { export namespace vulcanize.registry.v1beta1 {
export class Params extends pb_1.Message { export class Params extends pb_1.Message {
@ -348,7 +347,7 @@ export namespace vulcanize.registry.v1beta1 {
expiry_time?: string; expiry_time?: string;
deleted?: boolean; deleted?: boolean;
owners?: string[]; owners?: string[];
attributes?: dependency_5.google.protobuf.Any; attributes?: Uint8Array;
names?: string[]; names?: string[];
type?: string; type?: string;
}) { }) {
@ -421,13 +420,10 @@ export namespace vulcanize.registry.v1beta1 {
pb_1.Message.setField(this, 6, value); pb_1.Message.setField(this, 6, value);
} }
get attributes() { get attributes() {
return pb_1.Message.getWrapperField(this, dependency_5.google.protobuf.Any, 7) as dependency_5.google.protobuf.Any; return pb_1.Message.getFieldWithDefault(this, 7, new Uint8Array(0)) as Uint8Array;
} }
set attributes(value: dependency_5.google.protobuf.Any) { set attributes(value: Uint8Array) {
pb_1.Message.setWrapperField(this, 7, value); pb_1.Message.setField(this, 7, value);
}
get has_attributes() {
return pb_1.Message.getField(this, 7) != null;
} }
get names() { get names() {
return pb_1.Message.getFieldWithDefault(this, 8, []) as string[]; return pb_1.Message.getFieldWithDefault(this, 8, []) as string[];
@ -448,7 +444,7 @@ export namespace vulcanize.registry.v1beta1 {
expiry_time?: string; expiry_time?: string;
deleted?: boolean; deleted?: boolean;
owners?: string[]; owners?: string[];
attributes?: ReturnType<typeof dependency_5.google.protobuf.Any.prototype.toObject>; attributes?: Uint8Array;
names?: string[]; names?: string[];
type?: string; type?: string;
}): Record { }): Record {
@ -472,7 +468,7 @@ export namespace vulcanize.registry.v1beta1 {
message.owners = data.owners; message.owners = data.owners;
} }
if (data.attributes != null) { if (data.attributes != null) {
message.attributes = dependency_5.google.protobuf.Any.fromObject(data.attributes); message.attributes = data.attributes;
} }
if (data.names != null) { if (data.names != null) {
message.names = data.names; message.names = data.names;
@ -490,7 +486,7 @@ export namespace vulcanize.registry.v1beta1 {
expiry_time?: string; expiry_time?: string;
deleted?: boolean; deleted?: boolean;
owners?: string[]; owners?: string[];
attributes?: ReturnType<typeof dependency_5.google.protobuf.Any.prototype.toObject>; attributes?: Uint8Array;
names?: string[]; names?: string[];
type?: string; type?: string;
} = {}; } = {};
@ -513,7 +509,7 @@ export namespace vulcanize.registry.v1beta1 {
data.owners = this.owners; data.owners = this.owners;
} }
if (this.attributes != null) { if (this.attributes != null) {
data.attributes = this.attributes.toObject(); data.attributes = this.attributes;
} }
if (this.names != null) { if (this.names != null) {
data.names = this.names; data.names = this.names;
@ -539,8 +535,8 @@ export namespace vulcanize.registry.v1beta1 {
writer.writeBool(5, this.deleted); writer.writeBool(5, this.deleted);
if (this.owners.length) if (this.owners.length)
writer.writeRepeatedString(6, this.owners); writer.writeRepeatedString(6, this.owners);
if (this.has_attributes) if (this.attributes.length)
writer.writeMessage(7, this.attributes, () => this.attributes.serialize(writer)); writer.writeBytes(7, this.attributes);
if (this.names.length) if (this.names.length)
writer.writeRepeatedString(8, this.names); writer.writeRepeatedString(8, this.names);
if (this.type.length) if (this.type.length)
@ -573,7 +569,7 @@ export namespace vulcanize.registry.v1beta1 {
pb_1.Message.addToRepeatedField(message, 6, reader.readString()); pb_1.Message.addToRepeatedField(message, 6, reader.readString());
break; break;
case 7: case 7:
reader.readMessage(message.attributes, () => message.attributes = dependency_5.google.protobuf.Any.deserialize(reader)); message.attributes = reader.readBytes();
break; break;
case 8: case 8:
pb_1.Message.addToRepeatedField(message, 8, reader.readString()); pb_1.Message.addToRepeatedField(message, 8, reader.readString());
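With `attributes` switched from a wrapped `Any` message to a plain byte string (field 7), a record's attributes travel as opaque bytes on the wire. A sketch round-tripping the regenerated `Record` message (module path assumed; `deserializeBinary` follows the generator's usual pattern):

```typescript
import * as registry_pb from './proto/vulcanize/registry/v1beta1/registry'; // path assumed

const { Record } = registry_pb.vulcanize.registry.v1beta1;

// In practice the bytes hold the JSON-encoded record payload.
const attributes = new TextEncoder().encode(
  JSON.stringify({ type: 'WebsiteRegistrationRecord', url: 'https://cerc.io', version: '1.0.23' })
);

const record = Record.fromObject({
  attributes,
  names: [],
  type: 'WebsiteRegistrationRecord',
});

// The bytes come back unchanged; no Any type_url is involved any more.
const decoded = Record.deserializeBinary(record.serializeBinary());
console.log(JSON.parse(new TextDecoder().decode(decoded.attributes)));
```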

View File

@ -10,16 +10,24 @@ const attributeField = `
attributes { attributes {
key key
value { value {
null ... on BooleanValue { bool: value }
int ... on IntValue { int: value }
float ... on FloatValue { float: value }
string ... on StringValue { string: value }
boolean ... on BytesValue { bytes: value }
json ... on LinkValue { link: value }
reference { ... on ArrayValue {
id array: value {
... on BooleanValue { bool: value }
... on IntValue { int: value }
... on FloatValue { float: value }
... on StringValue { string: value }
... on BytesValue { bytes: value }
... on LinkValue { link: value }
} }
} }
... on MapValue { map: value { key mapping: value { __typename } } }
}
} }
`; `;
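Each attribute value now comes back as one member of a typed union (`BooleanValue`, `IntValue`, `FloatValue`, `StringValue`, `BytesValue`, `LinkValue`, `ArrayValue`, `MapValue`) instead of a struct of nullable scalars. Roughly what the `attributes` selection above returns for one record (values illustrative; only the matching fragment's alias is present on each value):

```typescript
const attributes = [
  { key: 'url', value: { string: 'https://cerc.io' } },
  { key: 'version', value: { string: '1.0.23' } },
  { key: 'repo_registration_record_cid', value: { link: 'QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D' } },
  // Hypothetical array-valued attribute: elements carry the same per-type aliases.
  { key: 'tags', value: { array: [{ string: 'watcher' }, { string: 'website' }] } },
];
```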

View File

@ -50,18 +50,18 @@ describe('Querying', () => {
expect(records.length).toBeGreaterThanOrEqual(1); expect(records.length).toBeGreaterThanOrEqual(1);
const { attributes: { repo_registration_record_cid: record_repo_registration_record_cid } } = records[0]; const { attributes: { repo_registration_record_cid: record_repo_registration_record_cid } } = records[0];
expect(repo_registration_record_cid).toBe(record_repo_registration_record_cid); expect(repo_registration_record_cid).toStrictEqual(record_repo_registration_record_cid);
}); });
test('Query records by attributes.', async () => { test('Query records by attributes.', async () => {
const { version, name } = watcher.record; const { version, url } = watcher.record;
const records = await registry.queryRecords({ version, name }, true); const records = await registry.queryRecords({ version, url, type: undefined }, true);
expect(records.length).toBe(1); expect(records.length).toBe(1);
[ watcher ] = records; [ watcher ] = records;
const { attributes: { version: recordVersion, name: recordName } } = watcher; const { attributes: { version: recordVersion, url: recordName } } = watcher;
expect(recordVersion).toBe(version); expect(recordVersion).toBe(version);
expect(recordName).toBe(name); expect(recordName).toBe(url);
}); });
test('Query records by id.', async () => { test('Query records by id.', async () => {
@ -75,6 +75,7 @@ describe('Querying', () => {
expect(record.id).toBe(watcher.id); expect(record.id).toBe(watcher.id);
// temp fix // temp fix
expect(record.attributes.repo_registration_record_cid).toBeDefined(); expect(record.attributes.repo_registration_record_cid).toBeDefined();
expect(record.attributes.repo_registration_record_cid).toHaveLength(46); expect(record.attributes.repo_registration_record_cid).toHaveProperty("/");
expect(record.attributes.repo_registration_record_cid["/"]).toHaveLength(46);
}); });
}); });
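The updated assertions reflect that CID-valued attributes now deserialize to IPLD-style link objects keyed by `/` rather than bare strings. The same check outside Jest, assuming a configured `registry` client as in the tests (the second `queryRecords` argument is the flag the tests pass; its semantics are assumed):

```typescript
async function checkWatcherLink(registry: any) {
  const records = await registry.queryRecords(
    { type: 'WebsiteRegistrationRecord', version: '1.0.23' },
    true
  );

  const [watcher] = records;
  const cidLink = watcher.attributes.repo_registration_record_cid;

  // CID attributes are IPLD-style links: { '/': 'Qm...' }
  console.log(cidLink['/'], cidLink['/'].length); // base58 CIDv0 string, 46 characters
}
```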

View File

@ -1,7 +1,10 @@
record: record:
type: WebsiteRegistrationRecord type: WebsiteRegistrationRecord
url: 'https://cerc.io' url: 'https://cerc.io'
repo_registration_record_cid: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D repo_registration_record_cid:
build_artifact_cid: QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9 /: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D
tls_cert_cid: QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR build_artifact_cid:
/: QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9
tls_cert_cid:
/: QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR
version: 1.0.23 version: 1.0.23
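The fixture now writes CID-valued fields as IPLD-style links (a nested map with a single `/` key) rather than bare strings, matching the attribute shape asserted above. Loading it yields nested link objects; a sketch assuming js-yaml as the parser and a local path for the fixture:

```typescript
import fs from 'fs';
import yaml from 'js-yaml';

const { record } = yaml.load(fs.readFileSync('./watcher.yml', 'utf8')) as any; // path assumed

console.log(record.type);                         // 'WebsiteRegistrationRecord'
console.log(record.repo_registration_record_cid); // { '/': 'QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D' }
console.log(record.version);                      // '1.0.23'
```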

View File

@ -3,8 +3,6 @@ import { Validator } from 'jsonschema';
import RecordSchema from './schema/record.json'; import RecordSchema from './schema/record.json';
import { Util } from './util'; import { Util } from './util';
import * as attributes from './proto/vulcanize/registry/v1beta1/attributes';
import * as any from './proto/google/protobuf/any';
/** /**
* Record. * Record.
@ -29,22 +27,7 @@ export class Record {
} }
get attributes() { get attributes() {
let a = new any.google.protobuf.Any(); return Buffer.from(JSON.stringify(this._record), 'binary')
if (this._record.type) {
const namespace: any = attributes.vulcanize.registry.v1beta1;
if (!namespace[this._record.type]) {
throw new Error(`Class not found: ${this._record.type}`);
}
const value = namespace[this._record.type].fromObject(this._record);
a = new any.google.protobuf.Any({
type_url: `/vulcanize.registry.v1beta1.${this._record.type}`,
value: value.serialize(),
});
}
return a;
} }
/** /**

View File

@ -10,21 +10,21 @@ export class Util {
/** /**
* Sorts JSON object. * Sorts JSON object.
*/ */
static sortJSON(object: any) { static sortJSON(obj: any) {
if (object instanceof Array) { if (obj instanceof Array) {
for (let i = 0; i < object.length; i++) { for (let i = 0; i < obj.length; i++) {
object[i] = Util.sortJSON(object[i]); obj[i] = Util.sortJSON(obj[i]);
} }
return object; return obj;
} }
if (typeof object !== 'object' || object === null) return object; if (typeof obj !== 'object' || obj === null) return obj;
let keys = Object.keys(object); let keys = Object.keys(obj);
keys = keys.sort(); keys = keys.sort();
const newObject: {[key: string]: any} = {}; const newObject: {[key: string]: any} = {};
for (let i = 0; i < keys.length; i++) { for (let i = 0; i < keys.length; i++) {
newObject[keys[i]] = Util.sortJSON(object[keys[i]]); newObject[keys[i]] = Util.sortJSON(obj[keys[i]]);
} }
return newObject; return newObject;
} }
@ -32,31 +32,46 @@ export class Util {
/** /**
* Marshal object into gql 'attributes' variable. * Marshal object into gql 'attributes' variable.
*/ */
static toGQLAttributes(object: any) { static toGQLAttributes(obj: any) {
const vars: any[] = []; const vars: any[] = [];
Object.keys(obj).forEach(key => {
const value = this.toGQLValue(obj[key]);
Object.keys(object).forEach(key => { if (value !== undefined) {
let type: string = typeof object[key]; vars.push({ key, value });
if (object[key] === null) {
vars.push({ key, value: { 'null': true } });
} else if (type === 'number') {
type = (object[key] % 1 === 0) ? 'int' : 'float';
vars.push({ key, value: { [type]: object[key] } });
} else if (type === 'string') {
vars.push({ key, value: { 'string': object[key] } });
} else if (type === 'boolean') {
vars.push({ key, value: { 'boolean': object[key] } });
} else if (type === 'object') {
const nestedObject = object[key];
if (nestedObject['/'] !== undefined) {
vars.push({ key, value: { 'reference': { id: nestedObject['/'] } } });
}
} }
}); });
return vars; return vars;
} }
static toGQLValue(obj: any) {
if (obj === null) {
return null;
}
let type: string = typeof obj;
switch (type) {
case 'number':
type = (obj % 1 === 0) ? 'int' : 'float';
return { [type]: obj };
case 'string':
return { 'string': obj };
case 'boolean':
return { 'boolean': obj };
case 'object':
if (obj['/'] !== undefined) {
return { 'link': obj['/'] };
}
if (obj instanceof Array) {
return { 'array': obj };
}
return { 'map': obj };
case 'undefined':
return undefined;
default:
throw new Error(`Unknown object type '${type}': ${obj}`);
}
}
/** /**
* Unmarshal attributes array to object. * Unmarshal attributes array to object.
*/ */
@ -64,24 +79,29 @@ export class Util {
const res: {[key: string]: any} = {}; const res: {[key: string]: any} = {};
attributes.forEach(attr => { attributes.forEach(attr => {
if (attr.value.null) { res[attr.key] = this.fromGQLValue(attr.value);
res[attr.key] = null;
} else if (attr.value.json) {
res[attr.key] = JSON.parse(attr.value.json);
} else if (attr.value.reference) {
// Convert GQL reference to IPLD style link.
const ref = attr.value.reference;
res[attr.key] = { '/': ref.id };
} else {
const { values, null: n, ...types } = attr.value;
const value = Object.values(types).find(v => v !== null);
res[attr.key] = value;
}
}); });
return res; return res;
} }
static fromGQLValue(obj: any) {
// Get first non-null key
const present = Object.keys(obj).find(k => obj[k] !== null);
if (present === undefined) {
throw new Error('Object has no non-null values');
}
// Recursively convert elements if the attribute is an array
if (present === 'array') {
return obj[present].map((e: any) => {
return this.fromGQLValue(e);
});
}
return obj[present];
}
/** /**
* Get record content ID. * Get record content ID.
*/ */
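The marshalling helpers now map plain JS values onto the typed GQL inputs (`link` for `{ '/': cid }`, `array` for arrays, `map` for other objects) and pick the single populated member back out of each response value. A sketch using the methods from the diff (the import path is an assumed relative location):

```typescript
import { Util } from './util'; // path assumed

// Marshal record attributes into GQL variables.
const vars = Util.toGQLAttributes({
  url: 'https://cerc.io',                                          // -> { string: '...' }
  version: 2,                                                      // whole number -> { int: 2 }
  ratio: 0.5,                                                      // fractional   -> { float: 0.5 }
  repo: { '/': 'QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D' }, // IPLD link    -> { link: '<cid>' }
});

// Unmarshal a GQL response: each value object has exactly one populated member.
const restored = Util.fromGQLAttributes([
  { key: 'url', value: { string: 'https://cerc.io' } },
  { key: 'tags', value: { array: [{ string: 'watcher' }, { string: 'website' }] } },
]);
console.log(restored.tags); // ['watcher', 'website']
```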

1531
yarn.lock

File diff suppressed because it is too large