Compare commits

...

7 Commits

Author SHA1 Message Date
3dabf399f7 Add context to error message (#57)
All checks were successful
Tests / sdk_tests (push) Successful in 22m48s
Part of [Fix error propagation from laconicd to client SDK](https://www.notion.so/Fix-error-propagation-from-laconicd-to-client-SDK-1bd20c5a4e8141a99e5809ea1fd7832f)

- Use the error from the response raw log returned by laconicd

Co-authored-by: neeraj <neeraj.rtly@gmail.com>
Reviewed-on: #57
2024-04-02 12:57:33 +00:00
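A minimal sketch of the idea behind this change, assuming a helper of this shape (the name is hypothetical; the actual change appears in the src/index.ts diff further down): the generic write error gains the last line of the laconicd raw log, which carries the chain's specific failure reason.

```typescript
const DEFAULT_WRITE_ERROR = 'Unable to write to laconicd';

// Append the most specific failure reason (the last line of the raw log)
// to the generic write error so SDK callers can see why laconicd rejected the tx.
function buildWriteError(rawLog: string, errorMessage?: string): string {
  const [lastErrorLine] = rawLog.split('\n').slice(-1);
  return `${errorMessage || DEFAULT_WRITE_ERROR}: ${lastErrorLine}`;
}

// buildWriteError('failed to execute message\nName already reserved')
// => 'Unable to write to laconicd: Name already reserved'
```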
73bef00dd3 Handle record attributes with null values (#56)
All checks were successful
Tests / sdk_tests (push) Successful in 22m32s
Part of cerc-io/laconicd#144

Reviewed-on: #56
Co-authored-by: Prathamesh Musale <prathamesh.musale0@gmail.com>
Co-committed-by: Prathamesh Musale <prathamesh.musale0@gmail.com>
2024-02-08 05:39:31 +00:00
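Illustrative only, not code from this diff: once attributes travel as a byte string (see #52 further down), an attribute whose value is null can round-trip through serialization without special casing; the example record below is hypothetical.

```typescript
// A record payload with one explicitly null attribute.
const attributes = { type: 'WebsiteRegistrationRecord', repo_registration_record_cid: null };

// Serialize to bytes (JSON is an assumed encoding here) and decode again.
const encoded: Uint8Array = new TextEncoder().encode(JSON.stringify(attributes));
const decoded = JSON.parse(new TextDecoder().decode(encoded));

console.log(decoded.repo_registration_record_cid); // null, preserved through the round trip
```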
809c4d8f5b Run tests only from src folder (#55)
All checks were successful
Tests / sdk_tests (push) Successful in 21m47s
Reviewed-on: #55
Co-authored-by: Prathamesh Musale <prathamesh.musale0@gmail.com>
Co-committed-by: Prathamesh Musale <prathamesh.musale0@gmail.com>
2024-01-23 12:10:56 +00:00
fedf35d702 Tests cleanup (#53)
Some checks failed
Tests / sdk_tests (push) Failing after 7m52s
General test improvements.
- Simplifies Dockerfile
- Refactors tests so they can be run independently

Co-authored-by: Roy Crihfield <roy@manteia.ltd>
Co-authored-by: Prathamesh Musale <prathamesh.musale0@gmail.com>
Reviewed-on: #53
Reviewed-by: Thomas E Lackey <telackey@noreply.git.vdb.to>
Co-authored-by: Prathamesh Musale <prathamesh@noreply.git.vdb.to>
Co-committed-by: Prathamesh Musale <prathamesh@noreply.git.vdb.to>
2024-01-22 08:30:35 +00:00
65001568c8 Remove hard-coded record types (#52)
Some checks failed
Tests / sdk_tests (push) Failing after 8m38s
Refactors the `Record.Attributes` from Any into a byte string.
Companion to cerc-io/laconicd#132.

Resolves https://github.com/cerc-io/laconicd/issues/107

Co-authored-by: Roy Crihfield <roy@manteia.ltd>
Co-authored-by: Prathamesh Musale <prathamesh.musale0@gmail.com>
Reviewed-on: #52
Reviewed-by: Thomas E Lackey <telackey@noreply.git.vdb.to>
Co-authored-by: Nabarun <nabarun@deepstacksoft.com>
Co-committed-by: Nabarun <nabarun@deepstacksoft.com>
2024-01-15 04:58:55 +00:00
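A hedged sketch of the client-side effect: instead of wrapping attributes in a typed protobuf `Any` (which required a hard-coded message per record type), arbitrary attributes can be serialized once and sent as bytes. The helper name and JSON encoding below are assumptions; the `{ name: 'attributes', type: 'bytes' }` EIP-712 entry matches the src/txns.ts change later in this diff.

```typescript
// Any JSON-serializable attribute shape works; no per-type .proto message is required.
function encodeAttributes(attributes: Record<string, unknown>): Uint8Array {
  return new TextEncoder().encode(JSON.stringify(attributes));
}

// Corresponding EIP-712 payload entry (see src/txns.ts below):
// { name: 'attributes', type: 'bytes' }
```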
990e427c3f Add ApplicationDeploymentRemovalRequests (#51)
Some checks failed
Tests / sdk_tests (push) Failing after 24m12s
```
message ApplicationDeploymentRemovalRequest {
  string type           = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
  string version        = 2 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
  string deployment     = 3 [(gogoproto.moretags) = "json:\"deployment\" yaml:\"deployment\""];
  string request        = 4 [(gogoproto.moretags) = "json:\"request\" yaml:\"request\""];
  string meta           = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
  repeated string tags  = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}

message ApplicationDeploymentRemovalRecord {
  string type           = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
  string version        = 2 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
  string deployment     = 3 [(gogoproto.moretags) = "json:\"deployment\" yaml:\"deployment\""];
  string request        = 4 [(gogoproto.moretags) = "json:\"request\" yaml:\"request\""];
  string meta           = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
  repeated string tags  = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}
```

Reviewed-on: #51
Co-authored-by: Thomas E Lackey <telackey@bozemanpass.com>
Co-committed-by: Thomas E Lackey <telackey@bozemanpass.com>
2023-12-21 21:19:49 +00:00
419dddc35a Make 'repository' a repeated value and add ApplicationArtifact type. (#50)
All checks were successful
Tests / sdk_tests (push) Successful in 19m39s
Reviewed-on: #50
Co-authored-by: Thomas E Lackey <telackey@bozemanpass.com>
Co-committed-by: Thomas E Lackey <telackey@bozemanpass.com>
2023-12-13 20:51:12 +00:00
31 changed files with 1907 additions and 3469 deletions

.dockerignore Normal file (2 changed lines)
View File

@@ -0,0 +1,2 @@
Dockerfile
node_modules

View File

@@ -25,57 +25,39 @@ jobs:
ref: main
- name: Environment
run: ls -tlh && env
- name: Start dockerd
run: |
dockerd -H $DOCKER_HOST --userland-proxy=false &
sleep 5
- name: build containers scripts
- name: Build laconicd container
working-directory: laconicd/tests/sdk_tests
run: ./build-laconicd-container.sh
- name: build test-container
run: docker build -t cerc-io/laconic-sdk-tester:local-test -f laconicd/tests/sdk_tests/Dockerfile-sdk .
- name: start containers
- name: Build laconic-sdk container
run: ./scripts/build-sdk-test-container.sh
- name: Start containers
working-directory: laconicd/tests/sdk_tests
run: docker compose up -d
- name: run basic tests
- name: Run tests
working-directory: laconicd/tests/sdk_tests
run: |
laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
cosmos_chain_id=laconic_9000-1
laconicd_rest_endpoint=http://laconicd:1317
laconicd_gql_endpoint=http://laconicd:9473/api
sleep 30s
docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test"
- name: stop containers
run: ./run-tests.sh
- name: Start containers (auctions enabled)
working-directory: laconicd/tests/sdk_tests
run: docker compose down
- name: start auction containers
env:
TEST_AUCTION_ENABLED: true
run: docker compose up -d
- name: Run auction tests
working-directory: laconicd/tests/sdk_tests
run: docker compose -f docker-compose-auctions.yml up -d
- name: run auction tests
run: ./run-tests.sh test:auctions
- name: Start containers (expiry enabled)
working-directory: laconicd/tests/sdk_tests
run: |
laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
cosmos_chain_id=laconic_9000-1
laconicd_rest_endpoint=http://laconicd:1317
laconicd_gql_endpoint=http://laconicd:9473/api
sleep 30s
docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test:auctions"
- name: start containers
env:
TEST_REGISTRY_EXPIRY: true
run: docker compose up -d
- name: Run nameservice expiry tests
working-directory: laconicd/tests/sdk_tests
run: docker compose down
- name: start containers
working-directory: laconicd/tests/sdk_tests
run: docker compose -f docker-compose-nameservice.yml up -d
- name: run nameservice expiry tests
working-directory: laconicd/tests/sdk_tests
run: |
laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
cosmos_chain_id=laconic_9000-1
laconicd_rest_endpoint=http://laconicd:1317
laconicd_gql_endpoint=http://laconicd:9473/api
sleep 30s
docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test:nameservice-expiry"
- name: stop nameservice containers
working-directory: laconicd/tests/sdk_tests
run: docker compose down
run: ./run-tests.sh test:nameservice-expiry

View File

@@ -20,53 +20,34 @@ jobs:
ref: main
- name: Environment
run: ls -tlh && env
- name: build containers scripts
- name: Build laconicd container
working-directory: laconicd/tests/sdk_tests
run: ./build-laconicd-container.sh
- name: build test-container
run: docker build -t cerc-io/laconic-sdk-tester:local-test -f laconicd/tests/sdk_tests/Dockerfile-sdk .
- name: start containers
- name: Build laconic-sdk container
run: ./scripts/build-sdk-test-container.sh
- name: Start containers
working-directory: laconicd/tests/sdk_tests
run: docker compose up -d
- name: run basic tests
- name: Run tests
working-directory: laconicd/tests/sdk_tests
run: |
laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
cosmos_chain_id=laconic_9000-1
laconicd_rest_endpoint=http://laconicd:1317
laconicd_gql_endpoint=http://laconicd:9473/api
sleep 30s
docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test"
- name: stop containers
run: ./run-tests.sh
- name: Start containers (auctions enabled)
working-directory: laconicd/tests/sdk_tests
run: docker compose down
- name: start auction containers
env:
TEST_AUCTION_ENABLED: true
run: docker compose up -d
- name: Run auction tests
working-directory: laconicd/tests/sdk_tests
run: docker compose -f docker-compose-auctions.yml up -d
- name: run auction tests
run: ./run-tests.sh test:auctions
- name: Start containers (expiry enabled)
working-directory: laconicd/tests/sdk_tests
run: |
laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
cosmos_chain_id=laconic_9000-1
laconicd_rest_endpoint=http://laconicd:1317
laconicd_gql_endpoint=http://laconicd:9473/api
sleep 30s
docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test:auctions"
- name: start containers
env:
TEST_REGISTRY_EXPIRY: true
run: docker compose up -d
- name: Run nameservice expiry tests
working-directory: laconicd/tests/sdk_tests
run: docker compose down
- name: start containers
working-directory: laconicd/tests/sdk_tests
run: docker compose -f docker-compose-nameservice.yml up -d
- name: run nameservice expiry tests
working-directory: laconicd/tests/sdk_tests
run: |
laconicd_key=$( docker compose exec laconicd echo y | docker compose exec laconicd laconicd keys export mykey --unarmored-hex --unsafe )
cosmos_chain_id=laconic_9000-1
laconicd_rest_endpoint=http://laconicd:1317
laconicd_gql_endpoint=http://laconicd:9473/api
sleep 30s
docker compose exec sdk-test-runner sh -c "COSMOS_CHAIN_ID=${cosmos_chain_id} LACONICD_REST_ENDPOINT=${laconicd_rest_endpoint} LACONICD_GQL_ENDPOINT=${laconicd_gql_endpoint} PRIVATE_KEY=${laconicd_key} yarn test:nameservice-expiry"
- name: stop nameservice containers
working-directory: laconicd/tests/sdk_tests
run: docker compose down
run: ./run-tests.sh test:nameservice-expiry

View File

@@ -12,13 +12,5 @@ Run following scripts when [proto files](./proto/) are updated.
2. Generate typescript code for the proto files
```bash
./scripts/create-proto-files.sh
./scripts/proto-gen.sh
```
3. Remove GRPC code from generated code
```bash
./scripts/remove-grpc.sh
```
Reference: https://github.com/tharsis/evmosjs/tree/main/packages/proto#note

View File

@@ -21,34 +21,18 @@ RUN \
&& npm config -g set prefix ${NPM_GLOBAL} \
&& su ${USERNAME} -c "npm config -g set prefix ${NPM_GLOBAL}" \
# Install eslint
&& su ${USERNAME} -c "umask 0002 && npm install -g eslint lerna jest" \
&& su ${USERNAME} -c "umask 0002 && npm install -g eslint" \
&& npm cache clean --force > /dev/null 2>&1
# [Optional] Uncomment this section to install additional OS packages.
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
# && apt-get -y install --no-install-recommends <your-package-list-here>
# [Optional] Uncomment if you want to install an additional version of node using nvm
# ARG EXTRA_NODE_VERSION=10
# RUN su node -c "source /usr/local/share/nvm/nvm.sh && nvm install ${EXTRA_NODE_VERSION}"
# [Optional] Uncomment if you want to install more global node modules
# RUN su node -c "npm install -g <your-package-list-here>"
WORKDIR /
RUN mkdir node_modules && mkdir proto && mkdir scripts && mkdir src
COPY node_modules ./node_modules/
COPY proto . ./proto/
COPY scripts ./scripts/
COPY src ./src/
COPY entrypoint.sh .
ENTRYPOINT ["/entrypoint.sh"]
# Placeholder CMD : generally this will be overridden at run time like :
# docker run -it -v /home/builder/cerc/laconic-sdk:/workspace cerc/builder-js sh -c 'cd /workspace && yarn && yarn build'
CMD node --version
# Temp hack, clone the laconic-sdk repo here
WORKDIR /app
WORKDIR /app/laconic-sdk
COPY package*.json .
RUN yarn install
COPY . .
WORKDIR /app/laconic-sdk

View File

@@ -34,6 +34,12 @@ Follow these steps to run the tests:
- Run the tests with auctions enabled
- Remove laconicd data from previous run
```bash
rm -rf ~/.laconicd
```
- In laconicd repo run:
```bash
@@ -54,6 +60,12 @@ Follow these steps to run the tests:
- Run the tests for record and authority expiry
- Remove laconicd data from previous run
```bash
rm -rf ~/.laconicd
```
- In laconicd repo run:
```bash

View File

@@ -1,6 +1,6 @@
{
"name": "@cerc-io/laconic-sdk",
"version": "0.1.11",
"version": "0.1.16",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"repository": "git@github.com:cerc-io/laconic-sdk.git",
@@ -48,7 +48,7 @@
"tiny-secp256k1": "^1.1.6"
},
"scripts": {
"test": "jest --runInBand --verbose",
"test": "jest --runInBand --verbose --testPathPattern=src",
"test:auctions": "TEST_AUCTIONS_ENABLED=1 jest --runInBand --verbose src/auction.test.ts",
"test:nameservice-expiry": "TEST_NAMESERVICE_EXPIRY=1 jest --runInBand --verbose src/nameservice-expiry.test.ts",
"build": "tsc"

View File

@@ -0,0 +1,115 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
syntax = "proto3";
package google.protobuf;
option cc_enable_arenas = true;
option go_package = "google.golang.org/protobuf/types/known/durationpb";
option java_package = "com.google.protobuf";
option java_outer_classname = "DurationProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
// A Duration represents a signed, fixed-length span of time represented
// as a count of seconds and fractions of seconds at nanosecond
// resolution. It is independent of any calendar and concepts like "day"
// or "month". It is related to Timestamp in that the difference between
// two Timestamp values is a Duration and it can be added or subtracted
// from a Timestamp. Range is approximately +-10,000 years.
//
// # Examples
//
// Example 1: Compute Duration from two Timestamps in pseudo code.
//
// Timestamp start = ...;
// Timestamp end = ...;
// Duration duration = ...;
//
// duration.seconds = end.seconds - start.seconds;
// duration.nanos = end.nanos - start.nanos;
//
// if (duration.seconds < 0 && duration.nanos > 0) {
// duration.seconds += 1;
// duration.nanos -= 1000000000;
// } else if (duration.seconds > 0 && duration.nanos < 0) {
// duration.seconds -= 1;
// duration.nanos += 1000000000;
// }
//
// Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
//
// Timestamp start = ...;
// Duration duration = ...;
// Timestamp end = ...;
//
// end.seconds = start.seconds + duration.seconds;
// end.nanos = start.nanos + duration.nanos;
//
// if (end.nanos < 0) {
// end.seconds -= 1;
// end.nanos += 1000000000;
// } else if (end.nanos >= 1000000000) {
// end.seconds += 1;
// end.nanos -= 1000000000;
// }
//
// Example 3: Compute Duration from datetime.timedelta in Python.
//
// td = datetime.timedelta(days=3, minutes=10)
// duration = Duration()
// duration.FromTimedelta(td)
//
// # JSON Mapping
//
// In JSON format, the Duration type is encoded as a string rather than an
// object, where the string ends in the suffix "s" (indicating seconds) and
// is preceded by the number of seconds, with nanoseconds expressed as
// fractional seconds. For example, 3 seconds with 0 nanoseconds should be
// encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
// be expressed in JSON format as "3.000000001s", and 3 seconds and 1
// microsecond should be expressed in JSON format as "3.000001s".
//
message Duration {
// Signed seconds of the span of time. Must be from -315,576,000,000
// to +315,576,000,000 inclusive. Note: these bounds are computed from:
// 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
int64 seconds = 1;
// Signed fractions of a second at nanosecond resolution of the span
// of time. Durations less than one second are represented with a 0
// `seconds` field and a positive or negative `nanos` field. For durations
// of one second or more, a non-zero value for the `nanos` field must be
// of the same sign as the `seconds` field. Must be from -999,999,999
// to +999,999,999 inclusive.
int32 nanos = 2;
}

View File

@@ -0,0 +1,144 @@
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc. All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
syntax = "proto3";
package google.protobuf;
option cc_enable_arenas = true;
option go_package = "google.golang.org/protobuf/types/known/timestamppb";
option java_package = "com.google.protobuf";
option java_outer_classname = "TimestampProto";
option java_multiple_files = true;
option objc_class_prefix = "GPB";
option csharp_namespace = "Google.Protobuf.WellKnownTypes";
// A Timestamp represents a point in time independent of any time zone or local
// calendar, encoded as a count of seconds and fractions of seconds at
// nanosecond resolution. The count is relative to an epoch at UTC midnight on
// January 1, 1970, in the proleptic Gregorian calendar which extends the
// Gregorian calendar backwards to year one.
//
// All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
// second table is needed for interpretation, using a [24-hour linear
// smear](https://developers.google.com/time/smear).
//
// The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
// restricting to that range, we ensure that we can convert to and from [RFC
// 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
//
// # Examples
//
// Example 1: Compute Timestamp from POSIX `time()`.
//
// Timestamp timestamp;
// timestamp.set_seconds(time(NULL));
// timestamp.set_nanos(0);
//
// Example 2: Compute Timestamp from POSIX `gettimeofday()`.
//
// struct timeval tv;
// gettimeofday(&tv, NULL);
//
// Timestamp timestamp;
// timestamp.set_seconds(tv.tv_sec);
// timestamp.set_nanos(tv.tv_usec * 1000);
//
// Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
//
// FILETIME ft;
// GetSystemTimeAsFileTime(&ft);
// UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
//
// // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
// // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
// Timestamp timestamp;
// timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
// timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
//
// Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
//
// long millis = System.currentTimeMillis();
//
// Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
// .setNanos((int) ((millis % 1000) * 1000000)).build();
//
// Example 5: Compute Timestamp from Java `Instant.now()`.
//
// Instant now = Instant.now();
//
// Timestamp timestamp =
// Timestamp.newBuilder().setSeconds(now.getEpochSecond())
// .setNanos(now.getNano()).build();
//
// Example 6: Compute Timestamp from current time in Python.
//
// timestamp = Timestamp()
// timestamp.GetCurrentTime()
//
// # JSON Mapping
//
// In JSON format, the Timestamp type is encoded as a string in the
// [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
// format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
// where {year} is always expressed using four digits while {month}, {day},
// {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
// seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
// are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
// is required. A proto3 JSON serializer should always use UTC (as indicated by
// "Z") when printing the Timestamp type and a proto3 JSON parser should be
// able to accept both UTC and other timezones (as indicated by an offset).
//
// For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
// 01:30 UTC on January 15, 2017.
//
// In JavaScript, one can convert a Date object to this format using the
// standard
// [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
// method. In Python, a standard `datetime.datetime` object can be converted
// to this format using
// [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
// the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
// the Joda Time's [`ISODateTimeFormat.dateTime()`](
// http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime()
// ) to obtain a formatter capable of generating timestamps in this format.
//
message Timestamp {
// Represents seconds of UTC time since Unix epoch
// 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
// 9999-12-31T23:59:59Z inclusive.
int64 seconds = 1;
// Non-negative fractions of a second at nanosecond resolution. Negative
// second values with fractions must still have non-negative nanos values
// that count forward in time. Must be from 0 to 999,999,999
// inclusive.
int32 nanos = 2;
}

View File

@@ -1,98 +0,0 @@
syntax = "proto3";
package vulcanize.registry.v1beta1;
import "gogoproto/gogo.proto";
option go_package = "github.com/cerc-io/laconicd/x/registry/types";
message ServiceProviderRegistration {
string bond_id = 1 [(gogoproto.moretags) = "json:\"bondId\" yaml:\"bondId\""];
string laconic_id = 2 [(gogoproto.moretags) = "json:\"laconicId\" yaml:\"laconicId\""];
X500 x500 = 3 [(gogoproto.moretags) = "json:\"x500\" yaml:\"x500\""];
string type = 4 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string version = 6 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
}
message X500 {
string common_name = 1 [(gogoproto.moretags) = "json:\"commonName\" yaml:\"commonName\""];
string organization_unit = 2 [(gogoproto.moretags) = "json:\"organizationUnit\" yaml:\"organizationUnit\""];
string organization_name = 3 [(gogoproto.moretags) = "json:\"organizationName\" yaml:\"organizationName\""];
string locality_name = 4 [(gogoproto.moretags) = "json:\"localityName\" yaml:\"localityName\""];
string state_name = 5 [(gogoproto.moretags) = "json:\"stateName\" yaml:\"stateName\""];
string country = 6 [(gogoproto.moretags) = "json:\"country\" yaml:\"country\""];
}
message WebsiteRegistrationRecord {
string url = 1 [(gogoproto.moretags) = "json:\"url\" yaml:\"url\""];
string repo_registration_record_cid = 2
[(gogoproto.moretags) = "json:\"repoRegistrationRecordCID\" yaml:\"repoRegistrationRecordCID\""];
string build_artifact_cid = 3 [(gogoproto.moretags) = "json:\"buildArtifactCID\" yaml:\"buildArtifactCID\""];
string tls_cert_cid = 4 [(gogoproto.moretags) = "json:\"TLSCertCID\" yaml:\"TLSCertCID\""];
string type = 5 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string version = 6 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
}
message ApplicationRecord {
string type = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string name = 2 [(gogoproto.moretags) = "json:\"name\" yaml:\"name\""];
string description = 3 [(gogoproto.moretags) = "json:\"description\" yaml:\"description\""];
string version = 4 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string homepage = 5 [(gogoproto.moretags) = "json:\"homepage\" yaml:\"homepage\""];
string license = 6 [(gogoproto.moretags) = "json:\"license\" yaml:\"license\""];
string author = 7 [(gogoproto.moretags) = "json:\"author\" yaml:\"author\""];
string repository = 8 [(gogoproto.moretags) = "json:\"repository\" yaml:\"repository\""];
string repository_ref = 9 [(gogoproto.moretags) = "json:\"repositoryRef\" yaml:\"repositoryRef\""];
string app_version = 10 [(gogoproto.moretags) = "json:\"appVersion\" yaml:\"appVersion\""];
string app_type = 11 [(gogoproto.moretags) = "json:\"appType\" yaml:\"appType\""];
string engines = 12 [(gogoproto.moretags) = "json:\"engines\" yaml:\"engines\""];
repeated string os = 13 [(gogoproto.moretags) = "json:\"os\" yaml:\"os\""];
repeated string cpu = 14 [(gogoproto.moretags) = "json:\"cpu\" yaml:\"cpu\""];
string meta = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
repeated string tags = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}
message DnsRecord {
string type = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string name = 2 [(gogoproto.moretags) = "json:\"name\" yaml:\"name\""];
string version = 3 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string resource_type = 4 [(gogoproto.moretags) = "json:\"resourceType\" yaml:\"resourceType\""];
string value = 5 [(gogoproto.moretags) = "json:\"value\" yaml:\"value\""];
string meta = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
repeated string tags = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}
message ApplicationDeploymentRequest {
string type = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string name = 2 [(gogoproto.moretags) = "json:\"name\" yaml:\"name\""];
string version = 3 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string application = 4 [(gogoproto.moretags) = "json:\"application\" yaml:\"application\""];
string dns = 5 [(gogoproto.moretags) = "json:\"dns\" yaml:\"dns\""];
string config = 6 [(gogoproto.moretags) = "json:\"config\" yaml:\"config\""];
string deployment = 7 [(gogoproto.moretags) = "json:\"deployment\" yaml:\"deployment\""];
string meta = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
repeated string tags = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}
message ApplicationDeploymentRecord {
string type = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string name = 2 [(gogoproto.moretags) = "json:\"name\" yaml:\"name\""];
string description = 3 [(gogoproto.moretags) = "json:\"description\" yaml:\"description\""];
string version = 4 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string application = 5 [(gogoproto.moretags) = "json:\"application\" yaml:\"application\""];
string url = 6 [(gogoproto.moretags) = "json:\"url\" yaml:\"url\""];
string dns = 7 [(gogoproto.moretags) = "json:\"dns\" yaml:\"dns\""];
string request = 8 [(gogoproto.moretags) = "json:\"request\" yaml:\"request\""];
string meta = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
repeated string tags = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}
message GeneralRecord {
string type = 1 [(gogoproto.moretags) = "json:\"type\" yaml:\"type\""];
string name = 2 [(gogoproto.moretags) = "json:\"name\" yaml:\"name\""];
string description = 3 [(gogoproto.moretags) = "json:\"description\" yaml:\"description\""];
string version = 4 [(gogoproto.moretags) = "json:\"version\" yaml:\"version\""];
string category = 5 [(gogoproto.moretags) = "json:\"category\" yaml:\"category\""];
string value = 6 [(gogoproto.moretags) = "json:\"value\" yaml:\"value\""];
string meta = 20 [(gogoproto.moretags) = "json:\"meta\" yaml:\"meta\""];
repeated string tags = 21 [(gogoproto.moretags) = "json:\"tags\" yaml:\"tags\""];
}

View File

@@ -67,17 +67,25 @@ message QueryParamsResponse {
// QueryListRecordsRequest is request type for registry records list
message QueryListRecordsRequest {
message ReferenceInput {
message LinkInput {
string id = 1;
}
message ArrayInput {
repeated ValueInput values = 1;
}
message MapInput {
map<string, ValueInput> values = 1;
}
message ValueInput {
string type = 1;
string string = 2;
int64 int = 3;
double float = 4;
bool boolean = 5;
ReferenceInput reference = 6;
repeated ValueInput values = 7;
oneof value {
string string = 1;
int64 int = 2;
double float = 3;
bool boolean = 4;
string link = 5;
ArrayInput array = 6;
MapInput map = 7;
}
}
message KeyValueInput {
string key = 1;

View File

@@ -5,7 +5,6 @@ import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
import "gogoproto/gogo.proto";
import "cosmos/base/v1beta1/coin.proto";
import "google/protobuf/any.proto";
option go_package = "github.com/cerc-io/laconicd/x/registry/types";
@@ -64,7 +63,7 @@ message Record {
string expiry_time = 4 [(gogoproto.moretags) = "json:\"expiryTime\" yaml:\"expiryTime\""];
bool deleted = 5;
repeated string owners = 6 [(gogoproto.moretags) = "json:\"owners\" yaml:\"owners\""];
google.protobuf.Any attributes = 7 [(gogoproto.moretags) = "json:\"attributes\" yaml:\"attributes\""];
bytes attributes = 7 [(gogoproto.moretags) = "json:\"attributes\" yaml:\"attributes\""];
repeated string names = 8 [(gogoproto.moretags) = "json:\"names\" yaml:\"names\""];
string type = 9 [(gogoproto.moretags) = "json:\"types\" yaml:\"types\""];
}

View File

@@ -0,0 +1,2 @@
#!/usr/bin/env bash
docker build -t cerc/laconic-sdk-tester:local -f Dockerfile .

View File

@@ -1,12 +0,0 @@
#!/bin/bash
# NOTE: protoc is required
I=$(pwd)/proto
DEST_TS=$(pwd)/src/proto/
mkdir -p $DEST_TS
protoc \
--plugin=protoc-gen-ts=./node_modules/.bin/protoc-gen-ts \
--ts_out=$DEST_TS \
--proto_path=$I \
$(find $(pwd)/proto/vulcanize -iname "*.proto")

scripts/proto-gen.sh Executable file (40 changed lines)
View File

@@ -0,0 +1,40 @@
#!/bin/bash
# NOTE: protoc is required
set -e
REPO_ROOT=$(pwd)
I=$REPO_ROOT/proto
DEST_TS=$REPO_ROOT/src/proto/
echo "Generating protobuf files"
mkdir -p $DEST_TS
protoc \
--plugin=protoc-gen-ts=$REPO_ROOT/node_modules/.bin/protoc-gen-ts \
--ts_out=$DEST_TS \
--proto_path=$I \
$(find $REPO_ROOT/proto/vulcanize -iname "*.proto")
SED='sed -i'
if [[ "$OSTYPE" == "darwin"* ]]; then
SED='sed -i ""'
fi
echo "Removing gRPC references..."
# https://github.com/tharsis/evmosjs/tree/main/packages/proto#note
for file in $(find $REPO_ROOT/src/proto -type f)
do
line=$(grep -n '@grpc/grpc-js' $file | cut -f1 -d':')
if [[ -n "$line" ]] && [[ "$line" -gt 0 ]]; then
echo "Processing file: $file"
$SED "${line}d" ${file}
functions=$(grep -n 'interface GrpcUnaryServiceInterface' $file | cut -f1 -d':')
$SED "${functions},\$d" ${file}
echo '}' >> $file
fi
$SED '1s#^#/* eslint-disable */\n#' $file
$SED '1s#^#// @ts-nocheck\n#' $file
done

View File

@@ -1,16 +0,0 @@
#!/bin/bash
echo $PWD
for file in $(find src/proto -type f)
do
line=$(grep -n '@grpc/grpc-js' $file | cut -f1 -d':')
if [[ $line -gt 0 ]];
then
echo "Processing file... $file"
sed -i "${line}d" ${file}
functions=$(grep -n 'interface GrpcUnaryServiceInterface' $file | cut -f1 -d':')
sed -i "${functions},\$d" ${file}
echo '}' >> $file
fi
sed -i '1s#^#/* eslint-disable */\n#' $file
sed -i '1s#^#// @ts-nocheck\n#' $file
done

View File

@@ -166,10 +166,6 @@ export class Account {
assert(message);
const eipMessageDomain: any = message.eipToSign.domain;
if(message.eipToSign.message.msgs[0].value.payload!=null){
message.eipToSign.message.msgs[0].value.payload.record.attributes.value=Array.from(message.eipToSign.message.msgs[0].value.payload.record.attributes.value)
}
const signature = signTypedData({
data: {
types: message.eipToSign.types as MessageTypes,

View File

@@ -12,20 +12,10 @@ jest.setTimeout(90 * 1000);
const bondTests = () => {
let registry: Registry;
let watcher: any;
let version1: string;
let version2: string;
let bondId1: string;
let bondId2: string;
let bondOwner: string;
const publishNewWatcherVersion = async (bondId: string) => {
watcher = await ensureUpdatedConfig(WATCHER_YML_PATH);
let watcher = await ensureUpdatedConfig(WATCHER_YML_PATH);
await registry.setRecord({ privateKey, record: watcher.record, bondId }, privateKey, fee);
return watcher.record.version;
return watcher;
};
beforeAll(async () => {
@@ -33,93 +23,119 @@ const bondTests = () => {
});
test('Create bond.', async () => {
bondId1 = await registry.getNextBondId(privateKey);
let bondId = await registry.getNextBondId(privateKey);
expect(bondId).toBeDefined();
await registry.createBond({ denom: 'aphoton', amount: '1000000000' }, privateKey, fee);
});
describe('With bond created', () => {
let bond1: any
beforeAll(async () => {
let bondId1 = await registry.getNextBondId(privateKey);
expect(bondId1).toBeDefined();
await registry.createBond({ denom: 'aphoton', amount: '1000000000' }, privateKey, fee);
})
[bond1] = await registry.getBondsByIds([bondId1]);
expect(bond1).toBeDefined();
expect(bond1.id).toEqual(bondId1);
});
test('Get bond by ID.', async () => {
const [bond] = await registry.getBondsByIds([bondId1]);
const [bond] = await registry.getBondsByIds([bond1.id]);
expect(bond).toBeDefined();
expect(bond.id).toBe(bondId1);
expect(bond.id).toBe(bond1.id);
expect(bond.balance).toHaveLength(1);
expect(bond.balance[0]).toEqual({ type: 'aphoton', quantity: '1000000000' });
bondOwner = bond.owner;
});
test('Query bonds.', async () => {
const bonds = await registry.queryBonds();
expect(bonds).toBeDefined();
const bond = bonds.filter((bond: any) => bond.id === bondId1);
const bond = bonds.filter((bond: any) => bond.id === bond1.id);
expect(bond).toBeDefined();
});
test('Query bonds by owner.', async () => {
const bonds = await registry.queryBonds({ owner: bondOwner });
const bonds = await registry.queryBonds({ owner: bond1.owner });
expect(bonds).toBeDefined();
const bond = bonds.filter((bond: any) => bond.id === bondId1);
const bond = bonds.filter((bond: any) => bond.id === bond1.id);
expect(bond).toBeDefined();
});
test('Refill bond.', async () => {
await registry.refillBond({ id: bondId1, denom: 'aphoton', amount: '500' }, privateKey, fee);
await registry.refillBond({ id: bond1.id, denom: 'aphoton', amount: '500' }, privateKey, fee);
const [bond] = await registry.getBondsByIds([bondId1]);
const [bond] = await registry.getBondsByIds([bond1.id]);
expect(bond).toBeDefined();
expect(bond.id).toBe(bondId1);
expect(bond.id).toBe(bond1.id);
expect(bond.balance).toHaveLength(1);
expect(bond.balance[0]).toEqual({ type: 'aphoton', quantity: '1000000500' });
});
test('Withdraw bond.', async () => {
await registry.withdrawBond({ id: bondId1, denom: 'aphoton', amount: '500' }, privateKey, fee);
await registry.withdrawBond({ id: bond1.id, denom: 'aphoton', amount: '500' }, privateKey, fee);
const [bond] = await registry.getBondsByIds([bondId1]);
const [bond] = await registry.getBondsByIds([bond1.id]);
expect(bond).toBeDefined();
expect(bond.id).toBe(bondId1);
expect(bond.id).toBe(bond1.id);
expect(bond.balance).toHaveLength(1);
expect(bond.balance[0]).toEqual({ type: 'aphoton', quantity: '1000000000' });
});
test('Cancel bond.', async () => {
await registry.cancelBond({ id: bondId1 }, privateKey, fee);
await registry.cancelBond({ id: bond1.id }, privateKey, fee);
const [bond] = await registry.getBondsByIds([bondId1]);
const [bond] = await registry.getBondsByIds([bond1.id]);
expect(bond.id).toBe("");
expect(bond.owner).toBe("");
expect(bond.balance).toHaveLength(0);
});
});
test('Associate/Dissociate bond.', async () => {
let bondId1: string;
bondId1 = await registry.getNextBondId(privateKey);
expect(bondId1).toBeDefined();
await registry.createBond({ denom: 'aphoton', amount: '1000000000' }, privateKey, fee);
// Create a new record.
version1 = await publishNewWatcherVersion(bondId1);
let [record1] = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version1 }, true);
let watcher = await publishNewWatcherVersion(bondId1);
let query = { type: watcher.record.type, url: watcher.record.url, version: watcher.record.version };
let [record1] = await registry.queryRecords(query, true);
expect(record1.bondId).toBe(bondId1);
// Dissociate record, query and confirm.
await registry.dissociateBond({ recordId: record1.id }, privateKey, fee);
[record1] = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version1 }, true);
[record1] = await registry.queryRecords(query, true);
expect(record1.bondId).toBe('');
// Associate record with bond, query and confirm.
await registry.associateBond({ recordId: record1.id, bondId: bondId1 }, privateKey, fee);
[record1] = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version1 }, true);
[record1] = await registry.queryRecords(query, true);
expect(record1.bondId).toBe(bondId1);
});
test('Reassociate/Dissociate records.', async () => {
let bondId1: string;
let bondId2: string;
bondId1 = await registry.getNextBondId(privateKey);
expect(bondId1).toBeDefined();
await registry.createBond({ denom: 'aphoton', amount: '1000000000' }, privateKey, fee);
// Create a new record version.
version2 = await publishNewWatcherVersion(bondId1);
let watcher = await publishNewWatcherVersion(bondId1);
let queryv1 = { type: watcher.record.type, url: watcher.record.url, version: watcher.record.version };
let queryv2 = { type: watcher.record.type, url: watcher.record.url, version: watcher.record.version };
// Check version1, version2 as associated with bondId1.
let records;
records = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version1 }, true);
records = await registry.queryRecords(queryv1, true);
expect(records[0].bondId).toBe(bondId1);
records = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version2 }, true);
records = await registry.queryRecords(queryv2, true);
expect(records[0].bondId).toBe(bondId1);
// Create another bond.
@@ -131,16 +147,16 @@ const bondTests = () => {
// Reassociate records from bondId1 to bondId2, verify change.
await registry.reassociateRecords({ oldBondId: bondId1, newBondId: bondId2 }, privateKey, fee);
records = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version1 }, true);
records = await registry.queryRecords(queryv1, true);
expect(records[0].bondId).toBe(bondId2);
records = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version2 }, true);
records = await registry.queryRecords(queryv2, true);
expect(records[0].bondId).toBe(bondId2);
// Dissociate all records from bond, verify change.
await registry.dissociateRecords({ bondId: bondId2 }, privateKey, fee);
records = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version1 }, true);
records = await registry.queryRecords(queryv1, true);
expect(records[0].bondId).toBe('');
records = await registry.queryRecords({ type: watcher.record.type, name: watcher.record.name, version: version2 }, true);
records = await registry.queryRecords(queryv2, true);
expect(records[0].bondId).toBe('');
});
};

View File

@@ -54,7 +54,7 @@ import {
export const DEFAULT_CHAIN_ID = 'laconic_9000-1';
const DEFAULT_WRITE_ERROR = 'Unable to write to laconicd.';
const DEFAULT_WRITE_ERROR = 'Unable to write to laconicd';
// Parse Tx response from cosmos-sdk.
export const parseTxResponse = (result: any, parseResponse?: (data: string) => any) => {
@@ -117,7 +117,8 @@ export class Registry {
console.error(error)
}
return errorMessage || DEFAULT_WRITE_ERROR;
const [lastErrorLine] = error.split("\n").slice(-1);
return `${errorMessage || DEFAULT_WRITE_ERROR}: ${lastErrorLine}`;
}
constructor(gqlUrl: string, restUrl: string = "", chainId: string = DEFAULT_CHAIN_ID) {

View File

@@ -44,11 +44,7 @@ const MSG_SET_RECORD_TYPES = {
{ name: 'create_time', type: 'string' },
{ name: 'expiry_time', type: 'string' },
{ name: 'deleted', type: 'bool' },
{ name: 'attributes', type: 'TypePayloadRecordAttributes' },
],
TypePayloadRecordAttributes: [
{ name: 'type_url', type: 'string' },
{ name: 'value', type: 'uint8[]' },
{ name: 'attributes', type: 'bytes' },
],
TypePayloadSignatures: [
{ name: 'sig', type: 'string' },
@@ -85,10 +81,10 @@ export const parseMsgSetRecordResponse = (data: string) => {
}
export const NAMESERVICE_ERRORS = [
'Name already reserved.',
'Authority bond not found.',
'Name authority not found.',
'Access denied.',
'Name already reserved',
'Authority bond not found',
'Name authority not found',
'Access denied',
]
export interface MessageMsgReserveAuthority {

View File

@@ -7,7 +7,7 @@ import { ensureUpdatedConfig, getConfig } from './testing/helper';
const WATCHER_YML_PATH = path.join(__dirname, './testing/data/watcher.yml');
jest.setTimeout(120 * 1000);
jest.setTimeout(5 * 60 * 1000);
const { chainId, restEndpoint, gqlEndpoint, privateKey, fee } = getConfig();
@@ -18,12 +18,6 @@ const namingTests = () => {
let watcher: any;
let watcherId: string;
let authorityName: string;
let otherAuthorityName: string;
let otherPrivateKey: string;
let crn: string;
beforeAll(async () => {
registry = new Registry(gqlEndpoint, restEndpoint, chainId);
@@ -46,11 +40,25 @@ const namingTests = () => {
watcherId = result.data.id;
});
describe('Authority tests', () => {
test('Reserve authority.', async () => {
authorityName = `laconic-${Date.now()}`;
const authorityName = `laconic-${Date.now()}`;
await registry.reserveAuthority({ name: authorityName }, privateKey, fee);
});
describe('With authority reserved', () => {
let authorityName: string;
let crn: string;
beforeAll(async () => {
authorityName = `laconic-${Date.now()}`;
crn = `crn://${authorityName}/app/test`;
await registry.reserveAuthority({ name: authorityName }, privateKey, fee);
})
test('Lookup authority.', async () => {
const [record] = await registry.lookupAuthorities([authorityName]);
@@ -69,7 +77,8 @@ const namingTests = () => {
});
test('Reserve already reserved authority', async () => {
await expect(registry.reserveAuthority({ name: authorityName }, privateKey, fee)).rejects.toThrow('Name already reserved.');
await expect(registry.reserveAuthority({ name: authorityName }, privateKey, fee)).
rejects.toThrow('Name already reserved.');
});
test('Reserve sub-authority.', async () => {
@@ -105,23 +114,61 @@ const namingTests = () => {
});
test('Set name for unbonded authority', async () => {
crn = `crn://${authorityName}/app/test`;
assert(watcherId)
await expect(registry.setName({ crn, cid: watcherId }, privateKey, fee)).rejects.toThrow('Authority bond not found.');
await expect(registry.setName({ crn, cid: watcherId }, privateKey, fee)).
rejects.toThrow('Authority bond not found.');
});
test('Set authority bond', async () => {
await registry.setAuthorityBond({ name: authorityName, bondId }, privateKey, fee);
});
});
});
describe('Naming tests', () => {
let authorityName: string;
let otherAuthorityName: string;
let otherPrivateKey: string;
let otherAccount: Account;
beforeAll(async () => {
authorityName = `laconic-${Date.now()}`;
await registry.reserveAuthority({ name: authorityName }, privateKey, fee);
await registry.setAuthorityBond({ name: authorityName, bondId }, privateKey, fee);
// Create another account.
const mnenonic = Account.generateMnemonic();
otherAccount = await Account.generateFromMnemonic(mnenonic);
await registry.sendCoins({ denom: 'aphoton', amount: '1000000000', destinationAddress: otherAccount.formattedCosmosAddress }, privateKey, fee);
otherAuthorityName = `other-${Date.now()}`;
otherPrivateKey = otherAccount.privateKey.toString('hex');
});
test('Set name', async () => {
crn = `crn://${authorityName}/app/test`;
const crn = `crn://${authorityName}/app/test1`;
await registry.setName({ crn, cid: watcherId }, privateKey, fee);
// Query records should return it (some CRN points to it).
const records = await registry.queryRecords({ type: 'WebsiteRegistrationRecord', version: watcher.record.version });
expect(records).toBeDefined();
expect(records).toHaveLength(1);
const [record] = await registry.queryRecords({ type: 'WebsiteRegistrationRecord', version: watcher.record.version });
expect(record).toBeDefined();
expect(record.names).toHaveLength(1);
await registry.deleteName({ crn }, privateKey, fee);
});
describe('With name set', () => {
let crn: string;
beforeAll(async () => {
crn = `crn://${authorityName}/app/test2`;
await registry.setName({ crn, cid: watcherId }, privateKey, fee);
});
afterAll(async () => {
await registry.deleteName({ crn }, privateKey, fee);
});
test('Lookup name', async () => {
@@ -179,42 +226,6 @@ const namingTests = () => {
expect(oldRecord.height).toBeDefined();
});
test('Set name without reserving authority', async () => {
await expect(registry.setName({ crn: 'crn://not-reserved/app/test', cid: watcherId }, privateKey, fee))
.rejects.toThrow('Name authority not found.');
});
test('Set name for non-owned authority', async () => {
// Create another account.
const mnenonic = Account.generateMnemonic();
const otherAccount = await Account.generateFromMnemonic(mnenonic);
await registry.sendCoins({ denom: 'aphoton', amount: '1000000000', destinationAddress: otherAccount.formattedCosmosAddress }, privateKey, fee);
// Other account reserves an authority.
otherAuthorityName = `other-${Date.now()}`;
otherPrivateKey = otherAccount.privateKey.toString('hex');
await registry.reserveAuthority({ name: otherAuthorityName }, otherPrivateKey, fee);
// Try setting name under other authority.
await expect(registry.setName({ crn: `crn://${otherAuthorityName}/app/test`, cid: watcherId }, privateKey, fee)).rejects.toThrow('Access denied.');
});
test('Lookup non existing name', async () => {
const records = await registry.lookupNames(['crn://not-reserved/app/test']);
expect(records).toBeDefined();
expect(records).toHaveLength(1);
const [record] = records;
expect(record).toBeNull();
});
test('Resolve non existing name', async () => {
const records = await registry.resolveNames(['crn://not-reserved/app/test']);
expect(records).toBeDefined();
expect(records).toHaveLength(1);
const [record] = records;
expect(record).toBeNull();
});
test('Delete name', async () => {
await registry.deleteName({ crn }, privateKey, fee);
@@ -241,6 +252,7 @@ const namingTests = () => {
test('Delete already deleted name', async () => {
await registry.deleteName({ crn }, privateKey, fee);
await registry.deleteName({ crn }, privateKey, fee);
const records = await registry.lookupNames([crn], true);
expect(records).toBeDefined();
@@ -252,6 +264,22 @@ const namingTests = () => {
expect(latest.id).toBe('');
expect(latest.height).toBeDefined();
});
});
test('Set name without reserving authority', async () => {
await expect(registry.setName({ crn: 'crn://not-reserved/app/test', cid: watcherId }, privateKey, fee))
.rejects.toThrow('Name authority not found.');
});
test('Set name for non-owned authority', async () => {
await registry.sendCoins({ denom: 'aphoton', amount: '1000000000', destinationAddress: otherAccount.formattedCosmosAddress }, privateKey, fee);
// Other account reserves an authority.
await registry.reserveAuthority({ name: otherAuthorityName }, otherPrivateKey, fee);
// Try setting name under other authority.
await expect(registry.setName({ crn: `crn://${otherAuthorityName}/app/test`, cid: watcherId }, privateKey, fee)).rejects.toThrow('Access denied.');
});
test('Delete name for non-owned authority.', async () => {
const otherBondId = await registry.getNextBondId(otherPrivateKey);
@@ -262,6 +290,23 @@ const namingTests = () => {
// Try deleting name under other authority.
await expect(registry.deleteName({ crn: `crn://${otherAuthorityName}/app/test` }, privateKey, fee)).rejects.toThrow('Access denied.');
});
test('Lookup non existing name', async () => {
const records = await registry.lookupNames(['crn://not-reserved/app/test']);
expect(records).toBeDefined();
expect(records).toHaveLength(1);
const [record] = records;
expect(record).toBeNull();
});
test('Resolve non existing name', async () => {
const records = await registry.resolveNames(['crn://not-reserved/app/test']);
expect(records).toBeDefined();
expect(records).toHaveLength(1);
const [record] = records;
expect(record).toBeNull();
});
});
};
if (process.env.TEST_AUCTIONS_ENABLED) {

View File

@@ -1,101 +0,0 @@
// @ts-nocheck
/* eslint-disable */
/**
* Generated by the protoc-gen-ts. DO NOT EDIT!
* compiler version: 4.25.1
* source: google/protobuf/any.proto
* git: https://github.com/thesayyn/protoc-gen-ts */
import * as dependency_1 from "./../../gogoproto/gogo";
import * as pb_1 from "google-protobuf";
export namespace google.protobuf {
export class Any extends pb_1.Message {
#one_of_decls: number[][] = [];
constructor(data?: any[] | {
type_url?: string;
value?: Uint8Array;
}) {
super();
pb_1.Message.initialize(this, Array.isArray(data) ? data : [], 0, -1, [], this.#one_of_decls);
if (!Array.isArray(data) && typeof data == "object") {
if ("type_url" in data && data.type_url != undefined) {
this.type_url = data.type_url;
}
if ("value" in data && data.value != undefined) {
this.value = data.value;
}
}
}
get type_url() {
return pb_1.Message.getFieldWithDefault(this, 1, "") as string;
}
set type_url(value: string) {
pb_1.Message.setField(this, 1, value);
}
get value() {
return pb_1.Message.getFieldWithDefault(this, 2, new Uint8Array(0)) as Uint8Array;
}
set value(value: Uint8Array) {
pb_1.Message.setField(this, 2, value);
}
static fromObject(data: {
type_url?: string;
value?: Uint8Array;
}): Any {
const message = new Any({});
if (data.type_url != null) {
message.type_url = data.type_url;
}
if (data.value != null) {
message.value = data.value;
}
return message;
}
toObject() {
const data: {
type_url?: string;
value?: Uint8Array;
} = {};
if (this.type_url != null) {
data.type_url = this.type_url;
}
if (this.value != null) {
data.value = this.value;
}
return data;
}
serialize(): Uint8Array;
serialize(w: pb_1.BinaryWriter): void;
serialize(w?: pb_1.BinaryWriter): Uint8Array | void {
const writer = w || new pb_1.BinaryWriter();
if (this.type_url.length)
writer.writeString(1, this.type_url);
if (this.value.length)
writer.writeBytes(2, this.value);
if (!w)
return writer.getResultBuffer();
}
static deserialize(bytes: Uint8Array | pb_1.BinaryReader): Any {
const reader = bytes instanceof pb_1.BinaryReader ? bytes : new pb_1.BinaryReader(bytes), message = new Any();
while (reader.nextField()) {
if (reader.isEndGroup())
break;
switch (reader.getFieldNumber()) {
case 1:
message.type_url = reader.readString();
break;
case 2:
message.value = reader.readBytes();
break;
default: reader.skipField();
}
}
return message;
}
serializeBinary(): Uint8Array {
return this.serialize();
}
static deserializeBinary(bytes: Uint8Array): Any {
return Any.deserialize(bytes);
}
}
}

File diff suppressed because it is too large

View File

@@ -239,7 +239,7 @@ export namespace vulcanize.registry.v1beta1 {
}
}
export namespace QueryListRecordsRequest {
export class ReferenceInput extends pb_1.Message {
export class LinkInput extends pb_1.Message {
#one_of_decls: number[][] = [];
constructor(data?: any[] | {
id?: string;
@@ -260,8 +260,8 @@
}
static fromObject(data: {
id?: string;
}): ReferenceInput {
const message = new ReferenceInput({});
}): LinkInput {
const message = new LinkInput({});
if (data.id != null) {
message.id = data.id;
}
@@ -285,8 +285,8 @@
if (!w)
return writer.getResultBuffer();
}
static deserialize(bytes: Uint8Array | pb_1.BinaryReader): ReferenceInput {
const reader = bytes instanceof pb_1.BinaryReader ? bytes : new pb_1.BinaryReader(bytes), message = new ReferenceInput();
static deserialize(bytes: Uint8Array | pb_1.BinaryReader): LinkInput {
const reader = bytes instanceof pb_1.BinaryReader ? bytes : new pb_1.BinaryReader(bytes), message = new LinkInput();
while (reader.nextField()) {
if (reader.isEndGroup())
break;
@@ -302,27 +302,220 @@
serializeBinary(): Uint8Array {
return this.serialize();
}
static deserializeBinary(bytes: Uint8Array): ReferenceInput {
return ReferenceInput.deserialize(bytes);
static deserializeBinary(bytes: Uint8Array): LinkInput {
return LinkInput.deserialize(bytes);
}
}
export class ValueInput extends pb_1.Message {
export class ArrayInput extends pb_1.Message {
#one_of_decls: number[][] = [];
constructor(data?: any[] | {
type?: string;
string?: string;
int?: number;
float?: number;
boolean?: boolean;
reference?: QueryListRecordsRequest.ReferenceInput;
values?: QueryListRecordsRequest.ValueInput[];
}) {
super();
pb_1.Message.initialize(this, Array.isArray(data) ? data : [], 0, -1, [7], this.#one_of_decls);
pb_1.Message.initialize(this, Array.isArray(data) ? data : [], 0, -1, [1], this.#one_of_decls);
if (!Array.isArray(data) && typeof data == "object") {
if ("type" in data && data.type != undefined) {
this.type = data.type;
if ("values" in data && data.values != undefined) {
this.values = data.values;
}
}
}
get values() {
return pb_1.Message.getRepeatedWrapperField(this, QueryListRecordsRequest.ValueInput, 1) as QueryListRecordsRequest.ValueInput[];
}
set values(value: QueryListRecordsRequest.ValueInput[]) {
pb_1.Message.setRepeatedWrapperField(this, 1, value);
}
static fromObject(data: {
values?: ReturnType<typeof QueryListRecordsRequest.ValueInput.prototype.toObject>[];
}): ArrayInput {
const message = new ArrayInput({});
if (data.values != null) {
message.values = data.values.map(item => QueryListRecordsRequest.ValueInput.fromObject(item));
}
return message;
}
toObject() {
const data: {
values?: ReturnType<typeof QueryListRecordsRequest.ValueInput.prototype.toObject>[];
} = {};
if (this.values != null) {
data.values = this.values.map((item: QueryListRecordsRequest.ValueInput) => item.toObject());
}
return data;
}
serialize(): Uint8Array;
serialize(w: pb_1.BinaryWriter): void;
serialize(w?: pb_1.BinaryWriter): Uint8Array | void {
const writer = w || new pb_1.BinaryWriter();
if (this.values.length)
writer.writeRepeatedMessage(1, this.values, (item: QueryListRecordsRequest.ValueInput) => item.serialize(writer));
if (!w)
return writer.getResultBuffer();
}
static deserialize(bytes: Uint8Array | pb_1.BinaryReader): ArrayInput {
const reader = bytes instanceof pb_1.BinaryReader ? bytes : new pb_1.BinaryReader(bytes), message = new ArrayInput();
while (reader.nextField()) {
if (reader.isEndGroup())
break;
switch (reader.getFieldNumber()) {
case 1:
reader.readMessage(message.values, () => pb_1.Message.addToRepeatedWrapperField(message, 1, QueryListRecordsRequest.ValueInput.deserialize(reader), QueryListRecordsRequest.ValueInput));
break;
default: reader.skipField();
}
}
return message;
}
serializeBinary(): Uint8Array {
return this.serialize();
}
static deserializeBinary(bytes: Uint8Array): ArrayInput {
return ArrayInput.deserialize(bytes);
}
}
export class MapInput extends pb_1.Message {
#one_of_decls: number[][] = [];
constructor(data?: any[] | {
values?: Map<string, QueryListRecordsRequest.ValueInput>;
}) {
super();
pb_1.Message.initialize(this, Array.isArray(data) ? data : [], 0, -1, [], this.#one_of_decls);
if (!Array.isArray(data) && typeof data == "object") {
if ("values" in data && data.values != undefined) {
this.values = data.values;
}
}
if (!this.values)
this.values = new Map();
}
get values() {
return pb_1.Message.getField(this, 1) as any as Map<string, QueryListRecordsRequest.ValueInput>;
}
set values(value: Map<string, QueryListRecordsRequest.ValueInput>) {
pb_1.Message.setField(this, 1, value as any);
}
static fromObject(data: {
values?: {
[key: string]: ReturnType<typeof QueryListRecordsRequest.ValueInput.prototype.toObject>;
};
}): MapInput {
const message = new MapInput({});
if (typeof data.values == "object") {
message.values = new Map(Object.entries(data.values).map(([key, value]) => [key, QueryListRecordsRequest.ValueInput.fromObject(value)]));
}
return message;
}
toObject() {
const data: {
values?: {
[key: string]: ReturnType<typeof QueryListRecordsRequest.ValueInput.prototype.toObject>;
};
} = {};
if (this.values != null) {
data.values = (Object.fromEntries)((Array.from)(this.values).map(([key, value]) => [key, value.toObject()]));
}
return data;
}
serialize(): Uint8Array;
serialize(w: pb_1.BinaryWriter): void;
serialize(w?: pb_1.BinaryWriter): Uint8Array | void {
const writer = w || new pb_1.BinaryWriter();
for (const [key, value] of this.values) {
writer.writeMessage(1, this.values, () => {
writer.writeString(1, key);
writer.writeMessage(2, value, () => value.serialize(writer));
});
}
if (!w)
return writer.getResultBuffer();
}
static deserialize(bytes: Uint8Array | pb_1.BinaryReader): MapInput {
const reader = bytes instanceof pb_1.BinaryReader ? bytes : new pb_1.BinaryReader(bytes), message = new MapInput();
while (reader.nextField()) {
if (reader.isEndGroup())
break;
switch (reader.getFieldNumber()) {
case 1:
reader.readMessage(message, () => pb_1.Map.deserializeBinary(message.values as any, reader, reader.readString, () => {
let value;
reader.readMessage(message, () => value = QueryListRecordsRequest.ValueInput.deserialize(reader));
return value;
}));
break;
default: reader.skipField();
}
}
return message;
}
serializeBinary(): Uint8Array {
return this.serialize();
}
static deserializeBinary(bytes: Uint8Array): MapInput {
return MapInput.deserialize(bytes);
}
}
export class ValueInput extends pb_1.Message {
#one_of_decls: number[][] = [[1, 2, 3, 4, 5, 6, 7]];
constructor(data?: any[] | ({} & (({
string?: string;
int?: never;
float?: never;
boolean?: never;
link?: never;
array?: never;
map?: never;
} | {
string?: never;
int?: number;
float?: never;
boolean?: never;
link?: never;
array?: never;
map?: never;
} | {
string?: never;
int?: never;
float?: number;
boolean?: never;
link?: never;
array?: never;
map?: never;
} | {
string?: never;
int?: never;
float?: never;
boolean?: boolean;
link?: never;
array?: never;
map?: never;
} | {
string?: never;
int?: never;
float?: never;
boolean?: never;
link?: string;
array?: never;
map?: never;
} | {
string?: never;
int?: never;
float?: never;
boolean?: never;
link?: never;
array?: QueryListRecordsRequest.ArrayInput;
map?: never;
} | {
string?: never;
int?: never;
float?: never;
boolean?: never;
link?: never;
array?: never;
map?: QueryListRecordsRequest.MapInput;
})))) {
super();
pb_1.Message.initialize(this, Array.isArray(data) ? data : [], 0, -1, [], this.#one_of_decls);
if (!Array.isArray(data) && typeof data == "object") {
if ("string" in data && data.string != undefined) {
this.string = data.string;
}
@@ -335,72 +528,105 @@ export namespace vulcanize.registry.v1beta1 {
if ("boolean" in data && data.boolean != undefined) {
this.boolean = data.boolean;
}
if ("reference" in data && data.reference != undefined) {
this.reference = data.reference;
if ("link" in data && data.link != undefined) {
this.link = data.link;
}
if ("values" in data && data.values != undefined) {
this.values = data.values;
if ("array" in data && data.array != undefined) {
this.array = data.array;
}
if ("map" in data && data.map != undefined) {
this.map = data.map;
}
}
}
get type() {
return pb_1.Message.getFieldWithDefault(this, 1, "") as string;
}
set type(value: string) {
pb_1.Message.setField(this, 1, value);
}
get string() {
return pb_1.Message.getFieldWithDefault(this, 2, "") as string;
return pb_1.Message.getFieldWithDefault(this, 1, "") as string;
}
set string(value: string) {
pb_1.Message.setField(this, 2, value);
pb_1.Message.setOneofField(this, 1, this.#one_of_decls[0], value);
}
get has_string() {
return pb_1.Message.getField(this, 1) != null;
}
get int() {
return pb_1.Message.getFieldWithDefault(this, 3, 0) as number;
return pb_1.Message.getFieldWithDefault(this, 2, 0) as number;
}
set int(value: number) {
pb_1.Message.setField(this, 3, value);
pb_1.Message.setOneofField(this, 2, this.#one_of_decls[0], value);
}
get has_int() {
return pb_1.Message.getField(this, 2) != null;
}
get float() {
return pb_1.Message.getFieldWithDefault(this, 4, 0) as number;
return pb_1.Message.getFieldWithDefault(this, 3, 0) as number;
}
set float(value: number) {
pb_1.Message.setField(this, 4, value);
pb_1.Message.setOneofField(this, 3, this.#one_of_decls[0], value);
}
get has_float() {
return pb_1.Message.getField(this, 3) != null;
}
get boolean() {
return pb_1.Message.getFieldWithDefault(this, 5, false) as boolean;
return pb_1.Message.getFieldWithDefault(this, 4, false) as boolean;
}
set boolean(value: boolean) {
pb_1.Message.setField(this, 5, value);
pb_1.Message.setOneofField(this, 4, this.#one_of_decls[0], value);
}
get reference() {
return pb_1.Message.getWrapperField(this, QueryListRecordsRequest.ReferenceInput, 6) as QueryListRecordsRequest.ReferenceInput;
get has_boolean() {
return pb_1.Message.getField(this, 4) != null;
}
set reference(value: QueryListRecordsRequest.ReferenceInput) {
pb_1.Message.setWrapperField(this, 6, value);
get link() {
return pb_1.Message.getFieldWithDefault(this, 5, "") as string;
}
get has_reference() {
set link(value: string) {
pb_1.Message.setOneofField(this, 5, this.#one_of_decls[0], value);
}
get has_link() {
return pb_1.Message.getField(this, 5) != null;
}
get array() {
return pb_1.Message.getWrapperField(this, QueryListRecordsRequest.ArrayInput, 6) as QueryListRecordsRequest.ArrayInput;
}
set array(value: QueryListRecordsRequest.ArrayInput) {
pb_1.Message.setOneofWrapperField(this, 6, this.#one_of_decls[0], value);
}
get has_array() {
return pb_1.Message.getField(this, 6) != null;
}
get values() {
return pb_1.Message.getRepeatedWrapperField(this, QueryListRecordsRequest.ValueInput, 7) as QueryListRecordsRequest.ValueInput[];
get map() {
return pb_1.Message.getWrapperField(this, QueryListRecordsRequest.MapInput, 7) as QueryListRecordsRequest.MapInput;
}
set values(value: QueryListRecordsRequest.ValueInput[]) {
pb_1.Message.setRepeatedWrapperField(this, 7, value);
set map(value: QueryListRecordsRequest.MapInput) {
pb_1.Message.setOneofWrapperField(this, 7, this.#one_of_decls[0], value);
}
get has_map() {
return pb_1.Message.getField(this, 7) != null;
}
get value() {
const cases: {
[index: number]: "none" | "string" | "int" | "float" | "boolean" | "link" | "array" | "map";
} = {
0: "none",
1: "string",
2: "int",
3: "float",
4: "boolean",
5: "link",
6: "array",
7: "map"
};
return cases[pb_1.Message.computeOneofCase(this, [1, 2, 3, 4, 5, 6, 7])];
}
static fromObject(data: {
type?: string;
string?: string;
int?: number;
float?: number;
boolean?: boolean;
reference?: ReturnType<typeof QueryListRecordsRequest.ReferenceInput.prototype.toObject>;
values?: ReturnType<typeof QueryListRecordsRequest.ValueInput.prototype.toObject>[];
link?: string;
array?: ReturnType<typeof QueryListRecordsRequest.ArrayInput.prototype.toObject>;
map?: ReturnType<typeof QueryListRecordsRequest.MapInput.prototype.toObject>;
}): ValueInput {
const message = new ValueInput({});
if (data.type != null) {
message.type = data.type;
}
if (data.string != null) {
message.string = data.string;
}
@@ -413,27 +639,27 @@ export namespace vulcanize.registry.v1beta1 {
if (data.boolean != null) {
message.boolean = data.boolean;
}
if (data.reference != null) {
message.reference = QueryListRecordsRequest.ReferenceInput.fromObject(data.reference);
if (data.link != null) {
message.link = data.link;
}
if (data.values != null) {
message.values = data.values.map(item => QueryListRecordsRequest.ValueInput.fromObject(item));
if (data.array != null) {
message.array = QueryListRecordsRequest.ArrayInput.fromObject(data.array);
}
if (data.map != null) {
message.map = QueryListRecordsRequest.MapInput.fromObject(data.map);
}
return message;
}
toObject() {
const data: {
type?: string;
string?: string;
int?: number;
float?: number;
boolean?: boolean;
reference?: ReturnType<typeof QueryListRecordsRequest.ReferenceInput.prototype.toObject>;
values?: ReturnType<typeof QueryListRecordsRequest.ValueInput.prototype.toObject>[];
link?: string;
array?: ReturnType<typeof QueryListRecordsRequest.ArrayInput.prototype.toObject>;
map?: ReturnType<typeof QueryListRecordsRequest.MapInput.prototype.toObject>;
} = {};
if (this.type != null) {
data.type = this.type;
}
if (this.string != null) {
data.string = this.string;
}
@@ -446,11 +672,14 @@ export namespace vulcanize.registry.v1beta1 {
if (this.boolean != null) {
data.boolean = this.boolean;
}
if (this.reference != null) {
data.reference = this.reference.toObject();
if (this.link != null) {
data.link = this.link;
}
if (this.values != null) {
data.values = this.values.map((item: QueryListRecordsRequest.ValueInput) => item.toObject());
if (this.array != null) {
data.array = this.array.toObject();
}
if (this.map != null) {
data.map = this.map.toObject();
}
return data;
}
@@ -458,20 +687,20 @@ export namespace vulcanize.registry.v1beta1 {
serialize(w: pb_1.BinaryWriter): void;
serialize(w?: pb_1.BinaryWriter): Uint8Array | void {
const writer = w || new pb_1.BinaryWriter();
if (this.type.length)
writer.writeString(1, this.type);
if (this.string.length)
writer.writeString(2, this.string);
if (this.int != 0)
writer.writeInt64(3, this.int);
if (this.float != 0)
writer.writeDouble(4, this.float);
if (this.boolean != false)
writer.writeBool(5, this.boolean);
if (this.has_reference)
writer.writeMessage(6, this.reference, () => this.reference.serialize(writer));
if (this.values.length)
writer.writeRepeatedMessage(7, this.values, (item: QueryListRecordsRequest.ValueInput) => item.serialize(writer));
if (this.has_string)
writer.writeString(1, this.string);
if (this.has_int)
writer.writeInt64(2, this.int);
if (this.has_float)
writer.writeDouble(3, this.float);
if (this.has_boolean)
writer.writeBool(4, this.boolean);
if (this.has_link)
writer.writeString(5, this.link);
if (this.has_array)
writer.writeMessage(6, this.array, () => this.array.serialize(writer));
if (this.has_map)
writer.writeMessage(7, this.map, () => this.map.serialize(writer));
if (!w)
return writer.getResultBuffer();
}
@@ -482,25 +711,25 @@ export namespace vulcanize.registry.v1beta1 {
break;
switch (reader.getFieldNumber()) {
case 1:
message.type = reader.readString();
break;
case 2:
message.string = reader.readString();
break;
case 3:
case 2:
message.int = reader.readInt64();
break;
case 4:
case 3:
message.float = reader.readDouble();
break;
case 5:
case 4:
message.boolean = reader.readBool();
break;
case 5:
message.link = reader.readString();
break;
case 6:
reader.readMessage(message.reference, () => message.reference = QueryListRecordsRequest.ReferenceInput.deserialize(reader));
reader.readMessage(message.array, () => message.array = QueryListRecordsRequest.ArrayInput.deserialize(reader));
break;
case 7:
reader.readMessage(message.values, () => pb_1.Message.addToRepeatedWrapperField(message, 7, QueryListRecordsRequest.ValueInput.deserialize(reader), QueryListRecordsRequest.ValueInput));
reader.readMessage(message.map, () => message.map = QueryListRecordsRequest.MapInput.deserialize(reader));
break;
default: reader.skipField();
}
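Taken together, the regenerated `ValueInput` drops the old `type` discriminator and models the variants as a proper protobuf oneof (`string`, `int`, `float`, `boolean`, `link`, `array`, `map`). A minimal usage sketch follows; the import path is an assumption, since this diff does not show the generated file's name:

```typescript
// Usage sketch only; the import path below is assumed, not shown in this diff.
import { vulcanize } from './proto/vulcanize/registry/v1beta1/query';

const { QueryListRecordsRequest } = vulcanize.registry.v1beta1;

// Exactly one oneof member can be set; the `value` getter reports which one.
const byVersion = new QueryListRecordsRequest.ValueInput({ string: '1.0.23' });
console.log(byVersion.value);      // 'string'
console.log(byVersion.has_string); // true

// Links are now plain string CIDs instead of a nested ReferenceInput message.
const byLink = new QueryListRecordsRequest.ValueInput({
  link: 'QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D',
});

// Nested values go through the new ArrayInput / MapInput wrappers.
const byTags = new QueryListRecordsRequest.ValueInput({
  array: new QueryListRecordsRequest.ArrayInput({
    values: [new QueryListRecordsRequest.ValueInput({ int: 1 })],
  }),
});
console.log(byLink.value, byTags.value); // 'link' 'array'
```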


@@ -9,7 +9,6 @@ import * as dependency_1 from "./../../../google/protobuf/duration";
import * as dependency_2 from "./../../../google/protobuf/timestamp";
import * as dependency_3 from "./../../../gogoproto/gogo";
import * as dependency_4 from "./../../../cosmos/base/v1beta1/coin";
import * as dependency_5 from "./../../../google/protobuf/any";
import * as pb_1 from "google-protobuf";
export namespace vulcanize.registry.v1beta1 {
export class Params extends pb_1.Message {
@@ -348,7 +347,7 @@ export namespace vulcanize.registry.v1beta1 {
expiry_time?: string;
deleted?: boolean;
owners?: string[];
attributes?: dependency_5.google.protobuf.Any;
attributes?: Uint8Array;
names?: string[];
type?: string;
}) {
@@ -421,13 +420,10 @@ export namespace vulcanize.registry.v1beta1 {
pb_1.Message.setField(this, 6, value);
}
get attributes() {
return pb_1.Message.getWrapperField(this, dependency_5.google.protobuf.Any, 7) as dependency_5.google.protobuf.Any;
return pb_1.Message.getFieldWithDefault(this, 7, new Uint8Array(0)) as Uint8Array;
}
set attributes(value: dependency_5.google.protobuf.Any) {
pb_1.Message.setWrapperField(this, 7, value);
}
get has_attributes() {
return pb_1.Message.getField(this, 7) != null;
set attributes(value: Uint8Array) {
pb_1.Message.setField(this, 7, value);
}
get names() {
return pb_1.Message.getFieldWithDefault(this, 8, []) as string[];
@@ -448,7 +444,7 @@ export namespace vulcanize.registry.v1beta1 {
expiry_time?: string;
deleted?: boolean;
owners?: string[];
attributes?: ReturnType<typeof dependency_5.google.protobuf.Any.prototype.toObject>;
attributes?: Uint8Array;
names?: string[];
type?: string;
}): Record {
@@ -472,7 +468,7 @@ export namespace vulcanize.registry.v1beta1 {
message.owners = data.owners;
}
if (data.attributes != null) {
message.attributes = dependency_5.google.protobuf.Any.fromObject(data.attributes);
message.attributes = data.attributes;
}
if (data.names != null) {
message.names = data.names;
@@ -490,7 +486,7 @@ export namespace vulcanize.registry.v1beta1 {
expiry_time?: string;
deleted?: boolean;
owners?: string[];
attributes?: ReturnType<typeof dependency_5.google.protobuf.Any.prototype.toObject>;
attributes?: Uint8Array;
names?: string[];
type?: string;
} = {};
@@ -513,7 +509,7 @@ export namespace vulcanize.registry.v1beta1 {
data.owners = this.owners;
}
if (this.attributes != null) {
data.attributes = this.attributes.toObject();
data.attributes = this.attributes;
}
if (this.names != null) {
data.names = this.names;
@@ -539,8 +535,8 @@ export namespace vulcanize.registry.v1beta1 {
writer.writeBool(5, this.deleted);
if (this.owners.length)
writer.writeRepeatedString(6, this.owners);
if (this.has_attributes)
writer.writeMessage(7, this.attributes, () => this.attributes.serialize(writer));
if (this.attributes.length)
writer.writeBytes(7, this.attributes);
if (this.names.length)
writer.writeRepeatedString(8, this.names);
if (this.type.length)
@@ -573,7 +569,7 @@ export namespace vulcanize.registry.v1beta1 {
pb_1.Message.addToRepeatedField(message, 6, reader.readString());
break;
case 7:
reader.readMessage(message.attributes, () => message.attributes = dependency_5.google.protobuf.Any.deserialize(reader));
message.attributes = reader.readBytes();
break;
case 8:
pb_1.Message.addToRepeatedField(message, 8, reader.readString());
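With `attributes` now a plain byte string, the generated `Record` no longer pulls in `google.protobuf.Any`: field 7 is written with `writeBytes` and read with `readBytes`, and interpreting the bytes is left to the caller. A rough sketch, where the registry module path is an assumption not shown in this diff:

```typescript
// Sketch only; the import path below is assumed.
import { vulcanize } from './proto/vulcanize/registry/v1beta1/registry';

const record = new vulcanize.registry.v1beta1.Record({
  attributes: new Uint8Array(Buffer.from(JSON.stringify({ url: 'https://cerc.io' }))),
});

// The field round-trips as opaque bytes; here they happen to hold JSON.
const decoded = JSON.parse(Buffer.from(record.attributes).toString());
console.log(decoded.url); // 'https://cerc.io'
```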


@@ -10,16 +10,24 @@ const attributeField = `
attributes {
key
value {
null
int
float
string
boolean
json
reference {
id
... on BooleanValue { bool: value }
... on IntValue { int: value }
... on FloatValue { float: value }
... on StringValue { string: value }
... on BytesValue { bytes: value }
... on LinkValue { link: value }
... on ArrayValue {
array: value {
... on BooleanValue { bool: value }
... on IntValue { int: value }
... on FloatValue { float: value }
... on StringValue { string: value }
... on BytesValue { bytes: value }
... on LinkValue { link: value }
}
}
... on MapValue { map: value { key mapping: value { __typename } } }
}
}
`;
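The selection set now relies on GraphQL inline fragments, so each returned value carries only the alias matching its concrete type (`bool`, `int`, `float`, `string`, `bytes`, `link`, `array` or `map`). The shape below is an illustration of the assumed response, not something this diff asserts:

```typescript
// Assumed shape of one entry in a returned `attributes` array; only the alias
// for the value's concrete type is populated.
const sampleAttribute = {
  key: 'repo_registration_record_cid',
  value: {
    link: 'QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D', // from LinkValue
    // string / int / float / bool / bytes are absent (or null) for this entry
  },
};
```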


@@ -50,18 +50,18 @@ describe('Querying', () => {
expect(records.length).toBeGreaterThanOrEqual(1);
const { attributes: { repo_registration_record_cid: record_repo_registration_record_cid } } = records[0];
expect(repo_registration_record_cid).toBe(record_repo_registration_record_cid);
expect(repo_registration_record_cid).toStrictEqual(record_repo_registration_record_cid);
});
test('Query records by attributes.', async () => {
const { version, name } = watcher.record;
const records = await registry.queryRecords({ version, name }, true);
const { version, url } = watcher.record;
const records = await registry.queryRecords({ version, url, type: undefined }, true);
expect(records.length).toBe(1);
[ watcher ] = records;
const { attributes: { version: recordVersion, name: recordName } } = watcher;
const { attributes: { version: recordVersion, url: recordName } } = watcher;
expect(recordVersion).toBe(version);
expect(recordName).toBe(name);
expect(recordName).toBe(url);
});
test('Query records by id.', async () => {
@@ -75,6 +75,7 @@ describe('Querying', () => {
expect(record.id).toBe(watcher.id);
// temp fix
expect(record.attributes.repo_registration_record_cid).toBeDefined();
expect(record.attributes.repo_registration_record_cid).toHaveLength(46);
expect(record.attributes.repo_registration_record_cid).toHaveProperty("/");
expect(record.attributes.repo_registration_record_cid["/"]).toHaveLength(46);
});
});


@@ -1,7 +1,10 @@
record:
type: WebsiteRegistrationRecord
url: 'https://cerc.io'
repo_registration_record_cid: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D
build_artifact_cid: QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9
tls_cert_cid: QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR
repo_registration_record_cid:
/: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D
build_artifact_cid:
/: QmP8jTG1m9GSDJLCbeWhVSVgEzCPPwXRdCRuJtQ5Tz9Kc9
tls_cert_cid:
/: QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR
version: 1.0.23
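Each CID in the fixture is now nested under a `/` key, i.e. the IPLD-style link form the SDK recognizes. A small sketch of how such a value loads; js-yaml is assumed here purely for illustration, the test fixture loader is not shown in this diff:

```typescript
// Assumes js-yaml for illustration only.
import { load } from 'js-yaml';

const doc = load(`
record:
  repo_registration_record_cid:
    /: QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D
  version: 1.0.23
`) as any;

// The '/'-keyed map becomes a plain object that the SDK treats as a link.
console.log(doc.record.repo_registration_record_cid);
// => { '/': 'QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D' }
```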


@@ -3,8 +3,6 @@ import { Validator } from 'jsonschema';
import RecordSchema from './schema/record.json';
import { Util } from './util';
import * as attributes from './proto/vulcanize/registry/v1beta1/attributes';
import * as any from './proto/google/protobuf/any';
/**
* Record.
@@ -29,22 +27,7 @@ export class Record {
}
get attributes() {
let a = new any.google.protobuf.Any();
if (this._record.type) {
const namespace: any = attributes.vulcanize.registry.v1beta1;
if (!namespace[this._record.type]) {
throw new Error(`Class not found: ${this._record.type}`);
}
const value = namespace[this._record.type].fromObject(this._record);
a = new any.google.protobuf.Any({
type_url: `/vulcanize.registry.v1beta1.${this._record.type}`,
value: value.serialize(),
});
}
return a;
return Buffer.from(JSON.stringify(this._record), 'binary')
}
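On the SDK side, `Record.attributes` no longer wraps a per-type generated class in `google.protobuf.Any`; it simply serializes the whole record object as JSON bytes. A behaviour sketch, assuming the `Record` constructor accepts the plain record object (the constructor is not shown in this diff):

```typescript
// Behaviour sketch for the Record class above; the constructor shape is assumed.
const record = new Record({
  type: 'WebsiteRegistrationRecord',
  url: 'https://cerc.io',
  version: '1.0.23',
});

// attributes is now just the JSON-encoded record, with no Any envelope and no
// lookup of a generated class for the record type.
console.log(JSON.parse(record.attributes.toString()).url); // 'https://cerc.io'
```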
/**


@@ -10,21 +10,21 @@ export class Util {
/**
* Sorts JSON object.
*/
static sortJSON(object: any) {
if (object instanceof Array) {
for (let i = 0; i < object.length; i++) {
object[i] = Util.sortJSON(object[i]);
static sortJSON(obj: any) {
if (obj instanceof Array) {
for (let i = 0; i < obj.length; i++) {
obj[i] = Util.sortJSON(obj[i]);
}
return object;
return obj;
}
if (typeof object !== 'object' || object === null) return object;
if (typeof obj !== 'object' || obj === null) return obj;
let keys = Object.keys(object);
let keys = Object.keys(obj);
keys = keys.sort();
const newObject: {[key: string]: any} = {};
for (let i = 0; i < keys.length; i++) {
newObject[keys[i]] = Util.sortJSON(object[keys[i]]);
newObject[keys[i]] = Util.sortJSON(obj[keys[i]]);
}
return newObject;
}
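Apart from the parameter rename, `sortJSON` is unchanged: it recursively sorts object keys while leaving array order intact (only the elements themselves are normalized). For example:

```typescript
import { Util } from './util';

// Keys are sorted at every level; the array keeps its order.
Util.sortJSON({ b: 1, a: { d: [2, { z: 0, y: 1 }], c: 3 } });
// => { a: { c: 3, d: [2, { y: 1, z: 0 }] }, b: 1 }
```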
@@ -32,31 +32,46 @@ export class Util {
/**
* Marshal object into gql 'attributes' variable.
*/
static toGQLAttributes(object: any) {
static toGQLAttributes(obj: any) {
const vars: any[] = [];
Object.keys(obj).forEach(key => {
const value = this.toGQLValue(obj[key]);
Object.keys(object).forEach(key => {
let type: string = typeof object[key];
if (object[key] === null) {
vars.push({ key, value: { 'null': true } });
} else if (type === 'number') {
type = (object[key] % 1 === 0) ? 'int' : 'float';
vars.push({ key, value: { [type]: object[key] } });
} else if (type === 'string') {
vars.push({ key, value: { 'string': object[key] } });
} else if (type === 'boolean') {
vars.push({ key, value: { 'boolean': object[key] } });
} else if (type === 'object') {
const nestedObject = object[key];
if (nestedObject['/'] !== undefined) {
vars.push({ key, value: { 'reference': { id: nestedObject['/'] } } });
}
if (value !== undefined) {
vars.push({ key, value });
}
});
return vars;
}
static toGQLValue(obj: any) {
if (obj === null) {
return null;
}
let type: string = typeof obj;
switch (type) {
case 'number':
type = (obj % 1 === 0) ? 'int' : 'float';
return { [type]: obj };
case 'string':
return { 'string': obj };
case 'boolean':
return { 'boolean': obj };
case 'object':
if (obj['/'] !== undefined) {
return { 'link': obj['/'] };
}
if (obj instanceof Array) {
return { 'array': obj };
}
return { 'map': obj };
case 'undefined':
return undefined;
default:
throw new Error(`Unknown object type '${type}': ${obj}`);
}
}
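`toGQLAttributes` now delegates the per-value conversion to `toGQLValue`, which adds the `link`, `array` and `map` variants and silently drops `undefined` values, replacing the old hard-coded branches. A worked example with the expected output shown in comments:

```typescript
import { Util } from './util';

Util.toGQLAttributes({
  url: 'https://cerc.io',
  version: 1,
  repo_registration_record_cid: { '/': 'QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D' },
  tags: ['watcher', 'erc20'],
  skipped: undefined,
});
// => [
//   { key: 'url', value: { string: 'https://cerc.io' } },
//   { key: 'version', value: { int: 1 } },
//   { key: 'repo_registration_record_cid',
//     value: { link: 'QmSnuWmxptJZdLJpKRarxBMS2Ju2oANVrgbr2xWbie9b2D' } },
//   { key: 'tags', value: { array: ['watcher', 'erc20'] } },
// ]
```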
/**
* Unmarshal attributes array to object.
*/
@@ -64,24 +79,29 @@ export class Util {
const res: {[key: string]: any} = {};
attributes.forEach(attr => {
if (attr.value.null) {
res[attr.key] = null;
} else if (attr.value.json) {
res[attr.key] = JSON.parse(attr.value.json);
} else if (attr.value.reference) {
// Convert GQL reference to IPLD style link.
const ref = attr.value.reference;
res[attr.key] = { '/': ref.id };
} else {
const { values, null: n, ...types } = attr.value;
const value = Object.values(types).find(v => v !== null);
res[attr.key] = value;
}
res[attr.key] = (attr.value === null) ? null : this.fromGQLValue(attr.value);
});
return res;
}
static fromGQLValue(obj: any) {
// Get first non-null key
const present = Object.keys(obj).find(k => obj[k] !== null);
if (present === undefined) {
throw new Error('Object has no non-null values');
}
// Create an array if array type attribute
if (present === 'array') {
return obj[present].map((e: any) => {
return this.fromGQLValue(e);
});
}
return obj[present];
}
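Correspondingly, `fromGQLAttributes` funnels every non-null value through `fromGQLValue`, which picks the single populated alias and recurses into arrays (the method signature itself is cut off by the hunk, but this is the same `Util` helper used elsewhere in the SDK). For instance:

```typescript
import { Util } from './util';

Util.fromGQLAttributes([
  { key: 'url', value: { string: 'https://cerc.io', int: null, bool: null } },
  { key: 'tags', value: { array: [{ string: 'watcher' }, { string: 'erc20' }] } },
  { key: 'removed', value: null },
]);
// => { url: 'https://cerc.io', tags: ['watcher', 'erc20'], removed: null }
```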
/**
* Get record content ID.
*/

yarn.lock: 1531 changed lines (file diff suppressed because it is too large).