diff --git a/packages/lighthouse-watcher/.eslintignore b/packages/lighthouse-watcher/.eslintignore new file mode 100644 index 00000000..653874b5 --- /dev/null +++ b/packages/lighthouse-watcher/.eslintignore @@ -0,0 +1,5 @@ +# Don't lint node_modules. +node_modules + +# Don't lint build output. +dist diff --git a/packages/lighthouse-watcher/.eslintrc.json b/packages/lighthouse-watcher/.eslintrc.json new file mode 100644 index 00000000..476d529d --- /dev/null +++ b/packages/lighthouse-watcher/.eslintrc.json @@ -0,0 +1,27 @@ +{ + "env": { + "browser": true, + "es2021": true + }, + "extends": [ + "semistandard", + "plugin:@typescript-eslint/recommended" + ], + "parser": "@typescript-eslint/parser", + "parserOptions": { + "ecmaVersion": 12, + "sourceType": "module" + }, + "plugins": [ + "@typescript-eslint" + ], + "rules": { + "@typescript-eslint/no-explicit-any": "off", + "@typescript-eslint/explicit-module-boundary-types": [ + "warn", + { + "allowArgumentsExplicitlyTypedAsAny": true + } + ] + } +} diff --git a/packages/lighthouse-watcher/.gitignore b/packages/lighthouse-watcher/.gitignore new file mode 100644 index 00000000..3ac0fb0a --- /dev/null +++ b/packages/lighthouse-watcher/.gitignore @@ -0,0 +1,10 @@ +.idea/ +.vscode/ +node_modules/ +build/ +tmp/ +temp/ + +#Hardhat files +cache +artifacts diff --git a/packages/lighthouse-watcher/.mocharc.yml b/packages/lighthouse-watcher/.mocharc.yml new file mode 100644 index 00000000..50153fbf --- /dev/null +++ b/packages/lighthouse-watcher/.mocharc.yml @@ -0,0 +1,4 @@ +timeout: '10000' +bail: true +exit: true # TODO: Find out why the program doesn't exit on its own. 
+require: 'ts-node/register' diff --git a/packages/lighthouse-watcher/README.md b/packages/lighthouse-watcher/README.md new file mode 100644 index 00000000..d3c03ec2 --- /dev/null +++ b/packages/lighthouse-watcher/README.md @@ -0,0 +1,77 @@ +# Lighthouse Watcher + +## Setup + +Deploy a Lighthouse contract: + +```bash +yarn lighthouse:deploy +``` + +Use the Lighthouse contract address and set `environments/local.toml` to watch the contract. + +```toml +[watch] + lighthouse = "0xLighthouseContractAddress" +``` + +## Run + +Run the server: + +```bash +$ yarn server +``` + +## Test + +To test the watcher locally: + +Open graphql playground at http://127.0.0.1:3005/graphql and set a subscription query + +```graphql +subscription { + onEvent { + block { + hash + number + timestamp + } + tx { + hash + } + contract + eventIndex + event { + __typename + ... on StorageRequestEvent { + uploader + cid + config + fileCost + } + } + proof { + data + } + } +} +``` + +To trigger StorageRequest event locally, run: + +```bash +yarn lighthouse:store --lighthouse 0xLighthouseContractAddress --cid testCid --store-config testConfig --file-cost 10 +``` + +### Smoke test + +To run a smoke test: + +* Start the server. 
+ +* Run: + + ```bash + $ yarn smoke-test + ``` diff --git a/packages/lighthouse-watcher/environments/local.toml b/packages/lighthouse-watcher/environments/local.toml new file mode 100644 index 00000000..821582a3 --- /dev/null +++ b/packages/lighthouse-watcher/environments/local.toml @@ -0,0 +1,39 @@ +[server] + host = "127.0.0.1" + port = 3005 + +[watch] + lighthouse = "0xbDA876401576281a1912a20de135F60de6D7d711" + +[database] + type = "postgres" + host = "localhost" + port = 5432 + database = "lighthouse-watcher" + username = "postgres" + password = "postgres" + synchronize = true + logging = false + + entities = [ "src/entity/**/*.ts" ] + migrations = [ "src/migration/**/*.ts" ] + subscribers = [ "src/subscriber/**/*.ts" ] + + [database.cli] + entitiesDir = "src/entity" + migrationsDir = "src/migration" + subscribersDir = "src/subscriber" + +[upstream] + [upstream.ethServer] + gqlApiEndpoint = "http://127.0.0.1:8082/graphql" + gqlPostgraphileEndpoint = "http://127.0.0.1:5000/graphql" + + [upstream.cache] + name = "requests" + enabled = false + deleteOnStart = false + +[jobQueue] + dbConnectionString = "postgres://postgres:postgres@localhost/lighthouse-watcher-job-queue" + maxCompletionLag = 300 diff --git a/packages/lighthouse-watcher/hardhat.config.ts b/packages/lighthouse-watcher/hardhat.config.ts new file mode 100644 index 00000000..b86afbdd --- /dev/null +++ b/packages/lighthouse-watcher/hardhat.config.ts @@ -0,0 +1,14 @@ +import { HardhatUserConfig } from 'hardhat/config'; + +import './tasks/lighthouse-deploy'; +import './tasks/lighthouse-store'; + +const config: HardhatUserConfig = { + defaultNetwork: 'localhost', + solidity: '0.7.3', + paths: { + sources: './test/contracts' + } +}; + +export default config; diff --git a/packages/lighthouse-watcher/package.json b/packages/lighthouse-watcher/package.json new file mode 100644 index 00000000..051ff09f --- /dev/null +++ b/packages/lighthouse-watcher/package.json @@ -0,0 +1,50 @@ +{ + "name": 
"@vulcanize/lighthouse-watcher", + "version": "0.1.0", + "main": "index.js", + "license": "UNLICENSED", + "private": true, + "scripts": { + "server": "DEBUG=vulcanize:* nodemon --watch src src/server.ts -f environments/local.toml", + "test": "mocha -r ts-node/register src/**/*.spec.ts", + "lint": "eslint .", + "lighthouse:deploy": "hardhat lighthouse-deploy", + "lighthouse:store": "hardhat lighthouse-store", + "smoke-test": "mocha src/smoke.test.ts" + }, + "dependencies": { + "@graphql-tools/schema": "^7.1.5", + "@vulcanize/cache": "^0.1.0", + "@vulcanize/ipld-eth-client": "^0.1.0", + "@vulcanize/util": "^0.1.0", + "apollo-server-express": "^2.25.0", + "apollo-type-bigint": "^0.1.3", + "debug": "^4.3.1", + "ethers": "^5.2.0", + "express": "^4.17.1", + "json-bigint": "^1.0.0", + "lodash": "^4.17.21", + "reflect-metadata": "^0.1.13" + }, + "devDependencies": { + "@types/chai": "^4.2.19", + "@types/express": "^4.17.11", + "@types/fs-extra": "^9.0.11", + "@types/json-bigint": "^1.0.0", + "@types/mocha": "^8.2.2", + "@types/yargs": "^17.0.0", + "@typescript-eslint/eslint-plugin": "^4.25.0", + "@typescript-eslint/parser": "^4.25.0", + "chai": "^4.3.4", + "eslint": "^7.27.0", + "eslint-config-semistandard": "^15.0.1", + "eslint-config-standard": "^16.0.3", + "eslint-plugin-import": "^2.23.3", + "eslint-plugin-node": "^11.1.0", + "eslint-plugin-promise": "^5.1.0", + "eslint-plugin-standard": "^5.0.0", + "mocha": "^8.4.0", + "nodemon": "^2.0.7", + "hardhat": "^2.3.0" + } +} diff --git a/packages/lighthouse-watcher/src/abi/Lighthouse.json b/packages/lighthouse-watcher/src/abi/Lighthouse.json new file mode 100644 index 00000000..64edfbe5 --- /dev/null +++ b/packages/lighthouse-watcher/src/abi/Lighthouse.json @@ -0,0 +1,107 @@ +[ + { + "anonymous": false, + "inputs": [ + { + "indexed": false, + "internalType": "address", + "name": "uploader", + "type": "address" + }, + { + "indexed": false, + "internalType": "string", + "name": "cid", + "type": "string" + }, + { + "indexed": 
false, + "internalType": "string", + "name": "config", + "type": "string" + }, + { + "indexed": false, + "internalType": "uint256", + "name": "fileCost", + "type": "uint256" + } + ], + "name": "StorageRequest", + "type": "event" + }, + { + "stateMutability": "payable", + "type": "fallback" + }, + { + "inputs": [ + { + "internalType": "uint256", + "name": "amount", + "type": "uint256" + }, + { + "internalType": "address payable", + "name": "recipient", + "type": "address" + } + ], + "name": "getPaid", + "outputs": [], + "stateMutability": "nonpayable", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "address", + "name": "", + "type": "address" + }, + { + "internalType": "string", + "name": "", + "type": "string" + } + ], + "name": "requests", + "outputs": [ + { + "internalType": "string", + "name": "cid", + "type": "string" + }, + { + "internalType": "string", + "name": "config", + "type": "string" + }, + { + "internalType": "uint256", + "name": "fileCost", + "type": "uint256" + } + ], + "stateMutability": "view", + "type": "function" + }, + { + "inputs": [ + { + "internalType": "string", + "name": "cid", + "type": "string" + }, + { + "internalType": "string", + "name": "config", + "type": "string" + } + ], + "name": "store", + "outputs": [], + "stateMutability": "payable", + "type": "function" + } +] diff --git a/packages/lighthouse-watcher/src/client.ts b/packages/lighthouse-watcher/src/client.ts new file mode 100644 index 00000000..36f6729c --- /dev/null +++ b/packages/lighthouse-watcher/src/client.ts @@ -0,0 +1,24 @@ +import { gql } from '@apollo/client/core'; +import { GraphQLClient, GraphQLConfig } from '@vulcanize/ipld-eth-client'; + +import { subscribeEvents } from './queries'; + +export class Client { + _config: GraphQLConfig; + _client: GraphQLClient; + + constructor (config: GraphQLConfig) { + this._config = config; + + this._client = new GraphQLClient(config); + } + + async watchEvents (onNext: (value: any) => void): Promise { + return 
this._client.subscribe( + gql(subscribeEvents), + ({ data }) => { + onNext(data.onEvent); + } + ); + } +} diff --git a/packages/lighthouse-watcher/src/events.ts b/packages/lighthouse-watcher/src/events.ts new file mode 100644 index 00000000..ffc12836 --- /dev/null +++ b/packages/lighthouse-watcher/src/events.ts @@ -0,0 +1,66 @@ +import assert from 'assert'; +import debug from 'debug'; +import _ from 'lodash'; +import { PubSub } from 'apollo-server-express'; + +import { EthClient } from '@vulcanize/ipld-eth-client'; + +import { Indexer, ResultEvent, UNKNOWN_EVENT_NAME } from './indexer'; +const log = debug('vulcanize:events'); + +export const LighthouseEvent = 'lighthouse-event'; + +export class EventWatcher { + _ethClient: EthClient + _indexer: Indexer + _subscription: ZenObservable.Subscription | undefined + _pubsub: PubSub + + constructor (ethClient: EthClient, indexer: Indexer, pubsub: PubSub) { + this._ethClient = ethClient; + this._indexer = indexer; + this._pubsub = pubsub; + } + + getEventIterator (): AsyncIterator { + return this._pubsub.asyncIterator([LighthouseEvent]); + } + + async start (): Promise { + assert(!this._subscription, 'subscription already started'); + + await this.watchBlocksAtChainHead(); + } + + async watchBlocksAtChainHead (): Promise { + log('Started watching upstream blocks...'); + this._subscription = await this._ethClient.watchBlocks(async (value) => { + const { blockHash, blockNumber } = _.get(value, 'data.listen.relatedNode'); + log('watchBlock', blockHash, blockNumber); + + const events = await this._indexer.getOrFetchBlockEvents(blockHash); + + for (let ei = 0; ei < events.length; ei++) { + await this.publishLighthouseEventToSubscribers(events[ei]); + } + }); + } + + async publishLighthouseEventToSubscribers (resultEvent: ResultEvent): Promise { + if (resultEvent.event.__typename !== UNKNOWN_EVENT_NAME) { + log(`pushing event to GQL subscribers: ${resultEvent.event.__typename}`); + + // Publishing the event here will result in 
pushing the payload to GQL subscribers for `onEvent`. + await this._pubsub.publish(LighthouseEvent, { + onEvent: resultEvent + }); + } + } + + async stop (): Promise { + if (this._subscription) { + log('Stopped watching upstream blocks'); + this._subscription.unsubscribe(); + } + } +} diff --git a/packages/lighthouse-watcher/src/indexer.ts b/packages/lighthouse-watcher/src/indexer.ts new file mode 100644 index 00000000..de702766 --- /dev/null +++ b/packages/lighthouse-watcher/src/indexer.ts @@ -0,0 +1,167 @@ +import debug from 'debug'; +import JSONbig from 'json-bigint'; +import { ethers } from 'ethers'; +import assert from 'assert'; + +import { EthClient } from '@vulcanize/ipld-eth-client'; +import { Config as BaseConfig } from '@vulcanize/util'; + +import lighthouseABI from './abi/Lighthouse.json'; + +export const UNKNOWN_EVENT_NAME = '__unknown__'; + +const log = debug('vulcanize:indexer'); + +export type ResultEvent = { + block: any; + tx: any; + + contract: string; + + eventIndex: number; + event: any; + + proof: any; +}; + +export interface Config extends BaseConfig { + watch?: { + lighthouse: string + } +} + +export class Indexer { + _config: Config + _ethClient: EthClient + _postgraphileClient: EthClient + + _lighthouseContract: ethers.utils.Interface + + constructor (config: Config, ethClient: EthClient, postgraphileClient: EthClient) { + assert(config.watch); + this._config = config; + this._ethClient = ethClient; + this._postgraphileClient = postgraphileClient; + + this._lighthouseContract = new ethers.utils.Interface(lighthouseABI); + } + + // Note: Some event names might be unknown at this point, as earlier events might not yet be processed. + async getOrFetchBlockEvents (blockHash: string): Promise> { + // Fetch and save events first and make a note in the event sync progress table. 
+ log(`getBlockEvents: fetching from upstream server ${blockHash}`); + const events = await this.fetchEvents(blockHash); + + log(`getBlockEvents: ${blockHash} num events: ${events.length}`); + + return events; + } + + parseEventNameAndArgs (logObj: any): any { + let eventName = UNKNOWN_EVENT_NAME; + let eventInfo = {}; + + const { topics, data } = logObj; + + const logDescription = this._lighthouseContract.parseLog({ data, topics }); + switch (logDescription.name) { + case 'StorageRequest': { + eventName = logDescription.name; + const { uploader, cid, config, fileCost } = logDescription.args; + eventInfo = { uploader, cid, config, fileCost }; + + break; + } + } + + return { eventName, eventInfo }; + } + + async fetchEvents (blockHash: string): Promise> { + assert(this._config.watch); + const contract = this._config.watch.lighthouse; + const { logs, block } = await this._ethClient.getLogs({ blockHash, contract }); + + const { + allEthHeaderCids: { + nodes: [ + { + ethTransactionCidsByHeaderId: { + nodes: transactions + } + } + ] + } + } = await this._postgraphileClient.getBlockWithTransactions({ blockHash }); + + const transactionMap = transactions.reduce((acc: {[key: string]: any}, transaction: {[key: string]: any}) => { + acc[transaction.txHash] = transaction; + return acc; + }, {}); + + const events: Array = []; + + for (let li = 0; li < logs.length; li++) { + const logObj = logs[li]; + const { + index: logIndex, + cid, + ipldBlock, + account: { + address + }, + transaction: { + hash: txHash + } + } = logObj; + + const tx = transactionMap[txHash]; + assert(ethers.utils.getAddress(address) === contract); + + const eventDetails = this.parseEventNameAndArgs(logObj); + const eventName = eventDetails.eventName; + const eventInfo = eventDetails.eventInfo; + + const { + hash, + number, + timestamp, + parent: { + hash: parentHash + } + } = block; + + events.push({ + block: { + hash, + number, + timestamp, + parentHash + }, + eventIndex: logIndex, + tx: { + hash: txHash, 
+ index: tx.index, + from: tx.src, + to: tx.dst + }, + contract, + event: { + __typename: `${eventName}Event`, + ...eventInfo + }, + proof: { + data: JSONbig.stringify({ + blockHash: hash, + receipt: { + cid, + ipldBlock + } + }) + } + }); + } + + return events; + } +} diff --git a/packages/lighthouse-watcher/src/queries.ts b/packages/lighthouse-watcher/src/queries.ts new file mode 100644 index 00000000..9f083c6e --- /dev/null +++ b/packages/lighthouse-watcher/src/queries.ts @@ -0,0 +1,42 @@ +import { gql } from 'graphql-request'; + +const resultEvent = ` +{ + block { + number + hash + timestamp + parentHash + } + tx { + hash + from + to + index + } + contract + eventIndex + + event { + __typename + + ... on StorageRequestEvent { + uploader + cid + config + fileCost + } + } + + proof { + data + } +} +`; + +export const subscribeEvents = gql` + subscription SubscriptionEvents { + onEvent + ${resultEvent} + } +`; diff --git a/packages/lighthouse-watcher/src/resolvers.ts b/packages/lighthouse-watcher/src/resolvers.ts new file mode 100644 index 00000000..64c8fd3e --- /dev/null +++ b/packages/lighthouse-watcher/src/resolvers.ts @@ -0,0 +1,24 @@ +import BigInt from 'apollo-type-bigint'; +import assert from 'assert'; + +import { EventWatcher } from './events'; + +export const createResolvers = async (eventWatcher: EventWatcher): Promise => { + return { + BigInt: new BigInt('bigInt'), + + Event: { + __resolveType: (obj: any) => { + assert(obj.__typename); + + return obj.__typename; + } + }, + + Subscription: { + onEvent: { + subscribe: () => eventWatcher.getEventIterator() + } + } + }; +}; diff --git a/packages/lighthouse-watcher/src/schema.ts b/packages/lighthouse-watcher/src/schema.ts new file mode 100644 index 00000000..0b7f35f6 --- /dev/null +++ b/packages/lighthouse-watcher/src/schema.ts @@ -0,0 +1,75 @@ +import { gql } from '@apollo/client/core'; + +export default gql` +# Types + +# Support uint256 values. 
+scalar BigInt + +# Ethereum types + +type Block { + hash: String! + number: Int! + timestamp: Int! + parentHash: String! +} + +type Transaction { + hash: String! + index: Int! + from: String! + to: String! +} + +# event StorageRequest(address uploader, string cid, string config, uint fileCost); +type StorageRequestEvent { + uploader: String! + cid: String! + config: String! + fileCost: BigInt! +} + +# All events emitted by the watcher. +union Event = StorageRequestEvent + +# Proof for returned data. Serialized blob for now. +# Will be converted into a well defined structure later. +type Proof { + data: String! +} + +# Result event, include additional context over and above the event data. +type ResultEvent { + # Block and tx data for the event. + block: Block! + tx: Transaction! + + # Contract that generated the event. + contract: String! + + # Index of the event in the block. + eventIndex: Int! + + event: Event! + + # Proof from receipts trie. + proof: Proof +} + +# +# Queries +# +type Query { + # https://github.com/ardatan/graphql-tools/issues/764#issuecomment-419556241 + dummy: String +} + +# +# Subscriptions +# +type Subscription { + # Watch for Lighthouse events (at head of chain). + onEvent: ResultEvent! 
+} +`; diff --git a/packages/lighthouse-watcher/src/server.ts b/packages/lighthouse-watcher/src/server.ts new file mode 100644 index 00000000..58177619 --- /dev/null +++ b/packages/lighthouse-watcher/src/server.ts @@ -0,0 +1,90 @@ +import assert from 'assert'; +import 'reflect-metadata'; +import express, { Application } from 'express'; +import { ApolloServer, PubSub } from 'apollo-server-express'; +import yargs from 'yargs'; +import { hideBin } from 'yargs/helpers'; +import debug from 'debug'; +import { createServer } from 'http'; + +import { getCache } from '@vulcanize/cache'; +import { EthClient } from '@vulcanize/ipld-eth-client'; +import { getConfig } from '@vulcanize/util'; + +import typeDefs from './schema'; + +import { createResolvers } from './resolvers'; +import { Indexer } from './indexer'; +import { EventWatcher } from './events'; + +const log = debug('vulcanize:server'); + +export const main = async (): Promise => { + const argv = await yargs(hideBin(process.argv)) + .option('f', { + alias: 'config-file', + demandOption: true, + describe: 'configuration file path (toml)', + type: 'string' + }) + .argv; + + const config = await getConfig(argv.f); + + assert(config.server, 'Missing server config'); + + const { host, port } = config.server; + + const { upstream } = config; + + assert(upstream, 'Missing upstream config'); + const { ethServer: { gqlApiEndpoint, gqlPostgraphileEndpoint }, cache: cacheConfig } = upstream; + assert(gqlApiEndpoint, 'Missing upstream ethServer.gqlApiEndpoint'); + assert(gqlPostgraphileEndpoint, 'Missing upstream ethServer.gqlPostgraphileEndpoint'); + + const cache = await getCache(cacheConfig); + const ethClient = new EthClient({ + gqlEndpoint: gqlApiEndpoint, + gqlSubscriptionEndpoint: gqlPostgraphileEndpoint, + cache + }); + + const postgraphileClient = new EthClient({ + gqlEndpoint: gqlPostgraphileEndpoint, + cache + }); + + const indexer = new Indexer(config, ethClient, postgraphileClient); + + // Note: In-memory pubsub works 
fine for now, as each watcher is a single process anyway. + // Later: https://www.apollographql.com/docs/apollo-server/data/subscriptions/#production-pubsub-libraries + const pubsub = new PubSub(); + const eventWatcher = new EventWatcher(ethClient, indexer, pubsub); + await eventWatcher.start(); + + const resolvers = await createResolvers(eventWatcher); + + const app: Application = express(); + const server = new ApolloServer({ + typeDefs, + resolvers + }); + + await server.start(); + server.applyMiddleware({ app }); + + const httpServer = createServer(app); + server.installSubscriptionHandlers(httpServer); + + httpServer.listen(port, host, () => { + log(`Server is listening on host ${host} port ${port}`); + }); + + return { app, server }; +}; + +main().then(() => { + log('Starting server...'); +}).catch(err => { + log(err); +}); diff --git a/packages/lighthouse-watcher/src/smoke.test.ts b/packages/lighthouse-watcher/src/smoke.test.ts new file mode 100644 index 00000000..02f8d657 --- /dev/null +++ b/packages/lighthouse-watcher/src/smoke.test.ts @@ -0,0 +1,74 @@ +import { expect } from 'chai'; +import assert from 'assert'; +import { ethers, Contract, ContractTransaction, Signer, utils } from 'ethers'; +import 'mocha'; + +import { + getConfig +} from '@vulcanize/util'; + +import lighthouseABI from './abi/Lighthouse.json'; +import { Config } from './indexer'; +import { Client } from './client'; + +const NETWORK_RPC_URL = 'http://localhost:8545'; + +describe('lighthouse-watcher', () => { + let lighthouse: Contract; + + let config: Config; + let signer: Signer; + let client: Client; + + before(async () => { + const configFile = './environments/local.toml'; + config = await getConfig(configFile); + + const { server: { host, port }, watch } = config; + assert(watch); + + const endpoint = `http://${host}:${port}/graphql`; + const gqlEndpoint = endpoint; + const gqlSubscriptionEndpoint = endpoint; + client = new Client({ + gqlEndpoint, + gqlSubscriptionEndpoint + }); + + 
const provider = new ethers.providers.JsonRpcProvider(NETWORK_RPC_URL); +    signer = provider.getSigner(); +    lighthouse = new Contract(watch.lighthouse, lighthouseABI, signer); +  }); + +  it('should trigger StorageRequest event', done => { +    (async () => { +      const cid = 'testCid'; +      const config = 'testConfig'; +      const fileCost = '10'; +      const signerAddress = await signer.getAddress(); + +      // Subscribe to watcher events using the GQL Client; assertions run when the event arrives. +      const subscription = await client.watchEvents((value: any) => { +        if (value.event.__typename === 'StorageRequestEvent') { +          expect(value.event.uploader).to.equal(signerAddress); +          expect(value.event.cid).to.equal(cid); +          expect(value.event.config).to.equal(config); +          expect(value.event.fileCost).to.equal(fileCost); + +          if (subscription) { +            subscription.unsubscribe(); +          } + +          done(); +        } +      }); + +      // Call store() on the contract to trigger a StorageRequest event. +      const value = utils.parseUnits(fileCost, 'wei'); +      const transaction: ContractTransaction = await lighthouse.store(cid, config, { value }); +      await transaction.wait(); +    })().catch((error) => { +      done(error); +    }); +  }); +}); diff --git a/packages/lighthouse-watcher/src/types/common/main.d.ts b/packages/lighthouse-watcher/src/types/common/main.d.ts new file mode 100644 index 00000000..43580f17 --- /dev/null +++ b/packages/lighthouse-watcher/src/types/common/main.d.ts @@ -0,0 +1,2 @@ +// https://medium.com/@steveruiz/using-a-javascript-library-without-type-declarations-in-a-typescript-project-3643490015f3 +declare module 'canonical-json' diff --git a/packages/lighthouse-watcher/src/types/common/package.json b/packages/lighthouse-watcher/src/types/common/package.json new file mode 100644 index 00000000..2bf0efa4 --- /dev/null +++ b/packages/lighthouse-watcher/src/types/common/package.json @@ -0,0 +1,5 @@ +{ +  "name": "common", +  "version": "0.1.0", +  "typings": "main.d.ts" +} diff --git a/packages/lighthouse-watcher/tasks/lighthouse-deploy.ts b/packages/lighthouse-watcher/tasks/lighthouse-deploy.ts new file mode 100644 index 00000000..69ba3602 
--- /dev/null +++ b/packages/lighthouse-watcher/tasks/lighthouse-deploy.ts @@ -0,0 +1,12 @@ +import { task } from 'hardhat/config'; +import '@nomiclabs/hardhat-ethers'; + +task('lighthouse-deploy', 'Deploys Lighthouse contract') +  .setAction(async (_, hre) => { +    await hre.run('compile'); + +    const lighthouseFactory = await hre.ethers.getContractFactory('Lighthouse'); +    const lighthouse = await lighthouseFactory.deploy(); + +    console.log('Lighthouse deployed to:', lighthouse.address); +  }); diff --git a/packages/lighthouse-watcher/tasks/lighthouse-store.ts b/packages/lighthouse-watcher/tasks/lighthouse-store.ts new file mode 100644 index 00000000..ae16470d --- /dev/null +++ b/packages/lighthouse-watcher/tasks/lighthouse-store.ts @@ -0,0 +1,41 @@ +import { task, types } from 'hardhat/config'; +import '@nomiclabs/hardhat-ethers'; +import { ContractTransaction, utils } from 'ethers'; + +task('lighthouse-store', 'Call Lighthouse store method') +  .addParam('lighthouse', 'Address of Lighthouse contract', undefined, types.string) +  .addParam('cid', 'store cid', undefined, types.string) +  .addParam('storeConfig', 'store config', undefined, types.string) +  .addParam('fileCost', 'store fileCost (wei)', undefined, types.float) +  .setAction(async (args, hre) => { +    const { +      lighthouse: lighthouseAddress, +      cid, +      storeConfig: config, +      fileCost +    } = args; + +    await hre.run('compile'); + +    const Lighthouse = await hre.ethers.getContractFactory('Lighthouse'); +    const lighthouse = Lighthouse.attach(lighthouseAddress); +    const value = utils.parseUnits(String(fileCost), 'wei'); + +    const transaction: ContractTransaction = await lighthouse.store(cid, config, { value }); + +    const receipt = await transaction.wait(); + +    if (receipt.events) { +      console.log('receipt blockHash', receipt.blockHash); + +      const storageRequestEvent = receipt.events.find(el => el.event === 'StorageRequest'); + +      if (storageRequestEvent && storageRequestEvent.args) { +        console.log('StorageRequest Event'); + 
console.log('uploader:', storageRequestEvent.args.uploader); + console.log('cid:', storageRequestEvent.args.cid); + console.log('config:', storageRequestEvent.args.config); + console.log('fileCost:', storageRequestEvent.args.fileCost.toString()); + } + } + }); diff --git a/packages/lighthouse-watcher/test/contracts/Lighthouse.sol b/packages/lighthouse-watcher/test/contracts/Lighthouse.sol new file mode 100644 index 00000000..10a53aea --- /dev/null +++ b/packages/lighthouse-watcher/test/contracts/Lighthouse.sol @@ -0,0 +1,33 @@ +pragma solidity >=0.4.22 <0.8.0; + +contract Lighthouse { + address owner = msg.sender; + + struct Content { + string cid; + string config; + uint fileCost; + } + + event StorageRequest(address uploader, string cid, string config, uint fileCost); + + mapping(address => mapping(string => Content)) public requests; + + function store(string calldata cid, string calldata config) + external + payable + { + uint fileCost = msg.value; + requests[msg.sender][cid] = Content(cid, config, fileCost); + emit StorageRequest(msg.sender, cid, config, msg.value); + } + + function getPaid(uint amount, address payable recipient) + external + { + require(msg.sender == owner); + recipient.transfer(amount); + } + + fallback () external payable {} +} diff --git a/packages/lighthouse-watcher/tsconfig.json b/packages/lighthouse-watcher/tsconfig.json new file mode 100644 index 00000000..c3742b6d --- /dev/null +++ b/packages/lighthouse-watcher/tsconfig.json @@ -0,0 +1,77 @@ +{ + "compilerOptions": { + /* Visit https://aka.ms/tsconfig.json to read more about this file */ + + /* Basic Options */ + // "incremental": true, /* Enable incremental compilation */ + "target": "es5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017', 'ES2018', 'ES2019', 'ES2020', 'ES2021', or 'ESNEXT'. */ + "module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', 'es2020', or 'ESNext'. 
*/ + "lib": [ "ES5", "ES6", "ES2020" ], /* Specify library files to be included in the compilation. */ + // "allowJs": true, /* Allow javascript files to be compiled. */ + // "checkJs": true, /* Report errors in .js files. */ + // "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx' or 'react-jsxdev'. */ + "declaration": true, /* Generates corresponding '.d.ts' file. */ + // "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */ + // "sourceMap": true, /* Generates corresponding '.map' file. */ + // "outFile": "./", /* Concatenate and emit output to single file. */ + "outDir": "dist", /* Redirect output structure to the directory. */ + // "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */ + // "composite": true, /* Enable project compilation */ + // "tsBuildInfoFile": "./", /* Specify file to store incremental compilation information */ + // "removeComments": true, /* Do not emit comments to output. */ + // "noEmit": true, /* Do not emit outputs. */ + // "importHelpers": true, /* Import emit helpers from 'tslib'. */ + "downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */ + // "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */ + + /* Strict Type-Checking Options */ + "strict": true, /* Enable all strict type-checking options. */ + // "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */ + // "strictNullChecks": true, /* Enable strict null checks. */ + // "strictFunctionTypes": true, /* Enable strict checking of function types. */ + // "strictBindCallApply": true, /* Enable strict 'bind', 'call', and 'apply' methods on functions. 
*/ + "strictPropertyInitialization": false, /* Enable strict checking of property initialization in classes. */ + // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ + // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ + + /* Additional Checks */ + // "noUnusedLocals": true, /* Report errors on unused locals. */ + // "noUnusedParameters": true, /* Report errors on unused parameters. */ + // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */ + // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */ + // "noUncheckedIndexedAccess": true, /* Include 'undefined' in index signature results */ + // "noImplicitOverride": true, /* Ensure overriding members in derived classes are marked with an 'override' modifier. */ + // "noPropertyAccessFromIndexSignature": true, /* Require undeclared properties from index signatures to use element accesses. */ + + /* Module Resolution Options */ + "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */ + // "baseUrl": "./", /* Base directory to resolve non-absolute module names. */ + // "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */ + // "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */ + "typeRoots": [ + "./src/types" + ], /* List of folders to include type definitions from. */ + // "types": [], /* Type declaration files to be included in compilation. */ + // "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */ + "esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. 
Implies 'allowSyntheticDefaultImports'. */ + // "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */ + // "allowUmdGlobalAccess": true, /* Allow accessing UMD globals from modules. */ + + /* Source Map Options */ + // "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */ + // "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */ + // "inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */ + // "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */ + + /* Experimental Options */ + "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */ + // "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */ + + /* Advanced Options */ + "skipLibCheck": true, /* Skip type checking of declaration files. */ + "forceConsistentCasingInFileNames": true, /* Disallow inconsistently-cased references to the same file. */ + "resolveJsonModule": true /* Enabling the option allows importing JSON, and validating the types in that JSON file. */ + }, + "include": ["src"], + "exclude": ["dist"] +}