Compare commits

...

3 Commits

8 changed files with 122 additions and 3 deletions

6
.gitignore vendored Normal file
View File

@ -0,0 +1,6 @@
venv
laconic_testnet.egg-info
__pycache__
*-deployment
*-spec.yml

32
cli/README.md Normal file
View File

@ -0,0 +1,32 @@
# cli
## Setup
* Run the following commands from the repo root directory
```bash
python3 -m venv venv
source ./venv/bin/activate
```
* Install CLI in editable mode
```bash
pip install --editable .
```
* Verify installation
```bash
laconic-testnet --help
```
## Usage
```bash
laconic-testnet --onboarded-json path/to/onboarded-accounts.json --subscribers-csv path/to/subscribers.csv --output path/to/subscribed-onboarded-accounts.csv
```
## Cleanup
Deactivate the virtual environment:
```bash
deactivate
```

15
cli/cli.py Normal file
View File

@ -0,0 +1,15 @@
import click
from .core import process_subscribers
@click.command()
@click.option('--onboarded-json', required=True, type=click.Path(exists=True), help='Path to onboarded accounts JSON file.')
@click.option('--subscribers-csv', required=True, type=click.Path(exists=True), help='Path to the subscribers CSV file.')
@click.option('--output', required=True, type=click.Path(), help='Path to the output CSV file.')
def main(onboarded_json: str, subscribers_csv: str, output: str) -> None:
    """
    CLI tool to match subscriber data with participant data and generate a CSV.
    """
    # NOTE: the docstring above is user-facing — click shows it as the command
    # help text for `laconic-testnet --help` — so it is part of runtime behavior.
    # All real work is delegated to core.process_subscribers; this function only
    # wires click option values (paths validated by click.Path) through to it.
    process_subscribers(onboarded_json, subscribers_csv, output)
# Allow direct execution (e.g. `python -m cli.cli`) in addition to the
# `laconic-testnet` console-script entry point declared in setup.py.
if __name__ == "__main__":
    main()

50
cli/core.py Normal file
View File

@ -0,0 +1,50 @@
import csv
import hashlib
import json
def hash_subscriber_id(subscriber_id):
    """Return the SHA-256 digest of *subscriber_id* as a '0x'-prefixed hex string."""
    digest = hashlib.sha256(subscriber_id.encode())
    return '0x{}'.format(digest.hexdigest())
def process_subscribers(onboarded_json, subscribers_csv, output):
    """Join subscriber CSV rows with onboarded participants and write a CSV.

    Each subscriber's plaintext ``subscriber_id`` is hashed ('0x' + sha256 hex)
    and looked up against the participants' ``kyc_id`` values; only subscribers
    with a matching participant appear in the output file.

    Args:
        onboarded_json: path to a JSON file with a top-level 'participants'
            list; each entry carries kyc_id, cosmos_address, nitro_address
            and role keys.
        subscribers_csv: path to a CSV with subscriber_id, email, status,
            'premium?' and created_at columns.
        output: path of the CSV file to create with the matched rows.

    Raises:
        KeyError: if an expected JSON key or CSV column is missing.
    """
    # Explicit UTF-8 keeps decoding platform-independent (the previous code
    # used the locale default encoding, which mis-reads UTF-8 on e.g. Windows).
    with open(onboarded_json, 'r', encoding='utf-8') as json_file:
        json_data = json.load(json_file)

    # Index participants by their (already hashed) kyc_id for O(1) lookups.
    kyc_map = {participant['kyc_id']: participant for participant in json_data['participants']}

    # newline='' is the csv-module-documented mode for reading as well as
    # writing; without it, quoted fields containing newlines can mis-parse.
    with open(subscribers_csv, 'r', newline='', encoding='utf-8') as csv_file:
        subscribers = list(csv.DictReader(csv_file))

    def process_subscriber(subscriber):
        # Hash the plaintext subscriber_id the same way the kyc_id values were
        # produced (see hash_subscriber_id): '0x' + sha256 hex digest.
        hashed_subscriber_id = '0x' + hashlib.sha256(subscriber['subscriber_id'].encode()).hexdigest()
        participant = kyc_map.get(hashed_subscriber_id)
        if participant is None:
            return None  # subscriber was never onboarded; drop the row
        return {
            'subscriber_id': subscriber['subscriber_id'],
            'email': subscriber['email'],
            'cosmos_address': participant['cosmos_address'],
            'nitro_address': participant['nitro_address'],
            'role': participant['role'],
            'hashed_subscriber_id': participant['kyc_id'],
            'status': subscriber['status'],
            'premium': subscriber['premium?'],
            'created_at': subscriber['created_at']
        }

    # Keep only the subscribers that matched an onboarded participant.
    output_data = list(filter(None, map(process_subscriber, subscribers)))

    fieldnames = ['subscriber_id', 'email', 'cosmos_address', 'nitro_address', 'role', 'hashed_subscriber_id', 'status', 'premium', 'created_at']
    with open(output, 'w', newline='', encoding='utf-8') as csv_file:
        csv_writer = csv.DictWriter(csv_file, fieldnames=fieldnames)
        csv_writer.writeheader()
        csv_writer.writerows(output_data)
    print(f'Data has been written to {output}')

View File

@ -625,7 +625,7 @@
```bash ```bash
network: network:
ports: ports:
laconic-console: console:
- '127.0.0.1:4001:80' - '127.0.0.1:4001:80'
``` ```

1
requirements.txt Normal file
View File

@ -0,0 +1 @@
Click

15
setup.py Normal file
View File

@ -0,0 +1,15 @@
# Packaging configuration for the laconic-testnet CLI.
from setuptools import find_packages, setup

setup(
    name="laconic-testnet",
    version="0.1.0",
    # Automatically discover the `cli` package (and any future packages).
    packages=find_packages(),
    # Runtime dependencies; keep in sync with requirements.txt.
    install_requires=["Click"],
    entry_points={
        # Install a `laconic-testnet` executable that invokes cli.cli:main.
        'console_scripts': [
            'laconic-testnet = cli.cli:main',
        ],
    },
)

View File

@ -277,7 +277,7 @@ laconic-so deployment --dir laconic-console-deployment start
```bash ```bash
# Example # Example
laconic-so deployment --dir laconic-console-deployment exec cli "laconic registry bond create --type photon --quantity 1000000000000" laconic-so deployment --dir laconic-console-deployment exec cli "laconic registry bond create --type alnt --quantity 1000000000000"
``` ```
## Clean up ## Clean up