diff --git a/docs/demo.md b/docs/demo.md
index 40270f7..251a7d5 100644
--- a/docs/demo.md
+++ b/docs/demo.md
@@ -10,7 +10,7 @@
 cargo install tmkms --features=softsign --version=0.14.0
 ```
 
 - testnet-state.json ([exported testnet state](./run-first-validator.md#export-testnet-state))
-- LPS distribution spreadsheet URL or CSV file path
+- LPS distribution Google spreadsheet URL or CSV file path
 
 ## Steps
diff --git a/docs/run-first-validator.md b/docs/run-first-validator.md
index 1037f56..a3bc9b6 100644
--- a/docs/run-first-validator.md
+++ b/docs/run-first-validator.md
@@ -4,7 +4,7 @@
 
 - [ansible](playbooks/README.md#ansible-installation)
 - [laconic-so](https://github.com/cerc-io/stack-orchestrator/?tab=readme-ov-file#install)
-- LPS distribution spreadsheet URL or CSV file path
+- LPS distribution Google spreadsheet URL or CSV file path
 
 ## Export testnet state
diff --git a/scripts/generate-lps-distribution-json.py b/scripts/generate-lps-distribution-json.py
index 616896c..d22aa42 100644
--- a/scripts/generate-lps-distribution-json.py
+++ b/scripts/generate-lps-distribution-json.py
@@ -7,6 +7,34 @@
 import argparse
 import urllib.parse
 from bech32 import bech32_decode
+# Column names in the input CSV
+PLACEHOLDER_COLUMN = 'Placeholder'
+LACONIC_ADDRESS_COLUMN = 'Laconic Address'
+TOTAL_LPS_ALLOCATION_COLUMN = 'Total LPS Allocation'
+LOCK_MONTHS_COLUMN = 'Lock (months)'
+VEST_MONTHS_COLUMN = 'Vest (months)'
+
+# Required columns in the input CSV
+REQUIRED_COLUMNS = [
+    PLACEHOLDER_COLUMN,
+    LACONIC_ADDRESS_COLUMN,
+    TOTAL_LPS_ALLOCATION_COLUMN,
+    LOCK_MONTHS_COLUMN,
+    VEST_MONTHS_COLUMN
+]
+
+def to_number(val):
+    """
+    Convert a value to a number, handling empty values and invalid inputs.
+    Returns None for empty or invalid values.
+    """
+    if pd.isna(val) or str(val).strip() == '':
+        return None
+    try:
+        return float(val)
+    except (ValueError, TypeError):
+        return None
+
 def get_csv_download_url(google_sheet_url):
     """
     Convert a full Google Sheets URL to a CSV export URL using the `gid` in the query string.
@@ -41,22 +69,14 @@ def convert_csv_to_json(csv_path, json_path):
     Read the CSV file, extract columns, and save as JSON.
     """
     df = pd.read_csv(csv_path)
-    required_columns = [
-        'Placeholder',
-        'Laconic Address',
-        'Total LPS Allocation',
-        'Lock (months)',
-        'Vest (months)'
-    ]
-    for col in required_columns:
+    for col in REQUIRED_COLUMNS:
         if col not in df.columns:
             raise Exception(f'Missing required column: {col}')
 
     result = {}
     for _, row in df.iterrows():
-        placeholder = str(row['Placeholder']) if not pd.isna(row['Placeholder']) else ''
-
-        laconic_address = str(row['Laconic Address']) if not pd.isna(row['Laconic Address']) else ''
+        placeholder = str(row[PLACEHOLDER_COLUMN]) if not pd.isna(row[PLACEHOLDER_COLUMN]) else ''
+        laconic_address = str(row[LACONIC_ADDRESS_COLUMN]) if not pd.isna(row[LACONIC_ADDRESS_COLUMN]) else ''
 
         # Use laconic_address as key if placeholder is missing or empty
         key = placeholder if placeholder and placeholder.lower() != 'nan' else laconic_address
@@ -72,19 +92,11 @@ def convert_csv_to_json(csv_path, json_path):
             print(f"Skipping invalid Laconic address: {laconic_address}")
            continue
 
-        def to_number(val):
-            if pd.isna(val) or str(val).strip() == '':
-                return None
-            try:
-                return float(val)
-            except (ValueError, TypeError):
-                return None
-
         entry = {
-            'total_lps_allocation': to_number(row['Total LPS Allocation']),
-            'lock_months': row['Lock (months)'] if not pd.isna(row['Lock (months)']) else None,
-            'vest_months': row['Vest (months)'] if not pd.isna(row['Vest (months)']) else None,
-            'laconic_address': row['Laconic Address'] if not pd.isna(row['Laconic Address']) else None
+            'total_lps_allocation': to_number(row[TOTAL_LPS_ALLOCATION_COLUMN]),
+            'lock_months': row[LOCK_MONTHS_COLUMN] if not pd.isna(row[LOCK_MONTHS_COLUMN]) else None,
+            'vest_months': row[VEST_MONTHS_COLUMN] if not pd.isna(row[VEST_MONTHS_COLUMN]) else None,
+            'laconic_address': row[LACONIC_ADDRESS_COLUMN] if not pd.isna(row[LACONIC_ADDRESS_COLUMN]) else None
         }
         result[key] = entry