forked from cerc-io/stack-orchestrator

Compare commits: main...telackey/9 (13 commits)

Commit SHAs:
578fd933a7
35d23ca858
e06de5f4b7
6e64b60295
3205478878
916724f1e2
56b010f512
023a640252
09c9214e4c
2f1cde16b7
b449d88b6c
07030044ec
0a9d68f4e8
@@ -38,6 +38,7 @@ from stack_orchestrator.deploy.webapp.util import (
generate_hostname_for_app,
match_owner,
skip_by_tag,
confirm_payment,
)


@@ -54,6 +55,7 @@ def process_app_deployment_request(
force_rebuild,
fqdn_policy,
recreate_on_deploy,
payment_address,
logger,
):
logger.log("BEGIN - process_app_deployment_request")

@@ -78,6 +80,9 @@ def process_app_deployment_request(
else:
fqdn = f"{requested_name}.{default_dns_suffix}"

# Normalize case (just in case)
fqdn = fqdn.lower()

# 3. check ownership of existing dnsrecord vs this request
dns_lrn = f"{dns_record_namespace}/{fqdn}"
dns_record = laconic.get_record(dns_lrn)

@@ -119,7 +124,7 @@ def process_app_deployment_request(
app_deployment_lrn = app_deployment_request.attributes.deployment
if not app_deployment_lrn.startswith(deployment_record_namespace):
raise Exception(
"Deployment CRN %s is not in a supported namespace"
"Deployment LRN %s is not in a supported namespace"
% app_deployment_request.attributes.deployment
)

@@ -222,6 +227,7 @@ def process_app_deployment_request(
dns_lrn,
deployment_dir,
app_deployment_request,
payment_address,
logger,
)
logger.log("Publication complete.")

@@ -305,6 +311,23 @@ def dump_known_requests(filename, requests, status="SEEN"):
@click.option(
"--log-dir", help="Output build/deployment logs to directory.", default=None
)
@click.option(
"--min-required-payment",
help="Requests must have a minimum payment to be processed",
default=0,
)
@click.option(
"--payment-address",
help="The address to which payments should be made. "
"Default is the current laconic account.",
default=None,
)
@click.option(
"--all-requests",
help="Handle requests addressed to anyone (by default only requests to"
"my payment address are examined).",
is_flag=True,
)
@click.pass_context
def command( # noqa: C901
ctx,

@@ -326,6 +349,9 @@ def command( # noqa: C901
force_rebuild,
recreate_on_deploy,
log_dir,
min_required_payment,
payment_address,
all_requests,
):
if request_id and discover:
print("Cannot specify both --request-id and --discover", file=sys.stderr)

@@ -366,6 +392,10 @@ def command( # noqa: C901
exclude_tags = [tag.strip() for tag in exclude_tags.split(",") if tag]

laconic = LaconicRegistryClient(laconic_config, log_file=sys.stderr)
if not payment_address:
payment_address = laconic.whoami().address

main_logger.log(f"Payment address: {payment_address}")

# Find deployment requests.
# single request

@@ -375,18 +405,20 @@ def command( # noqa: C901
# all requests
elif discover:
main_logger.log("Discovering deployment requests...")
requests = laconic.app_deployment_requests()
if all_requests:
requests = laconic.app_deployment_requests()
else:
requests = laconic.app_deployment_requests({"to": payment_address})

if only_update_state:
if not dry_run:
dump_known_requests(state_file, requests)
return

previous_requests = {}
if state_file:
main_logger.log(f"Loading known requests from {state_file}...")
previous_requests = load_known_requests(state_file)
else:
previous_requests = {}

# Collapse related requests.
requests.sort(key=lambda r: r.createTime)

@@ -452,7 +484,10 @@ def command( # noqa: C901

# Find deployments.
main_logger.log("Discovering existing app deployments...")
deployments = laconic.app_deployments()
if all_requests:
deployments = laconic.app_deployments()
else:
deployments = laconic.app_deployments({"by": payment_address})
deployments_by_request = {}
for d in deployments:
if d.attributes.request:

@@ -466,7 +501,7 @@ def command( # noqa: C901
if r.attributes.request:
cancellation_requests[r.attributes.request] = r

requests_to_execute = []
requests_to_check_for_payment = []
for r in requests_by_name.values():
if r.id in cancellation_requests and match_owner(
cancellation_requests[r.id], r

@@ -488,7 +523,24 @@ def command( # noqa: C901
)
else:
main_logger.log(f"Request {r.id} needs to processed.")
requests_to_check_for_payment.append(r)

requests_to_execute = []
if min_required_payment:
for r in requests_to_check_for_payment:
main_logger.log(f"{r.id}: Confirming payment...")
if confirm_payment(
laconic, r, payment_address, min_required_payment, main_logger
):
main_logger.log(f"{r.id}: Payment confirmed.")
requests_to_execute.append(r)
else:
main_logger.log(
f"Skipping request {r.id}: unable to verify payment."
)
dump_known_requests(state_file, [r], status="UNPAID")
else:
requests_to_execute = requests_to_check_for_payment

main_logger.log(
"Found %d unsatisfied request(s) to process." % len(requests_to_execute)

@@ -531,6 +583,7 @@ def command( # noqa: C901
force_rebuild,
fqdn_policy,
recreate_on_deploy,
payment_address,
build_logger,
)
status = "DEPLOYED"
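The hunks above collect candidate deployment requests into requests_to_check_for_payment and, when --min-required-payment is set, let only the requests that pass confirm_payment() through to requests_to_execute; everything else is recorded as UNPAID. A minimal hedged sketch of that gating step, pulled out as a standalone helper (the helper name and its standalone form are illustrative, not part of this change; confirm_payment and dump_known_requests are the functions shown in this diff):

```python
# Hedged sketch of the payment gate above; gate_on_payment is a hypothetical name.
from stack_orchestrator.deploy.webapp.util import confirm_payment


def gate_on_payment(laconic, candidates, payment_address,
                    min_required_payment, state_file, logger,
                    dump_known_requests):
    """Return only the candidate requests whose payment can be verified."""
    if not min_required_payment:
        # No minimum configured: all candidates proceed unchanged.
        return list(candidates)

    accepted = []
    for r in candidates:
        logger.log(f"{r.id}: Confirming payment...")
        if confirm_payment(laconic, r, payment_address,
                           min_required_payment, logger):
            accepted.append(r)
        else:
            # Remember unpaid requests so they are not re-examined forever.
            dump_known_requests(state_file, [r], status="UNPAID")
    return accepted
```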
@@ -20,18 +20,33 @@ import sys

import click

from stack_orchestrator.deploy.webapp.util import LaconicRegistryClient, match_owner, skip_by_tag
from stack_orchestrator.deploy.webapp.util import (
TimedLogger,
LaconicRegistryClient,
match_owner,
skip_by_tag,
confirm_payment,
)

main_logger = TimedLogger(file=sys.stderr)


def process_app_removal_request(ctx,
laconic: LaconicRegistryClient,
app_removal_request,
deployment_parent_dir,
delete_volumes,
delete_names):
deployment_record = laconic.get_record(app_removal_request.attributes.deployment, require=True)
def process_app_removal_request(
ctx,
laconic: LaconicRegistryClient,
app_removal_request,
deployment_parent_dir,
delete_volumes,
delete_names,
payment_address,
):
deployment_record = laconic.get_record(
app_removal_request.attributes.deployment, require=True
)
dns_record = laconic.get_record(deployment_record.attributes.dns, require=True)
deployment_dir = os.path.join(deployment_parent_dir, dns_record.attributes.name)
deployment_dir = os.path.join(
deployment_parent_dir, dns_record.attributes.name.lower()
)

if not os.path.exists(deployment_dir):
raise Exception("Deployment directory %s does not exist." % deployment_dir)

@@ -41,13 +56,18 @@ def process_app_removal_request(ctx,

# Or of the original deployment request.
if not matched_owner and deployment_record.attributes.request:
matched_owner = match_owner(app_removal_request, laconic.get_record(deployment_record.attributes.request, require=True))
matched_owner = match_owner(
app_removal_request,
laconic.get_record(deployment_record.attributes.request, require=True),
)

if matched_owner:
print("Matched deployment ownership:", matched_owner)
main_logger.log("Matched deployment ownership:", matched_owner)
else:
raise Exception("Unable to confirm ownership of deployment %s for removal request %s" %
(deployment_record.id, app_removal_request.id))
raise Exception(
"Unable to confirm ownership of deployment %s for removal request %s"
% (deployment_record.id, app_removal_request.id)
)

# TODO(telackey): Call the function directly. The easiest way to build the correct click context is to
# exec the process, but it would be better to refactor so we could just call down_operation with the

@@ -64,8 +84,13 @@ def process_app_removal_request(ctx,
"version": "1.0.0",
"request": app_removal_request.id,
"deployment": deployment_record.id,
"by": payment_address,
}
}

if app_removal_request.attributes.payment:
removal_record["record"]["payment"] = app_removal_request.attributes.payment

laconic.publish(removal_record)

if delete_names:

@@ -97,22 +122,85 @@ def dump_known_requests(filename, requests):


@click.command()
@click.option("--laconic-config", help="Provide a config file for laconicd", required=True)
@click.option("--deployment-parent-dir", help="Create deployment directories beneath this directory", required=True)
@click.option(
"--laconic-config", help="Provide a config file for laconicd", required=True
)
@click.option(
"--deployment-parent-dir",
help="Create deployment directories beneath this directory",
required=True,
)
@click.option("--request-id", help="The ApplicationDeploymentRemovalRequest to process")
@click.option("--discover", help="Discover and process all pending ApplicationDeploymentRemovalRequests",
is_flag=True, default=False)
@click.option("--state-file", help="File to store state about previously seen requests.")
@click.option("--only-update-state", help="Only update the state file, don't process any requests anything.", is_flag=True)
@click.option("--delete-names/--preserve-names", help="Delete all names associated with removed deployments.", default=True)
@click.option("--delete-volumes/--preserve-volumes", default=True, help="delete data volumes")
@click.option("--dry-run", help="Don't do anything, just report what would be done.", is_flag=True)
@click.option("--include-tags", help="Only include requests with matching tags (comma-separated).", default="")
@click.option("--exclude-tags", help="Exclude requests with matching tags (comma-separated).", default="")
@click.option(
"--discover",
help="Discover and process all pending ApplicationDeploymentRemovalRequests",
is_flag=True,
default=False,
)
@click.option(
"--state-file", help="File to store state about previously seen requests."
)
@click.option(
"--only-update-state",
help="Only update the state file, don't process any requests anything.",
is_flag=True,
)
@click.option(
"--delete-names/--preserve-names",
help="Delete all names associated with removed deployments.",
default=True,
)
@click.option(
"--delete-volumes/--preserve-volumes", default=True, help="delete data volumes"
)
@click.option(
"--dry-run", help="Don't do anything, just report what would be done.", is_flag=True
)
@click.option(
"--include-tags",
help="Only include requests with matching tags (comma-separated).",
default="",
)
@click.option(
"--exclude-tags",
help="Exclude requests with matching tags (comma-separated).",
default="",
)
@click.option(
"--min-required-payment",
help="Requests must have a minimum payment to be processed",
default=0,
)
@click.option(
"--payment-address",
help="The address to which payments should be made. "
"Default is the current laconic account.",
default=None,
)
@click.option(
"--all-requests",
help="Handle requests addressed to anyone (by default only requests to"
"my payment address are examined).",
is_flag=True,
)
@click.pass_context
def command(ctx, laconic_config, deployment_parent_dir,
request_id, discover, state_file, only_update_state,
delete_names, delete_volumes, dry_run, include_tags, exclude_tags):
def command( # noqa: C901
ctx,
laconic_config,
deployment_parent_dir,
request_id,
discover,
state_file,
only_update_state,
delete_names,
delete_volumes,
dry_run,
include_tags,
exclude_tags,
min_required_payment,
payment_address,
all_requests,
):
if request_id and discover:
print("Cannot specify both --request-id and --discover", file=sys.stderr)
sys.exit(2)

@@ -129,34 +217,47 @@ def command(ctx, laconic_config, deployment_parent_dir,
include_tags = [tag.strip() for tag in include_tags.split(",") if tag]
exclude_tags = [tag.strip() for tag in exclude_tags.split(",") if tag]

laconic = LaconicRegistryClient(laconic_config)
laconic = LaconicRegistryClient(laconic_config, log_file=sys.stderr)
if not payment_address:
payment_address = laconic.whoami().address

# Find deployment removal requests.
# single request
if request_id:
main_logger.log(f"Retrieving request {request_id}...")
requests = [laconic.get_record(request_id, require=True)]
# TODO: assert record type
# all requests
elif discover:
requests = laconic.app_deployment_removal_requests()
main_logger.log("Discovering removal requests...")
if all_requests:
requests = laconic.app_deployment_removal_requests()
else:
requests = laconic.app_deployment_removal_requests({"to": payment_address})

if only_update_state:
if not dry_run:
dump_known_requests(state_file, requests)
return

previous_requests = load_known_requests(state_file)
previous_requests = {}
if state_file:
main_logger.log(f"Loading known requests from {state_file}...")
previous_requests = load_known_requests(state_file)

requests.sort(key=lambda r: r.createTime)
requests.reverse()

# Find deployments.
deployments = {}
for d in laconic.app_deployments(all=True):
deployments[d.id] = d
named_deployments = {}
main_logger.log("Discovering app deployments...")
for d in laconic.app_deployments(all=False):
named_deployments[d.id] = d

# Find removal requests.
removals_by_deployment = {}
removals_by_request = {}
main_logger.log("Discovering deployment removals...")
for r in laconic.app_deployment_removals():
if r.attributes.deployment:
# TODO: should we handle CRNs?

@@ -165,33 +266,65 @@ def command(ctx, laconic_config, deployment_parent_dir,
one_per_deployment = {}
for r in requests:
if not r.attributes.deployment:
print(f"Skipping removal request {r.id} since it was a cancellation.")
main_logger.log(
f"Skipping removal request {r.id} since it was a cancellation."
)
elif r.attributes.deployment in one_per_deployment:
print(f"Skipping removal request {r.id} since it was superseded.")
main_logger.log(f"Skipping removal request {r.id} since it was superseded.")
else:
one_per_deployment[r.attributes.deployment] = r

requests_to_execute = []
requests_to_check_for_payment = []
for r in one_per_deployment.values():
if skip_by_tag(r, include_tags, exclude_tags):
print("Skipping removal request %s, filtered by tag (include %s, exclude %s, present %s)" % (r.id,
include_tags,
exclude_tags,
r.attributes.tags))
elif r.id in removals_by_request:
print(f"Found satisfied request for {r.id} at {removals_by_request[r.id].id}")
elif r.attributes.deployment in removals_by_deployment:
print(
f"Found removal record for indicated deployment {r.attributes.deployment} at "
f"{removals_by_deployment[r.attributes.deployment].id}")
else:
if r.id not in previous_requests:
print(f"Request {r.id} needs to processed.")
try:
if r.attributes.deployment not in named_deployments:
main_logger.log(
f"Skipping removal request {r.id} for {r.attributes.deployment} because it does"
f"not appear to refer to a live, named deployment."
)
elif skip_by_tag(r, include_tags, exclude_tags):
main_logger.log(
"Skipping removal request %s, filtered by tag (include %s, exclude %s, present %s)"
% (r.id, include_tags, exclude_tags, r.attributes.tags)
)
elif r.id in removals_by_request:
main_logger.log(
f"Found satisfied request for {r.id} at {removals_by_request[r.id].id}"
)
elif r.attributes.deployment in removals_by_deployment:
main_logger.log(
f"Found removal record for indicated deployment {r.attributes.deployment} at "
f"{removals_by_deployment[r.attributes.deployment].id}"
)
else:
if r.id not in previous_requests:
main_logger.log(f"Request {r.id} needs to processed.")
requests_to_check_for_payment.append(r)
else:
main_logger.log(
f"Skipping unsatisfied request {r.id} because we have seen it before."
)
except Exception as e:
main_logger.log(f"ERROR examining {r.id}: {e}")

requests_to_execute = []
if min_required_payment:
for r in requests_to_check_for_payment:
main_logger.log(f"{r.id}: Confirming payment...")
if confirm_payment(
laconic, r, payment_address, min_required_payment, main_logger
):
main_logger.log(f"{r.id}: Payment confirmed.")
requests_to_execute.append(r)
else:
print(f"Skipping unsatisfied request {r.id} because we have seen it before.")
main_logger.log(f"Skipping request {r.id}: unable to verify payment.")
dump_known_requests(state_file, [r])
else:
requests_to_execute = requests_to_check_for_payment

print("Found %d unsatisfied request(s) to process." % len(requests_to_execute))
main_logger.log(
"Found %d unsatisfied request(s) to process." % len(requests_to_execute)
)

if not dry_run:
for r in requests_to_execute:

@@ -202,7 +335,10 @@ def command(ctx, laconic_config, deployment_parent_dir,
r,
os.path.abspath(deployment_parent_dir),
delete_volumes,
delete_names
delete_names,
payment_address,
)
except Exception as e:
main_logger.log(f"ERROR processing removal request {r.id}: {e}")
finally:
dump_known_requests(state_file, [r])
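As in the deployer, discovery in this removal handler is now scoped to requests addressed to the handler's own payment address unless --all-requests is passed. A small hedged sketch of that scoping decision (the wrapper name is illustrative; the client methods are the criteria-accepting ones defined in util.py below):

```python
# Sketch only: discover_scoped is a hypothetical wrapper.
def discover_scoped(laconic, payment_address, all_requests):
    if all_requests:
        # Examine removal requests addressed to anyone.
        return laconic.app_deployment_removal_requests()
    # Default: only requests addressed to our own payment address.
    return laconic.app_deployment_removal_requests({"to": payment_address})
```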
@@ -22,7 +22,6 @@ import subprocess
import sys
import tempfile
import uuid

import yaml


@@ -83,6 +82,84 @@ def match_owner(recordA, *records):
return None


def is_lrn(name_or_id: str):
if name_or_id:
return str(name_or_id).startswith("lrn://")
return False


def is_id(name_or_id: str):
return not is_lrn(name_or_id)


def confirm_payment(laconic, record, payment_address, min_amount, logger):
req_owner = laconic.get_owner(record)
if req_owner == payment_address:
# No need to confirm payment if the sender and recipient are the same account.
return True

if not record.attributes.payment:
logger.log(f"{record.id}: no payment tx info")
return False

tx = laconic.get_tx(record.attributes.payment)
if not tx:
logger.log(f"{record.id}: cannot locate payment tx")
return False

if tx.code != 0:
logger.log(
f"{record.id}: payment tx {tx.hash} was not successful - code: {tx.code}, log: {tx.log}"
)
return False

if tx.sender != req_owner:
logger.log(
f"{record.id}: payment sender {tx.sender} in tx {tx.hash} does not match deployment "
f"request owner {req_owner}"
)
return False

if tx.recipient != payment_address:
logger.log(
f"{record.id}: payment recipient {tx.recipient} in tx {tx.hash} does not match {payment_address}"
)
return False

pay_denom = "".join([i for i in tx.amount if not i.isdigit()])
if pay_denom != "alnt":
logger.log(
f"{record.id}: {pay_denom} in tx {tx.hash} is not an expected payment denomination"
)
return False

pay_amount = int("".join([i for i in tx.amount if i.isdigit()]))
if pay_amount < min_amount:
logger.log(
f"{record.id}: payment amount {tx.amount} is less than minimum {min_amount}"
)
return False

# Check if the payment was already used on a
used = laconic.app_deployments(
{"by": payment_address, "payment": tx.hash}, all=True
)
if len(used):
logger.log(f"{record.id}: payment {tx.hash} already used on deployment {used}")
return False

used = laconic.app_deployment_removals(
{"by": payment_address, "payment": tx.hash}, all=True
)
if len(used):
logger.log(
f"{record.id}: payment {tx.hash} already used on deployment removal {used}"
)
return False

return True
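Taken together, confirm_payment() accepts a request only when the referenced transaction exists, succeeded, was sent by the request owner to the expected recipient, is denominated in alnt, meets the minimum amount, and has not already been attached to another deployment or removal. A hedged usage sketch follows; the config path, request id, and minimum amount are placeholders, while the imports and call signatures are the ones shown in this diff:

```python
# Illustrative only; values marked as placeholders are not from this diff.
import sys

from stack_orchestrator.deploy.webapp.util import (
    LaconicRegistryClient,
    TimedLogger,
    confirm_payment,
)

laconic = LaconicRegistryClient("laconic-config.yml", log_file=sys.stderr)  # placeholder path
logger = TimedLogger(file=sys.stderr)
payment_address = laconic.whoami().address

request = laconic.get_record("REQUEST_ID_OR_LRN", require=True)  # placeholder id
if confirm_payment(laconic, request, payment_address, 10000, logger):  # placeholder minimum (alnt)
    logger.log(f"{request.id}: payment verified")
```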


class LaconicRegistryClient:
def __init__(self, config_file, log_file=None):
self.config_file = config_file

@@ -90,10 +167,94 @@ class LaconicRegistryClient:
self.cache = AttrDict(
{
"name_or_id": {},
"accounts": {},
"txs": {},
}
)

def list_records(self, criteria={}, all=False):
def whoami(self, refresh=False):
if not refresh and "whoami" in self.cache:
return self.cache["whoami"]

args = ["laconic", "-c", self.config_file, "registry", "account", "get"]
results = [
AttrDict(r) for r in json.loads(logged_cmd(self.log_file, *args)) if r
]

if len(results):
self.cache["whoami"] = results[0]
return results[0]

return None

def get_owner(self, record, require=False):
bond = self.get_bond(record.bondId, require)
if bond:
return bond.owner

return bond

def get_account(self, address, refresh=False, require=False):
if not refresh and address in self.cache["accounts"]:
return self.cache["accounts"][address]

args = [
"laconic",
"-c",
self.config_file,
"registry",
"account",
"get",
"--address",
address,
]
results = [
AttrDict(r) for r in json.loads(logged_cmd(self.log_file, *args)) if r
]
if len(results):
self.cache["accounts"][address] = results[0]
return results[0]

if require:
raise Exception("Cannot locate account:", address)
return None

def get_bond(self, id, require=False):
if id in self.cache.name_or_id:
return self.cache.name_or_id[id]

args = [
"laconic",
"-c",
self.config_file,
"registry",
"bond",
"get",
"--id",
id,
]
results = [
AttrDict(r) for r in json.loads(logged_cmd(self.log_file, *args)) if r
]
self._add_to_cache(results)
if len(results):
return results[0]

if require:
raise Exception("Cannot locate bond:", id)
return None

def list_bonds(self):
args = ["laconic", "-c", self.config_file, "registry", "bond", "list"]
results = [
AttrDict(r) for r in json.loads(logged_cmd(self.log_file, *args)) if r
]
self._add_to_cache(results)
return results

def list_records(self, criteria=None, all=False):
if criteria is None:
criteria = {}
args = ["laconic", "-c", self.config_file, "registry", "record", "list"]

if all:

@@ -104,22 +265,17 @@ class LaconicRegistryClient:
args.append("--%s" % k)
args.append(str(v))

results = [AttrDict(r) for r in json.loads(logged_cmd(self.log_file, *args))]
results = [
AttrDict(r) for r in json.loads(logged_cmd(self.log_file, *args)) if r
]

# Most recent records first
results.sort(key=lambda r: r.createTime)
results.reverse()
self._add_to_cache(results)

return results

def is_lrn(self, name_or_id: str):
if name_or_id:
return str(name_or_id).startswith("lrn://")
return False

def is_id(self, name_or_id: str):
return not self.is_lrn(name_or_id)

def _add_to_cache(self, records):
if not records:
return

@@ -129,9 +285,10 @@ class LaconicRegistryClient:
if p.names:
for lrn in p.names:
self.cache["name_or_id"][lrn] = p
if p.attributes.type not in self.cache:
self.cache[p.attributes.type] = []
self.cache[p.attributes.type].append(p)
if p.attributes and p.attributes.type:
if p.attributes.type not in self.cache:
self.cache[p.attributes.type] = []
self.cache[p.attributes.type].append(p)

def resolve(self, name):
if not name:

@@ -142,7 +299,9 @@ class LaconicRegistryClient:

args = ["laconic", "-c", self.config_file, "registry", "name", "resolve", name]

parsed = [AttrDict(r) for r in json.loads(logged_cmd(self.log_file, *args))]
parsed = [
AttrDict(r) for r in json.loads(logged_cmd(self.log_file, *args)) if r
]
if parsed:
self._add_to_cache(parsed)
return parsed[0]

@@ -158,7 +317,7 @@ class LaconicRegistryClient:
if name_or_id in self.cache.name_or_id:
return self.cache.name_or_id[name_or_id]

if self.is_lrn(name_or_id):
if is_lrn(name_or_id):
return self.resolve(name_or_id)

args = [

@@ -172,7 +331,9 @@ class LaconicRegistryClient:
name_or_id,
]

parsed = [AttrDict(r) for r in json.loads(logged_cmd(self.log_file, *args)) if r]
parsed = [
AttrDict(r) for r in json.loads(logged_cmd(self.log_file, *args)) if r
]
if len(parsed):
self._add_to_cache(parsed)
return parsed[0]

@@ -181,38 +342,85 @@ class LaconicRegistryClient:
raise Exception("Cannot locate record:", name_or_id)
return None

def app_deployment_requests(self, all=True):
return self.list_records({"type": "ApplicationDeploymentRequest"}, all)
def get_tx(self, txHash, require=False):
if txHash in self.cache["txs"]:
return self.cache["txs"][txHash]

def app_deployments(self, all=True):
return self.list_records({"type": "ApplicationDeploymentRecord"}, all)
args = [
"laconic",
"-c",
self.config_file,
"registry",
"tokens",
"gettx",
"--hash",
txHash,
]

def app_deployment_removal_requests(self, all=True):
return self.list_records({"type": "ApplicationDeploymentRemovalRequest"}, all)
parsed = None
try:
parsed = AttrDict(json.loads(logged_cmd(self.log_file, *args)))
except: # noqa: E722
pass

def app_deployment_removals(self, all=True):
return self.list_records({"type": "ApplicationDeploymentRemovalRecord"}, all)
if parsed:
self.cache["txs"][txHash] = parsed
return parsed

def publish(self, record, names=[]):
if require:
raise Exception("Cannot locate tx:", hash)

def app_deployment_requests(self, criteria=None, all=True):
if criteria is None:
criteria = {}
criteria = criteria.copy()
criteria["type"] = "ApplicationDeploymentRequest"
return self.list_records(criteria, all)

def app_deployments(self, criteria=None, all=True):
if criteria is None:
criteria = {}
criteria = criteria.copy()
criteria["type"] = "ApplicationDeploymentRecord"
return self.list_records(criteria, all)

def app_deployment_removal_requests(self, criteria=None, all=True):
if criteria is None:
criteria = {}
criteria = criteria.copy()
criteria["type"] = "ApplicationDeploymentRemovalRequest"
return self.list_records(criteria, all)

def app_deployment_removals(self, criteria=None, all=True):
if criteria is None:
criteria = {}
criteria = criteria.copy()
criteria["type"] = "ApplicationDeploymentRemovalRecord"
return self.list_records(criteria, all)
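Each of these wrappers now copies the caller's criteria dict and adds the record type before delegating to list_records(), which is what lets callers scope queries to a payment address or a specific payment transaction. A brief hedged sketch of such calls; the config path and transaction hash are placeholders, while the criteria keys are the ones used elsewhere in this diff:

```python
# Illustrative queries against the criteria-based helpers above.
import sys

from stack_orchestrator.deploy.webapp.util import LaconicRegistryClient

laconic = LaconicRegistryClient("laconic-config.yml", log_file=sys.stderr)  # placeholder path
payment_address = laconic.whoami().address

# Deployment requests addressed to us (the deployer's default scoping).
to_me = laconic.app_deployment_requests({"to": payment_address})

# Deployments we have published ourselves.
by_me = laconic.app_deployments({"by": payment_address})

# Deployments that already consumed a given payment tx (as in confirm_payment).
reused = laconic.app_deployments(
    {"by": payment_address, "payment": "TX_HASH"}, all=True  # placeholder hash
)
```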

def publish(self, record, names=None):
if names is None:
names = []
tmpdir = tempfile.mkdtemp()
try:
record_fname = os.path.join(tmpdir, "record.yml")
record_file = open(record_fname, 'w')
record_file = open(record_fname, "w")
yaml.dump(record, record_file)
record_file.close()
print(open(record_fname, 'r').read(), file=self.log_file)
print(open(record_fname, "r").read(), file=self.log_file)

new_record_id = json.loads(
logged_cmd(
self.log_file,
"laconic", "-c",
"laconic",
"-c",
self.config_file,
"registry",
"record",
"publish",
"--filename",
record_fname
)
record_fname,
)
)["id"]
for name in names:
self.set_name(name, new_record_id)

@@ -221,10 +429,29 @@ class LaconicRegistryClient:
logged_cmd(self.log_file, "rm", "-rf", tmpdir)

def set_name(self, name, record_id):
logged_cmd(self.log_file, "laconic", "-c", self.config_file, "registry", "name", "set", name, record_id)
logged_cmd(
self.log_file,
"laconic",
"-c",
self.config_file,
"registry",
"name",
"set",
name,
record_id,
)

def delete_name(self, name):
logged_cmd(self.log_file, "laconic", "-c", self.config_file, "registry", "name", "delete", name)
logged_cmd(
self.log_file,
"laconic",
"-c",
self.config_file,
"registry",
"name",
"delete",
name,
)


def file_hash(filename):

@@ -248,7 +475,9 @@ def determine_base_container(clone_dir, app_type="webapp"):
return base_container


def build_container_image(app_record, tag, extra_build_args=[], logger=None):
def build_container_image(app_record, tag, extra_build_args=None, logger=None):
if extra_build_args is None:
extra_build_args = []
tmpdir = tempfile.mkdtemp()

# TODO: determine if this code could be calling into the Python git library like setup-repositories

@@ -265,9 +494,15 @@ def build_container_image(app_record, tag, extra_build_args=[], logger=None):
if github_token:
logger.log("Github token detected, setting it in the git environment")
git_config_args = [
"git", "config", "--global", f"url.https://{github_token}:@github.com/.insteadOf", "https://github.com/"
]
result = subprocess.run(git_config_args, stdout=logger.file, stderr=logger.file)
"git",
"config",
"--global",
f"url.https://{github_token}:@github.com/.insteadOf",
"https://github.com/",
]
result = subprocess.run(
git_config_args, stdout=logger.file, stderr=logger.file
)
result.check_returncode()
if ref:
# TODO: Determing branch or hash, and use depth 1 if we can.

@@ -275,30 +510,50 @@ def build_container_image(app_record, tag, extra_build_args=[], logger=None):
# Never prompt
git_env["GIT_TERMINAL_PROMPT"] = "0"
try:
subprocess.check_call(["git", "clone", repo, clone_dir], env=git_env, stdout=logger.file, stderr=logger.file)
subprocess.check_call(
["git", "clone", repo, clone_dir],
env=git_env,
stdout=logger.file,
stderr=logger.file,
)
except Exception as e:
logger.log(f"git clone failed. Is the repository {repo} private?")
raise e
try:
subprocess.check_call(["git", "checkout", ref], cwd=clone_dir, env=git_env, stdout=logger.file, stderr=logger.file)
subprocess.check_call(
["git", "checkout", ref],
cwd=clone_dir,
env=git_env,
stdout=logger.file,
stderr=logger.file,
)
except Exception as e:
logger.log(f"git checkout failed. Does ref {ref} exist?")
raise e
else:
# TODO: why is this code different vs the branch above (run vs check_call, and no prompt disable)?
result = subprocess.run(["git", "clone", "--depth", "1", repo, clone_dir], stdout=logger.file, stderr=logger.file)
result = subprocess.run(
["git", "clone", "--depth", "1", repo, clone_dir],
stdout=logger.file,
stderr=logger.file,
)
result.check_returncode()

base_container = determine_base_container(clone_dir, app_record.attributes.app_type)
base_container = determine_base_container(
clone_dir, app_record.attributes.app_type
)

logger.log("Building webapp ...")
build_command = [
sys.argv[0],
"--verbose",
"build-webapp",
"--source-repo", clone_dir,
"--tag", tag,
"--base-container", base_container
"--source-repo",
clone_dir,
"--tag",
tag,
"--base-container",
base_container,
]
if extra_build_args:
build_command.append("--extra-build-args")

@@ -312,8 +567,11 @@ def build_container_image(app_record, tag, extra_build_args=[], logger=None):

def push_container_image(deployment_dir, logger):
logger.log("Pushing images ...")
result = subprocess.run([sys.argv[0], "deployment", "--dir", deployment_dir, "push-images"],
stdout=logger.file, stderr=logger.file)
result = subprocess.run(
[sys.argv[0], "deployment", "--dir", deployment_dir, "push-images"],
stdout=logger.file,
stderr=logger.file,
)
result.check_returncode()
logger.log("Finished pushing images.")

@@ -331,27 +589,35 @@ def deploy_to_k8s(deploy_record, deployment_dir, recreate, logger):

for command in commands_to_run:
logger.log(f"Running {command} command on deployment dir: {deployment_dir}")
result = subprocess.run([sys.argv[0], "deployment", "--dir", deployment_dir, command],
stdout=logger.file, stderr=logger.file)
result = subprocess.run(
[sys.argv[0], "deployment", "--dir", deployment_dir, command],
stdout=logger.file,
stderr=logger.file,
)
result.check_returncode()
logger.log(f"Finished {command} command on deployment dir: {deployment_dir}")

logger.log("Finished deploying to k8s.")


def publish_deployment(laconic: LaconicRegistryClient,
app_record,
deploy_record,
deployment_lrn,
dns_record,
dns_lrn,
deployment_dir,
app_deployment_request=None,
logger=None):
def publish_deployment(
laconic: LaconicRegistryClient,
app_record,
deploy_record,
deployment_lrn,
dns_record,
dns_lrn,
deployment_dir,
app_deployment_request=None,
payment_address=None,
logger=None,
):
if not deploy_record:
deploy_ver = "0.0.1"
else:
deploy_ver = "0.0.%d" % (int(deploy_record.attributes.version.split(".")[-1]) + 1)
deploy_ver = "0.0.%d" % (
int(deploy_record.attributes.version.split(".")[-1]) + 1
)

if not dns_record:
dns_ver = "0.0.1"

@@ -369,9 +635,7 @@ def publish_deployment(laconic: LaconicRegistryClient,
"version": dns_ver,
"name": fqdn,
"resource_type": "A",
"meta": {
"so": uniq.hex
},
"meta": {"so": uniq.hex},
}
}
if app_deployment_request:

@@ -391,12 +655,19 @@ def publish_deployment(laconic: LaconicRegistryClient,
"dns": dns_id,
"meta": {
"config": file_hash(os.path.join(deployment_dir, "config.env")),
"so": uniq.hex
"so": uniq.hex,
},
}
}
if app_deployment_request:
new_deployment_record["record"]["request"] = app_deployment_request.id
if app_deployment_request.attributes.payment:
new_deployment_record["record"][
"payment"
] = app_deployment_request.attributes.payment

if payment_address:
new_deployment_record["record"]["by"] = payment_address

if logger:
logger.log("Publishing ApplicationDeploymentRecord.")

@@ -407,7 +678,9 @@ def publish_deployment(laconic: LaconicRegistryClient,
def hostname_for_deployment_request(app_deployment_request, laconic):
dns_name = app_deployment_request.attributes.dns
if not dns_name:
app = laconic.get_record(app_deployment_request.attributes.application, require=True)
app = laconic.get_record(
app_deployment_request.attributes.application, require=True
)
dns_name = generate_hostname_for_app(app)
elif dns_name.startswith("lrn://"):
record = laconic.get_record(dns_name, require=True)
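The payment_address threaded through publish_deployment() ends up as the by attribute on the published deployment record, alongside the request's payment tx hash; those two attributes are what confirm_payment() queries to reject a payment that has already been spent. A compressed, hedged sketch of just that relationship (the helper name is hypothetical; the "request", "payment", and "by" keys are the ones written in the diff above):

```python
# Sketch: only the payment-related fields are shown; the real record also
# carries application, dns, meta, and version data as in the diff above.
def build_payment_attrs(app_deployment_request, payment_address):
    attrs = {}
    if app_deployment_request:
        attrs["request"] = app_deployment_request.id
        if app_deployment_request.attributes.payment:
            attrs["payment"] = app_deployment_request.attributes.payment
    if payment_address:
        # Recorded so {"by": ..., "payment": ...} queries can detect reuse.
        attrs["by"] = payment_address
    return attrs
```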