Compare commits

5 commits (author and date columns not preserved in this view):

- 69f558ecaa
- a00a600588
- f982b7fd33
- fcf2584a3a
- 333a3eea02
@@ -39,7 +39,7 @@ jobs:
       - name: "Print Python version"
         run: python3 --version
       - name: "Install shiv"
-        run: pip install shiv==1.0.6
+        run: pip install shiv
       - name: "Generate build version file"
         run: ./scripts/create_build_tag_file.sh
       - name: "Build local shiv package"
@@ -35,7 +35,7 @@ jobs:
       - name: "Print Python version"
         run: python3 --version
       - name: "Install shiv"
-        run: pip install shiv==1.0.6
+        run: pip install shiv
      - name: "Build local shiv package"
        id: build
        run: |
@@ -33,7 +33,7 @@ jobs:
       - name: "Print Python version"
         run: python3 --version
       - name: "Install shiv"
-        run: pip install shiv==1.0.6
+        run: pip install shiv
       - name: "Generate build version file"
         run: ./scripts/create_build_tag_file.sh
       - name: "Build local shiv package"
@@ -33,7 +33,7 @@ jobs:
       - name: "Print Python version"
         run: python3 --version
       - name: "Install shiv"
-        run: pip install shiv==1.0.6
+        run: pip install shiv
       - name: "Generate build version file"
         run: ./scripts/create_build_tag_file.sh
       - name: "Build local shiv package"
@@ -33,7 +33,7 @@ jobs:
       - name: "Print Python version"
         run: python3 --version
       - name: "Install shiv"
-        run: pip install shiv==1.0.6
+        run: pip install shiv
       - name: "Generate build version file"
         run: ./scripts/create_build_tag_file.sh
       - name: "Build local shiv package"
@@ -33,7 +33,7 @@ jobs:
       - name: "Print Python version"
         run: python3 --version
       - name: "Install shiv"
-        run: pip install shiv==1.0.6
+        run: pip install shiv
       - name: "Generate build version file"
         run: ./scripts/create_build_tag_file.sh
       - name: "Build local shiv package"
@@ -35,7 +35,7 @@ jobs:
       - name: "Print Python version"
         run: python3 --version
       - name: "Install shiv"
-        run: pip install shiv==1.0.6
+        run: pip install shiv
       - name: "Generate build version file"
         run: ./scripts/create_build_tag_file.sh
       - name: "Build local shiv package"
@@ -35,7 +35,7 @@ jobs:
       - name: "Print Python version"
         run: python3 --version
       - name: "Install shiv"
-        run: pip install shiv==1.0.6
+        run: pip install shiv
       - name: "Generate build version file"
         run: ./scripts/create_build_tag_file.sh
       - name: "Build local shiv package"
@@ -32,7 +32,7 @@ jobs:
       - name: "Print Python version"
         run: python3 --version
       - name: "Install shiv"
-        run: pip install shiv==1.0.6
+        run: pip install shiv
       - name: "Generate build version file"
         run: ./scripts/create_build_tag_file.sh
       - name: "Build local shiv package"
@@ -33,7 +33,7 @@ jobs:
       - name: "Print Python version"
         run: python3 --version
       - name: "Install shiv"
-        run: pip install shiv==1.0.6
+        run: pip install shiv
       - name: "Generate build version file"
         run: ./scripts/create_build_tag_file.sh
       - name: "Build local shiv package"
@@ -240,4 +240,4 @@ fi

 time $CERC_BUILD_TOOL run cerc_compile || exit 1

 exit 0
@@ -0,0 +1,51 @@
# Originally from: https://github.com/devcontainers/images/blob/main/src/javascript-node/.devcontainer/Dockerfile
# [Choice] Node.js version (use -bullseye variants on local arm64/Apple Silicon): 18, 16, 14, 18-bullseye, 16-bullseye, 14-bullseye, 18-buster, 16-buster, 14-buster
ARG VARIANT=20-bullseye-slim
FROM node:${VARIANT}

ARG USERNAME=node
ARG NPM_GLOBAL=/usr/local/share/npm-global

# Add NPM global to PATH.
ENV PATH=${NPM_GLOBAL}/bin:${PATH}
# Prevents npm from printing version warnings
ENV NPM_CONFIG_UPDATE_NOTIFIER=false

RUN \
    # Configure global npm install location, use group to adapt to UID/GID changes
    if ! cat /etc/group | grep -e "^npm:" > /dev/null 2>&1; then groupadd -r npm; fi \
    && usermod -a -G npm ${USERNAME} \
    && umask 0002 \
    && mkdir -p ${NPM_GLOBAL} \
    && touch /usr/local/etc/npmrc \
    && chown ${USERNAME}:npm ${NPM_GLOBAL} /usr/local/etc/npmrc \
    && chmod g+s ${NPM_GLOBAL} \
    && npm config -g set prefix ${NPM_GLOBAL} \
    && su ${USERNAME} -c "npm config -g set prefix ${NPM_GLOBAL}" \
    # Install eslint
    && su ${USERNAME} -c "umask 0002 && npm install -g eslint" \
    # Install semver
    && su ${USERNAME} -c "umask 0002 && npm install -g semver" \
    # Install pnpm
    && su ${USERNAME} -c "umask 0002 && npm install -g pnpm" \
    # Install bun
    && su ${USERNAME} -c "umask 0002 && npm install -g bun@1.1.x" \
    && npm cache clean --force > /dev/null 2>&1

# [Optional] Uncomment this section to install additional OS packages.
RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
    && apt-get -y install --no-install-recommends jq gettext-base procps

# [Optional] Uncomment if you want to install more global node modules
# RUN su node -c "npm install -g <your-package-list-here>"

# We do this to get a yq binary from the published container, for the correct architecture we're building here
COPY --from=docker.io/mikefarah/yq:latest /usr/bin/yq /usr/local/bin/yq

# Expose port for http
EXPOSE 80

COPY /scripts /scripts

# Default command sleeps forever so docker doesn't kill it
ENTRYPOINT ["/scripts/start-serving-app.sh"]
@@ -0,0 +1,9 @@
FROM cerc/nextjs-base:local

ARG CERC_NEXT_VERSION=keep
ARG CERC_BUILD_TOOL

WORKDIR /app
COPY . .
RUN rm -rf node_modules build .next*
RUN /scripts/build-app.sh /app
stack_orchestrator/data/container-build/cerc-nextjs-snowball/build.sh (new executable file, 35 lines)
@@ -0,0 +1,35 @@
#!/usr/bin/env bash
# Build cerc/laconic-registry-cli

source ${CERC_CONTAINER_BASE_DIR}/build-base.sh

# See: https://stackoverflow.com/a/246128/1701505
SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )

CERC_CONTAINER_BUILD_WORK_DIR=${CERC_CONTAINER_BUILD_WORK_DIR:-$SCRIPT_DIR}
CERC_CONTAINER_BUILD_DOCKERFILE=${CERC_CONTAINER_BUILD_DOCKERFILE:-$SCRIPT_DIR/Dockerfile}
CERC_CONTAINER_BUILD_TAG=${CERC_CONTAINER_BUILD_TAG:-cerc/nextjs-base:local}

docker build -t $CERC_CONTAINER_BUILD_TAG ${build_command_args} -f $CERC_CONTAINER_BUILD_DOCKERFILE $CERC_CONTAINER_BUILD_WORK_DIR
rc=$?

if [ $rc -ne 0 ]; then
  echo "BUILD FAILED" 1>&2
  exit $rc
fi

if [ "$CERC_CONTAINER_BUILD_TAG" != "cerc/nextjs-base:local" ]; then
  cat <<EOF

#################################################################

Built host container for $CERC_CONTAINER_BUILD_WORK_DIR with tag:

    $CERC_CONTAINER_BUILD_TAG

To test locally run:

    laconic-so run-webapp --image $CERC_CONTAINER_BUILD_TAG --env-file /path/to/environment.env

EOF
fi
@@ -0,0 +1,52 @@
#!/bin/bash

if [ -n "$CERC_SCRIPT_DEBUG" ]; then
  set -x
fi

WORK_DIR="${1:-./}"
SRC_DIR="${2:-.next}"
TRG_DIR="${3:-.next-r}"

CERC_BUILD_TOOL="${CERC_BUILD_TOOL}"
if [ -z "$CERC_BUILD_TOOL" ]; then
  if [ -f "pnpm-lock.yaml" ]; then
    CERC_BUILD_TOOL=pnpm
  elif [ -f "yarn.lock" ]; then
    CERC_BUILD_TOOL=yarn
  elif [ -f "bun.lockb" ]; then
    CERC_BUILD_TOOL=bun
  else
    CERC_BUILD_TOOL=npm
  fi
fi

cd "${WORK_DIR}" || exit 1

rm -rf "$TRG_DIR"
mkdir -p "$TRG_DIR"
cp -rp "$SRC_DIR" "$TRG_DIR/"

if [ -f ".env" ]; then
  TMP_ENV=`mktemp`
  declare -px > $TMP_ENV
  set -a
  source .env
  source $TMP_ENV
  set +a
  rm -f $TMP_ENV
fi

for f in $(find . -type f \( -regex '.*.html?' -or -regex ".*.[tj]s\(x\|on\)?$" \) | grep -v 'node_modules' | grep -v '.git'); do
  for e in $(cat "${f}" | tr -s '[:blank:]' '\n' | tr -s '["/\\{},();]' '\n' | tr -s "[']" '\n' | egrep -o -e '^CERC_RUNTIME_ENV_.+$' -e '^LACONIC_HOSTED_CONFIG_.+$'); do
    orig_name=$(echo -n "${e}" | sed 's/"//g')
    cur_name=$(echo -n "${orig_name}" | sed 's/CERC_RUNTIME_ENV_//g')
    cur_val=$(echo -n "\$${cur_name}" | envsubst)
    if [ "$CERC_RETAIN_ENV_QUOTES" != "true" ]; then
      cur_val=$(sed "s/^[\"']//" <<< "$cur_val" | sed "s/[\"']//")
    fi
    esc_val=$(sed 's/[&/\]/\\&/g' <<< "$cur_val")
    echo "$f: $cur_name=$cur_val"
    sed -i "s/$orig_name/$esc_val/g" $f
  done
done
@@ -0,0 +1,201 @@
#!/bin/bash

set -euo pipefail

if [ -n "${CERC_SCRIPT_DEBUG:-}" ]; then
  set -x
fi

# Determine the build tool
CERC_BUILD_TOOL="${CERC_BUILD_TOOL:-}"
if [ -z "$CERC_BUILD_TOOL" ]; then
  if [ -f "pnpm-lock.yaml" ]; then
    CERC_BUILD_TOOL=pnpm
  elif [ -f "yarn.lock" ]; then
    CERC_BUILD_TOOL=yarn
  elif [ -f "bun.lockb" ]; then
    CERC_BUILD_TOOL=bun
  else
    CERC_BUILD_TOOL=npm
  fi
fi

SCRIPT_DIR=$(dirname "$(readlink -f "$0")")
WORK_DIR="${1:-/app}"

cd "${WORK_DIR}" || exit 1

# Get the app's Next.js version
CERC_NEXT_VERSION=$(jq -r '.dependencies.next' package.json)
echo "Using Next.js version: $CERC_NEXT_VERSION"

# Determine webpack version based on Next.js version (unused if already installed)
determine_webpack_version() {
  local next_version=$1
  local major_version=${next_version%%.*}
  case $major_version in
    13)
      echo "5.75.0"
      ;;
    14)
      echo "5.88.0"
      ;;
    *)
      echo "5.93.0" # Default to latest stable version if unknown
      ;;
  esac
}

# Check if webpack is already in package.json
if CERC_WEBPACK_VERSION=$(jq -r '.dependencies.webpack // .devDependencies.webpack // empty' package.json); then
  if [ -n "$CERC_WEBPACK_VERSION" ]; then
    echo "Using existing webpack version: $CERC_WEBPACK_VERSION"
  else
    CERC_WEBPACK_VERSION=$(determine_webpack_version "$CERC_NEXT_VERSION")
    echo "Determined webpack version based on Next.js: $CERC_WEBPACK_VERSION"
    # Add webpack to devDependencies
    jq ".devDependencies.webpack = \"$CERC_WEBPACK_VERSION\"" package.json > package.json.tmp && mv package.json.tmp package.json
  fi
else
  CERC_WEBPACK_VERSION=$(determine_webpack_version "$CERC_NEXT_VERSION")
  echo "Determined webpack version based on Next.js: $CERC_WEBPACK_VERSION"
  # Add webpack to devDependencies
  jq ".devDependencies.webpack = \"$CERC_WEBPACK_VERSION\"" package.json > package.json.tmp && mv package.json.tmp package.json
fi

# Determine the Next.js config file
if [ -f "next.config.mjs" ]; then
  NEXT_CONFIG_JS="next.config.mjs"
  IMPORT_OR_REQUIRE="import"
else
  NEXT_CONFIG_JS="next.config.js"
  IMPORT_OR_REQUIRE="require"
fi

# If this file doesn't exist at all, we'll get errors below.
if [ ! -f "${NEXT_CONFIG_JS}" ]; then
  touch ${NEXT_CONFIG_JS}
fi

# Backup the original config if not already done
if [ ! -f "next.config.dist" ]; then
  cp "$NEXT_CONFIG_JS" next.config.dist
fi

# Install js-beautify if not present
command -v js-beautify >/dev/null || npm i -g js-beautify

# Beautify the config file
js-beautify "$NEXT_CONFIG_JS" > "${NEXT_CONFIG_JS}.pretty"
mv "${NEXT_CONFIG_JS}.pretty" "$NEXT_CONFIG_JS"

# Add webpack import/require if not present
if [ "$IMPORT_OR_REQUIRE" = "import" ]; then
  if ! grep -q "import.*webpack" "$NEXT_CONFIG_JS"; then
    sed -i '1iimport webpack from "webpack";' "$NEXT_CONFIG_JS"
  fi
  if ! grep -q "import.*createRequire" "$NEXT_CONFIG_JS"; then
    sed -i '2iimport { createRequire } from "module";\nconst require = createRequire(import.meta.url);' "$NEXT_CONFIG_JS"
  fi
else
  if ! grep -q "require.*webpack" "$NEXT_CONFIG_JS"; then
    sed -i '1iconst webpack = require("webpack");' "$NEXT_CONFIG_JS"
  fi
fi

# Add environment mapping logic if not present
if ! grep -q "let envMap;" "$NEXT_CONFIG_JS"; then
  cat << 'EOF' >> "$NEXT_CONFIG_JS"

let envMap;
try {
  envMap = require('./.env-list.json').reduce((a, v) => {
    a[v] = JSON.stringify(`CERC_RUNTIME_ENV_${v.split(/\./).pop()}`);
    return a;
  }, {});
} catch (e) {
  console.error('Error loading .env-list.json:', e);
  envMap = Object.keys(process.env).reduce((a, v) => {
    if (v.startsWith('CERC_')) {
      a[`process.env.${v}`] = JSON.stringify(process.env[v]);
    }
    return a;
  }, {});
}
console.log('Environment map:', envMap);
EOF
fi

# Add or update webpack configuration
if ! grep -q '__xCfg__' "$NEXT_CONFIG_JS"; then
  cat << 'EOF' >> "$NEXT_CONFIG_JS"

const __xCfg__ = (nextConfig) => {
  return {
    ...nextConfig,
    webpack: (config, options) => {
      config.plugins.push(new webpack.DefinePlugin(envMap));
      if (typeof nextConfig.webpack === 'function') {
        return nextConfig.webpack(config, options);
      }
      return config;
    },
  };
};

EOF

  # Update the export/module.exports line
  if [ "$IMPORT_OR_REQUIRE" = "import" ]; then
    sed -i 's/export default/const __orig_cfg__ =/' "$NEXT_CONFIG_JS"
    echo "export default __xCfg__(__orig_cfg__);" >> "$NEXT_CONFIG_JS"
  else
    sed -i 's/module.exports =/const __orig_cfg__ =/' "$NEXT_CONFIG_JS"
    echo "module.exports = __xCfg__(__orig_cfg__);" >> "$NEXT_CONFIG_JS"
  fi
fi

# Clean up the config file
js-beautify "$NEXT_CONFIG_JS" > "${NEXT_CONFIG_JS}.pretty"
mv "${NEXT_CONFIG_JS}.pretty" "$NEXT_CONFIG_JS"

# Generate .env-list.json
"${SCRIPT_DIR}/find-env.sh" "$(pwd)" > .env-list.json

# Update package.json
[ ! -f "package.dist" ] && cp package.json package.dist

# Install dependencies
$CERC_BUILD_TOOL install || exit 1

# Get the build command from package.json
BUILD_COMMAND=`jq -r '.scripts.build // ""' package.json`

if [ -z "$BUILD_COMMAND" ]; then
  echo "No build command found in package.json. Using default 'next build'."
  BUILD_COMMAND="next build"
fi

# Determine the appropriate build commands based on the Next.js version
if semver -p -r ">=14.2.0" "$CERC_NEXT_VERSION"; then
  # For >= 14.2.0
  CERC_NEXT_COMPILE_COMMAND="${BUILD_COMMAND/next build/next build --experimental-build-mode compile}"
  CERC_NEXT_GENERATE_COMMAND="${BUILD_COMMAND/next build/next build --experimental-build-mode generate}"
elif semver -p -r ">=13.4.2" "$CERC_NEXT_VERSION"; then
  # For 13.4.2 to 14.1.x
  CERC_NEXT_COMPILE_COMMAND="${BUILD_COMMAND/next build/next experimental-compile}"
  CERC_NEXT_GENERATE_COMMAND="${BUILD_COMMAND/next build/next experimental-generate}"
else
  # For versions before 13.4.2
  CERC_NEXT_COMPILE_COMMAND="$BUILD_COMMAND"
  CERC_NEXT_GENERATE_COMMAND="$BUILD_COMMAND"
fi

# Update package.json with the appropriate scripts
cat package.json | jq ".scripts.cerc_compile = \"$CERC_NEXT_COMPILE_COMMAND\"" | jq ".scripts.cerc_generate = \"$CERC_NEXT_GENERATE_COMMAND\"" > package.json.$$
mv package.json.$$ package.json

# Run the compile command
time $CERC_BUILD_TOOL run cerc_compile || exit 1

exit 0
@@ -0,0 +1,31 @@
#!/bin/bash

if [ -n "$CERC_SCRIPT_DEBUG" ]; then
  set -x
fi

WORK_DIR="${1:-./}"
TMPF=$(mktemp)

cd "$WORK_DIR" || exit 1

for d in $(find . -maxdepth 1 -type d | grep -v '\./\.' | grep '/' | cut -d'/' -f2); do
  egrep "/$d[/$]?" .gitignore >/dev/null 2>/dev/null
  if [ $? -eq 0 ]; then
    continue
  fi

  for f in $(find "$d" -regex ".*.[tj]sx?$" -type f); do
    cat "$f" | tr -s '[:blank:]' '\n' | tr -s '[{},()]' '\n' | egrep -o 'process.env.[A-Za-z0-9_]+' >> $TMPF
  done
done

NEXT_CONF="next.config.mjs next.config.js next.config.dist"
for f in $NEXT_CONF; do
  if [ -f "$f" ]; then
    cat "$f" | tr -s '[:blank:]' '\n' | tr -s '[{},()]' '\n' | egrep -o 'process.env.[A-Za-z0-9_]+' >> $TMPF
  fi
done

cat $TMPF | sort -u | jq --raw-input . | jq --slurp .
rm -f $TMPF
@@ -0,0 +1,65 @@
#!/usr/bin/env bash
if [ -n "$CERC_SCRIPT_DEBUG" ]; then
  set -x
fi


SCRIPT_DIR=$( cd -- "$( dirname -- "${BASH_SOURCE[0]}" )" &> /dev/null && pwd )
CERC_MAX_GENERATE_TIME=${CERC_MAX_GENERATE_TIME:-120}
tpid=""

ctrl_c() {
  kill $tpid $(ps -ef | grep node | grep next | awk '{print $2}') 2>/dev/null
}

trap ctrl_c INT

CERC_BUILD_TOOL="${CERC_BUILD_TOOL}"
if [ -z "$CERC_BUILD_TOOL" ]; then
  if [ -f "pnpm-lock.yaml" ]; then
    CERC_BUILD_TOOL=pnpm
  elif [ -f "yarn.lock" ]; then
    CERC_BUILD_TOOL=yarn
  elif [ -f "bun.lockb" ]; then
    CERC_BUILD_TOOL=bun
  else
    CERC_BUILD_TOOL=npm
  fi
fi

CERC_WEBAPP_FILES_DIR="${CERC_WEBAPP_FILES_DIR:-/app}"
cd "$CERC_WEBAPP_FILES_DIR"

"$SCRIPT_DIR/apply-runtime-env.sh" "`pwd`" .next .next-r
mv .next .next.old
mv .next-r/.next .

if [ "$CERC_NEXTJS_SKIP_GENERATE" != "true" ]; then
  jq -e '.scripts.cerc_generate' package.json >/dev/null
  if [ $? -eq 0 ]; then
    npm run cerc_generate > gen.out 2>&1 &
    tail -f gen.out &
    tpid=$!

    count=0
    generate_done="false"
    while [ $count -lt $CERC_MAX_GENERATE_TIME ] && [ "$generate_done" == "false" ]; do
      sleep 1
      count=$((count + 1))
      grep 'rendered as static' gen.out > /dev/null
      if [ $? -eq 0 ]; then
        generate_done="true"
      fi
    done

    if [ $generate_done != "true" ]; then
      echo "ERROR: 'npm run cerc_generate' not successful within CERC_MAX_GENERATE_TIME" 1>&2
      exit 1
    fi

    kill $tpid $(ps -ef | grep node | grep next | grep generate | awk '{print $2}') 2>/dev/null
    tpid=""
  fi
fi

$CERC_BUILD_TOOL start . -- -p ${CERC_LISTEN_PORT:-80}
@@ -27,25 +27,3 @@ The Package Registry Stack supports a build environment that requires a package
   ```

 * The local gitea registry can now be accessed at <http://localhost:3000> (the username and password can be taken from the deployment logs)
-
-* Configure the hostname `gitea.local`:
-
-  Update `/etc/hosts`:
-
-  ```bash
-  sudo nano /etc/hosts
-
-  # Add the following line
-  127.0.0.1 gitea.local
-  ```
-
-  Check resolution:
-
-  ```bash
-  ping gitea.local
-
-  PING gitea.local (127.0.0.1) 56(84) bytes of data.
-  64 bytes from localhost (127.0.0.1): icmp_seq=1 ttl=64 time=0.147 ms
-  64 bytes from localhost (127.0.0.1): icmp_seq=2 ttl=64 time=0.033 ms
-  ...
-  ```
@@ -33,7 +33,6 @@ from stack_orchestrator.deploy.webapp.util import (
     LaconicRegistryClient,
     TimedLogger,
     build_container_image,
-    confirm_auction,
     push_container_image,
     file_hash,
     deploy_to_k8s,
@@ -43,7 +42,6 @@ from stack_orchestrator.deploy.webapp.util import (
     match_owner,
     skip_by_tag,
     confirm_payment,
-    load_known_requests,
 )


@@ -259,6 +257,12 @@ def process_app_deployment_request(
     logger.log("END - process_app_deployment_request")


+def load_known_requests(filename):
+    if filename and os.path.exists(filename):
+        return json.load(open(filename, "r"))
+    return {}
+
+
 def dump_known_requests(filename, requests, status="SEEN"):
     if not filename:
         return
@@ -346,12 +350,6 @@ def dump_known_requests(filename, requests, status="SEEN"):
     "my payment address are examined).",
     is_flag=True,
 )
-@click.option(
-    "--auction-requests",
-    help="Handle requests with auction id set (skips payment confirmation).",
-    is_flag=True,
-    default=False,
-)
 @click.option(
     "--config-upload-dir",
     help="The directory containing uploaded config.",
@@ -360,9 +358,6 @@ def dump_known_requests(filename, requests, status="SEEN"):
 @click.option(
     "--private-key-file", help="The private key for decrypting config.", required=True
 )
-@click.option(
-    "--registry-lock-file", help="File path to use for registry mutex lock", default=None
-)
 @click.option(
     "--private-key-passphrase",
     help="The passphrase for the private key.",
@@ -395,8 +390,6 @@ def command(  # noqa: C901
     private_key_file,
     private_key_passphrase,
     all_requests,
-    auction_requests,
-    registry_lock_file,
 ):
     if request_id and discover:
         print("Cannot specify both --request-id and --discover", file=sys.stderr)
@@ -448,7 +441,7 @@ def command(  # noqa: C901
     include_tags = [tag.strip() for tag in include_tags.split(",") if tag]
     exclude_tags = [tag.strip() for tag in exclude_tags.split(",") if tag]

-    laconic = LaconicRegistryClient(laconic_config, log_file=sys.stderr, mutex_lock_file=registry_lock_file)
+    laconic = LaconicRegistryClient(laconic_config, log_file=sys.stderr)
     webapp_deployer_record = laconic.get_record(lrn, require=True)
     payment_address = webapp_deployer_record.attributes.paymentAddress
     main_logger.log(f"Payment address: {payment_address}")
@@ -589,29 +582,8 @@ def command(  # noqa: C901
                 requests_to_check_for_payment.append(r)

     requests_to_execute = []
-    for r in requests_to_check_for_payment:
-        if r.attributes.auction:
-            if auction_requests:
-                if confirm_auction(
-                    laconic,
-                    r,
-                    lrn,
-                    payment_address,
-                    main_logger
-                ):
-                    main_logger.log(f"{r.id}: Auction confirmed.")
-                    requests_to_execute.append(r)
-                else:
-                    main_logger.log(
-                        f"Skipping request {r.id}: unable to verify auction."
-                    )
-                    dump_known_requests(state_file, [r], status="SKIP")
-            else:
-                main_logger.log(
-                    f"Skipping request {r.id}: not handling requests with auction."
-                )
-                dump_known_requests(state_file, [r], status="SKIP")
-        elif min_required_payment:
+    if min_required_payment:
+        for r in requests_to_check_for_payment:
             main_logger.log(f"{r.id}: Confirming payment...")
             if confirm_payment(
                 laconic,
@@ -627,8 +599,8 @@ def command(  # noqa: C901
                     f"Skipping request {r.id}: unable to verify payment."
                 )
                 dump_known_requests(state_file, [r], status="UNPAID")
-            else:
-                requests_to_execute.append(r)
+    else:
+        requests_to_execute = requests_to_check_for_payment

     main_logger.log(
         "Found %d unsatisfied request(s) to process." % len(requests_to_execute)
@@ -653,7 +625,7 @@ def command(  # noqa: C901
             )
             run_log_file = open(run_log_file_path, "wt")
             run_reg_client = LaconicRegistryClient(
-                laconic_config, log_file=run_log_file, mutex_lock_file=registry_lock_file
+                laconic_config, log_file=run_log_file
             )

             build_logger = TimedLogger(run_id, run_log_file)
|
|||||||
# Copyright ©2023 Vulcanize
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <http:#www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
import sys
|
|
||||||
import json
|
|
||||||
|
|
||||||
import click
|
|
||||||
|
|
||||||
from stack_orchestrator.deploy.webapp.util import (
|
|
||||||
AttrDict,
|
|
||||||
LaconicRegistryClient,
|
|
||||||
TimedLogger,
|
|
||||||
load_known_requests,
|
|
||||||
AUCTION_KIND_PROVIDER,
|
|
||||||
AuctionStatus,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def process_app_deployment_auction(
|
|
||||||
ctx,
|
|
||||||
laconic: LaconicRegistryClient,
|
|
||||||
request,
|
|
||||||
current_status,
|
|
||||||
reveal_file_path,
|
|
||||||
bid_amount,
|
|
||||||
logger,
|
|
||||||
):
|
|
||||||
# Fetch auction details
|
|
||||||
auction_id = request.attributes.auction
|
|
||||||
auction = laconic.get_auction(auction_id)
|
|
||||||
if not auction:
|
|
||||||
raise Exception(f"Unable to locate auction: {auction_id}")
|
|
||||||
|
|
||||||
# Check auction kind
|
|
||||||
if auction.kind != AUCTION_KIND_PROVIDER:
|
|
||||||
raise Exception(f"Auction kind needs to be ${AUCTION_KIND_PROVIDER}, got {auction.kind}")
|
|
||||||
|
|
||||||
if current_status == "PENDING":
|
|
||||||
# Skip if pending auction not in commit state
|
|
||||||
if auction.status != AuctionStatus.COMMIT:
|
|
||||||
logger.log(f"Skipping pending request, auction {auction_id} status: {auction.status}")
|
|
||||||
return "SKIP", ""
|
|
||||||
|
|
||||||
# Check max_price
|
|
||||||
bid_amount_int = int(bid_amount)
|
|
||||||
max_price_int = int(auction.maxPrice.quantity)
|
|
||||||
if max_price_int < bid_amount_int:
|
|
||||||
logger.log(f"Skipping auction {auction_id} with max_price ({max_price_int}) less than bid_amount ({bid_amount_int})")
|
|
||||||
return "SKIP", ""
|
|
||||||
|
|
||||||
# Bid on the auction
|
|
||||||
reveal_file_path = laconic.commit_bid(auction_id, bid_amount_int)
|
|
||||||
logger.log(f"Commited bid on auction {auction_id} with amount {bid_amount_int}")
|
|
||||||
|
|
||||||
return "COMMIT", reveal_file_path
|
|
||||||
|
|
||||||
if current_status == "COMMIT":
|
|
||||||
# Return if auction still in commit state
|
|
||||||
if auction.status == AuctionStatus.COMMIT:
|
|
||||||
logger.log(f"Auction {auction_id} status: {auction.status}")
|
|
||||||
return current_status, reveal_file_path
|
|
||||||
|
|
||||||
# Reveal bid
|
|
||||||
if auction.status == AuctionStatus.REVEAL:
|
|
||||||
laconic.reveal_bid(auction_id, reveal_file_path)
|
|
||||||
logger.log(f"Revealed bid on auction {auction_id}")
|
|
||||||
|
|
||||||
return "REVEAL", reveal_file_path
|
|
||||||
|
|
||||||
raise Exception(f"Unexpected auction {auction_id} status: {auction.status}")
|
|
||||||
|
|
||||||
if current_status == "REVEAL":
|
|
||||||
# Return if auction still in reveal state
|
|
||||||
if auction.status == AuctionStatus.REVEAL:
|
|
||||||
logger.log(f"Auction {auction_id} status: {auction.status}")
|
|
||||||
return current_status, reveal_file_path
|
|
||||||
|
|
||||||
# Return if auction is completed
|
|
||||||
if auction.status == AuctionStatus.COMPLETED:
|
|
||||||
logger.log(f"Auction {auction_id} completed")
|
|
||||||
return "COMPLETED", ""
|
|
||||||
|
|
||||||
raise Exception(f"Unexpected auction {auction_id} status: {auction.status}")
|
|
||||||
|
|
||||||
raise Exception(f"Got request with unexpected status: {current_status}")
|
|
||||||
|
|
||||||
|
|
||||||
def dump_known_auction_requests(filename, requests, status="SEEN"):
|
|
||||||
if not filename:
|
|
||||||
return
|
|
||||||
known_requests = load_known_requests(filename)
|
|
||||||
for r in requests:
|
|
||||||
known_requests[r.id] = {"revealFile": r.revealFile, "status": status}
|
|
||||||
with open(filename, "w") as f:
|
|
||||||
json.dump(known_requests, f)
|
|
||||||
|
|
||||||
|
|
||||||
@click.command()
|
|
||||||
@click.option(
|
|
||||||
"--laconic-config", help="Provide a config file for laconicd", required=True
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--state-file",
|
|
||||||
help="File to store state about previously seen auction requests.",
|
|
||||||
required=True,
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--bid-amount",
|
|
||||||
help="Bid to place on application deployment auctions (in alnt)",
|
|
||||||
required=True,
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--registry-lock-file", help="File path to use for registry mutex lock", default=None
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--dry-run", help="Don't do anything, just report what would be done.", is_flag=True
|
|
||||||
)
|
|
||||||
@click.pass_context
|
|
||||||
def command(
|
|
||||||
ctx,
|
|
||||||
laconic_config,
|
|
||||||
state_file,
|
|
||||||
bid_amount,
|
|
||||||
registry_lock_file,
|
|
||||||
dry_run,
|
|
||||||
):
|
|
||||||
if int(bid_amount) < 0:
|
|
||||||
print("--bid-amount cannot be less than 0", file=sys.stderr)
|
|
||||||
sys.exit(2)
|
|
||||||
|
|
||||||
logger = TimedLogger(file=sys.stderr)
|
|
||||||
|
|
||||||
try:
|
|
||||||
laconic = LaconicRegistryClient(laconic_config, log_file=sys.stderr, mutex_lock_file=registry_lock_file)
|
|
||||||
auctions_requests = laconic.app_deployment_auctions()
|
|
||||||
|
|
||||||
previous_requests = {}
|
|
||||||
logger.log(f"Loading known auctions from {state_file}...")
|
|
||||||
previous_requests = load_known_requests(state_file)
|
|
||||||
|
|
||||||
# Process new requests first
|
|
||||||
auctions_requests.sort(key=lambda r: r.createTime)
|
|
||||||
auctions_requests.reverse()
|
|
||||||
|
|
||||||
requests_to_execute = []
|
|
||||||
|
|
||||||
for r in auctions_requests:
|
|
||||||
logger.log(f"BEGIN: Examining request {r.id}")
|
|
||||||
result_status = "PENDING"
|
|
||||||
reveal_file_path = ""
|
|
||||||
try:
|
|
||||||
application = r.attributes.application
|
|
||||||
|
|
||||||
# Handle already seen requests
|
|
||||||
if r.id in previous_requests:
|
|
||||||
# If it's not in commit or reveal status, skip the request as we've already seen it
|
|
||||||
current_status = previous_requests[r.id].get("status", "")
|
|
||||||
result_status = current_status
|
|
||||||
if current_status not in ["COMMIT", "REVEAL"]:
|
|
||||||
logger.log(f"Skipping request {r.id}, we've already seen it.")
|
|
||||||
continue
|
|
||||||
|
|
||||||
reveal_file_path = previous_requests[r.id].get("revealFile", "")
|
|
||||||
logger.log(f"Found existing auction request {r.id} for application {application}, status {current_status}.")
|
|
||||||
else:
|
|
||||||
# It's a fresh request, check application record
|
|
||||||
app = laconic.get_record(application)
|
|
||||||
if not app:
|
|
||||||
logger.log(f"Skipping request {r.id}, cannot locate app.")
|
|
||||||
result_status = "ERROR"
|
|
||||||
continue
|
|
||||||
|
|
||||||
logger.log(f"Found pending auction request {r.id} for application {application}.")
|
|
||||||
|
|
||||||
# Add requests to be processed
|
|
||||||
requests_to_execute.append((r, result_status, reveal_file_path))
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
result_status = "ERROR"
|
|
||||||
logger.log(f"ERROR: examining request {r.id}: " + str(e))
|
|
||||||
finally:
|
|
||||||
logger.log(f"DONE: Examining request {r.id} with result {result_status}.")
|
|
||||||
if result_status in ["ERROR"]:
|
|
||||||
dump_known_auction_requests(state_file, [AttrDict({"id": r.id, "revealFile": reveal_file_path})], result_status)
|
|
||||||
|
|
||||||
logger.log(f"Found {len(requests_to_execute)} request(s) to process.")
|
|
||||||
|
|
||||||
if not dry_run:
|
|
||||||
for r, current_status, reveal_file_path in requests_to_execute:
|
|
||||||
logger.log(f"Processing {r.id}: BEGIN")
|
|
||||||
result_status = "ERROR"
|
|
||||||
try:
|
|
||||||
result_status, reveal_file_path = process_app_deployment_auction(
|
|
||||||
ctx,
|
|
||||||
laconic,
|
|
||||||
r,
|
|
||||||
current_status,
|
|
||||||
reveal_file_path,
|
|
||||||
bid_amount,
|
|
||||||
logger,
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
logger.log(f"ERROR {r.id}:" + str(e))
|
|
||||||
finally:
|
|
||||||
logger.log(f"Processing {r.id}: END - {result_status}")
|
|
||||||
dump_known_auction_requests(state_file, [AttrDict({"id": r.id, "revealFile": reveal_file_path})], result_status)
|
|
||||||
except Exception as e:
|
|
||||||
logger.log("UNCAUGHT ERROR:" + str(e))
|
|
||||||
raise e
|
|
@ -1,124 +0,0 @@
|
|||||||
# Copyright ©2023 Vulcanize
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <http:#www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import click
|
|
||||||
import yaml
|
|
||||||
|
|
||||||
from stack_orchestrator.deploy.webapp.util import (
|
|
||||||
AUCTION_KIND_PROVIDER,
|
|
||||||
TOKEN_DENOM,
|
|
||||||
LaconicRegistryClient,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def fatal(msg: str):
|
|
||||||
print(msg, file=sys.stderr)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
@click.command()
|
|
||||||
@click.option(
|
|
||||||
"--laconic-config", help="Provide a config file for laconicd", required=True
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--app",
|
|
||||||
help="The LRN of the application to deploy.",
|
|
||||||
required=True,
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--commits-duration",
|
|
||||||
help="Auction commits duration (in seconds) (default: 600).",
|
|
||||||
default=600,
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--reveals-duration",
|
|
||||||
help="Auction reveals duration (in seconds) (default: 600).",
|
|
||||||
default=600,
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--commit-fee",
|
|
||||||
help="Auction bid commit fee (in alnt) (default: 100000).",
|
|
||||||
default=100000,
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--reveal-fee",
|
|
||||||
help="Auction bid reveal fee (in alnt) (default: 100000).",
|
|
||||||
default=100000,
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--max-price",
|
|
||||||
help="Max acceptable bid price (in alnt).",
|
|
||||||
required=True,
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--num-providers",
|
|
||||||
help="Max acceptable bid price (in alnt).",
|
|
||||||
required=True,
|
|
||||||
)
|
|
||||||
@click.option(
|
|
||||||
"--dry-run",
|
|
||||||
help="Don't publish anything, just report what would be done.",
|
|
||||||
is_flag=True,
|
|
||||||
)
|
|
||||||
@click.pass_context
|
|
||||||
def command(
|
|
||||||
ctx,
|
|
||||||
laconic_config,
|
|
||||||
app,
|
|
||||||
commits_duration,
|
|
||||||
reveals_duration,
|
|
||||||
commit_fee,
|
|
||||||
reveal_fee,
|
|
||||||
max_price,
|
|
||||||
num_providers,
|
|
||||||
dry_run,
|
|
||||||
):
|
|
||||||
laconic = LaconicRegistryClient(laconic_config)
|
|
||||||
|
|
||||||
app_record = laconic.get_record(app)
|
|
||||||
if not app_record:
|
|
||||||
fatal(f"Unable to locate app: {app}")
|
|
||||||
|
|
||||||
provider_auction_params = {
|
|
||||||
"kind": AUCTION_KIND_PROVIDER,
|
|
||||||
"commits_duration": commits_duration,
|
|
||||||
"reveals_duration": reveals_duration,
|
|
||||||
"denom": TOKEN_DENOM,
|
|
||||||
"commit_fee": commit_fee,
|
|
||||||
"reveal_fee": reveal_fee,
|
|
||||||
"max_price": max_price,
|
|
||||||
"num_providers": num_providers,
|
|
||||||
}
|
|
||||||
auction_id = laconic.create_deployment_auction(provider_auction_params)
|
|
||||||
print("Deployment auction created:", auction_id)
|
|
||||||
|
|
||||||
if not auction_id:
|
|
||||||
fatal("Unable to create a provider auction")
|
|
||||||
|
|
||||||
deployment_auction = {
|
|
||||||
"record": {
|
|
||||||
"type": "ApplicationDeploymentAuction",
|
|
||||||
"application": app,
|
|
||||||
"auction": auction_id,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if dry_run:
|
|
||||||
print(yaml.dump(deployment_auction))
|
|
||||||
return
|
|
||||||
|
|
||||||
# Publish the deployment auction record
|
|
||||||
laconic.publish(deployment_auction)
|
|
@@ -1,77 +0,0 @@
from functools import wraps
import os
import time

# Define default file path for the lock
DEFAULT_LOCK_FILE_PATH = "/tmp/registry_mutex_lock_file"
LOCK_TIMEOUT = 30
LOCK_RETRY_INTERVAL = 3


def acquire_lock(client, lock_file_path, timeout):
    # Lock alreay acquired by the current client
    if client.mutex_lock_acquired:
        return

    while True:
        try:
            # Check if lock file exists and is potentially stale
            if os.path.exists(lock_file_path):
                with open(lock_file_path, 'r') as lock_file:
                    timestamp = float(lock_file.read().strip())

                # If lock is stale, remove the lock file
                if time.time() - timestamp > timeout:
                    print(f"Stale lock detected, removing lock file {lock_file_path}")
                    os.remove(lock_file_path)
                else:
                    print(f"Lock file {lock_file_path} exists and is recent, waiting...")
                    time.sleep(LOCK_RETRY_INTERVAL)
                    continue

            # Try to create a new lock file with the current timestamp
            fd = os.open(lock_file_path, os.O_CREAT | os.O_EXCL | os.O_RDWR)
            with os.fdopen(fd, 'w') as lock_file:
                lock_file.write(str(time.time()))

            client.mutex_lock_acquired = True
            print(f"Registry lock acquired, {lock_file_path}")

            # Lock successfully acquired
            return

        except FileExistsError:
            print(f"Lock file {lock_file_path} exists, waiting...")
            time.sleep(LOCK_RETRY_INTERVAL)


def release_lock(client, lock_file_path):
    try:
        os.remove(lock_file_path)

        client.mutex_lock_acquired = False
        print(f"Registry lock released, {lock_file_path}")
    except FileNotFoundError:
        # Lock file already removed
        pass


def registry_mutex():
    def decorator(func):
        @wraps(func)
        def wrapper(self, *args, **kwargs):
            lock_file_path = DEFAULT_LOCK_FILE_PATH
            if self.mutex_lock_file:
                lock_file_path = self.mutex_lock_file

            # Acquire the lock before running the function
            acquire_lock(self, lock_file_path, LOCK_TIMEOUT)
            try:
                return func(self, *args, **kwargs)
            finally:
                # Release the lock after the function completes
                release_lock(self, lock_file_path)

        return wrapper

    return decorator
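
For context, a hedged sketch of how this (now-removed) decorator would be applied to a client method. The class below is an illustrative stand-in, not the real LaconicRegistryClient; only the two instance attributes the decorator reads are assumed:

    # Assuming registry_mutex is importable from the module shown above
    # (its import path is not shown in this diff).
    # from ...registry_mutex import registry_mutex

    class RegistryClientStub:
        def __init__(self, mutex_lock_file=None):
            # The decorator expects these two attributes on the instance.
            self.mutex_lock_file = mutex_lock_file
            self.mutex_lock_acquired = False

        @registry_mutex()
        def publish(self, record):
            # Runs with the file lock held; the lock is released afterwards,
            # even if this call raises.
            print("publishing", record)

    client = RegistryClientStub(mutex_lock_file="/tmp/example.lock")  # hypothetical path
    client.publish({"type": "ApplicationRecord"})
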
@@ -3,6 +3,7 @@
 # it under the terms of the GNU Affero General Public License as published by
 # the Free Software Foundation, either version 3 of the License, or
 # (at your option) any later version.
+import base64

 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
@@ -16,7 +17,6 @@ import shutil
 import sys
 import tempfile
 from datetime import datetime
-import base64

 import gnupg
 import click
@@ -24,8 +24,6 @@ import requests
 import yaml

 from stack_orchestrator.deploy.webapp.util import (
-    AUCTION_KIND_PROVIDER,
-    AuctionStatus,
     LaconicRegistryClient,
 )
 from dotenv import dotenv_values
@@ -45,13 +43,10 @@ def fatal(msg: str):
     help="The LRN of the application to deploy.",
     required=True,
 )
-@click.option(
-    "--auction-id",
-    help="Deployment auction id. Can be used instead of deployer and payment.",
-)
 @click.option(
     "--deployer",
     help="The LRN of the deployer to process this request.",
+    required=True,
 )
 @click.option("--env-file", help="environment file for webapp")
 @click.option("--config-ref", help="The ref of an existing config upload to use.")
@@ -69,11 +64,10 @@ def fatal(msg: str):
     is_flag=True,
 )
 @click.pass_context
-def command(  # noqa: C901
+def command(
     ctx,
     laconic_config,
     app,
-    auction_id,
     deployer,
     env_file,
     config_ref,
@@ -81,135 +75,58 @@ def command(  # noqa: C901
     use_payment,
     dns,
     dry_run,
-):
-    if auction_id and deployer:
-        print("Cannot specify both --auction-id and --deployer", file=sys.stderr)
-        sys.exit(2)
-
-    if not auction_id and not deployer:
-        print("Must specify either --auction-id or --deployer", file=sys.stderr)
-        sys.exit(2)
-
-    if auction_id and (make_payment or use_payment):
-        print("Cannot specify --auction-id with --make-payment or --use-payment", file=sys.stderr)
-        sys.exit(2)
-
-    if env_file and config_ref:
-        fatal("Cannot use --env-file and --config-ref at the same time.")
-
-    laconic = LaconicRegistryClient(laconic_config)
-
-    app_record = laconic.get_record(app)
-    if not app_record:
-        fatal(f"Unable to locate app: {app}")
-
-    # Deployers to send requests to
-    deployer_records = []
-
-    auction = None
-    auction_winners = None
-    if auction_id:
-        # Fetch auction record for given auction
-        auction_records_by_id = laconic.app_deployment_auctions({"auction": auction_id})
-        if len(auction_records_by_id) == 0:
-            fatal(f"Unable to locate record for auction: {auction_id}")
-
-        # Cross check app against application in the auction record
-        auction_app = auction_records_by_id[0].attributes.application
-        if auction_app != app:
-            fatal(f"Requested application {app} does not match application from auction record {auction_app}")
-
-        # Fetch auction details
-        auction = laconic.get_auction(auction_id)
-        if not auction:
-            fatal(f"Unable to locate auction: {auction_id}")
-
-        # Check auction owner
-        if auction.ownerAddress != laconic.whoami().address:
-            fatal(f"Auction {auction_id} owner mismatch")
-
-        # Check auction kind
-        if auction.kind != AUCTION_KIND_PROVIDER:
-            fatal(f"Auction kind needs to be ${AUCTION_KIND_PROVIDER}, got {auction.kind}")
-
-        # Check auction status
-        if auction.status != AuctionStatus.COMPLETED:
-            fatal(f"Auction {auction_id} not completed yet, status {auction.status}")
-
-        # Check that winner list is not empty
-        if len(auction.winnerAddresses) == 0:
-            fatal(f"Auction {auction_id} has no winners")
-
-        auction_winners = auction.winnerAddresses
-
-        # Get deployer record for all the auction winners
-        for auction_winner in auction_winners:
-            # TODO: Match auction winner address with provider address?
-            deployer_records_by_owner = laconic.webapp_deployers({"paymentAddress": auction_winner})
-            if len(deployer_records_by_owner) == 0:
-                print(f"WARNING: Unable to locate deployer for auction winner {auction_winner}")
-
-            # Take first record with name set
-            target_deployer_record = deployer_records_by_owner[0]
-            for r in deployer_records_by_owner:
-                if len(r.names) > 0:
-                    target_deployer_record = r
-                    break
-            deployer_records.append(target_deployer_record)
-    else:
+):  # noqa: C901
+    tempdir = tempfile.mkdtemp()
+    try:
+        laconic = LaconicRegistryClient(laconic_config)
+
+        app_record = laconic.get_record(app)
+        if not app_record:
+            fatal(f"Unable to locate app: {app}")
+
         deployer_record = laconic.get_record(deployer)
         if not deployer_record:
             fatal(f"Unable to locate deployer: {deployer}")

-        deployer_records.append(deployer_record)
+        if env_file and config_ref:
+            fatal("Cannot use --env-file and --config-ref at the same time.")

-    # Create and send request to each deployer
-    deployment_requests = []
-    for deployer_record in deployer_records:
-        # Upload config to deployers if env_file is passed
+        # If env_file
         if env_file:
-            tempdir = tempfile.mkdtemp()
-            try:
-                gpg = gnupg.GPG(gnupghome=tempdir)
+            gpg = gnupg.GPG(gnupghome=tempdir)

             # Import the deployer's public key
             result = gpg.import_keys(
                 base64.b64decode(deployer_record.attributes.publicKey)
             )
             if 1 != result.imported:
                 fatal("Failed to import deployer's public key.")

             recip = gpg.list_keys()[0]["uids"][0]

             # Wrap the config
             config = {
                 # Include account (and payment?) details
                 "authorized": [laconic.whoami().address],
                 "config": {"env": dict(dotenv_values(env_file))},
             }
             serialized = yaml.dump(config)

             # Encrypt
             result = gpg.encrypt(serialized, recip, always_trust=True, armor=False)
             if not result.ok:
                 fatal("Failed to encrypt config.")

             # Upload it to the deployer's API
             response = requests.post(
                 f"{deployer_record.attributes.apiUrl}/upload/config",
                 data=result.data,
                 headers={"Content-Type": "application/octet-stream"},
             )
             if not response.ok:
                 response.raise_for_status()

             config_ref = response.json()["id"]
-            finally:
-                shutil.rmtree(tempdir, ignore_errors=True)
-
-        target_deployer = deployer
-        if (not deployer) and len(deployer_record.names):
-            target_deployer = deployer_record.names[0]

         deployment_request = {
             "record": {
|
|||||||
"application": app,
|
"application": app,
|
||||||
"version": "1.0.0",
|
"version": "1.0.0",
|
||||||
"name": f"{app_record.attributes.name}@{app_record.attributes.version}",
|
"name": f"{app_record.attributes.name}@{app_record.attributes.version}",
|
||||||
"deployer": target_deployer,
|
"deployer": deployer,
|
||||||
"meta": {"when": str(datetime.utcnow())},
|
"meta": {"when": str(datetime.utcnow())},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if auction_id:
|
|
||||||
deployment_request["record"]["auction"] = auction_id
|
|
||||||
|
|
||||||
if config_ref:
|
if config_ref:
|
||||||
deployment_request["record"]["config"] = {"ref": config_ref}
|
deployment_request["record"]["config"] = {"ref": config_ref}
|
||||||
|
|
||||||
@ -251,12 +165,11 @@ def command( # noqa: C901
|
|||||||
elif use_payment:
|
elif use_payment:
|
||||||
deployment_request["record"]["payment"] = use_payment
|
deployment_request["record"]["payment"] = use_payment
|
||||||
|
|
||||||
deployment_requests.append(deployment_request)
|
|
||||||
|
|
||||||
# Send all requests
|
|
||||||
for deployment_request in deployment_requests:
|
|
||||||
if dry_run:
|
if dry_run:
|
||||||
print(yaml.dump(deployment_request))
|
print(yaml.dump(deployment_request))
|
||||||
continue
|
return
|
||||||
|
|
||||||
|
# Send the request
|
||||||
laconic.publish(deployment_request)
|
laconic.publish(deployment_request)
|
||||||
|
finally:
|
||||||
|
shutil.rmtree(tempdir, ignore_errors=True)
|
||||||
|
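For reference, the encrypt-and-upload path in the hunk above can be exercised on its own. The sketch below mirrors that flow with python-gnupg, requests, python-dotenv, and PyYAML; the function name and its parameters (deployer_public_key_b64, deployer_api_url, env_file_path, authorized_address) are hypothetical stand-ins for the record attributes the real command reads, so treat it as an illustration rather than the project's API.

# Standalone sketch (assumed names, not part of the repo) of the encrypt-and-upload flow above.
import base64
import shutil
import tempfile

import gnupg
import requests
import yaml
from dotenv import dotenv_values


def upload_encrypted_config(deployer_public_key_b64, deployer_api_url, env_file_path, authorized_address):
    tempdir = tempfile.mkdtemp()
    try:
        gpg = gnupg.GPG(gnupghome=tempdir)

        # Import the deployer's public key and use its uid as the encryption recipient
        result = gpg.import_keys(base64.b64decode(deployer_public_key_b64))
        if result.imported != 1:
            raise RuntimeError("Failed to import deployer's public key")
        recipient = gpg.list_keys()[0]["uids"][0]

        # Wrap the env file in the same {authorized, config: {env: ...}} shape as above
        payload = yaml.dump(
            {
                "authorized": [authorized_address],
                "config": {"env": dict(dotenv_values(env_file_path))},
            }
        )

        # Encrypt for the deployer and POST the binary ciphertext to its upload endpoint
        encrypted = gpg.encrypt(payload, recipient, always_trust=True, armor=False)
        if not encrypted.ok:
            raise RuntimeError("Failed to encrypt config")

        response = requests.post(
            f"{deployer_api_url}/upload/config",
            data=encrypted.data,
            headers={"Content-Type": "application/octet-stream"},
        )
        response.raise_for_status()
        return response.json()["id"]
    finally:
        shutil.rmtree(tempdir, ignore_errors=True)

The returned id plays the role of config_ref in the command above.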
@@ -1,106 +0,0 @@
-# Copyright ©2023 Vulcanize
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <http:#www.gnu.org/licenses/>.
-
-import sys
-
-import click
-import yaml
-
-from stack_orchestrator.deploy.webapp.util import (LaconicRegistryClient)
-
-
-def fatal(msg: str):
-    print(msg, file=sys.stderr)
-    sys.exit(1)
-
-
-@click.command()
-@click.option(
-    "--laconic-config", help="Provide a config file for laconicd", required=True
-)
-@click.option(
-    "--deployer",
-    help="The LRN of the deployer to process this request.",
-    required=True
-)
-@click.option(
-    "--deployment",
-    help="Deployment record (ApplicationDeploymentRecord) id of the deployment to remove.",
-    required=True,
-)
-@click.option(
-    "--make-payment",
-    help="The payment to make (in alnt). The value should be a number or 'auto' to use the deployer's minimum required payment.",
-)
-@click.option(
-    "--use-payment", help="The TX id of an existing, unused payment", default=None
-)
-@click.option(
-    "--dry-run",
-    help="Don't publish anything, just report what would be done.",
-    is_flag=True,
-)
-@click.pass_context
-def command(
-    ctx,
-    laconic_config,
-    deployer,
-    deployment,
-    make_payment,
-    use_payment,
-    dry_run,
-):
-    if make_payment and use_payment:
-        fatal("Cannot use --make-payment and --use-payment at the same time.")
-
-    laconic = LaconicRegistryClient(laconic_config)
-
-    deployer_record = laconic.get_record(deployer)
-    if not deployer_record:
-        fatal(f"Unable to locate deployer: {deployer}")
-
-    undeployment_request = {
-        "record": {
-            "type": "ApplicationDeploymentRemovalRequest",
-            "version": "1.0.0",
-            "deployer": deployer,
-            "deployment": deployment,
-        }
-    }
-
-    if make_payment:
-        amount = 0
-        if dry_run:
-            undeployment_request["record"]["payment"] = "DRY_RUN"
-        elif "auto" == make_payment:
-            if "minimumPayment" in deployer_record.attributes:
-                amount = int(
-                    deployer_record.attributes.minimumPayment.replace("alnt", "")
-                )
-        else:
-            amount = make_payment
-        if amount:
-            receipt = laconic.send_tokens(
-                deployer_record.attributes.paymentAddress, amount
-            )
-            undeployment_request["record"]["payment"] = receipt.tx.hash
-            print("Payment TX:", receipt.tx.hash)
-    elif use_payment:
-        undeployment_request["record"]["payment"] = use_payment
-
-    if dry_run:
-        print(yaml.dump(undeployment_request))
-        return
-
-    laconic.publish(undeployment_request)
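To make the removed command easier to follow, here is a small sketch of the record it assembled before publishing. The deployer LRN and deployment id below are made-up placeholder values; the dry-run branch simply printed this structure with yaml.dump and returned without publishing.

# Illustrative only: the removal-request shape used by the deleted command, with hypothetical values.
import yaml

undeployment_request = {
    "record": {
        "type": "ApplicationDeploymentRemovalRequest",
        "version": "1.0.0",
        "deployer": "lrn://example/deployers/my-deployer",  # hypothetical LRN
        "deployment": "bafyreiexampledeploymentrecordid",   # hypothetical record id
    }
}

print(yaml.dump(undeployment_request))  # what --dry-run printed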
@@ -178,9 +178,6 @@ def dump_known_requests(filename, requests):
     "my payment address are examined).",
     is_flag=True,
 )
-@click.option(
-    "--registry-lock-file", help="File path to use for registry mutex lock", default=None
-)
 @click.pass_context
 def command(  # noqa: C901
     ctx,
@@ -198,7 +195,6 @@ def command(  # noqa: C901
     min_required_payment,
     lrn,
     all_requests,
-    registry_lock_file,
 ):
     if request_id and discover:
         print("Cannot specify both --request-id and --discover", file=sys.stderr)
@@ -216,7 +212,7 @@ def command(  # noqa: C901
     include_tags = [tag.strip() for tag in include_tags.split(",") if tag]
     exclude_tags = [tag.strip() for tag in exclude_tags.split(",") if tag]
 
-    laconic = LaconicRegistryClient(laconic_config, log_file=sys.stderr, mutex_lock_file=registry_lock_file)
+    laconic = LaconicRegistryClient(laconic_config, log_file=sys.stderr)
     deployer_record = laconic.get_record(lrn, require=True)
     payment_address = deployer_record.attributes.paymentAddress
     main_logger.log(f"Payment address: {payment_address}")
@@ -315,7 +311,6 @@ def command(  # noqa: C901
             main_logger.log(f"ERROR examining {r.id}: {e}")
 
     requests_to_execute = []
-    # TODO: Handle requests with auction
     if min_required_payment:
         for r in requests_to_check_for_payment:
             main_logger.log(f"{r.id}: Confirming payment...")
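As a side note on the unchanged context above, --include-tags and --exclude-tags are plain comma-separated strings, and the comprehension shown splits, filters, and trims them. A quick sketch of its behaviour, using a made-up input string:

# Hypothetical sample input for the tag-splitting comprehension shown above
include_tags = "prod, featured ,,beta"
include_tags = [tag.strip() for tag in include_tags.split(",") if tag]
print(include_tags)  # ['prod', 'featured', 'beta']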
@@ -24,21 +24,6 @@ import tempfile
 import uuid
 import yaml
 
-from enum import Enum
-
-from stack_orchestrator.deploy.webapp.registry_mutex import registry_mutex
-
-
-class AuctionStatus(str, Enum):
-    COMMIT = "commit"
-    REVEAL = "reveal"
-    COMPLETED = "completed"
-    EXPIRED = "expired"
-
-
-TOKEN_DENOM = "alnt"
-AUCTION_KIND_PROVIDER = "provider"
-
 
 class AttrDict(dict):
     def __init__(self, *args, **kwargs):
@@ -73,12 +58,6 @@ class TimedLogger:
         self.last = datetime.datetime.now()
 
 
-def load_known_requests(filename):
-    if filename and os.path.exists(filename):
-        return json.load(open(filename, "r"))
-    return {}
-
-
 def logged_cmd(log_file, *vargs):
     result = None
     try:
@@ -113,8 +92,76 @@ def is_id(name_or_id: str):
     return not is_lrn(name_or_id)
 
 
+def confirm_payment(laconic, record, payment_address, min_amount, logger):
+    req_owner = laconic.get_owner(record)
+    if req_owner == payment_address:
+        # No need to confirm payment if the sender and recipient are the same account.
+        return True
+
+    if not record.attributes.payment:
+        logger.log(f"{record.id}: no payment tx info")
+        return False
+
+    tx = laconic.get_tx(record.attributes.payment)
+    if not tx:
+        logger.log(f"{record.id}: cannot locate payment tx")
+        return False
+
+    if tx.code != 0:
+        logger.log(
+            f"{record.id}: payment tx {tx.hash} was not successful - code: {tx.code}, log: {tx.log}"
+        )
+        return False
+
+    if tx.sender != req_owner:
+        logger.log(
+            f"{record.id}: payment sender {tx.sender} in tx {tx.hash} does not match deployment "
+            f"request owner {req_owner}"
+        )
+        return False
+
+    if tx.recipient != payment_address:
+        logger.log(
+            f"{record.id}: payment recipient {tx.recipient} in tx {tx.hash} does not match {payment_address}"
+        )
+        return False
+
+    pay_denom = "".join([i for i in tx.amount if not i.isdigit()])
+    if pay_denom != "alnt":
+        logger.log(
+            f"{record.id}: {pay_denom} in tx {tx.hash} is not an expected payment denomination"
+        )
+        return False
+
+    pay_amount = int("".join([i for i in tx.amount if i.isdigit()]))
+    if pay_amount < min_amount:
+        logger.log(
+            f"{record.id}: payment amount {tx.amount} is less than minimum {min_amount}"
+        )
+        return False
+
+    # Check if the payment was already used on a deployment
+    used = laconic.app_deployments(
+        {"deployer": payment_address, "payment": tx.hash}, all=True
+    )
+    if len(used):
+        logger.log(f"{record.id}: payment {tx.hash} already used on deployment {used}")
+        return False
+
+    used = laconic.app_deployment_removals(
+        {"deployer": payment_address, "payment": tx.hash}, all=True
+    )
+    if len(used):
+        logger.log(
+            f"{record.id}: payment {tx.hash} already used on deployment removal {used}"
+        )
+        return False
+
+    return True
+
+
 class LaconicRegistryClient:
-    def __init__(self, config_file, log_file=None, mutex_lock_file=None):
+    def __init__(self, config_file, log_file=None):
         self.config_file = config_file
         self.log_file = log_file
         self.cache = AttrDict(
@@ -125,9 +172,6 @@ class LaconicRegistryClient:
             }
         )
 
-        self.mutex_lock_file = mutex_lock_file
-        self.mutex_lock_acquired = False
-
     def whoami(self, refresh=False):
         if not refresh and "whoami" in self.cache:
             return self.cache["whoami"]
@@ -326,34 +370,6 @@ class LaconicRegistryClient:
         if require:
             raise Exception("Cannot locate tx:", hash)
 
-    def get_auction(self, auction_id, require=False):
-        args = [
-            "laconic",
-            "-c",
-            self.config_file,
-            "registry",
-            "auction",
-            "get",
-            "--id",
-            auction_id,
-        ]
-
-        results = None
-        try:
-            results = [
-                AttrDict(r) for r in json.loads(logged_cmd(self.log_file, *args)) if r
-            ]
-        except:  # noqa: E722
-            pass
-
-        if results and len(results):
-            return results[0]
-
-        if require:
-            raise Exception("Cannot locate auction:", auction_id)
-
-        return None
-
     def app_deployment_requests(self, criteria=None, all=True):
         if criteria is None:
             criteria = {}
@@ -382,21 +398,6 @@ class LaconicRegistryClient:
         criteria["type"] = "ApplicationDeploymentRemovalRecord"
         return self.list_records(criteria, all)
 
-    def webapp_deployers(self, criteria=None, all=True):
-        if criteria is None:
-            criteria = {}
-        criteria = criteria.copy()
-        criteria["type"] = "WebappDeployer"
-        return self.list_records(criteria, all)
-
-    def app_deployment_auctions(self, criteria=None, all=True):
-        if criteria is None:
-            criteria = {}
-        criteria = criteria.copy()
-        criteria["type"] = "ApplicationDeploymentAuction"
-        return self.list_records(criteria, all)
-
-    @registry_mutex()
     def publish(self, record, names=None):
         if names is None:
             names = []
@@ -427,7 +428,6 @@ class LaconicRegistryClient:
         finally:
             logged_cmd(self.log_file, "rm", "-rf", tmpdir)
 
-    @registry_mutex()
     def set_name(self, name, record_id):
         logged_cmd(
             self.log_file,
@@ -441,7 +441,6 @@ class LaconicRegistryClient:
             record_id,
         )
 
-    @registry_mutex()
     def delete_name(self, name):
         logged_cmd(
             self.log_file,
@@ -454,7 +453,6 @@ class LaconicRegistryClient:
             name,
         )
 
-    @registry_mutex()
     def send_tokens(self, address, amount, type="alnt"):
         args = [
             "laconic",
@@ -473,67 +471,6 @@ class LaconicRegistryClient:
 
         return AttrDict(json.loads(logged_cmd(self.log_file, *args)))
 
-    @registry_mutex()
-    def create_deployment_auction(self, auction):
-        args = [
-            "laconic",
-            "-c",
-            self.config_file,
-            "registry",
-            "auction",
-            "create",
-            "--kind",
-            auction["kind"],
-            "--commits-duration",
-            str(auction["commits_duration"]),
-            "--reveals-duration",
-            str(auction["reveals_duration"]),
-            "--denom",
-            auction["denom"],
-            "--commit-fee",
-            str(auction["commit_fee"]),
-            "--reveal-fee",
-            str(auction["reveal_fee"]),
-            "--max-price",
-            str(auction["max_price"]),
-            "--num-providers",
-            str(auction["num_providers"])
-        ]
-
-        return json.loads(logged_cmd(self.log_file, *args))["auctionId"]
-
-    @registry_mutex()
-    def commit_bid(self, auction_id, amount, type="alnt"):
-        args = [
-            "laconic",
-            "-c",
-            self.config_file,
-            "registry",
-            "auction",
-            "bid",
-            "commit",
-            auction_id,
-            str(amount),
-            type,
-        ]
-
-        return json.loads(logged_cmd(self.log_file, *args))["reveal_file"]
-
-    @registry_mutex()
-    def reveal_bid(self, auction_id, reveal_file_path):
-        logged_cmd(
-            self.log_file,
-            "laconic",
-            "-c",
-            self.config_file,
-            "registry",
-            "auction",
-            "bid",
-            "reveal",
-            auction_id,
-            reveal_file_path,
-        )
-
-
 def file_hash(filename):
     return hashlib.sha1(open(filename).read().encode()).hexdigest()
@@ -544,6 +481,8 @@ def determine_base_container(clone_dir, app_type="webapp"):
         raise Exception(f"Unsupported app_type {app_type}")
 
     base_container = "cerc/webapp-base"
+    if app_type == "webapp/snowball/nextjs":
+        base_container = "cerc/nextjs-snowball"
     if app_type == "webapp/next":
         base_container = "cerc/nextjs-base"
     elif app_type == "webapp":
@@ -740,15 +679,12 @@ def publish_deployment(
             },
         }
     }
 
     if app_deployment_request:
         new_deployment_record["record"]["request"] = app_deployment_request.id
-        # Set auction or payment id from request
-        if app_deployment_request.attributes.auction:
-            new_deployment_record["record"]["auction"] = app_deployment_request.attributes.auction
-        elif app_deployment_request.attributes.payment:
-            new_deployment_record["record"]["payment"] = app_deployment_request.attributes.payment
+        if app_deployment_request.attributes.payment:
+            new_deployment_record["record"][
+                "payment"
+            ] = app_deployment_request.attributes.payment
 
     if webapp_deployer_record:
         new_deployment_record["record"]["deployer"] = webapp_deployer_record.names[0]
@@ -796,108 +732,3 @@ def skip_by_tag(r, include_tags, exclude_tags):
             return True
 
     return False
-
-
-def confirm_payment(laconic: LaconicRegistryClient, record, payment_address, min_amount, logger):
-    req_owner = laconic.get_owner(record)
-    if req_owner == payment_address:
-        # No need to confirm payment if the sender and recipient are the same account.
-        return True
-
-    if not record.attributes.payment:
-        logger.log(f"{record.id}: no payment tx info")
-        return False
-
-    tx = laconic.get_tx(record.attributes.payment)
-    if not tx:
-        logger.log(f"{record.id}: cannot locate payment tx")
-        return False
-
-    if tx.code != 0:
-        logger.log(
-            f"{record.id}: payment tx {tx.hash} was not successful - code: {tx.code}, log: {tx.log}"
-        )
-        return False
-
-    if tx.sender != req_owner:
-        logger.log(
-            f"{record.id}: payment sender {tx.sender} in tx {tx.hash} does not match deployment "
-            f"request owner {req_owner}"
-        )
-        return False
-
-    if tx.recipient != payment_address:
-        logger.log(
-            f"{record.id}: payment recipient {tx.recipient} in tx {tx.hash} does not match {payment_address}"
-        )
-        return False
-
-    pay_denom = "".join([i for i in tx.amount if not i.isdigit()])
-    if pay_denom != "alnt":
-        logger.log(
-            f"{record.id}: {pay_denom} in tx {tx.hash} is not an expected payment denomination"
-        )
-        return False
-
-    pay_amount = int("".join([i for i in tx.amount if i.isdigit()]))
-    if pay_amount < min_amount:
-        logger.log(
-            f"{record.id}: payment amount {tx.amount} is less than minimum {min_amount}"
-        )
-        return False
-
-    # Check if the payment was already used on a deployment
-    used = laconic.app_deployments(
-        {"deployer": record.attributes.deployer, "payment": tx.hash}, all=True
-    )
-    if len(used):
-        # Fetch the app name from request record
-        used_request = laconic.get_record(used[0].attributes.request, require=True)
-
-        # Check that payment was used for deployment of same application
-        if record.attributes.application != used_request.attributes.application:
-            logger.log(f"{record.id}: payment {tx.hash} already used on a different application deployment {used}")
-            return False
-
-    used = laconic.app_deployment_removals(
-        {"deployer": record.attributes.deployer, "payment": tx.hash}, all=True
-    )
-    if len(used):
-        logger.log(
-            f"{record.id}: payment {tx.hash} already used on deployment removal {used}"
-        )
-        return False
-
-    return True
-
-
-def confirm_auction(laconic: LaconicRegistryClient, record, deployer_lrn, payment_address, logger):
-    auction_id = record.attributes.auction
-    auction = laconic.get_auction(auction_id)
-
-    # Fetch auction record for given auction
-    auction_records_by_id = laconic.app_deployment_auctions({"auction": auction_id})
-    if len(auction_records_by_id) == 0:
-        logger.log(f"{record.id}: unable to locate record for auction {auction_id}")
-        return False
-
-    # Cross check app against application in the auction record
-    requested_app = laconic.get_record(record.attributes.application, require=True)
-    auction_app = laconic.get_record(auction_records_by_id[0].attributes.application, require=True)
-    if requested_app.id != auction_app.id:
-        logger.log(
-            f"{record.id}: requested application {record.attributes.application} does not match application from "
-            f"auction record {auction_records_by_id[0].attributes.application}"
-        )
-        return False
-
-    if not auction:
-        logger.log(f"{record.id}: unable to locate auction {auction_id}")
-        return False
-
-    # Check if the deployer payment address is in auction winners list
-    if payment_address not in auction.winnerAddresses:
-        logger.log(f"{record.id}: deployer payment address not in auction winners.")
-        return False
-
-    return True
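Both versions of confirm_payment above split the transaction amount string into a denomination and an integer with the same pair of comprehensions. A tiny sketch of that step, using a made-up amount string in the "<integer><denom>" form the code expects:

# Hypothetical amount string; mirrors the pay_denom / pay_amount parsing shown above
amount = "15000alnt"

pay_denom = "".join([c for c in amount if not c.isdigit()])    # "alnt"
pay_amount = int("".join([c for c in amount if c.isdigit()]))  # 15000
assert (pay_denom, pay_amount) == ("alnt", 15000)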
@@ -26,10 +26,7 @@ from stack_orchestrator.deploy.webapp import (run_webapp,
                                               deploy_webapp_from_registry,
                                               undeploy_webapp_from_registry,
                                               publish_webapp_deployer,
-                                              publish_deployment_auction,
-                                              handle_deployment_auction,
-                                              request_webapp_deployment,
-                                              request_webapp_undeployment)
+                                              request_webapp_deployment)
 from stack_orchestrator.deploy import deploy
 from stack_orchestrator import version
 from stack_orchestrator.deploy import deployment
@@ -67,10 +64,7 @@ cli.add_command(deploy_webapp.command, "deploy-webapp")
 cli.add_command(deploy_webapp_from_registry.command, "deploy-webapp-from-registry")
 cli.add_command(undeploy_webapp_from_registry.command, "undeploy-webapp-from-registry")
 cli.add_command(publish_webapp_deployer.command, "publish-deployer-to-registry")
-cli.add_command(publish_deployment_auction.command, "publish-deployment-auction")
-cli.add_command(handle_deployment_auction.command, "handle-deployment-auction")
 cli.add_command(request_webapp_deployment.command, "request-webapp-deployment")
-cli.add_command(request_webapp_undeployment.command, "request-webapp-undeployment")
 cli.add_command(deploy.command, "deploy")  # deploy is an alias for deploy-system
 cli.add_command(deploy.command, "deploy-system")
 cli.add_command(deployment.command, "deployment")