Compare commits


2 Commits

Author          SHA1        Message                                              Date
Roy Crihfield   bd5b806ae3  Merge branch 'build-forward-vars' into vaasl-deploy  2025-10-28 14:55:10 +08:00
                            Lint Checks / Run linter (push): failing after 4s
                            Database Test / Run database hosting test on kind/k8s (push): failing after 4s
                            External Stack Test / Run external stack test suite (push): failing after 2s
Roy Crihfield   58e645dcfe  forward some env vars to build command               2025-10-28 14:54:46 +08:00
6 changed files with 22 additions and 109 deletions

View File

@@ -26,6 +26,7 @@ from decouple import config
 import subprocess
 import click
 from pathlib import Path
+from typing import Any
 from stack_orchestrator.opts import opts
 from stack_orchestrator.util import include_exclude_check, stack_is_external, error_exit
 from stack_orchestrator.base import get_npm_registry_url
@@ -42,7 +43,7 @@ def make_container_build_env(dev_root_path: str,
                              debug: bool,
                              force_rebuild: bool,
                              extra_build_args: str):
-    container_build_env = {
+    command_env: dict[str, Any] = {
         "CERC_NPM_REGISTRY_URL": get_npm_registry_url(),
         "CERC_GO_AUTH_TOKEN": config("CERC_GO_AUTH_TOKEN", default=""),
         "CERC_NPM_AUTH_TOKEN": config("CERC_NPM_AUTH_TOKEN", default=""),
@@ -52,14 +53,16 @@ def make_container_build_env(dev_root_path: str,
         "CERC_HOST_GID": f"{os.getgid()}",
         "DOCKER_BUILDKIT": config("DOCKER_BUILDKIT", default="0")
     }
-    container_build_env.update({"CERC_SCRIPT_DEBUG": "true"} if debug else {})
-    container_build_env.update({"CERC_FORCE_REBUILD": "true"} if force_rebuild else {})
-    container_build_env.update({"CERC_CONTAINER_EXTRA_BUILD_ARGS": extra_build_args} if extra_build_args else {})
-    docker_host_env = os.getenv("DOCKER_HOST")
-    if docker_host_env:
-        container_build_env.update({"DOCKER_HOST": docker_host_env})
-    return container_build_env
+    command_env.update({"CERC_SCRIPT_DEBUG": "true"} if debug else {})
+    command_env.update({"CERC_FORCE_REBUILD": "true"} if force_rebuild else {})
+    command_env.update({"CERC_CONTAINER_EXTRA_BUILD_ARGS": extra_build_args} if extra_build_args else {})
+
+    forwarded_vars = ("DOCKER_HOST", "BUILDKIT_PROGRESS", "http_proxy", "https_proxy")
+    for var in forwarded_vars:
+        if value := config(var, default=None):
+            command_env[var] = value
+    return command_env
 
 
 def process_container(build_context: BuildContext) -> bool:
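
Note: the removed lines read DOCKER_HOST via os.getenv, i.e. only from the process environment, while the replacement loop uses decouple's config(), which checks environment variables first and then any .env/settings.ini repository, and it now forwards four variables instead of one. A minimal self-contained sketch of the same pattern, assuming only the python-decouple package and Python 3.9+ (walrus operator, built-in generic annotations):

# Sketch of the forwarding pattern from the hunk above; the variable
# tuple is the one in the diff, nothing else is assumed.
from decouple import config

command_env: dict[str, str] = {}
for var in ("DOCKER_HOST", "BUILDKIT_PROGRESS", "http_proxy", "https_proxy"):
    # config() returns the value from os.environ or a .env/settings.ini
    # file; with default=None, unset variables are simply skipped.
    if value := config(var, default=None):
        command_env[var] = value

print(command_env)  # e.g. {'DOCKER_HOST': 'unix:///var/run/docker.sock'} when set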

View File

@ -443,17 +443,16 @@ def _check_volume_definitions(spec):
@click.command()
@click.option("--spec-file", required=True, help="Spec file to use to create this deployment")
@click.option("--deployment-dir", help="Create deployment files in this directory")
@click.option("--sync", is_flag=True, default=False, help="Update existing deployment directory without overwriting data volumes")
@click.argument('extra_args', nargs=-1, type=click.UNPROCESSED)
@click.pass_context
def create(ctx, spec_file, deployment_dir, sync, extra_args):
def create(ctx, spec_file, deployment_dir, extra_args):
deployment_command_context = ctx.obj
return create_operation(deployment_command_context, spec_file, deployment_dir, sync, extra_args)
return create_operation(deployment_command_context, spec_file, deployment_dir, extra_args)
# The init command's implementation is in a separate function so that we can
# call it from other commands, bypassing the click decoration stuff
def create_operation(deployment_command_context, spec_file, deployment_dir, sync, extra_args):
def create_operation(deployment_command_context, spec_file, deployment_dir, extra_args):
parsed_spec = Spec(os.path.abspath(spec_file), get_parsed_deployment_spec(spec_file))
_check_volume_definitions(parsed_spec)
stack_name = parsed_spec["stack"]
@ -467,21 +466,12 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, sync
else:
deployment_dir_path = Path(deployment_dir)
if deployment_dir_path.exists():
if not sync:
error_exit(f"{deployment_dir_path} already exists")
if opts.o.debug:
print(f"Syncing existing deployment at {deployment_dir_path}")
else:
if sync:
error_exit(f"--sync requires that {deployment_dir_path} already exists")
os.mkdir(deployment_dir_path)
error_exit(f"{deployment_dir_path} already exists")
os.mkdir(deployment_dir_path)
# Copy spec file and the stack file into the deployment dir
copyfile(spec_file, deployment_dir_path.joinpath(constants.spec_file_name))
copyfile(stack_file, deployment_dir_path.joinpath(constants.stack_file_name))
# Only create deployment file if it doesn't exist (preserve cluster ID on sync)
deployment_file_path = deployment_dir_path.joinpath(constants.deployment_file_name)
if not deployment_file_path.exists():
_create_deployment_file(deployment_dir_path)
_create_deployment_file(deployment_dir_path)
# Copy any config varibles from the spec file into an env file suitable for compose
_write_config_file(spec_file, deployment_dir_path.joinpath(constants.config_file_name))
# Copy any k8s config file into the deployment dir
@ -491,16 +481,16 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, sync
# Copy the pod files into the deployment dir, fixing up content
pods = get_pod_list(parsed_stack)
destination_compose_dir = deployment_dir_path.joinpath("compose")
os.makedirs(destination_compose_dir, exist_ok=True)
os.mkdir(destination_compose_dir)
destination_pods_dir = deployment_dir_path.joinpath("pods")
os.makedirs(destination_pods_dir, exist_ok=True)
os.mkdir(destination_pods_dir)
yaml = get_yaml()
for pod in pods:
pod_file_path = get_pod_file_path(stack_name, parsed_stack, pod)
parsed_pod_file = yaml.load(open(pod_file_path, "r"))
extra_config_dirs = _find_extra_config_dirs(parsed_pod_file, pod)
destination_pod_dir = destination_pods_dir.joinpath(pod)
os.makedirs(destination_pod_dir, exist_ok=True)
os.mkdir(destination_pod_dir)
if opts.o.debug:
print(f"extra config dirs: {extra_config_dirs}")
_fixup_pod_file(parsed_pod_file, parsed_spec, destination_compose_dir)
@ -519,7 +509,7 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, sync
# Copy the script files for the pod, if any
if pod_has_scripts(parsed_stack, pod):
destination_script_dir = destination_pod_dir.joinpath("scripts")
os.makedirs(destination_script_dir, exist_ok=True)
os.mkdir(destination_script_dir)
script_paths = get_pod_script_paths(parsed_stack, pod)
_copy_files_to_directory(script_paths, destination_script_dir)
if parsed_spec.is_kubernetes_deployment():
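
Note: the os.makedirs(..., exist_ok=True) calls tolerated directories left over from a previous run, which the removed --sync path relied on; reverting to os.mkdir restores fail-fast creation, consistent with the now-unconditional "already exists" guard above. A small illustration of the difference, using a hypothetical directory name:

# Hypothetical path; illustrates os.mkdir vs os.makedirs(exist_ok=True).
import os
import shutil

shutil.rmtree("demo-dir", ignore_errors=True)   # start from a clean slate
os.mkdir("demo-dir")                            # creates one level; parent must exist
try:
    os.mkdir("demo-dir")                        # second call fails: fail-fast
except FileExistsError:
    print("demo-dir already exists")
os.makedirs("demo-dir", exist_ok=True)          # the replaced form: idempotent, no error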

View File

@@ -91,7 +91,6 @@ def create_deployment(ctx, deployment_dir, image, url, kube_config, image_regist
         deploy_command_context,
         spec_file_name,
         deployment_dir,
-        False,
         None,
     )
     # Fix up the container tag inside the deployment compose file
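
Note: the dropped False was the argument bound to create_operation's former sync parameter, so this call site simply tracks the signature change above; the remaining None is extra_args.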

View File

@@ -86,7 +86,7 @@ fi
 echo "deploy init test: passed"
 # Switch to a full path for the data dir so it gets provisioned as a host bind mounted volume and preserved beyond cluster lifetime
-sed -i.bak "s|^\(\s*db-data:$\)$|\1 ${test_deployment_dir}/data/db-data|" $test_deployment_spec
+sed -i "s|^\(\s*db-data:$\)$|\1 ${test_deployment_dir}/data/db-data|" $test_deployment_spec
 $TEST_TARGET_SO --stack ${stack} deploy create --spec-file $test_deployment_spec --deployment-dir $test_deployment_dir
 # Check the deployment dir exists
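
Note: a bare sed -i is a GNU sed extension; BSD/macOS sed requires an explicit (possibly empty) backup suffix such as -i '' or -i.bak. Dropping .bak therefore assumes the test runs under GNU sed, e.g. on a Linux CI runner.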

View File

@@ -85,7 +85,6 @@ else
   exit 1
 fi
 $TEST_TARGET_SO --stack test deploy down --delete-volumes
-
 # Basic test of creating a deployment
 test_deployment_dir=$CERC_REPO_BASE_DIR/test-deployment-dir
 test_deployment_spec=$CERC_REPO_BASE_DIR/test-deployment-spec.yml
@@ -123,41 +122,6 @@ fi
 echo "dbfc7a4d-44a7-416d-b5f3-29842cc47650" > $test_deployment_dir/data/test-config/test_config
 echo "deploy create output file test: passed"
-
-# Test sync functionality: update deployment without destroying data
-# First, create a marker file in the data directory to verify it's preserved
-test_data_marker="$test_deployment_dir/data/test-data-bind/sync-test-marker.txt"
-echo "original-data-$(date +%s)" > "$test_data_marker"
-original_marker_content=$(<$test_data_marker)
-# Also save original spec content to compare
-original_spec_content=$(<$test_deployment_spec)
-# Modify spec file to simulate an update
-sed -i.bak 's/CERC_TEST_PARAM_3:/CERC_TEST_PARAM_3: FASTER/' $test_deployment_spec
-# Run sync to update deployment files without destroying data
-$TEST_TARGET_SO --stack test deploy create --spec-file $test_deployment_spec --deployment-dir $test_deployment_dir --sync
-# Verify the spec file was updated in deployment dir
-updated_deployed_spec=$(<$test_deployment_dir/spec.yml)
-if [[ "$updated_deployed_spec" == *"FASTER"* ]]; then
-  echo "deploy sync test: spec file updated"
-else
-  echo "deploy sync test: spec file not updated - FAILED"
-  exit 1
-fi
-# Verify the data marker file still exists with original content
-if [ ! -f "$test_data_marker" ]; then
-  echo "deploy sync test: data file deleted - FAILED"
-  exit 1
-fi
-synced_marker_content=$(<$test_data_marker)
-if [ "$synced_marker_content" == "$original_marker_content" ]; then
-  echo "deploy sync test: data preserved - passed"
-else
-  echo "deploy sync test: data corrupted - FAILED"
-  exit 1
-fi
-echo "deploy sync test: passed"
-
 # Try to start the deployment
 $TEST_TARGET_SO deployment --dir $test_deployment_dir start
 # Check logs command works

View File

@@ -125,49 +125,6 @@ fi
 echo "dbfc7a4d-44a7-416d-b5f3-29842cc47650" > $test_deployment_dir/data/test-config/test_config
 echo "deploy create output file test: passed"
-
-# Test sync functionality: update deployment without destroying data
-# First, create a marker file in the data directory to verify it's preserved
-test_data_marker="$test_deployment_dir/data/test-data/sync-test-marker.txt"
-mkdir -p "$test_deployment_dir/data/test-data"
-echo "external-stack-data-$(date +%s)" > "$test_data_marker"
-original_marker_content=$(<$test_data_marker)
-# Verify deployment file exists and preserve its cluster ID
-original_cluster_id=$(grep "cluster-id:" "$test_deployment_dir/deployment.yml" 2>/dev/null || echo "")
-# Modify spec file to simulate an update
-sed -i.bak 's/CERC_TEST_PARAM_1=PASSED/CERC_TEST_PARAM_1=UPDATED/' $test_deployment_spec
-# Run sync to update deployment files without destroying data
-$TEST_TARGET_SO_STACK deploy create --spec-file $test_deployment_spec --deployment-dir $test_deployment_dir --sync
-# Verify the spec file was updated in deployment dir
-updated_deployed_spec=$(<$test_deployment_dir/spec.yml)
-if [[ "$updated_deployed_spec" == *"UPDATED"* ]]; then
-  echo "deploy sync test: spec file updated"
-else
-  echo "deploy sync test: spec file not updated - FAILED"
-  exit 1
-fi
-# Verify the data marker file still exists with original content
-if [ ! -f "$test_data_marker" ]; then
-  echo "deploy sync test: data file deleted - FAILED"
-  exit 1
-fi
-synced_marker_content=$(<$test_data_marker)
-if [ "$synced_marker_content" == "$original_marker_content" ]; then
-  echo "deploy sync test: data preserved - passed"
-else
-  echo "deploy sync test: data corrupted - FAILED"
-  exit 1
-fi
-# Verify cluster ID was preserved (not regenerated)
-new_cluster_id=$(grep "cluster-id:" "$test_deployment_dir/deployment.yml" 2>/dev/null || echo "")
-if [ -n "$original_cluster_id" ] && [ "$original_cluster_id" == "$new_cluster_id" ]; then
-  echo "deploy sync test: cluster ID preserved - passed"
-else
-  echo "deploy sync test: cluster ID not preserved - FAILED"
-  exit 1
-fi
-echo "deploy sync test: passed"
-
 # Try to start the deployment
 $TEST_TARGET_SO deployment --dir $test_deployment_dir start
 # Check logs command works