From 61c18980bc4096c512cb3ce46ae35e51756d1892 Mon Sep 17 00:00:00 2001 From: Roy Crihfield Date: Fri, 17 Oct 2025 23:21:23 +0800 Subject: [PATCH 1/3] Add --sync option to deploy create To allow updating an existing deployment --- .../deploy/deployment_create.py | 28 +++++++----- .../deploy/webapp/deploy_webapp.py | 1 + tests/database/run-test.sh | 35 +++++++++++++++ tests/deploy/run-deploy-test.sh | 44 ++++++++++++++++++- tests/external-stack/run-test.sh | 43 ++++++++++++++++++ 5 files changed, 139 insertions(+), 12 deletions(-) diff --git a/stack_orchestrator/deploy/deployment_create.py b/stack_orchestrator/deploy/deployment_create.py index 9d45f226..35b66a1a 100644 --- a/stack_orchestrator/deploy/deployment_create.py +++ b/stack_orchestrator/deploy/deployment_create.py @@ -443,18 +443,19 @@ def _check_volume_definitions(spec): @click.command() @click.option("--spec-file", required=True, help="Spec file to use to create this deployment") @click.option("--deployment-dir", help="Create deployment files in this directory") +@click.option("--sync", is_flag=True, default=False, help="Update existing deployment directory without overwriting data volumes") # TODO: Hack @click.option("--network-dir", help="Network configuration supplied in this directory") @click.option("--initial-peers", help="Initial set of persistent peers") @click.pass_context -def create(ctx, spec_file, deployment_dir, network_dir, initial_peers): +def create(ctx, spec_file, deployment_dir, sync, network_dir, initial_peers): deployment_command_context = ctx.obj - return create_operation(deployment_command_context, spec_file, deployment_dir, network_dir, initial_peers) + return create_operation(deployment_command_context, spec_file, deployment_dir, sync, network_dir, initial_peers) # The init command's implementation is in a separate function so that we can # call it from other commands, bypassing the click decoration stuff -def create_operation(deployment_command_context, spec_file, deployment_dir, network_dir, initial_peers): +def create_operation(deployment_command_context, spec_file, deployment_dir, sync, network_dir, initial_peers): parsed_spec = Spec(os.path.abspath(spec_file), get_parsed_deployment_spec(spec_file)) _check_volume_definitions(parsed_spec) stack_name = parsed_spec["stack"] @@ -468,12 +469,19 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, netw else: deployment_dir_path = Path(deployment_dir) if deployment_dir_path.exists(): - error_exit(f"{deployment_dir_path} already exists") - os.mkdir(deployment_dir_path) + if not sync: + error_exit(f"{deployment_dir_path} already exists") + if opts.o.debug: + print(f"Syncing existing deployment at {deployment_dir_path}") + else: + os.mkdir(deployment_dir_path) # Copy spec file and the stack file into the deployment dir copyfile(spec_file, deployment_dir_path.joinpath(constants.spec_file_name)) copyfile(stack_file, deployment_dir_path.joinpath(constants.stack_file_name)) - _create_deployment_file(deployment_dir_path) + # Only create deployment file if it doesn't exist (preserve cluster ID on sync) + deployment_file_path = deployment_dir_path.joinpath(constants.deployment_file_name) + if not deployment_file_path.exists(): + _create_deployment_file(deployment_dir_path) # Copy any config varibles from the spec file into an env file suitable for compose _write_config_file(spec_file, deployment_dir_path.joinpath(constants.config_file_name)) # Copy any k8s config file into the deployment dir @@ -483,16 +491,16 @@ def 
create_operation(deployment_command_context, spec_file, deployment_dir, netw # Copy the pod files into the deployment dir, fixing up content pods = get_pod_list(parsed_stack) destination_compose_dir = deployment_dir_path.joinpath("compose") - os.mkdir(destination_compose_dir) + os.makedirs(destination_compose_dir, exist_ok=True) destination_pods_dir = deployment_dir_path.joinpath("pods") - os.mkdir(destination_pods_dir) + os.makedirs(destination_pods_dir, exist_ok=True) yaml = get_yaml() for pod in pods: pod_file_path = get_pod_file_path(stack_name, parsed_stack, pod) parsed_pod_file = yaml.load(open(pod_file_path, "r")) extra_config_dirs = _find_extra_config_dirs(parsed_pod_file, pod) destination_pod_dir = destination_pods_dir.joinpath(pod) - os.mkdir(destination_pod_dir) + os.makedirs(destination_pod_dir, exist_ok=True) if opts.o.debug: print(f"extra config dirs: {extra_config_dirs}") _fixup_pod_file(parsed_pod_file, parsed_spec, destination_compose_dir) @@ -511,7 +519,7 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, netw # Copy the script files for the pod, if any if pod_has_scripts(parsed_stack, pod): destination_script_dir = destination_pod_dir.joinpath("scripts") - os.mkdir(destination_script_dir) + os.makedirs(destination_script_dir, exist_ok=True) script_paths = get_pod_script_paths(parsed_stack, pod) _copy_files_to_directory(script_paths, destination_script_dir) if parsed_spec.is_kubernetes_deployment(): diff --git a/stack_orchestrator/deploy/webapp/deploy_webapp.py b/stack_orchestrator/deploy/webapp/deploy_webapp.py index 4c91dec3..c51f0781 100644 --- a/stack_orchestrator/deploy/webapp/deploy_webapp.py +++ b/stack_orchestrator/deploy/webapp/deploy_webapp.py @@ -91,6 +91,7 @@ def create_deployment(ctx, deployment_dir, image, url, kube_config, image_regist deploy_command_context, spec_file_name, deployment_dir, + False, None, None ) diff --git a/tests/database/run-test.sh b/tests/database/run-test.sh index 405f6d34..e8ba2f76 100755 --- a/tests/database/run-test.sh +++ b/tests/database/run-test.sh @@ -97,6 +97,41 @@ if [ ! -d "$test_deployment_dir" ]; then fi echo "deploy create test: passed" +# Test sync functionality: update deployment without destroying data +# Create a marker file in the database data directory to verify it's preserved +test_data_marker="$test_deployment_dir/data/db-data/sync-test-marker.txt" +mkdir -p "$test_deployment_dir/data/db-data" +echo "db-marker-$(date +%s)" > "$test_data_marker" +original_marker_content=$(<$test_data_marker) +# Save current timestamp of deployment file to verify it gets preserved +if [ -f "$test_deployment_dir/deployment.yml" ]; then + original_deployment_file_time=$(stat -c %Y "$test_deployment_dir/deployment.yml" 2>/dev/null || stat -f %m "$test_deployment_dir/deployment.yml") +fi +# Modify spec to simulate an update +echo " test-annotation: sync-update" >> $test_deployment_spec +# Run sync to update deployment files without destroying data +$TEST_TARGET_SO --stack ${stack} deploy create --spec-file $test_deployment_spec --deployment-dir $test_deployment_dir --sync +# Verify the data marker file still exists with original content +if [ ! 
-f "$test_data_marker" ]; then
+    echo "deploy sync test: database data marker deleted - FAILED"
+    exit 1
+fi
+synced_marker_content=$(<$test_data_marker)
+if [ "$synced_marker_content" == "$original_marker_content" ]; then
+    echo "deploy sync test: database data preserved - passed"
+else
+    echo "deploy sync test: database data corrupted - FAILED"
+    exit 1
+fi
+# Verify the compose files are still present after being regenerated by the sync
+if [ -f "$test_deployment_dir/compose/docker-compose-${stack}.yml" ]; then
+    echo "deploy sync test: compose files present - passed"
+else
+    echo "deploy sync test: compose files missing - FAILED"
+    exit 1
+fi
+echo "deploy sync test: passed"
+
 # Try to start the deployment
 $TEST_TARGET_SO deployment --dir $test_deployment_dir start
 wait_for_pods_started
diff --git a/tests/deploy/run-deploy-test.sh b/tests/deploy/run-deploy-test.sh
index fb574b03..c5e87ebf 100755
--- a/tests/deploy/run-deploy-test.sh
+++ b/tests/deploy/run-deploy-test.sh
@@ -14,8 +14,13 @@ delete_cluster_exit () {
 
 # Test basic stack-orchestrator deploy
 echo "Running stack-orchestrator deploy test"
-# Bit of a hack, test the most recent package
-TEST_TARGET_SO=$( ls -t1 ./package/laconic-so* | head -1 )
+
+if [ "$1" == "from-path" ]; then
+  TEST_TARGET_SO="laconic-so"
+else
+  TEST_TARGET_SO=$( ls -t1 ./package/laconic-so* | head -1 )
+fi
+
 # Set a non-default repo dir
 export CERC_REPO_BASE_DIR=~/stack-orchestrator-test/repo-base-dir
 echo "Testing this package: $TEST_TARGET_SO"
@@ -117,6 +122,41 @@ fi
 echo "dbfc7a4d-44a7-416d-b5f3-29842cc47650" > $test_deployment_dir/data/test-config/test_config
 echo "deploy create output file test: passed"
 
+
+# Test sync functionality: update deployment without destroying data
+# First, create a marker file in the data directory to verify it's preserved
+test_data_marker="$test_deployment_dir/data/test-data/sync-test-marker.txt"
+mkdir -p "$test_deployment_dir/data/test-data"
+echo "original-data-$(date +%s)" > "$test_data_marker"
+original_marker_content=$(<$test_data_marker)
+# Also save original spec content to compare
+original_spec_content=$(<$test_deployment_spec)
+# Modify spec file to simulate an update
+sed -i.bak 's/CERC_TEST_PARAM_3=FAST/CERC_TEST_PARAM_3=FASTER/' $test_deployment_spec
+# Run sync to update deployment files without destroying data
+$TEST_TARGET_SO --stack test deploy create --spec-file $test_deployment_spec --deployment-dir $test_deployment_dir --sync
+# Verify the spec file was updated in deployment dir
+updated_deployed_spec=$(<$test_deployment_dir/spec.yml)
+if [[ "$updated_deployed_spec" == *"FASTER"* ]]; then
+    echo "deploy sync test: spec file updated"
+else
+    echo "deploy sync test: spec file not updated - FAILED"
+    exit 1
+fi
+# Verify the data marker file still exists with original content
+if [ ! 
-f "$test_data_marker" ]; then + echo "deploy sync test: data file deleted - FAILED" + exit 1 +fi +synced_marker_content=$(<$test_data_marker) +if [ "$synced_marker_content" == "$original_marker_content" ]; then + echo "deploy sync test: data preserved - passed" +else + echo "deploy sync test: data corrupted - FAILED" + exit 1 +fi +echo "deploy sync test: passed" + # Try to start the deployment $TEST_TARGET_SO deployment --dir $test_deployment_dir start # Check logs command works diff --git a/tests/external-stack/run-test.sh b/tests/external-stack/run-test.sh index 084f3b9d..de89f645 100755 --- a/tests/external-stack/run-test.sh +++ b/tests/external-stack/run-test.sh @@ -125,6 +125,49 @@ fi echo "dbfc7a4d-44a7-416d-b5f3-29842cc47650" > $test_deployment_dir/data/test-config/test_config echo "deploy create output file test: passed" + +# Test sync functionality: update deployment without destroying data +# First, create a marker file in the data directory to verify it's preserved +test_data_marker="$test_deployment_dir/data/test-data/sync-test-marker.txt" +mkdir -p "$test_deployment_dir/data/test-data" +echo "external-stack-data-$(date +%s)" > "$test_data_marker" +original_marker_content=$(<$test_data_marker) +# Verify deployment file exists and preserve its cluster ID +original_cluster_id=$(grep "cluster-id:" "$test_deployment_dir/deployment.yml" 2>/dev/null || echo "") +# Modify spec file to simulate an update +sed -i.bak 's/CERC_TEST_PARAM_1=PASSED/CERC_TEST_PARAM_1=UPDATED/' $test_deployment_spec +# Run sync to update deployment files without destroying data +$TEST_TARGET_SO_STACK deploy create --spec-file $test_deployment_spec --deployment-dir $test_deployment_dir --sync +# Verify the spec file was updated in deployment dir +updated_deployed_spec=$(<$test_deployment_dir/spec.yml) +if [[ "$updated_deployed_spec" == *"UPDATED"* ]]; then + echo "deploy sync test: spec file updated" +else + echo "deploy sync test: spec file not updated - FAILED" + exit 1 +fi +# Verify the data marker file still exists with original content +if [ ! 
-f "$test_data_marker" ]; then + echo "deploy sync test: data file deleted - FAILED" + exit 1 +fi +synced_marker_content=$(<$test_data_marker) +if [ "$synced_marker_content" == "$original_marker_content" ]; then + echo "deploy sync test: data preserved - passed" +else + echo "deploy sync test: data corrupted - FAILED" + exit 1 +fi +# Verify cluster ID was preserved (not regenerated) +new_cluster_id=$(grep "cluster-id:" "$test_deployment_dir/deployment.yml" 2>/dev/null || echo "") +if [ -n "$original_cluster_id" ] && [ "$original_cluster_id" == "$new_cluster_id" ]; then + echo "deploy sync test: cluster ID preserved - passed" +else + echo "deploy sync test: cluster ID not preserved - FAILED" + exit 1 +fi +echo "deploy sync test: passed" + # Try to start the deployment $TEST_TARGET_SO deployment --dir $test_deployment_dir start # Check logs command works From eb3a0c5541084c8b4e21872af01a9977a7a7d721 Mon Sep 17 00:00:00 2001 From: Roy Crihfield Date: Sun, 19 Oct 2025 14:36:49 +0800 Subject: [PATCH 2/3] test stack does not need laconicd --- stack_orchestrator/data/stacks/test/stack.yml | 1 - 1 file changed, 1 deletion(-) diff --git a/stack_orchestrator/data/stacks/test/stack.yml b/stack_orchestrator/data/stacks/test/stack.yml index ac724c89..93d3ecd3 100644 --- a/stack_orchestrator/data/stacks/test/stack.yml +++ b/stack_orchestrator/data/stacks/test/stack.yml @@ -2,7 +2,6 @@ version: "1.0" name: test description: "A test stack" repos: - - git.vdb.to/cerc-io/laconicd - git.vdb.to/cerc-io/test-project@test-branch containers: - cerc/test-container From 82fdbbf0a6a4592a2d7f94ba03a3e159a6b35049 Mon Sep 17 00:00:00 2001 From: Roy Crihfield Date: Sun, 19 Oct 2025 15:06:41 +0800 Subject: [PATCH 3/3] fix test --- tests/deploy/run-deploy-test.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tests/deploy/run-deploy-test.sh b/tests/deploy/run-deploy-test.sh index c5e87ebf..3add14ce 100755 --- a/tests/deploy/run-deploy-test.sh +++ b/tests/deploy/run-deploy-test.sh @@ -132,7 +132,7 @@ original_marker_content=$(<$test_data_marker) # Also save original spec content to compare original_spec_content=$(<$test_deployment_spec) # Modify spec file to simulate an update -sed -i.bak 's/CERC_TEST_PARAM_3=FAST/CERC_TEST_PARAM_3=FASTER/' $test_deployment_spec +sed -i.bak 's/CERC_TEST_PARAM_3:/CERC_TEST_PARAM_3: FASTER/' $test_deployment_spec # Run sync to update deployment files without destroying data $TEST_TARGET_SO --stack test deploy create --spec-file $test_deployment_spec --deployment-dir $test_deployment_dir --sync # Verify the spec file was updated in deployment dir