diff --git a/stack_orchestrator/deploy/deployment_create.py b/stack_orchestrator/deploy/deployment_create.py index 5daffad8..59188b95 100644 --- a/stack_orchestrator/deploy/deployment_create.py +++ b/stack_orchestrator/deploy/deployment_create.py @@ -443,16 +443,17 @@ def _check_volume_definitions(spec): @click.command() @click.option("--spec-file", required=True, help="Spec file to use to create this deployment") @click.option("--deployment-dir", help="Create deployment files in this directory") +@click.option("--sync", is_flag=True, default=False, help="Update existing deployment directory without overwriting data volumes") @click.argument('extra_args', nargs=-1, type=click.UNPROCESSED) @click.pass_context -def create(ctx, spec_file, deployment_dir, extra_args): +def create(ctx, spec_file, deployment_dir, sync, extra_args): deployment_command_context = ctx.obj - return create_operation(deployment_command_context, spec_file, deployment_dir, extra_args) + return create_operation(deployment_command_context, spec_file, deployment_dir, sync, extra_args) # The init command's implementation is in a separate function so that we can # call it from other commands, bypassing the click decoration stuff -def create_operation(deployment_command_context, spec_file, deployment_dir, extra_args): +def create_operation(deployment_command_context, spec_file, deployment_dir, sync, extra_args): parsed_spec = Spec(os.path.abspath(spec_file), get_parsed_deployment_spec(spec_file)) _check_volume_definitions(parsed_spec) stack_name = parsed_spec["stack"] @@ -466,12 +467,21 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, extr else: deployment_dir_path = Path(deployment_dir) if deployment_dir_path.exists(): - error_exit(f"{deployment_dir_path} already exists") - os.mkdir(deployment_dir_path) + if not sync: + error_exit(f"{deployment_dir_path} already exists") + if opts.o.debug: + print(f"Syncing existing deployment at {deployment_dir_path}") + else: + if sync: 
+ error_exit(f"--sync requires that {deployment_dir_path} already exists") + os.mkdir(deployment_dir_path) # Copy spec file and the stack file into the deployment dir copyfile(spec_file, deployment_dir_path.joinpath(constants.spec_file_name)) copyfile(stack_file, deployment_dir_path.joinpath(constants.stack_file_name)) - _create_deployment_file(deployment_dir_path) + # Only create deployment file if it doesn't exist (preserve cluster ID on sync) + deployment_file_path = deployment_dir_path.joinpath(constants.deployment_file_name) + if not deployment_file_path.exists(): + _create_deployment_file(deployment_dir_path) # Copy any config varibles from the spec file into an env file suitable for compose _write_config_file(spec_file, deployment_dir_path.joinpath(constants.config_file_name)) # Copy any k8s config file into the deployment dir @@ -481,16 +491,16 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, extr # Copy the pod files into the deployment dir, fixing up content pods = get_pod_list(parsed_stack) destination_compose_dir = deployment_dir_path.joinpath("compose") - os.mkdir(destination_compose_dir) + os.makedirs(destination_compose_dir, exist_ok=True) destination_pods_dir = deployment_dir_path.joinpath("pods") - os.mkdir(destination_pods_dir) + os.makedirs(destination_pods_dir, exist_ok=True) yaml = get_yaml() for pod in pods: pod_file_path = get_pod_file_path(stack_name, parsed_stack, pod) parsed_pod_file = yaml.load(open(pod_file_path, "r")) extra_config_dirs = _find_extra_config_dirs(parsed_pod_file, pod) destination_pod_dir = destination_pods_dir.joinpath(pod) - os.mkdir(destination_pod_dir) + os.makedirs(destination_pod_dir, exist_ok=True) if opts.o.debug: print(f"extra config dirs: {extra_config_dirs}") _fixup_pod_file(parsed_pod_file, parsed_spec, destination_compose_dir) @@ -509,7 +519,7 @@ def create_operation(deployment_command_context, spec_file, deployment_dir, extr # Copy the script files for the pod, if any if 
pod_has_scripts(parsed_stack, pod): destination_script_dir = destination_pod_dir.joinpath("scripts") - os.mkdir(destination_script_dir) + os.makedirs(destination_script_dir, exist_ok=True) script_paths = get_pod_script_paths(parsed_stack, pod) _copy_files_to_directory(script_paths, destination_script_dir) if parsed_spec.is_kubernetes_deployment(): diff --git a/stack_orchestrator/deploy/webapp/deploy_webapp.py b/stack_orchestrator/deploy/webapp/deploy_webapp.py index 0353ad16..cdd07481 100644 --- a/stack_orchestrator/deploy/webapp/deploy_webapp.py +++ b/stack_orchestrator/deploy/webapp/deploy_webapp.py @@ -91,6 +91,7 @@ def create_deployment(ctx, deployment_dir, image, url, kube_config, image_regist deploy_command_context, spec_file_name, deployment_dir, + False, None, ) # Fix up the container tag inside the deployment compose file diff --git a/tests/database/run-test.sh b/tests/database/run-test.sh index 405f6d34..2b68cb2c 100755 --- a/tests/database/run-test.sh +++ b/tests/database/run-test.sh @@ -86,7 +86,7 @@ fi echo "deploy init test: passed" # Switch to a full path for the data dir so it gets provisioned as a host bind mounted volume and preserved beyond cluster lifetime -sed -i "s|^\(\s*db-data:$\)$|\1 ${test_deployment_dir}/data/db-data|" $test_deployment_spec +sed -i.bak "s|^\(\s*db-data:$\)$|\1 ${test_deployment_dir}/data/db-data|" $test_deployment_spec $TEST_TARGET_SO --stack ${stack} deploy create --spec-file $test_deployment_spec --deployment-dir $test_deployment_dir # Check the deployment dir exists diff --git a/tests/deploy/run-deploy-test.sh b/tests/deploy/run-deploy-test.sh index c18c5cb0..5cafd689 100755 --- a/tests/deploy/run-deploy-test.sh +++ b/tests/deploy/run-deploy-test.sh @@ -85,6 +85,7 @@ else exit 1 fi $TEST_TARGET_SO --stack test deploy down --delete-volumes + # Basic test of creating a deployment test_deployment_dir=$CERC_REPO_BASE_DIR/test-deployment-dir test_deployment_spec=$CERC_REPO_BASE_DIR/test-deployment-spec.yml @@ -122,6 +123,41 
@@ fi echo "dbfc7a4d-44a7-416d-b5f3-29842cc47650" > $test_deployment_dir/data/test-config/test_config echo "deploy create output file test: passed" + +# Test sync functionality: update deployment without destroying data +# First, create a marker file in the data directory to verify it's preserved +test_data_marker="$test_deployment_dir/data/test-data-bind/sync-test-marker.txt" +echo "original-data-$(date +%s)" > "$test_data_marker" +original_marker_content=$(<$test_data_marker) +# Snapshot the original spec content for reference/debugging (not asserted below) +original_spec_content=$(<$test_deployment_spec) + +# Modify spec file to simulate an update +sed -i.bak 's/CERC_TEST_PARAM_3:/CERC_TEST_PARAM_3: FASTER/' $test_deployment_spec +# Run sync to update deployment files without destroying data +$TEST_TARGET_SO --stack test deploy create --spec-file $test_deployment_spec --deployment-dir $test_deployment_dir --sync +# Verify the spec file was updated in deployment dir +updated_deployed_spec=$(<$test_deployment_dir/spec.yml) +if [[ "$updated_deployed_spec" == *"FASTER"* ]]; then + echo "deploy sync test: spec file updated" +else + echo "deploy sync test: spec file not updated - FAILED" + exit 1 +fi +# Verify the data marker file still exists with original content +if [ ! 
-f "$test_data_marker" ]; then + echo "deploy sync test: data file deleted - FAILED" + exit 1 +fi +synced_marker_content=$(<$test_data_marker) +if [ "$synced_marker_content" == "$original_marker_content" ]; then + echo "deploy sync test: data preserved - passed" +else + echo "deploy sync test: data corrupted - FAILED" + exit 1 +fi +echo "deploy sync test: passed" + # Try to start the deployment $TEST_TARGET_SO deployment --dir $test_deployment_dir start # Check logs command works diff --git a/tests/external-stack/run-test.sh b/tests/external-stack/run-test.sh index 084f3b9d..de89f645 100755 --- a/tests/external-stack/run-test.sh +++ b/tests/external-stack/run-test.sh @@ -125,6 +125,49 @@ fi echo "dbfc7a4d-44a7-416d-b5f3-29842cc47650" > $test_deployment_dir/data/test-config/test_config echo "deploy create output file test: passed" + +# Test sync functionality: update deployment without destroying data +# First, create a marker file in the data directory to verify it's preserved +test_data_marker="$test_deployment_dir/data/test-data/sync-test-marker.txt" +mkdir -p "$test_deployment_dir/data/test-data" +echo "external-stack-data-$(date +%s)" > "$test_data_marker" +original_marker_content=$(<$test_data_marker) +# Capture the original cluster ID (empty if deployment.yml is absent) so we can check it survives the sync +original_cluster_id=$(grep "cluster-id:" "$test_deployment_dir/deployment.yml" 2>/dev/null || echo "") +# Modify spec file to simulate an update +sed -i.bak 's/CERC_TEST_PARAM_1=PASSED/CERC_TEST_PARAM_1=UPDATED/' $test_deployment_spec +# Run sync to update deployment files without destroying data +$TEST_TARGET_SO_STACK deploy create --spec-file $test_deployment_spec --deployment-dir $test_deployment_dir --sync +# Verify the spec file was updated in deployment dir +updated_deployed_spec=$(<$test_deployment_dir/spec.yml) +if [[ "$updated_deployed_spec" == *"UPDATED"* ]]; then + echo "deploy sync test: spec file updated" +else + echo "deploy sync test: spec file not updated - FAILED" + exit 1 +fi +# 
Verify the data marker file still exists with original content +if [ ! -f "$test_data_marker" ]; then + echo "deploy sync test: data file deleted - FAILED" + exit 1 +fi +synced_marker_content=$(<$test_data_marker) +if [ "$synced_marker_content" == "$original_marker_content" ]; then + echo "deploy sync test: data preserved - passed" +else + echo "deploy sync test: data corrupted - FAILED" + exit 1 +fi +# Verify cluster ID was preserved (not regenerated) +new_cluster_id=$(grep "cluster-id:" "$test_deployment_dir/deployment.yml" 2>/dev/null || echo "") +if [ -n "$original_cluster_id" ] && [ "$original_cluster_id" == "$new_cluster_id" ]; then + echo "deploy sync test: cluster ID preserved - passed" +else + echo "deploy sync test: cluster ID not preserved - FAILED" + exit 1 +fi +echo "deploy sync test: passed" + # Try to start the deployment $TEST_TARGET_SO deployment --dir $test_deployment_dir start # Check logs command works