#!/usr/bin/env bash
#
# Record one assistant action into an Iceberg table by shipping
# write_assistant_action.py into a Spark container and running it
# with spark-submit.
#
# Positional arguments (free-text fields are base64-encoded so callers
# never have to shell-quote arbitrary content):
#   1  action_id           (required)
#   2  created_at_utc      (required)
#   3  task_type           (required)
#   4  release_name
#   5  objective_b64
#   6  step_id             (required)
#   7  step_title_b64
#   8  action_type         (required)
#   9  requires_approval   (default: false)
#  10  approved            (default: false)
#  11  status              (required)
#  12  output_b64
#  13  error_b64
#
# Environment overrides:
#   ACTION_TABLE          target table (default: lake.db1.assistant_actions)
#   SPARK_CONTAINER_NAME  docker container running Spark (default: spark)
#   SPARK_PROPS           properties file path inside the container
#   SPARK_PACKAGES        --packages coordinates (Iceberg + Nessie runtimes)
#   SCRIPT_LOCAL          local path to write_assistant_action.py
#   AWS_REGION / AWS_DEFAULT_REGION  forwarded into the container (default: us-east-1)
set -euo pipefail

ACTION_TABLE="${ACTION_TABLE:-lake.db1.assistant_actions}"

# Capture positional parameters; ${N:-} keeps 'set -u' from tripping on
# missing trailing arguments so we can report a proper usage error instead.
ACTION_ID="${1:-}"
CREATED_AT_UTC="${2:-}"
TASK_TYPE="${3:-}"
RELEASE_NAME="${4:-}"
OBJECTIVE_B64="${5:-}"
STEP_ID="${6:-}"
STEP_TITLE_B64="${7:-}"
ACTION_TYPE="${8:-}"
REQUIRES_APPROVAL="${9:-false}"
APPROVED="${10:-false}"
STATUS="${11:-}"
OUTPUT_B64="${12:-}"
ERROR_B64="${13:-}"

# Validate the required fields. BUG FIX: the original usage message had lost
# its argument list ('Usage: $0 '); restore the full parameter synopsis.
if [[ -z "$ACTION_ID" || -z "$CREATED_AT_UTC" || -z "$TASK_TYPE" \
   || -z "$STEP_ID" || -z "$ACTION_TYPE" || -z "$STATUS" ]]; then
  echo "Usage: $0 <action_id> <created_at_utc> <task_type> <release_name> <objective_b64> <step_id> <step_title_b64> <action_type> [requires_approval] [approved] <status> <output_b64> <error_b64>" >&2
  exit 1
fi

CONTAINER_NAME="${SPARK_CONTAINER_NAME:-spark}"
SPARK_PROPS="${SPARK_PROPS:-/opt/lakehouse/spark-conf/lakehouse-spark-defaults.conf}"
PACKAGES="${SPARK_PACKAGES:-org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.1,org.apache.iceberg:iceberg-aws-bundle:1.10.1,org.projectnessie.nessie-integrations:nessie-spark-extensions-3.5_2.12:0.104.5}"
SCRIPT_LOCAL="${SCRIPT_LOCAL:-./write_assistant_action.py}"
SCRIPT_REMOTE="/tmp/write_assistant_action.py"

# Fail fast with a clear message before invoking docker.
if [[ ! -f "$SCRIPT_LOCAL" ]]; then
  echo "write_assistant_action.py not found at: $SCRIPT_LOCAL" >&2
  exit 1
fi

# Copy the job script into the container, then submit it. AWS region env
# vars are forwarded for the Iceberg AWS bundle (S3/Glue access).
docker cp "$SCRIPT_LOCAL" "$CONTAINER_NAME":"$SCRIPT_REMOTE"

docker exec \
  -e AWS_REGION="${AWS_REGION:-us-east-1}" \
  -e AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION:-us-east-1}" \
  "$CONTAINER_NAME" \
  /opt/spark/bin/spark-submit \
  --properties-file "$SPARK_PROPS" \
  --packages "$PACKAGES" \
  "$SCRIPT_REMOTE" \
  --table "$ACTION_TABLE" \
  --action-id "$ACTION_ID" \
  --created-at-utc "$CREATED_AT_UTC" \
  --task-type "$TASK_TYPE" \
  --release-name "$RELEASE_NAME" \
  --objective-b64 "$OBJECTIVE_B64" \
  --step-id "$STEP_ID" \
  --step-title-b64 "$STEP_TITLE_B64" \
  --action-type "$ACTION_TYPE" \
  --requires-approval "$REQUIRES_APPROVAL" \
  --approved "$APPROVED" \
  --status "$STATUS" \
  --output-b64 "$OUTPUT_B64" \
  --error-b64 "$ERROR_B64"

echo "[DONE] Recorded assistant action ${ACTION_ID} into ${ACTION_TABLE}"