#!/usr/bin/env bash
#
# Record an assistant proposal set into an Iceberg table via spark-submit
# inside a running Spark container.
#
# Arguments (positional; 1 and 2 are required, the rest default to empty):
#   $1  proposal_set_id   unique id for this proposal set
#   $2  created_at_utc    creation timestamp (UTC)
#   $3  objective_b64     base64-encoded objective text
#   $4  release_name      release identifier
#   $5  summary_b64       base64-encoded summary
#   $6  signals_b64       base64-encoded signals payload
#   $7  proposals_b64     base64-encoded proposals payload
#
# Environment overrides:
#   PROPOSAL_TABLE        target table (default: lake.db1.assistant_proposals)
#   SPARK_CONTAINER_NAME  docker container running Spark (default: spark)
#   SPARK_PROPS           spark properties file path inside the container
#   SPARK_PACKAGES        --packages coordinates for Iceberg/Nessie runtime
#   SCRIPT_LOCAL          local path to write_assistant_proposals.py
#   AWS_REGION / AWS_DEFAULT_REGION  forwarded into the container

set -euo pipefail

PROPOSAL_TABLE="${PROPOSAL_TABLE:-lake.db1.assistant_proposals}"

PROPOSAL_SET_ID="${1:-}"
CREATED_AT_UTC="${2:-}"
OBJECTIVE_B64="${3:-}"
RELEASE_NAME="${4:-}"
SUMMARY_B64="${5:-}"
SIGNALS_B64="${6:-}"
PROPOSALS_B64="${7:-}"

# The first two positional arguments are mandatory; everything else may be
# empty and is passed through verbatim to the PySpark writer.
if [[ -z "$PROPOSAL_SET_ID" || -z "$CREATED_AT_UTC" ]]; then
  printf 'Usage: %s <proposal_set_id> <created_at_utc> [objective_b64] [release_name] [summary_b64] [signals_b64] [proposals_b64]\n' "$0" >&2
  exit 1
fi

CONTAINER_NAME="${SPARK_CONTAINER_NAME:-spark}"
SPARK_PROPS="${SPARK_PROPS:-/opt/lakehouse/spark-conf/lakehouse-spark-defaults.conf}"
PACKAGES="${SPARK_PACKAGES:-org.apache.iceberg:iceberg-spark-runtime-3.5_2.12:1.10.1,org.apache.iceberg:iceberg-aws-bundle:1.10.1,org.projectnessie.nessie-integrations:nessie-spark-extensions-3.5_2.12:0.104.5}"
SCRIPT_LOCAL="${SCRIPT_LOCAL:-./write_assistant_proposals.py}"
readonly SCRIPT_REMOTE="/tmp/write_assistant_proposals.py"

# Fail fast if the PySpark writer is missing locally, before touching docker.
if [[ ! -f "$SCRIPT_LOCAL" ]]; then
  printf 'write_assistant_proposals.py not found at: %s\n' "$SCRIPT_LOCAL" >&2
  exit 1
fi

# Copy the writer into the container, then run it under spark-submit with the
# Iceberg/Nessie runtime packages and the lakehouse default properties.
docker cp "$SCRIPT_LOCAL" "$CONTAINER_NAME":"$SCRIPT_REMOTE"

docker exec \
  -e AWS_REGION="${AWS_REGION:-us-east-1}" \
  -e AWS_DEFAULT_REGION="${AWS_DEFAULT_REGION:-us-east-1}" \
  "$CONTAINER_NAME" \
  /opt/spark/bin/spark-submit \
  --properties-file "$SPARK_PROPS" \
  --packages "$PACKAGES" \
  "$SCRIPT_REMOTE" \
  --table "$PROPOSAL_TABLE" \
  --proposal-set-id "$PROPOSAL_SET_ID" \
  --created-at-utc "$CREATED_AT_UTC" \
  --objective-b64 "$OBJECTIVE_B64" \
  --release-name "$RELEASE_NAME" \
  --summary-b64 "$SUMMARY_B64" \
  --signals-b64 "$SIGNALS_B64" \
  --proposals-b64 "$PROPOSALS_B64"

echo "[DONE] Recorded proposal set ${PROPOSAL_SET_ID} into ${PROPOSAL_TABLE}"