44 lines
1.2 KiB
Python
44 lines
1.2 KiB
Python
|
|
import argparse
|
||
|
|
import json
|
||
|
|
import os
|
||
|
|
|
||
|
|
from pyspark.sql import SparkSession
|
||
|
|
from pyspark.sql import functions as F
|
||
|
|
|
||
|
|
|
||
|
|
def main() -> None:
    """Query assistant feedback rows from a Spark table and print them as JSON.

    CLI flags:
        --table         fully-qualified table name; defaults to $FEEDBACK_TABLE
                        or "lake.db1.assistant_feedback"
        --outcome       exact-match filter on the "outcome" column (skipped if empty)
        --task-type     exact-match filter on the "task_type" column (skipped if empty)
        --release-name  exact-match filter on the "release_name" column (skipped if empty)
        --limit         row cap, clamped to [1, 500] (default: 50)

    Output: a single JSON array of row dicts on stdout, ordered by
    "created_at_utc" descending with NULLs last.
    """
    p = argparse.ArgumentParser(description="Query assistant feedback rows")
    p.add_argument("--table", default=os.getenv("FEEDBACK_TABLE", "lake.db1.assistant_feedback"))
    p.add_argument("--outcome", default="")
    p.add_argument("--task-type", default="")
    p.add_argument("--release-name", default="")
    p.add_argument("--limit", type=int, default=50)
    args = p.parse_args()

    spark = SparkSession.builder.appName("query-assistant-feedback").getOrCreate()
    df = spark.table(args.table)

    # Apply each optional equality filter only when its flag was supplied;
    # an empty string means "no filter on this column".
    optional_filters = {
        "outcome": args.outcome,
        "task_type": args.task_type,
        "release_name": args.release_name,
    }
    for column, value in optional_filters.items():
        if value:
            df = df.where(F.col(column) == value)

    # Clamp the limit so a bad flag can neither request zero rows nor
    # flood the driver with an unbounded collect().
    rows = (
        df.orderBy(F.col("created_at_utc").desc_nulls_last())
        .limit(max(1, min(args.limit, 500)))
        .collect()
    )

    # NOTE(review): asDict(recursive=True) can surface datetime/Decimal values,
    # which json.dumps cannot serialize — confirm the table's column types, or
    # add an explicit default= handler if non-JSON types are possible.
    out = [r.asDict(recursive=True) for r in rows]
    print(json.dumps(out, ensure_ascii=False))
|
||
|
|
|
||
|
|
|
||
|
|
# Script entry point: propagate main()'s return (None -> exit status 0).
if __name__ == "__main__":
    raise SystemExit(main())