#!/usr/bin/env bash
# Windmill REST API helper script
# Usage: ./wm-api.sh <command> [args...]
#
# Required environment:
#   WINDMILL_TOKEN - API token (no default; never hardcode credentials)
# Optional overrides: WINDMILL_URL, WINDMILL_WORKSPACE, STATE_DIR, REMOTE_INDEX_FILE

set -euo pipefail

# Configuration (overridable via environment variables).
WINDMILL_URL="${WINDMILL_URL:-https://windmill.keinafarm.net}"
# SECURITY FIX: the token used to default to a hardcoded secret committed to
# this file. It must now come from the environment; we only warn (not abort)
# so unauthenticated commands like 'help' and 'version' keep working.
WINDMILL_TOKEN="${WINDMILL_TOKEN:-}"
WINDMILL_WORKSPACE="${WINDMILL_WORKSPACE:-admins}"
STATE_DIR="${STATE_DIR:-state}"
REMOTE_INDEX_FILE="${REMOTE_INDEX_FILE:-${STATE_DIR}/remote_index.json}"

API_BASE="${WINDMILL_URL}/api/w/${WINDMILL_WORKSPACE}"
AUTH_HEADER="Authorization: Bearer ${WINDMILL_TOKEN}"

if [ -z "${WINDMILL_TOKEN}" ]; then
  echo "warning: WINDMILL_TOKEN is not set; authenticated commands will fail" >&2
fi
# Exit with a usage message when a required argument is missing.
# $1 - usage string to show, $2 - argument value to validate.
# FIX: the usage text is an error diagnostic, so it now goes to stderr
# instead of stdout (where it could pollute captured command output).
require_arg() {
  if [ -z "${2:-}" ]; then
    echo "Usage: $1" >&2
    exit 1
  fi
}
# Create the parent directory of the given file path, unless it is the cwd.
ensure_parent_dir() {
  local target="$1"
  local parent
  parent="$(dirname "$target")"
  [ "$parent" = "." ] || mkdir -p "$parent"
}
# Authenticated GET against ${API_BASE}$1; body goes to stdout.
# NOTE(review): -k skips TLS verification and 2>/dev/null hides curl errors;
# both kept for compatibility, but worth revisiting.
api_get() {
  local endpoint="$1"
  curl -sk -H "${AUTH_HEADER}" "${API_BASE}${endpoint}" 2>/dev/null
}
# Authenticated POST of a JSON body ($2) to ${API_BASE}$1.
api_post() {
  local endpoint="$1" body="$2"
  curl -sk -X POST \
    -H "${AUTH_HEADER}" \
    -H "Content-Type: application/json" \
    -d "$body" \
    "${API_BASE}${endpoint}" 2>/dev/null
}
# Authenticated PUT of a JSON body ($2) to ${API_BASE}$1.
api_put() {
  local endpoint="$1" body="$2"
  curl -sk -X PUT \
    -H "${AUTH_HEADER}" \
    -H "Content-Type: application/json" \
    -d "$body" \
    "${API_BASE}${endpoint}" 2>/dev/null
}
# Authenticated DELETE against ${API_BASE}$1.
api_delete() {
  local endpoint="$1"
  curl -sk -X DELETE -H "${AUTH_HEADER}" "${API_BASE}${endpoint}" 2>/dev/null
}
# Pretty-print JSON read from stdin (python json.tool, 4-space indent).
# Fails (non-zero) when stdin is not valid JSON.
json_pretty() {
  python3 -m json.tool
}
# Read JSON from stdin and write it pretty-printed (UTF-8, 2-space indent)
# to the file named by $1, creating parent directories as needed.
save_json_pretty() {
  local dest="$1"
  ensure_parent_dir "$dest"
  python3 -c 'import json,sys; print(json.dumps(json.load(sys.stdin), ensure_ascii=False, indent=2))' > "$dest"
}
# Run an inline Python program supplied on stdin (typically a heredoc),
# forwarding all shell arguments as sys.argv[1..].
python_json() {
  python3 - "$@"
}
# Map a Windmill script path (e.g. f/foo/bar) to its local TypeScript file.
# NOTE(review): assumes every script is TypeScript (.ts) — confirm for
# workspaces with other languages.
path_to_script_file() {
  local leaf="${1##*/}"
  printf 'scripts/%s.ts\n' "$leaf"
}
# Map a Windmill flow path (e.g. f/foo/bar) to its local flow JSON file.
path_to_flow_file() {
  local leaf="${1##*/}"
  printf 'flows/%s.flow.json\n' "$leaf"
}
pull_script() {
|
|
local script_path="$1"
|
|
local outfile="$2"
|
|
|
|
local data
|
|
data="$(api_get "/scripts/get/p/${script_path}")"
|
|
if [ -z "$data" ]; then
|
|
echo "pull-script failed: empty response (${script_path})" >&2
|
|
exit 1
|
|
fi
|
|
|
|
ensure_parent_dir "$outfile"
|
|
python_json "$outfile" "$data" <<'PY'
|
|
import json
|
|
import pathlib
|
|
import sys
|
|
|
|
outfile = pathlib.Path(sys.argv[1])
|
|
obj = json.loads(sys.argv[2])
|
|
content = obj.get("content")
|
|
if content is None:
|
|
raise SystemExit("content not found in script payload")
|
|
outfile.write_text(content, encoding="utf-8", newline="\n")
|
|
PY
|
|
echo "pulled script: ${script_path} -> ${outfile}"
|
|
}
|
|
|
|
# Push a local source file to Windmill as a new version of an existing script.
# $1 - remote script path, $2 - local source file.
# The current remote version is fetched first so its metadata (summary,
# schema, language, ...) is preserved and parent_hash links the versions.
push_script() {
  local script_path="$1"
  local infile="$2"

  if [ ! -f "$infile" ]; then
    echo "push-script failed: file not found (${infile})" >&2
    exit 1
  fi

  local current payload
  # NOTE(review): api_get silences curl errors, so "empty response" may also
  # mean a network/TLS failure, not only a missing script.
  current="$(api_get "/scripts/get/p/${script_path}")"
  if [ -z "$current" ]; then
    echo "push-script failed: empty response (${script_path})" >&2
    exit 1
  fi

  # Merge the local file content into the remote version's metadata.
  payload="$(python_json "$script_path" "$infile" "$current" <<'PY'
import json
import pathlib
import sys

script_path, infile = sys.argv[1], pathlib.Path(sys.argv[2])
remote = json.loads(sys.argv[3])
content = infile.read_text(encoding="utf-8")

payload = {
    "path": script_path,
    # parent_hash links this upload to the current remote version.
    "parent_hash": remote.get("hash"),
    "summary": remote.get("summary") or "",
    "description": remote.get("description") or "",
    "content": content,
    "schema": remote.get("schema") or {"type": "object", "properties": {}, "required": []},
    "language": remote.get("language") or "bun",
    "kind": remote.get("kind") or "script",
    "lock": remote.get("lock") or "",
}

# Only parent_hash can still be None here; the other keys get defaults above.
missing = [k for k in ("path", "parent_hash", "content", "schema", "language", "kind", "lock") if payload.get(k) is None]
if missing:
    raise SystemExit(f"missing required payload fields: {', '.join(missing)}")

print(json.dumps(payload, ensure_ascii=False))
PY
)"
  # NOTE(review): if the create endpoint returns non-JSON, json_pretty fails
  # and (with pipefail) aborts the script — confirm the response format.
  api_post "/scripts/create" "$payload" | json_pretty
}
# Fetch a flow definition and store a trimmed copy as pretty-printed JSON.
# $1 - remote flow path, $2 - destination JSON file.
pull_flow() {
  local flow_path="$1"
  local outfile="$2"
  local data

  data="$(api_get "/flows/get/${flow_path}")"
  if [ -z "$data" ]; then
    echo "pull-flow failed: empty response (${flow_path})" >&2
    exit 1
  fi

  ensure_parent_dir "$outfile"
  # Keep only the fields tracked locally; key order is kept stable for diffs.
  python_json "$data" <<'PY' | save_json_pretty "$outfile"
import json
import sys

flow = json.loads(sys.argv[1])
doc = {
    "path": flow.get("path"),
    "summary": flow.get("summary") or "",
    "description": flow.get("description") or "",
    "value": flow.get("value") or {},
    "schema": flow.get("schema") or {"type": "object", "properties": {}, "required": []},
}
print(json.dumps(doc, ensure_ascii=False))
PY
  echo "pulled flow: ${flow_path} -> ${outfile}"
}
# Push a local flow JSON file to Windmill by deleting and re-creating it.
# $1 - local flow JSON file; it must contain a top-level "path" field.
# NOTE(review): delete-then-create is destructive — the remote flow's history
# is lost, and if the create step fails the flow is gone entirely.
push_flow() {
  local json_file="$1"
  if [ ! -f "$json_file" ]; then
    echo "push-flow failed: file not found (${json_file})" >&2
    exit 1
  fi

  local flow_path payload
  # Extract the flow path from the JSON file (aborts if "path" is missing).
  flow_path="$(python_json "$json_file" <<'PY'
import json
import pathlib
import sys
obj = json.loads(pathlib.Path(sys.argv[1]).read_text(encoding="utf-8"))
path = obj.get("path")
if not path:
    raise SystemExit("path is required in flow json")
print(path)
PY
)"
  payload="$(cat "$json_file")"

  echo "push-flow: delete ${flow_path}"
  # '|| true': intentionally ignore delete failures (flow may not exist yet).
  api_delete "/flows/delete/${flow_path}" >/dev/null || true
  echo "push-flow: create ${flow_path}"
  api_post "/flows/create" "$payload" | json_pretty
}
# Pull every script and flow from the workspace into ./scripts and ./flows,
# save the raw list responses under ${STATE_DIR}, and refresh the baseline
# remote index consumed by status_remote.
# NOTE(review): list endpoints are queried with per_page=1000 and no paging;
# workspaces with more items would be truncated silently — confirm limits.
pull_all() {
  mkdir -p scripts flows "${STATE_DIR}"
  local scripts_json flows_json now_utc

  scripts_json="$(api_get "/scripts/list?per_page=1000")"
  flows_json="$(api_get "/flows/list?per_page=1000")"
  now_utc="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"

  # Persist the raw list payloads (pretty-printed) for inspection/debugging.
  python_json "$scripts_json" <<'PY' > "${STATE_DIR}/scripts.list.json"
import json
import sys
print(json.dumps(json.loads(sys.argv[1]), ensure_ascii=False, indent=2))
PY
  python_json "$flows_json" <<'PY' > "${STATE_DIR}/flows.list.json"
import json
import sys
print(json.dumps(json.loads(sys.argv[1]), ensure_ascii=False, indent=2))
PY

  # Collect remote paths, one per line, skipping entries without a path.
  local script_paths flow_paths
  script_paths="$(python_json "$scripts_json" <<'PY'
import json
import sys
items = json.loads(sys.argv[1])
for it in items:
    path = it.get("path")
    if path:
        print(path)
PY
)"
  flow_paths="$(python_json "$flows_json" <<'PY'
import json
import sys
items = json.loads(sys.argv[1])
for it in items:
    path = it.get("path")
    if path:
        print(path)
PY
)"

  # Download each script/flow to its conventional local location.
  # (<<< yields one empty line for an empty list; the continue handles it.)
  while IFS= read -r p; do
    [ -z "$p" ] && continue
    pull_script "$p" "$(path_to_script_file "$p")"
  done <<< "$script_paths"

  while IFS= read -r p; do
    [ -z "$p" ] && continue
    pull_flow "$p" "$(path_to_flow_file "$p")"
  done <<< "$flow_paths"

  # Record the baseline index so status_remote can diff against it later.
  build_remote_index "$scripts_json" "$flows_json" "$now_utc" > "${REMOTE_INDEX_FILE}"
  echo "remote index updated: ${REMOTE_INDEX_FILE}"
}
# Emit a JSON index of the workspace: per-path hash and timestamp for all
# scripts and flows, plus the sync timestamp ($3) and workspace id.
build_remote_index() {
  local scripts_json="$1"
  local flows_json="$2"
  local synced_at="$3"
  python_json "$scripts_json" "$flows_json" "$synced_at" <<'PY'
import json
import sys

index = {
    "synced_at": sys.argv[3],
    "workspace": None,
    "scripts": {},
    "flows": {},
}

def collect(kind, items):
    """Fold list items into index[kind]; first workspace_id seen wins."""
    for entry in items:
        path = entry.get("path")
        if not path:
            continue
        if index["workspace"] is None:
            index["workspace"] = entry.get("workspace_id")
        index[kind][path] = {
            "hash": entry.get("hash"),
            "updated_at": entry.get("edited_at") or entry.get("created_at"),
        }

collect("scripts", json.loads(sys.argv[1]))
collect("flows", json.loads(sys.argv[2]))
print(json.dumps(index, ensure_ascii=False, indent=2))
PY
}
# Compare the live remote state against the saved baseline index and print
# added (+), changed (~), and removed (-) paths for scripts and flows.
# With no baseline file it prints a hint and exits 0.
status_remote() {
  mkdir -p "${STATE_DIR}"
  local scripts_json flows_json now_utc
  scripts_json="$(api_get "/scripts/list?per_page=1000")"
  flows_json="$(api_get "/flows/list?per_page=1000")"
  now_utc="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"

  local current_index
  current_index="$(build_remote_index "$scripts_json" "$flows_json" "$now_utc")"
  # Keep a copy of the freshly built index next to the baseline for inspection.
  printf '%s\n' "$current_index" > "${STATE_DIR}/remote_index.current.json"

  if [ ! -f "${REMOTE_INDEX_FILE}" ]; then
    echo "No baseline index: ${REMOTE_INDEX_FILE}"
    echo "Run ./wm-api.sh pull-all first."
    # 'exit' (not 'return'): a missing baseline ends the whole script, status 0.
    exit 0
  fi

  # Diff baseline (argv[1]) against current (argv[2]) by path and hash.
  python_json "${REMOTE_INDEX_FILE}" "${STATE_DIR}/remote_index.current.json" <<'PY'
import json
import pathlib
import sys

old = json.loads(pathlib.Path(sys.argv[1]).read_text(encoding="utf-8"))
new = json.loads(pathlib.Path(sys.argv[2]).read_text(encoding="utf-8"))

def diff(kind):
    # Returns (added, changed, removed) sorted path lists for one kind.
    old_map = old.get(kind, {})
    new_map = new.get(kind, {})
    added = sorted(set(new_map) - set(old_map))
    removed = sorted(set(old_map) - set(new_map))
    changed = sorted(
        path for path in (set(new_map) & set(old_map))
        if (new_map[path].get("hash") != old_map[path].get("hash"))
    )
    return added, changed, removed

for kind in ("scripts", "flows"):
    added, changed, removed = diff(kind)
    print(f"[{kind}]")
    if not (added or changed or removed):
        print(" no changes")
        continue
    for p in added:
        print(f" + {p}")
    for p in changed:
        print(f" ~ {p}")
    for p in removed:
        print(f" - {p}")
PY
}
# Command dispatcher: the first CLI argument selects the operation; unknown
# or missing commands fall through to the help text.
case "${1:-help}" in
  whoami)
    api_get "/users/whoami" | json_pretty
    ;;
  scripts|list-scripts)
    api_get "/scripts/list?per_page=${2:-100}" | json_pretty
    ;;
  flows|list-flows)
    api_get "/flows/list?per_page=${2:-100}" | json_pretty
    ;;
  schedules|list-schedules)
    api_get "/schedules/list?per_page=${2:-100}" | json_pretty
    ;;
  get-script)
    require_arg "$0 get-script <path>" "${2:-}"
    api_get "/scripts/get/p/$2" | json_pretty
    ;;
  get-flow)
    require_arg "$0 get-flow <path>" "${2:-}"
    api_get "/flows/get/$2" | json_pretty
    ;;
  create-script)
    require_arg "$0 create-script <json-file>" "${2:-}"
    api_post "/scripts/create" "$(cat "$2")"
    ;;
  create-flow)
    require_arg "$0 create-flow <json-file>" "${2:-}"
    api_post "/flows/create" "$(cat "$2")"
    ;;
  update-flow)
    require_arg "$0 update-flow <path> <json-file>" "${2:-}"
    require_arg "$0 update-flow <path> <json-file>" "${3:-}"
    api_put "/flows/update/$2" "$(cat "$3")"
    ;;
  create-schedule)
    require_arg "$0 create-schedule <json-file>" "${2:-}"
    api_post "/schedules/create" "$(cat "$2")"
    ;;
  run-script)
    require_arg "$0 run-script <path> [json-args]" "${2:-}"
    # BUG FIX: the old "${3:-{}}" is parsed by bash as ${3:-{} plus a literal
    # '}', so a provided $3 gained a stray trailing brace, corrupting the
    # JSON body. Assign the '{}' default explicitly instead.
    run_args="${3:-}"
    [ -n "$run_args" ] || run_args='{}'
    api_post "/jobs/run/p/$2" "${run_args}"
    ;;
  run-flow)
    require_arg "$0 run-flow <path> [json-args]" "${2:-}"
    # Same default-argument fix as run-script.
    run_args="${3:-}"
    [ -n "$run_args" ] || run_args='{}'
    api_post "/jobs/run/f/$2" "${run_args}"
    ;;
  job-status)
    require_arg "$0 job-status <job-id>" "${2:-}"
    api_get "/jobs_u/get/$2" | json_pretty
    ;;
  job-result)
    require_arg "$0 job-result <job-id>" "${2:-}"
    api_get "/jobs_u/completed/get_result/$2" | json_pretty
    ;;
  pull-script)
    require_arg "$0 pull-script <path> <outfile>" "${2:-}"
    require_arg "$0 pull-script <path> <outfile>" "${3:-}"
    pull_script "$2" "$3"
    ;;
  push-script)
    require_arg "$0 push-script <path> <infile>" "${2:-}"
    require_arg "$0 push-script <path> <infile>" "${3:-}"
    push_script "$2" "$3"
    ;;
  pull-flow)
    require_arg "$0 pull-flow <path> <outfile>" "${2:-}"
    require_arg "$0 pull-flow <path> <outfile>" "${3:-}"
    pull_flow "$2" "$3"
    ;;
  push-flow)
    require_arg "$0 push-flow <json-file>" "${2:-}"
    push_flow "$2"
    ;;
  pull-all)
    pull_all
    ;;
  status-remote)
    status_remote
    ;;
  version)
    # Unauthenticated endpoint; prints the server version string.
    curl -sk "${WINDMILL_URL}/api/version" 2>/dev/null
    echo ""
    ;;
  help|*)
    cat <<'EOF'
Windmill REST API ヘルパー

使い方: ./wm-api.sh <command> [args...]

コマンド:
  whoami                        - 現在のユーザー情報を表示
  version                       - サーバーバージョンを表示
  scripts|list-scripts [n]      - スクリプト一覧を表示
  flows|list-flows [n]          - フロー一覧を表示
  schedules|list-schedules [n]  - スケジュール一覧を表示
  get-script <path>             - スクリプトの詳細を取得
  get-flow <path>               - フローの詳細を取得
  create-script <file>          - JSONファイルからスクリプトを作成
  create-flow <file>            - JSONファイルからフローを作成
  update-flow <path> <file>     - フローを更新
  create-schedule <file>        - JSONファイルからスケジュールを作成
  run-script <path> [args]      - スクリプトを実行
  run-flow <path> [args]        - フローを実行
  job-status <id>               - ジョブのステータスを確認
  job-result <id>               - ジョブの結果を取得
  pull-script <path> <outfile>  - スクリプトをローカルへ保存
  push-script <path> <infile>   - ローカルファイルをスクリプトへ反映
  pull-flow <path> <outfile>    - フローをローカルJSONへ保存
  push-flow <json-file>         - フローJSONを削除再作成で反映
  pull-all                      - scripts/flowsを一括pullしてstate更新
  status-remote                 - remote_index基準で差分表示
EOF
    ;;
esac