サーバーからフローを取得
This commit is contained in:
24
flows/git_sync.flow.json
Normal file
24
flows/git_sync.flow.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"path": "u/antigravity/git_sync",
|
||||
"summary": "Git Sync Workflow",
|
||||
"description": "Automatically sync Windmill workflows to Git repository (sync branch)",
|
||||
"value": {
|
||||
"modules": [
|
||||
{
|
||||
"id": "a",
|
||||
"value": {
|
||||
"lock": "",
|
||||
"type": "rawscript",
|
||||
"content": "#!/bin/bash\nset -e\nexport PATH=/usr/bin:/usr/local/bin:/usr/sbin:/sbin:/bin:$PATH\n\nGREEN=\"\\033[0;32m\"\nYELLOW=\"\\033[1;33m\"\nRED=\"\\033[0;31m\"\nNC=\"\\033[0m\"\n\necho -e \"${GREEN}=== Windmill Workflow Git Sync ===${NC}\"\n\nREPO_ROOT=\"/workspace\"\nWMILL_DIR=\"${REPO_ROOT}/workflows\"\n\nif ! command -v wmill &> /dev/null; then\n echo -e \"${YELLOW}Installing windmill-cli...${NC}\"\n npm install -g windmill-cli\n export PATH=$(npm prefix -g)/bin:$PATH\nfi\n\ngit config --global --add safe.directory \"$REPO_ROOT\"\ngit config --global user.email \"bot@keinafarm.net\"\ngit config --global user.name \"Windmill Bot\"\n\n# sync ブランチを使用\nCURRENT_BRANCH=$(git -C \"$REPO_ROOT\" rev-parse --abbrev-ref HEAD)\nif [ \"$CURRENT_BRANCH\" != \"sync\" ]; then\n echo -e \"${YELLOW}Switching to sync branch...${NC}\"\n git -C \"$REPO_ROOT\" fetch origin sync\n git -C \"$REPO_ROOT\" checkout sync\nfi\n\necho -e \"${YELLOW}Pulling from origin/sync...${NC}\"\ngit -C \"$REPO_ROOT\" pull --rebase origin sync || {\n echo -e \"${RED}Failed to pull from remote. Continuing...${NC}\"\n}\n\necho -e \"${YELLOW}Pulling from Windmill...${NC}\"\ncd \"$WMILL_DIR\"\nwmill sync pull --config-dir /workspace/wmill_config --skip-variables --skip-secrets --skip-resources --yes || exit 1\n\ncd \"$REPO_ROOT\"\nif [[ -n $(git status --porcelain) ]]; then\n echo -e \"${YELLOW}Changes detected, committing to Git...${NC}\"\n git add -A\n TIMESTAMP=$(date \"+%Y-%m-%d %H:%M:%S\")\n git commit -m \"Auto-sync: ${TIMESTAMP}\"\n echo -e \"${YELLOW}Pushing to Gitea (sync branch)...${NC}\"\n git push origin sync || {\n echo -e \"${RED}Failed to push.${NC}\"\n exit 1\n }\n echo -e \"${GREEN}Changes pushed to Gitea (sync branch)${NC}\"\nelse\n echo -e \"${GREEN}No changes detected${NC}\"\nfi\n\necho -e \"${GREEN}=== Sync Complete ===${NC}\"\n",
|
||||
"language": "bash",
|
||||
"input_transforms": {}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {},
|
||||
"required": []
|
||||
}
|
||||
}
|
||||
38
flows/hourly_chime.flow.json
Normal file
38
flows/hourly_chime.flow.json
Normal file
@@ -0,0 +1,38 @@
|
||||
{
|
||||
"path": "u/akiracraftwork/hourly_chime",
|
||||
"summary": "鳩時計機能",
|
||||
"description": "",
|
||||
"value": {
|
||||
"modules": [
|
||||
{
|
||||
"id": "a",
|
||||
"value": {
|
||||
"lock": "{\n \"dependencies\": {}\n}\n//bun.lock\n<empty>",
|
||||
"type": "rawscript",
|
||||
"content": "export async function main(\n device: string = \"オフィスの右エコー\",\n prefix: string = \"現在時刻は\",\n suffix: string = \"です\"\n) {\n const now = new Date();\n const hhmm = new Intl.DateTimeFormat(\"ja-JP\", {\n timeZone: \"Asia/Tokyo\",\n hour: \"2-digit\",\n minute: \"2-digit\",\n hour12: false,\n }).format(now); // 例: 09:30\n\n const [h, m] = hhmm.split(\":\");\n const text = `${prefix}${Number(h)}時${Number(m)}分${suffix}`;\n\n const res = await fetch(\"http://alexa_api:3500/speak\", {\n method: \"POST\",\n headers: { \"Content-Type\": \"application/json\" },\n body: JSON.stringify({ device, text }),\n });\n\n if (!res.ok) {\n const body = await res.text();\n throw new Error(`alexa-api error ${res.status}: ${body}`);\n }\n\n return { ok: true, device, text };\n}\n",
|
||||
"language": "bun",
|
||||
"input_transforms": {
|
||||
"device": {
|
||||
"type": "static",
|
||||
"value": "オフィスの右エコー"
|
||||
},
|
||||
"prefix": {
|
||||
"type": "static",
|
||||
"value": "現在時刻は"
|
||||
},
|
||||
"suffix": {
|
||||
"type": "static",
|
||||
"value": "です"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema": {
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"properties": {},
|
||||
"required": [],
|
||||
"type": "object"
|
||||
}
|
||||
}
|
||||
24
flows/konnnichiha.flow.json
Normal file
24
flows/konnnichiha.flow.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"path": "f/dev/konnnichiha",
|
||||
"summary": "Print greeting",
|
||||
"description": "",
|
||||
"value": {
|
||||
"modules": [
|
||||
{
|
||||
"id": "a",
|
||||
"value": {
|
||||
"lock": "# py: 3.12\n",
|
||||
"type": "rawscript",
|
||||
"content": "def main():\n print('こんにちは、世界')",
|
||||
"language": "python3",
|
||||
"input_transforms": {}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {},
|
||||
"required": []
|
||||
}
|
||||
}
|
||||
File diff suppressed because one or more lines are too long
@@ -6,14 +6,14 @@
|
||||
"modules": [
|
||||
{
|
||||
"id": "a",
|
||||
"summary": "変更確認・LINE通知",
|
||||
"value": {
|
||||
"lock": "# py: 3.12\nanyio==4.12.1\ncertifi==2026.1.4\nh11==0.16.0\nhttpcore==1.0.9\nhttpx==0.28.1\nidna==3.11\ntyping-extensions==4.15.0\nwmill==1.640.0",
|
||||
"type": "rawscript",
|
||||
"language": "python3",
|
||||
"content": "import urllib.request\nimport urllib.parse\nimport json\nimport ssl\nfrom datetime import datetime, timezone, timedelta\nimport wmill\n\nJST = timezone(timedelta(hours=9))\n\n\ndef main():\n # シークレット取得\n api_key = wmill.get_variable(\"u/admin/NOTIFICATION_API_KEY\")\n line_token = wmill.get_variable(\"u/admin/LINE_CHANNEL_ACCESS_TOKEN\")\n line_to = wmill.get_variable(\"u/admin/LINE_TO\")\n\n # 前回実行時刻を取得(初回は現在時刻 - 10分)\n try:\n last_checked = wmill.get_variable(\"u/admin/SHIRAOU_LAST_CHECKED_AT\")\n if not last_checked:\n last_checked = None\n except Exception:\n last_checked = None\n\n if last_checked:\n since = last_checked\n else:\n since = (datetime.now(JST) - timedelta(minutes=10)).isoformat()\n\n print(f\"[通知] 変更確認: since={since}\")\n\n # API呼び出し\n ssl_ctx = ssl.create_default_context()\n ssl_ctx.check_hostname = False\n ssl_ctx.verify_mode = ssl.CERT_NONE\n\n params = urllib.parse.urlencode({\"since\": since})\n url = f\"https://shiraou.keinafarm.net/reservations/api/changes/?{params}\"\n\n req = urllib.request.Request(url, headers={\"X-API-Key\": api_key})\n with urllib.request.urlopen(req, context=ssl_ctx, timeout=30) as resp:\n data = json.loads(resp.read().decode(\"utf-8\"))\n\n checked_at = data[\"checked_at\"]\n reservations = data.get(\"reservations\", [])\n usages = data.get(\"usages\", [])\n\n print(f\"[通知] checked_at={checked_at}, 予約={len(reservations)}件, 実績={len(usages)}件\")\n\n # 変更があればLINE通知(エラー時は状態を更新しない)\n if reservations or usages:\n message = _format_message(reservations, usages)\n _send_line(line_token, line_to, message)\n print(\"[通知] LINE送信完了\")\n else:\n print(\"[通知] 変更なし、通知スキップ\")\n\n # 正常完了時のみ状態更新\n wmill.set_variable(\"u/admin/SHIRAOU_LAST_CHECKED_AT\", checked_at)\n print(f\"[通知] last_checked_at更新: {checked_at}\")\n\n return {\n \"since\": since,\n \"checked_at\": checked_at,\n \"reservations_count\": len(reservations),\n \"usages_count\": len(usages),\n \"notified\": bool(reservations or usages),\n }\n\n\ndef 
_format_message(reservations, usages):\n lines = [\"\\U0001f4cb 営農システム 変更通知\\n\"]\n\n OP_R = {\n \"create\": (\"\\U0001f7e2\", \"予約作成\"),\n \"update\": (\"\\U0001f535\", \"予約変更\"),\n \"cancel\": (\"\\U0001f534\", \"予約キャンセル\"),\n }\n OP_U = {\n \"create\": (\"\\U0001f7e2\", \"実績登録\"),\n \"update\": (\"\\U0001f535\", \"実績修正\"),\n \"delete\": (\"\\U0001f534\", \"実績削除\"),\n }\n\n for r in reservations:\n start = r[\"start_at\"][:16].replace(\"T\", \" \")\n end = r[\"end_at\"][:16].replace(\"T\", \" \")\n icon, label = OP_R.get(r[\"operation\"], (\"\\u26aa\", r[\"operation\"]))\n lines += [\n f\"{icon} {label}\",\n f\" 機械: {r['machine_name']}\",\n f\" 利用者: {r['user_name']}\",\n f\" 日時: {start} \\uff5e {end}\",\n ]\n if r.get(\"reason\"):\n lines.append(f\" 理由: {r['reason']}\")\n lines.append(\"\")\n\n for u in usages:\n start = u[\"start_at\"][:16].replace(\"T\", \" \")\n icon, label = OP_U.get(u[\"operation\"], (\"\\u26aa\", u[\"operation\"]))\n lines += [\n f\"{icon} {label}\",\n f\" 機械: {u['machine_name']}\",\n f\" 利用者: {u['user_name']}\",\n f\" 利用量: {u['amount']}{u['unit']}\",\n f\" 日: {start[:10]}\",\n ]\n if u.get(\"reason\"):\n lines.append(f\" 理由: {u['reason']}\")\n lines.append(\"\")\n\n return \"\\n\".join(lines).strip()\n\n\ndef _send_line(token, to, message):\n payload = json.dumps({\n \"to\": to,\n \"messages\": [{\"type\": \"text\", \"text\": message}],\n }).encode(\"utf-8\")\n\n req = urllib.request.Request(\n \"https://api.line.me/v2/bot/message/push\",\n data=payload,\n headers={\n \"Authorization\": f\"Bearer {token}\",\n \"Content-Type\": \"application/json\",\n },\n method=\"POST\",\n )\n with urllib.request.urlopen(req, timeout=30) as resp:\n return resp.read().decode(\"utf-8\")\n",
|
||||
"input_transforms": {},
|
||||
"lock": ""
|
||||
}
|
||||
"language": "python3",
|
||||
"input_transforms": {}
|
||||
},
|
||||
"summary": "変更確認・LINE通知"
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
@@ -6,70 +6,70 @@
|
||||
"modules": [
|
||||
{
|
||||
"id": "a",
|
||||
"summary": "Step1: 診断データ生成",
|
||||
"value": {
|
||||
"lock": "# py: 3.12\n",
|
||||
"type": "rawscript",
|
||||
"language": "python3",
|
||||
"content": "import uuid\nfrom datetime import datetime, timezone\n\ndef main():\n \"\"\"診断データを生成する\"\"\"\n now = datetime.now(timezone.utc)\n run_id = str(uuid.uuid4())\n check_value = 2 + 2\n \n result = {\n \"timestamp\": now.isoformat(),\n \"run_id\": run_id,\n \"check\": check_value,\n \"python_version\": __import__('sys').version\n }\n print(f\"[Step1] 診断データ生成完了\")\n print(f\" run_id: {run_id}\")\n print(f\" timestamp: {now.isoformat()}\")\n print(f\" check: {check_value}\")\n return result\n",
|
||||
"input_transforms": {},
|
||||
"lock": ""
|
||||
}
|
||||
"language": "python3",
|
||||
"input_transforms": {}
|
||||
},
|
||||
"summary": "Step1: 診断データ生成"
|
||||
},
|
||||
{
|
||||
"id": "b",
|
||||
"summary": "Step2: データ検証",
|
||||
"value": {
|
||||
"lock": "# py: 3.12\n",
|
||||
"type": "rawscript",
|
||||
"language": "python3",
|
||||
"content": "from datetime import datetime, timezone\n\ndef main(step1_result: dict):\n \"\"\"Step1の結果を検証する\"\"\"\n errors = []\n \n # 計算チェック\n if step1_result.get(\"check\") != 4:\n errors.append(f\"計算エラー: expected 4, got {step1_result.get('check')}\")\n \n # run_idの存在チェック\n if not step1_result.get(\"run_id\"):\n errors.append(\"run_idが存在しない\")\n \n # timestampの存在チェック\n if not step1_result.get(\"timestamp\"):\n errors.append(\"timestampが存在しない\")\n \n if errors:\n error_msg = \"; \".join(errors)\n print(f\"[Step2] 検証失敗: {error_msg}\")\n raise Exception(f\"検証失敗: {error_msg}\")\n \n print(f\"[Step2] データ検証OK\")\n print(f\" 計算チェック: 2+2={step1_result['check']} ✓\")\n print(f\" run_id: {step1_result['run_id']} ✓\")\n print(f\" timestamp: {step1_result['timestamp']} ✓\")\n \n return {\n \"verification\": \"PASS\",\n \"step1_data\": step1_result\n }\n",
|
||||
"language": "python3",
|
||||
"input_transforms": {
|
||||
"step1_result": {
|
||||
"type": "javascript",
|
||||
"expr": "results.a"
|
||||
"expr": "results.a",
|
||||
"type": "javascript"
|
||||
}
|
||||
}
|
||||
},
|
||||
"lock": ""
|
||||
}
|
||||
"summary": "Step2: データ検証"
|
||||
},
|
||||
{
|
||||
"id": "c",
|
||||
"summary": "Step3: HTTPヘルスチェック",
|
||||
"value": {
|
||||
"lock": "# py: 3.12\n",
|
||||
"type": "rawscript",
|
||||
"language": "python3",
|
||||
"content": "import urllib.request\nimport ssl\n\ndef main(verification_result: dict):\n \"\"\"Windmillサーバー自身へのHTTPチェック\"\"\"\n url = \"https://windmill.keinafarm.net/api/version\"\n \n # SSL検証をスキップ(自己署名証明書対応)\n ctx = ssl.create_default_context()\n ctx.check_hostname = False\n ctx.verify_mode = ssl.CERT_NONE\n \n try:\n req = urllib.request.Request(url)\n with urllib.request.urlopen(req, context=ctx, timeout=10) as response:\n status_code = response.status\n body = response.read().decode('utf-8')\n except Exception as e:\n print(f\"[Step3] HTTPチェック失敗: {e}\")\n raise Exception(f\"HTTPヘルスチェック失敗: {e}\")\n \n print(f\"[Step3] HTTPヘルスチェックOK\")\n print(f\" URL: {url}\")\n print(f\" Status: {status_code}\")\n print(f\" Version: {body}\")\n \n return {\n \"http_check\": \"PASS\",\n \"status_code\": status_code,\n \"server_version\": body\n }\n",
|
||||
"language": "python3",
|
||||
"input_transforms": {
|
||||
"verification_result": {
|
||||
"type": "javascript",
|
||||
"expr": "results.b"
|
||||
"expr": "results.b",
|
||||
"type": "javascript"
|
||||
}
|
||||
}
|
||||
},
|
||||
"lock": ""
|
||||
}
|
||||
"summary": "Step3: HTTPヘルスチェック"
|
||||
},
|
||||
{
|
||||
"id": "d",
|
||||
"summary": "Step4: 年度判定 & 最終レポート",
|
||||
"value": {
|
||||
"lock": "# py: 3.12\n",
|
||||
"type": "rawscript",
|
||||
"language": "python3",
|
||||
"content": "from datetime import datetime, timezone\n\ndef main(step1_data: dict, verification: dict, http_check: dict):\n \"\"\"年度判定と最終診断レポートを生成\"\"\"\n now = datetime.now(timezone.utc)\n \n # 日本の年度判定(4月始まり)\n fiscal_year = now.year if now.month >= 4 else now.year - 1\n \n report = {\n \"status\": \"ALL OK\",\n \"fiscal_year\": fiscal_year,\n \"diagnostics\": {\n \"data_generation\": \"PASS\",\n \"data_verification\": verification.get(\"verification\", \"UNKNOWN\"),\n \"http_health\": http_check.get(\"http_check\", \"UNKNOWN\"),\n \"server_version\": http_check.get(\"server_version\", \"UNKNOWN\")\n },\n \"run_id\": step1_data.get(\"run_id\"),\n \"started_at\": step1_data.get(\"timestamp\"),\n \"completed_at\": now.isoformat()\n }\n \n print(\"\")\n print(\"========================================\")\n print(\" Windmill Heartbeat - 診断レポート\")\n print(\"========================================\")\n print(f\" Status: {report['status']}\")\n print(f\" 年度: {fiscal_year}年度\")\n print(f\" Run ID: {report['run_id']}\")\n print(f\" Server: {report['diagnostics']['server_version']}\")\n print(f\" 開始: {report['started_at']}\")\n print(f\" 完了: {report['completed_at']}\")\n print(\" ────────────────────────────────────\")\n print(f\" データ生成: PASS ✓\")\n print(f\" データ検証: {report['diagnostics']['data_verification']} ✓\")\n print(f\" HTTP確認: {report['diagnostics']['http_health']} ✓\")\n print(\"========================================\")\n print(\"\")\n \n return report\n",
|
||||
"language": "python3",
|
||||
"input_transforms": {
|
||||
"http_check": {
|
||||
"expr": "results.c",
|
||||
"type": "javascript"
|
||||
},
|
||||
"step1_data": {
|
||||
"type": "javascript",
|
||||
"expr": "results.a"
|
||||
"expr": "results.a",
|
||||
"type": "javascript"
|
||||
},
|
||||
"verification": {
|
||||
"type": "javascript",
|
||||
"expr": "results.b"
|
||||
},
|
||||
"http_check": {
|
||||
"type": "javascript",
|
||||
"expr": "results.c"
|
||||
"expr": "results.b",
|
||||
"type": "javascript"
|
||||
}
|
||||
}
|
||||
},
|
||||
"lock": ""
|
||||
}
|
||||
"summary": "Step4: 年度判定 & 最終レポート"
|
||||
}
|
||||
]
|
||||
},
|
||||
|
||||
24
flows/textout.flow.json
Normal file
24
flows/textout.flow.json
Normal file
@@ -0,0 +1,24 @@
|
||||
{
|
||||
"path": "f/dev/textout",
|
||||
"summary": "Display current time on startup",
|
||||
"description": "",
|
||||
"value": {
|
||||
"modules": [
|
||||
{
|
||||
"id": "a",
|
||||
"value": {
|
||||
"lock": "# py: 3.12\n",
|
||||
"type": "rawscript",
|
||||
"content": "def main():\n from datetime import datetime\n print(datetime.now().strftime('%H:%M:%S'))",
|
||||
"language": "python3",
|
||||
"input_transforms": {}
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema": {
|
||||
"type": "object",
|
||||
"properties": {},
|
||||
"required": []
|
||||
}
|
||||
}
|
||||
27
flows/weather_sync.flow.json
Normal file
27
flows/weather_sync.flow.json
Normal file
@@ -0,0 +1,27 @@
|
||||
{
|
||||
"path": "f/weather/weather_sync",
|
||||
"summary": "Weather Sync - 気象データ日次同期",
|
||||
"description": "Open-Meteo から昨日の気象データを取得し、Keinasystem DB に保存する。毎朝6時実行。",
|
||||
"value": {
|
||||
"modules": [
|
||||
{
|
||||
"id": "a",
|
||||
"value": {
|
||||
"lock": "# py: 3.12\nanyio==4.12.1\ncertifi==2026.2.25\ncharset-normalizer==3.4.4\nh11==0.16.0\nhttpcore==1.0.9\nhttpx==0.28.1\nidna==3.11\nrequests==2.32.5\ntyping-extensions==4.15.0\nurllib3==2.6.3\nwmill==1.646.0",
|
||||
"type": "rawscript",
|
||||
"content": "import wmill\nimport requests\nimport datetime\n\nLATITUDE = 33.213\nLONGITUDE = 133.133\nTIMEZONE = \"Asia/Tokyo\"\n\nOPEN_METEO_URL = \"https://archive-api.open-meteo.com/v1/archive\"\nDAILY_VARS = [\n \"temperature_2m_mean\",\n \"temperature_2m_max\",\n \"temperature_2m_min\",\n \"sunshine_duration\",\n \"precipitation_sum\",\n \"wind_speed_10m_max\",\n \"surface_pressure_min\",\n]\n\n\ndef main():\n api_key = wmill.get_variable(\"u/admin/KEINASYSTEM_API_KEY\")\n base_url = wmill.get_variable(\"u/admin/KEINASYSTEM_API_URL\").rstrip(\"/\")\n sync_url = f\"{base_url}/api/weather/sync/\"\n\n yesterday = (datetime.date.today() - datetime.timedelta(days=1)).isoformat()\n print(f\"Fetching weather data for {yesterday} ...\")\n\n params = {\n \"latitude\": LATITUDE,\n \"longitude\": LONGITUDE,\n \"start_date\": yesterday,\n \"end_date\": yesterday,\n \"daily\": DAILY_VARS,\n \"timezone\": TIMEZONE,\n }\n resp = requests.get(OPEN_METEO_URL, params=params, timeout=30)\n if resp.status_code != 200:\n raise Exception(f\"Open-Meteo API error: {resp.status_code} {resp.text[:300]}\")\n\n daily = resp.json().get(\"daily\", {})\n dates = daily.get(\"time\", [])\n if not dates:\n print(\"No data returned from Open-Meteo.\")\n return {\"status\": \"no_data\"}\n\n sunshine_raw = daily.get(\"sunshine_duration\", [])\n records = []\n for i, d in enumerate(dates):\n sun_sec = sunshine_raw[i]\n records.append({\n \"date\": d,\n \"temp_mean\": daily[\"temperature_2m_mean\"][i],\n \"temp_max\": daily[\"temperature_2m_max\"][i],\n \"temp_min\": daily[\"temperature_2m_min\"][i],\n \"sunshine_h\": round(sun_sec / 3600, 2) if sun_sec is not None else None,\n \"precip_mm\": daily[\"precipitation_sum\"][i],\n \"wind_max\": daily[\"wind_speed_10m_max\"][i],\n \"pressure_min\": daily[\"surface_pressure_min\"][i],\n })\n\n headers = {\n \"X-API-Key\": api_key,\n \"Content-Type\": \"application/json\",\n }\n post_resp = requests.post(sync_url, json=records, headers=headers, 
timeout=30)\n if post_resp.status_code not in (200, 201):\n raise Exception(f\"Keinasystem sync error: {post_resp.status_code} {post_resp.text[:300]}\")\n\n result = post_resp.json()\n print(f\"Sync complete: {result}\")\n return result\n",
|
||||
"language": "python3",
|
||||
"input_transforms": {}
|
||||
},
|
||||
"summary": "気象データ取得・同期"
|
||||
}
|
||||
]
|
||||
},
|
||||
"schema": {
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"type": "object",
|
||||
"order": [],
|
||||
"properties": {},
|
||||
"required": []
|
||||
}
|
||||
}
|
||||
@@ -1,69 +1,20 @@
|
||||
/**
|
||||
* alexa_speak.ts
|
||||
* 指定した Echo デバイスにテキストを読み上げさせる Windmill スクリプト
|
||||
*
|
||||
* パラメータ:
|
||||
* device - ドロップダウンから選択するデバイス(内部的にはシリアル番号)
|
||||
* text - 読み上げるテキスト
|
||||
*/
|
||||
|
||||
const ALEXA_API_URL = "http://alexa_api:3500";
|
||||
|
||||
type DeviceOption = { value: string; label: string };
|
||||
|
||||
const FALLBACK_DEVICE_OPTIONS: DeviceOption[] = [
|
||||
{ value: "G0922H085165007R", label: "プレハブ (G0922H085165007R)" },
|
||||
{ value: "G8M2DB08522600RL", label: "リビングエコー1 (G8M2DB08522600RL)" },
|
||||
{ value: "G8M2DB08522503WF", label: "リビングエコー2 (G8M2DB08522503WF)" },
|
||||
{ value: "G0922H08525302K5", label: "オフィスの右エコー (G0922H08525302K5)" },
|
||||
{ value: "G0922H08525302J9", label: "オフィスの左エコー (G0922H08525302J9)" },
|
||||
{ value: "G8M2HN08534302XH", label: "寝室のエコー (G8M2HN08534302XH)" },
|
||||
];
|
||||
|
||||
// Windmill Dynamic Select: 引数名 `device` に対応する `DynSelect_device` と `device()` を定義
|
||||
export type DynSelect_device = string;
|
||||
|
||||
export async function device(): Promise<DeviceOption[]> {
|
||||
try {
|
||||
const res = await fetch(`${ALEXA_API_URL}/devices`);
|
||||
if (!res.ok) return FALLBACK_DEVICE_OPTIONS;
|
||||
|
||||
const devices = (await res.json()) as Array<{
|
||||
name?: string;
|
||||
serial?: string;
|
||||
family?: string;
|
||||
}>;
|
||||
|
||||
const options = devices
|
||||
.filter((d) => d.family === "ECHO" && d.serial)
|
||||
.map((d) => ({
|
||||
value: d.serial as string,
|
||||
label: `${d.name ?? d.serial} (${d.serial})`,
|
||||
}))
|
||||
.sort((a, b) => a.label.localeCompare(b.label, "ja"));
|
||||
|
||||
return options.length > 0 ? options : FALLBACK_DEVICE_OPTIONS;
|
||||
} catch {
|
||||
return FALLBACK_DEVICE_OPTIONS;
|
||||
}
|
||||
}
|
||||
|
||||
export async function main(
|
||||
device: DynSelect_device,
|
||||
device: string,
|
||||
text: string,
|
||||
): Promise<{ ok: boolean; device: string; text: string }> {
|
||||
const ALEXA_API_URL = "http://alexa_api:3500";
|
||||
|
||||
const res = await fetch(`${ALEXA_API_URL}/speak`, {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
body: JSON.stringify({ device, text }),
|
||||
body: JSON.stringify({ device, text }), // ← SSMLなし、素のテキスト
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
const body = await res.json().catch(() => ({}));
|
||||
throw new Error(
|
||||
`alexa-api error ${res.status}: ${JSON.stringify(body)}`
|
||||
);
|
||||
throw new Error(`alexa-api error ${res.status}: ${JSON.stringify(body)}`);
|
||||
}
|
||||
|
||||
return await res.json();
|
||||
}
|
||||
|
||||
|
||||
106
state/flows.list.json
Normal file
106
state/flows.list.json
Normal file
@@ -0,0 +1,106 @@
|
||||
[
|
||||
{
|
||||
"workspace_id": "admins",
|
||||
"path": "u/akiracraftwork/hourly_chime",
|
||||
"summary": "鳩時計機能",
|
||||
"description": "",
|
||||
"edited_by": "akiracraftwork@gmail.com",
|
||||
"edited_at": "2026-03-03T05:37:39.969305Z",
|
||||
"archived": false,
|
||||
"extra_perms": {},
|
||||
"starred": false,
|
||||
"has_draft": false,
|
||||
"ws_error_handler_muted": false
|
||||
},
|
||||
{
|
||||
"workspace_id": "admins",
|
||||
"path": "f/dev/textout",
|
||||
"summary": "Display current time on startup",
|
||||
"description": "",
|
||||
"edited_by": "akiracraftwork@gmail.com",
|
||||
"edited_at": "2026-03-02T05:05:05.215985Z",
|
||||
"archived": false,
|
||||
"extra_perms": {},
|
||||
"starred": false,
|
||||
"has_draft": false,
|
||||
"ws_error_handler_muted": false
|
||||
},
|
||||
{
|
||||
"workspace_id": "admins",
|
||||
"path": "f/dev/konnnichiha",
|
||||
"summary": "Print greeting",
|
||||
"description": "",
|
||||
"edited_by": "akiracraftwork@gmail.com",
|
||||
"edited_at": "2026-03-02T04:53:56.968574Z",
|
||||
"archived": false,
|
||||
"extra_perms": {},
|
||||
"starred": false,
|
||||
"has_draft": false,
|
||||
"ws_error_handler_muted": false
|
||||
},
|
||||
{
|
||||
"workspace_id": "admins",
|
||||
"path": "u/antigravity/git_sync",
|
||||
"summary": "Git Sync Workflow",
|
||||
"description": "Automatically sync Windmill workflows to Git repository (sync branch)",
|
||||
"edited_by": "akiracraftwork@gmail.com",
|
||||
"edited_at": "2026-03-01T17:28:14.331046Z",
|
||||
"archived": false,
|
||||
"extra_perms": {},
|
||||
"starred": false,
|
||||
"has_draft": false,
|
||||
"ws_error_handler_muted": false
|
||||
},
|
||||
{
|
||||
"workspace_id": "admins",
|
||||
"path": "f/weather/weather_sync",
|
||||
"summary": "Weather Sync - 気象データ日次同期",
|
||||
"description": "Open-Meteo から昨日の気象データを取得し、Keinasystem DB に保存する。毎朝6時実行。",
|
||||
"edited_by": "akiracraftwork@gmail.com",
|
||||
"edited_at": "2026-02-28T04:31:27.835748Z",
|
||||
"archived": false,
|
||||
"extra_perms": {},
|
||||
"starred": false,
|
||||
"has_draft": false,
|
||||
"ws_error_handler_muted": false
|
||||
},
|
||||
{
|
||||
"workspace_id": "admins",
|
||||
"path": "f/mail/mail_filter",
|
||||
"summary": "メールフィルタリング",
|
||||
"description": "IMAPで新着メールを受信し、送信者ルール確認→LLM判定→LINE通知を行う。Keinasystemと連携。Gmail→Hotmail→Xserverの順で段階的に有効化する。",
|
||||
"edited_by": "akiracraftwork@gmail.com",
|
||||
"edited_at": "2026-02-24T06:41:54.748865Z",
|
||||
"archived": false,
|
||||
"extra_perms": {},
|
||||
"starred": false,
|
||||
"has_draft": false,
|
||||
"ws_error_handler_muted": false
|
||||
},
|
||||
{
|
||||
"workspace_id": "admins",
|
||||
"path": "f/shiraou/shiraou_notification",
|
||||
"summary": "白皇集落営農 変更通知",
|
||||
"description": "shiraou.keinafarm.net の予約・実績変更をポーリングし、変更があればLINEで管理者に通知する。5分毎に実行。",
|
||||
"edited_by": "akiracraftwork@gmail.com",
|
||||
"edited_at": "2026-02-21T06:33:11.078673Z",
|
||||
"archived": false,
|
||||
"extra_perms": {},
|
||||
"starred": false,
|
||||
"has_draft": false,
|
||||
"ws_error_handler_muted": false
|
||||
},
|
||||
{
|
||||
"workspace_id": "admins",
|
||||
"path": "f/app_custom/system_heartbeat",
|
||||
"summary": "Windmill Heartbeat - システム自己診断",
|
||||
"description": "Windmillの動作確認用ワークフロー。UUID生成、時刻取得、計算チェック、HTTPヘルスチェック、年度判定を行い、全ステップの正常性を検証する。",
|
||||
"edited_by": "akiracraftwork@gmail.com",
|
||||
"edited_at": "2026-02-21T03:43:55.495111Z",
|
||||
"archived": false,
|
||||
"extra_perms": {},
|
||||
"starred": false,
|
||||
"has_draft": false,
|
||||
"ws_error_handler_muted": false
|
||||
}
|
||||
]
|
||||
44
state/remote_index.json
Normal file
44
state/remote_index.json
Normal file
@@ -0,0 +1,44 @@
|
||||
{
|
||||
"synced_at": "2026-03-03T06:24:34Z",
|
||||
"workspace": "admins",
|
||||
"scripts": {
|
||||
"u/admin/alexa_speak": {
|
||||
"hash": "3783872112d1a24c",
|
||||
"updated_at": "2026-03-03T02:57:13.068287Z"
|
||||
}
|
||||
},
|
||||
"flows": {
|
||||
"u/akiracraftwork/hourly_chime": {
|
||||
"hash": null,
|
||||
"updated_at": "2026-03-03T05:37:39.969305Z"
|
||||
},
|
||||
"f/dev/textout": {
|
||||
"hash": null,
|
||||
"updated_at": "2026-03-02T05:05:05.215985Z"
|
||||
},
|
||||
"f/dev/konnnichiha": {
|
||||
"hash": null,
|
||||
"updated_at": "2026-03-02T04:53:56.968574Z"
|
||||
},
|
||||
"u/antigravity/git_sync": {
|
||||
"hash": null,
|
||||
"updated_at": "2026-03-01T17:28:14.331046Z"
|
||||
},
|
||||
"f/weather/weather_sync": {
|
||||
"hash": null,
|
||||
"updated_at": "2026-02-28T04:31:27.835748Z"
|
||||
},
|
||||
"f/mail/mail_filter": {
|
||||
"hash": null,
|
||||
"updated_at": "2026-02-24T06:41:54.748865Z"
|
||||
},
|
||||
"f/shiraou/shiraou_notification": {
|
||||
"hash": null,
|
||||
"updated_at": "2026-02-21T06:33:11.078673Z"
|
||||
},
|
||||
"f/app_custom/system_heartbeat": {
|
||||
"hash": null,
|
||||
"updated_at": "2026-02-21T03:43:55.495111Z"
|
||||
}
|
||||
}
|
||||
}
|
||||
18
state/scripts.list.json
Normal file
18
state/scripts.list.json
Normal file
@@ -0,0 +1,18 @@
|
||||
[
|
||||
{
|
||||
"hash": "3783872112d1a24c",
|
||||
"path": "u/admin/alexa_speak",
|
||||
"summary": "Echo デバイスに TTS で読み上げ",
|
||||
"created_at": "2026-03-03T02:57:13.068287Z",
|
||||
"archived": false,
|
||||
"extra_perms": {},
|
||||
"language": "bun",
|
||||
"starred": false,
|
||||
"tag": null,
|
||||
"description": "指定した Echo デバイスにテキストを読み上げさせる",
|
||||
"has_draft": false,
|
||||
"has_deploy_errors": false,
|
||||
"ws_error_handler_muted": false,
|
||||
"kind": "script"
|
||||
}
|
||||
]
|
||||
437
wm-api.sh
437
wm-api.sh
@@ -8,11 +8,28 @@ set -euo pipefail
|
||||
WINDMILL_URL="${WINDMILL_URL:-https://windmill.keinafarm.net}"
|
||||
WINDMILL_TOKEN="${WINDMILL_TOKEN:-qLJ3VPZ61kTDiIwaUPUu1dXszGrsN1Dh}"
|
||||
WINDMILL_WORKSPACE="${WINDMILL_WORKSPACE:-admins}"
|
||||
STATE_DIR="${STATE_DIR:-state}"
|
||||
REMOTE_INDEX_FILE="${REMOTE_INDEX_FILE:-${STATE_DIR}/remote_index.json}"
|
||||
|
||||
API_BASE="${WINDMILL_URL}/api/w/${WINDMILL_WORKSPACE}"
|
||||
AUTH_HEADER="Authorization: Bearer ${WINDMILL_TOKEN}"
|
||||
|
||||
# ヘルパー関数
|
||||
require_arg() {
|
||||
if [ -z "${2:-}" ]; then
|
||||
echo "Usage: $1"
|
||||
exit 1
|
||||
fi
|
||||
}
|
||||
|
||||
ensure_parent_dir() {
|
||||
local out="$1"
|
||||
local dir
|
||||
dir="$(dirname "$out")"
|
||||
if [ "$dir" != "." ]; then
|
||||
mkdir -p "$dir"
|
||||
fi
|
||||
}
|
||||
|
||||
api_get() {
|
||||
curl -sk -H "${AUTH_HEADER}" "${API_BASE}$1" 2>/dev/null
|
||||
}
|
||||
@@ -29,98 +46,398 @@ api_delete() {
|
||||
curl -sk -X DELETE -H "${AUTH_HEADER}" "${API_BASE}$1" 2>/dev/null
|
||||
}
|
||||
|
||||
# コマンド
|
||||
json_pretty() {
|
||||
python3 -m json.tool
|
||||
}
|
||||
|
||||
save_json_pretty() {
|
||||
local out="$1"
|
||||
ensure_parent_dir "$out"
|
||||
python3 -c 'import json,sys; print(json.dumps(json.load(sys.stdin), ensure_ascii=False, indent=2))' > "$out"
|
||||
}
|
||||
|
||||
python_json() {
|
||||
python3 - "$@"
|
||||
}
|
||||
|
||||
path_to_script_file() {
|
||||
local path="$1"
|
||||
echo "scripts/${path##*/}.ts"
|
||||
}
|
||||
|
||||
path_to_flow_file() {
|
||||
local path="$1"
|
||||
echo "flows/${path##*/}.flow.json"
|
||||
}
|
||||
|
||||
pull_script() {
|
||||
local script_path="$1"
|
||||
local outfile="$2"
|
||||
|
||||
local data
|
||||
data="$(api_get "/scripts/get/p/${script_path}")"
|
||||
if [ -z "$data" ]; then
|
||||
echo "pull-script failed: empty response (${script_path})" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ensure_parent_dir "$outfile"
|
||||
python_json "$outfile" "$data" <<'PY'
|
||||
import json
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
outfile = pathlib.Path(sys.argv[1])
|
||||
obj = json.loads(sys.argv[2])
|
||||
content = obj.get("content")
|
||||
if content is None:
|
||||
raise SystemExit("content not found in script payload")
|
||||
outfile.write_text(content, encoding="utf-8", newline="\n")
|
||||
PY
|
||||
echo "pulled script: ${script_path} -> ${outfile}"
|
||||
}
|
||||
|
||||
push_script() {
|
||||
local script_path="$1"
|
||||
local infile="$2"
|
||||
|
||||
if [ ! -f "$infile" ]; then
|
||||
echo "push-script failed: file not found (${infile})" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
local current payload
|
||||
current="$(api_get "/scripts/get/p/${script_path}")"
|
||||
if [ -z "$current" ]; then
|
||||
echo "push-script failed: empty response (${script_path})" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
payload="$(python_json "$script_path" "$infile" "$current" <<'PY'
|
||||
import json
|
||||
import pathlib
|
||||
import sys
|
||||
|
||||
script_path, infile = sys.argv[1], pathlib.Path(sys.argv[2])
|
||||
remote = json.loads(sys.argv[3])
|
||||
content = infile.read_text(encoding="utf-8")
|
||||
|
||||
payload = {
|
||||
"path": script_path,
|
||||
"parent_hash": remote.get("hash"),
|
||||
"summary": remote.get("summary") or "",
|
||||
"description": remote.get("description") or "",
|
||||
"content": content,
|
||||
"schema": remote.get("schema") or {"type": "object", "properties": {}, "required": []},
|
||||
"language": remote.get("language") or "bun",
|
||||
"kind": remote.get("kind") or "script",
|
||||
"lock": remote.get("lock") or "",
|
||||
}
|
||||
|
||||
missing = [k for k in ("path", "parent_hash", "content", "schema", "language", "kind", "lock") if payload.get(k) is None]
|
||||
if missing:
|
||||
raise SystemExit(f"missing required payload fields: {', '.join(missing)}")
|
||||
|
||||
print(json.dumps(payload, ensure_ascii=False))
|
||||
PY
|
||||
)"
|
||||
api_post "/scripts/create" "$payload" | json_pretty
|
||||
}
|
||||
|
||||
pull_flow() {
|
||||
local flow_path="$1"
|
||||
local outfile="$2"
|
||||
|
||||
local data
|
||||
data="$(api_get "/flows/get/${flow_path}")"
|
||||
if [ -z "$data" ]; then
|
||||
echo "pull-flow failed: empty response (${flow_path})" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
ensure_parent_dir "$outfile"
|
||||
python_json "$data" <<'PY' | save_json_pretty "$outfile"
|
||||
import json
|
||||
import sys
|
||||
|
||||
obj = json.loads(sys.argv[1])
|
||||
out = {
|
||||
"path": obj.get("path"),
|
||||
"summary": obj.get("summary") or "",
|
||||
"description": obj.get("description") or "",
|
||||
"value": obj.get("value") or {},
|
||||
"schema": obj.get("schema") or {"type": "object", "properties": {}, "required": []},
|
||||
}
|
||||
print(json.dumps(out, ensure_ascii=False))
|
||||
PY
|
||||
echo "pulled flow: ${flow_path} -> ${outfile}"
|
||||
}
|
||||
|
||||
# Push a local flow definition to the server. The target path is taken from
# the "path" field inside the JSON file; the flow is deleted first (failures
# ignored) and then re-created, i.e. a delete-and-recreate upsert.
#   $1 - local flow JSON file
push_flow() {
    local json_file="$1"
    [ -f "$json_file" ] || {
        echo "push-flow failed: file not found (${json_file})" >&2
        exit 1
    }

    local flow_path payload
    payload="$(<"$json_file")"
    # The server-side path comes from the file itself; fail fast if absent.
    flow_path="$(python_json "$json_file" <<'PY'
import json
import pathlib
import sys
obj = json.loads(pathlib.Path(sys.argv[1]).read_text(encoding="utf-8"))
path = obj.get("path")
if not path:
    raise SystemExit("path is required in flow json")
print(path)
PY
)"

    echo "push-flow: delete ${flow_path}"
    api_delete "/flows/delete/${flow_path}" >/dev/null || true
    echo "push-flow: create ${flow_path}"
    api_post "/flows/create" "$payload" | json_pretty
}
|
||||
|
||||
# Pull every remote script and flow into the local tree, snapshot the raw
# list responses under STATE_DIR, and rebuild the remote-index baseline.
pull_all() {
    mkdir -p scripts flows "${STATE_DIR}"

    local scripts_json flows_json now_utc
    scripts_json="$(api_get "/scripts/list?per_page=1000")"
    flows_json="$(api_get "/flows/list?per_page=1000")"
    now_utc="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"

    # Persist the raw list responses, pretty-printed, for later inspection.
    python_json "$scripts_json" <<'PY' > "${STATE_DIR}/scripts.list.json"
import json
import sys
print(json.dumps(json.loads(sys.argv[1]), ensure_ascii=False, indent=2))
PY
    python_json "$flows_json" <<'PY' > "${STATE_DIR}/flows.list.json"
import json
import sys
print(json.dumps(json.loads(sys.argv[1]), ensure_ascii=False, indent=2))
PY

    # Extract one path per line from each listing.
    local script_paths flow_paths
    script_paths="$(python_json "$scripts_json" <<'PY'
import json
import sys
items = json.loads(sys.argv[1])
for it in items:
    path = it.get("path")
    if path:
        print(path)
PY
)"
    flow_paths="$(python_json "$flows_json" <<'PY'
import json
import sys
items = json.loads(sys.argv[1])
for it in items:
    path = it.get("path")
    if path:
        print(path)
PY
)"

    # Here-strings (not pipes) keep the loop in the current shell so that
    # an `exit` inside pull_script/pull_flow aborts the whole run.
    while IFS= read -r entry; do
        [ -z "$entry" ] && continue
        pull_script "$entry" "$(path_to_script_file "$entry")"
    done <<< "$script_paths"

    while IFS= read -r entry; do
        [ -z "$entry" ] && continue
        pull_flow "$entry" "$(path_to_flow_file "$entry")"
    done <<< "$flow_paths"

    build_remote_index "$scripts_json" "$flows_json" "$now_utc" > "${REMOTE_INDEX_FILE}"
    echo "remote index updated: ${REMOTE_INDEX_FILE}"
}
|
||||
|
||||
# Build a JSON index of the remote workspace state and print it to stdout:
#   { "synced_at": ..., "workspace": ..., "scripts": {path: {hash, updated_at}},
#     "flows": {path: {hash, updated_at}} }
#   $1 - scripts list JSON (from /scripts/list)
#   $2 - flows list JSON (from /flows/list)
#   $3 - sync timestamp to record as "synced_at"
build_remote_index() {
    local scripts_json="$1"
    local flows_json="$2"
    local synced_at="$3"
    # Quoted heredoc delimiter ('PY') passes the Python source verbatim,
    # with no shell expansion inside it.
    python_json "$scripts_json" "$flows_json" "$synced_at" <<'PY'
import json
import sys

scripts = json.loads(sys.argv[1])
flows = json.loads(sys.argv[2])
synced_at = sys.argv[3]

index = {
    "synced_at": synced_at,
    "workspace": None,
    "scripts": {},
    "flows": {},
}

for item in scripts:
    path = item.get("path")
    if not path:
        continue
    if index["workspace"] is None:
        index["workspace"] = item.get("workspace_id")
    index["scripts"][path] = {
        "hash": item.get("hash"),
        "updated_at": item.get("edited_at") or item.get("created_at"),
    }

for item in flows:
    path = item.get("path")
    if not path:
        continue
    if index["workspace"] is None:
        index["workspace"] = item.get("workspace_id")
    index["flows"][path] = {
        "hash": item.get("hash"),
        "updated_at": item.get("edited_at") or item.get("created_at"),
    }

print(json.dumps(index, ensure_ascii=False, indent=2))
PY
}
|
||||
|
||||
# Compare the live server state against the saved remote-index baseline
# (REMOTE_INDEX_FILE) and print added (+), changed (~) and removed (-)
# entries per kind. Exits 0 with a hint when no baseline exists yet.
status_remote() {
    mkdir -p "${STATE_DIR}"

    local scripts_json flows_json now_utc current_index
    scripts_json="$(api_get "/scripts/list?per_page=1000")"
    flows_json="$(api_get "/flows/list?per_page=1000")"
    now_utc="$(date -u +"%Y-%m-%dT%H:%M:%SZ")"

    # Snapshot the current remote state next to the baseline for diffing.
    current_index="$(build_remote_index "$scripts_json" "$flows_json" "$now_utc")"
    printf '%s\n' "$current_index" > "${STATE_DIR}/remote_index.current.json"

    [ -f "${REMOTE_INDEX_FILE}" ] || {
        echo "No baseline index: ${REMOTE_INDEX_FILE}"
        echo "Run ./wm-api.sh pull-all first."
        exit 0
    }

    python_json "${REMOTE_INDEX_FILE}" "${STATE_DIR}/remote_index.current.json" <<'PY'
import json
import pathlib
import sys

old = json.loads(pathlib.Path(sys.argv[1]).read_text(encoding="utf-8"))
new = json.loads(pathlib.Path(sys.argv[2]).read_text(encoding="utf-8"))

def diff(kind):
    old_map = old.get(kind, {})
    new_map = new.get(kind, {})
    added = sorted(set(new_map) - set(old_map))
    removed = sorted(set(old_map) - set(new_map))
    changed = sorted(
        path for path in (set(new_map) & set(old_map))
        if (new_map[path].get("hash") != old_map[path].get("hash"))
    )
    return added, changed, removed

for kind in ("scripts", "flows"):
    added, changed, removed = diff(kind)
    print(f"[{kind}]")
    if not (added or changed or removed):
        print(" no changes")
        continue
    for p in added:
        print(f" + {p}")
    for p in changed:
        print(f" ~ {p}")
    for p in removed:
        print(f" - {p}")
PY
}
|
||||
|
||||
# Command dispatcher. $1 selects the subcommand (default: help); extra
# positional args are command-specific. require_arg exits with a usage
# message when a mandatory argument is missing.
case "${1:-help}" in
    whoami)
        api_get "/users/whoami" | json_pretty
        ;;
    scripts|list-scripts)
        api_get "/scripts/list?per_page=${2:-100}" | json_pretty
        ;;
    flows|list-flows)
        api_get "/flows/list?per_page=${2:-100}" | json_pretty
        ;;
    schedules|list-schedules)
        api_get "/schedules/list?per_page=${2:-100}" | json_pretty
        ;;
    get-script)
        require_arg "$0 get-script <path>" "${2:-}"
        api_get "/scripts/get/p/$2" | json_pretty
        ;;
    get-flow)
        require_arg "$0 get-flow <path>" "${2:-}"
        api_get "/flows/get/$2" | json_pretty
        ;;
    create-script)
        require_arg "$0 create-script <json-file>" "${2:-}"
        api_post "/scripts/create" "$(cat "$2")"
        ;;
    create-flow)
        require_arg "$0 create-flow <json-file>" "${2:-}"
        api_post "/flows/create" "$(cat "$2")"
        ;;
    update-flow)
        require_arg "$0 update-flow <path> <json-file>" "${2:-}"
        require_arg "$0 update-flow <path> <json-file>" "${3:-}"
        api_put "/flows/update/$2" "$(cat "$3")"
        ;;
    create-schedule)
        require_arg "$0 create-schedule <json-file>" "${2:-}"
        api_post "/schedules/create" "$(cat "$2")"
        ;;
    run-script)
        require_arg "$0 run-script <path> [json-args]" "${2:-}"
        # BUG FIX: "${3:-{}}" appends a stray "}" when $3 IS provided,
        # because bash ends the expansion at the first unescaped "}".
        # Default to an empty JSON object explicitly instead.
        local_args="${3:-}"
        [ -n "$local_args" ] || local_args="{}"
        api_post "/jobs/run/p/$2" "${local_args}"
        ;;
    run-flow)
        require_arg "$0 run-flow <path> [json-args]" "${2:-}"
        # Same ${3:-{}} pitfall as run-script; see comment above.
        local_args="${3:-}"
        [ -n "$local_args" ] || local_args="{}"
        api_post "/jobs/run/f/$2" "${local_args}"
        ;;
    job-status)
        require_arg "$0 job-status <job-id>" "${2:-}"
        api_get "/jobs_u/get/$2" | json_pretty
        ;;
    job-result)
        require_arg "$0 job-result <job-id>" "${2:-}"
        api_get "/jobs_u/completed/get_result/$2" | json_pretty
        ;;
    pull-script)
        require_arg "$0 pull-script <path> <outfile>" "${2:-}"
        require_arg "$0 pull-script <path> <outfile>" "${3:-}"
        pull_script "$2" "$3"
        ;;
    push-script)
        require_arg "$0 push-script <path> <infile>" "${2:-}"
        require_arg "$0 push-script <path> <infile>" "${3:-}"
        push_script "$2" "$3"
        ;;
    pull-flow)
        require_arg "$0 pull-flow <path> <outfile>" "${2:-}"
        require_arg "$0 pull-flow <path> <outfile>" "${3:-}"
        pull_flow "$2" "$3"
        ;;
    push-flow)
        require_arg "$0 push-flow <json-file>" "${2:-}"
        push_flow "$2"
        ;;
    pull-all)
        pull_all
        ;;
    status-remote)
        status_remote
        ;;
    version)
        curl -sk "${WINDMILL_URL}/api/version" 2>/dev/null
        echo ""
        ;;
    help|*)
        # NOTE(review): the help entries for create-flow / update-flow /
        # create-schedule / run-script fell in an elided diff hunk and were
        # reconstructed from the implemented commands — confirm wording.
        cat <<'EOF'
Windmill REST API ヘルパー

使い方: ./wm-api.sh <command> [args...]

コマンド:
  whoami                        - 現在のユーザー情報を表示
  version                       - サーバーバージョンを表示
  scripts|list-scripts [n]      - スクリプト一覧を表示
  flows|list-flows [n]          - フロー一覧を表示
  schedules|list-schedules [n]  - スケジュール一覧を表示
  get-script <path>             - スクリプトの詳細を取得
  get-flow <path>               - フローの詳細を取得
  create-script <file>          - JSONファイルからスクリプトを作成
  create-flow <file>            - JSONファイルからフローを作成
  update-flow <path> <file>     - フローを更新
  create-schedule <file>        - スケジュールを作成
  run-script <path> [args]      - スクリプトを実行
  run-flow <path> [args]        - フローを実行
  job-status <id>               - ジョブのステータスを確認
  job-result <id>               - ジョブの結果を取得
  pull-script <path> <outfile>  - スクリプトをローカルへ保存
  push-script <path> <infile>   - ローカルファイルをスクリプトへ反映
  pull-flow <path> <outfile>    - フローをローカルJSONへ保存
  push-flow <json-file>         - フローJSONを削除再作成で反映
  pull-all                      - scripts/flowsを一括pullしてstate更新
  status-remote                 - remote_index基準で差分表示
EOF
        ;;
esac
|
||||
|
||||
Reference in New Issue
Block a user