diff --git a/flows/git_sync.flow.json b/flows/git_sync.flow.json new file mode 100644 index 0000000..cb12194 --- /dev/null +++ b/flows/git_sync.flow.json @@ -0,0 +1,24 @@ +{ + "path": "u/antigravity/git_sync", + "summary": "Git Sync Workflow", + "description": "Automatically sync Windmill workflows to Git repository (sync branch)", + "value": { + "modules": [ + { + "id": "a", + "value": { + "lock": "", + "type": "rawscript", + "content": "#!/bin/bash\nset -e\nexport PATH=/usr/bin:/usr/local/bin:/usr/sbin:/sbin:/bin:$PATH\n\nGREEN=\"\\033[0;32m\"\nYELLOW=\"\\033[1;33m\"\nRED=\"\\033[0;31m\"\nNC=\"\\033[0m\"\n\necho -e \"${GREEN}=== Windmill Workflow Git Sync ===${NC}\"\n\nREPO_ROOT=\"/workspace\"\nWMILL_DIR=\"${REPO_ROOT}/workflows\"\n\nif ! command -v wmill &> /dev/null; then\n echo -e \"${YELLOW}Installing windmill-cli...${NC}\"\n npm install -g windmill-cli\n export PATH=$(npm prefix -g)/bin:$PATH\nfi\n\ngit config --global --add safe.directory \"$REPO_ROOT\"\ngit config --global user.email \"bot@keinafarm.net\"\ngit config --global user.name \"Windmill Bot\"\n\n# sync ブランチを使用\nCURRENT_BRANCH=$(git -C \"$REPO_ROOT\" rev-parse --abbrev-ref HEAD)\nif [ \"$CURRENT_BRANCH\" != \"sync\" ]; then\n echo -e \"${YELLOW}Switching to sync branch...${NC}\"\n git -C \"$REPO_ROOT\" fetch origin sync\n git -C \"$REPO_ROOT\" checkout sync\nfi\n\necho -e \"${YELLOW}Pulling from origin/sync...${NC}\"\ngit -C \"$REPO_ROOT\" pull --rebase origin sync || {\n echo -e \"${RED}Failed to pull from remote. 
Continuing...${NC}\"\n}\n\necho -e \"${YELLOW}Pulling from Windmill...${NC}\"\ncd \"$WMILL_DIR\"\nwmill sync pull --config-dir /workspace/wmill_config --skip-variables --skip-secrets --skip-resources --yes || exit 1\n\ncd \"$REPO_ROOT\"\nif [[ -n $(git status --porcelain) ]]; then\n echo -e \"${YELLOW}Changes detected, committing to Git...${NC}\"\n git add -A\n TIMESTAMP=$(date \"+%Y-%m-%d %H:%M:%S\")\n git commit -m \"Auto-sync: ${TIMESTAMP}\"\n echo -e \"${YELLOW}Pushing to Gitea (sync branch)...${NC}\"\n git push origin sync || {\n echo -e \"${RED}Failed to push.${NC}\"\n exit 1\n }\n echo -e \"${GREEN}Changes pushed to Gitea (sync branch)${NC}\"\nelse\n echo -e \"${GREEN}No changes detected${NC}\"\nfi\n\necho -e \"${GREEN}=== Sync Complete ===${NC}\"\n", + "language": "bash", + "input_transforms": {} + } + } + ] + }, + "schema": { + "type": "object", + "properties": {}, + "required": [] + } +} diff --git a/flows/hourly_chime.flow.json b/flows/hourly_chime.flow.json new file mode 100644 index 0000000..0cdeb91 --- /dev/null +++ b/flows/hourly_chime.flow.json @@ -0,0 +1,38 @@ +{ + "path": "u/akiracraftwork/hourly_chime", + "summary": "鳩時計機能", + "description": "", + "value": { + "modules": [ + { + "id": "a", + "value": { + "lock": "{\n \"dependencies\": {}\n}\n//bun.lock\n", + "type": "rawscript", + "content": "export async function main(\n device: string = \"オフィスの右エコー\",\n prefix: string = \"現在時刻は\",\n suffix: string = \"です\"\n) {\n const now = new Date();\n const hhmm = new Intl.DateTimeFormat(\"ja-JP\", {\n timeZone: \"Asia/Tokyo\",\n hour: \"2-digit\",\n minute: \"2-digit\",\n hour12: false,\n }).format(now); // 例: 09:30\n\n const [h, m] = hhmm.split(\":\");\n const text = `${prefix}${Number(h)}時${Number(m)}分${suffix}`;\n\n const res = await fetch(\"http://alexa_api:3500/speak\", {\n method: \"POST\",\n headers: { \"Content-Type\": \"application/json\" },\n body: JSON.stringify({ device, text }),\n });\n\n if (!res.ok) {\n const body = await res.text();\n throw 
new Error(`alexa-api error ${res.status}: ${body}`);\n }\n\n return { ok: true, device, text };\n}\n", + "language": "bun", + "input_transforms": { + "device": { + "type": "static", + "value": "オフィスの右エコー" + }, + "prefix": { + "type": "static", + "value": "現在時刻は" + }, + "suffix": { + "type": "static", + "value": "です" + } + } + } + } + ] + }, + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "properties": {}, + "required": [], + "type": "object" + } +} diff --git a/flows/konnnichiha.flow.json b/flows/konnnichiha.flow.json new file mode 100644 index 0000000..fb7edef --- /dev/null +++ b/flows/konnnichiha.flow.json @@ -0,0 +1,24 @@ +{ + "path": "f/dev/konnnichiha", + "summary": "Print greeting", + "description": "", + "value": { + "modules": [ + { + "id": "a", + "value": { + "lock": "# py: 3.12\n", + "type": "rawscript", + "content": "def main():\n print('こんにちは、世界')", + "language": "python3", + "input_transforms": {} + } + } + ] + }, + "schema": { + "type": "object", + "properties": {}, + "required": [] + } +} diff --git a/flows/mail_filter.flow.json b/flows/mail_filter.flow.json index b70eb42..c3ffd9d 100644 --- a/flows/mail_filter.flow.json +++ b/flows/mail_filter.flow.json @@ -6,14 +6,14 @@ "modules": [ { "id": "a", - "summary": "メール取得・判定・通知", "value": { + "lock": "# py: 3.12\nanyio==4.12.1\ncertifi==2026.1.4\nh11==0.16.0\nhttpcore==1.0.9\nhttpx==0.28.1\nidna==3.11\ntyping-extensions==4.15.0\nwmill==1.642.0", "type": "rawscript", - "language": "python3", "content": "import imaplib\nimport email\nimport email.header\nimport json\nimport re\nimport ssl\nimport urllib.request\nimport urllib.parse\nfrom datetime import datetime, timezone, timedelta\nfrom email.utils import parsedate_to_datetime\nimport wmill\n\nJST = timezone(timedelta(hours=9))\n\n# ============================================================\n# アカウント設定\n# 新しいアカウントを追加する際は enabled: True にする\n# ============================================================\nACCOUNTS = [\n {\n 
\"name\": \"gmail\",\n \"account_code\": \"gmail\",\n \"host\": \"imap.gmail.com\",\n \"port\": 993,\n \"user_var\": \"u/admin/GMAIL_IMAP_USER\",\n \"pass_var\": \"u/admin/GMAIL_IMAP_PASSWORD\",\n \"last_uid_var\": \"u/admin/MAIL_FILTER_GMAIL_LAST_UID\",\n \"mailbox\": \"[Gmail]/All Mail\",\n \"enabled\": True,\n },\n {\n \"name\": \"gmail_service\",\n \"account_code\": \"gmail_service\",\n \"host\": \"imap.gmail.com\",\n \"port\": 993,\n \"user_var\": \"u/admin/GMAIL2_IMAP_USER\",\n \"pass_var\": \"u/admin/GMAIL2_IMAP_PASSWORD\",\n \"last_uid_var\": \"u/admin/MAIL_FILTER_GMAIL2_LAST_UID\",\n \"mailbox\": \"[Gmail]/All Mail\",\n \"enabled\": True,\n },\n # Hotmail テスト後に有効化\n # {\n # \"name\": \"hotmail\",\n # \"account_code\": \"hotmail\",\n # \"host\": \"outlook.office365.com\",\n # \"port\": 993,\n # \"user_var\": \"u/admin/HOTMAIL_IMAP_USER\",\n # \"pass_var\": \"u/admin/HOTMAIL_IMAP_PASSWORD\",\n # \"last_uid_var\": \"u/admin/MAIL_FILTER_HOTMAIL_LAST_UID\",\n # \"enabled\": False,\n # },\n # Xserver (keinafarm.com) 6アカウント\n {\n \"name\": \"xserver_akiracraftwork\",\n \"account_code\": \"xserver\",\n \"host\": \"sv579.xserver.jp\",\n \"port\": 993,\n \"user_var\": \"u/admin/XSERVER1_IMAP_USER\",\n \"pass_var\": \"u/admin/XSERVER1_IMAP_PASSWORD\",\n \"last_uid_var\": \"u/admin/MAIL_FILTER_XSERVER1_LAST_UID\",\n \"enabled\": True,\n },\n {\n \"name\": \"xserver_service\",\n \"account_code\": \"xserver\",\n \"host\": \"sv579.xserver.jp\",\n \"port\": 993,\n \"user_var\": \"u/admin/XSERVER2_IMAP_USER\",\n \"pass_var\": \"u/admin/XSERVER2_IMAP_PASSWORD\",\n \"last_uid_var\": \"u/admin/MAIL_FILTER_XSERVER2_LAST_UID\",\n \"enabled\": True,\n },\n {\n \"name\": \"xserver_midori\",\n \"account_code\": \"xserver\",\n \"host\": \"sv579.xserver.jp\",\n \"port\": 993,\n \"user_var\": \"u/admin/XSERVER3_IMAP_USER\",\n \"pass_var\": \"u/admin/XSERVER3_IMAP_PASSWORD\",\n \"last_uid_var\": \"u/admin/MAIL_FILTER_XSERVER3_LAST_UID\",\n \"enabled\": True,\n },\n {\n \"name\": 
\"xserver_kouseiren\",\n \"account_code\": \"xserver\",\n \"host\": \"sv579.xserver.jp\",\n \"port\": 993,\n \"user_var\": \"u/admin/XSERVER4_IMAP_USER\",\n \"pass_var\": \"u/admin/XSERVER4_IMAP_PASSWORD\",\n \"last_uid_var\": \"u/admin/MAIL_FILTER_XSERVER4_LAST_UID\",\n \"enabled\": True,\n },\n {\n \"name\": \"xserver_post\",\n \"account_code\": \"xserver\",\n \"host\": \"sv579.xserver.jp\",\n \"port\": 993,\n \"user_var\": \"u/admin/XSERVER5_IMAP_USER\",\n \"pass_var\": \"u/admin/XSERVER5_IMAP_PASSWORD\",\n \"last_uid_var\": \"u/admin/MAIL_FILTER_XSERVER5_LAST_UID\",\n \"enabled\": True,\n },\n {\n \"name\": \"xserver_sales\",\n \"account_code\": \"xserver\",\n \"host\": \"sv579.xserver.jp\",\n \"port\": 993,\n \"user_var\": \"u/admin/XSERVER6_IMAP_USER\",\n \"pass_var\": \"u/admin/XSERVER6_IMAP_PASSWORD\",\n \"last_uid_var\": \"u/admin/MAIL_FILTER_XSERVER6_LAST_UID\",\n \"enabled\": True,\n },\n]\n\n\ndef main():\n # 共通変数取得\n api_key = wmill.get_variable(\"u/admin/KEINASYSTEM_API_KEY\")\n api_url = wmill.get_variable(\"u/admin/KEINASYSTEM_API_URL\").rstrip(\"/\")\n gemini_key = wmill.get_variable(\"u/admin/GEMINI_API_KEY\")\n line_token = wmill.get_variable(\"u/admin/LINE_CHANNEL_ACCESS_TOKEN\")\n line_to = wmill.get_variable(\"u/admin/LINE_TO\")\n\n total_processed = 0\n total_notified = 0\n\n for account in ACCOUNTS:\n if not account[\"enabled\"]:\n continue\n print(f\"[{account['name']}] 処理開始\")\n try:\n processed, notified = process_account(\n account, api_key, api_url, gemini_key, line_token, line_to\n )\n total_processed += processed\n total_notified += notified\n print(f\"[{account['name']}] 処理完了: {processed}件処理, {notified}件通知\")\n except Exception as e:\n print(f\"[{account['name']}] エラー: {e}\")\n # 1アカウントが失敗しても他のアカウントは継続\n\n return {\n \"total_processed\": total_processed,\n \"total_notified\": total_notified,\n }\n\n\ndef process_account(account, api_key, api_url, gemini_key, line_token, line_to):\n user = wmill.get_variable(account[\"user_var\"])\n 
password = wmill.get_variable(account[\"pass_var\"])\n\n # 前回の最終UID取得\n try:\n last_uid_str = wmill.get_variable(account[\"last_uid_var\"])\n last_uid = int(last_uid_str) if last_uid_str and last_uid_str != \"0\" else None\n except Exception:\n last_uid = None\n\n # IMAP接続\n ssl_ctx = ssl.create_default_context()\n mail = imaplib.IMAP4_SSL(account[\"host\"], account[\"port\"], ssl_context=ssl_ctx)\n mail.login(user, password)\n mailbox = account.get(\"mailbox\", \"INBOX\")\n imap_mailbox = resolve_mailbox(mail, mailbox)\n\n try:\n if last_uid is None:\n # 初回実行: 現在の最大UIDを記録して終了(既存メールは処理しない)\n _, data = mail.uid(\"SEARCH\", None, \"ALL\")\n all_uids = data[0].split() if data[0] else []\n max_uid = int(all_uids[-1]) if all_uids else 0\n wmill.set_variable(account[\"last_uid_var\"], str(max_uid))\n print(f\"[{account['name']}] 初回実行: 最大UID={max_uid} を記録、既存メールはスキップ\")\n return 0, 0\n\n # last_uid より大きい UID を検索\n search_criterion = f\"UID {last_uid + 1}:*\"\n _, data = mail.uid(\"SEARCH\", None, search_criterion)\n raw_uids = data[0].split() if data[0] else []\n new_uids = [u for u in raw_uids if int(u) > last_uid]\n\n if not new_uids:\n print(f\"[{account['name']}] 新着メールなし\")\n return 0, 0\n\n print(f\"[{account['name']}] 新着{len(new_uids)}件\")\n\n processed = 0\n notified = 0\n max_processed_uid = last_uid\n\n for uid_bytes in new_uids:\n uid = int(uid_bytes)\n try:\n result = process_message(\n mail, uid, account,\n api_key, api_url, gemini_key, line_token, line_to\n )\n processed += 1\n if result == \"notified\":\n notified += 1\n max_processed_uid = max(max_processed_uid, uid)\n except Exception as e:\n print(f\"[{account['name']}] UID={uid} 処理エラー: {e}\")\n # 個別メッセージのエラーは継続、UIDは進めない\n\n # 処理済み最大UIDを保存(正常完了時のみ)\n if max_processed_uid > last_uid:\n wmill.set_variable(account[\"last_uid_var\"], str(max_processed_uid))\n\n return processed, notified\n finally:\n mail.logout()\n\n\ndef process_message(mail, uid, account, api_key, api_url, gemini_key, line_token, 
line_to):\n \"\"\"メッセージを1通処理。戻り値: 'skipped' / 'not_important' / 'notified'\"\"\"\n account_code = account[\"account_code\"]\n forwarding_map = account.get(\"forwarding_map\", {})\n\n # メール取得\n _, data = mail.uid(\"FETCH\", str(uid), \"(RFC822)\")\n if not data or not data[0]:\n return \"skipped\"\n\n raw_email = data[0][1]\n msg = email.message_from_bytes(raw_email)\n\n # ヘッダー解析\n message_id = msg.get(\"Message-ID\", \"\").strip()\n if not message_id:\n message_id = f\"{account_code}-uid-{uid}\"\n\n sender_raw = msg.get(\"From\", \"\")\n sender_email_addr = extract_email_address(sender_raw)\n sender_domain = sender_email_addr.split(\"@\")[-1] if \"@\" in sender_email_addr else \"\"\n\n subject = decode_header_value(msg.get(\"Subject\", \"(件名なし)\"))\n\n date_str = msg.get(\"Date\", \"\")\n try:\n received_at = parsedate_to_datetime(date_str).isoformat()\n except Exception:\n received_at = datetime.now(JST).isoformat()\n\n body_preview = extract_body_preview(msg, max_chars=500)\n\n # 転送検出: To:ヘッダーのドメインが forwarding_map に存在する場合は account_code を上書き\n if forwarding_map:\n to_raw = msg.get(\"To\", \"\")\n if to_raw:\n to_addr = extract_email_address(to_raw)\n to_domain = to_addr.split(\"@\")[-1] if \"@\" in to_addr else \"\"\n if to_domain in forwarding_map:\n account_code = forwarding_map[to_domain]\n print(f\" [転送検出] To:{to_addr} → account: {account_code}\")\n\n print(f\" From: {sender_email_addr}, Subject: {subject[:50]}\")\n\n # --- ステップ1: 送信者ルール確認 ---\n rule_result = call_api_get(api_key, api_url, \"/api/mail/sender-rule/\", {\n \"email\": sender_email_addr,\n \"domain\": sender_domain,\n })\n\n if rule_result.get(\"matched\"):\n rule = rule_result[\"rule\"]\n\n if rule == \"never_notify\":\n print(f\" → never_notify ルール一致、スキップ\")\n return \"skipped\"\n\n elif rule == \"always_notify\":\n print(f\" → always_notify ルール一致、即通知\")\n result = post_email(api_key, api_url, {\n \"account\": account_code,\n \"message_id\": message_id,\n \"sender_email\": sender_email_addr,\n 
\"sender_domain\": sender_domain,\n \"subject\": subject,\n \"body_preview\": body_preview,\n \"received_at\": received_at,\n \"llm_verdict\": \"important\",\n })\n if result.get(\"feedback_url\"):\n send_line_notification(line_token, line_to, account_code, sender_email_addr, subject, result[\"feedback_url\"])\n return \"notified\"\n return \"skipped\"\n\n # --- ステップ2: LLM判定 ---\n context = call_api_get(api_key, api_url, \"/api/mail/sender-context/\", {\n \"email\": sender_email_addr,\n \"domain\": sender_domain,\n })\n verdict = judge_with_llm(gemini_key, sender_email_addr, subject, body_preview, context)\n print(f\" → LLM判定: {verdict}\")\n\n # --- ステップ3: Keinasystemに記録 ---\n result = post_email(api_key, api_url, {\n \"account\": account_code,\n \"message_id\": message_id,\n \"sender_email\": sender_email_addr,\n \"sender_domain\": sender_domain,\n \"subject\": subject,\n \"body_preview\": body_preview,\n \"received_at\": received_at,\n \"llm_verdict\": verdict,\n })\n\n if verdict == \"important\" and result.get(\"feedback_url\"):\n send_line_notification(line_token, line_to, account_code, sender_email_addr, subject, result[\"feedback_url\"])\n return \"notified\"\n\n return \"not_important\"\n\n\n# ============================================================\n# メールボックス解決\n# ============================================================\n\ndef resolve_mailbox(mail, mailbox):\n \"\"\"メールボックスを選択し SELECT する。\n INBOX はそのまま、それ以外は指定名 -> \\\\All 属性でフォールバック。\n \"\"\"\n if mailbox == \"INBOX\":\n typ, data = mail.select(\"INBOX\")\n if typ != 'OK':\n raise Exception(f\"SELECT INBOX failed: {data}\")\n return \"INBOX\"\n\n # まず指定名で試行\n imap_name = '\"' + mailbox + '\"'\n typ, data = mail.select(imap_name)\n if typ == 'OK':\n return imap_name\n\n # 失敗した場合: \\\\All 属性を持つメールボックスを自動検出\n print(f\" [INFO] {mailbox} not found, searching for \\\\\\\\All mailbox...\")\n typ2, mboxes = mail.list()\n if typ2 == 'OK':\n for mb in mboxes:\n if not mb:\n continue\n mb_str = mb.decode() if 
isinstance(mb, bytes) else mb\n if '\\\\\\\\All' in mb_str or '\\\\All' in mb_str:\n # \"(attrs) \\\".\\\" \\\"name\\\"\" 形式から名前を抽出\n parts = mb_str.rsplit('\"', 2)\n if len(parts) >= 2 and parts[-2]:\n found = parts[-2]\n else:\n found = mb_str.split()[-1].strip('\"')\n print(f\" [INFO] Found All Mail mailbox: {found}\")\n imap_found = '\"' + found + '\"'\n typ3, data3 = mail.select(imap_found)\n if typ3 == 'OK':\n return imap_found\n raise Exception(f\"Could not select any All Mail mailbox (tried: {mailbox})\")\n\n\n# ============================================================\n# APIヘルパー\n# ============================================================\n\ndef _make_ssl_ctx():\n ctx = ssl.create_default_context()\n ctx.check_hostname = False\n ctx.verify_mode = ssl.CERT_NONE\n return ctx\n\n\ndef call_api_get(api_key, api_url, path, params):\n qs = urllib.parse.urlencode(params)\n url = f\"{api_url}{path}?{qs}\"\n req = urllib.request.Request(url, headers={\"X-API-Key\": api_key})\n with urllib.request.urlopen(req, context=_make_ssl_ctx(), timeout=10) as resp:\n return json.loads(resp.read().decode(\"utf-8\"))\n\n\ndef post_email(api_key, api_url, data):\n url = f\"{api_url}/api/mail/emails/\"\n payload = json.dumps(data).encode(\"utf-8\")\n req = urllib.request.Request(\n url,\n data=payload,\n headers={\"X-API-Key\": api_key, \"Content-Type\": \"application/json\"},\n method=\"POST\",\n )\n try:\n with urllib.request.urlopen(req, context=_make_ssl_ctx(), timeout=10) as resp:\n return json.loads(resp.read().decode(\"utf-8\"))\n except urllib.error.HTTPError as e:\n body = e.read().decode(\"utf-8\")\n if e.code == 400 and \"message_id\" in body:\n # 重複message_idは正常(再実行時の冪等性)\n print(f\" 重複メール、スキップ\")\n return {}\n raise\n\n\nACCOUNT_LABELS = {\n \"gmail\": \"Gmail (メイン)\",\n \"gmail_service\": \"Gmail (サービス用)\",\n \"hotmail\": \"Hotmail\",\n \"xserver\": \"Xserver\",\n}\n\ndef send_line_notification(line_token, line_to, account_code, sender_email_addr, subject, 
feedback_url):\n account_label = ACCOUNT_LABELS.get(account_code, account_code)\n message = (\n f\"📧 重要なメールが届きました\\n\\n\"\n f\"宛先: {account_label}\\n\"\n f\"差出人: {sender_email_addr}\\n\"\n f\"件名: {subject}\\n\\n\"\n f\"フィードバック:\\n{feedback_url}\"\n )\n payload = json.dumps({\n \"to\": line_to,\n \"messages\": [{\"type\": \"text\", \"text\": message}],\n }).encode(\"utf-8\")\n req = urllib.request.Request(\n \"https://api.line.me/v2/bot/message/push\",\n data=payload,\n headers={\n \"Authorization\": f\"Bearer {line_token}\",\n \"Content-Type\": \"application/json\",\n },\n method=\"POST\",\n )\n with urllib.request.urlopen(req, timeout=30) as resp:\n resp.read()\n\n\n# ============================================================\n# LLM判定(Gemini API)\n# ============================================================\n\ndef judge_with_llm(gemini_key, sender_email_addr, subject, body_preview, context):\n \"\"\"農家にとって重要なメールか判定。'important' または 'not_important' を返す。\"\"\"\n\n context_text = \"\"\n total = context.get(\"total_notified\", 0)\n if total > 0:\n context_text = (\n f\"\\n\\n[この送信者の過去データ] \"\n f\"通知済み{total}件: \"\n f\"重要{context.get('important', 0)}件 / \"\n f\"普通{context.get('not_important', 0)}件 / \"\n f\"通知不要{context.get('never_notify', 0)}件 / \"\n f\"未評価{context.get('no_feedback', 0)}件\"\n )\n\n user_message = (\n f\"送信者: {sender_email_addr}\\n\"\n f\"件名: {subject}\\n\"\n f\"本文冠頭:\\n{body_preview}\"\n f\"{context_text}\\n\\n\"\n f\"このメールは農家にとって重要ですか?\\n\"\n f\"1: 重要(要確認)\\n\"\n f\"2: 重要でない(営業・通知等)\\n\"\n f\"数字1文字のみで答えてください。\"\n )\n\n payload = json.dumps({\n \"system_instruction\": {\n \"parts\": [{\"text\": \"あなたは農家のメールフィルタリングアシスタントです。メールが重要かどうかを判定してください。\"}]\n },\n \"contents\": [{\n \"role\": \"user\",\n \"parts\": [{\"text\": user_message}]\n }],\n \"generationConfig\": {\n \"maxOutputTokens\": 10,\n \"temperature\": 0\n }\n }).encode(\"utf-8\")\n\n url = 
f\"https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent?key={gemini_key}\"\n req = urllib.request.Request(\n url,\n data=payload,\n headers={\"Content-Type\": \"application/json\"},\n method=\"POST\",\n )\n with urllib.request.urlopen(req, timeout=30) as resp:\n result = json.loads(resp.read().decode(\"utf-8\"))\n answer = result[\"candidates\"][0][\"content\"][\"parts\"][0][\"text\"].strip()\n\n return \"important\" if answer.startswith(\"1\") else \"not_important\"\n\n\n# ============================================================\n# メール解析ヘルパー\n# ============================================================\n\ndef extract_email_address(raw):\n \"\"\"'Name ' または 'email@example.com' からアドレスを抽出\"\"\"\n match = re.search(r'<([^>]+)>', raw)\n if match:\n return match.group(1).strip().lower()\n return raw.strip().lower()\n\n\ndef decode_header_value(value):\n \"\"\"MIMEエンコードされたヘッダー値をデコード\"\"\"\n if not value:\n return \"\"\n parts = email.header.decode_header(value)\n decoded = []\n for part, charset in parts:\n if isinstance(part, bytes):\n decoded.append(part.decode(charset or \"utf-8\", errors=\"replace\"))\n else:\n decoded.append(part)\n return \"\".join(decoded)\n\n\ndef extract_body_preview(msg, max_chars=500):\n \"\"\"メール本文の冒頭を抽出(テキスト優先、HTMLフォールバック)\"\"\"\n text_content = \"\"\n html_content = \"\"\n\n if msg.is_multipart():\n for part in msg.walk():\n ctype = part.get_content_type()\n if ctype == \"text/plain\" and not text_content:\n charset = part.get_content_charset() or \"utf-8\"\n try:\n text_content = part.get_payload(decode=True).decode(charset, errors=\"replace\")\n except Exception:\n pass\n elif ctype == \"text/html\" and not html_content:\n charset = part.get_content_charset() or \"utf-8\"\n try:\n html_content = part.get_payload(decode=True).decode(charset, errors=\"replace\")\n except Exception:\n pass\n else:\n charset = msg.get_content_charset() or \"utf-8\"\n try:\n content = 
msg.get_payload(decode=True).decode(charset, errors=\"replace\")\n if msg.get_content_type() == \"text/html\":\n html_content = content\n else:\n text_content = content\n except Exception:\n pass\n\n if text_content:\n # フッター・区切り線を除去\n text = re.sub(r'\\n[-_=]{10,}\\n.*', '', text_content, flags=re.DOTALL)\n text = re.sub(r'\\s+', ' ', text).strip()\n return text[:max_chars]\n\n if html_content:\n # HTMLタグを除去\n text = re.sub(r'<[^>]+>', ' ', html_content)\n text = re.sub(r'\\s+', ' ', text).strip()\n return text[:max_chars]\n\n return \"\"\n", - "input_transforms": {}, - "lock": "" - } + "language": "python3", + "input_transforms": {} + }, + "summary": "メール取得・判定・通知" } ] }, @@ -24,4 +24,4 @@ "properties": {}, "required": [] } -} \ No newline at end of file +} diff --git a/flows/shiraou_notification.flow.json b/flows/shiraou_notification.flow.json index 12a2a7e..8c26034 100644 --- a/flows/shiraou_notification.flow.json +++ b/flows/shiraou_notification.flow.json @@ -6,14 +6,14 @@ "modules": [ { "id": "a", - "summary": "変更確認・LINE通知", "value": { + "lock": "# py: 3.12\nanyio==4.12.1\ncertifi==2026.1.4\nh11==0.16.0\nhttpcore==1.0.9\nhttpx==0.28.1\nidna==3.11\ntyping-extensions==4.15.0\nwmill==1.640.0", "type": "rawscript", - "language": "python3", "content": "import urllib.request\nimport urllib.parse\nimport json\nimport ssl\nfrom datetime import datetime, timezone, timedelta\nimport wmill\n\nJST = timezone(timedelta(hours=9))\n\n\ndef main():\n # シークレット取得\n api_key = wmill.get_variable(\"u/admin/NOTIFICATION_API_KEY\")\n line_token = wmill.get_variable(\"u/admin/LINE_CHANNEL_ACCESS_TOKEN\")\n line_to = wmill.get_variable(\"u/admin/LINE_TO\")\n\n # 前回実行時刻を取得(初回は現在時刻 - 10分)\n try:\n last_checked = wmill.get_variable(\"u/admin/SHIRAOU_LAST_CHECKED_AT\")\n if not last_checked:\n last_checked = None\n except Exception:\n last_checked = None\n\n if last_checked:\n since = last_checked\n else:\n since = (datetime.now(JST) - timedelta(minutes=10)).isoformat()\n\n print(f\"[通知] 
変更確認: since={since}\")\n\n # API呼び出し\n ssl_ctx = ssl.create_default_context()\n ssl_ctx.check_hostname = False\n ssl_ctx.verify_mode = ssl.CERT_NONE\n\n params = urllib.parse.urlencode({\"since\": since})\n url = f\"https://shiraou.keinafarm.net/reservations/api/changes/?{params}\"\n\n req = urllib.request.Request(url, headers={\"X-API-Key\": api_key})\n with urllib.request.urlopen(req, context=ssl_ctx, timeout=30) as resp:\n data = json.loads(resp.read().decode(\"utf-8\"))\n\n checked_at = data[\"checked_at\"]\n reservations = data.get(\"reservations\", [])\n usages = data.get(\"usages\", [])\n\n print(f\"[通知] checked_at={checked_at}, 予約={len(reservations)}件, 実績={len(usages)}件\")\n\n # 変更があればLINE通知(エラー時は状態を更新しない)\n if reservations or usages:\n message = _format_message(reservations, usages)\n _send_line(line_token, line_to, message)\n print(\"[通知] LINE送信完了\")\n else:\n print(\"[通知] 変更なし、通知スキップ\")\n\n # 正常完了時のみ状態更新\n wmill.set_variable(\"u/admin/SHIRAOU_LAST_CHECKED_AT\", checked_at)\n print(f\"[通知] last_checked_at更新: {checked_at}\")\n\n return {\n \"since\": since,\n \"checked_at\": checked_at,\n \"reservations_count\": len(reservations),\n \"usages_count\": len(usages),\n \"notified\": bool(reservations or usages),\n }\n\n\ndef _format_message(reservations, usages):\n lines = [\"\\U0001f4cb 営農システム 変更通知\\n\"]\n\n OP_R = {\n \"create\": (\"\\U0001f7e2\", \"予約作成\"),\n \"update\": (\"\\U0001f535\", \"予約変更\"),\n \"cancel\": (\"\\U0001f534\", \"予約キャンセル\"),\n }\n OP_U = {\n \"create\": (\"\\U0001f7e2\", \"実績登録\"),\n \"update\": (\"\\U0001f535\", \"実績修正\"),\n \"delete\": (\"\\U0001f534\", \"実績削除\"),\n }\n\n for r in reservations:\n start = r[\"start_at\"][:16].replace(\"T\", \" \")\n end = r[\"end_at\"][:16].replace(\"T\", \" \")\n icon, label = OP_R.get(r[\"operation\"], (\"\\u26aa\", r[\"operation\"]))\n lines += [\n f\"{icon} {label}\",\n f\" 機械: {r['machine_name']}\",\n f\" 利用者: {r['user_name']}\",\n f\" 日時: {start} \\uff5e {end}\",\n ]\n if r.get(\"reason\"):\n 
lines.append(f\" 理由: {r['reason']}\")\n lines.append(\"\")\n\n for u in usages:\n start = u[\"start_at\"][:16].replace(\"T\", \" \")\n icon, label = OP_U.get(u[\"operation\"], (\"\\u26aa\", u[\"operation\"]))\n lines += [\n f\"{icon} {label}\",\n f\" 機械: {u['machine_name']}\",\n f\" 利用者: {u['user_name']}\",\n f\" 利用量: {u['amount']}{u['unit']}\",\n f\" 日: {start[:10]}\",\n ]\n if u.get(\"reason\"):\n lines.append(f\" 理由: {u['reason']}\")\n lines.append(\"\")\n\n return \"\\n\".join(lines).strip()\n\n\ndef _send_line(token, to, message):\n payload = json.dumps({\n \"to\": to,\n \"messages\": [{\"type\": \"text\", \"text\": message}],\n }).encode(\"utf-8\")\n\n req = urllib.request.Request(\n \"https://api.line.me/v2/bot/message/push\",\n data=payload,\n headers={\n \"Authorization\": f\"Bearer {token}\",\n \"Content-Type\": \"application/json\",\n },\n method=\"POST\",\n )\n with urllib.request.urlopen(req, timeout=30) as resp:\n return resp.read().decode(\"utf-8\")\n", - "input_transforms": {}, - "lock": "" - } + "language": "python3", + "input_transforms": {} + }, + "summary": "変更確認・LINE通知" } ] }, diff --git a/flows/system_heartbeat.flow.json b/flows/system_heartbeat.flow.json index 8d97cd4..ee152fb 100644 --- a/flows/system_heartbeat.flow.json +++ b/flows/system_heartbeat.flow.json @@ -6,70 +6,70 @@ "modules": [ { "id": "a", - "summary": "Step1: 診断データ生成", "value": { + "lock": "# py: 3.12\n", "type": "rawscript", - "language": "python3", "content": "import uuid\nfrom datetime import datetime, timezone\n\ndef main():\n \"\"\"診断データを生成する\"\"\"\n now = datetime.now(timezone.utc)\n run_id = str(uuid.uuid4())\n check_value = 2 + 2\n \n result = {\n \"timestamp\": now.isoformat(),\n \"run_id\": run_id,\n \"check\": check_value,\n \"python_version\": __import__('sys').version\n }\n print(f\"[Step1] 診断データ生成完了\")\n print(f\" run_id: {run_id}\")\n print(f\" timestamp: {now.isoformat()}\")\n print(f\" check: {check_value}\")\n return result\n", - "input_transforms": {}, - 
"lock": "" - } + "language": "python3", + "input_transforms": {} + }, + "summary": "Step1: 診断データ生成" }, { "id": "b", - "summary": "Step2: データ検証", "value": { + "lock": "# py: 3.12\n", "type": "rawscript", - "language": "python3", "content": "from datetime import datetime, timezone\n\ndef main(step1_result: dict):\n \"\"\"Step1の結果を検証する\"\"\"\n errors = []\n \n # 計算チェック\n if step1_result.get(\"check\") != 4:\n errors.append(f\"計算エラー: expected 4, got {step1_result.get('check')}\")\n \n # run_idの存在チェック\n if not step1_result.get(\"run_id\"):\n errors.append(\"run_idが存在しない\")\n \n # timestampの存在チェック\n if not step1_result.get(\"timestamp\"):\n errors.append(\"timestampが存在しない\")\n \n if errors:\n error_msg = \"; \".join(errors)\n print(f\"[Step2] 検証失敗: {error_msg}\")\n raise Exception(f\"検証失敗: {error_msg}\")\n \n print(f\"[Step2] データ検証OK\")\n print(f\" 計算チェック: 2+2={step1_result['check']} ✓\")\n print(f\" run_id: {step1_result['run_id']} ✓\")\n print(f\" timestamp: {step1_result['timestamp']} ✓\")\n \n return {\n \"verification\": \"PASS\",\n \"step1_data\": step1_result\n }\n", + "language": "python3", "input_transforms": { "step1_result": { - "type": "javascript", - "expr": "results.a" + "expr": "results.a", + "type": "javascript" } - }, - "lock": "" - } + } + }, + "summary": "Step2: データ検証" }, { "id": "c", - "summary": "Step3: HTTPヘルスチェック", "value": { + "lock": "# py: 3.12\n", "type": "rawscript", - "language": "python3", "content": "import urllib.request\nimport ssl\n\ndef main(verification_result: dict):\n \"\"\"Windmillサーバー自身へのHTTPチェック\"\"\"\n url = \"https://windmill.keinafarm.net/api/version\"\n \n # SSL検証をスキップ(自己署名証明書対応)\n ctx = ssl.create_default_context()\n ctx.check_hostname = False\n ctx.verify_mode = ssl.CERT_NONE\n \n try:\n req = urllib.request.Request(url)\n with urllib.request.urlopen(req, context=ctx, timeout=10) as response:\n status_code = response.status\n body = response.read().decode('utf-8')\n except Exception as e:\n print(f\"[Step3] HTTPチェック失敗: 
{e}\")\n raise Exception(f\"HTTPヘルスチェック失敗: {e}\")\n \n print(f\"[Step3] HTTPヘルスチェックOK\")\n print(f\" URL: {url}\")\n print(f\" Status: {status_code}\")\n print(f\" Version: {body}\")\n \n return {\n \"http_check\": \"PASS\",\n \"status_code\": status_code,\n \"server_version\": body\n }\n", + "language": "python3", "input_transforms": { "verification_result": { - "type": "javascript", - "expr": "results.b" + "expr": "results.b", + "type": "javascript" } - }, - "lock": "" - } + } + }, + "summary": "Step3: HTTPヘルスチェック" }, { "id": "d", - "summary": "Step4: 年度判定 & 最終レポート", "value": { + "lock": "# py: 3.12\n", "type": "rawscript", - "language": "python3", "content": "from datetime import datetime, timezone\n\ndef main(step1_data: dict, verification: dict, http_check: dict):\n \"\"\"年度判定と最終診断レポートを生成\"\"\"\n now = datetime.now(timezone.utc)\n \n # 日本の年度判定(4月始まり)\n fiscal_year = now.year if now.month >= 4 else now.year - 1\n \n report = {\n \"status\": \"ALL OK\",\n \"fiscal_year\": fiscal_year,\n \"diagnostics\": {\n \"data_generation\": \"PASS\",\n \"data_verification\": verification.get(\"verification\", \"UNKNOWN\"),\n \"http_health\": http_check.get(\"http_check\", \"UNKNOWN\"),\n \"server_version\": http_check.get(\"server_version\", \"UNKNOWN\")\n },\n \"run_id\": step1_data.get(\"run_id\"),\n \"started_at\": step1_data.get(\"timestamp\"),\n \"completed_at\": now.isoformat()\n }\n \n print(\"\")\n print(\"========================================\")\n print(\" Windmill Heartbeat - 診断レポート\")\n print(\"========================================\")\n print(f\" Status: {report['status']}\")\n print(f\" 年度: {fiscal_year}年度\")\n print(f\" Run ID: {report['run_id']}\")\n print(f\" Server: {report['diagnostics']['server_version']}\")\n print(f\" 開始: {report['started_at']}\")\n print(f\" 完了: {report['completed_at']}\")\n print(\" ────────────────────────────────────\")\n print(f\" データ生成: PASS ✓\")\n print(f\" データ検証: {report['diagnostics']['data_verification']} ✓\")\n print(f\" 
HTTP確認: {report['diagnostics']['http_health']} ✓\")\n print(\"========================================\")\n print(\"\")\n \n return report\n", + "language": "python3", "input_transforms": { + "http_check": { + "expr": "results.c", + "type": "javascript" + }, "step1_data": { - "type": "javascript", - "expr": "results.a" + "expr": "results.a", + "type": "javascript" }, "verification": { - "type": "javascript", - "expr": "results.b" - }, - "http_check": { - "type": "javascript", - "expr": "results.c" + "expr": "results.b", + "type": "javascript" } - }, - "lock": "" - } + } + }, + "summary": "Step4: 年度判定 & 最終レポート" } ] }, diff --git a/flows/textout.flow.json b/flows/textout.flow.json new file mode 100644 index 0000000..543438b --- /dev/null +++ b/flows/textout.flow.json @@ -0,0 +1,24 @@ +{ + "path": "f/dev/textout", + "summary": "Display current time on startup", + "description": "", + "value": { + "modules": [ + { + "id": "a", + "value": { + "lock": "# py: 3.12\n", + "type": "rawscript", + "content": "def main():\n from datetime import datetime\n print(datetime.now().strftime('%H:%M:%S'))", + "language": "python3", + "input_transforms": {} + } + } + ] + }, + "schema": { + "type": "object", + "properties": {}, + "required": [] + } +} diff --git a/flows/weather_sync.flow.json b/flows/weather_sync.flow.json new file mode 100644 index 0000000..31f6c76 --- /dev/null +++ b/flows/weather_sync.flow.json @@ -0,0 +1,27 @@ +{ + "path": "f/weather/weather_sync", + "summary": "Weather Sync - 気象データ日次同期", + "description": "Open-Meteo から昨日の気象データを取得し、Keinasystem DB に保存する。毎朝6時実行。", + "value": { + "modules": [ + { + "id": "a", + "value": { + "lock": "# py: 3.12\nanyio==4.12.1\ncertifi==2026.2.25\ncharset-normalizer==3.4.4\nh11==0.16.0\nhttpcore==1.0.9\nhttpx==0.28.1\nidna==3.11\nrequests==2.32.5\ntyping-extensions==4.15.0\nurllib3==2.6.3\nwmill==1.646.0", + "type": "rawscript", + "content": "import wmill\nimport requests\nimport datetime\n\nLATITUDE = 33.213\nLONGITUDE = 
133.133\nTIMEZONE = \"Asia/Tokyo\"\n\nOPEN_METEO_URL = \"https://archive-api.open-meteo.com/v1/archive\"\nDAILY_VARS = [\n \"temperature_2m_mean\",\n \"temperature_2m_max\",\n \"temperature_2m_min\",\n \"sunshine_duration\",\n \"precipitation_sum\",\n \"wind_speed_10m_max\",\n \"surface_pressure_min\",\n]\n\n\ndef main():\n api_key = wmill.get_variable(\"u/admin/KEINASYSTEM_API_KEY\")\n base_url = wmill.get_variable(\"u/admin/KEINASYSTEM_API_URL\").rstrip(\"/\")\n sync_url = f\"{base_url}/api/weather/sync/\"\n\n yesterday = (datetime.date.today() - datetime.timedelta(days=1)).isoformat()\n print(f\"Fetching weather data for {yesterday} ...\")\n\n params = {\n \"latitude\": LATITUDE,\n \"longitude\": LONGITUDE,\n \"start_date\": yesterday,\n \"end_date\": yesterday,\n \"daily\": DAILY_VARS,\n \"timezone\": TIMEZONE,\n }\n resp = requests.get(OPEN_METEO_URL, params=params, timeout=30)\n if resp.status_code != 200:\n raise Exception(f\"Open-Meteo API error: {resp.status_code} {resp.text[:300]}\")\n\n daily = resp.json().get(\"daily\", {})\n dates = daily.get(\"time\", [])\n if not dates:\n print(\"No data returned from Open-Meteo.\")\n return {\"status\": \"no_data\"}\n\n sunshine_raw = daily.get(\"sunshine_duration\", [])\n records = []\n for i, d in enumerate(dates):\n sun_sec = sunshine_raw[i]\n records.append({\n \"date\": d,\n \"temp_mean\": daily[\"temperature_2m_mean\"][i],\n \"temp_max\": daily[\"temperature_2m_max\"][i],\n \"temp_min\": daily[\"temperature_2m_min\"][i],\n \"sunshine_h\": round(sun_sec / 3600, 2) if sun_sec is not None else None,\n \"precip_mm\": daily[\"precipitation_sum\"][i],\n \"wind_max\": daily[\"wind_speed_10m_max\"][i],\n \"pressure_min\": daily[\"surface_pressure_min\"][i],\n })\n\n headers = {\n \"X-API-Key\": api_key,\n \"Content-Type\": \"application/json\",\n }\n post_resp = requests.post(sync_url, json=records, headers=headers, timeout=30)\n if post_resp.status_code not in (200, 201):\n raise Exception(f\"Keinasystem sync error: 
{post_resp.status_code} {post_resp.text[:300]}\")\n\n result = post_resp.json()\n print(f\"Sync complete: {result}\")\n return result\n", + "language": "python3", + "input_transforms": {} + }, + "summary": "気象データ取得・同期" + } + ] + }, + "schema": { + "$schema": "https://json-schema.org/draft/2020-12/schema", + "type": "object", + "order": [], + "properties": {}, + "required": [] + } +} diff --git a/scripts/alexa_speak.ts b/scripts/alexa_speak.ts index cf6a39c..a25942c 100644 --- a/scripts/alexa_speak.ts +++ b/scripts/alexa_speak.ts @@ -1,69 +1,20 @@ -/** - * alexa_speak.ts - * 指定した Echo デバイスにテキストを読み上げさせる Windmill スクリプト - * - * パラメータ: - * device - ドロップダウンから選択するデバイス(内部的にはシリアル番号) - * text - 読み上げるテキスト - */ - -const ALEXA_API_URL = "http://alexa_api:3500"; - -type DeviceOption = { value: string; label: string }; - -const FALLBACK_DEVICE_OPTIONS: DeviceOption[] = [ - { value: "G0922H085165007R", label: "プレハブ (G0922H085165007R)" }, - { value: "G8M2DB08522600RL", label: "リビングエコー1 (G8M2DB08522600RL)" }, - { value: "G8M2DB08522503WF", label: "リビングエコー2 (G8M2DB08522503WF)" }, - { value: "G0922H08525302K5", label: "オフィスの右エコー (G0922H08525302K5)" }, - { value: "G0922H08525302J9", label: "オフィスの左エコー (G0922H08525302J9)" }, - { value: "G8M2HN08534302XH", label: "寝室のエコー (G8M2HN08534302XH)" }, -]; - -// Windmill Dynamic Select: 引数名 `device` に対応する `DynSelect_device` と `device()` を定義 -export type DynSelect_device = string; - -export async function device(): Promise { - try { - const res = await fetch(`${ALEXA_API_URL}/devices`); - if (!res.ok) return FALLBACK_DEVICE_OPTIONS; - - const devices = (await res.json()) as Array<{ - name?: string; - serial?: string; - family?: string; - }>; - - const options = devices - .filter((d) => d.family === "ECHO" && d.serial) - .map((d) => ({ - value: d.serial as string, - label: `${d.name ?? d.serial} (${d.serial})`, - })) - .sort((a, b) => a.label.localeCompare(b.label, "ja")); - - return options.length > 0 ? 
options : FALLBACK_DEVICE_OPTIONS; - } catch { - return FALLBACK_DEVICE_OPTIONS; - } -} - export async function main( - device: DynSelect_device, + device: string, text: string, ): Promise<{ ok: boolean; device: string; text: string }> { + const ALEXA_API_URL = "http://alexa_api:3500"; + const res = await fetch(`${ALEXA_API_URL}/speak`, { method: "POST", headers: { "Content-Type": "application/json" }, - body: JSON.stringify({ device, text }), + body: JSON.stringify({ device, text }), // ← SSMLなし、素のテキスト }); if (!res.ok) { const body = await res.json().catch(() => ({})); - throw new Error( - `alexa-api error ${res.status}: ${JSON.stringify(body)}` - ); + throw new Error(`alexa-api error ${res.status}: ${JSON.stringify(body)}`); } return await res.json(); } + diff --git a/state/flows.list.json b/state/flows.list.json new file mode 100644 index 0000000..478fb8d --- /dev/null +++ b/state/flows.list.json @@ -0,0 +1,106 @@ +[ + { + "workspace_id": "admins", + "path": "u/akiracraftwork/hourly_chime", + "summary": "鳩時計機能", + "description": "", + "edited_by": "akiracraftwork@gmail.com", + "edited_at": "2026-03-03T05:37:39.969305Z", + "archived": false, + "extra_perms": {}, + "starred": false, + "has_draft": false, + "ws_error_handler_muted": false + }, + { + "workspace_id": "admins", + "path": "f/dev/textout", + "summary": "Display current time on startup", + "description": "", + "edited_by": "akiracraftwork@gmail.com", + "edited_at": "2026-03-02T05:05:05.215985Z", + "archived": false, + "extra_perms": {}, + "starred": false, + "has_draft": false, + "ws_error_handler_muted": false + }, + { + "workspace_id": "admins", + "path": "f/dev/konnnichiha", + "summary": "Print greeting", + "description": "", + "edited_by": "akiracraftwork@gmail.com", + "edited_at": "2026-03-02T04:53:56.968574Z", + "archived": false, + "extra_perms": {}, + "starred": false, + "has_draft": false, + "ws_error_handler_muted": false + }, + { + "workspace_id": "admins", + "path": "u/antigravity/git_sync", 
+ "summary": "Git Sync Workflow", + "description": "Automatically sync Windmill workflows to Git repository (sync branch)", + "edited_by": "akiracraftwork@gmail.com", + "edited_at": "2026-03-01T17:28:14.331046Z", + "archived": false, + "extra_perms": {}, + "starred": false, + "has_draft": false, + "ws_error_handler_muted": false + }, + { + "workspace_id": "admins", + "path": "f/weather/weather_sync", + "summary": "Weather Sync - 気象データ日次同期", + "description": "Open-Meteo から昨日の気象データを取得し、Keinasystem DB に保存する。毎朝6時実行。", + "edited_by": "akiracraftwork@gmail.com", + "edited_at": "2026-02-28T04:31:27.835748Z", + "archived": false, + "extra_perms": {}, + "starred": false, + "has_draft": false, + "ws_error_handler_muted": false + }, + { + "workspace_id": "admins", + "path": "f/mail/mail_filter", + "summary": "メールフィルタリング", + "description": "IMAPで新着メールを受信し、送信者ルール確認→LLM判定→LINE通知を行う。Keinasystemと連携。Gmail→Hotmail→Xserverの順で段階的に有効化する。", + "edited_by": "akiracraftwork@gmail.com", + "edited_at": "2026-02-24T06:41:54.748865Z", + "archived": false, + "extra_perms": {}, + "starred": false, + "has_draft": false, + "ws_error_handler_muted": false + }, + { + "workspace_id": "admins", + "path": "f/shiraou/shiraou_notification", + "summary": "白皇集落営農 変更通知", + "description": "shiraou.keinafarm.net の予約・実績変更をポーリングし、変更があればLINEで管理者に通知する。5分毎に実行。", + "edited_by": "akiracraftwork@gmail.com", + "edited_at": "2026-02-21T06:33:11.078673Z", + "archived": false, + "extra_perms": {}, + "starred": false, + "has_draft": false, + "ws_error_handler_muted": false + }, + { + "workspace_id": "admins", + "path": "f/app_custom/system_heartbeat", + "summary": "Windmill Heartbeat - システム自己診断", + "description": "Windmillの動作確認用ワークフロー。UUID生成、時刻取得、計算チェック、HTTPヘルスチェック、年度判定を行い、全ステップの正常性を検証する。", + "edited_by": "akiracraftwork@gmail.com", + "edited_at": "2026-02-21T03:43:55.495111Z", + "archived": false, + "extra_perms": {}, + "starred": false, + "has_draft": false, + "ws_error_handler_muted": false + } +] diff --git 
a/state/remote_index.json b/state/remote_index.json new file mode 100644 index 0000000..bf8dd56 --- /dev/null +++ b/state/remote_index.json @@ -0,0 +1,44 @@ +{ + "synced_at": "2026-03-03T06:24:34Z", + "workspace": "admins", + "scripts": { + "u/admin/alexa_speak": { + "hash": "3783872112d1a24c", + "updated_at": "2026-03-03T02:57:13.068287Z" + } + }, + "flows": { + "u/akiracraftwork/hourly_chime": { + "hash": null, + "updated_at": "2026-03-03T05:37:39.969305Z" + }, + "f/dev/textout": { + "hash": null, + "updated_at": "2026-03-02T05:05:05.215985Z" + }, + "f/dev/konnnichiha": { + "hash": null, + "updated_at": "2026-03-02T04:53:56.968574Z" + }, + "u/antigravity/git_sync": { + "hash": null, + "updated_at": "2026-03-01T17:28:14.331046Z" + }, + "f/weather/weather_sync": { + "hash": null, + "updated_at": "2026-02-28T04:31:27.835748Z" + }, + "f/mail/mail_filter": { + "hash": null, + "updated_at": "2026-02-24T06:41:54.748865Z" + }, + "f/shiraou/shiraou_notification": { + "hash": null, + "updated_at": "2026-02-21T06:33:11.078673Z" + }, + "f/app_custom/system_heartbeat": { + "hash": null, + "updated_at": "2026-02-21T03:43:55.495111Z" + } + } +} diff --git a/state/scripts.list.json b/state/scripts.list.json new file mode 100644 index 0000000..ddedb6e --- /dev/null +++ b/state/scripts.list.json @@ -0,0 +1,18 @@ +[ + { + "hash": "3783872112d1a24c", + "path": "u/admin/alexa_speak", + "summary": "Echo デバイスに TTS で読み上げ", + "created_at": "2026-03-03T02:57:13.068287Z", + "archived": false, + "extra_perms": {}, + "language": "bun", + "starred": false, + "tag": null, + "description": "指定した Echo デバイスにテキストを読み上げさせる", + "has_draft": false, + "has_deploy_errors": false, + "ws_error_handler_muted": false, + "kind": "script" + } +] diff --git a/wm-api.sh b/wm-api.sh index 028b2ec..8a844b2 100755 --- a/wm-api.sh +++ b/wm-api.sh @@ -8,139 +8,462 @@ set -euo pipefail WINDMILL_URL="${WINDMILL_URL:-https://windmill.keinafarm.net}" WINDMILL_TOKEN="${WINDMILL_TOKEN:-qLJ3VPZ61kTDiIwaUPUu1dXszGrsN1Dh}" 
WINDMILL_WORKSPACE="${WINDMILL_WORKSPACE:-admins}" +STATE_DIR="${STATE_DIR:-state}" +REMOTE_INDEX_FILE="${REMOTE_INDEX_FILE:-${STATE_DIR}/remote_index.json}" API_BASE="${WINDMILL_URL}/api/w/${WINDMILL_WORKSPACE}" AUTH_HEADER="Authorization: Bearer ${WINDMILL_TOKEN}" -# ヘルパー関数 +require_arg() { + if [ -z "${2:-}" ]; then + echo "Usage: $1" + exit 1 + fi +} + +ensure_parent_dir() { + local out="$1" + local dir + dir="$(dirname "$out")" + if [ "$dir" != "." ]; then + mkdir -p "$dir" + fi +} + api_get() { - curl -sk -H "${AUTH_HEADER}" "${API_BASE}$1" 2>/dev/null + curl -sk -H "${AUTH_HEADER}" "${API_BASE}$1" 2>/dev/null } api_post() { - curl -sk -X POST -H "${AUTH_HEADER}" -H "Content-Type: application/json" -d "$2" "${API_BASE}$1" 2>/dev/null + curl -sk -X POST -H "${AUTH_HEADER}" -H "Content-Type: application/json" -d "$2" "${API_BASE}$1" 2>/dev/null } api_put() { - curl -sk -X PUT -H "${AUTH_HEADER}" -H "Content-Type: application/json" -d "$2" "${API_BASE}$1" 2>/dev/null + curl -sk -X PUT -H "${AUTH_HEADER}" -H "Content-Type: application/json" -d "$2" "${API_BASE}$1" 2>/dev/null } api_delete() { - curl -sk -X DELETE -H "${AUTH_HEADER}" "${API_BASE}$1" 2>/dev/null + curl -sk -X DELETE -H "${AUTH_HEADER}" "${API_BASE}$1" 2>/dev/null +} + +json_pretty() { + python3 -m json.tool +} + +save_json_pretty() { + local out="$1" + ensure_parent_dir "$out" + python3 -c 'import json,sys; print(json.dumps(json.load(sys.stdin), ensure_ascii=False, indent=2))' > "$out" +} + +python_json() { + python3 - "$@" +} + +path_to_script_file() { + local path="$1" + echo "scripts/${path##*/}.ts" +} + +path_to_flow_file() { + local path="$1" + echo "flows/${path##*/}.flow.json" +} + +pull_script() { + local script_path="$1" + local outfile="$2" + + local data + data="$(api_get "/scripts/get/p/${script_path}")" + if [ -z "$data" ]; then + echo "pull-script failed: empty response (${script_path})" >&2 + exit 1 + fi + + ensure_parent_dir "$outfile" + python_json "$outfile" "$data" <<'PY' +import 
json +import pathlib +import sys + +outfile = pathlib.Path(sys.argv[1]) +obj = json.loads(sys.argv[2]) +content = obj.get("content") +if content is None: + raise SystemExit("content not found in script payload") +outfile.write_text(content, encoding="utf-8", newline="\n") +PY + echo "pulled script: ${script_path} -> ${outfile}" +} + +push_script() { + local script_path="$1" + local infile="$2" + + if [ ! -f "$infile" ]; then + echo "push-script failed: file not found (${infile})" >&2 + exit 1 + fi + + local current payload + current="$(api_get "/scripts/get/p/${script_path}")" + if [ -z "$current" ]; then + echo "push-script failed: empty response (${script_path})" >&2 + exit 1 + fi + + payload="$(python_json "$script_path" "$infile" "$current" <<'PY' +import json +import pathlib +import sys + +script_path, infile = sys.argv[1], pathlib.Path(sys.argv[2]) +remote = json.loads(sys.argv[3]) +content = infile.read_text(encoding="utf-8") + +payload = { + "path": script_path, + "parent_hash": remote.get("hash"), + "summary": remote.get("summary") or "", + "description": remote.get("description") or "", + "content": content, + "schema": remote.get("schema") or {"type": "object", "properties": {}, "required": []}, + "language": remote.get("language") or "bun", + "kind": remote.get("kind") or "script", + "lock": remote.get("lock") or "", +} + +missing = [k for k in ("path", "parent_hash", "content", "schema", "language", "kind", "lock") if payload.get(k) is None] +if missing: + raise SystemExit(f"missing required payload fields: {', '.join(missing)}") + +print(json.dumps(payload, ensure_ascii=False)) +PY +)" + api_post "/scripts/create" "$payload" | json_pretty +} + +pull_flow() { + local flow_path="$1" + local outfile="$2" + + local data + data="$(api_get "/flows/get/${flow_path}")" + if [ -z "$data" ]; then + echo "pull-flow failed: empty response (${flow_path})" >&2 + exit 1 + fi + + ensure_parent_dir "$outfile" + python_json "$data" <<'PY' | save_json_pretty "$outfile" 
+import json +import sys + +obj = json.loads(sys.argv[1]) +out = { + "path": obj.get("path"), + "summary": obj.get("summary") or "", + "description": obj.get("description") or "", + "value": obj.get("value") or {}, + "schema": obj.get("schema") or {"type": "object", "properties": {}, "required": []}, +} +print(json.dumps(out, ensure_ascii=False)) +PY + echo "pulled flow: ${flow_path} -> ${outfile}" +} + +push_flow() { + local json_file="$1" + if [ ! -f "$json_file" ]; then + echo "push-flow failed: file not found (${json_file})" >&2 + exit 1 + fi + + local flow_path payload + flow_path="$(python_json "$json_file" <<'PY' +import json +import pathlib +import sys +obj = json.loads(pathlib.Path(sys.argv[1]).read_text(encoding="utf-8")) +path = obj.get("path") +if not path: + raise SystemExit("path is required in flow json") +print(path) +PY +)" + payload="$(cat "$json_file")" + + echo "push-flow: delete ${flow_path}" + api_delete "/flows/delete/${flow_path}" >/dev/null || true + echo "push-flow: create ${flow_path}" + api_post "/flows/create" "$payload" | json_pretty +} + +pull_all() { + mkdir -p scripts flows "${STATE_DIR}" + local scripts_json flows_json now_utc + + scripts_json="$(api_get "/scripts/list?per_page=1000")" + flows_json="$(api_get "/flows/list?per_page=1000")" + now_utc="$(date -u +"%Y-%m-%dT%H:%M:%SZ")" + + python_json "$scripts_json" <<'PY' > "${STATE_DIR}/scripts.list.json" +import json +import sys +print(json.dumps(json.loads(sys.argv[1]), ensure_ascii=False, indent=2)) +PY + python_json "$flows_json" <<'PY' > "${STATE_DIR}/flows.list.json" +import json +import sys +print(json.dumps(json.loads(sys.argv[1]), ensure_ascii=False, indent=2)) +PY + + local script_paths flow_paths + script_paths="$(python_json "$scripts_json" <<'PY' +import json +import sys +items = json.loads(sys.argv[1]) +for it in items: + path = it.get("path") + if path: + print(path) +PY +)" + flow_paths="$(python_json "$flows_json" <<'PY' +import json +import sys +items = 
json.loads(sys.argv[1]) +for it in items: + path = it.get("path") + if path: + print(path) +PY +)" + + while IFS= read -r p; do + [ -z "$p" ] && continue + pull_script "$p" "$(path_to_script_file "$p")" + done <<< "$script_paths" + + while IFS= read -r p; do + [ -z "$p" ] && continue + pull_flow "$p" "$(path_to_flow_file "$p")" + done <<< "$flow_paths" + + build_remote_index "$scripts_json" "$flows_json" "$now_utc" > "${REMOTE_INDEX_FILE}" + echo "remote index updated: ${REMOTE_INDEX_FILE}" +} + +build_remote_index() { + local scripts_json="$1" + local flows_json="$2" + local synced_at="$3" + python_json "$scripts_json" "$flows_json" "$synced_at" <<'PY' +import json +import sys + +scripts = json.loads(sys.argv[1]) +flows = json.loads(sys.argv[2]) +synced_at = sys.argv[3] + +index = { + "synced_at": synced_at, + "workspace": None, + "scripts": {}, + "flows": {}, +} + +for item in scripts: + path = item.get("path") + if not path: + continue + if index["workspace"] is None: + index["workspace"] = item.get("workspace_id") + index["scripts"][path] = { + "hash": item.get("hash"), + "updated_at": item.get("edited_at") or item.get("created_at"), + } + +for item in flows: + path = item.get("path") + if not path: + continue + if index["workspace"] is None: + index["workspace"] = item.get("workspace_id") + index["flows"][path] = { + "hash": item.get("hash"), + "updated_at": item.get("edited_at") or item.get("created_at"), + } + +print(json.dumps(index, ensure_ascii=False, indent=2)) +PY +} + +status_remote() { + mkdir -p "${STATE_DIR}" + local scripts_json flows_json now_utc + scripts_json="$(api_get "/scripts/list?per_page=1000")" + flows_json="$(api_get "/flows/list?per_page=1000")" + now_utc="$(date -u +"%Y-%m-%dT%H:%M:%SZ")" + + local current_index + current_index="$(build_remote_index "$scripts_json" "$flows_json" "$now_utc")" + printf '%s\n' "$current_index" > "${STATE_DIR}/remote_index.current.json" + + if [ ! 
-f "${REMOTE_INDEX_FILE}" ]; then + echo "No baseline index: ${REMOTE_INDEX_FILE}" + echo "Run ./wm-api.sh pull-all first." + exit 0 + fi + + python_json "${REMOTE_INDEX_FILE}" "${STATE_DIR}/remote_index.current.json" <<'PY' +import json +import pathlib +import sys + +old = json.loads(pathlib.Path(sys.argv[1]).read_text(encoding="utf-8")) +new = json.loads(pathlib.Path(sys.argv[2]).read_text(encoding="utf-8")) + +def diff(kind): + old_map = old.get(kind, {}) + new_map = new.get(kind, {}) + added = sorted(set(new_map) - set(old_map)) + removed = sorted(set(old_map) - set(new_map)) + changed = sorted( + path for path in (set(new_map) & set(old_map)) + if (new_map[path].get("hash") != old_map[path].get("hash")) + ) + return added, changed, removed + +for kind in ("scripts", "flows"): + added, changed, removed = diff(kind) + print(f"[{kind}]") + if not (added or changed or removed): + print(" no changes") + continue + for p in added: + print(f" + {p}") + for p in changed: + print(f" ~ {p}") + for p in removed: + print(f" - {p}") +PY } -# コマンド case "${1:-help}" in - whoami) - api_get "/users/whoami" | python3 -m json.tool - ;; - scripts|list-scripts) - api_get "/scripts/list?per_page=${2:-100}" | python3 -m json.tool - ;; - flows|list-flows) - api_get "/flows/list?per_page=${2:-100}" | python3 -m json.tool - ;; - get-script) - if [ -z "${2:-}" ]; then - echo "Usage: $0 get-script " - exit 1 - fi - api_get "/scripts/get/p/$2" | python3 -m json.tool - ;; - get-flow) - if [ -z "${2:-}" ]; then - echo "Usage: $0 get-flow " - exit 1 - fi - api_get "/flows/get/$2" | python3 -m json.tool - ;; - create-script) - if [ -z "${2:-}" ]; then - echo "Usage: $0 create-script " - exit 1 - fi - api_post "/scripts/create" "$(cat "$2")" - ;; - create-flow) - if [ -z "${2:-}" ]; then - echo "Usage: $0 create-flow " - exit 1 - fi - api_post "/flows/create" "$(cat "$2")" - ;; - update-flow) - if [ -z "${2:-}" ] || [ -z "${3:-}" ]; then - echo "Usage: $0 update-flow " - exit 1 - fi - api_put 
"/flows/update/$2" "$(cat "$3")" - ;; - create-schedule) - if [ -z "${2:-}" ]; then - echo "Usage: $0 create-schedule " - exit 1 - fi - api_post "/schedules/create" "$(cat "$2")" - ;; - run-script) - if [ -z "${2:-}" ]; then - echo "Usage: $0 run-script [json-args]" - exit 1 - fi - local_args="${3:-{\}}" - api_post "/jobs/run/p/$2" "${local_args}" - ;; - run-flow) - if [ -z "${2:-}" ]; then - echo "Usage: $0 run-flow [json-args]" - exit 1 - fi - local_args="${3:-{\}}" - api_post "/jobs/run/f/$2" "${local_args}" - ;; - job-status) - if [ -z "${2:-}" ]; then - echo "Usage: $0 job-status " - exit 1 - fi - api_get "/jobs_u/get/$2" | python3 -m json.tool - ;; - job-result) - if [ -z "${2:-}" ]; then - echo "Usage: $0 job-result " - exit 1 - fi - api_get "/jobs_u/completed/get_result/$2" | python3 -m json.tool - ;; - schedules|list-schedules) - api_get "/schedules/list?per_page=${2:-100}" | python3 -m json.tool - ;; - version) - curl -sk "${WINDMILL_URL}/api/version" 2>/dev/null - echo "" - ;; - help|*) - cat << 'EOF' + whoami) + api_get "/users/whoami" | json_pretty + ;; + scripts|list-scripts) + api_get "/scripts/list?per_page=${2:-100}" | json_pretty + ;; + flows|list-flows) + api_get "/flows/list?per_page=${2:-100}" | json_pretty + ;; + schedules|list-schedules) + api_get "/schedules/list?per_page=${2:-100}" | json_pretty + ;; + get-script) + require_arg "$0 get-script " "${2:-}" + api_get "/scripts/get/p/$2" | json_pretty + ;; + get-flow) + require_arg "$0 get-flow " "${2:-}" + api_get "/flows/get/$2" | json_pretty + ;; + create-script) + require_arg "$0 create-script " "${2:-}" + api_post "/scripts/create" "$(cat "$2")" + ;; + create-flow) + require_arg "$0 create-flow " "${2:-}" + api_post "/flows/create" "$(cat "$2")" + ;; + update-flow) + require_arg "$0 update-flow " "${2:-}" + require_arg "$0 update-flow " "${3:-}" + api_put "/flows/update/$2" "$(cat "$3")" + ;; + create-schedule) + require_arg "$0 create-schedule " "${2:-}" + api_post "/schedules/create" 
"$(cat "$2")" + ;; + run-script) + require_arg "$0 run-script [json-args]" "${2:-}" + local_args="${3:-{}}" + api_post "/jobs/run/p/$2" "${local_args}" + ;; + run-flow) + require_arg "$0 run-flow [json-args]" "${2:-}" + local_args="${3:-{}}" + api_post "/jobs/run/f/$2" "${local_args}" + ;; + job-status) + require_arg "$0 job-status " "${2:-}" + api_get "/jobs_u/get/$2" | json_pretty + ;; + job-result) + require_arg "$0 job-result " "${2:-}" + api_get "/jobs_u/completed/get_result/$2" | json_pretty + ;; + pull-script) + require_arg "$0 pull-script " "${2:-}" + require_arg "$0 pull-script " "${3:-}" + pull_script "$2" "$3" + ;; + push-script) + require_arg "$0 push-script " "${2:-}" + require_arg "$0 push-script " "${3:-}" + push_script "$2" "$3" + ;; + pull-flow) + require_arg "$0 pull-flow " "${2:-}" + require_arg "$0 pull-flow " "${3:-}" + pull_flow "$2" "$3" + ;; + push-flow) + require_arg "$0 push-flow " "${2:-}" + push_flow "$2" + ;; + pull-all) + pull_all + ;; + status-remote) + status_remote + ;; + version) + curl -sk "${WINDMILL_URL}/api/version" 2>/dev/null + echo "" + ;; + help|*) + cat <<'EOF' Windmill REST API ヘルパー 使い方: ./wm-api.sh [args...] 
コマンド: - whoami - 現在のユーザー情報を表示 - version - サーバーバージョンを表示 - scripts - スクリプト一覧を表示 - flows - フロー一覧を表示 - get-script - スクリプトの詳細を取得 - get-flow - フローの詳細を取得 - create-script - JSONファイルからスクリプトを作成 - create-flow - JSONファイルからフローを作成 - update-flow - フローを更新 - create-schedule - JSONファイルからスケジュールを作成 - run-script [args] - スクリプトを実行 - run-flow [args] - フローを実行 - job-status - ジョブのステータスを確認 - job-result - ジョブの結果を取得 - schedules - スケジュール一覧を表示 + whoami - 現在のユーザー情報を表示 + version - サーバーバージョンを表示 + scripts|list-scripts [n] - スクリプト一覧を表示 + flows|list-flows [n] - フロー一覧を表示 + schedules|list-schedules [n] - スケジュール一覧を表示 + get-script - スクリプトの詳細を取得 + get-flow - フローの詳細を取得 + create-script - JSONファイルからスクリプトを作成 + create-flow - JSONファイルからフローを作成 + update-flow - フローを更新 + create-schedule - JSONファイルからスケジュールを作成 + run-script [args] - スクリプトを実行 + run-flow [args] - フローを実行 + job-status - ジョブのステータスを確認 + job-result - ジョブの結果を取得 + pull-script - スクリプトをローカルへ保存 + push-script - ローカルファイルをスクリプトへ反映 + pull-flow - フローをローカルJSONへ保存 + push-flow - フローJSONを削除再作成で反映 + pull-all - scripts/flowsを一括pullしてstate更新 + status-remote - remote_index基準で差分表示 EOF - ;; + ;; esac