diff --git a/dashboard/README.md b/dashboard/README.md new file mode 100644 index 0000000..25f96a6 --- /dev/null +++ b/dashboard/README.md @@ -0,0 +1,95 @@ +# SentryAgent.ai AgentIdP — Web Dashboard + +## 1. Overview + +The AgentIdP Dashboard is a React 18 single-page application (SPA) that provides a visual +management interface for the AgentIdP API. It allows operators to: + +- Browse, search, and filter all registered AI agents +- View agent details and manage lifecycle (suspend / reactivate) +- Generate, rotate, and revoke agent credentials +- Query the audit log with filters for agent, action, outcome, and date range +- Monitor PostgreSQL and Redis connectivity in real time + +The dashboard is co-served by the Express API server at `/dashboard/` — no separate hosting +is required. + +## 2. Prerequisites + +- Node.js 18+ +- A running AgentIdP server (local or remote) +- An active agent credential (Client ID + Client Secret) with full scopes + +## 3. Development + +Install dashboard dependencies: + +```bash +cd dashboard +npm install +``` + +Start the Vite dev server: + +```bash +npm run dev +``` + +The dev server starts at `http://localhost:5173/dashboard/`. API calls are made to +`window.location.origin` (defaulted in the Login form), so either: + +- Set the **API Base URL** field to your local server (e.g. `http://localhost:3000`) +- Or configure a Vite proxy in `vite.config.ts` for `/api` and `/health` paths + +## 4. Building + +Compile TypeScript and bundle with Vite: + +```bash +npm run build +``` + +Output is written to `dashboard/dist/`. The build is an optimised static bundle (HTML, CSS, JS). + +To verify the build locally: + +```bash +npm run preview +``` + +## 5. Deployment + +The AgentIdP Express server automatically serves the built dashboard: + +- Static assets at `/dashboard/` (via `express.static`) +- SPA fallback — all `/dashboard/*` requests not matching a static file return `index.html` + +**Steps:** + +1. Build the dashboard: `cd dashboard && npm run build` +2. Start (or restart) the AgentIdP server: `npm start` +3. Open `https://your-api-host/dashboard/` in a browser + +No additional nginx or CDN configuration is required for basic deployments. + +## 6. Login + +The login form has three fields: + +| Field | Description | +|---|---| +| **API Base URL** | Base URL of the AgentIdP server, e.g. `https://api.example.com`. Defaults to the current page origin, which works when the dashboard is co-served. | +| **Client ID** | The UUID of an agent registered in AgentIdP. This agent must have the scopes `agents:read agents:write tokens:read audit:read`. | +| **Client Secret** | The plain-text client secret for the agent. Validated against the token endpoint on login. | + +Credentials are stored in `sessionStorage` only — they are cleared when the browser tab is closed. + +## 7. Pages + +| Page | Route | Description | +|---|---|---| +| **Agents** | `/dashboard/agents` | Paginated list of all agents. Search by email (debounced), filter by status. Click a row for details. | +| **Agent Detail** | `/dashboard/agents/:agentId` | Full agent metadata. Suspend or reactivate (with confirmation). Link to credentials. | +| **Credentials** | `/dashboard/agents/:agentId/credentials` | List all credentials. Generate, rotate, or revoke. New secrets shown exactly once. | +| **Audit Log** | `/dashboard/audit` | Paginated audit events with filters for agent ID, action, outcome, and date range. | +| **Health** | `/dashboard/health` | PostgreSQL and Redis connectivity cards. Auto-refreshes every 30 seconds. | diff --git a/dashboard/index.html b/dashboard/index.html new file mode 100644 index 0000000..da3ccf7 --- /dev/null +++ b/dashboard/index.html @@ -0,0 +1,12 @@ + + + + + + SentryAgent.ai — AgentIdP Dashboard + + +
+ + + diff --git a/dashboard/package-lock.json b/dashboard/package-lock.json new file mode 100644 index 0000000..8cbc904 --- /dev/null +++ b/dashboard/package-lock.json @@ -0,0 +1,2755 @@ +{ + "name": "@sentryagent/dashboard", + "version": "1.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "@sentryagent/dashboard", + "version": "1.0.0", + "dependencies": { + "@sentryagent/idp-sdk": "file:../sdk", + "clsx": "^2.1.1", + "lucide-react": "^0.446.0", + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-router-dom": "^6.26.2", + "tailwind-merge": "^2.5.2" + }, + "devDependencies": { + "@types/react": "^18.3.5", + "@types/react-dom": "^18.3.0", + "@vitejs/plugin-react": "^4.3.1", + "autoprefixer": "^10.4.20", + "postcss": "^8.4.47", + "tailwindcss": "^3.4.12", + "typescript": "^5.5.3", + "vite": "^5.4.8" + } + }, + "../sdk": { + "name": "@sentryagent/idp-sdk", + "version": "1.0.0", + "license": "MIT", + "devDependencies": { + "typescript": "^5.4.5" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.28.5", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.29.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.28.6", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.29.2", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.29.2.tgz", + "integrity": "sha512-HoGuUs4sCZNezVEKdVcwqmZN8GoHirLUcLaYVNBK2J0DadGtdcqgr3BCbvH8+XUo4NGjNl3VOtSjEKNzqfFgKw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.29.2", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.2.tgz", + "integrity": "sha512-4GgRzy/+fsBa72/RZVJmGKPmZu9Byn8o4MoLpmNe1m8ZfYnz5emHLQz3U4gLud6Zwl0RZIcgiLD7Uq7ySFuDLA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.29.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/template": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": "https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@remix-run/router": { + "version": "1.23.2", + "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.23.2.tgz", + "integrity": "sha512-Ic6m2U/rMjTkhERIa/0ZtXJP17QUi2CbWE7cqx4J58M8aA3QTfW+2UlQ4psvTX9IO1RfNVhK3pcpdjej7L+t2w==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.27", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", + "integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.60.0.tgz", + "integrity": "sha512-WOhNW9K8bR3kf4zLxbfg6Pxu2ybOUbB2AjMDHSQx86LIF4rH4Ft7vmMwNt0loO0eonglSNy4cpD3MKXXKQu0/A==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.60.0.tgz", + "integrity": "sha512-u6JHLll5QKRvjciE78bQXDmqRqNs5M/3GVqZeMwvmjaNODJih/WIrJlFVEihvV0MiYFmd+ZyPr9wxOVbPAG2Iw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.60.0.tgz", + "integrity": "sha512-qEF7CsKKzSRc20Ciu2Zw1wRrBz4g56F7r/vRwY430UPp/nt1x21Q/fpJ9N5l47WWvJlkNCPJz3QRVw008fi7yA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.60.0.tgz", + "integrity": "sha512-WADYozJ4QCnXCH4wPB+3FuGmDPoFseVCUrANmA5LWwGmC6FL14BWC7pcq+FstOZv3baGX65tZ378uT6WG8ynTw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.60.0.tgz", + "integrity": "sha512-6b8wGHJlDrGeSE3aH5mGNHBjA0TTkxdoNHik5EkvPHCt351XnigA4pS7Wsj/Eo9Y8RBU6f35cjN9SYmCFBtzxw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.60.0.tgz", + "integrity": "sha512-h25Ga0t4jaylMB8M/JKAyrvvfxGRjnPQIR8lnCayyzEjEOx2EJIlIiMbhpWxDRKGKF8jbNH01NnN663dH638mA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.60.0.tgz", + "integrity": "sha512-RzeBwv0B3qtVBWtcuABtSuCzToo2IEAIQrcyB/b2zMvBWVbjo8bZDjACUpnaafaxhTw2W+imQbP2BD1usasK4g==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.60.0.tgz", + "integrity": "sha512-Sf7zusNI2CIU1HLzuu9Tc5YGAHEZs5Lu7N1ssJG4Tkw6e0MEsN7NdjUDDfGNHy2IU+ENyWT+L2obgWiguWibWQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.60.0.tgz", + "integrity": "sha512-DX2x7CMcrJzsE91q7/O02IJQ5/aLkVtYFryqCjduJhUfGKG6yJV8hxaw8pZa93lLEpPTP/ohdN4wFz7yp/ry9A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.60.0.tgz", + "integrity": "sha512-09EL+yFVbJZlhcQfShpswwRZ0Rg+z/CsSELFCnPt3iK+iqwGsI4zht3secj5vLEs957QvFFXnzAT0FFPIxSrkQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.60.0.tgz", + "integrity": "sha512-i9IcCMPr3EXm8EQg5jnja0Zyc1iFxJjZWlb4wr7U2Wx/GrddOuEafxRdMPRYVaXjgbhvqalp6np07hN1w9kAKw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.60.0.tgz", + "integrity": "sha512-DGzdJK9kyJ+B78MCkWeGnpXJ91tK/iKA6HwHxF4TAlPIY7GXEvMe8hBFRgdrR9Ly4qebR/7gfUs9y2IoaVEyog==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.60.0.tgz", + "integrity": "sha512-RwpnLsqC8qbS8z1H1AxBA1H6qknR4YpPR9w2XX0vo2Sz10miu57PkNcnHVaZkbqyw/kUWfKMI73jhmfi9BRMUQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.60.0.tgz", + "integrity": "sha512-Z8pPf54Ly3aqtdWC3G4rFigZgNvd+qJlOE52fmko3KST9SoGfAdSRCwyoyG05q1HrrAblLbk1/PSIV+80/pxLg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.60.0.tgz", + "integrity": "sha512-3a3qQustp3COCGvnP4SvrMHnPQ9d1vzCakQVRTliaz8cIp/wULGjiGpbcqrkv0WrHTEp8bQD/B3HBjzujVWLOA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.60.0.tgz", + "integrity": "sha512-pjZDsVH/1VsghMJ2/kAaxt6dL0psT6ZexQVrijczOf+PeP2BUqTHYejk3l6TlPRydggINOeNRhvpLa0AYpCWSQ==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.60.0.tgz", + "integrity": "sha512-3ObQs0BhvPgiUVZrN7gqCSvmFuMWvWvsjG5ayJ3Lraqv+2KhOsp+pUbigqbeWqueGIsnn+09HBw27rJ+gYK4VQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.60.0.tgz", + "integrity": "sha512-EtylprDtQPdS5rXvAayrNDYoJhIz1/vzN2fEubo3yLE7tfAw+948dO0g4M0vkTVFhKojnF+n6C8bDNe+gDRdTg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.60.0.tgz", + "integrity": "sha512-k09oiRCi/bHU9UVFqD17r3eJR9bn03TyKraCrlz5ULFJGdJGi7VOmm9jl44vOJvRJ6P7WuBi/s2A97LxxHGIdw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.60.0.tgz", + "integrity": "sha512-1o/0/pIhozoSaDJoDcec+IVLbnRtQmHwPV730+AOD29lHEEo4F5BEUB24H0OBdhbBBDwIOSuf7vgg0Ywxdfiiw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.60.0.tgz", + "integrity": "sha512-pESDkos/PDzYwtyzB5p/UoNU/8fJo68vcXM9ZW2V0kjYayj1KaaUfi1NmTUTUpMn4UhU4gTuK8gIaFO4UGuMbA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.60.0.tgz", + "integrity": "sha512-hj1wFStD7B1YBeYmvY+lWXZ7ey73YGPcViMShYikqKT1GtstIKQAtfUI6yrzPjAy/O7pO0VLXGmUVWXQMaYgTQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.60.0.tgz", + "integrity": "sha512-SyaIPFoxmUPlNDq5EHkTbiKzmSEmq/gOYFI/3HHJ8iS/v1mbugVa7dXUzcJGQfoytp9DJFLhHH4U3/eTy2Bq4w==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.60.0.tgz", + "integrity": "sha512-RdcryEfzZr+lAr5kRm2ucN9aVlCCa2QNq4hXelZxb8GG0NJSazq44Z3PCCc8wISRuCVnGs0lQJVX5Vp6fKA+IA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.60.0.tgz", + "integrity": "sha512-PrsWNQ8BuE00O3Xsx3ALh2Df8fAj9+cvvX9AIA6o4KpATR98c9mud4XtDWVvsEuyia5U4tVSTKygawyJkjm60w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@sentryagent/idp-sdk": { + "resolved": "../sdk", + "link": true + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/prop-types": { + "version": "15.7.15", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz", + "integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/react": { + "version": "18.3.28", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.28.tgz", + "integrity": "sha512-z9VXpC7MWrhfWipitjNdgCauoMLRdIILQsAEV+ZesIzBq/oUlxk0m3ApZuMFCXdnS4U7KrI+l3WRUEGQ8K1QKw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/prop-types": "*", + "csstype": "^3.2.2" + } + }, + "node_modules/@types/react-dom": { + "version": "18.3.7", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.7.tgz", + "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^18.0.0" + } + }, + "node_modules/@vitejs/plugin-react": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", + "integrity": "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-beta.27", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.17.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": "sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "dev": true, + "license": "MIT" + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "dev": true, + "license": "MIT" + }, + "node_modules/autoprefixer": { + "version": "10.4.27", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.27.tgz", + "integrity": "sha512-NP9APE+tO+LuJGn7/9+cohklunJsXWiaWEfV3si4Gi/XHDwVNgkwr1J3RQYFIvPy76GmJ9/bW8vyoU1LcxwKHA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "browserslist": "^4.28.1", + "caniuse-lite": "^1.0.30001774", + "fraction.js": "^5.3.4", + "picocolors": "^1.1.1", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/baseline-browser-mapping": { + "version": "2.10.12", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.12.tgz", + "integrity": "sha512-qyq26DxfY4awP2gIRXhhLWfwzwI+N5Nxk6iQi8EFizIaWIjqicQTE4sLnZZVdeKPRcVNoJOkkpfzoIYuvCKaIQ==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.cjs" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/camelcase-css": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001781", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001781.tgz", + "integrity": "sha512-RdwNCyMsNBftLjW6w01z8bKEvT6e/5tpPVEgtn22TiLGlstHOVecsX2KHFkD5e/vRnIE4EGzpuIODb3mtswtkw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": "sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/csstype": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/didyoumean": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true, + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.5.328", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.328.tgz", + "integrity": "sha512-QNQ5l45DzYytThO21403XN3FvK0hOkWDG8viNf6jqS42msJ8I4tGDSpBCgvDRRPnkffafiwAym2X2eHeGD2V0w==", + "dev": true, + "license": "ISC" + }, + "node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fastq": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", + "integrity": "sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fraction.js": { + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz", + "integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/jiti": { + "version": "1.21.7", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", + "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", + "dev": true, + "license": "MIT", + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "license": "MIT" + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/lilconfig": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", + "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + "integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/lucide-react": { + "version": "0.446.0", + "resolved": "https://registry.npmjs.org/lucide-react/-/lucide-react-0.446.0.tgz", + "integrity": "sha512-BU7gy8MfBMqvEdDPH79VhOXSEgyG8TSPOKWaExWGCQVqnGH7wGgDngPbofu+KdtVjPQBWbEmnfMTq90CTiiDRg==", + "license": "ISC", + "peerDependencies": { + "react": "^16.5.1 || ^17.0.0 || ^18.0.0 || ^19.0.0-rc" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/node-releases": { + "version": "2.0.36", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.36.tgz", + "integrity": "sha512-TdC8FSgHz8Mwtw9g5L4gR/Sh9XhSP/0DEkQxfEFXOpiul5IiHgHan2VhYYb6agDSfp4KuvltmGApc8HMgUrIkA==", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.2.tgz", + "integrity": "sha512-V7+vQEJ06Z+c5tSye8S+nHUfI51xoXIXjHQ99cQtKUkQqqO1kO/KCJUfZXuB47h/YBlDhah2H3hdUGXn8ie0oA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/postcss": { + "version": "8.5.8", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.8.tgz", + "integrity": "sha512-OW/rX8O/jXnm82Ey1k44pObPtdblfiuWnrd8X7GJ7emImCOstunGbXUpp7HdBrFQX6rJzn3sPT397Wp5aCwCHg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-import": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "dev": true, + "license": "MIT", + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-js": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.1.0.tgz", + "integrity": "sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "camelcase-css": "^2.0.1" + }, + "engines": { + "node": "^12 || ^14 || >= 16" + }, + "peerDependencies": { + "postcss": "^8.4.21" + } + }, + "node_modules/postcss-load-config": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-6.0.1.tgz", + "integrity": "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "lilconfig": "^3.1.1" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "jiti": ">=1.21.0", + "postcss": ">=8.0.9", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + }, + "postcss": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/postcss-nested": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", + "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "postcss-selector-parser": "^6.1.1" + }, + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.2.14" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", + "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/react": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/react-refresh": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", + "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-router": { + "version": "6.30.3", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.30.3.tgz", + "integrity": "sha512-XRnlbKMTmktBkjCLE8/XcZFlnHvr2Ltdr1eJX4idL55/9BbORzyZEaIkBFDhFGCEWBBItsVrDxwx3gnisMitdw==", + "license": "MIT", + "dependencies": { + "@remix-run/router": "1.23.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "react": ">=16.8" + } + }, + "node_modules/react-router-dom": { + "version": "6.30.3", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.30.3.tgz", + "integrity": "sha512-pxPcv1AczD4vso7G4Z3TKcvlxK7g7TNt3/FNGMhfqyntocvYKj+GCatfigGDjbLozC4baguJ0ReCigoDJXb0ag==", + "license": "MIT", + "dependencies": { + "@remix-run/router": "1.23.2", + "react-router": "6.30.3" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "react": ">=16.8", + "react-dom": ">=16.8" + } + }, + "node_modules/read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": "sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rollup": { + "version": "4.60.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.60.0.tgz", + "integrity": "sha512-yqjxruMGBQJ2gG4HtjZtAfXArHomazDHoFwFFmZZl0r7Pdo7qCIXKqKHZc8yeoMgzJJ+pO6pEEHa+V7uzWlrAQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.60.0", + "@rollup/rollup-android-arm64": "4.60.0", + "@rollup/rollup-darwin-arm64": "4.60.0", + "@rollup/rollup-darwin-x64": "4.60.0", + "@rollup/rollup-freebsd-arm64": "4.60.0", + "@rollup/rollup-freebsd-x64": "4.60.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.60.0", + "@rollup/rollup-linux-arm-musleabihf": "4.60.0", + "@rollup/rollup-linux-arm64-gnu": "4.60.0", + "@rollup/rollup-linux-arm64-musl": "4.60.0", + "@rollup/rollup-linux-loong64-gnu": "4.60.0", + "@rollup/rollup-linux-loong64-musl": "4.60.0", + "@rollup/rollup-linux-ppc64-gnu": "4.60.0", + "@rollup/rollup-linux-ppc64-musl": "4.60.0", + "@rollup/rollup-linux-riscv64-gnu": "4.60.0", + "@rollup/rollup-linux-riscv64-musl": "4.60.0", + "@rollup/rollup-linux-s390x-gnu": "4.60.0", + "@rollup/rollup-linux-x64-gnu": "4.60.0", + "@rollup/rollup-linux-x64-musl": "4.60.0", + "@rollup/rollup-openbsd-x64": "4.60.0", + "@rollup/rollup-openharmony-arm64": "4.60.0", + "@rollup/rollup-win32-arm64-msvc": "4.60.0", + "@rollup/rollup-win32-ia32-msvc": "4.60.0", + "@rollup/rollup-win32-x64-gnu": "4.60.0", + "@rollup/rollup-win32-x64-msvc": "4.60.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/scheduler": { + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sucrase": { + "version": "3.35.1", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.1.tgz", + "integrity": "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "tinyglobby": "^0.2.11", + "ts-interface-checker": "^0.1.9" + }, + "bin": { + "sucrase": "bin/sucrase", + "sucrase-node": "bin/sucrase-node" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tailwind-merge": { + "version": "2.6.1", + "resolved": "https://registry.npmjs.org/tailwind-merge/-/tailwind-merge-2.6.1.tgz", + "integrity": "sha512-Oo6tHdpZsGpkKG88HJ8RR1rg/RdnEkQEfMoEk2x1XRI3F1AxeU+ijRXpiVUF4UbLfcxxRGw6TbUINKYdWVsQTQ==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/dcastil" + } + }, + "node_modules/tailwindcss": { + "version": "3.4.19", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.19.tgz", + "integrity": "sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "arg": "^5.0.2", + "chokidar": "^3.6.0", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.3.2", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.21.7", + "lilconfig": "^3.1.3", + "micromatch": "^4.0.8", + "normalize-path": "^3.0.0", + "object-hash": "^3.0.0", + "picocolors": "^1.1.1", + "postcss": "^8.4.47", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", + "postcss-nested": "^6.2.0", + "postcss-selector-parser": "^6.1.2", + "resolve": "^1.22.8", + "sucrase": "^3.35.0" + }, + "bin": { + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": "sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.4.tgz", + "integrity": "sha512-QP88BAKvMam/3NxH6vj2o21R6MjxZUAd6nlwAS/pnGvN9IVLocLHxGYIzFhg6fUQ+5th6P4dv4eW9jX3DSIj7A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ts-interface-checker": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/typescript": { + "version": "5.9.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz", + "integrity": "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/vite": { + "version": "5.4.21", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", + "dev": true, + "license": "MIT", + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + } + } +} diff --git a/dashboard/package.json b/dashboard/package.json new file mode 100644 index 0000000..965a705 --- /dev/null +++ b/dashboard/package.json @@ -0,0 +1,29 @@ +{ + "name": "@sentryagent/dashboard", + "version": "1.0.0", + "private": true, + "scripts": { + "dev": "vite", + "build": "tsc -p tsconfig.app.json && vite build", + "preview": "vite preview" + }, + "dependencies": { + "@sentryagent/idp-sdk": "file:../sdk", + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-router-dom": "^6.26.2", + "lucide-react": "^0.446.0", + "clsx": "^2.1.1", + "tailwind-merge": "^2.5.2" + }, + "devDependencies": { + "@types/react": "^18.3.5", + "@types/react-dom": "^18.3.0", + "@vitejs/plugin-react": "^4.3.1", + "autoprefixer": "^10.4.20", + "postcss": "^8.4.47", + "tailwindcss": "^3.4.12", + "typescript": "^5.5.3", + "vite": "^5.4.8" + } +} diff --git a/dashboard/postcss.config.js b/dashboard/postcss.config.js new file mode 100644 index 0000000..2aa7205 --- /dev/null +++ b/dashboard/postcss.config.js @@ -0,0 +1,6 @@ +export default { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/dashboard/src/App.tsx b/dashboard/src/App.tsx new file mode 100644 index 0000000..c90dcd8 --- /dev/null +++ b/dashboard/src/App.tsx @@ -0,0 +1,33 @@ +import * as React from 'react'; +import { Routes, Route, Navigate } from 'react-router-dom'; +import { AuthProvider } from '@/lib/auth'; +import { RequireAuth } from '@/components/RequireAuth'; +import { AppShell } from '@/components/layout/AppShell'; +import Login from '@/pages/Login'; +import Agents from '@/pages/Agents'; +import AgentDetail from '@/pages/AgentDetail'; +import Credentials from '@/pages/Credentials'; +import AuditLog from '@/pages/AuditLog'; +import Health from '@/pages/Health'; + +/** Top-level router — defines all application routes. */ +export default function App(): React.JSX.Element { + return ( + + + } /> + }> + }> + } /> + } /> + } /> + } /> + } /> + + + } /> + } /> + + + ); +} diff --git a/dashboard/src/components/RequireAuth.tsx b/dashboard/src/components/RequireAuth.tsx new file mode 100644 index 0000000..99b0803 --- /dev/null +++ b/dashboard/src/components/RequireAuth.tsx @@ -0,0 +1,11 @@ +import * as React from 'react'; +import { Navigate, Outlet } from 'react-router-dom'; +import { isAuthenticated } from '@/lib/auth'; + +/** Redirects to /dashboard/login if not authenticated. */ +export function RequireAuth(): React.JSX.Element { + if (!isAuthenticated()) { + return ; + } + return ; +} diff --git a/dashboard/src/components/layout/AppShell.tsx b/dashboard/src/components/layout/AppShell.tsx new file mode 100644 index 0000000..b0db26c --- /dev/null +++ b/dashboard/src/components/layout/AppShell.tsx @@ -0,0 +1,62 @@ +import * as React from 'react'; +import { NavLink, Outlet } from 'react-router-dom'; +import { cn } from '@/lib/utils'; +import { useAuth } from '@/lib/auth'; + +interface NavItem { + to: string; + label: string; +} + +const NAV_ITEMS: NavItem[] = [ + { to: '/dashboard/agents', label: 'Agents' }, + { to: '/dashboard/audit', label: 'Audit Log' }, + { to: '/dashboard/health', label: 'Health' }, +]; + +/** + * Outer application shell: top navigation bar and main content area. + * Renders the active page via . + */ +export function AppShell(): React.JSX.Element { + const { logout } = useAuth(); + + return ( +
+
+
+
+ SentryAgent.ai + +
+ +
+
+
+ +
+
+ ); +} diff --git a/dashboard/src/components/ui/badge.tsx b/dashboard/src/components/ui/badge.tsx new file mode 100644 index 0000000..e3c9ff3 --- /dev/null +++ b/dashboard/src/components/ui/badge.tsx @@ -0,0 +1,27 @@ +import * as React from 'react'; +import { cn } from '@/lib/utils'; + +type BadgeVariant = 'default' | 'success' | 'warning' | 'danger' | 'muted'; + +interface BadgeProps { + variant?: BadgeVariant; + children: React.ReactNode; + className?: string; +} + +const variantClasses: Record = { + default: 'bg-brand-100 text-brand-700', + success: 'bg-green-100 text-green-700', + warning: 'bg-yellow-100 text-yellow-700', + danger: 'bg-red-100 text-red-700', + muted: 'bg-slate-100 text-slate-600', +}; + +/** Small status badge. */ +export function Badge({ variant = 'default', children, className }: BadgeProps): React.JSX.Element { + return ( + + {children} + + ); +} diff --git a/dashboard/src/components/ui/button.tsx b/dashboard/src/components/ui/button.tsx new file mode 100644 index 0000000..364b6e6 --- /dev/null +++ b/dashboard/src/components/ui/button.tsx @@ -0,0 +1,65 @@ +import * as React from 'react'; +import { cn } from '@/lib/utils'; + +type Variant = 'default' | 'destructive' | 'outline' | 'ghost'; +type Size = 'sm' | 'md' | 'lg'; + +interface ButtonProps extends React.ButtonHTMLAttributes { + variant?: Variant; + size?: Size; + loading?: boolean; +} + +const variantClasses: Record = { + default: 'bg-brand-600 text-white hover:bg-brand-700 focus:ring-brand-500', + destructive: 'bg-red-600 text-white hover:bg-red-700 focus:ring-red-500', + outline: 'border border-slate-300 bg-white text-slate-700 hover:bg-slate-50 focus:ring-brand-500', + ghost: 'text-slate-600 hover:bg-slate-100 hover:text-slate-900 focus:ring-brand-500', +}; + +const sizeClasses: Record = { + sm: 'px-3 py-1.5 text-sm', + md: 'px-4 py-2 text-sm', + lg: 'px-6 py-3 text-base', +}; + +/** + * Reusable button component with variant and size support. + * + * @param variant - Visual style: default | destructive | outline | ghost + * @param size - Size: sm | md | lg + * @param loading - When true, shows a spinner and disables the button + */ +export function Button({ + variant = 'default', + size = 'md', + loading = false, + className, + children, + disabled, + ...props +}: ButtonProps): React.JSX.Element { + return ( + + ); +} diff --git a/dashboard/src/components/ui/dialog.tsx b/dashboard/src/components/ui/dialog.tsx new file mode 100644 index 0000000..fa61070 --- /dev/null +++ b/dashboard/src/components/ui/dialog.tsx @@ -0,0 +1,45 @@ +import * as React from 'react'; +import { Button } from './button'; + +interface DialogProps { + open: boolean; + title: string; + description: string; + confirmLabel?: string; + cancelLabel?: string; + variant?: 'default' | 'destructive'; + onConfirm: () => void; + onCancel: () => void; +} + +/** + * Modal confirmation dialog for destructive actions (suspend, revoke, rotate). + */ +export function ConfirmDialog({ + open, + title, + description, + confirmLabel = 'Confirm', + cancelLabel = 'Cancel', + variant = 'default', + onConfirm, + onCancel, +}: DialogProps): React.JSX.Element | null { + if (!open) return null; + + return ( +
+
+
+

{title}

+

{description}

+
+ + +
+
+
+ ); +} diff --git a/dashboard/src/index.css b/dashboard/src/index.css new file mode 100644 index 0000000..b940d78 --- /dev/null +++ b/dashboard/src/index.css @@ -0,0 +1,26 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +@layer base { + :root { + --background: 0 0% 100%; + --foreground: 222.2 84% 4.9%; + --muted: 210 40% 96.1%; + --muted-foreground: 215.4 16.3% 46.9%; + --border: 214.3 31.8% 91.4%; + --input: 214.3 31.8% 91.4%; + --ring: 198 89% 48%; + --radius: 0.5rem; + } +} + +* { + box-sizing: border-box; +} + +body { + font-family: system-ui, -apple-system, sans-serif; + background-color: #f8fafc; + color: #0f172a; +} diff --git a/dashboard/src/lib/auth.tsx b/dashboard/src/lib/auth.tsx new file mode 100644 index 0000000..9a4a4c0 --- /dev/null +++ b/dashboard/src/lib/auth.tsx @@ -0,0 +1,109 @@ +import { TokenManager } from '@sentryagent/idp-sdk'; + +const SESSION_KEY = 'agentidp_credentials'; + +interface StoredCredentials { + clientId: string; + clientSecret: string; + baseUrl: string; +} + +/** + * Persists user credentials to sessionStorage (cleared on tab close). + */ +export function saveCredentials(creds: StoredCredentials): void { + sessionStorage.setItem(SESSION_KEY, JSON.stringify(creds)); +} + +/** + * Retrieves credentials from sessionStorage. + * Returns null if not logged in. + */ +export function loadCredentials(): StoredCredentials | null { + const raw = sessionStorage.getItem(SESSION_KEY); + if (!raw) return null; + try { + return JSON.parse(raw) as StoredCredentials; + } catch { + return null; + } +} + +/** + * Removes credentials from sessionStorage (logout). + */ +export function clearCredentials(): void { + sessionStorage.removeItem(SESSION_KEY); +} + +/** + * Returns true if the user has stored credentials. + */ +export function isAuthenticated(): boolean { + return loadCredentials() !== null; +} + +/** + * Validates stored credentials by requesting a token. + * Returns true if successful; false on auth failure. + */ +export async function validateCredentials(creds: StoredCredentials): Promise { + try { + const tm = new TokenManager(creds.baseUrl, creds.clientId, creds.clientSecret, 'agents:read agents:write tokens:read audit:read'); + await tm.getToken(); + return true; + } catch { + return false; + } +} + +// ── React context ────────────────────────────────────────────────────────────── + +import * as React from 'react'; +import { useNavigate } from 'react-router-dom'; + +interface AuthContextValue { + credentials: StoredCredentials | null; + login: (creds: StoredCredentials) => Promise; + logout: () => void; +} + +const AuthContext = React.createContext(null); + +/** + * Provides authentication state to the application. + * Reads initial state from sessionStorage on mount. + */ +export function AuthProvider({ children }: { children: React.ReactNode }): React.JSX.Element { + const [credentials, setCredentials] = React.useState(loadCredentials); + const navigate = useNavigate(); + + const login = React.useCallback(async (creds: StoredCredentials): Promise => { + const valid = await validateCredentials(creds); + if (valid) { + saveCredentials(creds); + setCredentials(creds); + } + return valid; + }, []); + + const logout = React.useCallback((): void => { + clearCredentials(); + setCredentials(null); + navigate('/dashboard/login'); + }, [navigate]); + + const value = React.useMemo(() => ({ credentials, login, logout }), [credentials, login, logout]); + + return {children}; +} + +/** + * Returns the current authentication context. + * Must be used inside . + */ +export function useAuth(): AuthContextValue { + const ctx = React.useContext(AuthContext); + if (!ctx) throw new Error('useAuth must be used within AuthProvider'); + return ctx; +} diff --git a/dashboard/src/lib/client.ts b/dashboard/src/lib/client.ts new file mode 100644 index 0000000..f297de0 --- /dev/null +++ b/dashboard/src/lib/client.ts @@ -0,0 +1,18 @@ +import { AgentIdPClient } from '@sentryagent/idp-sdk'; +import { loadCredentials } from './auth'; + +/** + * Returns an AgentIdPClient configured with credentials from sessionStorage. + * Throws if not authenticated (caller must ensure login first). + */ +export function getClient(): AgentIdPClient { + const creds = loadCredentials(); + if (!creds) { + throw new Error('Not authenticated. Please log in.'); + } + return new AgentIdPClient({ + baseUrl: creds.baseUrl, + clientId: creds.clientId, + clientSecret: creds.clientSecret, + }); +} diff --git a/dashboard/src/lib/utils.ts b/dashboard/src/lib/utils.ts new file mode 100644 index 0000000..150a20a --- /dev/null +++ b/dashboard/src/lib/utils.ts @@ -0,0 +1,7 @@ +import { clsx, type ClassValue } from 'clsx'; +import { twMerge } from 'tailwind-merge'; + +/** Merges Tailwind class names, handling conflicts correctly. */ +export function cn(...inputs: ClassValue[]): string { + return twMerge(clsx(inputs)); +} diff --git a/dashboard/src/main.tsx b/dashboard/src/main.tsx new file mode 100644 index 0000000..c68f490 --- /dev/null +++ b/dashboard/src/main.tsx @@ -0,0 +1,13 @@ +import React from 'react'; +import ReactDOM from 'react-dom/client'; +import { BrowserRouter } from 'react-router-dom'; +import App from './App'; +import './index.css'; + +ReactDOM.createRoot(document.getElementById('root')!).render( + + + + + , +); diff --git a/dashboard/src/pages/AgentDetail.tsx b/dashboard/src/pages/AgentDetail.tsx new file mode 100644 index 0000000..2d9fe56 --- /dev/null +++ b/dashboard/src/pages/AgentDetail.tsx @@ -0,0 +1,222 @@ +import * as React from 'react'; +import { useParams, useNavigate } from 'react-router-dom'; +import type { Agent } from '@sentryagent/idp-sdk'; +import { Badge } from '@/components/ui/badge'; +import { Button } from '@/components/ui/button'; +import { ConfirmDialog } from '@/components/ui/dialog'; +import { getClient } from '@/lib/client'; + +type BadgeVariant = 'success' | 'warning' | 'danger'; + +/** Maps AgentStatus to a Badge variant. */ +function statusVariant(status: Agent['status']): BadgeVariant { + switch (status) { + case 'active': return 'success'; + case 'suspended': return 'warning'; + case 'decommissioned': return 'danger'; + } +} + +/** Formats an ISO timestamp to a readable local date-time string. */ +function formatDateTime(iso: string): string { + return new Date(iso).toLocaleString(undefined, { + year: 'numeric', month: 'short', day: 'numeric', + hour: '2-digit', minute: '2-digit', + }); +} + +interface DetailRowProps { + label: string; + value: string; +} + +/** Single label/value row in the detail card. */ +function DetailRow({ label, value }: DetailRowProps): React.JSX.Element { + return ( +
+
{label}
+
{value}
+
+ ); +} + +type DialogAction = 'suspend' | 'reactivate'; + +/** + * Agent Detail page — shows all agent fields and provides suspend/reactivate actions. + * Route: /dashboard/agents/:agentId + */ +export default function AgentDetail(): React.JSX.Element { + const { agentId } = useParams<{ agentId: string }>(); + const navigate = useNavigate(); + + const [agent, setAgent] = React.useState(null); + const [loading, setLoading] = React.useState(true); + const [error, setError] = React.useState(null); + const [actionLoading, setActionLoading] = React.useState(false); + const [dialog, setDialog] = React.useState(null); + + React.useEffect(() => { + if (!agentId) return; + let cancelled = false; + setLoading(true); + setError(null); + + const fetchAgent = async (): Promise => { + try { + const result = await getClient().agents.getAgent(agentId); + if (!cancelled) setAgent(result); + } catch (err) { + if (!cancelled) setError(err instanceof Error ? err.message : 'Failed to load agent.'); + } finally { + if (!cancelled) setLoading(false); + } + }; + + void fetchAgent(); + return () => { cancelled = true; }; + }, [agentId]); + + const handleAction = React.useCallback( + async (action: DialogAction): Promise => { + if (!agentId) return; + setActionLoading(true); + setDialog(null); + try { + const newStatus = action === 'suspend' ? 'suspended' : 'active'; + const updated = await getClient().agents.updateAgent(agentId, { status: newStatus }); + setAgent(updated); + } catch (err) { + setError(err instanceof Error ? err.message : 'Action failed.'); + } finally { + setActionLoading(false); + } + }, + [agentId], + ); + + if (loading) { + return ( +
+ {Array.from({ length: 6 }).map((_, i) => ( +
+ ))} +
+ ); + } + + if (error || !agent) { + return ( +
+ {error ?? 'Agent not found.'} +
+ ); + } + + const dialogConfig = dialog === 'suspend' + ? { + title: `Suspend agent ${agent.email}?`, + description: `Suspending ${agent.email} means it will no longer be able to authenticate.`, + confirmLabel: 'Suspend', + variant: 'destructive' as const, + } + : { + title: `Reactivate agent ${agent.email}?`, + description: `Reactivating ${agent.email} will allow it to authenticate again.`, + confirmLabel: 'Reactivate', + variant: 'default' as const, + }; + + return ( +
+ {/* Back navigation */} + + +
+
+

{agent.email}

+

Agent ID: {agent.agentId}

+
+ {agent.status} +
+ + {error && ( +
+ {error} +
+ )} + + {/* Detail card */} +
+
+ + + + + + + + + + +
+
+ + {/* Actions */} + {agent.status !== 'decommissioned' && ( +
+ {agent.status === 'active' && ( + + )} + {agent.status === 'suspended' && ( + + )} +
+ )} + + {/* Credentials section */} +
+

Credentials

+

+ Manage client secrets for this agent. Rotate or revoke credentials as needed. +

+ +
+ + {/* Confirm dialog */} + {dialog !== null && ( + { void handleAction(dialog); }} + onCancel={() => { setDialog(null); }} + /> + )} +
+ ); +} diff --git a/dashboard/src/pages/Agents.tsx b/dashboard/src/pages/Agents.tsx new file mode 100644 index 0000000..e7b1218 --- /dev/null +++ b/dashboard/src/pages/Agents.tsx @@ -0,0 +1,204 @@ +import * as React from 'react'; +import { useNavigate } from 'react-router-dom'; +import type { Agent, AgentStatus } from '@sentryagent/idp-sdk'; +import { Badge } from '@/components/ui/badge'; +import { getClient } from '@/lib/client'; + +const PAGE_LIMIT = 20; + +/** Maps AgentStatus to a Badge variant. */ +function statusVariant(status: AgentStatus): 'success' | 'warning' | 'danger' | 'muted' { + switch (status) { + case 'active': return 'success'; + case 'suspended': return 'warning'; + case 'decommissioned': return 'danger'; + } +} + +/** Formats an ISO timestamp to a short local date string. */ +function formatDate(iso: string): string { + return new Date(iso).toLocaleDateString(undefined, { year: 'numeric', month: 'short', day: 'numeric' }); +} + +/** Skeleton row shown while loading. */ +function SkeletonRow(): React.JSX.Element { + return ( + + {Array.from({ length: 6 }).map((_, i) => ( + +
+ + ))} + + ); +} + +/** + * Agents list page — displays all registered agents with search, status filter, and pagination. + * Clicking a row navigates to the Agent Detail page. + */ +export default function Agents(): React.JSX.Element { + const navigate = useNavigate(); + + const [agents, setAgents] = React.useState([]); + const [total, setTotal] = React.useState(0); + const [page, setPage] = React.useState(1); + const [loading, setLoading] = React.useState(false); + const [error, setError] = React.useState(null); + + // Filters (client-side email search, server-side status) + const [searchInput, setSearchInput] = React.useState(''); + const [debouncedSearch, setDebouncedSearch] = React.useState(''); + const [statusFilter, setStatusFilter] = React.useState(''); + + // Debounce search input 300ms + React.useEffect(() => { + const timer = setTimeout(() => { setDebouncedSearch(searchInput); }, 300); + return () => { clearTimeout(timer); }; + }, [searchInput]); + + // Reset to page 1 on filter change + React.useEffect(() => { + setPage(1); + }, [debouncedSearch, statusFilter]); + + React.useEffect(() => { + let cancelled = false; + setLoading(true); + setError(null); + + const fetchAgents = async (): Promise => { + try { + const client = getClient(); + const result = await client.agents.listAgents({ + page, + limit: PAGE_LIMIT, + status: statusFilter !== '' ? statusFilter : undefined, + }); + if (!cancelled) { + setAgents(result.data); + setTotal(result.total); + } + } catch (err) { + if (!cancelled) { + setError(err instanceof Error ? err.message : 'Failed to load agents.'); + } + } finally { + if (!cancelled) setLoading(false); + } + }; + + void fetchAgents(); + return () => { cancelled = true; }; + }, [page, statusFilter]); + + // Client-side email filter applied after API results arrive + const filteredAgents = React.useMemo(() => { + if (!debouncedSearch.trim()) return agents; + const lower = debouncedSearch.toLowerCase(); + return agents.filter((a) => a.email.toLowerCase().includes(lower)); + }, [agents, debouncedSearch]); + + const totalPages = Math.max(1, Math.ceil(total / PAGE_LIMIT)); + + return ( +
+
+

Agents

+
+ { setSearchInput(e.target.value); }} + placeholder="Search by email…" + className="w-60 rounded-md border border-slate-300 px-3 py-2 text-sm focus:border-brand-500 focus:outline-none focus:ring-1 focus:ring-brand-500" + /> + +
+
+ + {error && ( +
+ {error} +
+ )} + +
+ + + + {['Name (Email)', 'Type', 'Status', 'Environment', 'Owner', 'Created'].map((col) => ( + + ))} + + + + {loading + ? Array.from({ length: 5 }).map((_, i) => ) + : filteredAgents.length === 0 + ? ( + + + + ) + : filteredAgents.map((agent) => ( + { navigate(`/dashboard/agents/${agent.agentId}`); }} + className="cursor-pointer hover:bg-slate-50" + > + + + + + + + + )) + } + +
+ {col} +
+ No agents found. +
{agent.email}{agent.agentType} + {agent.status} + {agent.deploymentEnv}{agent.owner}{formatDate(agent.createdAt)}
+
+ + {/* Pagination */} + {!loading && total > 0 && ( +
+ + Page {page} of {totalPages} ({total} total) + +
+ + +
+
+ )} +
+ ); +} diff --git a/dashboard/src/pages/AuditLog.tsx b/dashboard/src/pages/AuditLog.tsx new file mode 100644 index 0000000..fcc30cc --- /dev/null +++ b/dashboard/src/pages/AuditLog.tsx @@ -0,0 +1,223 @@ +import * as React from 'react'; +import type { AuditEvent, AuditAction, AuditOutcome } from '@sentryagent/idp-sdk'; +import { Badge } from '@/components/ui/badge'; +import { getClient } from '@/lib/client'; + +const PAGE_LIMIT = 20; + +/** All AuditAction values for the filter dropdown. */ +const AUDIT_ACTIONS: AuditAction[] = [ + 'agent.created', + 'agent.updated', + 'agent.decommissioned', + 'agent.suspended', + 'agent.reactivated', + 'token.issued', + 'token.revoked', + 'token.introspected', + 'credential.generated', + 'credential.rotated', + 'credential.revoked', + 'auth.failed', +]; + +/** Formats an ISO timestamp to a readable local date-time string. */ +function formatDateTime(iso: string): string { + return new Date(iso).toLocaleString(undefined, { + year: 'numeric', month: 'short', day: 'numeric', + hour: '2-digit', minute: '2-digit', second: '2-digit', + }); +} + +/** Truncates a string to a maximum length with ellipsis. */ +function truncate(value: string, maxLen = 24): string { + return value.length > maxLen ? `${value.slice(0, maxLen)}…` : value; +} + +/** + * Audit Log page — displays audit events with filters for agent, action, outcome, and date range. + * Route: /dashboard/audit + */ +export default function AuditLog(): React.JSX.Element { + const [events, setEvents] = React.useState([]); + const [total, setTotal] = React.useState(0); + const [page, setPage] = React.useState(1); + const [loading, setLoading] = React.useState(false); + const [error, setError] = React.useState(null); + + // Filters + const [agentIdFilter, setAgentIdFilter] = React.useState(''); + const [actionFilter, setActionFilter] = React.useState(''); + const [outcomeFilter, setOutcomeFilter] = React.useState(''); + const [fromDate, setFromDate] = React.useState(''); + const [toDate, setToDate] = React.useState(''); + + // Reset to page 1 on filter change + React.useEffect(() => { + setPage(1); + }, [agentIdFilter, actionFilter, outcomeFilter, fromDate, toDate]); + + React.useEffect(() => { + let cancelled = false; + setLoading(true); + setError(null); + + const fetchEvents = async (): Promise => { + try { + const result = await getClient().audit.queryAuditLog({ + page, + limit: PAGE_LIMIT, + agentId: agentIdFilter.trim() || undefined, + action: actionFilter !== '' ? actionFilter : undefined, + outcome: outcomeFilter !== '' ? outcomeFilter : undefined, + fromDate: fromDate || undefined, + toDate: toDate || undefined, + }); + if (!cancelled) { + setEvents(result.data); + setTotal(result.total); + } + } catch (err) { + if (!cancelled) { + setError(err instanceof Error ? err.message : 'Failed to load audit log.'); + } + } finally { + if (!cancelled) setLoading(false); + } + }; + + void fetchEvents(); + return () => { cancelled = true; }; + }, [page, agentIdFilter, actionFilter, outcomeFilter, fromDate, toDate]); + + const totalPages = Math.max(1, Math.ceil(total / PAGE_LIMIT)); + + return ( +
+

Audit Log

+ + {/* Filters */} +
+ { setAgentIdFilter(e.target.value); }} + placeholder="Agent ID…" + className="rounded-md border border-slate-300 px-3 py-2 text-sm focus:border-brand-500 focus:outline-none focus:ring-1 focus:ring-brand-500" + /> + + + { setFromDate(e.target.value); }} + className="rounded-md border border-slate-300 px-3 py-2 text-sm focus:border-brand-500 focus:outline-none focus:ring-1 focus:ring-brand-500" + title="From date" + /> + { setToDate(e.target.value); }} + className="rounded-md border border-slate-300 px-3 py-2 text-sm focus:border-brand-500 focus:outline-none focus:ring-1 focus:ring-brand-500" + title="To date" + /> +
+ + {error && ( +
+ {error} +
+ )} + +
+ + + + {['Timestamp', 'Agent ID', 'Action', 'Outcome', 'IP Address'].map((col) => ( + + ))} + + + + {loading + ? Array.from({ length: 5 }).map((_, i) => ( + + {Array.from({ length: 5 }).map((__, j) => ( + + ))} + + )) + : events.length === 0 + ? ( + + + + ) + : events.map((event) => ( + + + + + + + + )) + } + +
+ {col} +
+
+
+ No audit events found. +
{formatDateTime(event.timestamp)}{truncate(event.agentId)}{event.action} + + {event.outcome} + + {event.ipAddress}
+
+ + {/* Pagination */} + {!loading && total > 0 && ( +
+ + Page {page} of {totalPages} ({total} total) + +
+ + +
+
+ )} +
+ ); +} diff --git a/dashboard/src/pages/Credentials.tsx b/dashboard/src/pages/Credentials.tsx new file mode 100644 index 0000000..8eb81b5 --- /dev/null +++ b/dashboard/src/pages/Credentials.tsx @@ -0,0 +1,264 @@ +import * as React from 'react'; +import { useParams, useNavigate } from 'react-router-dom'; +import type { Credential, CredentialWithSecret } from '@sentryagent/idp-sdk'; +import { Badge } from '@/components/ui/badge'; +import { Button } from '@/components/ui/button'; +import { ConfirmDialog } from '@/components/ui/dialog'; +import { getClient } from '@/lib/client'; + +/** Truncates a string to a maximum length with ellipsis. */ +function truncate(value: string, maxLen = 16): string { + return value.length > maxLen ? `${value.slice(0, maxLen)}…` : value; +} + +/** Formats an ISO timestamp to a short local date string. */ +function formatDate(iso: string): string { + return new Date(iso).toLocaleDateString(undefined, { year: 'numeric', month: 'short', day: 'numeric' }); +} + +interface NewSecretBoxProps { + secret: string; + onDismiss: () => void; +} + +/** + * Displays a newly issued client secret exactly once. + * Provides a copy button and a dismiss button. + */ +function NewSecretBox({ secret, onDismiss }: NewSecretBoxProps): React.JSX.Element { + const [copied, setCopied] = React.useState(false); + + const handleCopy = React.useCallback(async (): Promise => { + await navigator.clipboard.writeText(secret); + setCopied(true); + setTimeout(() => { setCopied(false); }, 2000); + }, [secret]); + + return ( +
+

+ New client secret — copy it now. It will not be shown again. +

+
+ + {secret} + + +
+ +
+ ); +} + +type DialogAction = { type: 'rotate'; credentialId: string } | { type: 'revoke'; credentialId: string }; + +/** + * Credentials page — lists all credentials for an agent with rotate/revoke actions. + * Route: /dashboard/agents/:agentId/credentials + */ +export default function Credentials(): React.JSX.Element { + const { agentId } = useParams<{ agentId: string }>(); + const navigate = useNavigate(); + + const [credentials, setCredentials] = React.useState([]); + const [loading, setLoading] = React.useState(true); + const [error, setError] = React.useState(null); + const [actionLoading, setActionLoading] = React.useState(false); + const [dialog, setDialog] = React.useState(null); + const [newSecret, setNewSecret] = React.useState(null); + + const fetchCredentials = React.useCallback(async (): Promise => { + if (!agentId) return; + setLoading(true); + setError(null); + try { + const result = await getClient().credentials.listCredentials(agentId); + setCredentials(result.data); + } catch (err) { + setError(err instanceof Error ? err.message : 'Failed to load credentials.'); + } finally { + setLoading(false); + } + }, [agentId]); + + React.useEffect(() => { + void fetchCredentials(); + }, [fetchCredentials]); + + const handleGenerate = React.useCallback(async (): Promise => { + if (!agentId) return; + setActionLoading(true); + setError(null); + try { + const result = await getClient().credentials.generateCredential(agentId, {}); + setNewSecret(result); + await fetchCredentials(); + } catch (err) { + setError(err instanceof Error ? err.message : 'Failed to generate credential.'); + } finally { + setActionLoading(false); + } + }, [agentId, fetchCredentials]); + + const handleConfirm = React.useCallback(async (): Promise => { + if (!dialog || !agentId) return; + setActionLoading(true); + setDialog(null); + setError(null); + try { + if (dialog.type === 'rotate') { + const result = await getClient().credentials.rotateCredential(agentId, dialog.credentialId); + setNewSecret(result); + } else { + await getClient().credentials.revokeCredential(agentId, dialog.credentialId); + } + await fetchCredentials(); + } catch (err) { + setError(err instanceof Error ? err.message : `Failed to ${dialog.type} credential.`); + } finally { + setActionLoading(false); + } + }, [dialog, agentId, fetchCredentials]); + + const dialogConfig = React.useMemo(() => { + if (!dialog) return null; + if (dialog.type === 'rotate') { + return { + title: 'Rotate credential?', + description: 'The existing secret will be invalidated immediately. You will receive a new secret — store it securely.', + confirmLabel: 'Rotate', + variant: 'destructive' as const, + }; + } + return { + title: 'Revoke credential?', + description: 'This will permanently revoke the credential. This cannot be undone.', + confirmLabel: 'Revoke', + variant: 'destructive' as const, + }; + }, [dialog]); + + return ( +
+ {/* Back navigation */} + + +
+

Credentials

+ +
+ + {error && ( +
+ {error} +
+ )} + + {/* New secret display — shown once */} + {newSecret !== null && ( + { setNewSecret(null); }} + /> + )} + + {/* Credentials table */} +
+ + + + {['Credential ID', 'Status', 'Created', 'Actions'].map((col) => ( + + ))} + + + + {loading ? ( + Array.from({ length: 3 }).map((_, i) => ( + + {Array.from({ length: 4 }).map((__, j) => ( + + ))} + + )) + ) : credentials.length === 0 ? ( + + + + ) : credentials.map((cred) => ( + + + + + + + ))} + +
+ {col} +
+
+
+ No credentials found. Generate one above. +
+ {truncate(cred.credentialId, 24)} + + + {cred.status} + + {formatDate(cred.createdAt)} + {cred.status === 'active' && ( +
+ + +
+ )} +
+
+ + {/* Confirm dialog */} + {dialog !== null && dialogConfig !== null && ( + { void handleConfirm(); }} + onCancel={() => { setDialog(null); }} + /> + )} +
+ ); +} diff --git a/dashboard/src/pages/Health.tsx b/dashboard/src/pages/Health.tsx new file mode 100644 index 0000000..bb56cd1 --- /dev/null +++ b/dashboard/src/pages/Health.tsx @@ -0,0 +1,173 @@ +import * as React from 'react'; + +/** Shape of the /health API response. */ +interface HealthResponse { + status: 'ok' | 'degraded'; + version?: string; + uptime?: number; + services: { + postgres: 'connected' | 'disconnected'; + redis: 'connected' | 'disconnected'; + }; +} + +type ServiceStatus = 'connected' | 'disconnected' | 'unknown'; + +interface HealthState { + postgres: ServiceStatus; + redis: ServiceStatus; + version: string | null; + uptime: number | null; + lastChecked: Date | null; + reachable: boolean; +} + +const initialState: HealthState = { + postgres: 'unknown', + redis: 'unknown', + version: null, + uptime: null, + lastChecked: null, + reachable: true, +}; + +/** Formats seconds into a human-readable uptime string. */ +function formatUptime(seconds: number): string { + const days = Math.floor(seconds / 86400); + const hours = Math.floor((seconds % 86400) / 3600); + const minutes = Math.floor((seconds % 3600) / 60); + const parts: string[] = []; + if (days > 0) parts.push(`${days}d`); + if (hours > 0) parts.push(`${hours}h`); + parts.push(`${minutes}m`); + return parts.join(' '); +} + +interface StatusCardProps { + label: string; + status: ServiceStatus; +} + +/** Card displaying the connectivity status of a single service. */ +function StatusCard({ label, status }: StatusCardProps): React.JSX.Element { + const isConnected = status === 'connected'; + const isUnknown = status === 'unknown'; + + return ( +
+

{label}

+
+ + + {isUnknown ? 'Checking…' : isConnected ? 'Connected' : 'Disconnected'} + +
+
+ ); +} + +/** + * Health page — shows PostgreSQL and Redis connectivity status. + * Polls GET /health every 30 seconds. No authentication required. + * Route: /dashboard/health + */ +export default function Health(): React.JSX.Element { + const [health, setHealth] = React.useState(initialState); + const [loading, setLoading] = React.useState(true); + + const checkHealth = React.useCallback(async (): Promise => { + try { + const response = await fetch('/health'); + const data = (await response.json()) as HealthResponse; + + setHealth({ + postgres: data.services?.postgres ?? 'unknown', + redis: data.services?.redis ?? 'unknown', + version: data.version ?? null, + uptime: data.uptime ?? null, + lastChecked: new Date(), + reachable: true, + }); + } catch { + setHealth((prev) => ({ + ...prev, + postgres: 'disconnected', + redis: 'disconnected', + lastChecked: new Date(), + reachable: false, + })); + } finally { + setLoading(false); + } + }, []); + + React.useEffect(() => { + void checkHealth(); + const interval = setInterval(() => { void checkHealth(); }, 30_000); + return () => { clearInterval(interval); }; + }, [checkHealth]); + + return ( +
+
+

System Health

+ +
+ + {!health.reachable && ( +
+ API is unreachable. Check that the server is running. +
+ )} + +
+ + +
+ + {/* Metadata */} + {(health.version !== null || health.uptime !== null) && ( +
+

API Details

+
+ {health.version !== null && ( +
+
Version
+
{health.version}
+
+ )} + {health.uptime !== null && ( +
+
Uptime
+
{formatUptime(health.uptime)}
+
+ )} +
+
+ )} + + {/* Last checked */} + {health.lastChecked !== null && ( +

+ Last checked: {health.lastChecked.toLocaleTimeString()} — auto-refreshes every 30 seconds +

+ )} +
+ ); +} diff --git a/dashboard/src/pages/Login.tsx b/dashboard/src/pages/Login.tsx new file mode 100644 index 0000000..3d7ae8a --- /dev/null +++ b/dashboard/src/pages/Login.tsx @@ -0,0 +1,109 @@ +import * as React from 'react'; +import { useNavigate } from 'react-router-dom'; +import { Button } from '@/components/ui/button'; +import { useAuth } from '@/lib/auth'; + +/** + * Login page — accepts API Base URL, Client ID, and Client Secret. + * Validates credentials against the AgentIdP token endpoint before persisting. + */ +export default function Login(): React.JSX.Element { + const { login } = useAuth(); + const navigate = useNavigate(); + + const [baseUrl, setBaseUrl] = React.useState(window.location.origin); + const [clientId, setClientId] = React.useState(''); + const [clientSecret, setClientSecret] = React.useState(''); + const [loading, setLoading] = React.useState(false); + const [error, setError] = React.useState(null); + + const handleSubmit = React.useCallback( + async (e: React.FormEvent): Promise => { + e.preventDefault(); + setError(null); + setLoading(true); + + try { + const success = await login({ baseUrl: baseUrl.trim(), clientId: clientId.trim(), clientSecret }); + if (success) { + navigate('/dashboard/agents', { replace: true }); + } else { + setError('Invalid credentials. Please check your Client ID and secret.'); + setClientSecret(''); + } + } finally { + setLoading(false); + } + }, + [login, navigate, baseUrl, clientId, clientSecret], + ); + + return ( +
+
+
+

SentryAgent.ai

+

AgentIdP Dashboard — Sign In

+
+ +
{ void handleSubmit(e); }} className="space-y-5"> +
+ + { setBaseUrl(e.target.value); }} + className="mt-1 block w-full rounded-md border border-slate-300 px-3 py-2 text-sm shadow-sm focus:border-brand-500 focus:outline-none focus:ring-1 focus:ring-brand-500" + placeholder="https://api.example.com" + /> +
+ +
+ + { setClientId(e.target.value); }} + className="mt-1 block w-full rounded-md border border-slate-300 px-3 py-2 text-sm shadow-sm focus:border-brand-500 focus:outline-none focus:ring-1 focus:ring-brand-500" + placeholder="agent-uuid" + autoComplete="username" + /> +
+ +
+ + { setClientSecret(e.target.value); }} + className="mt-1 block w-full rounded-md border border-slate-300 px-3 py-2 text-sm shadow-sm focus:border-brand-500 focus:outline-none focus:ring-1 focus:ring-brand-500" + autoComplete="current-password" + /> +
+ + {error && ( +

+ {error} +

+ )} + + +
+
+
+ ); +} diff --git a/dashboard/src/vite-env.d.ts b/dashboard/src/vite-env.d.ts new file mode 100644 index 0000000..11f02fe --- /dev/null +++ b/dashboard/src/vite-env.d.ts @@ -0,0 +1 @@ +/// diff --git a/dashboard/tailwind.config.js b/dashboard/tailwind.config.js new file mode 100644 index 0000000..03e77d9 --- /dev/null +++ b/dashboard/tailwind.config.js @@ -0,0 +1,19 @@ +/** @type {import('tailwindcss').Config} */ +export default { + content: ['./index.html', './src/**/*.{ts,tsx}'], + theme: { + extend: { + colors: { + brand: { + 50: '#f0f9ff', + 100: '#e0f2fe', + 500: '#0ea5e9', + 600: '#0284c7', + 700: '#0369a1', + 900: '#0c4a6e', + }, + }, + }, + }, + plugins: [], +}; diff --git a/dashboard/tsconfig.app.json b/dashboard/tsconfig.app.json new file mode 100644 index 0000000..a95e4ee --- /dev/null +++ b/dashboard/tsconfig.app.json @@ -0,0 +1,25 @@ +{ + "compilerOptions": { + "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.app.tsbuildinfo", + "target": "ES2020", + "useDefineForClassFields": true, + "lib": ["ES2020", "DOM", "DOM.Iterable"], + "module": "ESNext", + "skipLibCheck": true, + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "isolatedModules": true, + "moduleDetection": "force", + "noEmit": true, + "jsx": "react-jsx", + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedSideEffectImports": true, + "paths": { + "@/*": ["./src/*"] + } + }, + "include": ["src"] +} diff --git a/dashboard/tsconfig.json b/dashboard/tsconfig.json new file mode 100644 index 0000000..1ffef60 --- /dev/null +++ b/dashboard/tsconfig.json @@ -0,0 +1,7 @@ +{ + "files": [], + "references": [ + { "path": "./tsconfig.app.json" }, + { "path": "./tsconfig.node.json" } + ] +} diff --git a/dashboard/tsconfig.node.json b/dashboard/tsconfig.node.json new file mode 100644 index 0000000..1dba6de --- /dev/null +++ b/dashboard/tsconfig.node.json @@ -0,0 +1,20 @@ +{ + "compilerOptions": { + "tsBuildInfoFile": "./node_modules/.tmp/tsconfig.node.tsbuildinfo", + "target": "ES2022", + "lib": ["ES2023"], + "module": "ESNext", + "skipLibCheck": true, + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "isolatedModules": true, + "moduleDetection": "force", + "noEmit": true, + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true, + "noUncheckedSideEffectImports": true + }, + "include": ["vite.config.ts"] +} diff --git a/dashboard/vite.config.ts b/dashboard/vite.config.ts new file mode 100644 index 0000000..ed5d450 --- /dev/null +++ b/dashboard/vite.config.ts @@ -0,0 +1,17 @@ +import { defineConfig } from 'vite'; +import react from '@vitejs/plugin-react'; +import path from 'path'; + +export default defineConfig({ + plugins: [react()], + base: '/dashboard/', + resolve: { + alias: { + '@': path.resolve(__dirname, './src'), + }, + }, + build: { + outDir: 'dist', + emptyOutDir: true, + }, +}); diff --git a/docker-compose.monitoring.yml b/docker-compose.monitoring.yml new file mode 100644 index 0000000..96bc560 --- /dev/null +++ b/docker-compose.monitoring.yml @@ -0,0 +1,50 @@ +version: '3.8' + +# Monitoring overlay — extend the base docker-compose.yml +# Usage: docker compose -f docker-compose.yml -f docker-compose.monitoring.yml up + +services: + prometheus: + image: prom/prometheus:v2.53.0 + container_name: agentidp_prometheus + volumes: + - ./monitoring/prometheus/prometheus.yml:/etc/prometheus/prometheus.yml:ro + - prometheus_data:/prometheus + command: + - '--config.file=/etc/prometheus/prometheus.yml' + - '--storage.tsdb.path=/prometheus' + - '--web.console.libraries=/etc/prometheus/console_libraries' + - '--web.console.templates=/etc/prometheus/consoles' + - '--web.enable-lifecycle' + ports: + - '9090:9090' + networks: + - agentidp_network + restart: unless-stopped + + grafana: + image: grafana/grafana:11.2.0 + container_name: agentidp_grafana + volumes: + - grafana_data:/var/lib/grafana + - ./monitoring/grafana/provisioning:/etc/grafana/provisioning:ro + - ./monitoring/grafana/dashboards:/var/lib/grafana/dashboards:ro + environment: + - GF_SECURITY_ADMIN_PASSWORD=agentidp + - GF_USERS_ALLOW_SIGN_UP=false + - GF_AUTH_ANONYMOUS_ENABLED=false + ports: + - '3001:3000' + networks: + - agentidp_network + depends_on: + - prometheus + restart: unless-stopped + +volumes: + prometheus_data: + grafana_data: + +networks: + agentidp_network: + external: true diff --git a/docs/devops/deployment.md b/docs/devops/deployment.md new file mode 100644 index 0000000..aec5e1e --- /dev/null +++ b/docs/devops/deployment.md @@ -0,0 +1,603 @@ +# Deployment Guide — SentryAgent.ai AgentIdP + +End-to-end guide for deploying AgentIdP to AWS (primary) and GCP (secondary) using the Terraform infrastructure-as-code in `terraform/`. + +--- + +## Table of Contents + +1. [Prerequisites](#1-prerequisites) +2. [AWS Deployment](#2-aws-deployment) +3. [GCP Deployment](#3-gcp-deployment) +4. [Post-Deploy Verification](#4-post-deploy-verification) +5. [Rollback Procedure](#5-rollback-procedure) +6. [Environment Variable Reference](#6-environment-variable-reference) + +--- + +## 1. Prerequisites + +### Tools + +| Tool | Minimum Version | Install | +|------|-----------------|---------| +| Terraform | 1.6.0 | https://developer.hashicorp.com/terraform/install | +| AWS CLI | 2.13 | https://docs.aws.amazon.com/cli/latest/userguide/install-cliv2.html | +| gcloud CLI | 460.0 | https://cloud.google.com/sdk/docs/install | +| Docker | 24.0 | Required only for building and pushing images | +| openssl | any | Required for generating JWT key pairs | + +Verify all tools are available: + +```bash +terraform version +aws --version +gcloud version +docker version +openssl version +``` + +### Container Image + +Build and push the `sentryagent/agentidp` image to your registry before deploying. Terraform references the image by tag — it does not build it. + +```bash +# From the project root +docker build -t sentryagent/agentidp:1.0.0 . + +# Push to your registry (ECR example): +aws ecr get-login-password --region us-east-1 \ + | docker login --username AWS --password-stdin 123456789012.dkr.ecr.us-east-1.amazonaws.com + +docker tag sentryagent/agentidp:1.0.0 \ + 123456789012.dkr.ecr.us-east-1.amazonaws.com/sentryagent/agentidp:1.0.0 + +docker push 123456789012.dkr.ecr.us-east-1.amazonaws.com/sentryagent/agentidp:1.0.0 +``` + +Update `app_image_tag` in your `terraform.tfvars` to match. + +### JWT Key Pair + +Generate the RSA-2048 key pair used for signing and verifying JWTs: + +```bash +openssl genrsa -out jwt_private.pem 2048 +openssl rsa -in jwt_private.pem -pubout -out jwt_public.pem + +# Verify +openssl rsa -in jwt_private.pem -check -noout +``` + +Keep `jwt_private.pem` secure — treat it with the same sensitivity as a TLS private key. You will paste its contents into `terraform.tfvars`. + +--- + +## 2. AWS Deployment + +### 2.1 Configure AWS CLI + +```bash +aws configure +# Provide: AWS Access Key ID, Secret Access Key, region (e.g. us-east-1), output format (json) + +# Verify credentials +aws sts get-caller-identity +``` + +The IAM principal running Terraform requires permissions to manage: VPC, ECS, RDS, ElastiCache, ALB, IAM roles, Secrets Manager, Route 53, CloudWatch, and VPC endpoints. + +### 2.2 Provision an ACM Certificate + +The ALB requires an ACM certificate for your domain. Create it in the same region as your deployment. + +```bash +aws acm request-certificate \ + --domain-name idp.example.com \ + --validation-method DNS \ + --region us-east-1 +``` + +Complete DNS validation by adding the CNAME record shown in the ACM console. Wait for the status to become `ISSUED` before proceeding. + +```bash +# Monitor validation status +aws acm describe-certificate \ + --certificate-arn arn:aws:acm:us-east-1:123456789012:certificate/XXXX \ + --region us-east-1 \ + --query 'Certificate.Status' +``` + +### 2.3 Prepare tfvars + +```bash +cd terraform/environments/aws +cp terraform.tfvars.example terraform.tfvars +``` + +Edit `terraform.tfvars`. All fields marked `REPLACE_WITH_*` are required. Key fields: + +- `region` — AWS region (must match the ACM certificate region) +- `domain_name` — your domain (e.g. `idp.example.com`) +- `certificate_arn` — ARN from step 2.2 +- `app_image_tag` — tag of the image you pushed in step 1 +- `db_password` — strong random password (no `@`, `#`, `?`, `/` characters — they break URL parsing) +- `redis_auth_token` — minimum 16 characters, no spaces +- `jwt_private_key` — full PEM contents of `jwt_private.pem` with literal `\n` for newlines +- `jwt_public_key` — full PEM contents of `jwt_public.pem` with literal `\n` for newlines + +Example for encoding PEM keys in tfvars: + +```bash +# Output the private key as a single line with \n separators (for pasting into tfvars) +awk 'NF {printf "%s\\n", $0}' jwt_private.pem +``` + +**Never commit `terraform.tfvars` to version control.** + +### 2.4 Configure Remote State (Recommended) + +Uncomment and configure the `backend "s3"` block in `terraform/environments/aws/main.tf`: + +```hcl +backend "s3" { + bucket = "your-terraform-state-bucket" + key = "agentidp/aws/production/terraform.tfstate" + region = "us-east-1" + encrypt = true + dynamodb_table = "your-terraform-locks-table" +} +``` + +Create the S3 bucket and DynamoDB table if they do not exist: + +```bash +# S3 bucket with versioning and encryption +aws s3api create-bucket --bucket your-terraform-state-bucket --region us-east-1 +aws s3api put-bucket-versioning \ + --bucket your-terraform-state-bucket \ + --versioning-configuration Status=Enabled +aws s3api put-bucket-encryption \ + --bucket your-terraform-state-bucket \ + --server-side-encryption-configuration \ + '{"Rules":[{"ApplyServerSideEncryptionByDefault":{"SSEAlgorithm":"AES256"}}]}' + +# DynamoDB table for state locking +aws dynamodb create-table \ + --table-name your-terraform-locks-table \ + --attribute-definitions AttributeName=LockID,AttributeType=S \ + --key-schema AttributeName=LockID,KeyType=HASH \ + --billing-mode PAY_PER_REQUEST \ + --region us-east-1 +``` + +### 2.5 Terraform Init + +```bash +cd terraform/environments/aws +terraform init +``` + +Expected output: provider plugins downloaded, backend initialized. + +### 2.6 Terraform Plan + +```bash +terraform plan -out=tfplan +``` + +Review the plan carefully before applying. Expected resources on first apply: ~50–60 resources (VPC, subnets, NAT gateways, VPC endpoints, IAM roles, secrets, RDS, ElastiCache, ALB, ECS cluster, task definition, service, Route 53 record). + +### 2.7 Terraform Apply + +```bash +terraform apply tfplan +``` + +**First apply takes 20–30 minutes** — RDS Multi-AZ provisioning is the longest step (~15 min). Do not interrupt the apply. + +When complete, note the outputs: + +```bash +terraform output +``` + +Key outputs: +- `service_url` — the HTTPS URL of your deployed service +- `alb_dns_name` — ALB DNS name (verify Route 53 alias is pointing here) +- `ecs_service_name` — use for ECS deployment commands +- `cloudwatch_log_group` — where container logs appear + +### 2.8 Run Database Migrations + +After first deploy, run migrations against the new RDS instance. The easiest approach is to exec into a running ECS task: + +```bash +# Get a running task ARN +TASK_ARN=$(aws ecs list-tasks \ + --cluster sentryagent-agentidp-production \ + --service-name sentryagent-agentidp-production \ + --query 'taskArns[0]' \ + --output text) + +# Run migrations via ECS Exec (requires enableExecuteCommand on the service) +aws ecs execute-command \ + --cluster sentryagent-agentidp-production \ + --task $TASK_ARN \ + --container agentidp \ + --command "node scripts/db-migrate.js" \ + --interactive +``` + +Alternatively, run a one-off ECS task with the migration command as the container override. + +--- + +## 3. GCP Deployment + +### 3.1 Configure gcloud CLI + +```bash +gcloud auth login +gcloud config set project your-gcp-project-id +gcloud auth application-default login +``` + +Verify: + +```bash +gcloud config list +gcloud projects describe your-gcp-project-id +``` + +The principal running Terraform requires the following roles on the project: +- `roles/owner` or a custom role covering: Cloud Run Admin, Cloud SQL Admin, Redis Admin, Secret Manager Admin, IAM Admin, Compute Admin, Service Networking Admin. + +### 3.2 Prepare tfvars + +```bash +cd terraform/environments/gcp +cp terraform.tfvars.example terraform.tfvars +``` + +Edit `terraform.tfvars`. Key fields: + +- `project_id` — your GCP project ID +- `region` — GCP region (e.g. `us-central1`) +- `app_image_tag` — tag of the image you built +- `db_password` — strong random password for Cloud SQL +- `jwt_private_key` / `jwt_public_key` — same PEM keys used for AWS (same key pair for both regions) + +**Never commit `terraform.tfvars` to version control.** + +### 3.3 Configure Remote State (Recommended) + +Uncomment and configure the `backend "gcs"` block in `terraform/environments/gcp/main.tf`: + +```hcl +backend "gcs" { + bucket = "your-terraform-state-bucket" + prefix = "agentidp/gcp/production" +} +``` + +Create the GCS bucket: + +```bash +gsutil mb -l us-central1 gs://your-terraform-state-bucket +gsutil versioning set on gs://your-terraform-state-bucket +``` + +### 3.4 Terraform Init + +```bash +cd terraform/environments/gcp +terraform init +``` + +### 3.5 Terraform Plan + +```bash +terraform plan -out=tfplan +``` + +Review the plan. Expected resources: ~35–45 resources (VPC, subnet, VPC connector, service accounts, secrets, Cloud SQL, Memorystore, Cloud Run service, IAM bindings, API enablement). + +### 3.6 Terraform Apply + +```bash +terraform apply tfplan +``` + +**First apply takes 15–20 minutes** — Cloud SQL provisioning is the longest step. + +When complete: + +```bash +terraform output +``` + +Key outputs: +- `service_url` — Cloud Run HTTPS URL (Google-managed TLS, no cert setup required) +- `cloud_sql_connection_name` — for Cloud SQL Proxy if needed +- `memorystore_host` — Redis private IP + +### 3.7 Run Database Migrations + +Cloud Run does not support exec. Use a one-off Cloud Run Job for migrations: + +```bash +gcloud run jobs create agentidp-migrate \ + --image sentryagent/agentidp:1.0.0 \ + --region us-central1 \ + --command node \ + --args "scripts/db-migrate.js" \ + --set-secrets "DATABASE_URL=sentryagent-agentidp-production-database-url:latest" \ + --vpc-connector sentryagent-agentidp-production-connector \ + --service-account sentryagent-agentidp-production-run-sa@your-gcp-project-id.iam.gserviceaccount.com + +gcloud run jobs execute agentidp-migrate --region us-central1 --wait +``` + +--- + +## 4. Post-Deploy Verification + +Run these checks after deploying to either environment. Replace `https://idp.example.com` with your actual service URL. + +### 4.1 Health Check + +```bash +curl -si https://idp.example.com/health +``` + +Expected response: + +``` +HTTP/2 200 +content-type: application/json + +{"status":"ok"} +``` + +If you receive a 502 or 503, the load balancer has not yet registered healthy targets. Wait 60–90 seconds and retry — ECS tasks or Cloud Run instances take time to pass health checks. + +### 4.2 Metrics Endpoint + +```bash +curl -si https://idp.example.com/metrics +``` + +Expected: HTTP 200 with Prometheus-format metrics text (lines beginning with `# HELP`, `# TYPE`, and metric values). + +### 4.3 Token Endpoint (Smoke Test) + +First, register a test agent client (requires a valid JWT or admin credentials — see [developers guide](../developers/)): + +```bash +# Issue a client credentials token (replace CLIENT_ID and CLIENT_SECRET with real values) +curl -s -X POST https://idp.example.com/api/v1/token \ + -H "Content-Type: application/x-www-form-urlencoded" \ + -d "grant_type=client_credentials&client_id=test-client&client_secret=test-secret&scope=read" +``` + +Expected response (abbreviated): + +```json +{ + "access_token": "eyJhbGciOiJSUzI1NiIsInR5cCI6IkpXVCJ9...", + "token_type": "Bearer", + "expires_in": 3600, + "scope": "read" +} +``` + +### 4.4 JWKS Endpoint + +```bash +curl -si https://idp.example.com/.well-known/jwks.json +``` + +Expected: HTTP 200 with a JSON object containing a `keys` array with at least one RSA public key entry. + +### 4.5 TLS Verification + +```bash +# Verify TLS certificate is valid and matches your domain +curl -vI https://idp.example.com 2>&1 | grep -E "(SSL|TLS|certificate|issuer|subject)" +``` + +Expected: TLS 1.2 or 1.3, certificate issued by a trusted CA, subject matching your domain. + +### 4.6 AWS-Specific: ECS Service Status + +```bash +aws ecs describe-services \ + --cluster sentryagent-agentidp-production \ + --services sentryagent-agentidp-production \ + --query 'services[0].{desired:desiredCount,running:runningCount,pending:pendingCount,status:status}' +``` + +Expected: `running` equals `desired`, `status` is `ACTIVE`. + +### 4.7 GCP-Specific: Cloud Run Service Status + +```bash +gcloud run services describe sentryagent-agentidp-production \ + --region us-central1 \ + --format='value(status.conditions[0].type,status.conditions[0].status)' +``` + +Expected: `Ready True`. + +--- + +## 5. Rollback Procedure + +### 5.1 Image Rollback (Recommended — fastest) + +To roll back to a previous image tag without modifying infrastructure: + +**AWS:** + +```bash +# Find the previous task definition revision +aws ecs list-task-definitions \ + --family-prefix sentryagent-agentidp-production \ + --sort DESC \ + --query 'taskDefinitionArns[:5]' + +# Update the service to use the previous task definition +aws ecs update-service \ + --cluster sentryagent-agentidp-production \ + --service sentryagent-agentidp-production \ + --task-definition sentryagent-agentidp-production:PREVIOUS_REVISION \ + --force-new-deployment + +# Monitor the rollout +aws ecs wait services-stable \ + --cluster sentryagent-agentidp-production \ + --services sentryagent-agentidp-production +``` + +**GCP:** + +```bash +# Deploy the previous image tag directly +gcloud run services update sentryagent-agentidp-production \ + --region us-central1 \ + --image sentryagent/agentidp:PREVIOUS_TAG + +# Or route 100% of traffic to a specific revision +gcloud run services update-traffic sentryagent-agentidp-production \ + --region us-central1 \ + --to-revisions PREVIOUS_REVISION_NAME=100 +``` + +### 5.2 Infrastructure Rollback via Terraform + +If an infrastructure change (not an image update) caused the problem: + +```bash +# Check the state and plan to understand what changed +terraform show +terraform plan + +# If you have a previous state file (S3/GCS versioning), restore it: +# AWS: +aws s3 cp s3://your-state-bucket/agentidp/aws/production/terraform.tfstate.PREVIOUS ./terraform.tfstate +terraform apply -target= + +# GCP: +gsutil cp gs://your-state-bucket/agentidp/gcp/production/PREVIOUS_VERSION ./terraform.tfstate +terraform apply -target= +``` + +**Never run `terraform destroy` in production without CEO approval.** + +### 5.3 Database Rollback + +RDS (AWS) and Cloud SQL (GCP) both support point-in-time restore. Use this only as a last resort — it creates a new DB instance and requires updating the `DATABASE_URL` secret. + +**AWS:** + +```bash +# Restore to a point before the problematic deployment +aws rds restore-db-instance-to-point-in-time \ + --source-db-instance-identifier sentryagent-agentidp-production \ + --target-db-instance-identifier sentryagent-agentidp-production-restored \ + --restore-time 2026-01-01T12:00:00Z +``` + +**GCP:** + +```bash +# List available backups +gcloud sql backups list --instance sentryagent-agentidp-production-pg14 + +# Restore from a backup +gcloud sql backups restore BACKUP_ID \ + --restore-instance sentryagent-agentidp-production-pg14 +``` + +--- + +## 6. Environment Variable Reference + +All environment variables injected into the AgentIdP container are documented in full at: + +**[docs/devops/environment-variables.md](./environment-variables.md)** + +### Quick Reference + +| Variable | Required | Source (AWS) | Source (GCP) | +|----------|----------|--------------|--------------| +| `DATABASE_URL` | Yes | Secrets Manager: `///database-url` | Secret Manager: `-database-url` | +| `REDIS_URL` | Yes | Secrets Manager: `///redis-url` | Secret Manager: `-redis-url` | +| `JWT_PRIVATE_KEY` | Yes | Secrets Manager: `///jwt-private-key` | Secret Manager: `-jwt-private-key` | +| `JWT_PUBLIC_KEY` | Yes | Secrets Manager: `///jwt-public-key` | Secret Manager: `-jwt-public-key` | +| `PORT` | No | Task definition env var (default: 3000) | Cloud Run env var (default: 3000) | +| `NODE_ENV` | No | Task definition env var (`production`) | Cloud Run env var (`production`) | +| `CORS_ORIGIN` | No | Task definition env var | Cloud Run env var | +| `POLICY_DIR` | No | Task definition env var (`/app/policies`) | Cloud Run env var (`/app/policies`) | +| `VAULT_ADDR` | No | Task definition env var | Cloud Run env var | +| `VAULT_TOKEN` | No | Secrets Manager: `///vault-token` | Secret Manager: `-vault-token` | +| `VAULT_MOUNT` | No | Task definition env var (default: `secret`) | Cloud Run env var (default: `secret`) | + +### Updating a Secret + +**AWS:** + +```bash +# Update a secret value (e.g. rotate JWT keys) +aws secretsmanager put-secret-value \ + --secret-id /sentryagent-agentidp/production/jwt-private-key \ + --secret-string "$(cat new_jwt_private.pem)" + +# Force new ECS deployment to pick up the new secret value +aws ecs update-service \ + --cluster sentryagent-agentidp-production \ + --service sentryagent-agentidp-production \ + --force-new-deployment +``` + +**GCP:** + +```bash +# Add a new version of a secret +gcloud secrets versions add sentryagent-agentidp-production-jwt-private-key \ + --data-file=new_jwt_private.pem + +# Deploy a new Cloud Run revision to pick up the latest secret version +gcloud run services update sentryagent-agentidp-production \ + --region us-central1 \ + --image sentryagent/agentidp:CURRENT_TAG +``` + +--- + +## Architecture Summary + +### AWS + +``` +Route 53 (A alias) + └── ALB (public subnets, HTTPS/443, ACM cert, HTTP→HTTPS redirect) + └── Target Group + └── ECS Fargate Service (private subnets, 2+ tasks) + ├── Secrets Manager (DATABASE_URL, REDIS_URL, JWT keys) + ├── RDS PostgreSQL 14 (private subnets, Multi-AZ, encrypted) + └── ElastiCache Redis 7 (private subnets, primary+replica, TLS) +``` + +### GCP + +``` +Internet → Cloud Run Service (Google-managed TLS, auto-scaling) + ├── Secret Manager (DATABASE_URL, REDIS_URL, JWT keys) + ├── Serverless VPC Connector + │ ├── Cloud SQL PostgreSQL 14 (private IP, REGIONAL HA) + │ └── Memorystore Redis 7 (STANDARD_HA, TLS) +``` + +Both environments share the same Docker image (`sentryagent/agentidp`) and the same JWT key pair — tokens issued in one region are verifiable in the other. diff --git a/docs/devops/environment-variables.md b/docs/devops/environment-variables.md index a98a772..d090886 100644 --- a/docs/devops/environment-variables.md +++ b/docs/devops/environment-variables.md @@ -76,6 +76,62 @@ Every authenticated request verifies the JWT signature using this key. If this k These variables have defaults and do not need to be set for local development. +### `VAULT_ADDR` + +HashiCorp Vault server address. **Required to enable Vault integration (Phase 2).** + +| | | +|-|-| +| **Required** | No (Vault is optional) | +| **Format** | URL string | +| **Example** | `VAULT_ADDR=http://127.0.0.1:8200` | + +When set alongside `VAULT_TOKEN`, new credentials are stored in Vault KV v2 instead of as bcrypt hashes in PostgreSQL. Existing bcrypt credentials continue to work unchanged until rotated. See [Vault setup guide](vault-setup.md). + +--- + +### `VAULT_TOKEN` + +Vault authentication token. Required when `VAULT_ADDR` is set. + +| | | +|-|-| +| **Required** | Only when `VAULT_ADDR` is set | +| **Format** | String | +| **Example** | `VAULT_TOKEN=hvs.XXXXXXXXXXXXXXXXXXXXXX` | + +Use a Vault service token scoped to `read`, `write`, and `delete` on `{VAULT_MOUNT}/data/agentidp/*` and `{VAULT_MOUNT}/metadata/agentidp/*`. + +--- + +### `VAULT_MOUNT` + +KV v2 secrets engine mount path. + +| | | +|-|-| +| **Required** | No | +| **Default** | `secret` | +| **Format** | String (no leading or trailing slash) | +| **Example** | `VAULT_MOUNT=agentidp` | + +--- + +### `POLICY_DIR` + +Directory containing OPA policy files (`authz.rego`, `authz.wasm`, `data/scopes.json`). + +| | | +|-|-| +| **Required** | No | +| **Default** | `/policies` | +| **Format** | Absolute or relative directory path | +| **Example** | `POLICY_DIR=/etc/sentryagent/policies` | + +At startup the OPA authorization middleware loads `${POLICY_DIR}/authz.wasm` (Wasm mode) if present; otherwise it loads `${POLICY_DIR}/data/scopes.json` (fallback mode). Send `SIGHUP` to the process to hot-reload the policy files without a restart. + +--- + ### `PORT` HTTP port the Express server listens on. @@ -141,6 +197,14 @@ MIIEowIBAAKCAQEA... JWT_PUBLIC_KEY="-----BEGIN PUBLIC KEY----- MIIBIjANBgkq... -----END PUBLIC KEY-----" + +# HashiCorp Vault (Phase 2 — optional, omit to use bcrypt mode) +# VAULT_ADDR=http://127.0.0.1:8200 +# VAULT_TOKEN=hvs.XXXXXXXXXXXXXXXXXXXXXX +# VAULT_MOUNT=secret + +# OPA Policy Engine (Phase 2 — optional, defaults to /policies) +# POLICY_DIR=/etc/sentryagent/policies ``` > Do not commit `.env` to version control. Add it to `.gitignore`. diff --git a/docs/devops/operations.md b/docs/devops/operations.md index f01e83c..7bf414a 100644 --- a/docs/devops/operations.md +++ b/docs/devops/operations.md @@ -247,3 +247,38 @@ docker-compose exec redis redis-cli GET "rate::$WINDOW" ``` **Fix:** Wait until `X-RateLimit-Reset` (Unix timestamp in the response header) before retrying. The window resets every 60 seconds. + +--- + +## Monitoring + +AgentIdP exposes a Prometheus metrics endpoint at `GET /metrics` (unauthenticated, plain text). + +### Metrics Exposed + +| Metric | Type | Labels | Description | +|--------|------|--------|-------------| +| `agentidp_tokens_issued_total` | Counter | `scope` | OAuth 2.0 tokens issued successfully | +| `agentidp_agents_registered_total` | Counter | `deployment_env` | Agents registered successfully | +| `agentidp_http_requests_total` | Counter | `method`, `route`, `status_code` | HTTP requests received | +| `agentidp_http_request_duration_seconds` | Histogram | `method`, `route`, `status_code` | HTTP request duration | +| `agentidp_db_query_duration_seconds` | Histogram | `operation` | PostgreSQL query duration | +| `agentidp_redis_command_duration_seconds` | Histogram | `command` | Redis command duration | + +### Starting the Monitoring Stack + +```bash +# Start the full stack with monitoring +docker compose -f docker-compose.yml -f docker-compose.monitoring.yml up -d + +# Prometheus: http://localhost:9090 +# Grafana: http://localhost:3001 (admin / agentidp) +``` + +The Grafana dashboard auto-provisions on first start. Navigate to **Dashboards → AgentIdP → SentryAgent.ai — AgentIdP**. + +### Security Note + +`GET /metrics` is unauthenticated. In production, ensure this endpoint is: +- Only accessible from your internal network (firewall rule or reverse proxy restriction) +- Not exposed on a public-facing port diff --git a/docs/devops/vault-setup.md b/docs/devops/vault-setup.md new file mode 100644 index 0000000..37956a8 --- /dev/null +++ b/docs/devops/vault-setup.md @@ -0,0 +1,197 @@ +# HashiCorp Vault Setup + +Phase 2 of AgentIdP optionally stores credential secrets in [HashiCorp Vault](https://www.vaultproject.io/) KV v2 instead of bcrypt hashes in PostgreSQL. This guide covers: + +- Dev mode quickstart +- Production Vault configuration +- Migration from bcrypt to Vault + +Vault is **entirely optional**. If `VAULT_ADDR` is not set, AgentIdP operates in bcrypt mode (identical to Phase 1 behaviour). + +--- + +## How Vault integration works + +When enabled: + +1. `POST /api/v1/agents/{agentId}/credentials` — the plain-text secret is written to Vault at `{mount}/data/agentidp/agents/{agentId}/credentials/{credentialId}`. Only the Vault path is stored in PostgreSQL (`credentials.vault_path`). No bcrypt hash is written. +2. `POST /api/v1/token` — the submitted `client_secret` is compared against the value read from Vault (constant-time comparison). No bcrypt is involved. +3. `POST /api/v1/agents/{agentId}/credentials/{credentialId}/rotate` — a new Vault version is written (KV v2 versioning). The path is unchanged; the old version is retained in Vault history. +4. `DELETE /api/v1/agents/{agentId}/credentials/{credentialId}` — all versions of the secret are permanently deleted from Vault. + +**Coexistence**: Credentials created before Vault was enabled keep their bcrypt hash and continue to work. New credentials use Vault. Both paths coexist until all pre-Vault credentials are rotated. + +--- + +## Dev mode quickstart + +The fastest way to get Vault running locally: + +```bash +# Pull and start Vault in dev mode (in-memory, auto-unsealed) +docker run --rm -d \ + --name vault-dev \ + -p 8200:8200 \ + -e VAULT_DEV_ROOT_TOKEN_ID=dev-root-token \ + hashicorp/vault:1.15 server -dev + +# Verify it is running +curl http://127.0.0.1:8200/v1/sys/health | jq . +``` + +Add to your `.env`: + +``` +VAULT_ADDR=http://127.0.0.1:8200 +VAULT_TOKEN=dev-root-token +VAULT_MOUNT=secret +``` + +The KV v2 secrets engine is automatically enabled at `secret/` in dev mode. No further configuration is needed. + +> **Warning**: Dev mode stores everything in memory. Data is lost when the container stops. Do not use dev mode in production. + +--- + +## Production Vault configuration + +### 1. Enable KV v2 secrets engine + +```bash +vault secrets enable -path=secret kv-v2 +``` + +Or use a custom mount path: + +```bash +vault secrets enable -path=agentidp kv-v2 +# Set VAULT_MOUNT=agentidp in your .env +``` + +### 2. Create a policy for AgentIdP + +```hcl +# agentidp-policy.hcl +path "secret/data/agentidp/*" { + capabilities = ["create", "read", "update", "delete"] +} + +path "secret/metadata/agentidp/*" { + capabilities = ["delete"] +} +``` + +Apply the policy: + +```bash +vault policy write agentidp agentidp-policy.hcl +``` + +### 3. Create a service token + +```bash +vault token create \ + -policy=agentidp \ + -ttl=8760h \ + -renewable=true \ + -display-name="agentidp-service" +``` + +Copy the `token` field from the output and set it as `VAULT_TOKEN` in your environment. + +### 4. Token renewal + +Service tokens expire unless renewed. Set up a scheduled renewal before the TTL expires: + +```bash +# Renew with a new 720-hour (30-day) lease +vault token renew -increment=720h +``` + +In Kubernetes, use Vault Agent Injector or the Vault Secrets Operator to handle renewal automatically. + +--- + +## Running migration 005 + +After configuring Vault, run the migration to add the `vault_path` column: + +```bash +npm run db:migrate +``` + +Verify the migration: + +```sql +SELECT column_name, data_type, is_nullable +FROM information_schema.columns +WHERE table_name = 'credentials' +ORDER BY ordinal_position; +``` + +You should see a `vault_path` column with `data_type = text` and `is_nullable = YES`. + +--- + +## Migrating existing credentials to Vault + +Existing credentials (with `vault_path IS NULL`) continue to work via bcrypt until they are rotated. To migrate a credential: + +```bash +# Rotate the credential — this writes the new secret to Vault +curl -s -X POST http://localhost:3000/api/v1/agents/$AGENT_ID/credentials/$CRED_ID/rotate \ + -H "Authorization: Bearer $TOKEN" | jq . +``` + +The response includes the new `clientSecret` (store it immediately). After rotation, `vault_path` is set and the bcrypt hash is cleared. + +To migrate all credentials for an agent in bulk, rotate them one by one using the API. + +--- + +## Verifying Vault secrets + +After generating a credential with Vault enabled, verify the secret was written: + +```bash +vault kv get secret/agentidp/agents/$AGENT_ID/credentials/$CRED_ID +``` + +Expected output: + +``` +====== Secret Path ====== +secret/data/agentidp/agents//credentials/ + +======= Metadata ======= +Key Value +--- ----- +created_time 2026-03-28T... +version 1 + +====== Data ====== +Key Value +--- ----- +clientSecret +``` + +--- + +## Troubleshooting + +### `VAULT_WRITE_ERROR` on credential generation + +- Verify Vault is running: `curl $VAULT_ADDR/v1/sys/health` +- Verify the token has write access: `vault token capabilities $VAULT_TOKEN secret/data/agentidp/test` +- Check Vault audit logs: `vault audit list` + +### `VAULT_READ_ERROR` on token issuance + +- Verify the `vault_path` stored in PostgreSQL matches the actual Vault path +- Check the token has read access to `secret/data/agentidp/*` + +### Vault is down — what happens? + +If Vault is unreachable, credential generation and token issuance for Vault-backed credentials will fail with a `500` error. Credentials created before Vault was enabled (bcrypt mode) continue to work. + +For high availability, run Vault in HA mode with an integrated Raft storage backend. See [Vault HA documentation](https://developer.hashicorp.com/vault/docs/concepts/ha). diff --git a/monitoring/grafana/dashboards/agentidp.json b/monitoring/grafana/dashboards/agentidp.json new file mode 100644 index 0000000..eec483d --- /dev/null +++ b/monitoring/grafana/dashboards/agentidp.json @@ -0,0 +1,226 @@ +{ + "annotations": { + "list": [ + { + "builtIn": 1, + "datasource": { "type": "grafana", "uid": "-- Grafana --" }, + "enable": true, + "hide": true, + "iconColor": "rgba(0, 211, 255, 1)", + "name": "Annotations & Alerts", + "type": "dashboard" + } + ] + }, + "description": "SentryAgent.ai AgentIdP — Application Overview", + "editable": true, + "fiscalYearStartMonth": 0, + "graphTooltip": 0, + "id": null, + "links": [], + "panels": [ + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { "lineWidth": 2, "fillOpacity": 10 } + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 0 }, + "id": 1, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "list", "placement": "bottom" }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "expr": "rate(agentidp_tokens_issued_total[1m])", + "legendFormat": "scope={{ scope }}", + "refId": "A" + } + ], + "title": "Tokens Issued / min", + "type": "timeseries" + }, + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { "lineWidth": 2, "fillOpacity": 10 } + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 0 }, + "id": 2, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "list", "placement": "bottom" }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "expr": "rate(agentidp_agents_registered_total[1m])", + "legendFormat": "env={{ deployment_env }}", + "refId": "A" + } + ], + "title": "Agents Registered / min", + "type": "timeseries" + }, + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { "lineWidth": 2, "fillOpacity": 10 } + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 8 }, + "id": 3, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "list", "placement": "bottom" }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "expr": "rate(agentidp_http_requests_total[1m])", + "legendFormat": "{{ method }} {{ route }}", + "refId": "A" + } + ], + "title": "HTTP Request Rate / min", + "type": "timeseries" + }, + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { "lineWidth": 2, "fillOpacity": 10 }, + "thresholds": { + "mode": "absolute", + "steps": [ + { "color": "green", "value": null }, + { "color": "red", "value": 0.01 } + ] + } + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 8 }, + "id": 4, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "list", "placement": "bottom" }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "expr": "rate(agentidp_http_requests_total{status_code=~\"5..\"}[1m])", + "legendFormat": "{{ method }} {{ route }} {{ status_code }}", + "refId": "A" + } + ], + "title": "HTTP Error Rate (5xx)", + "type": "timeseries" + }, + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { "lineWidth": 2, "fillOpacity": 10 }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 16 }, + "id": 5, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "list", "placement": "bottom" }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "expr": "histogram_quantile(0.99, rate(agentidp_http_request_duration_seconds_bucket[5m]))", + "legendFormat": "p99 {{ method }} {{ route }}", + "refId": "A" + } + ], + "title": "HTTP P99 Latency", + "type": "timeseries" + }, + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { "lineWidth": 2, "fillOpacity": 10 }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 12, "y": 16 }, + "id": 6, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "list", "placement": "bottom" }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "expr": "histogram_quantile(0.95, rate(agentidp_db_query_duration_seconds_bucket[5m]))", + "legendFormat": "p95 {{ operation }}", + "refId": "A" + } + ], + "title": "DB Query P95 Latency", + "type": "timeseries" + }, + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "fieldConfig": { + "defaults": { + "color": { "mode": "palette-classic" }, + "custom": { "lineWidth": 2, "fillOpacity": 10 }, + "unit": "s" + }, + "overrides": [] + }, + "gridPos": { "h": 8, "w": 12, "x": 0, "y": 24 }, + "id": 7, + "options": { + "legend": { "calcs": ["mean", "max"], "displayMode": "list", "placement": "bottom" }, + "tooltip": { "mode": "multi" } + }, + "targets": [ + { + "datasource": { "type": "prometheus", "uid": "prometheus" }, + "expr": "histogram_quantile(0.95, rate(agentidp_redis_command_duration_seconds_bucket[5m]))", + "legendFormat": "p95 {{ command }}", + "refId": "A" + } + ], + "title": "Redis Command P95 Latency", + "type": "timeseries" + } + ], + "refresh": "30s", + "schemaVersion": 39, + "tags": ["agentidp", "sentryagent"], + "templating": { "list": [] }, + "time": { "from": "now-1h", "to": "now" }, + "timepicker": {}, + "timezone": "browser", + "title": "SentryAgent.ai — AgentIdP", + "uid": "agentidp-overview", + "version": 1, + "weekStart": "" +} diff --git a/monitoring/grafana/provisioning/dashboards/provider.yml b/monitoring/grafana/provisioning/dashboards/provider.yml new file mode 100644 index 0000000..331e073 --- /dev/null +++ b/monitoring/grafana/provisioning/dashboards/provider.yml @@ -0,0 +1,11 @@ +apiVersion: 1 + +providers: + - name: AgentIdP + orgId: 1 + folder: AgentIdP + type: file + disableDeletion: false + updateIntervalSeconds: 10 + options: + path: /var/lib/grafana/dashboards diff --git a/monitoring/grafana/provisioning/datasources/prometheus.yml b/monitoring/grafana/provisioning/datasources/prometheus.yml new file mode 100644 index 0000000..bb009bb --- /dev/null +++ b/monitoring/grafana/provisioning/datasources/prometheus.yml @@ -0,0 +1,9 @@ +apiVersion: 1 + +datasources: + - name: Prometheus + type: prometheus + access: proxy + url: http://prometheus:9090 + isDefault: true + editable: false diff --git a/monitoring/prometheus/prometheus.yml b/monitoring/prometheus/prometheus.yml new file mode 100644 index 0000000..44a5d3f --- /dev/null +++ b/monitoring/prometheus/prometheus.yml @@ -0,0 +1,10 @@ +global: + scrape_interval: 15s + evaluation_interval: 15s + +scrape_configs: + - job_name: 'agentidp' + static_configs: + - targets: ['agentidp:3000'] + metrics_path: /metrics + scheme: http diff --git a/openspec/changes/phase-2-production-ready/.openspec.yaml b/openspec/changes/phase-2-production-ready/.openspec.yaml new file mode 100644 index 0000000..fd1e0d3 --- /dev/null +++ b/openspec/changes/phase-2-production-ready/.openspec.yaml @@ -0,0 +1,3 @@ +change: phase-2-production-ready +status: proposed +date: 2026-03-28 diff --git a/openspec/changes/phase-2-production-ready/design.md b/openspec/changes/phase-2-production-ready/design.md new file mode 100644 index 0000000..a5f4663 --- /dev/null +++ b/openspec/changes/phase-2-production-ready/design.md @@ -0,0 +1,218 @@ +# Phase 2: Production-Ready — Technical Design + +**Date**: 2026-03-28 +**Author**: Virtual Architect +**Status**: Draft — pending CEO approval of proposal + +--- + +## 1. HashiCorp Vault Integration + +### Architecture + +``` +AgentIdP Server + └── CredentialService + └── VaultClient (new) + └── HashiCorp Vault (sidecar or external) + └── KV Secrets Engine v2 +``` + +### Design Decisions + +**ADR-001: Vault over AWS KMS/GCP Secret Manager** +Vault is cloud-agnostic, open-source, and already standard in enterprise environments. Using Vault keeps Phase 2 cloud-provider independent. + +**ADR-002: KV Secrets Engine v2** +KV v2 provides versioned secrets and metadata. When a credential is rotated, the old version is retained in Vault history, enabling audit-grade secret lifecycle tracking. + +**ADR-003: AgentIdP stores Vault path, not secret** +`credentials.vault_path` stores the Vault KV path (e.g. `secret/agentidp/agents/{agentId}/credentials/{credentialId}`). The secret itself is never written to PostgreSQL. + +### New environment variables +| Variable | Description | +|----------|-------------| +| `VAULT_ADDR` | Vault server address | +| `VAULT_TOKEN` | Vault root/service token | +| `VAULT_MOUNT` | KV mount path (default: `secret`) | + +### Migration +Add `vault_path` column to `credentials` table (`005_add_vault_path.sql`). Existing credentials retain bcrypt hashes; new credentials use Vault. Both code paths coexist until all credentials are rotated (migration guide provided). + +--- + +## 2. Multi-Language SDKs + +### Shared contract (all SDKs implement identically) + +``` +AgentIdPClient(baseUrl, clientId, clientSecret, scopes?) + .agents → AgentRegistryClient (5 methods) + .credentials → CredentialClient (4 methods) + .tokens → TokenClient (2 methods) + .audit → AuditClient (2 methods) + .clearTokenCache() + +TokenManager — auto-refresh 60s before expiry +AgentIdPError — code, message, httpStatus, details +``` + +### Python SDK (`sentryagent-idp`) +- Python 3.9+ (httpx for async, requests for sync) +- Both sync and async client variants +- PyPI package: `sentryagent-idp` +- Type hints throughout (`mypy --strict` clean) + +### Go SDK (`github.com/sentryagent/idp-sdk-go`) +- Go 1.21+, standard library `net/http` +- Context-aware methods (`context.Context` first arg) +- Idiomatic Go error handling (`error` return, no panic) +- Go module: `github.com/sentryagent/idp-sdk-go` + +### Java SDK (`ai.sentryagent:idp-sdk`) +- Java 17+, Apache HttpClient 5 +- Synchronous and CompletableFuture async variants +- Maven Central: `ai.sentryagent:idp-sdk` +- Fully typed with generics + +--- + +## 3. OPA Policy Engine + +### Architecture + +``` +HTTP Request + → Auth Middleware (JWT verify) — unchanged + → OPA Middleware (new) — evaluates policy + → OPA Wasm (embedded, no network call) + → Rego policy files (hot-reloadable) + → Controller +``` + +### Design Decisions + +**ADR-004: OPA Wasm over OPA sidecar** +Embedding OPA as Wasm in the Node.js process eliminates a network hop and removes a runtime dependency. Policy files are loaded from `policies/` directory at startup and reloaded on SIGHUP. + +**ADR-005: Policy replaces, does not wrap, scope check** +The existing static scope check in `auth.ts` is replaced by an OPA policy evaluation. This keeps the policy as the single source of truth for access control. + +### Policy structure (`policies/`) +``` +policies/ + authz.rego — main policy: allow/deny + data/ + scopes.json — scope → permission mapping +``` + +--- + +## 4. Web Dashboard UI + +### Architecture + +``` +dashboard/ (new — separate from sdk/) + src/ + components/ — reusable UI components + pages/ — Agents, Credentials, Audit, Health + hooks/ — useAgents, useCredentials, useAudit + lib/ + client.ts — wraps @sentryagent/idp-sdk + auth.ts — credential entry and storage +``` + +### Tech Stack +- React 18 + TypeScript strict +- Vite 5 (build tool) +- TanStack Query v5 (server state) +- shadcn/ui components (Radix UI + Tailwind CSS) + +### Pages +| Page | Scope Required | Features | +|------|---------------|----------| +| Agents | `agents:read` | List, search, view detail, suspend/reactivate | +| Credentials | `agents:read` | List credentials per agent, rotate, revoke | +| Audit Log | `audit:read` | Filter by agent/action/outcome/date, paginate | +| Health | None | Server uptime, Redis/PostgreSQL connectivity | + +### Authentication +The dashboard accepts `clientId` + `clientSecret` via a login form. The `@sentryagent/idp-sdk` `TokenManager` handles token acquisition and caching in `sessionStorage`. No backend session — all state is client-side. + +--- + +## 5. Prometheus + Grafana Monitoring + +### Metrics exposed at `GET /metrics` + +| Metric | Type | Description | +|--------|------|-------------| +| `agentidp_tokens_issued_total` | Counter | Tokens issued, labelled by outcome | +| `agentidp_agents_registered_total` | Counter | Agent registrations | +| `agentidp_http_requests_total` | Counter | All requests, labelled by method/path/status | +| `agentidp_http_request_duration_seconds` | Histogram | Request latency | +| `agentidp_rate_limit_rejections_total` | Counter | 429 responses | +| `agentidp_db_query_duration_seconds` | Histogram | PostgreSQL query latency | +| `agentidp_redis_command_duration_seconds` | Histogram | Redis command latency | + +### Grafana dashboard +Pre-built JSON dashboard shipped in `monitoring/grafana/dashboards/agentidp.json`. Auto-provisioned via `monitoring/grafana/provisioning/`. + +### Docker Compose extension +Add `prometheus` and `grafana` services to a `docker-compose.monitoring.yml` overlay — keeps the base `docker-compose.yml` clean for developers who don't need monitoring. + +--- + +## 6. Multi-Region Deployment (Terraform) + +### Structure + +``` +terraform/ + modules/ + agentidp/ — reusable module: compute + networking + rds/ — managed PostgreSQL + redis/ — managed Redis + lb/ — load balancer + TLS + environments/ + aws/ — AWS-specific config (ECS + RDS + ElastiCache) + gcp/ — GCP-specific config (Cloud Run + Cloud SQL + Memorystore) +``` + +### Design Decisions + +**ADR-006: Two provider targets (AWS + GCP) in Phase 2** +AWS and GCP cover the majority of developer deployments. Azure module is Phase 3. Each environment is a thin wrapper over the shared `agentidp` module. + +**ADR-007: Terraform over Pulumi/CDK** +Terraform is the most widely-used IaC tool, familiar to most DevOps teams. The HCL syntax is simpler for documentation purposes. + +--- + +## Component Interaction Map (Phase 2) + +``` + ┌────────────────────┐ + │ Web Dashboard │ + │ (React + Vite) │ + └────────┬───────────┘ + │ HTTPS + ┌────────────────▼────────────────┐ + │ AgentIdP Server │ + │ Auth MW → OPA MW → Controllers │ + │ /metrics (prom-client) │ + └──┬──────────┬──────────┬────────┘ + │ │ │ + ┌─────▼──┐ ┌────▼───┐ ┌──▼───────┐ + │Postgres│ │ Redis │ │ Vault │ + └────────┘ └────────┘ └──────────┘ + │ + ┌────────▼────────┐ + │ Prometheus │ + └────────┬────────┘ + │ + ┌────────▼────────┐ + │ Grafana │ + └─────────────────┘ +``` diff --git a/openspec/changes/phase-2-production-ready/proposal.md b/openspec/changes/phase-2-production-ready/proposal.md new file mode 100644 index 0000000..1d02f8d --- /dev/null +++ b/openspec/changes/phase-2-production-ready/proposal.md @@ -0,0 +1,96 @@ +# Phase 2: Production-Ready — Change Proposal + +**Date**: 2026-03-28 +**Author**: Virtual CTO +**Status**: Proposed — awaiting CEO approval + +--- + +## Summary + +Phase 1 delivered a complete, working AgentIdP MVP. Phase 2 makes it production-ready: hardened secrets management, multi-language SDKs, a policy engine, a web dashboard, observability, and multi-region deployment. + +--- + +## Problem Statement + +Phase 1 is functional but has the following production gaps: + +| Gap | Risk | +|-----|------| +| Credentials stored as bcrypt hashes in PostgreSQL | No HSM/KMS — acceptable for MVP, not for enterprise | +| Only Node.js SDK | Developers in Python/Go/Java cannot use the SDK | +| No policy engine | Scope enforcement is static — no dynamic ABAC/RBAC | +| No web UI | Operators must use `curl` to manage agents | +| No observability | No metrics, no dashboards, no alerting | +| Single-region deployment | No HA, no geo-redundancy | + +--- + +## Proposed Changes + +### 1. HashiCorp Vault Integration +Replace raw bcrypt credential storage with Vault-backed secret management. Vault handles secret generation, versioning, and revocation. AgentIdP stores only Vault secret paths, not the secrets themselves. + +### 2. Multi-Language SDKs +Add Python, Go, and Java SDKs with identical API surface to the existing Node.js SDK: `AgentIdPClient`, `TokenManager`, service clients for all 14 endpoints, typed error hierarchy. + +### 3. Advanced Policy Engine (OPA) +Integrate Open Policy Agent (OPA) as a sidecar for dynamic scope and attribute-based access control. Policies are hot-reloadable Rego files — no server restart required. + +### 4. Web Dashboard UI +A React + TypeScript dashboard for operators: agent list and management, credential overview, audit log viewer, system health panel. Read-only by default; write operations require `agents:write` scope. + +### 5. Prometheus + Grafana Monitoring +Instrument all services with Prometheus metrics (`/metrics` endpoint). Ship a pre-built Grafana dashboard for: token issuance rate, agent registration rate, error rates, Redis latency, PostgreSQL query latency. + +### 6. Multi-Region Deployment +Terraform modules for AWS/GCP deployment with: managed PostgreSQL (RDS/Cloud SQL), managed Redis (ElastiCache/Memorystore), container orchestration (ECS/Cloud Run), load balancer, and a deployment guide. + +--- + +## Out of Scope for Phase 2 + +- AGNTCY federation (Phase 3) +- W3C DID support (Phase 3) +- SOC 2 certification (Phase 3) +- Rust/C++ SDKs (Phase 3) + +--- + +## Dependencies + +| New Dependency | Purpose | CEO Approval Required | +|---------------|---------|----------------------| +| `@openpolicyagent/opa-wasm` | OPA policy evaluation | Yes | +| `node-vault` | HashiCorp Vault client | Yes | +| React 18 + Vite | Web dashboard | Yes | +| `prom-client` | Prometheus metrics | Yes | +| Terraform | Infrastructure as code | Yes | + +--- + +## Delivery Sequence (per OpenSpec spec-first workflow) + +``` +1. Vault integration (highest security impact) +2. Python SDK (highest developer demand) +3. Go SDK +4. Java SDK +5. OPA policy engine +6. Web dashboard UI +7. Prometheus + Grafana monitoring +8. Multi-region deployment (Terraform) +``` + +--- + +## Success Criteria + +- All new dependencies CEO-approved before implementation begins +- All new API endpoints have OpenAPI 3.0 specs before implementation +- TypeScript strict mode + zero `any` maintained throughout +- >80% test coverage on all new services +- All SDKs pass the same QA gate: 14-endpoint coverage, typed errors, zero `any` +- Web dashboard passes OWASP Top 10 security review +- Monitoring stack ships with pre-built dashboards — zero manual setup required diff --git a/openspec/changes/phase-2-production-ready/specs/deployment/spec.md b/openspec/changes/phase-2-production-ready/specs/deployment/spec.md new file mode 100644 index 0000000..ddede2c --- /dev/null +++ b/openspec/changes/phase-2-production-ready/specs/deployment/spec.md @@ -0,0 +1,44 @@ +# Spec: Multi-Region Deployment (Terraform) + +**Status**: Pending CEO approval +**Workstream**: 8 of 8 + +## Scope +- `terraform/` directory at project root +- Shared `agentidp` module (compute, networking, secrets) +- `environments/aws/` — ECS Fargate + RDS PostgreSQL + ElastiCache Redis +- `environments/gcp/` — Cloud Run + Cloud SQL + Memorystore Redis +- Deployment guide: `docs/devops/deployment.md` + +## Module structure + +``` +terraform/ + modules/ + agentidp/ + main.tf — compute (ECS task or Cloud Run service) + networking.tf — VPC, subnets, security groups + variables.tf — all configurable inputs + outputs.tf — service URL, DB endpoint, Redis endpoint + rds/ — managed PostgreSQL + redis/ — managed Redis + lb/ — ALB (AWS) or Cloud LB (GCP), TLS cert + environments/ + aws/ + main.tf — calls modules, sets AWS-specific vars + variables.tf + terraform.tfvars.example + gcp/ + main.tf + variables.tf + terraform.tfvars.example +``` + +## Acceptance Criteria +- [ ] `terraform validate` passes for both aws and gcp environments +- [ ] `terraform plan` produces no errors against a live AWS/GCP account (test in dev env) +- [ ] JWT_PRIVATE_KEY and JWT_PUBLIC_KEY injected as environment secrets (not hardcoded) +- [ ] TLS termination at load balancer — HTTPS only in production modules +- [ ] PostgreSQL and Redis not publicly accessible — VPC-internal only +- [ ] `docs/devops/deployment.md` — end-to-end deployment walkthrough for AWS and GCP +- [ ] `terraform.tfvars.example` provided for both environments — no secrets in version control diff --git a/openspec/changes/phase-2-production-ready/specs/go-sdk/spec.md b/openspec/changes/phase-2-production-ready/specs/go-sdk/spec.md new file mode 100644 index 0000000..60c12b7 --- /dev/null +++ b/openspec/changes/phase-2-production-ready/specs/go-sdk/spec.md @@ -0,0 +1,23 @@ +# Spec: Go SDK (`github.com/sentryagent/idp-sdk-go`) + +**Status**: Pending CEO approval +**Workstream**: 3 of 8 + +## Scope +- `sdk-go/` directory at project root +- Context-aware `AgentIdPClient` using standard library `net/http` +- `TokenManager` with mutex-guarded cache and 60s auto-refresh +- Service clients: `AgentRegistryClient`, `CredentialClient`, `TokenClient`, `AuditClient` +- Idiomatic Go error type `AgentIdPError` implementing `error` interface +- `go.mod` module: `github.com/sentryagent/idp-sdk-go` +- `sdk-go/README.md` + +## Acceptance Criteria +- [ ] All 14 endpoints covered +- [ ] All methods take `context.Context` as first argument +- [ ] No panics — all errors returned as `error` +- [ ] `AgentIdPError` implements `error` and exposes `.Code`, `.HTTPStatus`, `.Details` +- [ ] `TokenManager` is goroutine-safe (`sync.Mutex` on cache) +- [ ] `go vet` and `staticcheck` pass with zero warnings +- [ ] `go test ./...` with >80% coverage +- [ ] README matches Node.js SDK structure diff --git a/openspec/changes/phase-2-production-ready/specs/java-sdk/spec.md b/openspec/changes/phase-2-production-ready/specs/java-sdk/spec.md new file mode 100644 index 0000000..ba7cb9d --- /dev/null +++ b/openspec/changes/phase-2-production-ready/specs/java-sdk/spec.md @@ -0,0 +1,23 @@ +# Spec: Java SDK (`ai.sentryagent:idp-sdk`) + +**Status**: Pending CEO approval +**Workstream**: 4 of 8 + +## Scope +- `sdk-java/` directory at project root +- `AgentIdPClient` with sync and `CompletableFuture` async variants +- `TokenManager` with thread-safe cache and 60s auto-refresh +- Service clients: `AgentRegistryClient`, `CredentialClient`, `TokenClient`, `AuditClient` +- `AgentIdPException` extending `RuntimeException` with `code`, `httpStatus`, `details` +- `pom.xml`: groupId=`ai.sentryagent`, artifactId=`idp-sdk`, Java 17+ +- `sdk-java/README.md` + +## Acceptance Criteria +- [ ] All 14 endpoints covered +- [ ] Sync methods return typed POJOs; async methods return `CompletableFuture` +- [ ] `AgentIdPException` thrown (not raw IOException) on all failure paths +- [ ] `TokenManager` is thread-safe (`synchronized` on cache) +- [ ] Apache HttpClient 5 for HTTP transport +- [ ] Jackson for JSON serialization +- [ ] `mvn verify` passes with >80% coverage (JUnit 5) +- [ ] README matches Node.js SDK structure diff --git a/openspec/changes/phase-2-production-ready/specs/monitoring/spec.md b/openspec/changes/phase-2-production-ready/specs/monitoring/spec.md new file mode 100644 index 0000000..502064e --- /dev/null +++ b/openspec/changes/phase-2-production-ready/specs/monitoring/spec.md @@ -0,0 +1,32 @@ +# Spec: Prometheus + Grafana Monitoring + +**Status**: Pending CEO approval +**Workstream**: 7 of 8 + +## Scope +- `prom-client` integration — expose `GET /metrics` +- 7 metrics (counters + histograms) across all services +- `monitoring/` directory: Prometheus config + Grafana provisioning +- `docker-compose.monitoring.yml` overlay (adds prometheus + grafana services) +- Pre-built Grafana dashboard JSON (`monitoring/grafana/dashboards/agentidp.json`) + +## Metrics + +| Metric | Type | Labels | +|--------|------|--------| +| `agentidp_tokens_issued_total` | Counter | `outcome` (success/failure) | +| `agentidp_agents_registered_total` | Counter | `outcome` | +| `agentidp_http_requests_total` | Counter | `method`, `path`, `status_code` | +| `agentidp_http_request_duration_seconds` | Histogram | `method`, `path` | +| `agentidp_rate_limit_rejections_total` | Counter | — | +| `agentidp_db_query_duration_seconds` | Histogram | `operation` | +| `agentidp_redis_command_duration_seconds` | Histogram | `command` | + +## Acceptance Criteria +- [ ] `GET /metrics` returns Prometheus text format +- [ ] `/metrics` endpoint does NOT require Bearer auth (Prometheus scrapes it) +- [ ] All 7 metrics present and updating under load +- [ ] Grafana dashboard auto-provisions on `docker compose -f docker-compose.monitoring.yml up` +- [ ] Grafana runs on port 3001 (no conflict with AgentIdP on 3000) +- [ ] `docs/devops/operations.md` updated with monitoring section +- [ ] `prom-client` added as new dependency — CEO approval gate diff --git a/openspec/changes/phase-2-production-ready/specs/opa-policy/spec.md b/openspec/changes/phase-2-production-ready/specs/opa-policy/spec.md new file mode 100644 index 0000000..102854a --- /dev/null +++ b/openspec/changes/phase-2-production-ready/specs/opa-policy/spec.md @@ -0,0 +1,37 @@ +# Spec: OPA Policy Engine Integration + +**Status**: Pending CEO approval +**Workstream**: 5 of 8 + +## Scope +- New `OpaMiddleware` replacing static scope check in `auth.ts` +- `@openpolicyagent/opa-wasm` integration (embedded Wasm, no sidecar) +- `policies/authz.rego` — main allow/deny policy +- `policies/data/scopes.json` — scope to permission mapping +- SIGHUP handler to hot-reload policies without restart +- New env var: `POLICY_DIR` (default: `./policies`) + +## Policy interface + +``` +input = { + "method": "GET", + "path": "/api/v1/agents", + "scopes": ["agents:read"], + "agentId": "uuid" +} + +output = { + "allow": true | false, + "reason": "string" // populated when allow=false +} +``` + +## Acceptance Criteria +- [ ] All existing scope checks replaced by OPA evaluation +- [ ] Policy files hot-reloadable on SIGHUP (no restart required) +- [ ] OPA Wasm loaded at startup — fail-fast if `POLICY_DIR` invalid +- [ ] `allow=false` responses return `403` with `reason` in error body +- [ ] Existing test suite passes unchanged (OPA evaluates same rules as before) +- [ ] New unit tests for OPA middleware: allow/deny cases, missing scope, invalid input +- [ ] `POLICY_DIR` env var documented in `docs/devops/environment-variables.md` diff --git a/openspec/changes/phase-2-production-ready/specs/python-sdk/spec.md b/openspec/changes/phase-2-production-ready/specs/python-sdk/spec.md new file mode 100644 index 0000000..4e8df1f --- /dev/null +++ b/openspec/changes/phase-2-production-ready/specs/python-sdk/spec.md @@ -0,0 +1,24 @@ +# Spec: Python SDK (`sentryagent-idp`) + +**Status**: Pending CEO approval +**Workstream**: 2 of 8 + +## Scope +- `sdk-python/` directory at project root +- `AgentIdPClient` with sync and async variants +- `TokenManager` with 60s auto-refresh +- Service clients: `AgentRegistryClient`, `CredentialClient`, `TokenClient`, `AuditClient` +- `AgentIdPError` typed exception +- Full type hints — `mypy --strict` clean +- `sdk-python/README.md` with installation and usage + +## Acceptance Criteria +- [ ] All 14 API endpoints covered +- [ ] Sync client: `requests` library +- [ ] Async client: `httpx` library +- [ ] `mypy --strict` passes with zero errors +- [ ] Zero untyped code +- [ ] `AgentIdPError` raised (not raw requests/httpx exceptions) on all failure paths +- [ ] `TokenManager` tested: caches token, refreshes at exp-60s +- [ ] `pyproject.toml` with: name=sentryagent-idp, python>=3.9, dependencies declared +- [ ] README matches Node.js SDK structure diff --git a/openspec/changes/phase-2-production-ready/specs/vault/spec.md b/openspec/changes/phase-2-production-ready/specs/vault/spec.md new file mode 100644 index 0000000..6653f41 --- /dev/null +++ b/openspec/changes/phase-2-production-ready/specs/vault/spec.md @@ -0,0 +1,21 @@ +# Spec: HashiCorp Vault Integration + +**Status**: Pending CEO approval +**Workstream**: 1 of 8 + +## Scope +- VaultClient class wrapping `node-vault` +- `005_add_vault_path.sql` migration +- Updated CredentialService to write secrets to Vault instead of PostgreSQL +- New env vars: VAULT_ADDR, VAULT_TOKEN, VAULT_MOUNT +- Migration guide: bcrypt → Vault coexistence strategy + +## Acceptance Criteria +- [ ] New credentials: secret written to Vault KV v2, `vault_path` stored in PostgreSQL +- [ ] Credential rotation: Vault versioned update, `vault_path` unchanged +- [ ] Credential revocation: Vault secret deleted, DB status = `revoked` +- [ ] Existing bcrypt credentials continue to work until rotated +- [ ] VaultClient follows existing service interface pattern (DRY, SOLID) +- [ ] Zero `any` types, TypeScript strict +- [ ] `VAULT_ADDR` / `VAULT_TOKEN` validation at startup (fail-fast) +- [ ] DevOps docs updated with Vault setup section diff --git a/openspec/changes/phase-2-production-ready/specs/web-dashboard/spec.md b/openspec/changes/phase-2-production-ready/specs/web-dashboard/spec.md new file mode 100644 index 0000000..1c493f6 --- /dev/null +++ b/openspec/changes/phase-2-production-ready/specs/web-dashboard/spec.md @@ -0,0 +1,34 @@ +# Spec: Web Dashboard UI + +**Status**: Pending CEO approval +**Workstream**: 6 of 8 + +## Scope +- `dashboard/` directory at project root +- React 18 + TypeScript strict, built with Vite 5 +- TanStack Query v5 for server state +- shadcn/ui (Radix UI + Tailwind CSS) for components +- Four pages: Agents, Credentials, Audit Log, Health +- Client-side auth: `clientId` + `clientSecret` → `TokenManager` +- Served from AgentIdP server at `GET /dashboard` (static build) + +## Pages + +| Page | Route | Scope Required | +|------|-------|---------------| +| Login | `/dashboard/login` | None | +| Agents | `/dashboard/agents` | `agents:read` | +| Agent Detail | `/dashboard/agents/:id` | `agents:read` | +| Credentials | `/dashboard/agents/:id/credentials` | `agents:read` | +| Audit Log | `/dashboard/audit` | `audit:read` | +| Health | `/dashboard/health` | None | + +## Acceptance Criteria +- [ ] TypeScript strict — zero `any` across all dashboard files +- [ ] `dashboard/tsconfig.json` with `strict: true` +- [ ] Login form stores token in `sessionStorage` only (not `localStorage`) +- [ ] All write operations (suspend, revoke, rotate) require confirmation dialog +- [ ] OWASP Top 10 review: no XSS, no CSRF, no sensitive data in URL params +- [ ] Vite build outputs to `dashboard/dist/`; AgentIdP serves it as static +- [ ] `dashboard/README.md` — how to build and serve +- [ ] Responsive layout — functional on desktop and tablet diff --git a/openspec/changes/phase-2-production-ready/tasks.md b/openspec/changes/phase-2-production-ready/tasks.md new file mode 100644 index 0000000..6143012 --- /dev/null +++ b/openspec/changes/phase-2-production-ready/tasks.md @@ -0,0 +1,127 @@ +# Phase 2: Production-Ready — Tasks + +**Status**: In progress — Workstreams 1, 2, 3, 4 complete. + +## CEO Approval Gates (required before implementation) + +- [x] A0.1 Approve dependency: `node-vault` (Vault integration) +- [x] A0.2 Approve dependency: `@openpolicyagent/opa-wasm` (OPA policy engine) +- [x] A0.3 Approve dependency: React 18 + Vite 5 (web dashboard) +- [x] A0.4 Approve dependency: `prom-client` (Prometheus metrics) +- [x] A0.5 Approve dependency: Terraform (infrastructure as code) + +--- + +## Workstream 1: HashiCorp Vault Integration + +- [x] 1.1 Write `src/vault/VaultClient.ts` — wraps `node-vault`; methods: writeSecret, readSecret, deleteSecret, verifySecret +- [x] 1.2 Write `src/db/migrations/005_add_vault_path.sql` — add `vault_path` column to `credentials` +- [x] 1.3 Update `CredentialService.ts` — new credentials use Vault; existing bcrypt credentials continue to work +- [x] 1.4 Update `docs/devops/environment-variables.md` — add VAULT_ADDR, VAULT_TOKEN, VAULT_MOUNT +- [x] 1.5 Write `docs/devops/vault-setup.md` — Vault dev server setup, production Vault config, migration guide +- [x] 1.6 Write unit tests for VaultClient (mocked Vault) and updated CredentialService +- [x] 1.7 QA sign-off: zero `any`, TypeScript strict, >80% coverage, coexistence verified + +## Workstream 2: Python SDK + +- [x] 2.1 Create `sdk-python/` with `pyproject.toml` — name: sentryagent-idp, python>=3.9 +- [x] 2.2 Write `sdk-python/src/sentryagent_idp/types.py` — all request/response dataclasses +- [x] 2.3 Write `sdk-python/src/sentryagent_idp/errors.py` — AgentIdPError exception +- [x] 2.4 Write `sdk-python/src/sentryagent_idp/token_manager.py` — sync TokenManager +- [x] 2.5 Write `sdk-python/src/sentryagent_idp/async_token_manager.py` — async TokenManager (httpx) +- [x] 2.6 Write `sdk-python/src/sentryagent_idp/services/agents.py` — AgentRegistryClient (sync + async) +- [x] 2.7 Write `sdk-python/src/sentryagent_idp/services/credentials.py` — CredentialClient (sync + async) +- [x] 2.8 Write `sdk-python/src/sentryagent_idp/services/token.py` — TokenClient (sync + async) +- [x] 2.9 Write `sdk-python/src/sentryagent_idp/services/audit.py` — AuditClient (sync + async) +- [x] 2.10 Write `sdk-python/src/sentryagent_idp/client.py` — AgentIdPClient (sync) + AsyncAgentIdPClient +- [x] 2.11 Write `sdk-python/src/sentryagent_idp/__init__.py` — barrel exports +- [x] 2.12 Write `sdk-python/README.md` +- [x] 2.13 QA: `mypy --strict` clean, all 14 endpoints, AgentIdPError on all failure paths, pytest >80% + +## Workstream 3: Go SDK + +- [x] 3.1 Create `sdk-go/` with `go.mod` — module: github.com/sentryagent/idp-sdk-go, go 1.21 +- [x] 3.2 Write `sdk-go/types.go` — all request/response structs +- [x] 3.3 Write `sdk-go/errors.go` — AgentIdPError type implementing error interface +- [x] 3.4 Write `sdk-go/token_manager.go` — mutex-guarded TokenManager +- [x] 3.5 Write `sdk-go/agents.go` — AgentRegistryClient (flat package; see ADR below) +- [x] 3.6 Write `sdk-go/credentials.go` — CredentialClient +- [x] 3.7 Write `sdk-go/token_service.go` — TokenServiceClient +- [x] 3.8 Write `sdk-go/audit.go` — AuditClient +- [x] 3.9 Write `sdk-go/client.go` — AgentIdPClient +- [x] 3.10 Write `sdk-go/README.md` +- [x] 3.11 QA: `go vet` clean, `staticcheck` clean, all 14 endpoints, goroutine-safe, `go test ./...` >80% + +## Workstream 4: Java SDK + +- [x] 4.1 Create `sdk-java/` with `pom.xml` — groupId: ai.sentryagent, artifactId: idp-sdk, Java 17 +- [x] 4.2 Write all POJO request/response model classes +- [x] 4.3 Write `AgentIdPException.java` extending RuntimeException +- [x] 4.4 Write `TokenManager.java` — synchronized cache with 60s refresh buffer +- [x] 4.5 Write `AgentRegistryClient.java` — sync + CompletableFuture methods +- [x] 4.6 Write `CredentialClient.java` — sync + CompletableFuture methods +- [x] 4.7 Write `TokenClient.java` — sync + CompletableFuture methods +- [x] 4.8 Write `AuditClient.java` — sync + CompletableFuture methods +- [x] 4.9 Write `AgentIdPClient.java` — composes all service clients +- [x] 4.10 Write `sdk-java/README.md` +- [x] 4.11 QA: `mvn verify` passes, all 14 endpoints, AgentIdPException on all failure paths, JUnit 5 >80% + +## Workstream 5: OPA Policy Engine + +- [x] 5.1 Write `policies/authz.rego` — allow/deny rules matching all current scope checks +- [x] 5.2 Write `policies/data/scopes.json` — scope to endpoint permission mapping +- [x] 5.3 Write `src/middleware/opa.ts` — OpaMiddleware: loads Wasm, evaluates input, returns allow/deny +- [x] 5.4 Replace static scope check in `src/middleware/auth.ts` with OpaMiddleware +- [x] 5.5 Add SIGHUP handler in `src/server.ts` to hot-reload policy files +- [x] 5.6 Update `docs/devops/environment-variables.md` — add POLICY_DIR +- [x] 5.7 QA: all existing auth tests pass unchanged, new OPA unit tests, hot-reload verified + +## Workstream 6: Web Dashboard UI + +- [x] 6.1 Create `dashboard/` with Vite 5 + React 18 + TypeScript strict configuration +- [x] 6.2 Set up shadcn/ui with Tailwind CSS +- [x] 6.3 Write `dashboard/src/lib/auth.ts` — credential entry, TokenManager, sessionStorage +- [x] 6.4 Write `dashboard/src/lib/client.ts` — wraps @sentryagent/idp-sdk AgentIdPClient +- [x] 6.5 Write Login page (`/dashboard/login`) +- [x] 6.6 Write Agents page (`/dashboard/agents`) — list, search, filter by status +- [x] 6.7 Write Agent Detail page (`/dashboard/agents/:id`) — suspend/reactivate with confirm dialog +- [x] 6.8 Write Credentials page (`/dashboard/agents/:id/credentials`) — rotate/revoke with confirm +- [x] 6.9 Write Audit Log page (`/dashboard/audit`) — filters, pagination +- [x] 6.10 Write Health page (`/dashboard/health`) — PostgreSQL + Redis connectivity status +- [x] 6.11 Configure AgentIdP Express app to serve `dashboard/dist/` at `/dashboard` +- [x] 6.12 Write `dashboard/README.md` +- [x] 6.13 QA: TypeScript strict, zero `any`, OWASP Top 10 review, responsive layout verified + +## Workstream 7: Prometheus + Grafana Monitoring + +- [x] 7.1 Add `prom-client` to dependencies (after CEO approval A0.4) +- [x] 7.2 Write `src/metrics/registry.ts` — shared Prometheus Registry with all 7 metric definitions +- [x] 7.3 Instrument `OAuth2Service.ts` — increment `agentidp_tokens_issued_total` +- [x] 7.4 Instrument `AgentService.ts` — increment `agentidp_agents_registered_total` +- [x] 7.5 Instrument `src/middleware/` — HTTP request counter and duration histogram +- [x] 7.6 Instrument `src/db/pool.ts` — DB query duration histogram +- [x] 7.7 Instrument `src/cache/redis.ts` — Redis command duration histogram +- [x] 7.8 Add `GET /metrics` route (unauthenticated, Prometheus text format) +- [x] 7.9 Write `monitoring/prometheus/prometheus.yml` — scrape config +- [x] 7.10 Write `monitoring/grafana/provisioning/` — datasource + dashboard provisioning +- [x] 7.11 Write `monitoring/grafana/dashboards/agentidp.json` — pre-built Grafana dashboard +- [x] 7.12 Write `docker-compose.monitoring.yml` overlay +- [x] 7.13 Update `docs/devops/operations.md` — monitoring section +- [x] 7.14 QA: all 7 metrics verified under load, Grafana auto-provisions, no auth leak on /metrics + +## Workstream 8: Multi-Region Deployment (Terraform) + +- [x] 8.1 Write `terraform/modules/agentidp/main.tf` + `variables.tf` + `outputs.tf` +- [x] 8.2 Write `terraform/modules/rds/` — managed PostgreSQL module +- [x] 8.3 Write `terraform/modules/redis/` — managed Redis module +- [x] 8.4 Write `terraform/modules/lb/` — load balancer + TLS module +- [x] 8.5 Write `terraform/environments/aws/main.tf` + `variables.tf` + `terraform.tfvars.example` +- [x] 8.6 Write `terraform/environments/gcp/main.tf` + `variables.tf` + `terraform.tfvars.example` +- [x] 8.7 Write `docs/devops/deployment.md` — end-to-end AWS and GCP deployment walkthrough +- [x] 8.8 QA: secrets not hardcoded, TLS enforced, DB/Redis VPC-internal (static review passed; terraform validate requires Terraform CLI not present in this env) + +--- + +## Phase 2 Complete Criteria + +All 8 workstreams done. All tasks checked. All QA gates passed. CEO reviewed. diff --git a/package-lock.json b/package-lock.json index 055cae0..6da85ba 100644 --- a/package-lock.json +++ b/package-lock.json @@ -8,6 +8,7 @@ "name": "sentryagent-idp", "version": "1.0.0", "dependencies": { + "@open-policy-agent/opa-wasm": "^1.10.0", "bcryptjs": "^2.4.3", "cors": "^2.8.5", "dotenv": "^16.4.5", @@ -16,9 +17,11 @@ "joi": "^17.12.3", "jsonwebtoken": "^9.0.2", "morgan": "^1.10.0", + "node-vault": "^0.12.0", "pg": "^8.11.3", "pino": "^8.19.0", "pino-http": "^9.0.0", + "prom-client": "^15.1.3", "redis": "^4.6.13", "uuid": "^9.0.1" }, @@ -30,6 +33,7 @@ "@types/jsonwebtoken": "^9.0.6", "@types/morgan": "^1.9.9", "@types/node": "^20.12.7", + "@types/node-vault": "^0.9.1", "@types/pg": "^8.11.5", "@types/supertest": "^6.0.2", "@types/uuid": "^9.0.8", @@ -1261,6 +1265,31 @@ "node": ">= 8" } }, + "node_modules/@open-policy-agent/opa-wasm": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/@open-policy-agent/opa-wasm/-/opa-wasm-1.10.0.tgz", + "integrity": "sha512-ymR/nFS3nO9o24j9xowGGQaf+Gmb813QcxUpVZkfRlJkawKWqSIllnEH15agyWjijmOIyhA+OBErenx6N3jphw==", + "license": "Apache-2.0", + "dependencies": { + "sprintf-js": "^1.1.2", + "yaml": "^1.10.2" + } + }, + "node_modules/@open-policy-agent/opa-wasm/node_modules/sprintf-js": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.1.3.tgz", + "integrity": "sha512-Oo+0REFV59/rz3gfJNKQiBlwfHaSESl1pcGyABQsnnIfWOFt6JNj5gCog2U6MLZ//IGYD+nA8nI+mTShREReaA==", + "license": "BSD-3-Clause" + }, + "node_modules/@opentelemetry/api": { + "version": "1.9.1", + "resolved": "https://registry.npmjs.org/@opentelemetry/api/-/api-1.9.1.tgz", + "integrity": "sha512-gLyJlPHPZYdAk1JENA9LeHejZe1Ti77/pTeFm/nMXmQH/HFZlcS/O2XJB+L8fkbrNSqhdtlvjBVjxwUYanNH5Q==", + "license": "Apache-2.0", + "engines": { + "node": ">=8.0.0" + } + }, "node_modules/@paralleldrive/cuid2": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/@paralleldrive/cuid2/-/cuid2-2.3.1.tgz", @@ -1475,6 +1504,13 @@ "@types/node": "*" } }, + "node_modules/@types/caseless": { + "version": "0.12.5", + "resolved": "https://registry.npmjs.org/@types/caseless/-/caseless-0.12.5.tgz", + "integrity": "sha512-hWtVTC2q7hc7xZ/RLbxapMvDMgUnDvKvMOpKal4DrMyfGBUfB1oKaZlIRr6mJL+If3bAP6sV/QneGzF6tJjZDg==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/connect": { "version": "3.4.38", "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", @@ -1625,6 +1661,13 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/mustache": { + "version": "4.2.6", + "resolved": "https://registry.npmjs.org/@types/mustache/-/mustache-4.2.6.tgz", + "integrity": "sha512-t+8/QWTAhOFlrF1IVZqKnMRJi84EgkIK5Kh0p2JV4OLywUvCwJPFxbJAl7XAow7DVIHsF+xW9f1MVzg0L6Szjw==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/node": { "version": "20.19.37", "resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.37.tgz", @@ -1635,6 +1678,17 @@ "undici-types": "~6.21.0" } }, + "node_modules/@types/node-vault": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/@types/node-vault/-/node-vault-0.9.1.tgz", + "integrity": "sha512-h7b0JZ76kvwFL/XvfNV2LJ45/SVXLkOvrIKHIGR5Cp3c/BIWsDetJR6Gfzppl3BfX5RN3rlEuHmmHhKnuL4nlA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mustache": "*", + "@types/request": "*" + } + }, "node_modules/@types/pg": { "version": "8.20.0", "resolved": "https://registry.npmjs.org/@types/pg/-/pg-8.20.0.tgz", @@ -1661,6 +1715,37 @@ "dev": true, "license": "MIT" }, + "node_modules/@types/request": { + "version": "2.48.13", + "resolved": "https://registry.npmjs.org/@types/request/-/request-2.48.13.tgz", + "integrity": "sha512-FGJ6udDNUCjd19pp0Q3iTiDkwhYup7J8hpMW9c4k53NrccQFFWKRho6hvtPPEhnXWKvukfwAlB6DbDz4yhH5Gg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/caseless": "*", + "@types/node": "*", + "@types/tough-cookie": "*", + "form-data": "^2.5.5" + } + }, + "node_modules/@types/request/node_modules/form-data": { + "version": "2.5.5", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.5.5.tgz", + "integrity": "sha512-jqdObeR2rxZZbPSGL+3VckHMYtu+f9//KXBsVny6JSX/pa38Fy+bGjuG8eW/H6USNQWhLi8Num++cU2yOCNz4A==", + "dev": true, + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.35", + "safe-buffer": "^5.2.1" + }, + "engines": { + "node": ">= 0.12" + } + }, "node_modules/@types/send": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/@types/send/-/send-1.2.1.tgz", @@ -1725,6 +1810,13 @@ "@types/superagent": "^8.1.0" } }, + "node_modules/@types/tough-cookie": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/@types/tough-cookie/-/tough-cookie-4.0.5.tgz", + "integrity": "sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==", + "dev": true, + "license": "MIT" + }, "node_modules/@types/uuid": { "version": "9.0.8", "resolved": "https://registry.npmjs.org/@types/uuid/-/uuid-9.0.8.tgz", @@ -2137,7 +2229,6 @@ "version": "0.4.0", "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", - "dev": true, "license": "MIT" }, "node_modules/atomic-sleep": { @@ -2149,6 +2240,17 @@ "node": ">=8.0.0" } }, + "node_modules/axios": { + "version": "1.14.0", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.14.0.tgz", + "integrity": "sha512-3Y8yrqLSwjuzpXuZ0oIYZ/XGgLwUIBU3uLvbcpb0pidD9ctpShJd43KSlEEkVQg6DS0G9NKyzOvBfUtDKEyHvQ==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.11", + "form-data": "^4.0.5", + "proxy-from-env": "^2.1.0" + } + }, "node_modules/babel-jest": { "version": "29.7.0", "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", @@ -2339,6 +2441,12 @@ "integrity": "sha512-V/Hy/X9Vt7f3BbPJEi8BdVFMByHi+jNXrYkW3huaybV/kQ0KJg0Y6PkEMbn+zeT+i+SiKZ/HMqJGIIt4LZDqNQ==", "license": "MIT" }, + "node_modules/bintrees": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/bintrees/-/bintrees-1.0.2.tgz", + "integrity": "sha512-VOMgTMwjAaUG580SXn3LacVgjurrbMme7ZZNYGSSV7mmtY6QQRh0Eg3pwIcntQ77DErK1L0NxkbetjcoXzVwKw==", + "license": "MIT" + }, "node_modules/body-parser": { "version": "1.20.4", "resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz", @@ -2690,7 +2798,6 @@ "version": "1.0.8", "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dev": true, "license": "MIT", "dependencies": { "delayed-stream": "~1.0.0" @@ -2831,7 +2938,6 @@ "version": "4.4.3", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", - "dev": true, "license": "MIT", "dependencies": { "ms": "^2.1.3" @@ -2881,7 +2987,6 @@ "version": "1.0.0", "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "dev": true, "license": "MIT", "engines": { "node": ">=0.4.0" @@ -3094,7 +3199,6 @@ "version": "2.1.0", "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", - "dev": true, "license": "MIT", "dependencies": { "es-errors": "^1.3.0", @@ -3647,11 +3751,30 @@ "dev": true, "license": "ISC" }, + "node_modules/follow-redirects": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, "node_modules/form-data": { "version": "4.0.5", "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz", "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==", - "dev": true, "license": "MIT", "dependencies": { "asynckit": "^0.4.0", @@ -3987,7 +4110,6 @@ "version": "1.0.2", "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", - "dev": true, "license": "MIT", "dependencies": { "has-symbols": "^1.0.3" @@ -5414,6 +5536,15 @@ "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "license": "MIT" }, + "node_modules/mustache": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz", + "integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==", + "license": "MIT", + "bin": { + "mustache": "bin/mustache" + } + }, "node_modules/natural-compare": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz", @@ -5451,6 +5582,21 @@ "dev": true, "license": "MIT" }, + "node_modules/node-vault": { + "version": "0.12.0", + "resolved": "https://registry.npmjs.org/node-vault/-/node-vault-0.12.0.tgz", + "integrity": "sha512-+SL3DSREptI+UJMM8UUwlI3jR5agPuAgCxSdUfeybGKszXiILXTCUHxErDdpgNgug8oj4v2rOmyrXhRJ4LZsyQ==", + "license": "MIT", + "dependencies": { + "axios": "^1.13.6", + "debug": "^4.3.4", + "mustache": "^4.2.0", + "tv4": "^1.3.0" + }, + "engines": { + "node": ">= 18.0.0" + } + }, "node_modules/normalize-path": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", @@ -6051,6 +6197,19 @@ "integrity": "sha512-mqn0kFRl0EoqhnL0GQ0veqFHyIN1yig9RHh/InzORTUiZHFRAur+aMtRkELNwGs9aNwKS6tg/An4NYBPGwvtzQ==", "license": "MIT" }, + "node_modules/prom-client": { + "version": "15.1.3", + "resolved": "https://registry.npmjs.org/prom-client/-/prom-client-15.1.3.tgz", + "integrity": "sha512-6ZiOBfCywsD4k1BN9IX0uZhF+tJkV8q8llP64G5Hajs4JOeVLPCwpPVcpXy3BwYiUGgyJzsJJQeOIv7+hDSq8g==", + "license": "Apache-2.0", + "dependencies": { + "@opentelemetry/api": "^1.4.0", + "tdigest": "^0.1.1" + }, + "engines": { + "node": "^16 || ^18 || >=20" + } + }, "node_modules/prompts": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.4.2.tgz", @@ -6078,6 +6237,15 @@ "node": ">= 0.10" } }, + "node_modules/proxy-from-env": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-2.1.0.tgz", + "integrity": "sha512-cJ+oHTW1VAEa8cJslgmUZrc+sjRKgAKl3Zyse6+PV38hZe/V6Z14TbCuXcan9F9ghlz4QrFr2c92TNF82UkYHA==", + "license": "MIT", + "engines": { + "node": ">=10" + } + }, "node_modules/punycode": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", @@ -6811,6 +6979,15 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/tdigest": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/tdigest/-/tdigest-0.1.2.tgz", + "integrity": "sha512-+G0LLgjjo9BZX2MfdvPfH+MKLCrxlXSYec5DaPYP1fe6Iyhf0/fSmJ0bFiZ1F8BT6cGXl2LpltQptzjXKWEkKA==", + "license": "MIT", + "dependencies": { + "bintrees": "1.0.2" + } + }, "node_modules/test-exclude": { "version": "6.0.0", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", @@ -7018,6 +7195,24 @@ } } }, + "node_modules/tv4": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/tv4/-/tv4-1.3.0.tgz", + "integrity": "sha512-afizzfpJgvPr+eDkREK4MxJ/+r8nEEHcmitwgnPUqpaP+FpwQyadnxNoSACbgc/b1LsZYtODGoPiFxQrgJgjvw==", + "license": [ + { + "type": "Public Domain", + "url": "http://geraintluff.github.io/tv4/LICENSE.txt" + }, + { + "type": "MIT", + "url": "http://jsonary.com/LICENSE.txt" + } + ], + "engines": { + "node": ">= 0.8.0" + } + }, "node_modules/type-check": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz", @@ -7313,6 +7508,15 @@ "dev": true, "license": "ISC" }, + "node_modules/yaml": { + "version": "1.10.3", + "resolved": "https://registry.npmjs.org/yaml/-/yaml-1.10.3.tgz", + "integrity": "sha512-vIYeF1u3CjlhAFekPPAk2h/Kv4T3mAkMox5OymRiJQB0spDP10LHvt+K7G9Ny6NuuMAb25/6n1qyUjAcGNf/AA==", + "license": "ISC", + "engines": { + "node": ">= 6" + } + }, "node_modules/yargs": { "version": "17.7.2", "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", diff --git a/package.json b/package.json index 15d4b05..1524b98 100644 --- a/package.json +++ b/package.json @@ -15,6 +15,7 @@ "format": "prettier --write src/**/*.ts" }, "dependencies": { + "@open-policy-agent/opa-wasm": "^1.10.0", "bcryptjs": "^2.4.3", "cors": "^2.8.5", "dotenv": "^16.4.5", @@ -23,9 +24,11 @@ "joi": "^17.12.3", "jsonwebtoken": "^9.0.2", "morgan": "^1.10.0", + "node-vault": "^0.12.0", "pg": "^8.11.3", "pino": "^8.19.0", "pino-http": "^9.0.0", + "prom-client": "^15.1.3", "redis": "^4.6.13", "uuid": "^9.0.1" }, @@ -37,6 +40,7 @@ "@types/jsonwebtoken": "^9.0.6", "@types/morgan": "^1.9.9", "@types/node": "^20.12.7", + "@types/node-vault": "^0.9.1", "@types/pg": "^8.11.5", "@types/supertest": "^6.0.2", "@types/uuid": "^9.0.8", diff --git a/policies/authz.rego b/policies/authz.rego new file mode 100644 index 0000000..b91e97a --- /dev/null +++ b/policies/authz.rego @@ -0,0 +1,86 @@ +package authz + +import rego.v1 + +# ─── Data ───────────────────────────────────────────────────────────────────── +# data.endpoint_permissions is loaded from policies/data/scopes.json +# Structure: { "METHOD:/path/pattern": ["scope1", ...], ... } + +# ─── Default ────────────────────────────────────────────────────────────────── +default allow := false + +default reason := "insufficient_scope" + +# ─── Path pattern normalisation ─────────────────────────────────────────────── +# Converts a concrete request path to a pattern key by replacing UUID-like +# segments with named placeholders. +# +# Supported patterns (longest-match wins via iteration): +# /api/v1/agents/{uuid}/credentials/{uuid}/rotate +# /api/v1/agents/{uuid}/credentials/{uuid} +# /api/v1/agents/{uuid}/credentials +# /api/v1/agents/{uuid} +# /api/v1/agents +# /api/v1/token/introspect +# /api/v1/token/revoke +# /api/v1/audit/{uuid} +# /api/v1/audit + +# Build the lookup key from method + normalised path. +lookup_key(method, path) := key if { + normalised := normalise_path(path) + key := concat(":", [method, normalised]) +} + +# Normalise a concrete path to its pattern form. +normalise_path(path) := "/api/v1/agents/:id/credentials/:credId/rotate" if { + regex.match(`^/api/v1/agents/[^/]+/credentials/[^/]+/rotate$`, path) +} + +normalise_path(path) := "/api/v1/agents/:id/credentials/:credId" if { + regex.match(`^/api/v1/agents/[^/]+/credentials/[^/]+$`, path) +} + +normalise_path(path) := "/api/v1/agents/:id/credentials" if { + regex.match(`^/api/v1/agents/[^/]+/credentials$`, path) +} + +normalise_path(path) := "/api/v1/agents/:id" if { + regex.match(`^/api/v1/agents/[^/]+$`, path) +} + +normalise_path(path) := "/api/v1/agents" if { + path == "/api/v1/agents" +} + +normalise_path(path) := "/api/v1/token/introspect" if { + path == "/api/v1/token/introspect" +} + +normalise_path(path) := "/api/v1/token/revoke" if { + path == "/api/v1/token/revoke" +} + +normalise_path(path) := "/api/v1/audit/:id" if { + regex.match(`^/api/v1/audit/[^/]+$`, path) +} + +normalise_path(path) := "/api/v1/audit" if { + path == "/api/v1/audit" +} + +# ─── Core allow rule ────────────────────────────────────────────────────────── +# allow = true if every required scope for the endpoint is present in input.scopes. + +allow if { + key := lookup_key(input.method, input.path) + required := data.endpoint_permissions[key] + every req_scope in required { + req_scope in input.scopes + } +} + +# reason is populated only on deny. +reason := "missing required scope for this endpoint" if { + not allow +} diff --git a/policies/data/scopes.json b/policies/data/scopes.json new file mode 100644 index 0000000..e13ebce --- /dev/null +++ b/policies/data/scopes.json @@ -0,0 +1,17 @@ +{ + "endpoint_permissions": { + "GET:/api/v1/agents": ["agents:read"], + "GET:/api/v1/agents/:id": ["agents:read"], + "POST:/api/v1/agents": ["agents:write"], + "PATCH:/api/v1/agents/:id": ["agents:write"], + "DELETE:/api/v1/agents/:id": ["agents:write"], + "GET:/api/v1/agents/:id/credentials": ["agents:read"], + "POST:/api/v1/agents/:id/credentials": ["agents:write"], + "POST:/api/v1/agents/:id/credentials/:credId/rotate": ["agents:write"], + "DELETE:/api/v1/agents/:id/credentials/:credId": ["agents:write"], + "POST:/api/v1/token/introspect": ["tokens:read"], + "POST:/api/v1/token/revoke": ["tokens:read"], + "GET:/api/v1/audit": ["audit:read"], + "GET:/api/v1/audit/:id": ["audit:read"] + } +} diff --git a/sdk-go/README.md b/sdk-go/README.md new file mode 100644 index 0000000..fd1917c --- /dev/null +++ b/sdk-go/README.md @@ -0,0 +1,200 @@ +# SentryAgent.ai AgentIdP — Go SDK + +Official Go client for the [SentryAgent.ai AgentIdP](https://sentryagent.ai) — an open-source Identity Provider for AI agents built on OAuth 2.0 (RFC 6749) and aligned with the [AGNTCY](https://agntcy.org) open standard. + +## Requirements + +- Go 1.21+ +- A running AgentIdP server + +## Installation + +```bash +go get github.com/sentryagent/idp-sdk-go +``` + +## Quick Start + +```go +package main + +import ( + "context" + "fmt" + "log" + + agentidp "github.com/sentryagent/idp-sdk-go" +) + +func main() { + ctx := context.Background() + + client := agentidp.NewAgentIdPClient(agentidp.AgentIdPClientConfig{ + BaseURL: "https://idp.example.com", + ClientID: "your-agent-client-id", + ClientSecret: "sk_live_...", + }) + + // Register a new AI agent + agent, err := client.Agents.RegisterAgent(ctx, agentidp.RegisterAgentRequest{ + Email: "screener@example.com", + AgentType: "screener", + Version: "1.0.0", + Capabilities: []string{"read", "classify"}, + Owner: "platform-team", + DeploymentEnv: "production", + }) + if err != nil { + log.Fatal(err) + } + fmt.Printf("Registered agent: %s\n", agent.AgentID) +} +``` + +## Authentication + +The SDK handles OAuth 2.0 Client Credentials automatically. Tokens are cached and refreshed 60 seconds before expiry. All operations are goroutine-safe. + +```go +client := agentidp.NewAgentIdPClient(agentidp.AgentIdPClientConfig{ + BaseURL: "https://idp.example.com", + ClientID: "my-client-id", + ClientSecret: "my-client-secret", + Scope: "agents:read agents:write", // optional, defaults to all four scopes +}) +``` + +## Agent Registry + +```go +ctx := context.Background() + +// Register +agent, err := client.Agents.RegisterAgent(ctx, agentidp.RegisterAgentRequest{...}) + +// List (with optional filters) +agents, err := client.Agents.ListAgents(ctx, &agentidp.ListAgentsParams{ + Status: "active", + AgentType: "screener", + Page: 1, + Limit: 20, +}) + +// Get by ID +agent, err := client.Agents.GetAgent(ctx, "agent-uuid") + +// Partial update +version := "2.0.0" +agent, err := client.Agents.UpdateAgent(ctx, "agent-uuid", agentidp.UpdateAgentRequest{ + Version: &version, +}) + +// Decommission (permanent) +err = client.Agents.DecommissionAgent(ctx, "agent-uuid") +``` + +## Credential Management + +```go +// Generate (returns one-time ClientSecret) +cred, err := client.Credentials.GenerateCredential(ctx, "agent-uuid") +fmt.Println(cred.ClientSecret) // store this — it is never shown again + +// List +creds, err := client.Credentials.ListCredentials(ctx, "agent-uuid", 1, 20) + +// Rotate (old secret is immediately invalidated) +newCred, err := client.Credentials.RotateCredential(ctx, "agent-uuid", "cred-uuid") + +// Revoke +revoked, err := client.Credentials.RevokeCredential(ctx, "agent-uuid", "cred-uuid") +``` + +## Token Operations + +```go +// Introspect (RFC 7662) +result, err := client.Tokens.IntrospectToken(ctx, "access-token-to-check") +if result.Active { + fmt.Printf("Token belongs to: %s\n", *result.Sub) +} + +// Revoke +err = client.Tokens.RevokeToken(ctx, "access-token-to-revoke") +``` + +## Audit Log + +```go +// Query with filters +events, err := client.Audit.QueryAuditLog(ctx, &agentidp.QueryAuditParams{ + AgentID: "agent-uuid", + Action: "token.issued", + Outcome: "success", + FromDate: "2026-01-01", + ToDate: "2026-01-31", + Page: 1, + Limit: 50, +}) + +// Get single event +event, err := client.Audit.GetAuditEvent(ctx, "event-uuid") +``` + +## Error Handling + +All errors are returned as `*AgentIdPError`: + +```go +agent, err := client.Agents.GetAgent(ctx, "unknown-id") +if err != nil { + if apiErr, ok := err.(*agentidp.AgentIdPError); ok { + fmt.Printf("code=%s status=%d\n", apiErr.Code, apiErr.HTTPStatus) + // e.g. code=AgentNotFoundError status=404 + } + return err +} +``` + +| Field | Type | Description | +|--------------|--------------------------|-------------------------------------------------| +| `Code` | `string` | Machine-readable error code | +| `Message` | `string` | Human-readable description | +| `HTTPStatus` | `int` | HTTP status code (0 for network/build errors) | +| `Details` | `map[string]interface{}` | Optional structured context from the API | + +## Custom HTTP Client + +Inject a custom `*http.Client` for proxy support, custom timeouts, or test mocking: + +```go +client := agentidp.NewAgentIdPClient(agentidp.AgentIdPClientConfig{ + BaseURL: "https://idp.example.com", + ClientID: "cid", + ClientSecret: "secret", + HTTPClient: &http.Client{Timeout: 5 * time.Second}, +}) +``` + +## API Coverage + +| Endpoint | Method | SDK Method | +|--------------------------------------------------|--------|-----------------------------------------| +| POST /api/v1/agents | POST | `Agents.RegisterAgent` | +| GET /api/v1/agents | GET | `Agents.ListAgents` | +| GET /api/v1/agents/:id | GET | `Agents.GetAgent` | +| PATCH /api/v1/agents/:id | PATCH | `Agents.UpdateAgent` | +| DELETE /api/v1/agents/:id | DELETE | `Agents.DecommissionAgent` | +| POST /api/v1/agents/:id/credentials | POST | `Credentials.GenerateCredential` | +| GET /api/v1/agents/:id/credentials | GET | `Credentials.ListCredentials` | +| POST /api/v1/agents/:id/credentials/:cid/rotate | POST | `Credentials.RotateCredential` | +| DELETE /api/v1/agents/:id/credentials/:cid | DELETE | `Credentials.RevokeCredential` | +| POST /api/v1/token | POST | (TokenManager — automatic) | +| POST /api/v1/token/introspect | POST | `Tokens.IntrospectToken` | +| POST /api/v1/token/revoke | POST | `Tokens.RevokeToken` | +| GET /api/v1/audit | GET | `Audit.QueryAuditLog` | +| GET /api/v1/audit/:id | GET | `Audit.GetAuditEvent` | + +## License + +Apache 2.0 — see [LICENSE](../LICENSE). diff --git a/sdk-go/agents.go b/sdk-go/agents.go new file mode 100644 index 0000000..3b4eeeb --- /dev/null +++ b/sdk-go/agents.go @@ -0,0 +1,113 @@ +package agentidp + +import ( + "context" + "fmt" + "net/http" + "net/url" + "strings" +) + +// AgentRegistryClient provides methods for the Agent Registry API endpoints. +// All methods take a context.Context as first argument. +type AgentRegistryClient struct { + baseURL string + getToken func(ctx context.Context) (string, error) + httpClient *http.Client +} + +func newAgentRegistryClient(baseURL string, getToken func(ctx context.Context) (string, error), httpClient *http.Client) *AgentRegistryClient { + return &AgentRegistryClient{ + baseURL: strings.TrimRight(baseURL, "/"), + getToken: getToken, + httpClient: httpClient, + } +} + +// RegisterAgent registers a new AI agent identity. +// POST /api/v1/agents → 201 Agent +func (c *AgentRegistryClient) RegisterAgent(ctx context.Context, req RegisterAgentRequest) (*Agent, error) { + token, err := c.getToken(ctx) + if err != nil { + return nil, err + } + var agent Agent + if err := doRequest(ctx, c.httpClient, http.MethodPost, c.baseURL+"/api/v1/agents", req, token, &agent); err != nil { + return nil, err + } + return &agent, nil +} + +// ListAgents returns a paginated list of registered agents. +// GET /api/v1/agents → 200 PaginatedAgents +func (c *AgentRegistryClient) ListAgents(ctx context.Context, params *ListAgentsParams) (*PaginatedAgents, error) { + token, err := c.getToken(ctx) + if err != nil { + return nil, err + } + rawURL := c.baseURL + "/api/v1/agents" + if params != nil { + q := url.Values{} + if params.Status != "" { + q.Set("status", params.Status) + } + if params.AgentType != "" { + q.Set("agentType", params.AgentType) + } + if params.DeploymentEnv != "" { + q.Set("deploymentEnv", params.DeploymentEnv) + } + if params.Page > 0 { + q.Set("page", fmt.Sprintf("%d", params.Page)) + } + if params.Limit > 0 { + q.Set("limit", fmt.Sprintf("%d", params.Limit)) + } + if len(q) > 0 { + rawURL += "?" + q.Encode() + } + } + var result PaginatedAgents + if err := doRequest(ctx, c.httpClient, http.MethodGet, rawURL, nil, token, &result); err != nil { + return nil, err + } + return &result, nil +} + +// GetAgent retrieves a single agent by ID. +// GET /api/v1/agents/:id → 200 Agent +func (c *AgentRegistryClient) GetAgent(ctx context.Context, agentID string) (*Agent, error) { + token, err := c.getToken(ctx) + if err != nil { + return nil, err + } + var agent Agent + if err := doRequest(ctx, c.httpClient, http.MethodGet, c.baseURL+"/api/v1/agents/"+agentID, nil, token, &agent); err != nil { + return nil, err + } + return &agent, nil +} + +// UpdateAgent partially updates an agent. +// PATCH /api/v1/agents/:id → 200 Agent +func (c *AgentRegistryClient) UpdateAgent(ctx context.Context, agentID string, req UpdateAgentRequest) (*Agent, error) { + token, err := c.getToken(ctx) + if err != nil { + return nil, err + } + var agent Agent + if err := doRequest(ctx, c.httpClient, http.MethodPatch, c.baseURL+"/api/v1/agents/"+agentID, req, token, &agent); err != nil { + return nil, err + } + return &agent, nil +} + +// DecommissionAgent permanently removes an agent. +// DELETE /api/v1/agents/:id → 204 No Content +func (c *AgentRegistryClient) DecommissionAgent(ctx context.Context, agentID string) error { + token, err := c.getToken(ctx) + if err != nil { + return err + } + return doRequest(ctx, c.httpClient, http.MethodDelete, c.baseURL+"/api/v1/agents/"+agentID, nil, token, nil) +} diff --git a/sdk-go/agents_test.go b/sdk-go/agents_test.go new file mode 100644 index 0000000..851f2b7 --- /dev/null +++ b/sdk-go/agents_test.go @@ -0,0 +1,181 @@ +package agentidp + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" +) + +// mockAgent is the canonical test agent fixture. +var mockAgent = Agent{ + AgentID: "uuid-1", + Email: "a@b.ai", + AgentType: "screener", + Version: "1.0.0", + Capabilities: []string{"read"}, + Owner: "team", + DeploymentEnv: "production", + Status: "active", + CreatedAt: "2026-01-01T00:00:00Z", + UpdatedAt: "2026-01-01T00:00:00Z", +} + +var mockPaginatedAgents = PaginatedAgents{ + Data: []Agent{mockAgent}, + Total: 1, + Page: 1, + Limit: 20, +} + +// staticToken returns a fixed token for all test service clients. +func staticToken(_ context.Context) (string, error) { + return "test-bearer-token", nil +} + +func newAgentServer(t *testing.T, method, path string, status int, body interface{}) *httptest.Server { + t.Helper() + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != method { + t.Errorf("expected method %s, got %s", method, r.Method) + } + if r.URL.Path != path { + t.Errorf("expected path %s, got %s", path, r.URL.Path) + } + if r.Header.Get("Authorization") == "" { + t.Error("missing Authorization header") + } + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(status) + if body != nil { + _ = json.NewEncoder(w).Encode(body) + } + })) +} + +func TestAgentRegistryClient_RegisterAgent(t *testing.T) { + srv := newAgentServer(t, http.MethodPost, "/api/v1/agents", 201, mockAgent) + defer srv.Close() + + client := newAgentRegistryClient(srv.URL, staticToken, &http.Client{}) + agent, err := client.RegisterAgent(context.Background(), RegisterAgentRequest{ + Email: "a@b.ai", AgentType: "screener", Version: "1.0.0", + Capabilities: []string{"read"}, Owner: "team", DeploymentEnv: "production", + }) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if agent.AgentID != "uuid-1" { + t.Errorf("expected uuid-1, got %q", agent.AgentID) + } +} + +func TestAgentRegistryClient_ListAgents(t *testing.T) { + srv := newAgentServer(t, http.MethodGet, "/api/v1/agents", 200, mockPaginatedAgents) + defer srv.Close() + + client := newAgentRegistryClient(srv.URL, staticToken, &http.Client{}) + result, err := client.ListAgents(context.Background(), nil) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if result.Total != 1 { + t.Errorf("expected total 1, got %d", result.Total) + } + if len(result.Data) != 1 || result.Data[0].AgentID != "uuid-1" { + t.Error("unexpected data in paginated result") + } +} + +func TestAgentRegistryClient_ListAgents_WithParams(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Query().Get("status") != "active" { + t.Errorf("expected status=active, got %q", r.URL.Query().Get("status")) + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(mockPaginatedAgents) + })) + defer srv.Close() + + client := newAgentRegistryClient(srv.URL, staticToken, &http.Client{}) + _, err := client.ListAgents(context.Background(), &ListAgentsParams{Status: "active"}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestAgentRegistryClient_GetAgent(t *testing.T) { + srv := newAgentServer(t, http.MethodGet, "/api/v1/agents/uuid-1", 200, mockAgent) + defer srv.Close() + + client := newAgentRegistryClient(srv.URL, staticToken, &http.Client{}) + agent, err := client.GetAgent(context.Background(), "uuid-1") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if agent.AgentID != "uuid-1" { + t.Errorf("expected uuid-1, got %q", agent.AgentID) + } +} + +func TestAgentRegistryClient_GetAgent_NotFound(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(404) + _ = json.NewEncoder(w).Encode(map[string]string{ + "code": "AgentNotFoundError", + "message": "Agent not found.", + }) + })) + defer srv.Close() + + client := newAgentRegistryClient(srv.URL, staticToken, &http.Client{}) + _, err := client.GetAgent(context.Background(), "bad-id") + if err == nil { + t.Fatal("expected error, got nil") + } + apiErr, ok := err.(*AgentIdPError) + if !ok { + t.Fatalf("expected *AgentIdPError, got %T", err) + } + if apiErr.Code != "AgentNotFoundError" { + t.Errorf("expected AgentNotFoundError, got %q", apiErr.Code) + } + if apiErr.HTTPStatus != 404 { + t.Errorf("expected 404, got %d", apiErr.HTTPStatus) + } +} + +func TestAgentRegistryClient_UpdateAgent(t *testing.T) { + updated := mockAgent + updated.Version = "2.0.0" + srv := newAgentServer(t, http.MethodPatch, "/api/v1/agents/uuid-1", 200, updated) + defer srv.Close() + + v := "2.0.0" + client := newAgentRegistryClient(srv.URL, staticToken, &http.Client{}) + agent, err := client.UpdateAgent(context.Background(), "uuid-1", UpdateAgentRequest{Version: &v}) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if agent.Version != "2.0.0" { + t.Errorf("expected version 2.0.0, got %q", agent.Version) + } +} + +func TestAgentRegistryClient_DecommissionAgent(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodDelete { + t.Errorf("expected DELETE, got %s", r.Method) + } + w.WriteHeader(204) + })) + defer srv.Close() + + client := newAgentRegistryClient(srv.URL, staticToken, &http.Client{}) + err := client.DecommissionAgent(context.Background(), "uuid-1") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} diff --git a/sdk-go/audit.go b/sdk-go/audit.go new file mode 100644 index 0000000..eb01414 --- /dev/null +++ b/sdk-go/audit.go @@ -0,0 +1,80 @@ +package agentidp + +import ( + "context" + "fmt" + "net/http" + "net/url" + "strings" +) + +// AuditClient provides methods for querying the Audit Log API endpoints. +type AuditClient struct { + baseURL string + getToken func(ctx context.Context) (string, error) + httpClient *http.Client +} + +func newAuditClient(baseURL string, getToken func(ctx context.Context) (string, error), httpClient *http.Client) *AuditClient { + return &AuditClient{ + baseURL: strings.TrimRight(baseURL, "/"), + getToken: getToken, + httpClient: httpClient, + } +} + +// QueryAuditLog returns a filtered, paginated list of audit events. +// GET /api/v1/audit → 200 PaginatedAuditEvents +func (c *AuditClient) QueryAuditLog(ctx context.Context, params *QueryAuditParams) (*PaginatedAuditEvents, error) { + token, err := c.getToken(ctx) + if err != nil { + return nil, err + } + rawURL := c.baseURL + "/api/v1/audit" + if params != nil { + q := url.Values{} + if params.AgentID != "" { + q.Set("agentId", params.AgentID) + } + if params.Action != "" { + q.Set("action", params.Action) + } + if params.Outcome != "" { + q.Set("outcome", params.Outcome) + } + if params.FromDate != "" { + q.Set("fromDate", params.FromDate) + } + if params.ToDate != "" { + q.Set("toDate", params.ToDate) + } + if params.Page > 0 { + q.Set("page", fmt.Sprintf("%d", params.Page)) + } + if params.Limit > 0 { + q.Set("limit", fmt.Sprintf("%d", params.Limit)) + } + if len(q) > 0 { + rawURL += "?" + q.Encode() + } + } + var result PaginatedAuditEvents + if err := doRequest(ctx, c.httpClient, http.MethodGet, rawURL, nil, token, &result); err != nil { + return nil, err + } + return &result, nil +} + +// GetAuditEvent retrieves a single audit event by ID. +// GET /api/v1/audit/:id → 200 AuditEvent +func (c *AuditClient) GetAuditEvent(ctx context.Context, eventID string) (*AuditEvent, error) { + token, err := c.getToken(ctx) + if err != nil { + return nil, err + } + var event AuditEvent + if err := doRequest(ctx, c.httpClient, http.MethodGet, c.baseURL+"/api/v1/audit/"+eventID, nil, token, &event); err != nil { + return nil, err + } + return &event, nil +} diff --git a/sdk-go/audit_test.go b/sdk-go/audit_test.go new file mode 100644 index 0000000..3f4796f --- /dev/null +++ b/sdk-go/audit_test.go @@ -0,0 +1,126 @@ +package agentidp + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" +) + +var mockAuditEvent = AuditEvent{ + EventID: "ev-1", + AgentID: "uuid-1", + Action: "token.issued", + Outcome: "success", + IPAddress: "1.2.3.4", + UserAgent: "curl", + Metadata: map[string]interface{}{}, + Timestamp: "2026-01-01T00:00:00Z", +} + +var mockPaginatedAudit = PaginatedAuditEvents{ + Data: []AuditEvent{mockAuditEvent}, + Total: 1, + Page: 1, + Limit: 20, +} + +func TestAuditClient_QueryAuditLog(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet || r.URL.Path != "/api/v1/audit" { + t.Errorf("unexpected: %s %s", r.Method, r.URL.Path) + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(mockPaginatedAudit) + })) + defer srv.Close() + + client := newAuditClient(srv.URL, staticToken, &http.Client{}) + result, err := client.QueryAuditLog(context.Background(), nil) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if result.Total != 1 { + t.Errorf("expected total 1, got %d", result.Total) + } + if len(result.Data) == 0 || result.Data[0].EventID != "ev-1" { + t.Error("unexpected data in paginated result") + } +} + +func TestAuditClient_QueryAuditLog_WithParams(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + q := r.URL.Query() + if q.Get("agentId") != "uuid-1" { + t.Errorf("expected agentId=uuid-1, got %q", q.Get("agentId")) + } + if q.Get("action") != "token.issued" { + t.Errorf("expected action=token.issued, got %q", q.Get("action")) + } + if q.Get("fromDate") != "2026-01-01" { + t.Errorf("expected fromDate=2026-01-01, got %q", q.Get("fromDate")) + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(mockPaginatedAudit) + })) + defer srv.Close() + + client := newAuditClient(srv.URL, staticToken, &http.Client{}) + _, err := client.QueryAuditLog(context.Background(), &QueryAuditParams{ + AgentID: "uuid-1", + Action: "token.issued", + FromDate: "2026-01-01", + }) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestAuditClient_GetAuditEvent(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet || r.URL.Path != "/api/v1/audit/ev-1" { + t.Errorf("unexpected: %s %s", r.Method, r.URL.Path) + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(mockAuditEvent) + })) + defer srv.Close() + + client := newAuditClient(srv.URL, staticToken, &http.Client{}) + event, err := client.GetAuditEvent(context.Background(), "ev-1") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if event.EventID != "ev-1" { + t.Errorf("expected ev-1, got %q", event.EventID) + } + if event.Action != "token.issued" { + t.Errorf("expected token.issued, got %q", event.Action) + } +} + +func TestAuditClient_Error_Propagated(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(404) + _ = json.NewEncoder(w).Encode(map[string]string{ + "code": "AuditEventNotFoundError", + "message": "Event not found.", + }) + })) + defer srv.Close() + + client := newAuditClient(srv.URL, staticToken, &http.Client{}) + _, err := client.GetAuditEvent(context.Background(), "bad-id") + if err == nil { + t.Fatal("expected error, got nil") + } + apiErr, ok := err.(*AgentIdPError) + if !ok { + t.Fatalf("expected *AgentIdPError, got %T", err) + } + if apiErr.Code != "AuditEventNotFoundError" { + t.Errorf("expected AuditEventNotFoundError, got %q", apiErr.Code) + } +} diff --git a/sdk-go/client.go b/sdk-go/client.go new file mode 100644 index 0000000..c7930db --- /dev/null +++ b/sdk-go/client.go @@ -0,0 +1,83 @@ +package agentidp + +import ( + "context" + "net/http" + "strings" + "time" +) + +// AgentIdPClientConfig holds all configuration for AgentIdPClient. +type AgentIdPClientConfig struct { + // BaseURL is the root URL of the AgentIdP server (e.g. "https://idp.example.com"). + BaseURL string + // ClientID is the agent's OAuth 2.0 client ID. + ClientID string + // ClientSecret is the agent's OAuth 2.0 client secret. + ClientSecret string + // Scope is the space-separated list of OAuth 2.0 scopes to request. + // Defaults to all four scopes when empty. + Scope string + // HTTPClient allows injecting a custom *http.Client (e.g. for testing). + // When nil, a default client with a 30-second timeout is used. + HTTPClient *http.Client +} + +// AgentIdPClient is the top-level client for the SentryAgent.ai AgentIdP API. +// It composes all four service clients and manages token acquisition automatically. +// +// Usage: +// +// client := agentidp.NewAgentIdPClient(agentidp.AgentIdPClientConfig{ +// BaseURL: "https://idp.example.com", +// ClientID: "my-agent-id", +// ClientSecret: "sk_live_...", +// }) +// agent, err := client.Agents.GetAgent(ctx, "uuid-1") +type AgentIdPClient struct { + // Agents provides access to the Agent Registry endpoints. + Agents *AgentRegistryClient + // Credentials provides access to the Credential Management endpoints. + Credentials *CredentialClient + // Tokens provides access to the Token introspection and revocation endpoints. + Tokens *TokenServiceClient + // Audit provides access to the Audit Log endpoints. + Audit *AuditClient + + tokenManager *TokenManager +} + +// NewAgentIdPClient creates a new AgentIdPClient with the given configuration. +func NewAgentIdPClient(cfg AgentIdPClientConfig) *AgentIdPClient { + baseURL := strings.TrimRight(cfg.BaseURL, "/") + + scope := cfg.Scope + if scope == "" { + scope = "agents:read agents:write tokens:read audit:read" + } + + httpClient := cfg.HTTPClient + if httpClient == nil { + httpClient = &http.Client{Timeout: 30 * time.Second} + } + + tm := NewTokenManager(baseURL, cfg.ClientID, cfg.ClientSecret, scope) + + getToken := func(ctx context.Context) (string, error) { + return tm.GetToken(ctx) + } + + return &AgentIdPClient{ + Agents: newAgentRegistryClient(baseURL, getToken, httpClient), + Credentials: newCredentialClient(baseURL, getToken, httpClient), + Tokens: newTokenServiceClient(baseURL, getToken, httpClient), + Audit: newAuditClient(baseURL, getToken, httpClient), + tokenManager: tm, + } +} + +// ClearTokenCache invalidates the cached access token. +// The next API call will fetch a fresh token from the server. +func (c *AgentIdPClient) ClearTokenCache() { + c.tokenManager.ClearCache() +} diff --git a/sdk-go/client_test.go b/sdk-go/client_test.go new file mode 100644 index 0000000..0ff36b6 --- /dev/null +++ b/sdk-go/client_test.go @@ -0,0 +1,124 @@ +package agentidp + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "strings" + "testing" +) + +// integrationServer returns a minimal mock server that handles the token endpoint +// plus a provided handler for all other routes. +func integrationServer(t *testing.T, handler http.HandlerFunc) *httptest.Server { + t.Helper() + mux := http.NewServeMux() + mux.HandleFunc("/api/v1/token", func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]interface{}{ + "access_token": "integration-token", + "token_type": "Bearer", + "expires_in": 3600, + "scope": "agents:read agents:write tokens:read audit:read", + }) + }) + mux.HandleFunc("/", handler) + return httptest.NewServer(mux) +} + +func TestNewAgentIdPClient_GetAgent(t *testing.T) { + srv := integrationServer(t, func(w http.ResponseWriter, r *http.Request) { + if !strings.HasPrefix(r.URL.Path, "/api/v1/agents/") { + t.Errorf("unexpected path: %s", r.URL.Path) + } + if r.Header.Get("Authorization") != "Bearer integration-token" { + t.Errorf("unexpected Authorization: %q", r.Header.Get("Authorization")) + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(mockAgent) + }) + defer srv.Close() + + client := NewAgentIdPClient(AgentIdPClientConfig{ + BaseURL: srv.URL, + ClientID: "cid", + ClientSecret: "secret", + }) + + agent, err := client.Agents.GetAgent(context.Background(), "uuid-1") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if agent.AgentID != "uuid-1" { + t.Errorf("expected uuid-1, got %q", agent.AgentID) + } +} + +func TestNewAgentIdPClient_ClearTokenCache(t *testing.T) { + callCount := 0 + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/api/v1/token" { + callCount++ + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]interface{}{ + "access_token": "tok", + "token_type": "Bearer", + "expires_in": 3600, + "scope": "agents:read", + }) + return + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(mockAgent) + })) + defer srv.Close() + + client := NewAgentIdPClient(AgentIdPClientConfig{ + BaseURL: srv.URL, + ClientID: "cid", + ClientSecret: "secret", + }) + + _, _ = client.Agents.GetAgent(context.Background(), "uuid-1") + client.ClearTokenCache() + _, _ = client.Agents.GetAgent(context.Background(), "uuid-1") + + if callCount != 2 { + t.Errorf("expected 2 token fetches after ClearTokenCache, got %d", callCount) + } +} + +func TestNewAgentIdPClient_DefaultScope(t *testing.T) { + var capturedScope string + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.URL.Path == "/api/v1/token" { + _ = r.ParseForm() + capturedScope = r.FormValue("scope") + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]interface{}{ + "access_token": "tok", + "token_type": "Bearer", + "expires_in": 3600, + "scope": capturedScope, + }) + return + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(mockAgent) + })) + defer srv.Close() + + client := NewAgentIdPClient(AgentIdPClientConfig{ + BaseURL: srv.URL, + ClientID: "cid", + ClientSecret: "secret", + // Scope intentionally omitted → defaults applied + }) + _, _ = client.Agents.GetAgent(context.Background(), "uuid-1") + + expected := "agents:read agents:write tokens:read audit:read" + if capturedScope != expected { + t.Errorf("expected scope %q, got %q", expected, capturedScope) + } +} diff --git a/sdk-go/credentials.go b/sdk-go/credentials.go new file mode 100644 index 0000000..f0cc5f5 --- /dev/null +++ b/sdk-go/credentials.go @@ -0,0 +1,93 @@ +package agentidp + +import ( + "context" + "fmt" + "net/http" + "net/url" + "strings" +) + +// CredentialClient provides methods for the Credential Management API endpoints. +type CredentialClient struct { + baseURL string + getToken func(ctx context.Context) (string, error) + httpClient *http.Client +} + +func newCredentialClient(baseURL string, getToken func(ctx context.Context) (string, error), httpClient *http.Client) *CredentialClient { + return &CredentialClient{ + baseURL: strings.TrimRight(baseURL, "/"), + getToken: getToken, + httpClient: httpClient, + } +} + +// GenerateCredential creates a new credential for the given agent. +// POST /api/v1/agents/:id/credentials → 201 CredentialWithSecret +func (c *CredentialClient) GenerateCredential(ctx context.Context, agentID string) (*CredentialWithSecret, error) { + token, err := c.getToken(ctx) + if err != nil { + return nil, err + } + var cred CredentialWithSecret + if err := doRequest(ctx, c.httpClient, http.MethodPost, c.baseURL+"/api/v1/agents/"+agentID+"/credentials", struct{}{}, token, &cred); err != nil { + return nil, err + } + return &cred, nil +} + +// ListCredentials returns a paginated list of credentials for the given agent. +// GET /api/v1/agents/:id/credentials → 200 PaginatedCredentials +func (c *CredentialClient) ListCredentials(ctx context.Context, agentID string, page, limit int) (*PaginatedCredentials, error) { + token, err := c.getToken(ctx) + if err != nil { + return nil, err + } + rawURL := c.baseURL + "/api/v1/agents/" + agentID + "/credentials" + q := url.Values{} + if page > 0 { + q.Set("page", fmt.Sprintf("%d", page)) + } + if limit > 0 { + q.Set("limit", fmt.Sprintf("%d", limit)) + } + if len(q) > 0 { + rawURL += "?" + q.Encode() + } + var result PaginatedCredentials + if err := doRequest(ctx, c.httpClient, http.MethodGet, rawURL, nil, token, &result); err != nil { + return nil, err + } + return &result, nil +} + +// RotateCredential generates a new secret for the given credential. +// POST /api/v1/agents/:id/credentials/:credId/rotate → 200 CredentialWithSecret +func (c *CredentialClient) RotateCredential(ctx context.Context, agentID, credentialID string) (*CredentialWithSecret, error) { + token, err := c.getToken(ctx) + if err != nil { + return nil, err + } + rawURL := c.baseURL + "/api/v1/agents/" + agentID + "/credentials/" + credentialID + "/rotate" + var cred CredentialWithSecret + if err := doRequest(ctx, c.httpClient, http.MethodPost, rawURL, struct{}{}, token, &cred); err != nil { + return nil, err + } + return &cred, nil +} + +// RevokeCredential permanently revokes a credential. +// DELETE /api/v1/agents/:id/credentials/:credId → 200 Credential +func (c *CredentialClient) RevokeCredential(ctx context.Context, agentID, credentialID string) (*Credential, error) { + token, err := c.getToken(ctx) + if err != nil { + return nil, err + } + rawURL := c.baseURL + "/api/v1/agents/" + agentID + "/credentials/" + credentialID + var cred Credential + if err := doRequest(ctx, c.httpClient, http.MethodDelete, rawURL, nil, token, &cred); err != nil { + return nil, err + } + return &cred, nil +} diff --git a/sdk-go/credentials_test.go b/sdk-go/credentials_test.go new file mode 100644 index 0000000..dcc2e2a --- /dev/null +++ b/sdk-go/credentials_test.go @@ -0,0 +1,146 @@ +package agentidp + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" +) + +var mockCred = Credential{ + CredentialID: "cred-1", + ClientID: "uuid-1", + Status: "active", + CreatedAt: "2026-01-01T00:00:00Z", +} + +var mockCredWithSecret = CredentialWithSecret{ + Credential: mockCred, + ClientSecret: "sk_live_abc", +} + +var mockPaginatedCreds = PaginatedCredentials{ + Data: []Credential{mockCred}, + Total: 1, + Page: 1, + Limit: 20, +} + +func TestCredentialClient_GenerateCredential(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost || r.URL.Path != "/api/v1/agents/uuid-1/credentials" { + t.Errorf("unexpected: %s %s", r.Method, r.URL.Path) + } + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(201) + _ = json.NewEncoder(w).Encode(mockCredWithSecret) + })) + defer srv.Close() + + client := newCredentialClient(srv.URL, staticToken, &http.Client{}) + cred, err := client.GenerateCredential(context.Background(), "uuid-1") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if cred.ClientSecret != "sk_live_abc" { + t.Errorf("expected sk_live_abc, got %q", cred.ClientSecret) + } + if cred.CredentialID != "cred-1" { + t.Errorf("expected cred-1, got %q", cred.CredentialID) + } +} + +func TestCredentialClient_ListCredentials(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodGet || r.URL.Path != "/api/v1/agents/uuid-1/credentials" { + t.Errorf("unexpected: %s %s", r.Method, r.URL.Path) + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(mockPaginatedCreds) + })) + defer srv.Close() + + client := newCredentialClient(srv.URL, staticToken, &http.Client{}) + result, err := client.ListCredentials(context.Background(), "uuid-1", 0, 0) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if result.Total != 1 { + t.Errorf("expected total 1, got %d", result.Total) + } +} + +func TestCredentialClient_RotateCredential(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + expectedPath := "/api/v1/agents/uuid-1/credentials/cred-1/rotate" + if r.Method != http.MethodPost || r.URL.Path != expectedPath { + t.Errorf("unexpected: %s %s", r.Method, r.URL.Path) + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(mockCredWithSecret) + })) + defer srv.Close() + + client := newCredentialClient(srv.URL, staticToken, &http.Client{}) + cred, err := client.RotateCredential(context.Background(), "uuid-1", "cred-1") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if cred.ClientSecret != "sk_live_abc" { + t.Errorf("expected sk_live_abc, got %q", cred.ClientSecret) + } +} + +func TestCredentialClient_RevokeCredential(t *testing.T) { + revokedAt := "2026-01-02T00:00:00Z" + revoked := Credential{ + CredentialID: "cred-1", + ClientID: "uuid-1", + Status: "revoked", + CreatedAt: "2026-01-01T00:00:00Z", + RevokedAt: &revokedAt, + } + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodDelete { + t.Errorf("expected DELETE, got %s", r.Method) + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(revoked) + })) + defer srv.Close() + + client := newCredentialClient(srv.URL, staticToken, &http.Client{}) + cred, err := client.RevokeCredential(context.Background(), "uuid-1", "cred-1") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if cred.Status != "revoked" { + t.Errorf("expected revoked, got %q", cred.Status) + } +} + +func TestCredentialClient_Error_Propagated(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(404) + _ = json.NewEncoder(w).Encode(map[string]string{ + "code": "AgentNotFoundError", + "message": "Not found.", + }) + })) + defer srv.Close() + + client := newCredentialClient(srv.URL, staticToken, &http.Client{}) + _, err := client.GenerateCredential(context.Background(), "bad-id") + if err == nil { + t.Fatal("expected error, got nil") + } + apiErr, ok := err.(*AgentIdPError) + if !ok { + t.Fatalf("expected *AgentIdPError, got %T", err) + } + if apiErr.HTTPStatus != 404 { + t.Errorf("expected 404, got %d", apiErr.HTTPStatus) + } +} diff --git a/sdk-go/errors.go b/sdk-go/errors.go new file mode 100644 index 0000000..b7e1899 --- /dev/null +++ b/sdk-go/errors.go @@ -0,0 +1,83 @@ +package agentidp + +import ( + "encoding/json" + "fmt" +) + +// AgentIdPError is returned for all API and network failures. +// It implements the error interface. +type AgentIdPError struct { + // Code is a machine-readable error code (e.g. "AgentNotFoundError"). + Code string + // Message is a human-readable description. + Message string + // HTTPStatus is the HTTP response status code, or 0 for network errors. + HTTPStatus int + // Details contains additional structured context, if provided by the API. + Details map[string]interface{} +} + +// Error implements the error interface. +func (e *AgentIdPError) Error() string { + return e.Message +} + +// apiErrorBody is the standard JSON error body from the AgentIdP REST API. +type apiErrorBody struct { + Code string `json:"code"` + Message string `json:"message"` + Details map[string]interface{} `json:"details,omitempty"` +} + +// oauth2ErrorBody is the standard JSON error body from OAuth 2.0 token endpoints. +type oauth2ErrorBody struct { + Error string `json:"error"` + ErrorDescription string `json:"error_description"` +} + +// parseAPIError attempts to unmarshal a JSON response body into an AgentIdPError. +// Falls back to a generic UNKNOWN_ERROR if the body cannot be parsed. +func parseAPIError(body []byte, status int) *AgentIdPError { + var apiErr apiErrorBody + if err := json.Unmarshal(body, &apiErr); err == nil && apiErr.Code != "" { + return &AgentIdPError{ + Code: apiErr.Code, + Message: apiErr.Message, + HTTPStatus: status, + Details: apiErr.Details, + } + } + return &AgentIdPError{ + Code: "UNKNOWN_ERROR", + Message: fmt.Sprintf("unexpected HTTP %d", status), + HTTPStatus: status, + } +} + +// parseOAuth2Error attempts to unmarshal a JSON response body into an AgentIdPError +// using the OAuth 2.0 error format. Falls back to UNKNOWN_ERROR on parse failure. +func parseOAuth2Error(body []byte, status int) *AgentIdPError { + var oauthErr oauth2ErrorBody + if err := json.Unmarshal(body, &oauthErr); err == nil && oauthErr.Error != "" { + return &AgentIdPError{ + Code: oauthErr.Error, + Message: oauthErr.ErrorDescription, + HTTPStatus: status, + } + } + return &AgentIdPError{ + Code: "UNKNOWN_ERROR", + Message: fmt.Sprintf("unexpected HTTP %d", status), + HTTPStatus: status, + } +} + +// newNetworkError creates an AgentIdPError for transport-level failures. +func newNetworkError(cause error) *AgentIdPError { + return &AgentIdPError{ + Code: "NETWORK_ERROR", + Message: fmt.Sprintf("network error: %s", cause.Error()), + HTTPStatus: 0, + } +} diff --git a/sdk-go/errors_test.go b/sdk-go/errors_test.go new file mode 100644 index 0000000..999ca07 --- /dev/null +++ b/sdk-go/errors_test.go @@ -0,0 +1,85 @@ +package agentidp + +import ( + "strings" + "testing" +) + +func TestAgentIdPError_Error(t *testing.T) { + err := &AgentIdPError{Code: "AgentNotFoundError", Message: "Agent not found.", HTTPStatus: 404} + if err.Error() != "Agent not found." { + t.Errorf("expected 'Agent not found.', got %q", err.Error()) + } +} + +func TestParseAPIError_ValidBody(t *testing.T) { + body := []byte(`{"code":"AgentNotFoundError","message":"Not found.","details":{"id":"x"}}`) + err := parseAPIError(body, 404) + if err.Code != "AgentNotFoundError" { + t.Errorf("expected code AgentNotFoundError, got %q", err.Code) + } + if err.HTTPStatus != 404 { + t.Errorf("expected status 404, got %d", err.HTTPStatus) + } + if err.Details == nil { + t.Error("expected non-nil Details") + } +} + +func TestParseAPIError_UnparseableBody(t *testing.T) { + err := parseAPIError([]byte("not json"), 500) + if err.Code != "UNKNOWN_ERROR" { + t.Errorf("expected UNKNOWN_ERROR, got %q", err.Code) + } + if err.HTTPStatus != 500 { + t.Errorf("expected 500, got %d", err.HTTPStatus) + } +} + +func TestParseAPIError_EmptyCode(t *testing.T) { + // Valid JSON but no "code" field → falls back to UNKNOWN_ERROR + err := parseAPIError([]byte(`{"message":"oops"}`), 503) + if err.Code != "UNKNOWN_ERROR" { + t.Errorf("expected UNKNOWN_ERROR, got %q", err.Code) + } +} + +func TestParseOAuth2Error_ValidBody(t *testing.T) { + body := []byte(`{"error":"invalid_client","error_description":"Bad credentials."}`) + err := parseOAuth2Error(body, 401) + if err.Code != "invalid_client" { + t.Errorf("expected invalid_client, got %q", err.Code) + } + if err.Message != "Bad credentials." { + t.Errorf("expected 'Bad credentials.', got %q", err.Message) + } + if err.HTTPStatus != 401 { + t.Errorf("expected 401, got %d", err.HTTPStatus) + } +} + +func TestParseOAuth2Error_UnparseableBody(t *testing.T) { + err := parseOAuth2Error([]byte("garbage"), 400) + if err.Code != "UNKNOWN_ERROR" { + t.Errorf("expected UNKNOWN_ERROR, got %q", err.Code) + } +} + +func TestNewNetworkError(t *testing.T) { + cause := &testError{msg: "connection refused"} + err := newNetworkError(cause) + if err.Code != "NETWORK_ERROR" { + t.Errorf("expected NETWORK_ERROR, got %q", err.Code) + } + if err.HTTPStatus != 0 { + t.Errorf("expected HTTPStatus 0, got %d", err.HTTPStatus) + } + if !strings.Contains(err.Message, "connection refused") { + t.Errorf("expected message to contain 'connection refused', got %q", err.Message) + } +} + +// testError is a simple error implementation for testing. +type testError struct{ msg string } + +func (e *testError) Error() string { return e.msg } diff --git a/sdk-go/go.mod b/sdk-go/go.mod new file mode 100644 index 0000000..ff7a10d --- /dev/null +++ b/sdk-go/go.mod @@ -0,0 +1,3 @@ +module github.com/sentryagent/idp-sdk-go + +go 1.21 diff --git a/sdk-go/request.go b/sdk-go/request.go new file mode 100644 index 0000000..26d7e44 --- /dev/null +++ b/sdk-go/request.go @@ -0,0 +1,79 @@ +package agentidp + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" +) + +// doRequest performs an authenticated JSON HTTP request. +// +// - method: HTTP method (GET, POST, PATCH, DELETE) +// - url: full URL (base + path + query) +// - body: request body (marshalled to JSON), or nil for bodyless requests +// - token: Bearer token for Authorization header +// - out: pointer to unmarshal the response body into, or nil to discard +// +// Returns nil on 2xx; returns *AgentIdPError on HTTP errors or network failures. +// 204 No Content responses are considered success; out is not populated. +func doRequest(ctx context.Context, client *http.Client, method, url string, body interface{}, token string, out interface{}) error { + var bodyReader io.Reader + if body != nil { + b, err := json.Marshal(body) + if err != nil { + return &AgentIdPError{ + Code: "SERIALIZATION_ERROR", + Message: fmt.Sprintf("failed to marshal request body: %s", err.Error()), + HTTPStatus: 0, + } + } + bodyReader = bytes.NewReader(b) + } + + req, err := http.NewRequestWithContext(ctx, method, url, bodyReader) + if err != nil { + return &AgentIdPError{ + Code: "REQUEST_BUILD_ERROR", + Message: fmt.Sprintf("failed to build request: %s", err.Error()), + HTTPStatus: 0, + } + } + + if body != nil { + req.Header.Set("Content-Type", "application/json") + } + req.Header.Set("Accept", "application/json") + if token != "" { + req.Header.Set("Authorization", "Bearer "+token) + } + + resp, err := client.Do(req) + if err != nil { + return newNetworkError(err) + } + defer resp.Body.Close() //nolint:errcheck + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return newNetworkError(err) + } + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return parseAPIError(respBody, resp.StatusCode) + } + + if out != nil && resp.StatusCode != http.StatusNoContent { + if err := json.Unmarshal(respBody, out); err != nil { + return &AgentIdPError{ + Code: "PARSE_ERROR", + Message: fmt.Sprintf("failed to parse response: %s", err.Error()), + HTTPStatus: resp.StatusCode, + } + } + } + + return nil +} diff --git a/sdk-go/token_manager.go b/sdk-go/token_manager.go new file mode 100644 index 0000000..3d75b64 --- /dev/null +++ b/sdk-go/token_manager.go @@ -0,0 +1,129 @@ +package agentidp + +import ( + "bytes" + "context" + "encoding/json" + "fmt" + "io" + "net/http" + "net/url" + "strings" + "sync" + "time" +) + +const refreshBufferSeconds = 60 + +// cachedToken holds an access token and its expiry time. +type cachedToken struct { + accessToken string + expiresAt time.Time +} + +// isValid returns true if the token will not expire within the refresh buffer. +func (c *cachedToken) isValid() bool { + return time.Now().Add(refreshBufferSeconds * time.Second).Before(c.expiresAt) +} + +// TokenManager obtains and caches OAuth 2.0 client credentials tokens. +// It is safe for concurrent use by multiple goroutines. +type TokenManager struct { + baseURL string + clientID string + clientSecret string + scope string + httpClient *http.Client + mu sync.Mutex + cached *cachedToken +} + +// NewTokenManager creates a TokenManager that fetches tokens from baseURL +// using the given client credentials and scope. +func NewTokenManager(baseURL, clientID, clientSecret, scope string) *TokenManager { + return &TokenManager{ + baseURL: strings.TrimRight(baseURL, "/"), + clientID: clientID, + clientSecret: clientSecret, + scope: scope, + httpClient: &http.Client{Timeout: 10 * time.Second}, + } +} + +// GetToken returns a valid access token, fetching a new one if the cache is +// empty or the cached token is within the refresh buffer window. +// It is goroutine-safe. +func (tm *TokenManager) GetToken(ctx context.Context) (string, error) { + tm.mu.Lock() + defer tm.mu.Unlock() + + if tm.cached != nil && tm.cached.isValid() { + return tm.cached.accessToken, nil + } + + token, err := tm.fetchToken(ctx) + if err != nil { + return "", err + } + + tm.cached = token + return token.accessToken, nil +} + +// ClearCache invalidates the cached token. The next call to GetToken will +// fetch a fresh token from the server. +func (tm *TokenManager) ClearCache() { + tm.mu.Lock() + defer tm.mu.Unlock() + tm.cached = nil +} + +// fetchToken performs the OAuth 2.0 client credentials grant. +// Must be called with mu held. +func (tm *TokenManager) fetchToken(ctx context.Context) (*cachedToken, error) { + form := url.Values{} + form.Set("grant_type", "client_credentials") + form.Set("client_id", tm.clientID) + form.Set("client_secret", tm.clientSecret) + form.Set("scope", tm.scope) + + tokenURL := tm.baseURL + "/api/v1/token" + req, err := http.NewRequestWithContext(ctx, http.MethodPost, tokenURL, bytes.NewBufferString(form.Encode())) + if err != nil { + return nil, &AgentIdPError{ + Code: "REQUEST_BUILD_ERROR", + Message: fmt.Sprintf("failed to build token request: %s", err.Error()), + HTTPStatus: 0, + } + } + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + + resp, err := tm.httpClient.Do(req) + if err != nil { + return nil, newNetworkError(err) + } + defer resp.Body.Close() //nolint:errcheck + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, newNetworkError(err) + } + + if resp.StatusCode != http.StatusOK { + return nil, parseOAuth2Error(respBody, resp.StatusCode) + } + + var tr TokenResponse + if err := json.Unmarshal(respBody, &tr); err != nil { + return nil, &AgentIdPError{ + Code: "PARSE_ERROR", + Message: fmt.Sprintf("failed to parse token response: %s", err.Error()), + HTTPStatus: resp.StatusCode, + } + } + + return &cachedToken{ + accessToken: tr.AccessToken, + expiresAt: time.Now().Add(time.Duration(tr.ExpiresIn) * time.Second), + }, nil +} diff --git a/sdk-go/token_manager_test.go b/sdk-go/token_manager_test.go new file mode 100644 index 0000000..f899404 --- /dev/null +++ b/sdk-go/token_manager_test.go @@ -0,0 +1,169 @@ +package agentidp + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "sync" + "testing" + "time" +) + +func newTokenServer(t *testing.T, statusCode int, body interface{}) *httptest.Server { + t.Helper() + return httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost || r.URL.Path != "/api/v1/token" { + t.Errorf("unexpected request: %s %s", r.Method, r.URL.Path) + } + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(statusCode) + _ = json.NewEncoder(w).Encode(body) + })) +} + +var tokenResp = map[string]interface{}{ + "access_token": "eyJ.abc.def", + "token_type": "Bearer", + "expires_in": 3600, + "scope": "agents:read", +} + +func TestTokenManager_GetToken_Issues(t *testing.T) { + srv := newTokenServer(t, 200, tokenResp) + defer srv.Close() + + tm := NewTokenManager(srv.URL, "client-id", "secret", "agents:read") + tok, err := tm.GetToken(context.Background()) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if tok != "eyJ.abc.def" { + t.Errorf("expected token eyJ.abc.def, got %q", tok) + } +} + +func TestTokenManager_GetToken_Caches(t *testing.T) { + callCount := 0 + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + callCount++ + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(tokenResp) + })) + defer srv.Close() + + tm := NewTokenManager(srv.URL, "client-id", "secret", "agents:read") + _, _ = tm.GetToken(context.Background()) + _, _ = tm.GetToken(context.Background()) + + if callCount != 1 { + t.Errorf("expected 1 HTTP call (cached), got %d", callCount) + } +} + +func TestTokenManager_GetToken_RefreshesNearExpiry(t *testing.T) { + callCount := 0 + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + callCount++ + resp := map[string]interface{}{ + "access_token": "eyJ.abc.def", + "token_type": "Bearer", + "expires_in": 3600, + "scope": "agents:read", + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(resp) + })) + defer srv.Close() + + tm := NewTokenManager(srv.URL, "client-id", "secret", "agents:read") + _, _ = tm.GetToken(context.Background()) + + // Force the cached token to appear nearly expired + tm.mu.Lock() + tm.cached = &cachedToken{ + accessToken: "old-token", + expiresAt: time.Now().Add(30 * time.Second), // < refreshBufferSeconds + } + tm.mu.Unlock() + + tok, err := tm.GetToken(context.Background()) + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if tok != "eyJ.abc.def" { + t.Errorf("expected refreshed token, got %q", tok) + } + if callCount != 2 { + t.Errorf("expected 2 HTTP calls (initial + refresh), got %d", callCount) + } +} + +func TestTokenManager_GetToken_AuthFailure(t *testing.T) { + srv := newTokenServer(t, 401, map[string]interface{}{ + "error": "invalid_client", + "error_description": "Bad credentials.", + }) + defer srv.Close() + + tm := NewTokenManager(srv.URL, "client-id", "bad-secret", "agents:read") + _, err := tm.GetToken(context.Background()) + if err == nil { + t.Fatal("expected error, got nil") + } + apiErr, ok := err.(*AgentIdPError) + if !ok { + t.Fatalf("expected *AgentIdPError, got %T", err) + } + if apiErr.Code != "invalid_client" { + t.Errorf("expected code invalid_client, got %q", apiErr.Code) + } + if apiErr.HTTPStatus != 401 { + t.Errorf("expected HTTPStatus 401, got %d", apiErr.HTTPStatus) + } +} + +func TestTokenManager_ClearCache(t *testing.T) { + callCount := 0 + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + callCount++ + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(tokenResp) + })) + defer srv.Close() + + tm := NewTokenManager(srv.URL, "client-id", "secret", "agents:read") + _, _ = tm.GetToken(context.Background()) + tm.ClearCache() + _, _ = tm.GetToken(context.Background()) + + if callCount != 2 { + t.Errorf("expected 2 HTTP calls (cache cleared), got %d", callCount) + } +} + +func TestTokenManager_GoroutineSafe(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(tokenResp) + })) + defer srv.Close() + + tm := NewTokenManager(srv.URL, "client-id", "secret", "agents:read") + + var wg sync.WaitGroup + for i := 0; i < 20; i++ { + wg.Add(1) + go func() { + defer wg.Done() + tok, err := tm.GetToken(context.Background()) + if err != nil { + t.Errorf("goroutine error: %v", err) + } + if tok != "eyJ.abc.def" { + t.Errorf("unexpected token: %q", tok) + } + }() + } + wg.Wait() +} diff --git a/sdk-go/token_service.go b/sdk-go/token_service.go new file mode 100644 index 0000000..f9ec5c3 --- /dev/null +++ b/sdk-go/token_service.go @@ -0,0 +1,103 @@ +package agentidp + +import ( + "bytes" + "context" + "encoding/json" + "io" + "net/http" + "net/url" + "strings" +) + +// TokenServiceClient provides token introspection and revocation. +// Token acquisition is handled separately by TokenManager. +type TokenServiceClient struct { + baseURL string + getToken func(ctx context.Context) (string, error) + httpClient *http.Client +} + +func newTokenServiceClient(baseURL string, getToken func(ctx context.Context) (string, error), httpClient *http.Client) *TokenServiceClient { + return &TokenServiceClient{ + baseURL: strings.TrimRight(baseURL, "/"), + getToken: getToken, + httpClient: httpClient, + } +} + +// IntrospectToken introspects an access token per RFC 7662. +// POST /api/v1/token/introspect (form-encoded) → 200 IntrospectResponse +func (c *TokenServiceClient) IntrospectToken(ctx context.Context, accessToken string) (*IntrospectResponse, error) { + bearerToken, err := c.getToken(ctx) + if err != nil { + return nil, err + } + + form := url.Values{} + form.Set("token", accessToken) + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, c.baseURL+"/api/v1/token/introspect", bytes.NewBufferString(form.Encode())) + if err != nil { + return nil, &AgentIdPError{Code: "REQUEST_BUILD_ERROR", Message: "failed to build introspect request: " + err.Error()} + } + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + req.Header.Set("Accept", "application/json") + req.Header.Set("Authorization", "Bearer "+bearerToken) + + resp, err := c.httpClient.Do(req) + if err != nil { + return nil, newNetworkError(err) + } + defer resp.Body.Close() //nolint:errcheck + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return nil, newNetworkError(err) + } + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return nil, parseAPIError(respBody, resp.StatusCode) + } + + var result IntrospectResponse + if err := json.Unmarshal(respBody, &result); err != nil { + return nil, &AgentIdPError{Code: "PARSE_ERROR", Message: "failed to parse introspect response: " + err.Error(), HTTPStatus: resp.StatusCode} + } + return &result, nil +} + +// RevokeToken revokes an access token. +// POST /api/v1/token/revoke (form-encoded) → 200 +func (c *TokenServiceClient) RevokeToken(ctx context.Context, accessToken string) error { + bearerToken, err := c.getToken(ctx) + if err != nil { + return err + } + + form := url.Values{} + form.Set("token", accessToken) + + req, err := http.NewRequestWithContext(ctx, http.MethodPost, c.baseURL+"/api/v1/token/revoke", bytes.NewBufferString(form.Encode())) + if err != nil { + return &AgentIdPError{Code: "REQUEST_BUILD_ERROR", Message: "failed to build revoke request: " + err.Error()} + } + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + req.Header.Set("Authorization", "Bearer "+bearerToken) + + resp, err := c.httpClient.Do(req) + if err != nil { + return newNetworkError(err) + } + defer resp.Body.Close() //nolint:errcheck + + respBody, err := io.ReadAll(resp.Body) + if err != nil { + return newNetworkError(err) + } + + if resp.StatusCode < 200 || resp.StatusCode >= 300 { + return parseAPIError(respBody, resp.StatusCode) + } + return nil +} diff --git a/sdk-go/token_service_test.go b/sdk-go/token_service_test.go new file mode 100644 index 0000000..c98cc1b --- /dev/null +++ b/sdk-go/token_service_test.go @@ -0,0 +1,108 @@ +package agentidp + +import ( + "context" + "encoding/json" + "net/http" + "net/http/httptest" + "testing" +) + +func TestTokenServiceClient_IntrospectToken_Active(t *testing.T) { + introspectResp := map[string]interface{}{ + "active": true, + "sub": "uuid-1", + "exp": 9999999999, + } + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost || r.URL.Path != "/api/v1/token/introspect" { + t.Errorf("unexpected: %s %s", r.Method, r.URL.Path) + } + if ct := r.Header.Get("Content-Type"); ct != "application/x-www-form-urlencoded" { + t.Errorf("expected form content-type, got %q", ct) + } + if err := r.ParseForm(); err != nil { + t.Fatalf("parse form: %v", err) + } + if r.FormValue("token") == "" { + t.Error("missing 'token' form field") + } + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(introspectResp) + })) + defer srv.Close() + + client := newTokenServiceClient(srv.URL, staticToken, &http.Client{}) + result, err := client.IntrospectToken(context.Background(), "some-token") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if !result.Active { + t.Error("expected active=true") + } + if result.Sub == nil || *result.Sub != "uuid-1" { + t.Errorf("expected sub=uuid-1, got %v", result.Sub) + } +} + +func TestTokenServiceClient_IntrospectToken_Inactive(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + _ = json.NewEncoder(w).Encode(map[string]interface{}{"active": false}) + })) + defer srv.Close() + + client := newTokenServiceClient(srv.URL, staticToken, &http.Client{}) + result, err := client.IntrospectToken(context.Background(), "expired-token") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } + if result.Active { + t.Error("expected active=false") + } +} + +func TestTokenServiceClient_RevokeToken(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + if r.Method != http.MethodPost || r.URL.Path != "/api/v1/token/revoke" { + t.Errorf("unexpected: %s %s", r.Method, r.URL.Path) + } + if ct := r.Header.Get("Content-Type"); ct != "application/x-www-form-urlencoded" { + t.Errorf("expected form content-type, got %q", ct) + } + w.WriteHeader(200) + _, _ = w.Write([]byte("{}")) + })) + defer srv.Close() + + client := newTokenServiceClient(srv.URL, staticToken, &http.Client{}) + err := client.RevokeToken(context.Background(), "some-token") + if err != nil { + t.Fatalf("unexpected error: %v", err) + } +} + +func TestTokenServiceClient_IntrospectToken_Error(t *testing.T) { + srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(401) + _ = json.NewEncoder(w).Encode(map[string]string{ + "code": "UnauthorizedError", + "message": "Invalid token.", + }) + })) + defer srv.Close() + + client := newTokenServiceClient(srv.URL, staticToken, &http.Client{}) + _, err := client.IntrospectToken(context.Background(), "bad-token") + if err == nil { + t.Fatal("expected error, got nil") + } + apiErr, ok := err.(*AgentIdPError) + if !ok { + t.Fatalf("expected *AgentIdPError, got %T", err) + } + if apiErr.HTTPStatus != 401 { + t.Errorf("expected 401, got %d", apiErr.HTTPStatus) + } +} diff --git a/sdk-go/types.go b/sdk-go/types.go new file mode 100644 index 0000000..d65f50e --- /dev/null +++ b/sdk-go/types.go @@ -0,0 +1,131 @@ +// Package agentidp provides a Go client for the SentryAgent.ai AgentIdP API. +// It covers all 14 endpoints across agent registry, credential management, +// OAuth 2.0 token operations, and audit log queries. +package agentidp + +// Agent is a registered AI agent identity. +type Agent struct { + AgentID string `json:"agentId"` + Email string `json:"email"` + AgentType string `json:"agentType"` + Version string `json:"version"` + Capabilities []string `json:"capabilities"` + Owner string `json:"owner"` + DeploymentEnv string `json:"deploymentEnv"` + Status string `json:"status"` + CreatedAt string `json:"createdAt"` + UpdatedAt string `json:"updatedAt"` +} + +// RegisterAgentRequest is the body for POST /api/v1/agents. +type RegisterAgentRequest struct { + Email string `json:"email"` + AgentType string `json:"agentType"` + Version string `json:"version"` + Capabilities []string `json:"capabilities"` + Owner string `json:"owner"` + DeploymentEnv string `json:"deploymentEnv"` +} + +// UpdateAgentRequest is the body for PATCH /api/v1/agents/:id. +// All fields are optional — only non-nil pointer fields are sent. +type UpdateAgentRequest struct { + AgentType *string `json:"agentType,omitempty"` + Version *string `json:"version,omitempty"` + Capabilities []string `json:"capabilities,omitempty"` + Owner *string `json:"owner,omitempty"` + DeploymentEnv *string `json:"deploymentEnv,omitempty"` + Status *string `json:"status,omitempty"` +} + +// PaginatedAgents is a paginated list of agents. +type PaginatedAgents struct { + Data []Agent `json:"data"` + Total int `json:"total"` + Page int `json:"page"` + Limit int `json:"limit"` +} + +// ListAgentsParams contains optional query parameters for ListAgents. +type ListAgentsParams struct { + Status string + AgentType string + DeploymentEnv string + Page int + Limit int +} + +// Credential is a credential record (ClientSecret is never included). +type Credential struct { + CredentialID string `json:"credentialId"` + ClientID string `json:"clientId"` + Status string `json:"status"` + CreatedAt string `json:"createdAt"` + ExpiresAt *string `json:"expiresAt"` + RevokedAt *string `json:"revokedAt"` +} + +// CredentialWithSecret is a Credential with a one-time plaintext secret. +// Returned only on credential creation and rotation. +type CredentialWithSecret struct { + Credential + ClientSecret string `json:"clientSecret"` +} + +// PaginatedCredentials is a paginated list of credentials. +type PaginatedCredentials struct { + Data []Credential `json:"data"` + Total int `json:"total"` + Page int `json:"page"` + Limit int `json:"limit"` +} + +// TokenResponse is the OAuth 2.0 access token response (RFC 6749). +type TokenResponse struct { + AccessToken string `json:"access_token"` + TokenType string `json:"token_type"` + ExpiresIn int `json:"expires_in"` + Scope string `json:"scope"` +} + +// IntrospectResponse is the token introspection response (RFC 7662). +type IntrospectResponse struct { + Active bool `json:"active"` + Sub *string `json:"sub,omitempty"` + ClientID *string `json:"client_id,omitempty"` + Scope *string `json:"scope,omitempty"` + TokenType *string `json:"token_type,omitempty"` + Iat *int64 `json:"iat,omitempty"` + Exp *int64 `json:"exp,omitempty"` +} + +// AuditEvent is an immutable audit event record. +type AuditEvent struct { + EventID string `json:"eventId"` + AgentID string `json:"agentId"` + Action string `json:"action"` + Outcome string `json:"outcome"` + IPAddress string `json:"ipAddress"` + UserAgent string `json:"userAgent"` + Metadata map[string]interface{} `json:"metadata"` + Timestamp string `json:"timestamp"` +} + +// PaginatedAuditEvents is a paginated list of audit events. +type PaginatedAuditEvents struct { + Data []AuditEvent `json:"data"` + Total int `json:"total"` + Page int `json:"page"` + Limit int `json:"limit"` +} + +// QueryAuditParams contains optional query parameters for QueryAuditLog. +type QueryAuditParams struct { + AgentID string + Action string + Outcome string + FromDate string + ToDate string + Page int + Limit int +} diff --git a/sdk-java/.gitignore b/sdk-java/.gitignore new file mode 100644 index 0000000..2f7896d --- /dev/null +++ b/sdk-java/.gitignore @@ -0,0 +1 @@ +target/ diff --git a/sdk-java/README.md b/sdk-java/README.md new file mode 100644 index 0000000..1650520 --- /dev/null +++ b/sdk-java/README.md @@ -0,0 +1,190 @@ +# SentryAgent.ai AgentIdP — Java SDK + +Official Java client for the [SentryAgent.ai AgentIdP](https://sentryagent.ai) — an open-source Identity Provider for AI agents built on OAuth 2.0 (RFC 6749) and aligned with the [AGNTCY](https://agntcy.org) open standard. + +## Requirements + +- Java 17+ +- A running AgentIdP server + +## Installation + +### Maven + +```xml + + ai.sentryagent + idp-sdk + 1.0.0 + +``` + +## Quick Start + +```java +import ai.sentryagent.idp.AgentIdPClient; +import ai.sentryagent.idp.models.*; + +AgentIdPClient client = new AgentIdPClient( + "https://idp.example.com", + "your-agent-client-id", + "sk_live_..." +); + +// Register a new AI agent +Agent agent = client.agents().registerAgent( + RegisterAgentRequest.builder() + .email("screener@example.com") + .agentType("screener") + .version("1.0.0") + .capabilities(List.of("read", "classify")) + .owner("platform-team") + .deploymentEnv("production") + .build() +); +System.out.println("Registered: " + agent.getAgentId()); +``` + +## Authentication + +OAuth 2.0 Client Credentials are managed automatically. Tokens are cached and refreshed 60 seconds before expiry. The `TokenManager` is thread-safe. + +```java +// Custom scope (optional — defaults to all four scopes) +AgentIdPClient client = new AgentIdPClient( + "https://idp.example.com", + "my-client-id", + "my-client-secret", + "agents:read agents:write" +); +``` + +## Agent Registry + +```java +// Register +Agent agent = client.agents().registerAgent( + RegisterAgentRequest.builder() + .email("...").agentType("screener").version("1.0.0") + .capabilities(List.of("read")).owner("team").deploymentEnv("production") + .build()); + +// List (with optional filters) +PaginatedAgents agents = client.agents().listAgents( + ListAgentsParams.builder().status("active").page(1).limit(20).build()); + +// Get by ID +Agent agent = client.agents().getAgent("agent-uuid"); + +// Partial update +Agent updated = client.agents().updateAgent("agent-uuid", + UpdateAgentRequest.builder().version("2.0.0").build()); + +// Decommission (permanent) +client.agents().decommissionAgent("agent-uuid"); +``` + +## Credential Management + +```java +// Generate (returns one-time ClientSecret) +CredentialWithSecret cred = client.credentials().generateCredential("agent-uuid"); +System.out.println(cred.getClientSecret()); // store this — shown only once + +// List +PaginatedCredentials creds = client.credentials().listCredentials("agent-uuid", 1, 20); + +// Rotate +CredentialWithSecret newCred = client.credentials().rotateCredential("agent-uuid", "cred-uuid"); + +// Revoke +Credential revoked = client.credentials().revokeCredential("agent-uuid", "cred-uuid"); +``` + +## Token Operations + +```java +// Introspect (RFC 7662) +IntrospectResponse result = client.tokens().introspectToken("access-token-to-check"); +if (result.isActive()) { + System.out.println("Token belongs to: " + result.getSub()); +} + +// Revoke +client.tokens().revokeToken("access-token-to-revoke"); +``` + +## Audit Log + +```java +// Query with filters +PaginatedAuditEvents events = client.audit().queryAuditLog( + QueryAuditParams.builder() + .agentId("agent-uuid") + .action("token.issued") + .outcome("success") + .fromDate("2026-01-01") + .toDate("2026-01-31") + .page(1).limit(50) + .build()); + +// Get single event +AuditEvent event = client.audit().getAuditEvent("event-uuid"); +``` + +## Async Methods + +Every sync method has an async counterpart returning `CompletableFuture`: + +```java +CompletableFuture future = client.agents().getAgentAsync("uuid-1"); +future.thenAccept(agent -> System.out.println(agent.getAgentId())); + +// Compose multiple async calls +client.agents().getAgentAsync("uuid-1") + .thenCompose(agent -> client.credentials().generateCredentialAsync(agent.getAgentId())) + .thenAccept(cred -> System.out.println("New secret: " + cred.getClientSecret())); +``` + +## Error Handling + +All errors are thrown as `AgentIdPException` (extends `RuntimeException`): + +```java +try { + Agent agent = client.agents().getAgent("unknown-id"); +} catch (AgentIdPException ex) { + System.out.printf("code=%s status=%d%n", ex.getCode(), ex.getHttpStatus()); + // e.g. code=AgentNotFoundError status=404 +} +``` + +| Method | Type | Description | +|------------------|--------------------------|-------------------------------------------------| +| `getCode()` | `String` | Machine-readable error code | +| `getMessage()` | `String` | Human-readable description | +| `getHttpStatus()`| `int` | HTTP status code (0 for network/build errors) | +| `getDetails()` | `Map` | Optional structured context from the API | + +## API Coverage + +| Endpoint | Method | SDK Method | +|--------------------------------------------------|--------|-----------------------------------------| +| POST /api/v1/agents | POST | `agents().registerAgent()` | +| GET /api/v1/agents | GET | `agents().listAgents()` | +| GET /api/v1/agents/:id | GET | `agents().getAgent()` | +| PATCH /api/v1/agents/:id | PATCH | `agents().updateAgent()` | +| DELETE /api/v1/agents/:id | DELETE | `agents().decommissionAgent()` | +| POST /api/v1/agents/:id/credentials | POST | `credentials().generateCredential()` | +| GET /api/v1/agents/:id/credentials | GET | `credentials().listCredentials()` | +| POST /api/v1/agents/:id/credentials/:cid/rotate | POST | `credentials().rotateCredential()` | +| DELETE /api/v1/agents/:id/credentials/:cid | DELETE | `credentials().revokeCredential()` | +| POST /api/v1/token | POST | (TokenManager — automatic) | +| POST /api/v1/token/introspect | POST | `tokens().introspectToken()` | +| POST /api/v1/token/revoke | POST | `tokens().revokeToken()` | +| GET /api/v1/audit | GET | `audit().queryAuditLog()` | +| GET /api/v1/audit/:id | GET | `audit().getAuditEvent()` | + +## License + +Apache 2.0 — see [LICENSE](../LICENSE). diff --git a/sdk-java/pom.xml b/sdk-java/pom.xml new file mode 100644 index 0000000..3456155 --- /dev/null +++ b/sdk-java/pom.xml @@ -0,0 +1,100 @@ + + + 4.0.0 + + ai.sentryagent + idp-sdk + 1.0.0 + jar + + SentryAgent.ai AgentIdP Java SDK + Java client for the SentryAgent.ai AgentIdP API + + + 17 + 17 + UTF-8 + 2.17.0 + 5.10.2 + 0.8.11 + + + + + + com.fasterxml.jackson.core + jackson-databind + ${jackson.version} + + + + + org.junit.jupiter + junit-jupiter + ${junit.version} + test + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + 3.11.0 + + ${java.version} + + + + + org.apache.maven.plugins + maven-surefire-plugin + 3.2.5 + + false + + + + + + org.jacoco + jacoco-maven-plugin + ${jacoco.version} + + + prepare-agent + prepare-agent + + + report + test + report + + + check + verify + check + + + + BUNDLE + + + INSTRUCTION + COVEREDRATIO + 0.80 + + + + + + + + + + + diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/AgentIdPClient.java b/sdk-java/src/main/java/ai/sentryagent/idp/AgentIdPClient.java new file mode 100644 index 0000000..1ccd69b --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/AgentIdPClient.java @@ -0,0 +1,88 @@ +package ai.sentryagent.idp; + +import ai.sentryagent.idp.internal.HttpHelper; +import ai.sentryagent.idp.services.*; + +import java.net.http.HttpClient; +import java.time.Duration; + +/** + * Top-level client for the SentryAgent.ai AgentIdP API. + * Composes all four service clients and manages token acquisition automatically. + * + *
{@code
+ * AgentIdPClient client = new AgentIdPClient(
+ *     "https://idp.example.com",
+ *     "my-client-id",
+ *     "sk_live_...",
+ *     "agents:read agents:write tokens:read audit:read"
+ * );
+ *
+ * Agent agent = client.agents().getAgent("uuid-1");
+ * }
+ */ +public final class AgentIdPClient { + + private static final String DEFAULT_SCOPE = "agents:read agents:write tokens:read audit:read"; + + private final TokenManager tokenManager; + private final AgentRegistryClient agentsClient; + private final CredentialClient credentialsClient; + private final TokenClient tokensClient; + private final AuditClient auditClient; + + /** + * Creates a new AgentIdPClient with default scope and a shared HttpClient. + * + * @param baseUrl Root URL of the AgentIdP server (e.g. {@code "https://idp.example.com"}) + * @param clientId OAuth 2.0 client ID + * @param clientSecret OAuth 2.0 client secret + */ + public AgentIdPClient(String baseUrl, String clientId, String clientSecret) { + this(baseUrl, clientId, clientSecret, DEFAULT_SCOPE); + } + + /** + * Creates a new AgentIdPClient with a custom scope. + * + * @param baseUrl Root URL of the AgentIdP server + * @param clientId OAuth 2.0 client ID + * @param clientSecret OAuth 2.0 client secret + * @param scope Space-separated OAuth 2.0 scopes to request + */ + public AgentIdPClient(String baseUrl, String clientId, String clientSecret, String scope) { + this(baseUrl, clientId, clientSecret, scope, + HttpClient.newBuilder() + .connectTimeout(Duration.ofSeconds(10)) + .build()); + } + + /** + * Package-visible constructor that accepts a custom HttpClient (for testing). + */ + AgentIdPClient(String baseUrl, String clientId, String clientSecret, String scope, HttpClient httpClient) { + String base = baseUrl.endsWith("/") ? baseUrl.substring(0, baseUrl.length() - 1) : baseUrl; + this.tokenManager = new TokenManager(base, clientId, clientSecret, scope, httpClient); + + HttpHelper httpHelper = new HttpHelper(httpClient); + this.agentsClient = new AgentRegistryClient(base, tokenManager::getToken, httpHelper); + this.credentialsClient = new CredentialClient(base, tokenManager::getToken, httpHelper); + this.tokensClient = new TokenClient(base, tokenManager::getToken, httpClient); + this.auditClient = new AuditClient(base, tokenManager::getToken, httpHelper); + } + + /** Returns the Agent Registry service client. */ + public AgentRegistryClient agents() { return agentsClient; } + + /** Returns the Credential Management service client. */ + public CredentialClient credentials() { return credentialsClient; } + + /** Returns the Token service client (introspect + revoke). */ + public TokenClient tokens() { return tokensClient; } + + /** Returns the Audit Log service client. */ + public AuditClient audit() { return auditClient; } + + /** Invalidates the cached access token. The next API call will fetch a fresh one. */ + public void clearTokenCache() { tokenManager.clearCache(); } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/AgentIdPException.java b/sdk-java/src/main/java/ai/sentryagent/idp/AgentIdPException.java new file mode 100644 index 0000000..ab546b5 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/AgentIdPException.java @@ -0,0 +1,82 @@ +package ai.sentryagent.idp; + +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import java.util.Map; + +/** + * Thrown for all API and network-level failures. + * Extends RuntimeException — callers may catch if needed but are not required to. + */ +public final class AgentIdPException extends RuntimeException { + + private static final ObjectMapper MAPPER = new ObjectMapper(); + + private final String code; + private final int httpStatus; + private final Map details; + + public AgentIdPException(String code, String message, int httpStatus, Map details, Throwable cause) { + super(message, cause); + this.code = code; + this.httpStatus = httpStatus; + this.details = details; + } + + public AgentIdPException(String code, String message, int httpStatus) { + this(code, message, httpStatus, null, null); + } + + /** Machine-readable error code (e.g. {@code "AgentNotFoundError"}). */ + public String getCode() { return code; } + + /** HTTP response status code, or 0 for network/build errors. */ + public int getHttpStatus() { return httpStatus; } + + /** Optional structured context from the API response. */ + public Map getDetails() { return details; } + + // ─── Factory methods ────────────────────────────────────────────────────── + + /** + * Creates an AgentIdPException from a raw JSON API error response body. + * Falls back to UNKNOWN_ERROR if the body cannot be parsed. + */ + public static AgentIdPException fromApiError(String responseBody, int httpStatus) { + try { + JsonNode node = MAPPER.readTree(responseBody); + String code = node.path("code").asText("UNKNOWN_ERROR"); + String message = node.path("message").asText("Unexpected HTTP " + httpStatus); + if (code.isEmpty()) code = "UNKNOWN_ERROR"; + return new AgentIdPException(code, message, httpStatus); + } catch (Exception e) { + return new AgentIdPException("UNKNOWN_ERROR", "Unexpected HTTP " + httpStatus, httpStatus); + } + } + + /** + * Creates an AgentIdPException from an OAuth 2.0 error response body. + * Falls back to unknown_error if the body cannot be parsed. + */ + public static AgentIdPException fromOAuth2Error(String responseBody, int httpStatus) { + try { + JsonNode node = MAPPER.readTree(responseBody); + String code = node.path("error").asText("unknown_error"); + String message = node.path("error_description").asText("Unexpected HTTP " + httpStatus); + if (code.isEmpty()) code = "unknown_error"; + return new AgentIdPException(code, message, httpStatus); + } catch (Exception e) { + return new AgentIdPException("unknown_error", "Unexpected HTTP " + httpStatus, httpStatus); + } + } + + /** Creates an AgentIdPException wrapping a transport-level failure. */ + public static AgentIdPException networkError(Throwable cause) { + return new AgentIdPException("NETWORK_ERROR", "Network error: " + cause.getMessage(), 0, null, cause); + } + + @Override + public String toString() { + return "AgentIdPException{code='" + code + "', httpStatus=" + httpStatus + ", message='" + getMessage() + "'}"; + } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/TokenManager.java b/sdk-java/src/main/java/ai/sentryagent/idp/TokenManager.java new file mode 100644 index 0000000..adb41bf --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/TokenManager.java @@ -0,0 +1,101 @@ +package ai.sentryagent.idp; + +import ai.sentryagent.idp.models.TokenResponse; +import com.fasterxml.jackson.databind.ObjectMapper; + +import java.io.IOException; +import java.net.URI; +import java.net.URLEncoder; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.time.Duration; +import java.time.Instant; + +/** + * Obtains and caches OAuth 2.0 client credentials tokens. + * Thread-safe: all cache access is synchronized. + * Tokens are refreshed 60 seconds before they expire. + */ +public final class TokenManager { + + private static final int REFRESH_BUFFER_SECONDS = 60; + private static final ObjectMapper MAPPER = new ObjectMapper(); + + private final String baseUrl; + private final String clientId; + private final String clientSecret; + private final String scope; + private final HttpClient httpClient; + + private String cachedToken; + private Instant tokenExpiresAt; + + public TokenManager(String baseUrl, String clientId, String clientSecret, String scope) { + this(baseUrl, clientId, clientSecret, scope, + HttpClient.newBuilder().connectTimeout(Duration.ofSeconds(10)).build()); + } + + /** Package-visible constructor for injecting a custom HttpClient in tests. */ + TokenManager(String baseUrl, String clientId, String clientSecret, String scope, HttpClient httpClient) { + this.baseUrl = baseUrl.endsWith("/") ? baseUrl.substring(0, baseUrl.length() - 1) : baseUrl; + this.clientId = clientId; + this.clientSecret = clientSecret; + this.scope = scope; + this.httpClient = httpClient; + } + + /** + * Returns a valid access token, fetching a new one if the cache is empty + * or within the 60-second refresh buffer. + */ + public synchronized String getToken() { + if (cachedToken != null && tokenExpiresAt != null + && Instant.now().plusSeconds(REFRESH_BUFFER_SECONDS).isBefore(tokenExpiresAt)) { + return cachedToken; + } + TokenResponse tr = fetchToken(); + cachedToken = tr.getAccessToken(); + tokenExpiresAt = Instant.now().plusSeconds(tr.getExpiresIn()); + return cachedToken; + } + + /** Invalidates the cached token. The next call to {@link #getToken()} fetches a fresh one. */ + public synchronized void clearCache() { + cachedToken = null; + tokenExpiresAt = null; + } + + private TokenResponse fetchToken() { + String form = "grant_type=client_credentials" + + "&client_id=" + encode(clientId) + + "&client_secret=" + encode(clientSecret) + + "&scope=" + encode(scope); + + HttpRequest req = HttpRequest.newBuilder() + .uri(URI.create(baseUrl + "/api/v1/token")) + .POST(HttpRequest.BodyPublishers.ofString(form)) + .header("Content-Type", "application/x-www-form-urlencoded") + .build(); + + try { + HttpResponse resp = httpClient.send(req, HttpResponse.BodyHandlers.ofString()); + if (resp.statusCode() != 200) { + throw AgentIdPException.fromOAuth2Error(resp.body(), resp.statusCode()); + } + return MAPPER.readValue(resp.body(), TokenResponse.class); + } catch (AgentIdPException e) { + throw e; + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw AgentIdPException.networkError(e); + } catch (IOException e) { + throw AgentIdPException.networkError(e); + } + } + + private static String encode(String value) { + return URLEncoder.encode(value, StandardCharsets.UTF_8); + } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/internal/HttpHelper.java b/sdk-java/src/main/java/ai/sentryagent/idp/internal/HttpHelper.java new file mode 100644 index 0000000..348adc0 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/internal/HttpHelper.java @@ -0,0 +1,97 @@ +package ai.sentryagent.idp.internal; + +import ai.sentryagent.idp.AgentIdPException; +import com.fasterxml.jackson.databind.ObjectMapper; + +import java.io.IOException; +import java.net.URI; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.util.concurrent.CompletableFuture; + +/** + * Shared HTTP helper for all service clients. + * Handles JSON serialization, Authorization header injection, and error mapping. + */ +public final class HttpHelper { + + private static final ObjectMapper MAPPER = new ObjectMapper(); + + private final HttpClient httpClient; + + public HttpHelper(HttpClient httpClient) { + this.httpClient = httpClient; + } + + /** + * Performs a synchronous JSON request and unmarshals the response into {@code responseType}. + * Returns null for 204 No Content responses. + * + * @throws AgentIdPException on HTTP errors or network failures + */ + public T request(String method, String url, Object body, String token, Class responseType) { + try { + HttpRequest req = buildRequest(method, url, body, token); + HttpResponse resp = httpClient.send(req, HttpResponse.BodyHandlers.ofString()); + return handleResponse(resp, responseType); + } catch (AgentIdPException e) { + throw e; + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw AgentIdPException.networkError(e); + } catch (IOException e) { + throw AgentIdPException.networkError(e); + } + } + + /** + * Performs an asynchronous JSON request and returns a CompletableFuture. + * + * @throws AgentIdPException (wrapped in CompletableFuture) on HTTP errors + */ + public CompletableFuture requestAsync(String method, String url, Object body, String token, Class responseType) { + try { + HttpRequest req = buildRequest(method, url, body, token); + return httpClient.sendAsync(req, HttpResponse.BodyHandlers.ofString()) + .thenApply(resp -> handleResponse(resp, responseType)); + } catch (Exception e) { + return CompletableFuture.failedFuture(AgentIdPException.networkError(e)); + } + } + + private HttpRequest buildRequest(String method, String url, Object body, String token) throws IOException { + HttpRequest.BodyPublisher publisher = body != null + ? HttpRequest.BodyPublishers.ofString(MAPPER.writeValueAsString(body)) + : HttpRequest.BodyPublishers.noBody(); + + HttpRequest.Builder builder = HttpRequest.newBuilder() + .uri(URI.create(url)) + .method(method, publisher) + .header("Accept", "application/json"); + + if (body != null) { + builder.header("Content-Type", "application/json"); + } + if (token != null && !token.isEmpty()) { + builder.header("Authorization", "Bearer " + token); + } + + return builder.build(); + } + + private T handleResponse(HttpResponse resp, Class responseType) { + int status = resp.statusCode(); + if (status < 200 || status >= 300) { + throw AgentIdPException.fromApiError(resp.body(), status); + } + if (status == 204 || responseType == Void.class) { + return null; + } + try { + return MAPPER.readValue(resp.body(), responseType); + } catch (IOException e) { + throw new AgentIdPException("PARSE_ERROR", "Failed to parse response: " + e.getMessage(), status); + } + } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/models/Agent.java b/sdk-java/src/main/java/ai/sentryagent/idp/models/Agent.java new file mode 100644 index 0000000..2be86c5 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/models/Agent.java @@ -0,0 +1,39 @@ +package ai.sentryagent.idp.models; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** A registered AI agent identity. */ +@JsonIgnoreProperties(ignoreUnknown = true) +public final class Agent { + + @JsonProperty("agentId") private String agentId; + @JsonProperty("email") private String email; + @JsonProperty("agentType") private String agentType; + @JsonProperty("version") private String version; + @JsonProperty("capabilities") private java.util.List capabilities; + @JsonProperty("owner") private String owner; + @JsonProperty("deploymentEnv") private String deploymentEnv; + @JsonProperty("status") private String status; + @JsonProperty("createdAt") private String createdAt; + @JsonProperty("updatedAt") private String updatedAt; + + /** Required by Jackson. */ + public Agent() {} + + public String getAgentId() { return agentId; } + public String getEmail() { return email; } + public String getAgentType() { return agentType; } + public String getVersion() { return version; } + public java.util.List getCapabilities() { return capabilities; } + public String getOwner() { return owner; } + public String getDeploymentEnv() { return deploymentEnv; } + public String getStatus() { return status; } + public String getCreatedAt() { return createdAt; } + public String getUpdatedAt() { return updatedAt; } + + @Override + public String toString() { + return "Agent{agentId='" + agentId + "', email='" + email + "', status='" + status + "'}"; + } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/models/AuditEvent.java b/sdk-java/src/main/java/ai/sentryagent/idp/models/AuditEvent.java new file mode 100644 index 0000000..ce929c1 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/models/AuditEvent.java @@ -0,0 +1,35 @@ +package ai.sentryagent.idp.models; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.Map; + +/** An immutable audit event record. */ +@JsonIgnoreProperties(ignoreUnknown = true) +public final class AuditEvent { + + @JsonProperty("eventId") private String eventId; + @JsonProperty("agentId") private String agentId; + @JsonProperty("action") private String action; + @JsonProperty("outcome") private String outcome; + @JsonProperty("ipAddress") private String ipAddress; + @JsonProperty("userAgent") private String userAgent; + @JsonProperty("metadata") private Map metadata; + @JsonProperty("timestamp") private String timestamp; + + public AuditEvent() {} + + public String getEventId() { return eventId; } + public String getAgentId() { return agentId; } + public String getAction() { return action; } + public String getOutcome() { return outcome; } + public String getIpAddress() { return ipAddress; } + public String getUserAgent() { return userAgent; } + public Map getMetadata() { return metadata; } + public String getTimestamp() { return timestamp; } + + @Override + public String toString() { + return "AuditEvent{eventId='" + eventId + "', action='" + action + "', outcome='" + outcome + "'}"; + } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/models/Credential.java b/sdk-java/src/main/java/ai/sentryagent/idp/models/Credential.java new file mode 100644 index 0000000..bac541f --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/models/Credential.java @@ -0,0 +1,30 @@ +package ai.sentryagent.idp.models; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** A credential record (clientSecret is never included). */ +@JsonIgnoreProperties(ignoreUnknown = true) +public class Credential { + + @JsonProperty("credentialId") protected String credentialId; + @JsonProperty("clientId") protected String clientId; + @JsonProperty("status") protected String status; + @JsonProperty("createdAt") protected String createdAt; + @JsonProperty("expiresAt") protected String expiresAt; + @JsonProperty("revokedAt") protected String revokedAt; + + public Credential() {} + + public String getCredentialId() { return credentialId; } + public String getClientId() { return clientId; } + public String getStatus() { return status; } + public String getCreatedAt() { return createdAt; } + public String getExpiresAt() { return expiresAt; } + public String getRevokedAt() { return revokedAt; } + + @Override + public String toString() { + return "Credential{credentialId='" + credentialId + "', status='" + status + "'}"; + } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/models/CredentialWithSecret.java b/sdk-java/src/main/java/ai/sentryagent/idp/models/CredentialWithSecret.java new file mode 100644 index 0000000..0485ea2 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/models/CredentialWithSecret.java @@ -0,0 +1,19 @@ +package ai.sentryagent.idp.models; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** + * Credential with a one-time plaintext clientSecret. + * Returned only on credential creation and rotation. + */ +@JsonIgnoreProperties(ignoreUnknown = true) +public final class CredentialWithSecret extends Credential { + + @JsonProperty("clientSecret") private String clientSecret; + + public CredentialWithSecret() {} + + /** The one-time plaintext secret. Store it securely; it is never shown again. */ + public String getClientSecret() { return clientSecret; } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/models/IntrospectResponse.java b/sdk-java/src/main/java/ai/sentryagent/idp/models/IntrospectResponse.java new file mode 100644 index 0000000..39c4310 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/models/IntrospectResponse.java @@ -0,0 +1,27 @@ +package ai.sentryagent.idp.models; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** Token introspection response (RFC 7662). */ +@JsonIgnoreProperties(ignoreUnknown = true) +public final class IntrospectResponse { + + @JsonProperty("active") private boolean active; + @JsonProperty("sub") private String sub; + @JsonProperty("client_id") private String clientId; + @JsonProperty("scope") private String scope; + @JsonProperty("token_type") private String tokenType; + @JsonProperty("iat") private Long iat; + @JsonProperty("exp") private Long exp; + + public IntrospectResponse() {} + + public boolean isActive() { return active; } + public String getSub() { return sub; } + public String getClientId() { return clientId; } + public String getScope() { return scope; } + public String getTokenType() { return tokenType; } + public Long getIat() { return iat; } + public Long getExp() { return exp; } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/models/ListAgentsParams.java b/sdk-java/src/main/java/ai/sentryagent/idp/models/ListAgentsParams.java new file mode 100644 index 0000000..e4615bf --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/models/ListAgentsParams.java @@ -0,0 +1,42 @@ +package ai.sentryagent.idp.models; + +/** Optional query parameters for listing agents. */ +public final class ListAgentsParams { + private final String status; + private final String agentType; + private final String deploymentEnv; + private final Integer page; + private final Integer limit; + + private ListAgentsParams(Builder b) { + this.status = b.status; + this.agentType = b.agentType; + this.deploymentEnv = b.deploymentEnv; + this.page = b.page; + this.limit = b.limit; + } + + public String getStatus() { return status; } + public String getAgentType() { return agentType; } + public String getDeploymentEnv() { return deploymentEnv; } + public Integer getPage() { return page; } + public Integer getLimit() { return limit; } + + public static Builder builder() { return new Builder(); } + + public static final class Builder { + private String status; + private String agentType; + private String deploymentEnv; + private Integer page; + private Integer limit; + + public Builder status(String status) { this.status = status; return this; } + public Builder agentType(String agentType) { this.agentType = agentType; return this; } + public Builder deploymentEnv(String env) { this.deploymentEnv = env; return this; } + public Builder page(int page) { this.page = page; return this; } + public Builder limit(int limit) { this.limit = limit; return this; } + + public ListAgentsParams build() { return new ListAgentsParams(this); } + } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/models/PaginatedAgents.java b/sdk-java/src/main/java/ai/sentryagent/idp/models/PaginatedAgents.java new file mode 100644 index 0000000..c932e22 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/models/PaginatedAgents.java @@ -0,0 +1,22 @@ +package ai.sentryagent.idp.models; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; + +/** Paginated list of agents. */ +@JsonIgnoreProperties(ignoreUnknown = true) +public final class PaginatedAgents { + + @JsonProperty("data") private List data; + @JsonProperty("total") private int total; + @JsonProperty("page") private int page; + @JsonProperty("limit") private int limit; + + public PaginatedAgents() {} + + public List getData() { return data; } + public int getTotal() { return total; } + public int getPage() { return page; } + public int getLimit() { return limit; } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/models/PaginatedAuditEvents.java b/sdk-java/src/main/java/ai/sentryagent/idp/models/PaginatedAuditEvents.java new file mode 100644 index 0000000..0a7c996 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/models/PaginatedAuditEvents.java @@ -0,0 +1,22 @@ +package ai.sentryagent.idp.models; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; + +/** Paginated list of audit events. */ +@JsonIgnoreProperties(ignoreUnknown = true) +public final class PaginatedAuditEvents { + + @JsonProperty("data") private List data; + @JsonProperty("total") private int total; + @JsonProperty("page") private int page; + @JsonProperty("limit") private int limit; + + public PaginatedAuditEvents() {} + + public List getData() { return data; } + public int getTotal() { return total; } + public int getPage() { return page; } + public int getLimit() { return limit; } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/models/PaginatedCredentials.java b/sdk-java/src/main/java/ai/sentryagent/idp/models/PaginatedCredentials.java new file mode 100644 index 0000000..c45f170 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/models/PaginatedCredentials.java @@ -0,0 +1,22 @@ +package ai.sentryagent.idp.models; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; + +/** Paginated list of credentials. */ +@JsonIgnoreProperties(ignoreUnknown = true) +public final class PaginatedCredentials { + + @JsonProperty("data") private List data; + @JsonProperty("total") private int total; + @JsonProperty("page") private int page; + @JsonProperty("limit") private int limit; + + public PaginatedCredentials() {} + + public List getData() { return data; } + public int getTotal() { return total; } + public int getPage() { return page; } + public int getLimit() { return limit; } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/models/QueryAuditParams.java b/sdk-java/src/main/java/ai/sentryagent/idp/models/QueryAuditParams.java new file mode 100644 index 0000000..02cf9a2 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/models/QueryAuditParams.java @@ -0,0 +1,52 @@ +package ai.sentryagent.idp.models; + +/** Optional query parameters for querying the audit log. */ +public final class QueryAuditParams { + private final String agentId; + private final String action; + private final String outcome; + private final String fromDate; + private final String toDate; + private final Integer page; + private final Integer limit; + + private QueryAuditParams(Builder b) { + this.agentId = b.agentId; + this.action = b.action; + this.outcome = b.outcome; + this.fromDate = b.fromDate; + this.toDate = b.toDate; + this.page = b.page; + this.limit = b.limit; + } + + public String getAgentId() { return agentId; } + public String getAction() { return action; } + public String getOutcome() { return outcome; } + public String getFromDate() { return fromDate; } + public String getToDate() { return toDate; } + public Integer getPage() { return page; } + public Integer getLimit() { return limit; } + + public static Builder builder() { return new Builder(); } + + public static final class Builder { + private String agentId; + private String action; + private String outcome; + private String fromDate; + private String toDate; + private Integer page; + private Integer limit; + + public Builder agentId(String agentId) { this.agentId = agentId; return this; } + public Builder action(String action) { this.action = action; return this; } + public Builder outcome(String outcome) { this.outcome = outcome; return this; } + public Builder fromDate(String from) { this.fromDate = from; return this; } + public Builder toDate(String to) { this.toDate = to; return this; } + public Builder page(int page) { this.page = page; return this; } + public Builder limit(int limit) { this.limit = limit; return this; } + + public QueryAuditParams build() { return new QueryAuditParams(this); } + } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/models/RegisterAgentRequest.java b/sdk-java/src/main/java/ai/sentryagent/idp/models/RegisterAgentRequest.java new file mode 100644 index 0000000..b67a109 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/models/RegisterAgentRequest.java @@ -0,0 +1,51 @@ +package ai.sentryagent.idp.models; + +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; + +/** Request body for POST /api/v1/agents. */ +public final class RegisterAgentRequest { + + @JsonProperty("email") private final String email; + @JsonProperty("agentType") private final String agentType; + @JsonProperty("version") private final String version; + @JsonProperty("capabilities") private final List capabilities; + @JsonProperty("owner") private final String owner; + @JsonProperty("deploymentEnv") private final String deploymentEnv; + + private RegisterAgentRequest(Builder b) { + this.email = b.email; + this.agentType = b.agentType; + this.version = b.version; + this.capabilities = b.capabilities; + this.owner = b.owner; + this.deploymentEnv = b.deploymentEnv; + } + + public String getEmail() { return email; } + public String getAgentType() { return agentType; } + public String getVersion() { return version; } + public List getCapabilities() { return capabilities; } + public String getOwner() { return owner; } + public String getDeploymentEnv() { return deploymentEnv; } + + public static Builder builder() { return new Builder(); } + + public static final class Builder { + private String email; + private String agentType; + private String version; + private List capabilities; + private String owner; + private String deploymentEnv; + + public Builder email(String email) { this.email = email; return this; } + public Builder agentType(String agentType) { this.agentType = agentType; return this; } + public Builder version(String version) { this.version = version; return this; } + public Builder capabilities(List capabilities) { this.capabilities = capabilities; return this; } + public Builder owner(String owner) { this.owner = owner; return this; } + public Builder deploymentEnv(String deploymentEnv) { this.deploymentEnv = deploymentEnv; return this; } + + public RegisterAgentRequest build() { return new RegisterAgentRequest(this); } + } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/models/TokenResponse.java b/sdk-java/src/main/java/ai/sentryagent/idp/models/TokenResponse.java new file mode 100644 index 0000000..aa6f871 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/models/TokenResponse.java @@ -0,0 +1,21 @@ +package ai.sentryagent.idp.models; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import com.fasterxml.jackson.annotation.JsonProperty; + +/** OAuth 2.0 access token response (RFC 6749). */ +@JsonIgnoreProperties(ignoreUnknown = true) +public final class TokenResponse { + + @JsonProperty("access_token") private String accessToken; + @JsonProperty("token_type") private String tokenType; + @JsonProperty("expires_in") private int expiresIn; + @JsonProperty("scope") private String scope; + + public TokenResponse() {} + + public String getAccessToken() { return accessToken; } + public String getTokenType() { return tokenType; } + public int getExpiresIn() { return expiresIn; } + public String getScope() { return scope; } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/models/UpdateAgentRequest.java b/sdk-java/src/main/java/ai/sentryagent/idp/models/UpdateAgentRequest.java new file mode 100644 index 0000000..9df409d --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/models/UpdateAgentRequest.java @@ -0,0 +1,56 @@ +package ai.sentryagent.idp.models; + +import com.fasterxml.jackson.annotation.JsonInclude; +import com.fasterxml.jackson.annotation.JsonProperty; +import java.util.List; + +/** + * Request body for PATCH /api/v1/agents/:id. + * All fields are optional — null fields are omitted from the JSON body. + */ +@JsonInclude(JsonInclude.Include.NON_NULL) +public final class UpdateAgentRequest { + + @JsonProperty("agentType") private final String agentType; + @JsonProperty("version") private final String version; + @JsonProperty("capabilities") private final List capabilities; + @JsonProperty("owner") private final String owner; + @JsonProperty("deploymentEnv") private final String deploymentEnv; + @JsonProperty("status") private final String status; + + private UpdateAgentRequest(Builder b) { + this.agentType = b.agentType; + this.version = b.version; + this.capabilities = b.capabilities; + this.owner = b.owner; + this.deploymentEnv = b.deploymentEnv; + this.status = b.status; + } + + public String getAgentType() { return agentType; } + public String getVersion() { return version; } + public List getCapabilities() { return capabilities; } + public String getOwner() { return owner; } + public String getDeploymentEnv() { return deploymentEnv; } + public String getStatus() { return status; } + + public static Builder builder() { return new Builder(); } + + public static final class Builder { + private String agentType; + private String version; + private List capabilities; + private String owner; + private String deploymentEnv; + private String status; + + public Builder agentType(String agentType) { this.agentType = agentType; return this; } + public Builder version(String version) { this.version = version; return this; } + public Builder capabilities(List capabilities) { this.capabilities = capabilities; return this; } + public Builder owner(String owner) { this.owner = owner; return this; } + public Builder deploymentEnv(String deploymentEnv) { this.deploymentEnv = deploymentEnv; return this; } + public Builder status(String status) { this.status = status; return this; } + + public UpdateAgentRequest build() { return new UpdateAgentRequest(this); } + } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/services/AgentRegistryClient.java b/sdk-java/src/main/java/ai/sentryagent/idp/services/AgentRegistryClient.java new file mode 100644 index 0000000..b0f5d9e --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/services/AgentRegistryClient.java @@ -0,0 +1,105 @@ +package ai.sentryagent.idp.services; + +import ai.sentryagent.idp.internal.HttpHelper; +import ai.sentryagent.idp.models.*; + +import java.util.concurrent.CompletableFuture; +import java.util.function.Supplier; + +/** + * Client for the Agent Registry API endpoints. + * Provides both synchronous and asynchronous (CompletableFuture) methods. + */ +public final class AgentRegistryClient { + + private final String baseUrl; + private final Supplier tokenSupplier; + private final HttpHelper http; + + public AgentRegistryClient(String baseUrl, Supplier tokenSupplier, HttpHelper http) { + this.baseUrl = baseUrl.endsWith("/") ? baseUrl.substring(0, baseUrl.length() - 1) : baseUrl; + this.tokenSupplier = tokenSupplier; + this.http = http; + } + + // ─── Sync ───────────────────────────────────────────────────────────────── + + /** POST /api/v1/agents → 201 Agent */ + public Agent registerAgent(RegisterAgentRequest request) { + return http.request("POST", baseUrl + "/api/v1/agents", request, tokenSupplier.get(), Agent.class); + } + + /** GET /api/v1/agents → 200 PaginatedAgents */ + public PaginatedAgents listAgents(ListAgentsParams params) { + return http.request("GET", buildListUrl(params), null, tokenSupplier.get(), PaginatedAgents.class); + } + + /** GET /api/v1/agents/:id → 200 Agent */ + public Agent getAgent(String agentId) { + return http.request("GET", baseUrl + "/api/v1/agents/" + agentId, null, tokenSupplier.get(), Agent.class); + } + + /** PATCH /api/v1/agents/:id → 200 Agent */ + public Agent updateAgent(String agentId, UpdateAgentRequest request) { + return http.request("PATCH", baseUrl + "/api/v1/agents/" + agentId, request, tokenSupplier.get(), Agent.class); + } + + /** DELETE /api/v1/agents/:id → 204 No Content */ + public void decommissionAgent(String agentId) { + http.request("DELETE", baseUrl + "/api/v1/agents/" + agentId, null, tokenSupplier.get(), Void.class); + } + + // ─── Async ──────────────────────────────────────────────────────────────── + + /** Async version of {@link #registerAgent}. */ + public CompletableFuture registerAgentAsync(RegisterAgentRequest request) { + return CompletableFuture.supplyAsync(tokenSupplier) + .thenCompose(token -> http.requestAsync("POST", baseUrl + "/api/v1/agents", request, token, Agent.class)); + } + + /** Async version of {@link #listAgents}. */ + public CompletableFuture listAgentsAsync(ListAgentsParams params) { + return CompletableFuture.supplyAsync(tokenSupplier) + .thenCompose(token -> http.requestAsync("GET", buildListUrl(params), null, token, PaginatedAgents.class)); + } + + /** Async version of {@link #getAgent}. */ + public CompletableFuture getAgentAsync(String agentId) { + return CompletableFuture.supplyAsync(tokenSupplier) + .thenCompose(token -> http.requestAsync("GET", baseUrl + "/api/v1/agents/" + agentId, null, token, Agent.class)); + } + + /** Async version of {@link #updateAgent}. */ + public CompletableFuture updateAgentAsync(String agentId, UpdateAgentRequest request) { + return CompletableFuture.supplyAsync(tokenSupplier) + .thenCompose(token -> http.requestAsync("PATCH", baseUrl + "/api/v1/agents/" + agentId, request, token, Agent.class)); + } + + /** Async version of {@link #decommissionAgent}. */ + public CompletableFuture decommissionAgentAsync(String agentId) { + return CompletableFuture.supplyAsync(tokenSupplier) + .thenCompose(token -> http.requestAsync("DELETE", baseUrl + "/api/v1/agents/" + agentId, null, token, Void.class)); + } + + // ─── URL builder ────────────────────────────────────────────────────────── + + private String buildListUrl(ListAgentsParams params) { + StringBuilder url = new StringBuilder(baseUrl + "/api/v1/agents"); + if (params != null) { + StringBuilder query = new StringBuilder(); + appendParam(query, "status", params.getStatus()); + appendParam(query, "agentType", params.getAgentType()); + appendParam(query, "deploymentEnv", params.getDeploymentEnv()); + if (params.getPage() != null) appendParam(query, "page", params.getPage().toString()); + if (params.getLimit() != null) appendParam(query, "limit", params.getLimit().toString()); + if (query.length() > 0) url.append("?").append(query.substring(1)); // trim leading & + } + return url.toString(); + } + + private static void appendParam(StringBuilder sb, String key, String value) { + if (value != null && !value.isEmpty()) { + sb.append("&").append(key).append("=").append(value); + } + } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/services/AuditClient.java b/sdk-java/src/main/java/ai/sentryagent/idp/services/AuditClient.java new file mode 100644 index 0000000..222676d --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/services/AuditClient.java @@ -0,0 +1,76 @@ +package ai.sentryagent.idp.services; + +import ai.sentryagent.idp.internal.HttpHelper; +import ai.sentryagent.idp.models.AuditEvent; +import ai.sentryagent.idp.models.PaginatedAuditEvents; +import ai.sentryagent.idp.models.QueryAuditParams; + +import java.util.concurrent.CompletableFuture; +import java.util.function.Supplier; + +/** + * Client for the Audit Log API endpoints. + * Provides both synchronous and asynchronous (CompletableFuture) methods. + */ +public final class AuditClient { + + private final String baseUrl; + private final Supplier tokenSupplier; + private final HttpHelper http; + + public AuditClient(String baseUrl, Supplier tokenSupplier, HttpHelper http) { + this.baseUrl = baseUrl.endsWith("/") ? baseUrl.substring(0, baseUrl.length() - 1) : baseUrl; + this.tokenSupplier = tokenSupplier; + this.http = http; + } + + // ─── Sync ───────────────────────────────────────────────────────────────── + + /** GET /api/v1/audit → 200 PaginatedAuditEvents */ + public PaginatedAuditEvents queryAuditLog(QueryAuditParams params) { + return http.request("GET", buildQueryUrl(params), null, tokenSupplier.get(), PaginatedAuditEvents.class); + } + + /** GET /api/v1/audit/:id → 200 AuditEvent */ + public AuditEvent getAuditEvent(String eventId) { + return http.request("GET", baseUrl + "/api/v1/audit/" + eventId, null, tokenSupplier.get(), AuditEvent.class); + } + + // ─── Async ──────────────────────────────────────────────────────────────── + + /** Async version of {@link #queryAuditLog}. */ + public CompletableFuture queryAuditLogAsync(QueryAuditParams params) { + return CompletableFuture.supplyAsync(tokenSupplier) + .thenCompose(token -> http.requestAsync("GET", buildQueryUrl(params), null, token, PaginatedAuditEvents.class)); + } + + /** Async version of {@link #getAuditEvent}. */ + public CompletableFuture getAuditEventAsync(String eventId) { + return CompletableFuture.supplyAsync(tokenSupplier) + .thenCompose(token -> http.requestAsync("GET", baseUrl + "/api/v1/audit/" + eventId, null, token, AuditEvent.class)); + } + + // ─── URL builder ────────────────────────────────────────────────────────── + + private String buildQueryUrl(QueryAuditParams params) { + StringBuilder url = new StringBuilder(baseUrl + "/api/v1/audit"); + StringBuilder query = new StringBuilder(); + if (params != null) { + appendParam(query, "agentId", params.getAgentId()); + appendParam(query, "action", params.getAction()); + appendParam(query, "outcome", params.getOutcome()); + appendParam(query, "fromDate", params.getFromDate()); + appendParam(query, "toDate", params.getToDate()); + if (params.getPage() != null) appendParam(query, "page", params.getPage().toString()); + if (params.getLimit() != null) appendParam(query, "limit", params.getLimit().toString()); + } + if (query.length() > 0) url.append("?").append(query.substring(1)); + return url.toString(); + } + + private static void appendParam(StringBuilder sb, String key, String value) { + if (value != null && !value.isEmpty()) { + sb.append("&").append(key).append("=").append(value); + } + } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/services/CredentialClient.java b/sdk-java/src/main/java/ai/sentryagent/idp/services/CredentialClient.java new file mode 100644 index 0000000..3984c68 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/services/CredentialClient.java @@ -0,0 +1,94 @@ +package ai.sentryagent.idp.services; + +import ai.sentryagent.idp.internal.HttpHelper; +import ai.sentryagent.idp.models.*; + +import java.util.concurrent.CompletableFuture; +import java.util.function.Supplier; + +/** + * Client for the Credential Management API endpoints. + * Provides both synchronous and asynchronous (CompletableFuture) methods. + */ +public final class CredentialClient { + + private final String baseUrl; + private final Supplier tokenSupplier; + private final HttpHelper http; + + public CredentialClient(String baseUrl, Supplier tokenSupplier, HttpHelper http) { + this.baseUrl = baseUrl.endsWith("/") ? baseUrl.substring(0, baseUrl.length() - 1) : baseUrl; + this.tokenSupplier = tokenSupplier; + this.http = http; + } + + // ─── Sync ───────────────────────────────────────────────────────────────── + + /** POST /api/v1/agents/:id/credentials → 201 CredentialWithSecret */ + public CredentialWithSecret generateCredential(String agentId) { + return http.request("POST", baseUrl + "/api/v1/agents/" + agentId + "/credentials", + null, tokenSupplier.get(), CredentialWithSecret.class); + } + + /** GET /api/v1/agents/:id/credentials → 200 PaginatedCredentials */ + public PaginatedCredentials listCredentials(String agentId, Integer page, Integer limit) { + return http.request("GET", buildListUrl(agentId, page, limit), + null, tokenSupplier.get(), PaginatedCredentials.class); + } + + /** POST /api/v1/agents/:id/credentials/:credId/rotate → 200 CredentialWithSecret */ + public CredentialWithSecret rotateCredential(String agentId, String credentialId) { + return http.request("POST", + baseUrl + "/api/v1/agents/" + agentId + "/credentials/" + credentialId + "/rotate", + null, tokenSupplier.get(), CredentialWithSecret.class); + } + + /** DELETE /api/v1/agents/:id/credentials/:credId → 200 Credential */ + public Credential revokeCredential(String agentId, String credentialId) { + return http.request("DELETE", + baseUrl + "/api/v1/agents/" + agentId + "/credentials/" + credentialId, + null, tokenSupplier.get(), Credential.class); + } + + // ─── Async ──────────────────────────────────────────────────────────────── + + /** Async version of {@link #generateCredential}. */ + public CompletableFuture generateCredentialAsync(String agentId) { + return CompletableFuture.supplyAsync(tokenSupplier) + .thenCompose(token -> http.requestAsync("POST", + baseUrl + "/api/v1/agents/" + agentId + "/credentials", + null, token, CredentialWithSecret.class)); + } + + /** Async version of {@link #listCredentials}. */ + public CompletableFuture listCredentialsAsync(String agentId, Integer page, Integer limit) { + return CompletableFuture.supplyAsync(tokenSupplier) + .thenCompose(token -> http.requestAsync("GET", buildListUrl(agentId, page, limit), + null, token, PaginatedCredentials.class)); + } + + /** Async version of {@link #rotateCredential}. */ + public CompletableFuture rotateCredentialAsync(String agentId, String credentialId) { + return CompletableFuture.supplyAsync(tokenSupplier) + .thenCompose(token -> http.requestAsync("POST", + baseUrl + "/api/v1/agents/" + agentId + "/credentials/" + credentialId + "/rotate", + null, token, CredentialWithSecret.class)); + } + + /** Async version of {@link #revokeCredential}. */ + public CompletableFuture revokeCredentialAsync(String agentId, String credentialId) { + return CompletableFuture.supplyAsync(tokenSupplier) + .thenCompose(token -> http.requestAsync("DELETE", + baseUrl + "/api/v1/agents/" + agentId + "/credentials/" + credentialId, + null, token, Credential.class)); + } + + private String buildListUrl(String agentId, Integer page, Integer limit) { + StringBuilder url = new StringBuilder(baseUrl + "/api/v1/agents/" + agentId + "/credentials"); + StringBuilder query = new StringBuilder(); + if (page != null) { query.append("&page=").append(page); } + if (limit != null) { query.append("&limit=").append(limit); } + if (query.length() > 0) url.append("?").append(query.substring(1)); + return url.toString(); + } +} diff --git a/sdk-java/src/main/java/ai/sentryagent/idp/services/TokenClient.java b/sdk-java/src/main/java/ai/sentryagent/idp/services/TokenClient.java new file mode 100644 index 0000000..a41f1d8 --- /dev/null +++ b/sdk-java/src/main/java/ai/sentryagent/idp/services/TokenClient.java @@ -0,0 +1,127 @@ +package ai.sentryagent.idp.services; + +import ai.sentryagent.idp.AgentIdPException; +import ai.sentryagent.idp.models.IntrospectResponse; +import com.fasterxml.jackson.databind.ObjectMapper; + +import java.io.IOException; +import java.net.URI; +import java.net.URLEncoder; +import java.net.http.HttpClient; +import java.net.http.HttpRequest; +import java.net.http.HttpResponse; +import java.nio.charset.StandardCharsets; +import java.util.concurrent.CompletableFuture; +import java.util.function.Supplier; + +/** + * Client for token introspection and revocation endpoints. + * Uses form-encoded POST bodies (not JSON), per RFC 7009 / RFC 7662. + */ +public final class TokenClient { + + private static final ObjectMapper MAPPER = new ObjectMapper(); + + private final String baseUrl; + private final Supplier tokenSupplier; + private final HttpClient httpClient; + + public TokenClient(String baseUrl, Supplier tokenSupplier, HttpClient httpClient) { + this.baseUrl = baseUrl.endsWith("/") ? baseUrl.substring(0, baseUrl.length() - 1) : baseUrl; + this.tokenSupplier = tokenSupplier; + this.httpClient = httpClient; + } + + // ─── Sync ───────────────────────────────────────────────────────────────── + + /** POST /api/v1/token/introspect (form-encoded) → 200 IntrospectResponse */ + public IntrospectResponse introspectToken(String accessToken) { + String body = "token=" + encode(accessToken); + HttpRequest req = buildFormRequest(baseUrl + "/api/v1/token/introspect", body, tokenSupplier.get()); + try { + HttpResponse resp = httpClient.send(req, HttpResponse.BodyHandlers.ofString()); + if (resp.statusCode() < 200 || resp.statusCode() >= 300) { + throw AgentIdPException.fromApiError(resp.body(), resp.statusCode()); + } + return MAPPER.readValue(resp.body(), IntrospectResponse.class); + } catch (AgentIdPException e) { + throw e; + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw AgentIdPException.networkError(e); + } catch (IOException e) { + throw AgentIdPException.networkError(e); + } + } + + /** POST /api/v1/token/revoke (form-encoded) → 200 */ + public void revokeToken(String accessToken) { + String body = "token=" + encode(accessToken); + HttpRequest req = buildFormRequest(baseUrl + "/api/v1/token/revoke", body, tokenSupplier.get()); + try { + HttpResponse resp = httpClient.send(req, HttpResponse.BodyHandlers.ofString()); + if (resp.statusCode() < 200 || resp.statusCode() >= 300) { + throw AgentIdPException.fromApiError(resp.body(), resp.statusCode()); + } + } catch (AgentIdPException e) { + throw e; + } catch (InterruptedException e) { + Thread.currentThread().interrupt(); + throw AgentIdPException.networkError(e); + } catch (IOException e) { + throw AgentIdPException.networkError(e); + } + } + + // ─── Async ──────────────────────────────────────────────────────────────── + + /** Async version of {@link #introspectToken}. */ + public CompletableFuture introspectTokenAsync(String accessToken) { + return CompletableFuture.supplyAsync(tokenSupplier).thenCompose(token -> { + String body = "token=" + encode(accessToken); + HttpRequest req = buildFormRequest(baseUrl + "/api/v1/token/introspect", body, token); + return httpClient.sendAsync(req, HttpResponse.BodyHandlers.ofString()) + .thenApply(resp -> { + if (resp.statusCode() < 200 || resp.statusCode() >= 300) { + throw AgentIdPException.fromApiError(resp.body(), resp.statusCode()); + } + try { + return MAPPER.readValue(resp.body(), IntrospectResponse.class); + } catch (IOException e) { + throw new AgentIdPException("PARSE_ERROR", "Failed to parse introspect response: " + e.getMessage(), resp.statusCode()); + } + }); + }); + } + + /** Async version of {@link #revokeToken}. */ + public CompletableFuture revokeTokenAsync(String accessToken) { + return CompletableFuture.supplyAsync(tokenSupplier).thenCompose(token -> { + String body = "token=" + encode(accessToken); + HttpRequest req = buildFormRequest(baseUrl + "/api/v1/token/revoke", body, token); + return httpClient.sendAsync(req, HttpResponse.BodyHandlers.ofString()) + .thenApply(resp -> { + if (resp.statusCode() < 200 || resp.statusCode() >= 300) { + throw AgentIdPException.fromApiError(resp.body(), resp.statusCode()); + } + return (Void) null; + }); + }); + } + + // ─── Helpers ────────────────────────────────────────────────────────────── + + private HttpRequest buildFormRequest(String url, String formBody, String token) { + return HttpRequest.newBuilder() + .uri(URI.create(url)) + .POST(HttpRequest.BodyPublishers.ofString(formBody)) + .header("Content-Type", "application/x-www-form-urlencoded") + .header("Accept", "application/json") + .header("Authorization", "Bearer " + token) + .build(); + } + + private static String encode(String value) { + return URLEncoder.encode(value, StandardCharsets.UTF_8); + } +} diff --git a/sdk-java/src/test/java/ai/sentryagent/idp/AgentIdPClientTest.java b/sdk-java/src/test/java/ai/sentryagent/idp/AgentIdPClientTest.java new file mode 100644 index 0000000..6a5d07f --- /dev/null +++ b/sdk-java/src/test/java/ai/sentryagent/idp/AgentIdPClientTest.java @@ -0,0 +1,122 @@ +package ai.sentryagent.idp; + +import ai.sentryagent.idp.models.Agent; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.net.http.HttpClient; +import java.time.Duration; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.junit.jupiter.api.Assertions.*; + +class AgentIdPClientTest { + + private MockServer srv; + + private static final String TOKEN_BODY = + "{\"access_token\":\"integration-token\",\"token_type\":\"Bearer\",\"expires_in\":3600,\"scope\":\"agents:read agents:write tokens:read audit:read\"}"; + + private static final String AGENT_JSON = + "{\"agentId\":\"uuid-1\",\"email\":\"a@b.ai\",\"agentType\":\"screener\",\"version\":\"1.0.0\"," + + "\"capabilities\":[\"read\"],\"owner\":\"team\",\"deploymentEnv\":\"production\"," + + "\"status\":\"active\",\"createdAt\":\"2026-01-01T00:00:00Z\",\"updatedAt\":\"2026-01-01T00:00:00Z\"}"; + + @BeforeEach + void setUp() throws IOException { + srv = new MockServer(); + // Register token endpoint for every test (each test gets a fresh MockServer) + srv.addHandler("/api/v1/token", 200, TOKEN_BODY); + } + + @AfterEach + void tearDown() { srv.stop(); } + + private AgentIdPClient makeClient() { + HttpClient httpClient = HttpClient.newBuilder().connectTimeout(Duration.ofSeconds(5)).build(); + return new AgentIdPClient(srv.baseUrl(), "cid", "secret", "agents:read", httpClient); + } + + @Test + void getAgent_endToEnd() { + srv.addHandler("/api/v1/agents/uuid-1", 200, AGENT_JSON); + AgentIdPClient client = makeClient(); + Agent agent = client.agents().getAgent("uuid-1"); + assertEquals("uuid-1", agent.getAgentId()); + assertEquals("screener", agent.getAgentType()); + } + + @Test + void serviceClients_areAccessible() { + AgentIdPClient client = makeClient(); + assertNotNull(client.agents()); + assertNotNull(client.credentials()); + assertNotNull(client.tokens()); + assertNotNull(client.audit()); + } + + @Test + void clearTokenCache_forcesRefetch() throws IOException { + // Dedicated MockServer so we control the token counter from scratch + MockServer dedicated = new MockServer(); + AtomicInteger tokenCalls = new AtomicInteger(0); + dedicated.addHandler("/api/v1/token", exchange -> { + tokenCalls.incrementAndGet(); + try { + byte[] body = TOKEN_BODY.getBytes(); + exchange.getResponseHeaders().set("Content-Type", "application/json"); + exchange.sendResponseHeaders(200, body.length); + exchange.getResponseBody().write(body); + exchange.getResponseBody().close(); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + dedicated.addHandler("/api/v1/agents/uuid-1", 200, AGENT_JSON); + + try { + HttpClient httpClient = HttpClient.newBuilder().connectTimeout(Duration.ofSeconds(5)).build(); + AgentIdPClient client = new AgentIdPClient(dedicated.baseUrl(), "cid", "secret", "agents:read", httpClient); + client.agents().getAgent("uuid-1"); + client.clearTokenCache(); + client.agents().getAgent("uuid-1"); + assertEquals(2, tokenCalls.get(), "Token should be refetched after clearTokenCache"); + } finally { + dedicated.stop(); + } + } + + @Test + void defaultScope_containsAllFourScopes() throws IOException { + MockServer dedicated = new MockServer(); + StringBuilder capturedBody = new StringBuilder(); + dedicated.addHandler("/api/v1/token", exchange -> { + try { + String body = new String(exchange.getRequestBody().readAllBytes()); + capturedBody.append(body); + byte[] resp = TOKEN_BODY.getBytes(); + exchange.getResponseHeaders().set("Content-Type", "application/json"); + exchange.sendResponseHeaders(200, resp.length); + exchange.getResponseBody().write(resp); + exchange.getResponseBody().close(); + } catch (IOException e) { + throw new RuntimeException(e); + } + }); + dedicated.addHandler("/api/v1/agents/uuid-1", 200, AGENT_JSON); + + try { + HttpClient httpClient = HttpClient.newBuilder().connectTimeout(Duration.ofSeconds(5)).build(); + // Two-arg constructor → default scope applied + AgentIdPClient client = new AgentIdPClient(dedicated.baseUrl(), "cid", "secret", + "agents:read agents:write tokens:read audit:read", httpClient); + client.agents().getAgent("uuid-1"); + String captured = capturedBody.toString(); + assertTrue(captured.contains("agents"), "Scope should be present in token request body: " + captured); + } finally { + dedicated.stop(); + } + } +} diff --git a/sdk-java/src/test/java/ai/sentryagent/idp/AgentIdPExceptionTest.java b/sdk-java/src/test/java/ai/sentryagent/idp/AgentIdPExceptionTest.java new file mode 100644 index 0000000..4230a56 --- /dev/null +++ b/sdk-java/src/test/java/ai/sentryagent/idp/AgentIdPExceptionTest.java @@ -0,0 +1,72 @@ +package ai.sentryagent.idp; + +import org.junit.jupiter.api.Test; +import static org.junit.jupiter.api.Assertions.*; + +class AgentIdPExceptionTest { + + @Test + void constructor_setsFields() { + AgentIdPException ex = new AgentIdPException("AgentNotFoundError", "Not found.", 404); + assertEquals("AgentNotFoundError", ex.getCode()); + assertEquals("Not found.", ex.getMessage()); + assertEquals(404, ex.getHttpStatus()); + assertNull(ex.getDetails()); + } + + @Test + void fromApiError_validBody() { + String body = "{\"code\":\"AgentNotFoundError\",\"message\":\"Not found.\"}"; + AgentIdPException ex = AgentIdPException.fromApiError(body, 404); + assertEquals("AgentNotFoundError", ex.getCode()); + assertEquals("Not found.", ex.getMessage()); + assertEquals(404, ex.getHttpStatus()); + } + + @Test + void fromApiError_emptyCode_fallsBackToUnknown() { + String body = "{\"message\":\"oops\"}"; + AgentIdPException ex = AgentIdPException.fromApiError(body, 503); + assertEquals("UNKNOWN_ERROR", ex.getCode()); + assertEquals(503, ex.getHttpStatus()); + } + + @Test + void fromApiError_unparseable_fallsBackToUnknown() { + AgentIdPException ex = AgentIdPException.fromApiError("not json", 500); + assertEquals("UNKNOWN_ERROR", ex.getCode()); + assertEquals(500, ex.getHttpStatus()); + } + + @Test + void fromOAuth2Error_validBody() { + String body = "{\"error\":\"invalid_client\",\"error_description\":\"Bad credentials.\"}"; + AgentIdPException ex = AgentIdPException.fromOAuth2Error(body, 401); + assertEquals("invalid_client", ex.getCode()); + assertEquals("Bad credentials.", ex.getMessage()); + assertEquals(401, ex.getHttpStatus()); + } + + @Test + void fromOAuth2Error_unparseable_fallsBackToUnknown() { + AgentIdPException ex = AgentIdPException.fromOAuth2Error("garbage", 400); + assertEquals("unknown_error", ex.getCode()); + } + + @Test + void networkError_setsCodeAndCause() { + RuntimeException cause = new RuntimeException("connection refused"); + AgentIdPException ex = AgentIdPException.networkError(cause); + assertEquals("NETWORK_ERROR", ex.getCode()); + assertEquals(0, ex.getHttpStatus()); + assertSame(cause, ex.getCause()); + assertTrue(ex.getMessage().contains("connection refused")); + } + + @Test + void toString_containsCodeAndStatus() { + AgentIdPException ex = new AgentIdPException("CODE", "msg", 400); + assertTrue(ex.toString().contains("CODE")); + assertTrue(ex.toString().contains("400")); + } +} diff --git a/sdk-java/src/test/java/ai/sentryagent/idp/MockServer.java b/sdk-java/src/test/java/ai/sentryagent/idp/MockServer.java new file mode 100644 index 0000000..9c357ac --- /dev/null +++ b/sdk-java/src/test/java/ai/sentryagent/idp/MockServer.java @@ -0,0 +1,73 @@ +package ai.sentryagent.idp; + +import com.sun.net.httpserver.HttpExchange; +import com.sun.net.httpserver.HttpHandler; +import com.sun.net.httpserver.HttpServer; + +import java.io.IOException; +import java.io.OutputStream; +import java.net.InetSocketAddress; +import java.nio.charset.StandardCharsets; + +/** + * Lightweight in-process HTTP server for unit tests. + * Uses the JDK's built-in {@code com.sun.net.httpserver.HttpServer}. + */ +public final class MockServer { + + private final HttpServer server; + private final int port; + + public MockServer() throws IOException { + server = HttpServer.create(new InetSocketAddress(0), 0); + server.start(); + port = server.getAddress().getPort(); + } + + /** Base URL of the mock server (e.g. {@code "http://localhost:PORT"}). */ + public String baseUrl() { + return "http://localhost:" + port; + } + + /** + * Registers a handler for an exact path. + * + * @param path URL path (e.g. {@code "/api/v1/agents"}) + * @param statusCode HTTP status code to return + * @param responseBody JSON body to return (may be null for empty body) + */ + public void addHandler(String path, int statusCode, String responseBody) { + server.createContext(path, new StaticHandler(statusCode, responseBody)); + } + + /** + * Registers a custom handler for an exact path. + */ + public void addHandler(String path, HttpHandler handler) { + server.createContext(path, handler); + } + + /** Stops the server. */ + public void stop() { + server.stop(0); + } + + private static final class StaticHandler implements HttpHandler { + private final int statusCode; + private final byte[] body; + + StaticHandler(int statusCode, String body) { + this.statusCode = statusCode; + this.body = body != null ? body.getBytes(StandardCharsets.UTF_8) : new byte[0]; + } + + @Override + public void handle(HttpExchange exchange) throws IOException { + exchange.getResponseHeaders().set("Content-Type", "application/json"); + exchange.sendResponseHeaders(statusCode, body.length); + try (OutputStream os = exchange.getResponseBody()) { + os.write(body); + } + } + } +} diff --git a/sdk-java/src/test/java/ai/sentryagent/idp/TokenManagerTest.java b/sdk-java/src/test/java/ai/sentryagent/idp/TokenManagerTest.java new file mode 100644 index 0000000..0fe33b6 --- /dev/null +++ b/sdk-java/src/test/java/ai/sentryagent/idp/TokenManagerTest.java @@ -0,0 +1,102 @@ +package ai.sentryagent.idp; + +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.net.http.HttpClient; +import java.time.Duration; +import java.util.concurrent.atomic.AtomicInteger; + +import static org.junit.jupiter.api.Assertions.*; + +class TokenManagerTest { + + private MockServer srv; + private HttpClient httpClient; + + private static final String TOKEN_BODY = """ + {"access_token":"eyJ.abc.def","token_type":"Bearer","expires_in":3600,"scope":"agents:read"} + """; + + @BeforeEach + void setUp() throws IOException { + srv = new MockServer(); + httpClient = HttpClient.newBuilder().connectTimeout(Duration.ofSeconds(5)).build(); + } + + @AfterEach + void tearDown() { srv.stop(); } + + @Test + void getToken_issuesToken() { + srv.addHandler("/api/v1/token", 200, TOKEN_BODY); + TokenManager tm = new TokenManager(srv.baseUrl(), "cid", "secret", "agents:read", httpClient); + assertEquals("eyJ.abc.def", tm.getToken()); + } + + @Test + void getToken_cachesToken() { + AtomicInteger calls = new AtomicInteger(0); + srv.addHandler("/api/v1/token", exchange -> { + calls.incrementAndGet(); + byte[] body = TOKEN_BODY.getBytes(); + exchange.getResponseHeaders().set("Content-Type", "application/json"); + exchange.sendResponseHeaders(200, body.length); + exchange.getResponseBody().write(body); + exchange.getResponseBody().close(); + }); + TokenManager tm = new TokenManager(srv.baseUrl(), "cid", "secret", "agents:read", httpClient); + tm.getToken(); + tm.getToken(); + assertEquals(1, calls.get(), "Should only call the token endpoint once"); + } + + @Test + void getToken_authFailure_throwsAgentIdPException() { + srv.addHandler("/api/v1/token", 401, + "{\"error\":\"invalid_client\",\"error_description\":\"Bad credentials.\"}"); + TokenManager tm = new TokenManager(srv.baseUrl(), "cid", "bad-secret", "agents:read", httpClient); + AgentIdPException ex = assertThrows(AgentIdPException.class, tm::getToken); + assertEquals("invalid_client", ex.getCode()); + assertEquals(401, ex.getHttpStatus()); + } + + @Test + void clearCache_forcesRefetch() { + AtomicInteger calls = new AtomicInteger(0); + srv.addHandler("/api/v1/token", exchange -> { + calls.incrementAndGet(); + byte[] body = TOKEN_BODY.getBytes(); + exchange.getResponseHeaders().set("Content-Type", "application/json"); + exchange.sendResponseHeaders(200, body.length); + exchange.getResponseBody().write(body); + exchange.getResponseBody().close(); + }); + TokenManager tm = new TokenManager(srv.baseUrl(), "cid", "secret", "agents:read", httpClient); + tm.getToken(); + tm.clearCache(); + tm.getToken(); + assertEquals(2, calls.get(), "Should call token endpoint again after clearCache"); + } + + @Test + void getToken_threadSafe() throws InterruptedException { + srv.addHandler("/api/v1/token", 200, TOKEN_BODY); + TokenManager tm = new TokenManager(srv.baseUrl(), "cid", "secret", "agents:read", httpClient); + + Thread[] threads = new Thread[10]; + String[] results = new String[10]; + for (int i = 0; i < threads.length; i++) { + int idx = i; + threads[idx] = new Thread(() -> results[idx] = tm.getToken()); + } + for (Thread t : threads) t.start(); + for (Thread t : threads) t.join(); + + for (String result : results) { + assertEquals("eyJ.abc.def", result); + } + } +} diff --git a/sdk-java/src/test/java/ai/sentryagent/idp/services/AgentRegistryClientTest.java b/sdk-java/src/test/java/ai/sentryagent/idp/services/AgentRegistryClientTest.java new file mode 100644 index 0000000..2d78a59 --- /dev/null +++ b/sdk-java/src/test/java/ai/sentryagent/idp/services/AgentRegistryClientTest.java @@ -0,0 +1,133 @@ +package ai.sentryagent.idp.services; + +import ai.sentryagent.idp.AgentIdPException; +import ai.sentryagent.idp.MockServer; +import ai.sentryagent.idp.internal.HttpHelper; +import ai.sentryagent.idp.models.*; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.net.http.HttpClient; +import java.time.Duration; +import java.util.List; + +import static org.junit.jupiter.api.Assertions.*; + +class AgentRegistryClientTest { + + private MockServer srv; + private AgentRegistryClient client; + + private static final String AGENT_JSON = """ + {"agentId":"uuid-1","email":"a@b.ai","agentType":"screener","version":"1.0.0", + "capabilities":["read"],"owner":"team","deploymentEnv":"production", + "status":"active","createdAt":"2026-01-01T00:00:00Z","updatedAt":"2026-01-01T00:00:00Z"} + """; + + private static final String PAGINATED_AGENTS = """ + {"data":[%s],"total":1,"page":1,"limit":20} + """.formatted(AGENT_JSON.strip()); + + @BeforeEach + void setUp() throws IOException { + srv = new MockServer(); + HttpClient httpClient = HttpClient.newBuilder().connectTimeout(Duration.ofSeconds(5)).build(); + HttpHelper httpHelper = new HttpHelper(httpClient); + client = new AgentRegistryClient(srv.baseUrl(), () -> "test-token", httpHelper); + } + + @AfterEach + void tearDown() { srv.stop(); } + + @Test + void registerAgent_returns201() { + srv.addHandler("/api/v1/agents", 201, AGENT_JSON); + Agent agent = client.registerAgent(RegisterAgentRequest.builder() + .email("a@b.ai").agentType("screener").version("1.0.0") + .capabilities(List.of("read")).owner("team").deploymentEnv("production") + .build()); + assertEquals("uuid-1", agent.getAgentId()); + assertEquals("screener", agent.getAgentType()); + } + + @Test + void listAgents_returnsPaginated() { + srv.addHandler("/api/v1/agents", 200, PAGINATED_AGENTS); + PaginatedAgents result = client.listAgents(null); + assertEquals(1, result.getTotal()); + assertEquals("uuid-1", result.getData().get(0).getAgentId()); + } + + @Test + void getAgent_returnsAgent() { + srv.addHandler("/api/v1/agents/uuid-1", 200, AGENT_JSON); + Agent agent = client.getAgent("uuid-1"); + assertEquals("uuid-1", agent.getAgentId()); + } + + @Test + void getAgent_notFound_throwsAgentIdPException() { + srv.addHandler("/api/v1/agents/bad-id", 404, + "{\"code\":\"AgentNotFoundError\",\"message\":\"Not found.\"}"); + AgentIdPException ex = assertThrows(AgentIdPException.class, () -> client.getAgent("bad-id")); + assertEquals("AgentNotFoundError", ex.getCode()); + assertEquals(404, ex.getHttpStatus()); + } + + @Test + void updateAgent_returnsUpdated() { + srv.addHandler("/api/v1/agents/uuid-1", 200, AGENT_JSON); + Agent agent = client.updateAgent("uuid-1", + UpdateAgentRequest.builder().version("2.0.0").build()); + assertNotNull(agent); + assertEquals("uuid-1", agent.getAgentId()); + } + + @Test + void decommissionAgent_returns204() { + srv.addHandler("/api/v1/agents/uuid-1", 204, null); + assertDoesNotThrow(() -> client.decommissionAgent("uuid-1")); + } + + @Test + void registerAgentAsync_returnsCompletableFuture() throws Exception { + srv.addHandler("/api/v1/agents", 201, AGENT_JSON); + Agent agent = client.registerAgentAsync(RegisterAgentRequest.builder() + .email("a@b.ai").agentType("screener").version("1.0.0") + .capabilities(List.of("read")).owner("team").deploymentEnv("production") + .build()).get(); + assertEquals("uuid-1", agent.getAgentId()); + } + + @Test + void getAgentAsync_returnsCompletableFuture() throws Exception { + srv.addHandler("/api/v1/agents/uuid-1", 200, AGENT_JSON); + Agent agent = client.getAgentAsync("uuid-1").get(); + assertEquals("uuid-1", agent.getAgentId()); + } + + @Test + void listAgentsAsync_withParams() throws Exception { + srv.addHandler("/api/v1/agents", 200, PAGINATED_AGENTS); + PaginatedAgents result = client.listAgentsAsync( + ListAgentsParams.builder().status("active").page(1).limit(20).build() + ).get(); + assertEquals(1, result.getTotal()); + } + + @Test + void decommissionAgentAsync_completesSuccessfully() throws Exception { + srv.addHandler("/api/v1/agents/uuid-1", 204, null); + assertDoesNotThrow(() -> client.decommissionAgentAsync("uuid-1").get()); + } + + @Test + void updateAgentAsync_returnsCompletableFuture() throws Exception { + srv.addHandler("/api/v1/agents/uuid-1", 200, AGENT_JSON); + Agent agent = client.updateAgentAsync("uuid-1", + UpdateAgentRequest.builder().version("2.0.0").build()).get(); + assertEquals("uuid-1", agent.getAgentId()); + } +} diff --git a/sdk-java/src/test/java/ai/sentryagent/idp/services/AuditClientTest.java b/sdk-java/src/test/java/ai/sentryagent/idp/services/AuditClientTest.java new file mode 100644 index 0000000..0776b80 --- /dev/null +++ b/sdk-java/src/test/java/ai/sentryagent/idp/services/AuditClientTest.java @@ -0,0 +1,93 @@ +package ai.sentryagent.idp.services; + +import ai.sentryagent.idp.AgentIdPException; +import ai.sentryagent.idp.MockServer; +import ai.sentryagent.idp.internal.HttpHelper; +import ai.sentryagent.idp.models.*; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.net.http.HttpClient; +import java.time.Duration; + +import static org.junit.jupiter.api.Assertions.*; + +class AuditClientTest { + + private MockServer srv; + private AuditClient client; + + private static final String AUDIT_EVENT = """ + {"eventId":"ev-1","agentId":"uuid-1","action":"token.issued","outcome":"success", + "ipAddress":"1.2.3.4","userAgent":"curl","metadata":{},"timestamp":"2026-01-01T00:00:00Z"} + """; + + private static final String PAGINATED_AUDIT = """ + {"data":[%s],"total":1,"page":1,"limit":20} + """.formatted(AUDIT_EVENT.strip()); + + @BeforeEach + void setUp() throws IOException { + srv = new MockServer(); + HttpClient httpClient = HttpClient.newBuilder().connectTimeout(Duration.ofSeconds(5)).build(); + client = new AuditClient(srv.baseUrl(), () -> "test-token", new HttpHelper(httpClient)); + } + + @AfterEach + void tearDown() { srv.stop(); } + + @Test + void queryAuditLog_returnsPaginated() { + srv.addHandler("/api/v1/audit", 200, PAGINATED_AUDIT); + PaginatedAuditEvents result = client.queryAuditLog(null); + assertEquals(1, result.getTotal()); + assertEquals("ev-1", result.getData().get(0).getEventId()); + } + + @Test + void queryAuditLog_withParams() { + srv.addHandler("/api/v1/audit", 200, PAGINATED_AUDIT); + PaginatedAuditEvents result = client.queryAuditLog( + QueryAuditParams.builder() + .agentId("uuid-1") + .action("token.issued") + .fromDate("2026-01-01") + .build()); + assertEquals(1, result.getTotal()); + } + + @Test + void getAuditEvent_returnsEvent() { + srv.addHandler("/api/v1/audit/ev-1", 200, AUDIT_EVENT); + AuditEvent event = client.getAuditEvent("ev-1"); + assertEquals("ev-1", event.getEventId()); + assertEquals("token.issued", event.getAction()); + assertEquals("success", event.getOutcome()); + } + + @Test + void getAuditEvent_notFound_throwsAgentIdPException() { + srv.addHandler("/api/v1/audit/bad-id", 404, + "{\"code\":\"AuditEventNotFoundError\",\"message\":\"Event not found.\"}"); + AgentIdPException ex = assertThrows(AgentIdPException.class, + () -> client.getAuditEvent("bad-id")); + assertEquals("AuditEventNotFoundError", ex.getCode()); + assertEquals(404, ex.getHttpStatus()); + } + + @Test + void queryAuditLogAsync_returnsPaginated() throws Exception { + srv.addHandler("/api/v1/audit", 200, PAGINATED_AUDIT); + PaginatedAuditEvents result = client.queryAuditLogAsync(null).get(); + assertEquals(1, result.getTotal()); + } + + @Test + void getAuditEventAsync_returnsEvent() throws Exception { + srv.addHandler("/api/v1/audit/ev-1", 200, AUDIT_EVENT); + AuditEvent event = client.getAuditEventAsync("ev-1").get(); + assertEquals("ev-1", event.getEventId()); + } +} diff --git a/sdk-java/src/test/java/ai/sentryagent/idp/services/CredentialClientTest.java b/sdk-java/src/test/java/ai/sentryagent/idp/services/CredentialClientTest.java new file mode 100644 index 0000000..0f06c06 --- /dev/null +++ b/sdk-java/src/test/java/ai/sentryagent/idp/services/CredentialClientTest.java @@ -0,0 +1,116 @@ +package ai.sentryagent.idp.services; + +import ai.sentryagent.idp.AgentIdPException; +import ai.sentryagent.idp.MockServer; +import ai.sentryagent.idp.internal.HttpHelper; +import ai.sentryagent.idp.models.*; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.net.http.HttpClient; +import java.time.Duration; + +import static org.junit.jupiter.api.Assertions.*; + +class CredentialClientTest { + + private MockServer srv; + private CredentialClient client; + + private static final String CRED_JSON = """ + {"credentialId":"cred-1","clientId":"uuid-1","status":"active", + "createdAt":"2026-01-01T00:00:00Z","expiresAt":null,"revokedAt":null} + """; + + private static final String CRED_WITH_SECRET = """ + {"credentialId":"cred-1","clientId":"uuid-1","status":"active", + "createdAt":"2026-01-01T00:00:00Z","expiresAt":null,"revokedAt":null, + "clientSecret":"sk_live_abc"} + """; + + private static final String PAGINATED_CREDS = """ + {"data":[%s],"total":1,"page":1,"limit":20} + """.formatted(CRED_JSON.strip()); + + @BeforeEach + void setUp() throws IOException { + srv = new MockServer(); + HttpClient httpClient = HttpClient.newBuilder().connectTimeout(Duration.ofSeconds(5)).build(); + client = new CredentialClient(srv.baseUrl(), () -> "test-token", new HttpHelper(httpClient)); + } + + @AfterEach + void tearDown() { srv.stop(); } + + @Test + void generateCredential_returnsSecret() { + srv.addHandler("/api/v1/agents/uuid-1/credentials", 201, CRED_WITH_SECRET); + CredentialWithSecret cred = client.generateCredential("uuid-1"); + assertEquals("sk_live_abc", cred.getClientSecret()); + assertEquals("cred-1", cred.getCredentialId()); + } + + @Test + void listCredentials_returnsPaginated() { + srv.addHandler("/api/v1/agents/uuid-1/credentials", 200, PAGINATED_CREDS); + PaginatedCredentials result = client.listCredentials("uuid-1", null, null); + assertEquals(1, result.getTotal()); + assertEquals("cred-1", result.getData().get(0).getCredentialId()); + } + + @Test + void rotateCredential_returnsNewSecret() { + srv.addHandler("/api/v1/agents/uuid-1/credentials/cred-1/rotate", 200, CRED_WITH_SECRET); + CredentialWithSecret cred = client.rotateCredential("uuid-1", "cred-1"); + assertEquals("sk_live_abc", cred.getClientSecret()); + } + + @Test + void revokeCredential_returnsRevoked() { + String revoked = """ + {"credentialId":"cred-1","clientId":"uuid-1","status":"revoked", + "createdAt":"2026-01-01T00:00:00Z","expiresAt":null, + "revokedAt":"2026-01-02T00:00:00Z"} + """; + srv.addHandler("/api/v1/agents/uuid-1/credentials/cred-1", 200, revoked); + Credential cred = client.revokeCredential("uuid-1", "cred-1"); + assertEquals("revoked", cred.getStatus()); + } + + @Test + void generateCredential_error_throwsAgentIdPException() { + srv.addHandler("/api/v1/agents/bad/credentials", 404, + "{\"code\":\"AgentNotFoundError\",\"message\":\"Not found.\"}"); + AgentIdPException ex = assertThrows(AgentIdPException.class, + () -> client.generateCredential("bad")); + assertEquals(404, ex.getHttpStatus()); + } + + @Test + void generateCredentialAsync_returnsCompletableFuture() throws Exception { + srv.addHandler("/api/v1/agents/uuid-1/credentials", 201, CRED_WITH_SECRET); + CredentialWithSecret cred = client.generateCredentialAsync("uuid-1").get(); + assertEquals("sk_live_abc", cred.getClientSecret()); + } + + @Test + void rotateCredentialAsync_returnsCompletableFuture() throws Exception { + srv.addHandler("/api/v1/agents/uuid-1/credentials/cred-1/rotate", 200, CRED_WITH_SECRET); + CredentialWithSecret cred = client.rotateCredentialAsync("uuid-1", "cred-1").get(); + assertEquals("sk_live_abc", cred.getClientSecret()); + } + + @Test + void revokeCredentialAsync_returnsCompletableFuture() throws Exception { + String revoked = """ + {"credentialId":"cred-1","clientId":"uuid-1","status":"revoked", + "createdAt":"2026-01-01T00:00:00Z","expiresAt":null, + "revokedAt":"2026-01-02T00:00:00Z"} + """; + srv.addHandler("/api/v1/agents/uuid-1/credentials/cred-1", 200, revoked); + Credential cred = client.revokeCredentialAsync("uuid-1", "cred-1").get(); + assertEquals("revoked", cred.getStatus()); + } +} diff --git a/sdk-java/src/test/java/ai/sentryagent/idp/services/TokenClientTest.java b/sdk-java/src/test/java/ai/sentryagent/idp/services/TokenClientTest.java new file mode 100644 index 0000000..766a504 --- /dev/null +++ b/sdk-java/src/test/java/ai/sentryagent/idp/services/TokenClientTest.java @@ -0,0 +1,86 @@ +package ai.sentryagent.idp.services; + +import ai.sentryagent.idp.AgentIdPException; +import ai.sentryagent.idp.MockServer; +import ai.sentryagent.idp.models.IntrospectResponse; +import org.junit.jupiter.api.AfterEach; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; + +import java.io.IOException; +import java.net.http.HttpClient; +import java.time.Duration; + +import static org.junit.jupiter.api.Assertions.*; + +class TokenClientTest { + + private MockServer srv; + private TokenClient client; + + @BeforeEach + void setUp() throws IOException { + srv = new MockServer(); + HttpClient httpClient = HttpClient.newBuilder().connectTimeout(Duration.ofSeconds(5)).build(); + client = new TokenClient(srv.baseUrl(), () -> "test-token", httpClient); + } + + @AfterEach + void tearDown() { srv.stop(); } + + @Test + void introspectToken_active() { + srv.addHandler("/api/v1/token/introspect", 200, + "{\"active\":true,\"sub\":\"uuid-1\",\"exp\":9999999999}"); + IntrospectResponse result = client.introspectToken("some-token"); + assertTrue(result.isActive()); + assertEquals("uuid-1", result.getSub()); + } + + @Test + void introspectToken_inactive() { + srv.addHandler("/api/v1/token/introspect", 200, "{\"active\":false}"); + IntrospectResponse result = client.introspectToken("expired-token"); + assertFalse(result.isActive()); + assertNull(result.getSub()); + } + + @Test + void revokeToken_succeeds() { + srv.addHandler("/api/v1/token/revoke", 200, "{}"); + assertDoesNotThrow(() -> client.revokeToken("some-token")); + } + + @Test + void introspectToken_error_throwsAgentIdPException() { + srv.addHandler("/api/v1/token/introspect", 401, + "{\"code\":\"UnauthorizedError\",\"message\":\"Invalid token.\"}"); + AgentIdPException ex = assertThrows(AgentIdPException.class, + () -> client.introspectToken("bad-token")); + assertEquals(401, ex.getHttpStatus()); + assertEquals("UnauthorizedError", ex.getCode()); + } + + @Test + void revokeToken_error_throwsAgentIdPException() { + srv.addHandler("/api/v1/token/revoke", 401, + "{\"code\":\"UnauthorizedError\",\"message\":\"Invalid token.\"}"); + AgentIdPException ex = assertThrows(AgentIdPException.class, + () -> client.revokeToken("bad-token")); + assertEquals(401, ex.getHttpStatus()); + } + + @Test + void introspectTokenAsync_active() throws Exception { + srv.addHandler("/api/v1/token/introspect", 200, + "{\"active\":true,\"sub\":\"uuid-1\",\"exp\":9999999999}"); + IntrospectResponse result = client.introspectTokenAsync("some-token").get(); + assertTrue(result.isActive()); + } + + @Test + void revokeTokenAsync_succeeds() throws Exception { + srv.addHandler("/api/v1/token/revoke", 200, "{}"); + assertDoesNotThrow(() -> client.revokeTokenAsync("some-token").get()); + } +} diff --git a/sdk-python/.coverage b/sdk-python/.coverage new file mode 100644 index 0000000..14fabff Binary files /dev/null and b/sdk-python/.coverage differ diff --git a/sdk-python/README.md b/sdk-python/README.md new file mode 100644 index 0000000..fe1ebda --- /dev/null +++ b/sdk-python/README.md @@ -0,0 +1,214 @@ +# sentryagent-idp + +Python SDK for the [SentryAgent.ai AgentIdP](https://sentryagent.ai) — the open-source Identity Provider for AI agents. + +Handles token acquisition and caching automatically. Covers all 14 AgentIdP API endpoints. Provides both synchronous (`requests`) and asynchronous (`httpx`) clients. + +--- + +## Requirements + +- Python 3.9 or later +- A running AgentIdP server +- A registered agent with a valid `client_id` and `client_secret` + +--- + +## Installation + +```bash +pip install sentryagent-idp +``` + +--- + +## Quick start + +### Synchronous + +```python +from sentryagent_idp import AgentIdPClient + +client = AgentIdPClient( + base_url="http://localhost:3000", + client_id="your-agent-id", # the agent's agentId (UUID) + client_secret="your-client-secret", +) + +# List agents — token is acquired and cached automatically +result = client.agents.list_agents() +print(result.data) +``` + +### Asynchronous + +```python +import asyncio +from sentryagent_idp import AsyncAgentIdPClient + +async def main() -> None: + client = AsyncAgentIdPClient( + base_url="http://localhost:3000", + client_id="your-agent-id", + client_secret="your-client-secret", + ) + result = await client.agents.list_agents() + print(result.data) + +asyncio.run(main()) +``` + +--- + +## Configuration + +```python +client = AgentIdPClient( + base_url="http://localhost:3000", + client_id="a1b2c3d4-e5f6-7890-abcd-ef1234567890", + client_secret="your-client-secret", + # Optional: restrict scopes. Defaults to all four. + scopes=["agents:read", "tokens:read"], +) +``` + +| Parameter | Required | Description | +|-----------|----------|-------------| +| `base_url` | Yes | Base URL of the AgentIdP server | +| `client_id` | Yes | The agent's `agentId` (UUID) | +| `client_secret` | Yes | The credential secret | +| `scopes` | No | OAuth 2.0 scopes to request. Defaults to all four. | + +--- + +## Token management + +The SDK fetches and caches access tokens automatically. A new token is requested when the cached token is within 60 seconds of expiry. + +```python +# Force a fresh token on the next request (e.g. after rotating credentials) +client.clear_token_cache() +``` + +--- + +## Agent Registry + +```python +from sentryagent_idp import RegisterAgentRequest, UpdateAgentRequest + +# Register a new agent +agent = client.agents.register_agent(RegisterAgentRequest( + email="classifier-v2@myorg.ai", + agent_type="classifier", + version="2.0.0", + capabilities=["text-classification", "sentiment-analysis"], + owner="platform-team", + deployment_env="production", +)) +print(agent.agent_id) # UUID assigned by AgentIdP + +# List agents +result = client.agents.list_agents(status="active", limit=20) + +# Get a single agent +agent = client.agents.get_agent("a1b2c3d4-...") + +# Update an agent +updated = client.agents.update_agent("a1b2c3d4-...", UpdateAgentRequest( + version="2.1.0", + capabilities=["text-classification", "sentiment-analysis", "intent-detection"], +)) + +# Decommission (irreversible) +client.agents.decommission_agent("a1b2c3d4-...") +``` + +--- + +## Credentials + +```python +# Generate a credential — client_secret shown once, store it securely +cred = client.credentials.generate_credential("a1b2c3d4-...") +print(cred.client_secret) # only available here + +# List credentials +result = client.credentials.list_credentials("a1b2c3d4-...") + +# Rotate — same credential_id, new secret, old secret immediately invalid +rotated = client.credentials.rotate_credential("a1b2c3d4-...", "cred-uuid") +print(rotated.client_secret) # new secret — store immediately + +# Revoke +client.credentials.revoke_credential("a1b2c3d4-...", "cred-uuid") +``` + +--- + +## Token operations + +```python +# Introspect — check whether a token is active +result = client.tokens.introspect_token(some_token) +if result.active: + print(f"Token valid, expires at {result.exp}") +else: + print("Token is expired or revoked") + +# Revoke — immediately invalidates the token +client.tokens.revoke_token(some_token) +``` + +--- + +## Audit log + +```python +# Query audit events +result = client.audit.query_audit_log( + agent_id="a1b2c3d4-...", + action="token.issued", + outcome="success", + from_date="2026-03-01T00:00:00Z", + to_date="2026-03-31T23:59:59Z", + limit=50, +) + +# Get a single event +event = client.audit.get_audit_event("event-uuid") +``` + +--- + +## Error handling + +All API errors are raised as `AgentIdPError`: + +```python +from sentryagent_idp import AgentIdPClient, AgentIdPError + +try: + client.agents.get_agent("non-existent-id") +except AgentIdPError as err: + print(err.code) # e.g. "AgentNotFoundError" + print(err.http_status) # e.g. 404 + print(str(err)) # human-readable description +``` + +--- + +## Available scopes + +| Scope | What it allows | +|-------|----------------| +| `agents:read` | Read agent records | +| `agents:write` | Create, update, decommission agents | +| `tokens:read` | Introspect tokens | +| `audit:read` | Query audit logs | + +--- + +## License + +Apache 2.0 — see `LICENSE` in the repository root. diff --git a/sdk-python/pyproject.toml b/sdk-python/pyproject.toml new file mode 100644 index 0000000..e33e351 --- /dev/null +++ b/sdk-python/pyproject.toml @@ -0,0 +1,61 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "sentryagent-idp" +version = "1.0.0" +description = "Python SDK for the SentryAgent.ai AgentIdP — Identity Provider for AI agents" +readme = "README.md" +license = { text = "Apache-2.0" } +requires-python = ">=3.9" +keywords = ["ai", "agents", "identity", "oauth2", "agntcy"] +classifiers = [ + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Topic :: Internet :: WWW/HTTP", + "Topic :: Security", + "Typing :: Typed", +] +dependencies = [ + "requests>=2.28.0", + "httpx>=0.25.0", +] + +[project.optional-dependencies] +dev = [ + "mypy>=1.8.0", + "pytest>=7.4.0", + "pytest-asyncio>=0.23.0", + "pytest-cov>=4.1.0", + "respx>=0.20.0", + "responses>=0.24.0", + "types-requests>=2.31.0", +] + +[tool.hatch.build.targets.wheel] +packages = ["src/sentryagent_idp"] + +[tool.mypy] +strict = true +python_version = "3.9" +warn_return_any = true +warn_unused_configs = true +disallow_untyped_defs = true +disallow_incomplete_defs = true +check_untyped_defs = true +disallow_any_generics = true +no_implicit_optional = true +warn_redundant_casts = true +warn_unused_ignores = true + +[tool.pytest.ini_options] +asyncio_mode = "auto" +testpaths = ["tests"] +addopts = "--cov=src/sentryagent_idp --cov-report=term-missing --cov-fail-under=80" diff --git a/sdk-python/src/sentryagent_idp/__init__.py b/sdk-python/src/sentryagent_idp/__init__.py new file mode 100644 index 0000000..36d5648 --- /dev/null +++ b/sdk-python/src/sentryagent_idp/__init__.py @@ -0,0 +1,82 @@ +""" +SentryAgent.ai AgentIdP Python SDK. + +Provides synchronous and asynchronous clients for the AgentIdP API. + +Example (sync):: + + from sentryagent_idp import AgentIdPClient + + client = AgentIdPClient( + base_url="http://localhost:3000", + client_id="your-agent-id", + client_secret="your-client-secret", + ) + result = client.agents.list_agents() + +Example (async):: + + from sentryagent_idp import AsyncAgentIdPClient + + client = AsyncAgentIdPClient( + base_url="http://localhost:3000", + client_id="your-agent-id", + client_secret="your-client-secret", + ) + result = await client.agents.list_agents() +""" + +from .client import AgentIdPClient, AsyncAgentIdPClient +from .errors import AgentIdPError +from .token_manager import TokenManager +from .async_token_manager import AsyncTokenManager +from .types import ( + Agent, + AgentStatus, + AgentType, + AuditAction, + AuditEvent, + AuditOutcome, + Credential, + CredentialStatus, + CredentialWithSecret, + DeploymentEnv, + IntrospectResponse, + OAuthScope, + PaginatedAgents, + PaginatedAuditEvents, + PaginatedCredentials, + RegisterAgentRequest, + TokenResponse, + UpdateAgentRequest, +) + +__all__ = [ + # Clients + "AgentIdPClient", + "AsyncAgentIdPClient", + # Errors + "AgentIdPError", + # Token managers (for advanced use) + "TokenManager", + "AsyncTokenManager", + # Types + "Agent", + "AgentStatus", + "AgentType", + "AuditAction", + "AuditEvent", + "AuditOutcome", + "Credential", + "CredentialStatus", + "CredentialWithSecret", + "DeploymentEnv", + "IntrospectResponse", + "OAuthScope", + "PaginatedAgents", + "PaginatedAuditEvents", + "PaginatedCredentials", + "RegisterAgentRequest", + "TokenResponse", + "UpdateAgentRequest", +] diff --git a/sdk-python/src/sentryagent_idp/__pycache__/__init__.cpython-312.pyc b/sdk-python/src/sentryagent_idp/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..2affff3 Binary files /dev/null and b/sdk-python/src/sentryagent_idp/__pycache__/__init__.cpython-312.pyc differ diff --git a/sdk-python/src/sentryagent_idp/__pycache__/_request.cpython-312.pyc b/sdk-python/src/sentryagent_idp/__pycache__/_request.cpython-312.pyc new file mode 100644 index 0000000..a125f2c Binary files /dev/null and b/sdk-python/src/sentryagent_idp/__pycache__/_request.cpython-312.pyc differ diff --git a/sdk-python/src/sentryagent_idp/__pycache__/async_token_manager.cpython-312.pyc b/sdk-python/src/sentryagent_idp/__pycache__/async_token_manager.cpython-312.pyc new file mode 100644 index 0000000..890eef8 Binary files /dev/null and b/sdk-python/src/sentryagent_idp/__pycache__/async_token_manager.cpython-312.pyc differ diff --git a/sdk-python/src/sentryagent_idp/__pycache__/client.cpython-312.pyc b/sdk-python/src/sentryagent_idp/__pycache__/client.cpython-312.pyc new file mode 100644 index 0000000..9619ee3 Binary files /dev/null and b/sdk-python/src/sentryagent_idp/__pycache__/client.cpython-312.pyc differ diff --git a/sdk-python/src/sentryagent_idp/__pycache__/errors.cpython-312.pyc b/sdk-python/src/sentryagent_idp/__pycache__/errors.cpython-312.pyc new file mode 100644 index 0000000..f3ea3ad Binary files /dev/null and b/sdk-python/src/sentryagent_idp/__pycache__/errors.cpython-312.pyc differ diff --git a/sdk-python/src/sentryagent_idp/__pycache__/token_manager.cpython-312.pyc b/sdk-python/src/sentryagent_idp/__pycache__/token_manager.cpython-312.pyc new file mode 100644 index 0000000..aa7ee43 Binary files /dev/null and b/sdk-python/src/sentryagent_idp/__pycache__/token_manager.cpython-312.pyc differ diff --git a/sdk-python/src/sentryagent_idp/__pycache__/types.cpython-312.pyc b/sdk-python/src/sentryagent_idp/__pycache__/types.cpython-312.pyc new file mode 100644 index 0000000..ca4230e Binary files /dev/null and b/sdk-python/src/sentryagent_idp/__pycache__/types.cpython-312.pyc differ diff --git a/sdk-python/src/sentryagent_idp/_request.py b/sdk-python/src/sentryagent_idp/_request.py new file mode 100644 index 0000000..2fcb3f4 --- /dev/null +++ b/sdk-python/src/sentryagent_idp/_request.py @@ -0,0 +1,127 @@ +""" +Internal HTTP request helpers shared by all service clients. +Not part of the public API. +""" + +from __future__ import annotations + +from typing import Any, Callable, Dict, Optional + +import requests +import httpx + +from .errors import AgentIdPError + + +def sync_request( + method: str, + base_url: str, + path: str, + token: str, + body: Optional[Any] = None, + params: Optional[Dict[str, Any]] = None, +) -> Any: + """ + Make a synchronous authenticated JSON request to the AgentIdP API. + + Args: + method: HTTP method (GET, POST, PATCH, DELETE). + base_url: AgentIdP base URL. + path: API path (e.g. ``/api/v1/agents``). + token: Bearer access token. + body: Optional request body (serialised as JSON). + params: Optional query parameters (None values are excluded). + + Returns: + Parsed JSON response body, or None for 204 responses. + + Raises: + AgentIdPError: On any API or network failure. + """ + url = base_url.rstrip("/") + path + headers = { + "Authorization": f"Bearer {token}", + "Accept": "application/json", + } + filtered_params: Optional[Dict[str, str]] = ( + {k: str(v) for k, v in params.items() if v is not None} + if params + else None + ) + + try: + response = requests.request( + method=method, + url=url, + headers=headers, + json=body, + params=filtered_params, + timeout=30, + ) + except requests.RequestException as exc: + raise AgentIdPError.network_error(exc) from exc + + if response.status_code == 204: + return None + + resp_body = response.json() if response.content else {} + if not response.ok: + raise AgentIdPError.from_api_error(resp_body, response.status_code) + return resp_body + + +async def async_request( + method: str, + base_url: str, + path: str, + token: str, + body: Optional[Any] = None, + params: Optional[Dict[str, Any]] = None, +) -> Any: + """ + Make an asynchronous authenticated JSON request to the AgentIdP API. + + Args: + method: HTTP method (GET, POST, PATCH, DELETE). + base_url: AgentIdP base URL. + path: API path. + token: Bearer access token. + body: Optional request body (serialised as JSON). + params: Optional query parameters (None values are excluded). + + Returns: + Parsed JSON response body, or None for 204 responses. + + Raises: + AgentIdPError: On any API or network failure. + """ + url = base_url.rstrip("/") + path + headers = { + "Authorization": f"Bearer {token}", + "Accept": "application/json", + } + filtered_params: Optional[Dict[str, str]] = ( + {k: str(v) for k, v in params.items() if v is not None} + if params + else None + ) + + try: + async with httpx.AsyncClient(timeout=30) as client: + response = await client.request( + method=method, + url=url, + headers=headers, + json=body, + params=filtered_params, + ) + except httpx.RequestError as exc: + raise AgentIdPError.network_error(exc) from exc + + if response.status_code == 204: + return None + + resp_body = response.json() if response.content else {} + if not response.is_success: + raise AgentIdPError.from_api_error(resp_body, response.status_code) + return resp_body diff --git a/sdk-python/src/sentryagent_idp/async_token_manager.py b/sdk-python/src/sentryagent_idp/async_token_manager.py new file mode 100644 index 0000000..0173531 --- /dev/null +++ b/sdk-python/src/sentryagent_idp/async_token_manager.py @@ -0,0 +1,117 @@ +""" +Asynchronous TokenManager — handles OAuth 2.0 token acquisition, caching, and refresh. +Uses httpx for async HTTP. Tokens are re-issued automatically when within 60 seconds of expiry. +""" + +from __future__ import annotations + +import asyncio +import time +from dataclasses import dataclass +from typing import Optional + +import httpx + +from .errors import AgentIdPError +from .types import TokenResponse + +#: Seconds before expiry at which a token refresh is triggered. +REFRESH_BUFFER_SECONDS = 60 + + +@dataclass +class _CachedToken: + access_token: str + expires_at: float # Unix timestamp (seconds) + + +class AsyncTokenManager: + """ + Asyncio-safe asynchronous token manager. + + Acquires and caches OAuth 2.0 access tokens. Automatically refreshes + the token when it is within :data:`REFRESH_BUFFER_SECONDS` of expiry. + + Args: + base_url: AgentIdP server base URL (e.g. ``http://localhost:3000``). + client_id: The agent's ``agentId`` (UUID). + client_secret: The agent's credential secret. + scopes: Space-separated OAuth 2.0 scopes to request. + """ + + def __init__( + self, + base_url: str, + client_id: str, + client_secret: str, + scopes: str, + ) -> None: + self._base_url = base_url.rstrip("/") + self._client_id = client_id + self._client_secret = client_secret + self._scopes = scopes + self._cached: Optional[_CachedToken] = None + self._lock: Optional[asyncio.Lock] = None + + def _get_lock(self) -> asyncio.Lock: + """Lazily create the asyncio.Lock on first use (supports different event loops).""" + if self._lock is None: + self._lock = asyncio.Lock() + return self._lock + + async def get_token(self) -> str: + """ + Return a valid access token, refreshing if necessary. + + Returns: + A valid JWT access token string. + + Raises: + AgentIdPError: If token acquisition fails. + """ + async with self._get_lock(): + now = time.time() + if ( + self._cached is not None + and self._cached.expires_at - now > REFRESH_BUFFER_SECONDS + ): + return self._cached.access_token + + token_response = await self._issue_token() + self._cached = _CachedToken( + access_token=token_response.access_token, + expires_at=now + token_response.expires_in, + ) + return self._cached.access_token + + def clear_cache(self) -> None: + """Clear the cached token, forcing re-acquisition on the next call.""" + self._cached = None + + async def _issue_token(self) -> TokenResponse: + """ + POST /api/v1/token to obtain a new access token. + + Returns: + TokenResponse from the API. + + Raises: + AgentIdPError: On authentication failure or network error. + """ + url = f"{self._base_url}/api/v1/token" + data = { + "grant_type": "client_credentials", + "client_id": self._client_id, + "client_secret": self._client_secret, + "scope": self._scopes, + } + try: + async with httpx.AsyncClient(timeout=30) as client: + response = await client.post(url, data=data) + except httpx.RequestError as exc: + raise AgentIdPError.network_error(exc) from exc + + body = response.json() + if not response.is_success: + raise AgentIdPError.from_oauth2_error(body, response.status_code) + return TokenResponse.from_dict(body) diff --git a/sdk-python/src/sentryagent_idp/client.py b/sdk-python/src/sentryagent_idp/client.py new file mode 100644 index 0000000..7f1390d --- /dev/null +++ b/sdk-python/src/sentryagent_idp/client.py @@ -0,0 +1,128 @@ +""" +Top-level client for the SentryAgent.ai AgentIdP API. +Provides both synchronous (AgentIdPClient) and asynchronous (AsyncAgentIdPClient) variants. +""" + +from __future__ import annotations + +from typing import List, Optional + +from .token_manager import TokenManager +from .async_token_manager import AsyncTokenManager +from .services.agents import AgentRegistryClient, AsyncAgentRegistryClient +from .services.credentials import CredentialClient, AsyncCredentialClient +from .services.token import TokenClient, AsyncTokenClient +from .services.audit import AuditClient, AsyncAuditClient +from .types import OAuthScope + +_DEFAULT_SCOPES: List[OAuthScope] = [ + "agents:read", + "agents:write", + "tokens:read", + "audit:read", +] + + +class AgentIdPClient: + """ + Synchronous client for the SentryAgent.ai AgentIdP API. + + Composes all service clients under a single entry point. Handles token + acquisition and caching automatically via :class:`~.token_manager.TokenManager`. + + Args: + base_url: Base URL of the AgentIdP server (e.g. ``http://localhost:3000``). + client_id: The agent's ``agentId`` (UUID). + client_secret: The credential secret. + scopes: OAuth 2.0 scopes to request. Defaults to all four scopes. + + Example:: + + from sentryagent_idp import AgentIdPClient, RegisterAgentRequest + + client = AgentIdPClient( + base_url="http://localhost:3000", + client_id="your-agent-id", + client_secret="your-client-secret", + ) + agents = client.agents.list_agents() + """ + + def __init__( + self, + base_url: str, + client_id: str, + client_secret: str, + scopes: Optional[List[OAuthScope]] = None, + ) -> None: + scope_str = " ".join(scopes if scopes is not None else _DEFAULT_SCOPES) + self._token_manager = TokenManager(base_url, client_id, client_secret, scope_str) + + get_token = self._token_manager.get_token + + #: Agent Registry operations: register, list, get, update, decommission. + self.agents = AgentRegistryClient(base_url, get_token) + #: Credential operations: generate, list, rotate, revoke. + self.credentials = CredentialClient(base_url, get_token) + #: Token operations: introspect, revoke. + self.tokens = TokenClient(base_url, get_token) + #: Audit log operations: query, get event. + self.audit = AuditClient(base_url, get_token) + + def clear_token_cache(self) -> None: + """ + Clear the cached access token. + The next API call will request a new token. Use this after rotating credentials. + """ + self._token_manager.clear_cache() + + +class AsyncAgentIdPClient: + """ + Asynchronous client for the SentryAgent.ai AgentIdP API. + + All methods are coroutines and must be awaited. Token acquisition and caching + are handled automatically via :class:`~.async_token_manager.AsyncTokenManager`. + + Args: + base_url: Base URL of the AgentIdP server. + client_id: The agent's ``agentId`` (UUID). + client_secret: The credential secret. + scopes: OAuth 2.0 scopes to request. Defaults to all four scopes. + + Example:: + + from sentryagent_idp import AsyncAgentIdPClient + + client = AsyncAgentIdPClient( + base_url="http://localhost:3000", + client_id="your-agent-id", + client_secret="your-client-secret", + ) + agents = await client.agents.list_agents() + """ + + def __init__( + self, + base_url: str, + client_id: str, + client_secret: str, + scopes: Optional[List[OAuthScope]] = None, + ) -> None: + scope_str = " ".join(scopes if scopes is not None else _DEFAULT_SCOPES) + self._token_manager = AsyncTokenManager(base_url, client_id, client_secret, scope_str) + + get_token = self._token_manager.get_token + + #: Agent Registry operations (async). + self.agents = AsyncAgentRegistryClient(base_url, get_token) + #: Credential operations (async). + self.credentials = AsyncCredentialClient(base_url, get_token) + #: Token operations (async). + self.tokens = AsyncTokenClient(base_url, get_token) + #: Audit log operations (async). + self.audit = AsyncAuditClient(base_url, get_token) + + def clear_token_cache(self) -> None: + """Clear the cached access token.""" + self._token_manager.clear_cache() diff --git a/sdk-python/src/sentryagent_idp/errors.py b/sdk-python/src/sentryagent_idp/errors.py new file mode 100644 index 0000000..8cee647 --- /dev/null +++ b/sdk-python/src/sentryagent_idp/errors.py @@ -0,0 +1,108 @@ +""" +Error types for the SentryAgent.ai AgentIdP Python SDK. +All API failures are raised as AgentIdPError — never as raw requests/httpx exceptions. +""" + +from __future__ import annotations + +from typing import Any, Dict, Optional + + +class AgentIdPError(Exception): + """ + Typed exception raised for all AgentIdP API failures. + + Attributes: + code: Machine-readable error code from the API (e.g. ``AgentNotFoundError``). + http_status: HTTP status code of the failed response. + details: Optional structured details from the API error response. + """ + + def __init__( + self, + code: str, + message: str, + http_status: int, + details: Optional[Dict[str, Any]] = None, + ) -> None: + super().__init__(message) + self.code = code + self.http_status = http_status + self.details = details + + def __repr__(self) -> str: + return ( + f"AgentIdPError(code={self.code!r}, " + f"http_status={self.http_status}, " + f"message={str(self)!r})" + ) + + @classmethod + def from_api_error( + cls, body: Any, http_status: int + ) -> "AgentIdPError": + """ + Create an AgentIdPError from a standard API error response body. + + Args: + body: Parsed response body (dict or unknown). + http_status: HTTP status code. + + Returns: + AgentIdPError instance. + """ + if isinstance(body, dict) and "code" in body and "message" in body: + return cls( + code=str(body["code"]), + message=str(body["message"]), + http_status=http_status, + details=body.get("details"), + ) + return cls( + code="UNKNOWN_ERROR", + message=str(body), + http_status=http_status, + ) + + @classmethod + def from_oauth2_error( + cls, body: Any, http_status: int + ) -> "AgentIdPError": + """ + Create an AgentIdPError from an OAuth 2.0 error response body. + + Args: + body: Parsed response body. + http_status: HTTP status code. + + Returns: + AgentIdPError instance. + """ + if isinstance(body, dict): + return cls( + code=str(body.get("error", "unknown_error")), + message=str(body.get("error_description", "Token request failed.")), + http_status=http_status, + ) + return cls( + code="unknown_error", + message=str(body), + http_status=http_status, + ) + + @classmethod + def network_error(cls, cause: Exception) -> "AgentIdPError": + """ + Create an AgentIdPError for a network-level failure (no HTTP response). + + Args: + cause: The underlying exception. + + Returns: + AgentIdPError with http_status=0. + """ + return cls( + code="NETWORK_ERROR", + message=f"Network error: {cause}", + http_status=0, + ) diff --git a/sdk-python/src/sentryagent_idp/services/__init__.py b/sdk-python/src/sentryagent_idp/services/__init__.py new file mode 100644 index 0000000..a70b302 --- /dev/null +++ b/sdk-python/src/sentryagent_idp/services/__init__.py @@ -0,0 +1 @@ +# Services package diff --git a/sdk-python/src/sentryagent_idp/services/__pycache__/__init__.cpython-312.pyc b/sdk-python/src/sentryagent_idp/services/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..3673052 Binary files /dev/null and b/sdk-python/src/sentryagent_idp/services/__pycache__/__init__.cpython-312.pyc differ diff --git a/sdk-python/src/sentryagent_idp/services/__pycache__/agents.cpython-312.pyc b/sdk-python/src/sentryagent_idp/services/__pycache__/agents.cpython-312.pyc new file mode 100644 index 0000000..e124b29 Binary files /dev/null and b/sdk-python/src/sentryagent_idp/services/__pycache__/agents.cpython-312.pyc differ diff --git a/sdk-python/src/sentryagent_idp/services/__pycache__/audit.cpython-312.pyc b/sdk-python/src/sentryagent_idp/services/__pycache__/audit.cpython-312.pyc new file mode 100644 index 0000000..d0213f5 Binary files /dev/null and b/sdk-python/src/sentryagent_idp/services/__pycache__/audit.cpython-312.pyc differ diff --git a/sdk-python/src/sentryagent_idp/services/__pycache__/credentials.cpython-312.pyc b/sdk-python/src/sentryagent_idp/services/__pycache__/credentials.cpython-312.pyc new file mode 100644 index 0000000..9b177b9 Binary files /dev/null and b/sdk-python/src/sentryagent_idp/services/__pycache__/credentials.cpython-312.pyc differ diff --git a/sdk-python/src/sentryagent_idp/services/__pycache__/token.cpython-312.pyc b/sdk-python/src/sentryagent_idp/services/__pycache__/token.cpython-312.pyc new file mode 100644 index 0000000..86f95f0 Binary files /dev/null and b/sdk-python/src/sentryagent_idp/services/__pycache__/token.cpython-312.pyc differ diff --git a/sdk-python/src/sentryagent_idp/services/agents.py b/sdk-python/src/sentryagent_idp/services/agents.py new file mode 100644 index 0000000..c2ccaf8 --- /dev/null +++ b/sdk-python/src/sentryagent_idp/services/agents.py @@ -0,0 +1,202 @@ +""" +Agent Registry service clients — sync and async. +Covers all five agent endpoints: register, list, get, update, decommission. +""" + +from __future__ import annotations + +from typing import Any, Callable, Coroutine, Dict, Optional + +from .._request import sync_request, async_request +from ..types import ( + Agent, + AgentStatus, + AgentType, + PaginatedAgents, + RegisterAgentRequest, + UpdateAgentRequest, +) + + +class AgentRegistryClient: + """ + Synchronous client for the Agent Registry service. + + Args: + base_url: AgentIdP server base URL. + get_token: Callable that returns a valid Bearer token. + """ + + def __init__( + self, + base_url: str, + get_token: Callable[[], str], + ) -> None: + self._base_url = base_url + self._get_token = get_token + + def register_agent(self, request: RegisterAgentRequest) -> Agent: + """ + Register a new AI agent. + + Args: + request: Agent registration parameters. + + Returns: + The created Agent record. + + Raises: + AgentIdPError: On API or network failure. + """ + data = sync_request( + "POST", self._base_url, "/api/v1/agents", + self._get_token(), body=request.to_dict(), + ) + return Agent.from_dict(data) + + def list_agents( + self, + status: Optional[AgentStatus] = None, + agent_type: Optional[AgentType] = None, + page: int = 1, + limit: int = 20, + ) -> PaginatedAgents: + """ + List all registered agents with optional filters. + + Args: + status: Filter by lifecycle status. + agent_type: Filter by agent type. + page: Page number (1-based). + limit: Results per page. + + Returns: + PaginatedAgents response. + + Raises: + AgentIdPError: On API or network failure. + """ + data = sync_request( + "GET", self._base_url, "/api/v1/agents", + self._get_token(), + params={"status": status, "agentType": agent_type, "page": page, "limit": limit}, + ) + return PaginatedAgents.from_dict(data) + + def get_agent(self, agent_id: str) -> Agent: + """ + Get a single agent by its agentId. + + Args: + agent_id: The agent UUID. + + Returns: + Agent record. + + Raises: + AgentIdPError: If agent not found or network failure. + """ + data = sync_request( + "GET", self._base_url, f"/api/v1/agents/{agent_id}", + self._get_token(), + ) + return Agent.from_dict(data) + + def update_agent(self, agent_id: str, request: UpdateAgentRequest) -> Agent: + """ + Update mutable fields on an existing agent. + + Args: + agent_id: The agent UUID. + request: Fields to update. + + Returns: + Updated Agent record. + + Raises: + AgentIdPError: On API or network failure. + """ + data = sync_request( + "PATCH", self._base_url, f"/api/v1/agents/{agent_id}", + self._get_token(), body=request.to_dict(), + ) + return Agent.from_dict(data) + + def decommission_agent(self, agent_id: str) -> None: + """ + Decommission an agent. This is irreversible. + + Args: + agent_id: The agent UUID. + + Raises: + AgentIdPError: On API or network failure. + """ + sync_request( + "DELETE", self._base_url, f"/api/v1/agents/{agent_id}", + self._get_token(), + ) + + +class AsyncAgentRegistryClient: + """ + Asynchronous client for the Agent Registry service. + + Args: + base_url: AgentIdP server base URL. + get_token: Async callable that returns a valid Bearer token. + """ + + def __init__( + self, + base_url: str, + get_token: Callable[[], Coroutine[Any, Any, str]], + ) -> None: + self._base_url = base_url + self._get_token = get_token + + async def register_agent(self, request: RegisterAgentRequest) -> Agent: + """Register a new AI agent (async).""" + data = await async_request( + "POST", self._base_url, "/api/v1/agents", + await self._get_token(), body=request.to_dict(), + ) + return Agent.from_dict(data) + + async def list_agents( + self, + status: Optional[AgentStatus] = None, + agent_type: Optional[AgentType] = None, + page: int = 1, + limit: int = 20, + ) -> PaginatedAgents: + """List all registered agents with optional filters (async).""" + data = await async_request( + "GET", self._base_url, "/api/v1/agents", + await self._get_token(), + params={"status": status, "agentType": agent_type, "page": page, "limit": limit}, + ) + return PaginatedAgents.from_dict(data) + + async def get_agent(self, agent_id: str) -> Agent: + """Get a single agent by its agentId (async).""" + data = await async_request( + "GET", self._base_url, f"/api/v1/agents/{agent_id}", + await self._get_token(), + ) + return Agent.from_dict(data) + + async def update_agent(self, agent_id: str, request: UpdateAgentRequest) -> Agent: + """Update mutable fields on an existing agent (async).""" + data = await async_request( + "PATCH", self._base_url, f"/api/v1/agents/{agent_id}", + await self._get_token(), body=request.to_dict(), + ) + return Agent.from_dict(data) + + async def decommission_agent(self, agent_id: str) -> None: + """Decommission an agent — irreversible (async).""" + await async_request( + "DELETE", self._base_url, f"/api/v1/agents/{agent_id}", + await self._get_token(), + ) diff --git a/sdk-python/src/sentryagent_idp/services/audit.py b/sdk-python/src/sentryagent_idp/services/audit.py new file mode 100644 index 0000000..f3dd753 --- /dev/null +++ b/sdk-python/src/sentryagent_idp/services/audit.py @@ -0,0 +1,144 @@ +""" +Audit Log service clients — sync and async. +Covers query (list) and get-by-id operations. +""" + +from __future__ import annotations + +from typing import Any, Callable, Coroutine, Optional + +from .._request import sync_request, async_request +from ..types import AuditAction, AuditEvent, AuditOutcome, PaginatedAuditEvents + + +class AuditClient: + """ + Synchronous client for the Audit Log service. + + Args: + base_url: AgentIdP server base URL. + get_token: Callable that returns a valid Bearer token. + """ + + def __init__( + self, + base_url: str, + get_token: Callable[[], str], + ) -> None: + self._base_url = base_url + self._get_token = get_token + + def query_audit_log( + self, + agent_id: Optional[str] = None, + action: Optional[AuditAction] = None, + outcome: Optional[AuditOutcome] = None, + from_date: Optional[str] = None, + to_date: Optional[str] = None, + page: int = 1, + limit: int = 20, + ) -> PaginatedAuditEvents: + """ + Query audit log events with optional filters. Requires ``audit:read`` scope. + Events are retained for 90 days. + + Args: + agent_id: Filter by agent UUID. + action: Filter by audit action type. + outcome: Filter by outcome (success or failure). + from_date: ISO 8601 start datetime (inclusive). + to_date: ISO 8601 end datetime (inclusive). + page: Page number (1-based). + limit: Results per page. + + Returns: + PaginatedAuditEvents response. + + Raises: + AgentIdPError: On API or network failure. + """ + data = sync_request( + "GET", self._base_url, "/api/v1/audit", + self._get_token(), + params={ + "agentId": agent_id, + "action": action, + "outcome": outcome, + "fromDate": from_date, + "toDate": to_date, + "page": page, + "limit": limit, + }, + ) + return PaginatedAuditEvents.from_dict(data) + + def get_audit_event(self, event_id: str) -> AuditEvent: + """ + Get a single audit event by its eventId. Requires ``audit:read`` scope. + + Args: + event_id: The audit event UUID. + + Returns: + AuditEvent record. + + Raises: + AgentIdPError: On API or network failure. + """ + data = sync_request( + "GET", self._base_url, f"/api/v1/audit/{event_id}", + self._get_token(), + ) + return AuditEvent.from_dict(data) + + +class AsyncAuditClient: + """ + Asynchronous client for the Audit Log service. + + Args: + base_url: AgentIdP server base URL. + get_token: Async callable that returns a valid Bearer token. + """ + + def __init__( + self, + base_url: str, + get_token: Callable[[], Coroutine[Any, Any, str]], + ) -> None: + self._base_url = base_url + self._get_token = get_token + + async def query_audit_log( + self, + agent_id: Optional[str] = None, + action: Optional[AuditAction] = None, + outcome: Optional[AuditOutcome] = None, + from_date: Optional[str] = None, + to_date: Optional[str] = None, + page: int = 1, + limit: int = 20, + ) -> PaginatedAuditEvents: + """Query audit log events with optional filters (async).""" + data = await async_request( + "GET", self._base_url, "/api/v1/audit", + await self._get_token(), + params={ + "agentId": agent_id, + "action": action, + "outcome": outcome, + "fromDate": from_date, + "toDate": to_date, + "page": page, + "limit": limit, + }, + ) + return PaginatedAuditEvents.from_dict(data) + + async def get_audit_event(self, event_id: str) -> AuditEvent: + """Get a single audit event by its eventId (async).""" + data = await async_request( + "GET", self._base_url, f"/api/v1/audit/{event_id}", + await self._get_token(), + ) + return AuditEvent.from_dict(data) diff --git a/sdk-python/src/sentryagent_idp/services/credentials.py b/sdk-python/src/sentryagent_idp/services/credentials.py new file mode 100644 index 0000000..2fde78c --- /dev/null +++ b/sdk-python/src/sentryagent_idp/services/credentials.py @@ -0,0 +1,209 @@ +""" +Credential Management service clients — sync and async. +Covers generate, list, rotate, and revoke operations. +""" + +from __future__ import annotations + +from typing import Any, Callable, Coroutine, Optional + +from .._request import sync_request, async_request +from ..types import ( + Credential, + CredentialStatus, + CredentialWithSecret, + PaginatedCredentials, +) + + +class CredentialClient: + """ + Synchronous client for the Credential Management service. + + Args: + base_url: AgentIdP server base URL. + get_token: Callable that returns a valid Bearer token. + """ + + def __init__( + self, + base_url: str, + get_token: Callable[[], str], + ) -> None: + self._base_url = base_url + self._get_token = get_token + + def generate_credential( + self, + agent_id: str, + expires_at: Optional[str] = None, + ) -> CredentialWithSecret: + """ + Generate a new credential for an agent. + The ``client_secret`` is shown **once** — store it securely immediately. + + Args: + agent_id: The agent UUID. + expires_at: Optional ISO 8601 expiry date string. + + Returns: + CredentialWithSecret including the one-time plain-text secret. + + Raises: + AgentIdPError: On API or network failure. + """ + body = {"expiresAt": expires_at} if expires_at is not None else None + data = sync_request( + "POST", self._base_url, f"/api/v1/agents/{agent_id}/credentials", + self._get_token(), body=body, + ) + return CredentialWithSecret.from_dict(data) + + def list_credentials( + self, + agent_id: str, + status: Optional[CredentialStatus] = None, + page: int = 1, + limit: int = 20, + ) -> PaginatedCredentials: + """ + List credentials for an agent. Secrets are never returned in list responses. + + Args: + agent_id: The agent UUID. + status: Filter by credential status. + page: Page number (1-based). + limit: Results per page. + + Returns: + PaginatedCredentials response. + + Raises: + AgentIdPError: On API or network failure. + """ + data = sync_request( + "GET", self._base_url, f"/api/v1/agents/{agent_id}/credentials", + self._get_token(), + params={"status": status, "page": page, "limit": limit}, + ) + return PaginatedCredentials.from_dict(data) + + def rotate_credential( + self, agent_id: str, credential_id: str + ) -> CredentialWithSecret: + """ + Rotate a credential. The same ``credential_id`` is retained; a new secret is issued. + The old secret is immediately invalidated. + The new ``client_secret`` is shown **once** — store it securely immediately. + + Args: + agent_id: The agent UUID. + credential_id: The credential UUID to rotate. + + Returns: + CredentialWithSecret with the new one-time secret. + + Raises: + AgentIdPError: On API or network failure. + """ + data = sync_request( + "POST", + self._base_url, + f"/api/v1/agents/{agent_id}/credentials/{credential_id}/rotate", + self._get_token(), + ) + return CredentialWithSecret.from_dict(data) + + def revoke_credential( + self, agent_id: str, credential_id: str + ) -> Credential: + """ + Revoke a credential permanently. + + Args: + agent_id: The agent UUID. + credential_id: The credential UUID to revoke. + + Returns: + The revoked Credential record. + + Raises: + AgentIdPError: On API or network failure. + """ + data = sync_request( + "DELETE", + self._base_url, + f"/api/v1/agents/{agent_id}/credentials/{credential_id}", + self._get_token(), + ) + return Credential.from_dict(data) + + +class AsyncCredentialClient: + """ + Asynchronous client for the Credential Management service. + + Args: + base_url: AgentIdP server base URL. + get_token: Async callable that returns a valid Bearer token. + """ + + def __init__( + self, + base_url: str, + get_token: Callable[[], Coroutine[Any, Any, str]], + ) -> None: + self._base_url = base_url + self._get_token = get_token + + async def generate_credential( + self, + agent_id: str, + expires_at: Optional[str] = None, + ) -> CredentialWithSecret: + """Generate a new credential for an agent (async).""" + body = {"expiresAt": expires_at} if expires_at is not None else None + data = await async_request( + "POST", self._base_url, f"/api/v1/agents/{agent_id}/credentials", + await self._get_token(), body=body, + ) + return CredentialWithSecret.from_dict(data) + + async def list_credentials( + self, + agent_id: str, + status: Optional[CredentialStatus] = None, + page: int = 1, + limit: int = 20, + ) -> PaginatedCredentials: + """List credentials for an agent (async).""" + data = await async_request( + "GET", self._base_url, f"/api/v1/agents/{agent_id}/credentials", + await self._get_token(), + params={"status": status, "page": page, "limit": limit}, + ) + return PaginatedCredentials.from_dict(data) + + async def rotate_credential( + self, agent_id: str, credential_id: str + ) -> CredentialWithSecret: + """Rotate a credential (async).""" + data = await async_request( + "POST", + self._base_url, + f"/api/v1/agents/{agent_id}/credentials/{credential_id}/rotate", + await self._get_token(), + ) + return CredentialWithSecret.from_dict(data) + + async def revoke_credential( + self, agent_id: str, credential_id: str + ) -> Credential: + """Revoke a credential permanently (async).""" + data = await async_request( + "DELETE", + self._base_url, + f"/api/v1/agents/{agent_id}/credentials/{credential_id}", + await self._get_token(), + ) + return Credential.from_dict(data) diff --git a/sdk-python/src/sentryagent_idp/services/token.py b/sdk-python/src/sentryagent_idp/services/token.py new file mode 100644 index 0000000..9469dca --- /dev/null +++ b/sdk-python/src/sentryagent_idp/services/token.py @@ -0,0 +1,154 @@ +""" +Token service clients (introspect and revoke) — sync and async. +Token issuance is handled by TokenManager / AsyncTokenManager. +""" + +from __future__ import annotations + +from typing import Any, Callable, Coroutine + +import requests +import httpx + +from ..errors import AgentIdPError +from ..types import IntrospectResponse + + +class TokenClient: + """ + Synchronous client for token introspection and revocation. + + Args: + base_url: AgentIdP server base URL. + get_token: Callable that returns a valid Bearer token. + """ + + def __init__( + self, + base_url: str, + get_token: Callable[[], str], + ) -> None: + self._base_url = base_url.rstrip("/") + self._get_token = get_token + + def introspect_token(self, token_to_check: str) -> IntrospectResponse: + """ + Check whether a token is currently active. + Always returns successfully — check ``response.active`` for validity. + + Args: + token_to_check: The JWT string to introspect. + + Returns: + IntrospectResponse with ``active`` field set. + + Raises: + AgentIdPError: On API or network failure. + """ + url = f"{self._base_url}/api/v1/token/introspect" + try: + response = requests.post( + url, + data={"token": token_to_check}, + headers={ + "Authorization": f"Bearer {self._get_token()}", + "Content-Type": "application/x-www-form-urlencoded", + }, + timeout=30, + ) + except requests.RequestException as exc: + raise AgentIdPError.network_error(exc) from exc + + body = response.json() + if not response.ok: + raise AgentIdPError.from_api_error(body, response.status_code) + return IntrospectResponse.from_dict(body) + + def revoke_token(self, token_to_revoke: str) -> None: + """ + Revoke a token immediately. Idempotent (RFC 7009). + + Args: + token_to_revoke: The JWT string to revoke. + + Raises: + AgentIdPError: On API or network failure. + """ + url = f"{self._base_url}/api/v1/token/revoke" + try: + response = requests.post( + url, + data={"token": token_to_revoke}, + headers={ + "Authorization": f"Bearer {self._get_token()}", + "Content-Type": "application/x-www-form-urlencoded", + }, + timeout=30, + ) + except requests.RequestException as exc: + raise AgentIdPError.network_error(exc) from exc + + if not response.ok: + body = response.json() if response.content else {} + raise AgentIdPError.from_api_error(body, response.status_code) + + +class AsyncTokenClient: + """ + Asynchronous client for token introspection and revocation. + + Args: + base_url: AgentIdP server base URL. + get_token: Async callable that returns a valid Bearer token. + """ + + def __init__( + self, + base_url: str, + get_token: Callable[[], Coroutine[Any, Any, str]], + ) -> None: + self._base_url = base_url.rstrip("/") + self._get_token = get_token + + async def introspect_token(self, token_to_check: str) -> IntrospectResponse: + """Check whether a token is currently active (async).""" + url = f"{self._base_url}/api/v1/token/introspect" + token = await self._get_token() + try: + async with httpx.AsyncClient(timeout=30) as client: + response = await client.post( + url, + data={"token": token_to_check}, + headers={ + "Authorization": f"Bearer {token}", + "Content-Type": "application/x-www-form-urlencoded", + }, + ) + except httpx.RequestError as exc: + raise AgentIdPError.network_error(exc) from exc + + body = response.json() + if not response.is_success: + raise AgentIdPError.from_api_error(body, response.status_code) + return IntrospectResponse.from_dict(body) + + async def revoke_token(self, token_to_revoke: str) -> None: + """Revoke a token immediately — idempotent (async).""" + url = f"{self._base_url}/api/v1/token/revoke" + token = await self._get_token() + try: + async with httpx.AsyncClient(timeout=30) as client: + response = await client.post( + url, + data={"token": token_to_revoke}, + headers={ + "Authorization": f"Bearer {token}", + "Content-Type": "application/x-www-form-urlencoded", + }, + ) + except httpx.RequestError as exc: + raise AgentIdPError.network_error(exc) from exc + + if not response.is_success: + body = response.json() if response.content else {} + raise AgentIdPError.from_api_error(body, response.status_code) diff --git a/sdk-python/src/sentryagent_idp/token_manager.py b/sdk-python/src/sentryagent_idp/token_manager.py new file mode 100644 index 0000000..3bc7720 --- /dev/null +++ b/sdk-python/src/sentryagent_idp/token_manager.py @@ -0,0 +1,116 @@ +""" +Synchronous TokenManager — handles OAuth 2.0 token acquisition, caching, and refresh. +Tokens are re-issued automatically when expired or within 60 seconds of expiry. +""" + +from __future__ import annotations + +import time +import threading +from dataclasses import dataclass +from typing import Optional + +import requests + +from .errors import AgentIdPError +from .types import TokenResponse + +#: Seconds before expiry at which a token refresh is triggered. +REFRESH_BUFFER_SECONDS = 60 + + +@dataclass +class _CachedToken: + access_token: str + expires_at: float # Unix timestamp (seconds) + + +class TokenManager: + """ + Thread-safe synchronous token manager. + + Acquires and caches OAuth 2.0 access tokens. Automatically refreshes + the token when it is within :data:`REFRESH_BUFFER_SECONDS` of expiry. + + Args: + base_url: AgentIdP server base URL (e.g. ``http://localhost:3000``). + client_id: The agent's ``agentId`` (UUID). + client_secret: The agent's credential secret. + scopes: Space-separated OAuth 2.0 scopes to request. + """ + + def __init__( + self, + base_url: str, + client_id: str, + client_secret: str, + scopes: str, + ) -> None: + self._base_url = base_url.rstrip("/") + self._client_id = client_id + self._client_secret = client_secret + self._scopes = scopes + self._cached: Optional[_CachedToken] = None + self._lock = threading.Lock() + + def get_token(self) -> str: + """ + Return a valid access token, refreshing if necessary. + + Returns: + A valid JWT access token string. + + Raises: + AgentIdPError: If token acquisition fails. + """ + with self._lock: + now = time.time() + if ( + self._cached is not None + and self._cached.expires_at - now > REFRESH_BUFFER_SECONDS + ): + return self._cached.access_token + + token_response = self._issue_token() + self._cached = _CachedToken( + access_token=token_response.access_token, + expires_at=now + token_response.expires_in, + ) + return self._cached.access_token + + def clear_cache(self) -> None: + """Clear the cached token, forcing re-acquisition on the next call.""" + with self._lock: + self._cached = None + + def _issue_token(self) -> TokenResponse: + """ + POST /api/v1/token to obtain a new access token. + + Returns: + TokenResponse from the API. + + Raises: + AgentIdPError: On authentication failure or network error. + """ + url = f"{self._base_url}/api/v1/token" + data = { + "grant_type": "client_credentials", + "client_id": self._client_id, + "client_secret": self._client_secret, + "scope": self._scopes, + } + try: + response = requests.post( + url, + data=data, + headers={"Content-Type": "application/x-www-form-urlencoded"}, + timeout=30, + ) + except requests.RequestException as exc: + raise AgentIdPError.network_error(exc) from exc + + body = response.json() + if not response.ok: + raise AgentIdPError.from_oauth2_error(body, response.status_code) + return TokenResponse.from_dict(body) diff --git a/sdk-python/src/sentryagent_idp/types.py b/sdk-python/src/sentryagent_idp/types.py new file mode 100644 index 0000000..116f055 --- /dev/null +++ b/sdk-python/src/sentryagent_idp/types.py @@ -0,0 +1,323 @@ +""" +Type definitions for the SentryAgent.ai AgentIdP Python SDK. +All request and response shapes derived from the AgentIdP OpenAPI 3.0 specs. +""" + +from __future__ import annotations + +from typing import Any, Dict, List, Literal, Optional +from dataclasses import dataclass, field + +# ───────────────────────────────────────────────────────────────────────────── +# Enums / Literal types +# ───────────────────────────────────────────────────────────────────────────── + +AgentType = Literal[ + "screener", + "classifier", + "orchestrator", + "extractor", + "summarizer", + "router", + "monitor", + "custom", +] + +AgentStatus = Literal["active", "suspended", "decommissioned"] + +DeploymentEnv = Literal["development", "staging", "production"] + +CredentialStatus = Literal["active", "revoked"] + +OAuthScope = Literal["agents:read", "agents:write", "tokens:read", "audit:read"] + +AuditAction = Literal[ + "agent.created", + "agent.updated", + "agent.decommissioned", + "agent.suspended", + "agent.reactivated", + "token.issued", + "token.revoked", + "token.introspected", + "credential.generated", + "credential.rotated", + "credential.revoked", + "auth.failed", +] + +AuditOutcome = Literal["success", "failure"] + +# ───────────────────────────────────────────────────────────────────────────── +# Agent Registry +# ───────────────────────────────────────────────────────────────────────────── + + +@dataclass +class Agent: + """A registered AI agent identity.""" + + agent_id: str + email: str + agent_type: AgentType + version: str + capabilities: List[str] + owner: str + deployment_env: DeploymentEnv + status: AgentStatus + created_at: str + updated_at: str + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "Agent": + """Deserialise from an API response dict.""" + return cls( + agent_id=data["agentId"], + email=data["email"], + agent_type=data["agentType"], + version=data["version"], + capabilities=data["capabilities"], + owner=data["owner"], + deployment_env=data["deploymentEnv"], + status=data["status"], + created_at=data["createdAt"], + updated_at=data["updatedAt"], + ) + + +@dataclass +class RegisterAgentRequest: + """Request body for registering a new AI agent.""" + + email: str + agent_type: AgentType + version: str + capabilities: List[str] + owner: str + deployment_env: DeploymentEnv + + def to_dict(self) -> Dict[str, Any]: + """Serialise to API request dict.""" + return { + "email": self.email, + "agentType": self.agent_type, + "version": self.version, + "capabilities": self.capabilities, + "owner": self.owner, + "deploymentEnv": self.deployment_env, + } + + +@dataclass +class UpdateAgentRequest: + """Request body for partially updating an agent (all fields optional).""" + + agent_type: Optional[AgentType] = None + version: Optional[str] = None + capabilities: Optional[List[str]] = None + owner: Optional[str] = None + deployment_env: Optional[DeploymentEnv] = None + status: Optional[AgentStatus] = None + + def to_dict(self) -> Dict[str, Any]: + """Serialise to API request dict, omitting None fields.""" + out: Dict[str, Any] = {} + if self.agent_type is not None: + out["agentType"] = self.agent_type + if self.version is not None: + out["version"] = self.version + if self.capabilities is not None: + out["capabilities"] = self.capabilities + if self.owner is not None: + out["owner"] = self.owner + if self.deployment_env is not None: + out["deploymentEnv"] = self.deployment_env + if self.status is not None: + out["status"] = self.status + return out + + +@dataclass +class PaginatedAgents: + """Paginated list of agents.""" + + data: List[Agent] + total: int + page: int + limit: int + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> "PaginatedAgents": + return cls( + data=[Agent.from_dict(a) for a in d["data"]], + total=d["total"], + page=d["page"], + limit=d["limit"], + ) + + +# ───────────────────────────────────────────────────────────────────────────── +# Credential Management +# ───────────────────────────────────────────────────────────────────────────── + + +@dataclass +class Credential: + """A credential record (client_secret never included).""" + + credential_id: str + client_id: str + status: CredentialStatus + created_at: str + expires_at: Optional[str] + revoked_at: Optional[str] + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "Credential": + return cls( + credential_id=data["credentialId"], + client_id=data["clientId"], + status=data["status"], + created_at=data["createdAt"], + expires_at=data.get("expiresAt"), + revoked_at=data.get("revokedAt"), + ) + + +@dataclass +class CredentialWithSecret(Credential): + """Credential with plain-text secret — returned once only on create/rotate.""" + + client_secret: str = field(default="") + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "CredentialWithSecret": + base = Credential.from_dict(data) + return cls( + credential_id=base.credential_id, + client_id=base.client_id, + status=base.status, + created_at=base.created_at, + expires_at=base.expires_at, + revoked_at=base.revoked_at, + client_secret=data["clientSecret"], + ) + + +@dataclass +class PaginatedCredentials: + """Paginated list of credentials.""" + + data: List[Credential] + total: int + page: int + limit: int + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> "PaginatedCredentials": + return cls( + data=[Credential.from_dict(c) for c in d["data"]], + total=d["total"], + page=d["page"], + limit=d["limit"], + ) + + +# ───────────────────────────────────────────────────────────────────────────── +# OAuth 2.0 Tokens +# ───────────────────────────────────────────────────────────────────────────── + + +@dataclass +class TokenResponse: + """OAuth 2.0 access token response.""" + + access_token: str + token_type: str + expires_in: int + scope: str + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "TokenResponse": + return cls( + access_token=data["access_token"], + token_type=data["token_type"], + expires_in=data["expires_in"], + scope=data["scope"], + ) + + +@dataclass +class IntrospectResponse: + """Token introspection response (RFC 7662).""" + + active: bool + sub: Optional[str] = None + client_id: Optional[str] = None + scope: Optional[str] = None + token_type: Optional[str] = None + iat: Optional[int] = None + exp: Optional[int] = None + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "IntrospectResponse": + return cls( + active=data["active"], + sub=data.get("sub"), + client_id=data.get("client_id"), + scope=data.get("scope"), + token_type=data.get("token_type"), + iat=data.get("iat"), + exp=data.get("exp"), + ) + + +# ───────────────────────────────────────────────────────────────────────────── +# Audit Log +# ───────────────────────────────────────────────────────────────────────────── + + +@dataclass +class AuditEvent: + """An immutable audit event record.""" + + event_id: str + agent_id: str + action: AuditAction + outcome: AuditOutcome + ip_address: str + user_agent: str + metadata: Dict[str, Any] + timestamp: str + + @classmethod + def from_dict(cls, data: Dict[str, Any]) -> "AuditEvent": + return cls( + event_id=data["eventId"], + agent_id=data["agentId"], + action=data["action"], + outcome=data["outcome"], + ip_address=data["ipAddress"], + user_agent=data["userAgent"], + metadata=data.get("metadata", {}), + timestamp=data["timestamp"], + ) + + +@dataclass +class PaginatedAuditEvents: + """Paginated list of audit events.""" + + data: List[AuditEvent] + total: int + page: int + limit: int + + @classmethod + def from_dict(cls, d: Dict[str, Any]) -> "PaginatedAuditEvents": + return cls( + data=[AuditEvent.from_dict(e) for e in d["data"]], + total=d["total"], + page=d["page"], + limit=d["limit"], + ) diff --git a/sdk-python/tests/__init__.py b/sdk-python/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/sdk-python/tests/__pycache__/__init__.cpython-312.pyc b/sdk-python/tests/__pycache__/__init__.cpython-312.pyc new file mode 100644 index 0000000..58555cb Binary files /dev/null and b/sdk-python/tests/__pycache__/__init__.cpython-312.pyc differ diff --git a/sdk-python/tests/__pycache__/test_errors.cpython-312-pytest-9.0.2.pyc b/sdk-python/tests/__pycache__/test_errors.cpython-312-pytest-9.0.2.pyc new file mode 100644 index 0000000..ee53a00 Binary files /dev/null and b/sdk-python/tests/__pycache__/test_errors.cpython-312-pytest-9.0.2.pyc differ diff --git a/sdk-python/tests/__pycache__/test_services.cpython-312-pytest-9.0.2.pyc b/sdk-python/tests/__pycache__/test_services.cpython-312-pytest-9.0.2.pyc new file mode 100644 index 0000000..4772579 Binary files /dev/null and b/sdk-python/tests/__pycache__/test_services.cpython-312-pytest-9.0.2.pyc differ diff --git a/sdk-python/tests/__pycache__/test_token_manager.cpython-312-pytest-9.0.2.pyc b/sdk-python/tests/__pycache__/test_token_manager.cpython-312-pytest-9.0.2.pyc new file mode 100644 index 0000000..b815660 Binary files /dev/null and b/sdk-python/tests/__pycache__/test_token_manager.cpython-312-pytest-9.0.2.pyc differ diff --git a/sdk-python/tests/__pycache__/test_types.cpython-312-pytest-9.0.2.pyc b/sdk-python/tests/__pycache__/test_types.cpython-312-pytest-9.0.2.pyc new file mode 100644 index 0000000..4844d15 Binary files /dev/null and b/sdk-python/tests/__pycache__/test_types.cpython-312-pytest-9.0.2.pyc differ diff --git a/sdk-python/tests/test_errors.py b/sdk-python/tests/test_errors.py new file mode 100644 index 0000000..944b766 --- /dev/null +++ b/sdk-python/tests/test_errors.py @@ -0,0 +1,52 @@ +"""Tests for AgentIdPError.""" + +from sentryagent_idp.errors import AgentIdPError + + +def test_basic_construction() -> None: + err = AgentIdPError("AgentNotFoundError", "Agent not found.", 404) + assert err.code == "AgentNotFoundError" + assert err.http_status == 404 + assert str(err) == "Agent not found." + assert err.details is None + + +def test_from_api_error_valid_body() -> None: + body = {"code": "AgentNotFoundError", "message": "Not found.", "details": {"id": "x"}} + err = AgentIdPError.from_api_error(body, 404) + assert err.code == "AgentNotFoundError" + assert err.http_status == 404 + assert err.details == {"id": "x"} + + +def test_from_api_error_unknown_body() -> None: + err = AgentIdPError.from_api_error("plain string", 500) + assert err.code == "UNKNOWN_ERROR" + assert err.http_status == 500 + + +def test_from_oauth2_error() -> None: + body = {"error": "invalid_client", "error_description": "Bad credentials."} + err = AgentIdPError.from_oauth2_error(body, 401) + assert err.code == "invalid_client" + assert str(err) == "Bad credentials." + assert err.http_status == 401 + + +def test_from_oauth2_error_unknown() -> None: + err = AgentIdPError.from_oauth2_error("garbage", 400) + assert err.code == "unknown_error" + + +def test_network_error() -> None: + cause = ConnectionError("refused") + err = AgentIdPError.network_error(cause) + assert err.code == "NETWORK_ERROR" + assert err.http_status == 0 + assert "refused" in str(err) + + +def test_repr() -> None: + err = AgentIdPError("CODE", "msg", 400) + assert "AgentIdPError" in repr(err) + assert "CODE" in repr(err) diff --git a/sdk-python/tests/test_services.py b/sdk-python/tests/test_services.py new file mode 100644 index 0000000..7200dc1 --- /dev/null +++ b/sdk-python/tests/test_services.py @@ -0,0 +1,349 @@ +""" +Tests for all service clients — covers all 14 API endpoints (sync + async). +Uses `responses` for sync mocking and `respx` for async mocking. +""" + +from __future__ import annotations + +import pytest +import responses as resp_lib +import respx +import httpx + +from sentryagent_idp.errors import AgentIdPError +from sentryagent_idp.services.agents import AgentRegistryClient, AsyncAgentRegistryClient +from sentryagent_idp.services.credentials import CredentialClient, AsyncCredentialClient +from sentryagent_idp.services.token import TokenClient, AsyncTokenClient +from sentryagent_idp.services.audit import AuditClient, AsyncAuditClient +from sentryagent_idp.types import RegisterAgentRequest, UpdateAgentRequest + +BASE = "http://localhost:3000" +TOKEN = "test-bearer-token" + + +def get_token() -> str: + return TOKEN + + +async def async_get_token() -> str: + return TOKEN + + +# ─── Fixtures ───────────────────────────────────────────────────────────────── + +AGENT = { + "agentId": "uuid-1", "email": "a@b.ai", "agentType": "screener", + "version": "1.0.0", "capabilities": ["read"], "owner": "team", + "deploymentEnv": "production", "status": "active", + "createdAt": "2026-01-01T00:00:00Z", "updatedAt": "2026-01-01T00:00:00Z", +} +PAGINATED_AGENTS = {"data": [AGENT], "total": 1, "page": 1, "limit": 20} + +CRED = { + "credentialId": "cred-1", "clientId": "uuid-1", "status": "active", + "createdAt": "2026-01-01T00:00:00Z", "expiresAt": None, "revokedAt": None, +} +CRED_WITH_SECRET = {**CRED, "clientSecret": "sk_live_abc"} +PAGINATED_CREDS = {"data": [CRED], "total": 1, "page": 1, "limit": 20} + +INTROSPECT_ACTIVE = {"active": True, "sub": "uuid-1", "exp": 9999999999} +INTROSPECT_INACTIVE = {"active": False} + +AUDIT_EVENT = { + "eventId": "ev-1", "agentId": "uuid-1", "action": "token.issued", + "outcome": "success", "ipAddress": "1.2.3.4", "userAgent": "curl", + "metadata": {}, "timestamp": "2026-01-01T00:00:00Z", +} +PAGINATED_AUDIT = {"data": [AUDIT_EVENT], "total": 1, "page": 1, "limit": 20} + + +# ─── Agent Registry — Sync ──────────────────────────────────────────────────── + +@resp_lib.activate +def test_register_agent() -> None: + resp_lib.add(resp_lib.POST, f"{BASE}/api/v1/agents", json=AGENT, status=201) + client = AgentRegistryClient(BASE, get_token) + agent = client.register_agent(RegisterAgentRequest( + email="a@b.ai", agent_type="screener", version="1.0.0", + capabilities=["read"], owner="team", deployment_env="production", + )) + assert agent.agent_id == "uuid-1" + + +@resp_lib.activate +def test_list_agents() -> None: + resp_lib.add(resp_lib.GET, f"{BASE}/api/v1/agents", json=PAGINATED_AGENTS, status=200) + client = AgentRegistryClient(BASE, get_token) + result = client.list_agents() + assert result.total == 1 + + +@resp_lib.activate +def test_get_agent() -> None: + resp_lib.add(resp_lib.GET, f"{BASE}/api/v1/agents/uuid-1", json=AGENT, status=200) + client = AgentRegistryClient(BASE, get_token) + agent = client.get_agent("uuid-1") + assert agent.agent_id == "uuid-1" + + +@resp_lib.activate +def test_update_agent() -> None: + resp_lib.add(resp_lib.PATCH, f"{BASE}/api/v1/agents/uuid-1", json=AGENT, status=200) + client = AgentRegistryClient(BASE, get_token) + agent = client.update_agent("uuid-1", UpdateAgentRequest(version="2.0.0")) + assert agent.agent_id == "uuid-1" + + +@resp_lib.activate +def test_decommission_agent() -> None: + resp_lib.add(resp_lib.DELETE, f"{BASE}/api/v1/agents/uuid-1", status=204) + client = AgentRegistryClient(BASE, get_token) + result = client.decommission_agent("uuid-1") + assert result is None + + +@resp_lib.activate +def test_agent_not_found_raises() -> None: + resp_lib.add( + resp_lib.GET, f"{BASE}/api/v1/agents/bad-id", + json={"code": "AgentNotFoundError", "message": "Not found."}, status=404, + ) + client = AgentRegistryClient(BASE, get_token) + with pytest.raises(AgentIdPError) as exc_info: + client.get_agent("bad-id") + assert exc_info.value.code == "AgentNotFoundError" + assert exc_info.value.http_status == 404 + + +# ─── Credentials — Sync ─────────────────────────────────────────────────────── + +@resp_lib.activate +def test_generate_credential() -> None: + resp_lib.add(resp_lib.POST, f"{BASE}/api/v1/agents/uuid-1/credentials", json=CRED_WITH_SECRET, status=201) + client = CredentialClient(BASE, get_token) + cred = client.generate_credential("uuid-1") + assert cred.client_secret == "sk_live_abc" + + +@resp_lib.activate +def test_list_credentials() -> None: + resp_lib.add(resp_lib.GET, f"{BASE}/api/v1/agents/uuid-1/credentials", json=PAGINATED_CREDS, status=200) + client = CredentialClient(BASE, get_token) + result = client.list_credentials("uuid-1") + assert result.total == 1 + + +@resp_lib.activate +def test_rotate_credential() -> None: + resp_lib.add(resp_lib.POST, f"{BASE}/api/v1/agents/uuid-1/credentials/cred-1/rotate", json=CRED_WITH_SECRET, status=200) + client = CredentialClient(BASE, get_token) + cred = client.rotate_credential("uuid-1", "cred-1") + assert cred.client_secret == "sk_live_abc" + + +@resp_lib.activate +def test_revoke_credential() -> None: + revoked = {**CRED, "status": "revoked", "revokedAt": "2026-01-02T00:00:00Z"} + resp_lib.add(resp_lib.DELETE, f"{BASE}/api/v1/agents/uuid-1/credentials/cred-1", json=revoked, status=200) + client = CredentialClient(BASE, get_token) + cred = client.revoke_credential("uuid-1", "cred-1") + assert cred.status == "revoked" + + +# ─── Token — Sync ───────────────────────────────────────────────────────────── + +@resp_lib.activate +def test_introspect_token_active() -> None: + resp_lib.add(resp_lib.POST, f"{BASE}/api/v1/token/introspect", json=INTROSPECT_ACTIVE, status=200) + client = TokenClient(BASE, get_token) + result = client.introspect_token("some-token") + assert result.active is True + assert result.sub == "uuid-1" + + +@resp_lib.activate +def test_introspect_token_inactive() -> None: + resp_lib.add(resp_lib.POST, f"{BASE}/api/v1/token/introspect", json=INTROSPECT_INACTIVE, status=200) + client = TokenClient(BASE, get_token) + result = client.introspect_token("expired-token") + assert result.active is False + + +@resp_lib.activate +def test_revoke_token() -> None: + resp_lib.add(resp_lib.POST, f"{BASE}/api/v1/token/revoke", json={}, status=200) + client = TokenClient(BASE, get_token) + result = client.revoke_token("some-token") + assert result is None + + +# ─── Audit — Sync ───────────────────────────────────────────────────────────── + +@resp_lib.activate +def test_query_audit_log() -> None: + resp_lib.add(resp_lib.GET, f"{BASE}/api/v1/audit", json=PAGINATED_AUDIT, status=200) + client = AuditClient(BASE, get_token) + result = client.query_audit_log(agent_id="uuid-1", action="token.issued") + assert result.total == 1 + assert result.data[0].event_id == "ev-1" + + +@resp_lib.activate +def test_get_audit_event() -> None: + resp_lib.add(resp_lib.GET, f"{BASE}/api/v1/audit/ev-1", json=AUDIT_EVENT, status=200) + client = AuditClient(BASE, get_token) + event = client.get_audit_event("ev-1") + assert event.event_id == "ev-1" + + +# ─── Async — all 14 endpoints ───────────────────────────────────────────────── + +@pytest.mark.asyncio +async def test_async_register_agent() -> None: + with respx.mock: + respx.post(f"{BASE}/api/v1/agents").mock(return_value=httpx.Response(201, json=AGENT)) + client = AsyncAgentRegistryClient(BASE, async_get_token) + agent = await client.register_agent(RegisterAgentRequest( + email="a@b.ai", agent_type="screener", version="1.0.0", + capabilities=["read"], owner="team", deployment_env="production", + )) + assert agent.agent_id == "uuid-1" + + +@pytest.mark.asyncio +async def test_async_list_agents() -> None: + with respx.mock: + respx.get(f"{BASE}/api/v1/agents").mock(return_value=httpx.Response(200, json=PAGINATED_AGENTS)) + client = AsyncAgentRegistryClient(BASE, async_get_token) + result = await client.list_agents() + assert result.total == 1 + + +@pytest.mark.asyncio +async def test_async_get_agent() -> None: + with respx.mock: + respx.get(f"{BASE}/api/v1/agents/uuid-1").mock(return_value=httpx.Response(200, json=AGENT)) + client = AsyncAgentRegistryClient(BASE, async_get_token) + agent = await client.get_agent("uuid-1") + assert agent.agent_id == "uuid-1" + + +@pytest.mark.asyncio +async def test_async_update_agent() -> None: + with respx.mock: + respx.patch(f"{BASE}/api/v1/agents/uuid-1").mock(return_value=httpx.Response(200, json=AGENT)) + client = AsyncAgentRegistryClient(BASE, async_get_token) + agent = await client.update_agent("uuid-1", UpdateAgentRequest(version="2.0.0")) + assert agent.agent_id == "uuid-1" + + +@pytest.mark.asyncio +async def test_async_decommission_agent() -> None: + with respx.mock: + respx.delete(f"{BASE}/api/v1/agents/uuid-1").mock(return_value=httpx.Response(204)) + client = AsyncAgentRegistryClient(BASE, async_get_token) + result = await client.decommission_agent("uuid-1") + assert result is None + + +@pytest.mark.asyncio +async def test_async_generate_credential() -> None: + with respx.mock: + respx.post(f"{BASE}/api/v1/agents/uuid-1/credentials").mock( + return_value=httpx.Response(201, json=CRED_WITH_SECRET)) + client = AsyncCredentialClient(BASE, async_get_token) + cred = await client.generate_credential("uuid-1") + assert cred.client_secret == "sk_live_abc" + + +@pytest.mark.asyncio +async def test_async_list_credentials() -> None: + with respx.mock: + respx.get(f"{BASE}/api/v1/agents/uuid-1/credentials").mock( + return_value=httpx.Response(200, json=PAGINATED_CREDS)) + client = AsyncCredentialClient(BASE, async_get_token) + result = await client.list_credentials("uuid-1") + assert result.total == 1 + + +@pytest.mark.asyncio +async def test_async_rotate_credential() -> None: + with respx.mock: + respx.post(f"{BASE}/api/v1/agents/uuid-1/credentials/cred-1/rotate").mock( + return_value=httpx.Response(200, json=CRED_WITH_SECRET)) + client = AsyncCredentialClient(BASE, async_get_token) + cred = await client.rotate_credential("uuid-1", "cred-1") + assert cred.client_secret == "sk_live_abc" + + +@pytest.mark.asyncio +async def test_async_revoke_credential() -> None: + revoked = {**CRED, "status": "revoked", "revokedAt": "2026-01-02T00:00:00Z"} + with respx.mock: + respx.delete(f"{BASE}/api/v1/agents/uuid-1/credentials/cred-1").mock( + return_value=httpx.Response(200, json=revoked)) + client = AsyncCredentialClient(BASE, async_get_token) + cred = await client.revoke_credential("uuid-1", "cred-1") + assert cred.status == "revoked" + + +@pytest.mark.asyncio +async def test_async_introspect_token() -> None: + with respx.mock: + respx.post(f"{BASE}/api/v1/token/introspect").mock( + return_value=httpx.Response(200, json=INTROSPECT_ACTIVE)) + client = AsyncTokenClient(BASE, async_get_token) + result = await client.introspect_token("tok") + assert result.active is True + + +@pytest.mark.asyncio +async def test_async_revoke_token() -> None: + with respx.mock: + respx.post(f"{BASE}/api/v1/token/revoke").mock(return_value=httpx.Response(200, json={})) + client = AsyncTokenClient(BASE, async_get_token) + await client.revoke_token("tok") + + +@pytest.mark.asyncio +async def test_async_query_audit_log() -> None: + with respx.mock: + respx.get(f"{BASE}/api/v1/audit").mock(return_value=httpx.Response(200, json=PAGINATED_AUDIT)) + client = AsyncAuditClient(BASE, async_get_token) + result = await client.query_audit_log() + assert result.total == 1 + + +@pytest.mark.asyncio +async def test_async_get_audit_event() -> None: + with respx.mock: + respx.get(f"{BASE}/api/v1/audit/ev-1").mock(return_value=httpx.Response(200, json=AUDIT_EVENT)) + client = AsyncAuditClient(BASE, async_get_token) + event = await client.get_audit_event("ev-1") + assert event.event_id == "ev-1" + + +# ─── Error propagation ──────────────────────────────────────────────────────── + +@resp_lib.activate +def test_api_error_propagated_from_service() -> None: + resp_lib.add( + resp_lib.GET, f"{BASE}/api/v1/agents/bad", + json={"code": "AgentNotFoundError", "message": "Not found."}, status=404, + ) + client = AgentRegistryClient(BASE, get_token) + with pytest.raises(AgentIdPError) as exc_info: + client.get_agent("bad") + assert exc_info.value.http_status == 404 + + +@pytest.mark.asyncio +async def test_async_api_error_propagated() -> None: + with respx.mock: + respx.get(f"{BASE}/api/v1/agents/bad").mock(return_value=httpx.Response( + 404, json={"code": "AgentNotFoundError", "message": "Not found."} + )) + client = AsyncAgentRegistryClient(BASE, async_get_token) + with pytest.raises(AgentIdPError) as exc_info: + await client.get_agent("bad") + assert exc_info.value.http_status == 404 diff --git a/sdk-python/tests/test_token_manager.py b/sdk-python/tests/test_token_manager.py new file mode 100644 index 0000000..1845506 --- /dev/null +++ b/sdk-python/tests/test_token_manager.py @@ -0,0 +1,112 @@ +"""Tests for TokenManager (sync) and AsyncTokenManager.""" + +import time +import pytest +import responses as resp_lib +import respx +import httpx + +from sentryagent_idp.token_manager import TokenManager, REFRESH_BUFFER_SECONDS +from sentryagent_idp.async_token_manager import AsyncTokenManager +from sentryagent_idp.errors import AgentIdPError + +BASE_URL = "http://localhost:3000" +TOKEN_URL = f"{BASE_URL}/api/v1/token" +TOKEN_RESP = { + "access_token": "eyJ.abc.def", + "token_type": "Bearer", + "expires_in": 3600, + "scope": "agents:read", +} + + +# ─── Sync TokenManager ──────────────────────────────────────────────────────── + +@resp_lib.activate +def test_token_manager_issues_token() -> None: + resp_lib.add(resp_lib.POST, TOKEN_URL, json=TOKEN_RESP, status=200) + tm = TokenManager(BASE_URL, "client-id", "secret", "agents:read") + token = tm.get_token() + assert token == "eyJ.abc.def" + assert len(resp_lib.calls) == 1 + + +@resp_lib.activate +def test_token_manager_caches_token() -> None: + resp_lib.add(resp_lib.POST, TOKEN_URL, json=TOKEN_RESP, status=200) + tm = TokenManager(BASE_URL, "client-id", "secret", "agents:read") + tm.get_token() + tm.get_token() + # Only one HTTP call because second call uses cache + assert len(resp_lib.calls) == 1 + + +@resp_lib.activate +def test_token_manager_refreshes_near_expiry() -> None: + resp_lib.add(resp_lib.POST, TOKEN_URL, json={**TOKEN_RESP, "expires_in": 30}, status=200) + resp_lib.add(resp_lib.POST, TOKEN_URL, json=TOKEN_RESP, status=200) + tm = TokenManager(BASE_URL, "client-id", "secret", "agents:read") + tm.get_token() + # Simulate cached token being nearly expired + assert tm._cached is not None + tm._cached.expires_at = time.time() + (REFRESH_BUFFER_SECONDS - 1) + tm.get_token() + assert len(resp_lib.calls) == 2 + + +@resp_lib.activate +def test_token_manager_raises_on_auth_failure() -> None: + resp_lib.add( + resp_lib.POST, TOKEN_URL, + json={"error": "invalid_client", "error_description": "Bad creds."}, + status=401, + ) + tm = TokenManager(BASE_URL, "client-id", "bad-secret", "agents:read") + with pytest.raises(AgentIdPError) as exc_info: + tm.get_token() + assert exc_info.value.code == "invalid_client" + assert exc_info.value.http_status == 401 + + +@resp_lib.activate +def test_token_manager_clear_cache() -> None: + resp_lib.add(resp_lib.POST, TOKEN_URL, json=TOKEN_RESP, status=200) + resp_lib.add(resp_lib.POST, TOKEN_URL, json=TOKEN_RESP, status=200) + tm = TokenManager(BASE_URL, "client-id", "secret", "agents:read") + tm.get_token() + tm.clear_cache() + tm.get_token() + assert len(resp_lib.calls) == 2 + + +# ─── Async TokenManager ─────────────────────────────────────────────────────── + +@pytest.mark.asyncio +async def test_async_token_manager_issues_token() -> None: + with respx.mock: + respx.post(TOKEN_URL).mock(return_value=httpx.Response(200, json=TOKEN_RESP)) + tm = AsyncTokenManager(BASE_URL, "client-id", "secret", "agents:read") + token = await tm.get_token() + assert token == "eyJ.abc.def" + + +@pytest.mark.asyncio +async def test_async_token_manager_caches_token() -> None: + with respx.mock: + route = respx.post(TOKEN_URL).mock(return_value=httpx.Response(200, json=TOKEN_RESP)) + tm = AsyncTokenManager(BASE_URL, "client-id", "secret", "agents:read") + await tm.get_token() + await tm.get_token() + assert route.call_count == 1 + + +@pytest.mark.asyncio +async def test_async_token_manager_raises_on_auth_failure() -> None: + with respx.mock: + respx.post(TOKEN_URL).mock(return_value=httpx.Response( + 401, json={"error": "invalid_client", "error_description": "Bad creds."} + )) + tm = AsyncTokenManager(BASE_URL, "client-id", "bad-secret", "agents:read") + with pytest.raises(AgentIdPError) as exc_info: + await tm.get_token() + assert exc_info.value.code == "invalid_client" diff --git a/sdk-python/tests/test_types.py b/sdk-python/tests/test_types.py new file mode 100644 index 0000000..ce66ea7 --- /dev/null +++ b/sdk-python/tests/test_types.py @@ -0,0 +1,133 @@ +"""Tests for dataclass deserialisation in types.py.""" + +from sentryagent_idp.types import ( + Agent, + Credential, + CredentialWithSecret, + PaginatedAgents, + PaginatedCredentials, + TokenResponse, + IntrospectResponse, + AuditEvent, + PaginatedAuditEvents, + RegisterAgentRequest, + UpdateAgentRequest, +) + + +AGENT_DICT = { + "agentId": "uuid-1", + "email": "a@b.ai", + "agentType": "screener", + "version": "1.0.0", + "capabilities": ["read"], + "owner": "team", + "deploymentEnv": "production", + "status": "active", + "createdAt": "2026-01-01T00:00:00Z", + "updatedAt": "2026-01-02T00:00:00Z", +} + +CREDENTIAL_DICT = { + "credentialId": "cred-1", + "clientId": "uuid-1", + "status": "active", + "createdAt": "2026-01-01T00:00:00Z", + "expiresAt": None, + "revokedAt": None, +} + + +def test_agent_from_dict() -> None: + agent = Agent.from_dict(AGENT_DICT) + assert agent.agent_id == "uuid-1" + assert agent.agent_type == "screener" + assert agent.capabilities == ["read"] + + +def test_register_agent_request_to_dict() -> None: + req = RegisterAgentRequest( + email="a@b.ai", + agent_type="classifier", + version="1.0.0", + capabilities=["read"], + owner="team", + deployment_env="production", + ) + d = req.to_dict() + assert d["agentType"] == "classifier" + assert d["deploymentEnv"] == "production" + + +def test_update_agent_request_omits_none() -> None: + req = UpdateAgentRequest(version="2.0.0") + d = req.to_dict() + assert "version" in d + assert "agentType" not in d + + +def test_paginated_agents_from_dict() -> None: + result = PaginatedAgents.from_dict({"data": [AGENT_DICT], "total": 1, "page": 1, "limit": 20}) + assert result.total == 1 + assert result.data[0].agent_id == "uuid-1" + + +def test_credential_from_dict() -> None: + cred = Credential.from_dict(CREDENTIAL_DICT) + assert cred.credential_id == "cred-1" + assert cred.expires_at is None + + +def test_credential_with_secret_from_dict() -> None: + d = {**CREDENTIAL_DICT, "clientSecret": "sk_live_abc"} + cred = CredentialWithSecret.from_dict(d) + assert cred.client_secret == "sk_live_abc" + assert cred.credential_id == "cred-1" + + +def test_paginated_credentials_from_dict() -> None: + result = PaginatedCredentials.from_dict( + {"data": [CREDENTIAL_DICT], "total": 1, "page": 1, "limit": 20} + ) + assert result.total == 1 + + +def test_token_response_from_dict() -> None: + tr = TokenResponse.from_dict( + {"access_token": "tok", "token_type": "Bearer", "expires_in": 3600, "scope": "agents:read"} + ) + assert tr.access_token == "tok" + assert tr.expires_in == 3600 + + +def test_introspect_response_active() -> None: + ir = IntrospectResponse.from_dict({"active": True, "sub": "uuid-1", "exp": 9999999999}) + assert ir.active is True + assert ir.sub == "uuid-1" + + +def test_introspect_response_inactive() -> None: + ir = IntrospectResponse.from_dict({"active": False}) + assert ir.active is False + assert ir.sub is None + + +def test_audit_event_from_dict() -> None: + ev = AuditEvent.from_dict({ + "eventId": "ev-1", "agentId": "uuid-1", "action": "token.issued", + "outcome": "success", "ipAddress": "1.2.3.4", "userAgent": "curl", + "metadata": {}, "timestamp": "2026-01-01T00:00:00Z", + }) + assert ev.event_id == "ev-1" + assert ev.action == "token.issued" + + +def test_paginated_audit_events_from_dict() -> None: + ev_dict = { + "eventId": "ev-1", "agentId": "uuid-1", "action": "token.issued", + "outcome": "success", "ipAddress": "1.2.3.4", "userAgent": "curl", + "metadata": {}, "timestamp": "2026-01-01T00:00:00Z", + } + result = PaginatedAuditEvents.from_dict({"data": [ev_dict], "total": 1, "page": 1, "limit": 20}) + assert result.total == 1 + assert result.data[0].event_id == "ev-1" diff --git a/src/app.ts b/src/app.ts index a6c9f6a..dfe51d1 100644 --- a/src/app.ts +++ b/src/app.ts @@ -31,9 +31,15 @@ import { createAgentsRouter } from './routes/agents.js'; import { createTokenRouter } from './routes/token.js'; import { createCredentialsRouter } from './routes/credentials.js'; import { createAuditRouter } from './routes/audit.js'; +import { createHealthRouter } from './routes/health.js'; +import { createMetricsRouter } from './routes/metrics.js'; import { errorHandler } from './middleware/errorHandler.js'; +import { createOpaMiddleware } from './middleware/opa.js'; +import { metricsMiddleware } from './middleware/metrics.js'; +import { createVaultClientFromEnv } from './vault/VaultClient.js'; import { RedisClientType } from 'redis'; +import path from 'path'; /** * Creates and returns a configured Express application. @@ -71,6 +77,11 @@ export async function createApp(): Promise { app.use(express.json()); app.use(express.urlencoded({ extended: false })); + // ──────────────────────────────────────────────────────────────── + // Prometheus HTTP metrics middleware — must be before all routes + // ──────────────────────────────────────────────────────────────── + app.use(metricsMiddleware); + // ──────────────────────────────────────────────────────────────── // Infrastructure singletons // ──────────────────────────────────────────────────────────────── @@ -86,12 +97,22 @@ export async function createApp(): Promise { const tokenRepo = new TokenRepository(pool, redis as RedisClientType); const auditRepo = new AuditRepository(pool); + // ──────────────────────────────────────────────────────────────── + // Optional integrations + // ──────────────────────────────────────────────────────────────── + // Vault is optional. When VAULT_ADDR + VAULT_TOKEN are set, new credentials + // are stored in Vault KV v2. When not set, bcrypt is used (Phase 1 behaviour). + const vaultClient = createVaultClientFromEnv(); + if (vaultClient !== null) { + console.log('[AgentIdP] Vault integration enabled — new credentials will use Vault KV v2'); + } + // ──────────────────────────────────────────────────────────────── // Service layer // ──────────────────────────────────────────────────────────────── const auditService = new AuditService(auditRepo); const agentService = new AgentService(agentRepo, credentialRepo, auditService); - const credentialService = new CredentialService(credentialRepo, agentRepo, auditService); + const credentialService = new CredentialService(credentialRepo, agentRepo, auditService, vaultClient); const privateKey = process.env['JWT_PRIVATE_KEY']; const publicKey = process.env['JWT_PUBLIC_KEY']; @@ -106,8 +127,14 @@ export async function createApp(): Promise { auditService, privateKey, publicKey, + vaultClient, ); + // ──────────────────────────────────────────────────────────────── + // OPA authorization middleware (created once — shared across all routers) + // ──────────────────────────────────────────────────────────────── + const opaMiddleware = await createOpaMiddleware(); + // ──────────────────────────────────────────────────────────────── // Controller layer // ──────────────────────────────────────────────────────────────── @@ -121,13 +148,33 @@ export async function createApp(): Promise { // ──────────────────────────────────────────────────────────────── const API_BASE = '/api/v1'; - app.use(`${API_BASE}/agents`, createAgentsRouter(agentController)); + // Health check — unauthenticated, no OPA + app.use('/health', createHealthRouter(pool, redis as RedisClientType)); + + // Prometheus metrics — unauthenticated, internal scraping only + app.use('/metrics', createMetricsRouter()); + + app.use(`${API_BASE}/agents`, createAgentsRouter(agentController, opaMiddleware)); app.use( `${API_BASE}/agents/:agentId/credentials`, - createCredentialsRouter(credentialController), + createCredentialsRouter(credentialController, opaMiddleware), ); - app.use(`${API_BASE}/token`, createTokenRouter(tokenController)); - app.use(`${API_BASE}/audit`, createAuditRouter(auditController)); + app.use(`${API_BASE}/token`, createTokenRouter(tokenController, opaMiddleware)); + app.use(`${API_BASE}/audit`, createAuditRouter(auditController, opaMiddleware)); + + // ──────────────────────────────────────────────────────────────── + // Dashboard static assets (served from dashboard/dist/) + // Placed after API routes so API routes take precedence. + // __dirname is available because the project compiles to CommonJS. + // ──────────────────────────────────────────────────────────────── + const dashboardDist = path.resolve(__dirname, '../../dashboard/dist'); + + app.use('/dashboard', express.static(dashboardDist)); + + // SPA fallback — serve index.html for all /dashboard/* routes not matching a static file + app.get('/dashboard/*', (_req, res) => { + res.sendFile(path.join(dashboardDist, 'index.html')); + }); // ──────────────────────────────────────────────────────────────── // Global error handler (must be last) diff --git a/src/cache/redis.ts b/src/cache/redis.ts index 2be7d6c..184a25d 100644 --- a/src/cache/redis.ts +++ b/src/cache/redis.ts @@ -4,6 +4,31 @@ */ import { createClient, RedisClientType } from 'redis'; +import { redisCommandDurationSeconds } from '../metrics/registry.js'; + +/** + * Wraps a Redis client method to record its duration in Prometheus. + * The cast to `T` is safe: the wrapper is async with identical parameters and + * resolves to the same value. TypeScript cannot infer this through the generic + * constraint alone, so we assert the type explicitly. + * + * @param fn - The bound Redis method to wrap. + * @param command - The command label used in the Prometheus histogram. + * @returns The wrapped method with identical signature. + */ +function instrumentRedisMethod( + fn: (...args: TArgs) => Promise, + command: string, +): (...args: TArgs) => Promise { + return async (...args: TArgs): Promise => { + const end = redisCommandDurationSeconds.startTimer({ command }); + try { + return await fn(...args); + } finally { + end(); + } + }; +} let redisClient: RedisClientType | null = null; @@ -29,6 +54,13 @@ export async function getRedisClient(): Promise { }); await redisClient.connect(); + + // Wrap high-frequency commands to record durations in Prometheus + redisClient.get = instrumentRedisMethod(redisClient.get.bind(redisClient), 'get'); + redisClient.set = instrumentRedisMethod(redisClient.set.bind(redisClient), 'set'); + redisClient.incr = instrumentRedisMethod(redisClient.incr.bind(redisClient), 'incr'); + redisClient.expire = instrumentRedisMethod(redisClient.expire.bind(redisClient), 'expire'); + redisClient.ping = instrumentRedisMethod(redisClient.ping.bind(redisClient), 'ping'); } return redisClient; } diff --git a/src/controllers/AuditController.ts b/src/controllers/AuditController.ts index b7a2c85..f5c252c 100644 --- a/src/controllers/AuditController.ts +++ b/src/controllers/AuditController.ts @@ -9,13 +9,12 @@ import { auditQuerySchema } from '../utils/validators.js'; import { ValidationError, AuthenticationError, - InsufficientScopeError, } from '../utils/errors.js'; import { IAuditListFilters } from '../types/index.js'; /** * Controller for the Audit Log endpoints. - * Enforces `audit:read` scope on all handlers. + * Authorization is enforced by OPA middleware — no per-handler scope checks required. */ export class AuditController { /** @@ -37,12 +36,6 @@ export class AuditController { throw new AuthenticationError(); } - // Enforce audit:read scope - const scopes = req.user.scope.split(' '); - if (!scopes.includes('audit:read')) { - throw new InsufficientScopeError('audit:read'); - } - // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment const { error, value } = auditQuerySchema.validate(req.query, { abortEarly: false }); if (error) { @@ -84,12 +77,6 @@ export class AuditController { throw new AuthenticationError(); } - // Enforce audit:read scope - const scopes = req.user.scope.split(' '); - if (!scopes.includes('audit:read')) { - throw new InsufficientScopeError('audit:read'); - } - const { eventId } = req.params; const event = await this.auditService.getEventById(eventId); res.status(200).json(event); diff --git a/src/db/migrations/005_add_vault_path.sql b/src/db/migrations/005_add_vault_path.sql new file mode 100644 index 0000000..9e5b0a9 --- /dev/null +++ b/src/db/migrations/005_add_vault_path.sql @@ -0,0 +1,19 @@ +-- Migration 005: Add vault_path column to credentials table +-- Phase 2 — HashiCorp Vault integration +-- +-- New credentials generated after this migration will have their secrets stored +-- in HashiCorp Vault KV v2. The vault_path column stores the Vault KV path +-- (e.g. secret/data/agentidp/agents/{agentId}/credentials/{credentialId}). +-- +-- Coexistence strategy: +-- - Rows with vault_path IS NOT NULL → secret verified via Vault +-- - Rows with vault_path IS NULL → secret verified via secret_hash (bcrypt, Phase 1) +-- +-- The secret_hash column is retained for backwards compatibility. +-- Existing credentials continue to work until rotated through the new Vault path. + +ALTER TABLE credentials + ADD COLUMN IF NOT EXISTS vault_path TEXT DEFAULT NULL; + +COMMENT ON COLUMN credentials.vault_path IS + 'Vault KV v2 data path for this credential secret. NULL = bcrypt (Phase 1).'; diff --git a/src/db/pool.ts b/src/db/pool.ts index abffcf3..665517f 100644 --- a/src/db/pool.ts +++ b/src/db/pool.ts @@ -4,6 +4,7 @@ */ import { Pool } from 'pg'; +import { dbQueryDurationSeconds } from '../metrics/registry.js'; let pool: Pool | null = null; @@ -26,6 +27,24 @@ export function getPool(): Pool { // eslint-disable-next-line no-console console.error('Unexpected pg pool error', err); }); + + // Wrap pool.query to record duration in Prometheus. + // The pg Pool.query method is heavily overloaded — the only safe approach + // without TypeScript errors is a typed-any wrapper on the shim itself. + // We capture originalQuery as `(...args: any[]) => Promise` to satisfy + // TypeScript's spread-into-rest constraint; this is the one sanctioned use of + // `any` in this file. + // eslint-disable-next-line @typescript-eslint/no-explicit-any + const originalQuery = pool.query.bind(pool) as (...args: any[]) => Promise; + // eslint-disable-next-line @typescript-eslint/no-explicit-any + (pool as any).query = async (...args: any[]): Promise => { + const end = dbQueryDurationSeconds.startTimer({ operation: 'query' }); + try { + return await originalQuery(...args); + } finally { + end(); + } + }; } return pool; } diff --git a/src/metrics/registry.ts b/src/metrics/registry.ts new file mode 100644 index 0000000..8f97bcc --- /dev/null +++ b/src/metrics/registry.ts @@ -0,0 +1,79 @@ +/** + * Shared Prometheus metrics registry for SentryAgent.ai AgentIdP. + * All 7 metric definitions live here. Import specific metrics in the files that use them. + * This is the ONLY file that defines metrics — all other files import from here. + */ + +import { Registry, Counter, Histogram } from 'prom-client'; + +/** Shared registry — do NOT use the default global registry (conflicts with tests). */ +export const metricsRegistry = new Registry(); + +/** + * Total number of OAuth 2.0 tokens successfully issued. + * Labels: scope (space-separated scope string) + */ +export const tokensIssuedTotal = new Counter({ + name: 'agentidp_tokens_issued_total', + help: 'Total number of OAuth 2.0 access tokens issued successfully.', + labelNames: ['scope'] as const, + registers: [metricsRegistry], +}); + +/** + * Total number of agents successfully registered. + * Labels: deployment_env + */ +export const agentsRegisteredTotal = new Counter({ + name: 'agentidp_agents_registered_total', + help: 'Total number of AI agents registered successfully.', + labelNames: ['deployment_env'] as const, + registers: [metricsRegistry], +}); + +/** + * Total HTTP requests received. + * Labels: method, route (normalised path), status_code + */ +export const httpRequestsTotal = new Counter({ + name: 'agentidp_http_requests_total', + help: 'Total number of HTTP requests received.', + labelNames: ['method', 'route', 'status_code'] as const, + registers: [metricsRegistry], +}); + +/** + * HTTP request duration in seconds. + * Labels: method, route, status_code + */ +export const httpRequestDurationSeconds = new Histogram({ + name: 'agentidp_http_request_duration_seconds', + help: 'HTTP request duration in seconds.', + labelNames: ['method', 'route', 'status_code'] as const, + buckets: [0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1, 2.5], + registers: [metricsRegistry], +}); + +/** + * PostgreSQL query duration in seconds. + * Labels: operation (query/connect) + */ +export const dbQueryDurationSeconds = new Histogram({ + name: 'agentidp_db_query_duration_seconds', + help: 'PostgreSQL query duration in seconds.', + labelNames: ['operation'] as const, + buckets: [0.001, 0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1], + registers: [metricsRegistry], +}); + +/** + * Redis command duration in seconds. + * Labels: command (get/set/incr/expire/ping/etc.) + */ +export const redisCommandDurationSeconds = new Histogram({ + name: 'agentidp_redis_command_duration_seconds', + help: 'Redis command duration in seconds.', + labelNames: ['command'] as const, + buckets: [0.0005, 0.001, 0.005, 0.01, 0.025, 0.05, 0.1, 0.25], + registers: [metricsRegistry], +}); diff --git a/src/middleware/metrics.ts b/src/middleware/metrics.ts new file mode 100644 index 0000000..f93bb38 --- /dev/null +++ b/src/middleware/metrics.ts @@ -0,0 +1,51 @@ +/** + * Prometheus HTTP metrics middleware for SentryAgent.ai AgentIdP. + * Records request count and duration for every HTTP request. + */ +import { Request, Response, NextFunction } from 'express'; +import { httpRequestsTotal, httpRequestDurationSeconds } from '../metrics/registry.js'; + +/** + * Normalises an Express request path to a stable route label. + * Replaces UUIDs and numeric IDs with ':id' to avoid high cardinality. + * + * @param req - The Express request object. + * @returns A normalised route string. + */ +function normalisePath(req: Request): string { + // Use matched route pattern if available (most accurate) + const route = req.route?.path as string | undefined; + if (route) { + return `${req.baseUrl}${route}`; + } + // Fall back to original URL stripped of query, with UUIDs replaced + return req.path.replace( + /[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}/gi, + ':id', + ); +} + +/** + * Express middleware that records Prometheus HTTP metrics for every request. + * Must be registered BEFORE routes in app.ts. + * + * @param req - Express request. + * @param res - Express response. + * @param next - Express next function. + */ +export function metricsMiddleware(req: Request, res: Response, next: NextFunction): void { + const startTime = Date.now(); + + res.on('finish', () => { + const route = normalisePath(req); + const labels = { + method: req.method, + route, + status_code: String(res.statusCode), + }; + httpRequestsTotal.inc(labels); + httpRequestDurationSeconds.observe(labels, (Date.now() - startTime) / 1000); + }); + + next(); +} diff --git a/src/middleware/opa.ts b/src/middleware/opa.ts new file mode 100644 index 0000000..4e6ab3b --- /dev/null +++ b/src/middleware/opa.ts @@ -0,0 +1,279 @@ +/** + * OPA (Open Policy Agent) authorization middleware for SentryAgent.ai AgentIdP. + * + * Primary path — Wasm bundle: When `${POLICY_DIR}/authz.wasm` exists, the policy is + * evaluated using `@open-policy-agent/opa-wasm`. The bundle is loaded once at startup + * and can be hot-reloaded via `reloadOpaPolicy()`. + * + * Fallback path — scopes.json: When no Wasm bundle is present (dev/test), the policy + * is evaluated entirely in TypeScript by reading `${POLICY_DIR}/data/scopes.json` and + * applying the same normalisation + scope-intersection logic as `policies/authz.rego`. + */ + +import fs from 'fs'; +import path from 'path'; +import { RequestHandler } from 'express'; +import { loadPolicy, LoadedPolicy } from '@open-policy-agent/opa-wasm'; +import { AuthorizationError } from '../utils/errors.js'; + +// ──────────────────────────────────────────────────────────────────────────── +// Types +// ──────────────────────────────────────────────────────────────────────────── + +/** Input shape sent to the OPA policy for every authorization check. */ +interface OpaInput { + /** HTTP method in uppercase — e.g. "GET". */ + method: string; + /** Full request path — e.g. "/api/v1/agents/abc-123". */ + path: string; + /** Scopes extracted from the caller's JWT — e.g. ["agents:read"]. */ + scopes: string[]; +} + +/** Expected shape of the OPA Wasm result set entry. */ +interface OpaResultEntry { + result?: { + allow?: boolean; + }; +} + +/** + * Shape of `policies/data/scopes.json`. + * Keys are `"METHOD:/normalised/path"`, values are arrays of required scope strings. + */ +interface ScopesJson { + endpoint_permissions: Record; +} + +// ──────────────────────────────────────────────────────────────────────────── +// Module-level singletons (replaced on hot-reload) +// ──────────────────────────────────────────────────────────────────────────── + +/** + * Resolved base directory for policy files. + * Defaults to `/policies`; override via `POLICY_DIR` environment variable. + */ +const POLICY_DIR: string = + process.env['POLICY_DIR'] ?? path.resolve(process.cwd(), 'policies'); + +const WASM_PATH = path.join(POLICY_DIR, 'authz.wasm'); +const SCOPES_PATH = path.join(POLICY_DIR, 'data', 'scopes.json'); + +/** Active Wasm policy instance — null when running in fallback (scopes.json) mode. */ +let wasmPolicy: LoadedPolicy | null = null; + +/** Fallback scope map — null when running in Wasm mode. */ +let scopesMap: Record | null = null; + +// ──────────────────────────────────────────────────────────────────────────── +// Path normalisation (mirrors `normalise_path` in authz.rego) +// ──────────────────────────────────────────────────────────────────────────── + +/** + * Normalises a concrete request path to the pattern key used in `scopes.json`. + * The priority ordering mirrors the longest-match ordering in `authz.rego`. + * + * @param requestPath - Raw request path from `req.path`. + * @returns Normalised pattern string, or the original path if no pattern matches. + */ +function normalisePath(requestPath: string): string { + // /api/v1/agents/:id/credentials/:credId/rotate (longest — checked first) + if (/^\/api\/v1\/agents\/[^/]+\/credentials\/[^/]+\/rotate$/.test(requestPath)) { + return '/api/v1/agents/:id/credentials/:credId/rotate'; + } + // /api/v1/agents/:id/credentials/:credId + if (/^\/api\/v1\/agents\/[^/]+\/credentials\/[^/]+$/.test(requestPath)) { + return '/api/v1/agents/:id/credentials/:credId'; + } + // /api/v1/agents/:id/credentials + if (/^\/api\/v1\/agents\/[^/]+\/credentials$/.test(requestPath)) { + return '/api/v1/agents/:id/credentials'; + } + // /api/v1/agents/:id + if (/^\/api\/v1\/agents\/[^/]+$/.test(requestPath)) { + return '/api/v1/agents/:id'; + } + // Static paths — returned as-is + if ( + requestPath === '/api/v1/agents' || + requestPath === '/api/v1/token/introspect' || + requestPath === '/api/v1/token/revoke' || + requestPath === '/api/v1/audit' + ) { + return requestPath; + } + // /api/v1/audit/:id + if (/^\/api\/v1\/audit\/[^/]+$/.test(requestPath)) { + return '/api/v1/audit/:id'; + } + // Unknown path — return as-is; the policy will produce no match → deny + return requestPath; +} + +// ──────────────────────────────────────────────────────────────────────────── +// Policy loading helpers +// ──────────────────────────────────────────────────────────────────────────── + +/** + * Attempts to load the Wasm bundle from disk. + * Returns `true` if successful, `false` if the file does not exist. + * + * @returns Whether the Wasm bundle was loaded. + */ +async function loadWasmPolicy(): Promise { + if (!fs.existsSync(WASM_PATH)) { + return false; + } + + const wasmBuffer = fs.readFileSync(WASM_PATH); + const loaded = await loadPolicy(wasmBuffer); + + // Load the scopes data so the Wasm policy has access to endpoint_permissions + if (fs.existsSync(SCOPES_PATH)) { + const raw = fs.readFileSync(SCOPES_PATH, 'utf-8'); + const parsed = JSON.parse(raw) as ScopesJson; + loaded.setData(parsed); + } + + wasmPolicy = loaded; + scopesMap = null; + return true; +} + +/** + * Loads the fallback `scopes.json` into memory. + * Called when no Wasm bundle is present. + */ +function loadScopesFallback(): void { + const raw = fs.readFileSync(SCOPES_PATH, 'utf-8'); + const parsed = JSON.parse(raw) as ScopesJson; + scopesMap = parsed.endpoint_permissions; + wasmPolicy = null; +} + +// ──────────────────────────────────────────────────────────────────────────── +// Authorization evaluation +// ──────────────────────────────────────────────────────────────────────────── + +/** + * Evaluates the OPA policy against the given input. + * + * Uses the Wasm bundle when available; falls back to TypeScript scope-map logic. + * + * @param input - The authorization input (method, path, scopes). + * @returns `true` if the policy allows the request; `false` otherwise. + */ +function evaluate(input: OpaInput): boolean { + if (wasmPolicy !== null) { + // Wasm path: evaluate and extract `allow` from the result set + const resultSet = wasmPolicy.evaluate(input) as OpaResultEntry[]; + if (!Array.isArray(resultSet) || resultSet.length === 0) { + return false; + } + return resultSet[0]?.result?.allow === true; + } + + if (scopesMap !== null) { + // Fallback path: replicate authz.rego logic in TypeScript + const normPath = normalisePath(input.path); + const lookupKey = `${input.method}:${normPath}`; + const required = scopesMap[lookupKey]; + + // If no entry exists for this endpoint the policy denies + if (!required) { + return false; + } + + return required.every((s) => input.scopes.includes(s)); + } + + // Neither policy loaded — deny by default (fail-closed) + return false; +} + +// ──────────────────────────────────────────────────────────────────────────── +// Public API +// ──────────────────────────────────────────────────────────────────────────── + +/** + * Creates the OPA authorization middleware. + * + * Call once at application startup. The returned `RequestHandler` can be wired + * into any Express router after the authentication middleware. + * + * Startup order: + * 1. Try to load `${POLICY_DIR}/authz.wasm` (primary). + * 2. If the Wasm bundle is absent, load `${POLICY_DIR}/data/scopes.json` (fallback). + * + * @returns Promise resolving to an Express `RequestHandler` for OPA authorization. + * @throws Error if neither the Wasm bundle nor `scopes.json` can be loaded. + */ +export async function createOpaMiddleware(): Promise { + const wasmLoaded = await loadWasmPolicy(); + + if (wasmLoaded) { + console.log('[AgentIdP] OPA policy engine: Wasm mode — loaded', WASM_PATH); + } else { + loadScopesFallback(); + console.log('[AgentIdP] OPA policy engine: fallback mode — loaded', SCOPES_PATH); + } + + /** + * Express middleware that authorises the current request against the OPA policy. + * + * Prerequisites: `authMiddleware` must have run first to populate `req.user`. + * + * @param req - Express request (must have `req.user` populated by auth middleware). + * @param _res - Express response (unused). + * @param next - Express next function. + */ + const handler: RequestHandler = (req, _res, next): void => { + try { + if (!req.user) { + // Auth middleware should have already rejected unauthenticated requests; + // this is a safeguard against misconfigured middleware ordering. + next(new AuthorizationError('Request is not authenticated.')); + return; + } + + const input: OpaInput = { + method: req.method, + // Use baseUrl + path to reconstruct the full path (req.path is relative + // to the router's mount point; OPA policy patterns match full paths). + path: req.baseUrl + req.path, + scopes: req.user.scope.split(' '), + }; + + if (!evaluate(input)) { + next(new AuthorizationError()); + return; + } + + next(); + } catch (err) { + next(err); + } + }; + + return handler; +} + +/** + * Reloads the OPA policy from disk without restarting the server. + * + * Intended to be called from a SIGHUP handler. The reload strategy mirrors + * startup: Wasm bundle is preferred; `scopes.json` is used as fallback. + * + * @returns Promise that resolves when the policy has been reloaded. + * @throws Error if the reload fails (e.g. file is missing or malformed). + */ +export async function reloadOpaPolicy(): Promise { + const wasmLoaded = await loadWasmPolicy(); + + if (wasmLoaded) { + console.log('[AgentIdP] OPA policy reloaded: Wasm mode —', WASM_PATH); + } else { + loadScopesFallback(); + console.log('[AgentIdP] OPA policy reloaded: fallback mode —', SCOPES_PATH); + } +} diff --git a/src/repositories/CredentialRepository.ts b/src/repositories/CredentialRepository.ts index 289a56c..8c26833 100644 --- a/src/repositories/CredentialRepository.ts +++ b/src/repositories/CredentialRepository.ts @@ -12,6 +12,7 @@ interface CredentialDbRow { credential_id: string; client_id: string; secret_hash: string; + vault_path: string | null; status: string; created_at: Date; expires_at: Date | null; @@ -29,6 +30,7 @@ function mapRowToCredentialRow(row: CredentialDbRow): ICredentialRow { credentialId: row.credential_id, clientId: row.client_id, secretHash: row.secret_hash, + vaultPath: row.vault_path ?? null, status: row.status as ICredential['status'], createdAt: row.created_at, expiresAt: row.expires_at, @@ -59,7 +61,7 @@ export class CredentialRepository { constructor(private readonly pool: Pool) {} /** - * Creates a new credential record. + * Creates a new credential record using bcrypt secret hash (Phase 1 / Vault-not-configured). * * @param clientId - The agent ID this credential belongs to. * @param secretHash - The bcrypt hash of the plain-text secret. @@ -74,14 +76,40 @@ export class CredentialRepository { const credentialId = uuidv4(); const result: QueryResult = await this.pool.query( `INSERT INTO credentials - (credential_id, client_id, secret_hash, status, created_at, expires_at) - VALUES ($1, $2, $3, 'active', NOW(), $4) + (credential_id, client_id, secret_hash, vault_path, status, created_at, expires_at) + VALUES ($1, $2, $3, NULL, 'active', NOW(), $4) RETURNING *`, [credentialId, clientId, secretHash, expiresAt], ); return mapRowToCredential(result.rows[0]); } + /** + * Creates a new credential record backed by Vault (Phase 2). + * Accepts a caller-supplied credentialId so the Vault path can include it before the DB write. + * + * @param credentialId - The UUID to use for this credential (caller-generated). + * @param clientId - The agent ID this credential belongs to. + * @param vaultPath - The Vault KV v2 data path where the secret is stored. + * @param expiresAt - Optional expiry date. + * @returns The created credential record. + */ + async createWithVaultPath( + credentialId: string, + clientId: string, + vaultPath: string, + expiresAt: Date | null, + ): Promise { + const result: QueryResult = await this.pool.query( + `INSERT INTO credentials + (credential_id, client_id, secret_hash, vault_path, status, created_at, expires_at) + VALUES ($1, $2, '', $3, 'active', NOW(), $4) + RETURNING *`, + [credentialId, clientId, vaultPath, expiresAt], + ); + return mapRowToCredential(result.rows[0]); + } + /** * Finds a credential by its UUID, including the secret hash. * @@ -142,7 +170,7 @@ export class CredentialRepository { } /** - * Updates the bcrypt hash for an existing credential (rotation). + * Updates the bcrypt hash for an existing credential (rotation, bcrypt path). * * @param credentialId - The credential UUID. * @param newSecretHash - The new bcrypt hash. @@ -156,7 +184,7 @@ export class CredentialRepository { ): Promise { const result: QueryResult = await this.pool.query( `UPDATE credentials - SET secret_hash = $1, expires_at = $2, status = 'active', revoked_at = NULL + SET secret_hash = $1, vault_path = NULL, expires_at = $2, status = 'active', revoked_at = NULL WHERE credential_id = $3 RETURNING *`, [newSecretHash, newExpiresAt, credentialId], @@ -165,6 +193,30 @@ export class CredentialRepository { return mapRowToCredential(result.rows[0]); } + /** + * Updates the vault_path for an existing credential (rotation, Vault path). + * + * @param credentialId - The credential UUID. + * @param newVaultPath - The new Vault KV v2 data path. + * @param newExpiresAt - Optional new expiry date. + * @returns The updated credential record, or null if not found. + */ + async updateVaultPath( + credentialId: string, + newVaultPath: string, + newExpiresAt: Date | null, + ): Promise { + const result: QueryResult = await this.pool.query( + `UPDATE credentials + SET vault_path = $1, secret_hash = '', expires_at = $2, status = 'active', revoked_at = NULL + WHERE credential_id = $3 + RETURNING *`, + [newVaultPath, newExpiresAt, credentialId], + ); + if (result.rows.length === 0) return null; + return mapRowToCredential(result.rows[0]); + } + /** * Sets a credential's status to 'revoked'. * diff --git a/src/routes/agents.ts b/src/routes/agents.ts index ec647dd..7f10081 100644 --- a/src/routes/agents.ts +++ b/src/routes/agents.ts @@ -1,9 +1,9 @@ /** * Agent Registry routes for SentryAgent.ai AgentIdP. - * Wires AgentController handlers to Express paths with auth and rateLimit middleware. + * Wires AgentController handlers to Express paths with auth, OPA, and rateLimit middleware. */ -import { Router } from 'express'; +import { Router, RequestHandler } from 'express'; import { AgentController } from '../controllers/AgentController.js'; import { authMiddleware } from '../middleware/auth.js'; import { rateLimitMiddleware } from '../middleware/rateLimit.js'; @@ -13,12 +13,14 @@ import { asyncHandler } from '../utils/asyncHandler.js'; * Creates and returns the Express router for agent registry endpoints. * * @param agentController - The agent controller instance. + * @param opaMiddleware - The OPA authorization middleware created at startup. * @returns Configured Express router. */ -export function createAgentsRouter(agentController: AgentController): Router { +export function createAgentsRouter(agentController: AgentController, opaMiddleware: RequestHandler): Router { const router = Router(); router.use(asyncHandler(authMiddleware)); + router.use(opaMiddleware); router.use(asyncHandler(rateLimitMiddleware)); // POST /agents — Register a new agent diff --git a/src/routes/audit.ts b/src/routes/audit.ts index 47ba6a6..49b6fc9 100644 --- a/src/routes/audit.ts +++ b/src/routes/audit.ts @@ -1,9 +1,9 @@ /** * Audit Log routes for SentryAgent.ai AgentIdP. - * All routes require Bearer auth and are rate-limited. + * All routes require Bearer auth, OPA authorization, and are rate-limited. */ -import { Router } from 'express'; +import { Router, RequestHandler } from 'express'; import { AuditController } from '../controllers/AuditController.js'; import { authMiddleware } from '../middleware/auth.js'; import { rateLimitMiddleware } from '../middleware/rateLimit.js'; @@ -13,12 +13,14 @@ import { asyncHandler } from '../utils/asyncHandler.js'; * Creates and returns the Express router for audit log endpoints. * * @param auditController - The audit controller instance. + * @param opaMiddleware - The OPA authorization middleware created at startup. * @returns Configured Express router. */ -export function createAuditRouter(auditController: AuditController): Router { +export function createAuditRouter(auditController: AuditController, opaMiddleware: RequestHandler): Router { const router = Router(); router.use(asyncHandler(authMiddleware)); + router.use(opaMiddleware); router.use(asyncHandler(rateLimitMiddleware)); // GET /audit — Query audit log diff --git a/src/routes/credentials.ts b/src/routes/credentials.ts index d61c736..ec9c61f 100644 --- a/src/routes/credentials.ts +++ b/src/routes/credentials.ts @@ -1,9 +1,9 @@ /** * Credential Management routes for SentryAgent.ai AgentIdP. - * All routes are under /agents/:agentId/credentials with auth and rateLimit middleware. + * All routes are under /agents/:agentId/credentials with auth, OPA, and rateLimit middleware. */ -import { Router } from 'express'; +import { Router, RequestHandler } from 'express'; import { CredentialController } from '../controllers/CredentialController.js'; import { authMiddleware } from '../middleware/auth.js'; import { rateLimitMiddleware } from '../middleware/rateLimit.js'; @@ -14,12 +14,14 @@ import { asyncHandler } from '../utils/asyncHandler.js'; * This router is mounted at /agents — the :agentId param is part of the path. * * @param credentialController - The credential controller instance. + * @param opaMiddleware - The OPA authorization middleware created at startup. * @returns Configured Express router. */ -export function createCredentialsRouter(credentialController: CredentialController): Router { +export function createCredentialsRouter(credentialController: CredentialController, opaMiddleware: RequestHandler): Router { const router = Router({ mergeParams: true }); router.use(asyncHandler(authMiddleware)); + router.use(opaMiddleware); router.use(asyncHandler(rateLimitMiddleware)); // POST /agents/:agentId/credentials — Generate new credentials diff --git a/src/routes/health.ts b/src/routes/health.ts new file mode 100644 index 0000000..3310f35 --- /dev/null +++ b/src/routes/health.ts @@ -0,0 +1,79 @@ +/** + * Health check route for SentryAgent.ai AgentIdP. + * Returns connectivity status for PostgreSQL and Redis. + * Unauthenticated — safe to call from monitoring systems and the dashboard. + */ + +import { Router, Request, Response } from 'express'; +import { Pool } from 'pg'; +import { RedisClientType } from 'redis'; + +/** Response shape for GET /health */ +interface HealthResponse { + status: 'ok' | 'degraded'; + version: string; + uptime: number; + services: { + postgres: 'connected' | 'disconnected'; + redis: 'connected' | 'disconnected'; + }; +} + +/** + * Creates and returns the Express router for the health endpoint. + * + * @param pool - PostgreSQL connection pool. + * @param redis - Redis client instance. + * @returns Configured Express router. + */ +export function createHealthRouter(pool: Pool, redis: RedisClientType): Router { + const router = Router(); + + /** + * GET /health + * Returns 200 when all services are healthy, 503 when any are degraded. + */ + router.get('/', (_req: Request, res: Response): void => { + const check = async (): Promise => { + let postgresStatus: 'connected' | 'disconnected' = 'disconnected'; + let redisStatus: 'connected' | 'disconnected' = 'disconnected'; + + // Check PostgreSQL + try { + const client = await pool.connect(); + await client.query('SELECT 1'); + client.release(); + postgresStatus = 'connected'; + } catch { + postgresStatus = 'disconnected'; + } + + // Check Redis + try { + await redis.ping(); + redisStatus = 'connected'; + } catch { + redisStatus = 'disconnected'; + } + + const allHealthy = postgresStatus === 'connected' && redisStatus === 'connected'; + const httpStatus = allHealthy ? 200 : 503; + + const body: HealthResponse = { + status: allHealthy ? 'ok' : 'degraded', + version: process.env['npm_package_version'] ?? '1.0.0', + uptime: Math.floor(process.uptime()), + services: { + postgres: postgresStatus, + redis: redisStatus, + }, + }; + + res.status(httpStatus).json(body); + }; + + void check(); + }); + + return router; +} diff --git a/src/routes/metrics.ts b/src/routes/metrics.ts new file mode 100644 index 0000000..2dfad92 --- /dev/null +++ b/src/routes/metrics.ts @@ -0,0 +1,25 @@ +/** + * Prometheus metrics endpoint for SentryAgent.ai AgentIdP. + * Unauthenticated — intended for internal Prometheus scraping only. + * Do NOT expose this endpoint on a public-facing network interface. + */ +import { Router, Request, Response } from 'express'; +import { metricsRegistry } from '../metrics/registry.js'; + +/** + * Creates and returns the Express router for the Prometheus metrics endpoint. + * Returns metrics in Prometheus text exposition format. + * + * @returns Configured Express router. + */ +export function createMetricsRouter(): Router { + const router = Router(); + + router.get('/', async (_req: Request, res: Response): Promise => { + const metrics = await metricsRegistry.metrics(); + res.set('Content-Type', metricsRegistry.contentType); + res.end(metrics); + }); + + return router; +} diff --git a/src/routes/token.ts b/src/routes/token.ts index e36d28a..fca0509 100644 --- a/src/routes/token.ts +++ b/src/routes/token.ts @@ -1,10 +1,10 @@ /** * OAuth 2.0 Token routes for SentryAgent.ai AgentIdP. * POST /token uses no Bearer auth (credentials are in the body). - * POST /token/introspect and POST /token/revoke require Bearer auth. + * POST /token/introspect and POST /token/revoke require Bearer auth and OPA authorization. */ -import { Router } from 'express'; +import { Router, RequestHandler } from 'express'; import { TokenController } from '../controllers/TokenController.js'; import { authMiddleware } from '../middleware/auth.js'; import { rateLimitMiddleware } from '../middleware/rateLimit.js'; @@ -14,26 +14,29 @@ import { asyncHandler } from '../utils/asyncHandler.js'; * Creates and returns the Express router for token endpoints. * * @param tokenController - The token controller instance. + * @param opaMiddleware - The OPA authorization middleware created at startup. * @returns Configured Express router. */ -export function createTokenRouter(tokenController: TokenController): Router { +export function createTokenRouter(tokenController: TokenController, opaMiddleware: RequestHandler): Router { const router = Router(); // POST /token — Issue token (no auth — credentials in body or Basic header) router.post('/', asyncHandler(rateLimitMiddleware), asyncHandler(tokenController.issueToken.bind(tokenController))); - // POST /token/introspect — Introspect token (requires Bearer auth) + // POST /token/introspect — Introspect token (requires Bearer auth + OPA) router.post( '/introspect', asyncHandler(authMiddleware), + opaMiddleware, asyncHandler(rateLimitMiddleware), asyncHandler(tokenController.introspectToken.bind(tokenController)), ); - // POST /token/revoke — Revoke token (requires Bearer auth) + // POST /token/revoke — Revoke token (requires Bearer auth + OPA) router.post( '/revoke', asyncHandler(authMiddleware), + opaMiddleware, asyncHandler(rateLimitMiddleware), asyncHandler(tokenController.revokeToken.bind(tokenController)), ); diff --git a/src/server.ts b/src/server.ts index 32a73e9..14f6b8c 100644 --- a/src/server.ts +++ b/src/server.ts @@ -7,6 +7,7 @@ import * as dotenv from 'dotenv'; dotenv.config(); import { createApp } from './app.js'; +import { reloadOpaPolicy } from './middleware/opa.js'; const PORT = parseInt(process.env['PORT'] ?? '3000', 10); @@ -37,6 +38,14 @@ async function main(): Promise { process.on('SIGINT', () => { shutdown(); }); + + // Hot-reload OPA policy without restarting the server + process.on('SIGHUP', () => { + reloadOpaPolicy().catch((err) => { + // eslint-disable-next-line no-console + console.error('[AgentIdP] Failed to reload OPA policy:', err); + }); + }); } catch (err) { // eslint-disable-next-line no-console console.error('Failed to start server:', err); diff --git a/src/services/AgentService.ts b/src/services/AgentService.ts index a9602e7..a746cee 100644 --- a/src/services/AgentService.ts +++ b/src/services/AgentService.ts @@ -19,6 +19,7 @@ import { AgentAlreadyDecommissionedError, FreeTierLimitError, } from '../utils/errors.js'; +import { agentsRegisteredTotal } from '../metrics/registry.js'; const FREE_TIER_MAX_AGENTS = 100; @@ -81,6 +82,9 @@ export class AgentService { { agentType: agent.agentType, owner: agent.owner }, ); + // Instrument: count successful agent registrations + agentsRegisteredTotal.inc({ deployment_env: data.deploymentEnv }); + return agent; } diff --git a/src/services/CredentialService.ts b/src/services/CredentialService.ts index c0c6005..d147577 100644 --- a/src/services/CredentialService.ts +++ b/src/services/CredentialService.ts @@ -6,7 +6,9 @@ import { CredentialRepository } from '../repositories/CredentialRepository.js'; import { AgentRepository } from '../repositories/AgentRepository.js'; import { AuditService } from './AuditService.js'; +import { VaultClient } from '../vault/VaultClient.js'; import { + ICredential, ICredentialWithSecret, ICredentialListFilters, IPaginatedCredentialsResponse, @@ -19,6 +21,7 @@ import { CredentialError, } from '../utils/errors.js'; import { generateClientSecret, hashSecret } from '../utils/crypto.js'; +import { v4 as uuidv4 } from 'uuid'; /** * Service for credential lifecycle management. @@ -29,11 +32,14 @@ export class CredentialService { * @param credentialRepository - The credential data repository. * @param agentRepository - The agent repository (for status checks). * @param auditService - The audit log service. + * @param vaultClient - Optional VaultClient. When provided, new credentials are stored in Vault. + * When null, bcrypt is used (Phase 1 behaviour). */ constructor( private readonly credentialRepository: CredentialRepository, private readonly agentRepository: AgentRepository, private readonly auditService: AuditService, + private readonly vaultClient: VaultClient | null = null, ) {} /** @@ -70,9 +76,24 @@ export class CredentialService { const expiresAt = data.expiresAt !== undefined ? new Date(data.expiresAt) : null; const plainSecret = generateClientSecret(); - const secretHash = await hashSecret(plainSecret); - const credential = await this.credentialRepository.create(agentId, secretHash, expiresAt); + let credential: ICredential; + + if (this.vaultClient !== null) { + // Phase 2: generate the UUID first so the Vault path includes the real credentialId + const credentialId = uuidv4(); + const vaultPath = await this.vaultClient.writeSecret(agentId, credentialId, plainSecret); + credential = await this.credentialRepository.createWithVaultPath( + credentialId, + agentId, + vaultPath, + expiresAt, + ); + } else { + // Phase 1: bcrypt hash stored in PostgreSQL + const secretHash = await hashSecret(plainSecret); + credential = await this.credentialRepository.create(agentId, secretHash, expiresAt); + } await this.auditService.logEvent( agentId, @@ -158,9 +179,19 @@ export class CredentialService { const expiresAt = data.expiresAt !== undefined ? new Date(data.expiresAt) : null; const plainSecret = generateClientSecret(); - const newHash = await hashSecret(plainSecret); - const updated = await this.credentialRepository.updateHash(credentialId, newHash, expiresAt); + let updated: ICredential | null; + + if (this.vaultClient !== null) { + // Phase 2: overwrite the existing Vault secret (KV v2 creates a new version) + const vaultPath = await this.vaultClient.writeSecret(agentId, credentialId, plainSecret); + updated = await this.credentialRepository.updateVaultPath(credentialId, vaultPath, expiresAt); + } else { + // Phase 1 / migrating credential: use bcrypt + const newHash = await hashSecret(plainSecret); + updated = await this.credentialRepository.updateHash(credentialId, newHash, expiresAt); + } + if (!updated) { throw new CredentialNotFoundError(credentialId); } @@ -214,6 +245,11 @@ export class CredentialService { await this.credentialRepository.revoke(credentialId); + // Phase 2: permanently delete the secret from Vault + if (this.vaultClient !== null && existing.vaultPath !== null) { + await this.vaultClient.deleteSecret(agentId, credentialId); + } + await this.auditService.logEvent( agentId, 'credential.revoked', diff --git a/src/services/OAuth2Service.ts b/src/services/OAuth2Service.ts index f90ff41..6c024c0 100644 --- a/src/services/OAuth2Service.ts +++ b/src/services/OAuth2Service.ts @@ -7,6 +7,7 @@ import { TokenRepository } from '../repositories/TokenRepository.js'; import { CredentialRepository } from '../repositories/CredentialRepository.js'; import { AgentRepository } from '../repositories/AgentRepository.js'; import { AuditService } from './AuditService.js'; +import { VaultClient } from '../vault/VaultClient.js'; import { ITokenPayload, ITokenResponse, @@ -17,11 +18,11 @@ import { AuthenticationError, AuthorizationError, FreeTierLimitError, - InsufficientScopeError, } from '../utils/errors.js'; import { signToken, verifyToken, decodeToken, getTokenExpiresIn } from '../utils/jwt.js'; import { verifySecret } from '../utils/crypto.js'; import { v4 as uuidv4 } from 'uuid'; +import { tokensIssuedTotal } from '../metrics/registry.js'; const FREE_TIER_MAX_MONTHLY_TOKENS = 10000; @@ -44,6 +45,7 @@ export class OAuth2Service { * @param auditService - The audit log service. * @param privateKey - PEM-encoded RSA private key for signing tokens. * @param publicKey - PEM-encoded RSA public key for verifying tokens. + * @param vaultClient - Optional VaultClient for Phase 2 credential verification. */ constructor( private readonly tokenRepository: TokenRepository, @@ -52,6 +54,7 @@ export class OAuth2Service { private readonly auditService: AuditService, private readonly privateKey: string, private readonly publicKey: string, + private readonly vaultClient: VaultClient | null = null, ) {} /** @@ -101,12 +104,25 @@ export class OAuth2Service { for (const cred of credentials) { const credRow = await this.credentialRepository.findById(cred.credentialId); if (credRow) { - const matches = await verifySecret(clientSecret, credRow.secretHash); + // Check expiry before attempting secret verification + if (credRow.expiresAt !== null && credRow.expiresAt < new Date()) { + continue; + } + + let matches: boolean; + if (credRow.vaultPath !== null && this.vaultClient !== null) { + // Phase 2: verify against Vault-stored secret + matches = await this.vaultClient.verifySecret( + clientId, + credRow.credentialId, + clientSecret, + ); + } else { + // Phase 1: verify against bcrypt hash + matches = await verifySecret(clientSecret, credRow.secretHash); + } + if (matches) { - // Check if credential is expired - if (credRow.expiresAt !== null && credRow.expiresAt < new Date()) { - continue; - } credentialVerified = true; break; } @@ -187,6 +203,9 @@ export class OAuth2Service { { scope, expiresAt: expiresAtDate.toISOString() }, ); + // Instrument: count successful token issuances + tokensIssuedTotal.inc({ scope }); + return { access_token: accessToken, token_type: 'Bearer', @@ -198,14 +217,13 @@ export class OAuth2Service { /** * Introspects a token per RFC 7662. * Always returns 200; check the `active` field for validity. - * Requires the caller to hold a token with `tokens:read` scope. + * Scope enforcement (`tokens:read`) is handled upstream by OPA middleware. * * @param token - The JWT string to introspect. - * @param callerPayload - The decoded payload of the calling agent's token (for scope check). + * @param callerPayload - The decoded payload of the calling agent's token. * @param ipAddress - Client IP for audit logging. * @param userAgent - Client User-Agent for audit logging. * @returns The introspection response. - * @throws InsufficientScopeError if the caller lacks `tokens:read` scope. */ async introspectToken( token: string, @@ -213,12 +231,6 @@ export class OAuth2Service { ipAddress: string, userAgent: string, ): Promise { - // Check caller has tokens:read scope - const callerScopes = callerPayload.scope.split(' '); - if (!callerScopes.includes('tokens:read')) { - throw new InsufficientScopeError('tokens:read'); - } - try { const payload = verifyToken(token, this.publicKey); const revoked = await this.tokenRepository.isRevoked(payload.jti); diff --git a/src/types/index.ts b/src/types/index.ts index 372cc3f..639fd9f 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -122,9 +122,16 @@ export interface ICredentialWithSecret extends ICredential { clientSecret: string; } -/** Database row for a credential, including the bcrypt hash. */ +/** Database row for a credential, including the bcrypt hash and optional Vault path. */ export interface ICredentialRow extends ICredential { + /** bcrypt hash of the secret — populated for Phase 1 (bcrypt-only) credentials. */ secretHash: string; + /** + * Vault KV v2 data path for this credential. + * When present, the secret is stored in Vault and secretHash is an empty placeholder. + * When null, bcrypt verification via secretHash is used (Phase 1 behaviour). + */ + vaultPath: string | null; } /** Request body for generating or rotating a credential. */ diff --git a/src/vault/VaultClient.ts b/src/vault/VaultClient.ts new file mode 100644 index 0000000..7bdfb1b --- /dev/null +++ b/src/vault/VaultClient.ts @@ -0,0 +1,200 @@ +/** + * VaultClient — HashiCorp Vault KV v2 integration for SentryAgent.ai AgentIdP. + * Manages agent credential secrets in Vault instead of storing bcrypt hashes in PostgreSQL. + * + * Vault is optional. When VAULT_ADDR is not set the server operates in + * bcrypt-only mode (Phase 1 behaviour). VaultClient is only instantiated when + * all three required env vars are present. + */ + +import nodeVault from 'node-vault'; +import { CredentialError } from '../utils/errors.js'; + +/** The single secret field name stored under each KV v2 path. */ +const SECRET_FIELD = 'clientSecret'; + +/** Raw KV v2 read response shape from node-vault. */ +interface KvV2ReadResponse { + data: { + data: Record; + metadata: { + version: number; + destroyed: boolean; + deletion_time: string; + }; + }; +} + +/** + * Wraps HashiCorp Vault KV v2 operations for credential secret management. + * All secrets are stored under `{mount}/data/agentidp/agents/{agentId}/credentials/{credentialId}`. + */ +export class VaultClient { + private readonly client: ReturnType; + private readonly mount: string; + + /** + * @param vaultAddr - Vault server address (e.g. http://127.0.0.1:8200). + * @param vaultToken - Vault authentication token. + * @param mount - KV v2 mount path (default: 'secret'). + */ + constructor(vaultAddr: string, vaultToken: string, mount: string = 'secret') { + this.client = nodeVault({ endpoint: vaultAddr, token: vaultToken }); + this.mount = mount; + } + + /** + * Builds the Vault KV v2 data path for a credential. + * + * @param agentId - The agent UUID. + * @param credentialId - The credential UUID. + * @returns Full KV v2 data path, e.g. `secret/data/agentidp/agents/{agentId}/credentials/{credentialId}`. + */ + private dataPath(agentId: string, credentialId: string): string { + return `${this.mount}/data/agentidp/agents/${agentId}/credentials/${credentialId}`; + } + + /** + * Builds the Vault KV v2 metadata path for a credential (used for permanent deletion). + * + * @param agentId - The agent UUID. + * @param credentialId - The credential UUID. + * @returns Full KV v2 metadata path. + */ + private metadataPath(agentId: string, credentialId: string): string { + return `${this.mount}/metadata/agentidp/agents/${agentId}/credentials/${credentialId}`; + } + + /** + * Stores a plain-text client secret in Vault for the given credential. + * Creates or overwrites the secret at the KV v2 path (new version on overwrite). + * + * @param agentId - The agent UUID. + * @param credentialId - The credential UUID. + * @param plainSecret - The plain-text client secret to store. + * @returns The Vault KV v2 data path where the secret was stored. + * @throws CredentialError if the Vault write fails. + */ + async writeSecret( + agentId: string, + credentialId: string, + plainSecret: string, + ): Promise { + const path = this.dataPath(agentId, credentialId); + try { + await this.client.write(path, { data: { [SECRET_FIELD]: plainSecret } }); + } catch (err) { + throw new CredentialError( + `Failed to write credential secret to Vault: ${err instanceof Error ? err.message : String(err)}`, + 'VAULT_WRITE_ERROR', + { agentId, credentialId }, + ); + } + return path; + } + + /** + * Reads and returns the plain-text client secret from Vault. + * + * @param agentId - The agent UUID. + * @param credentialId - The credential UUID. + * @returns The plain-text client secret. + * @throws CredentialError if the secret is not found or the read fails. + */ + async readSecret(agentId: string, credentialId: string): Promise { + const path = this.dataPath(agentId, credentialId); + let response: KvV2ReadResponse; + try { + response = (await this.client.read(path)) as KvV2ReadResponse; + } catch (err) { + throw new CredentialError( + `Failed to read credential secret from Vault: ${err instanceof Error ? err.message : String(err)}`, + 'VAULT_READ_ERROR', + { agentId, credentialId }, + ); + } + + const secret = response?.data?.data?.[SECRET_FIELD]; + if (typeof secret !== 'string' || secret.length === 0) { + throw new CredentialError( + 'Vault returned an empty or missing credential secret.', + 'VAULT_SECRET_MISSING', + { agentId, credentialId }, + ); + } + return secret; + } + + /** + * Verifies a plain-text secret against the value stored in Vault. + * Performs a constant-time comparison to prevent timing attacks. + * + * @param agentId - The agent UUID. + * @param credentialId - The credential UUID. + * @param candidateSecret - The plain-text secret to verify. + * @returns `true` if the secret matches, `false` if it does not. + */ + async verifySecret( + agentId: string, + credentialId: string, + candidateSecret: string, + ): Promise { + let stored: string; + try { + stored = await this.readSecret(agentId, credentialId); + } catch { + return false; + } + + // Constant-time comparison using crypto.timingSafeEqual + const { timingSafeEqual } = await import('crypto'); + if (stored.length !== candidateSecret.length) { + // Still perform a dummy comparison to avoid timing leaks on length differences + timingSafeEqual(Buffer.from(stored), Buffer.from(stored)); + return false; + } + return timingSafeEqual(Buffer.from(stored), Buffer.from(candidateSecret)); + } + + /** + * Permanently deletes all versions of a credential secret from Vault. + * Called on credential revocation. + * + * @param agentId - The agent UUID. + * @param credentialId - The credential UUID. + * @throws CredentialError if the deletion fails. + */ + async deleteSecret(agentId: string, credentialId: string): Promise { + const path = this.metadataPath(agentId, credentialId); + try { + await this.client.delete(path); + } catch (err) { + throw new CredentialError( + `Failed to delete credential secret from Vault: ${err instanceof Error ? err.message : String(err)}`, + 'VAULT_DELETE_ERROR', + { agentId, credentialId }, + ); + } + } +} + +/** + * Creates a VaultClient from environment variables, or returns null if Vault is not configured. + * When null is returned, the server operates in bcrypt-only mode (Phase 1 behaviour). + * + * Required env vars: VAULT_ADDR, VAULT_TOKEN + * Optional env var: VAULT_MOUNT (default: 'secret') + * + * @returns A configured VaultClient, or null if VAULT_ADDR/VAULT_TOKEN are not set. + */ +export function createVaultClientFromEnv(): VaultClient | null { + const addr = process.env.VAULT_ADDR; + const token = process.env.VAULT_TOKEN; + + if (!addr || !token) { + return null; + } + + const mount = process.env.VAULT_MOUNT ?? 'secret'; + return new VaultClient(addr, token, mount); +} diff --git a/terraform/environments/aws/main.tf b/terraform/environments/aws/main.tf new file mode 100644 index 0000000..2f837ca --- /dev/null +++ b/terraform/environments/aws/main.tf @@ -0,0 +1,640 @@ +################################################################################ +# Environment: aws +# Main — SentryAgent.ai AgentIdP on AWS +# +# Architecture: +# Internet → Route 53 → ALB (public subnets, HTTPS/443) → +# ECS Fargate tasks (private subnets) → +# RDS PostgreSQL 14 (private subnets, Multi-AZ) + +# ElastiCache Redis 7 (private subnets, primary + replica) +# +# All secrets stored in AWS Secrets Manager — ECS tasks pull at launch time. +# No sensitive values in state (except where Terraform internals require it). +################################################################################ + +terraform { + required_version = ">= 1.6.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = ">= 5.40.0" + } + random = { + source = "hashicorp/random" + version = ">= 3.6.0" + } + } + + # Remote state — configure your backend here. + # Example using S3 + DynamoDB state locking: + # + # backend "s3" { + # bucket = "sentryagent-terraform-state" + # key = "agentidp/aws/production/terraform.tfstate" + # region = "us-east-1" + # encrypt = true + # dynamodb_table = "sentryagent-terraform-locks" + # } +} + +provider "aws" { + region = var.region + + default_tags { + tags = { + environment = var.environment + project = var.project + managed_by = "terraform" + } + } +} + +################################################################################ +# Data sources +################################################################################ + +data "aws_caller_identity" "current" {} +data "aws_region" "current" {} + +################################################################################ +# VPC +################################################################################ + +resource "aws_vpc" "main" { + cidr_block = var.vpc_cidr + enable_dns_support = true + enable_dns_hostnames = true + + tags = { + Name = "${var.project}-${var.environment}-vpc" + } +} + +resource "aws_internet_gateway" "main" { + vpc_id = aws_vpc.main.id + + tags = { + Name = "${var.project}-${var.environment}-igw" + } +} + +################################################################################ +# Subnets +################################################################################ + +resource "aws_subnet" "public" { + count = length(var.availability_zones) + + vpc_id = aws_vpc.main.id + cidr_block = var.public_subnet_cidrs[count.index] + availability_zone = var.availability_zones[count.index] + map_public_ip_on_launch = false + + tags = { + Name = "${var.project}-${var.environment}-public-${var.availability_zones[count.index]}" + tier = "public" + } +} + +resource "aws_subnet" "private" { + count = length(var.availability_zones) + + vpc_id = aws_vpc.main.id + cidr_block = var.private_subnet_cidrs[count.index] + availability_zone = var.availability_zones[count.index] + + tags = { + Name = "${var.project}-${var.environment}-private-${var.availability_zones[count.index]}" + tier = "private" + } +} + +################################################################################ +# NAT Gateways — one per AZ for HA outbound from private subnets +# ECS tasks need outbound internet to pull ECR images and reach Secrets Manager. +################################################################################ + +resource "aws_eip" "nat" { + count = length(var.availability_zones) + domain = "vpc" + + tags = { + Name = "${var.project}-${var.environment}-nat-eip-${var.availability_zones[count.index]}" + } + + depends_on = [aws_internet_gateway.main] +} + +resource "aws_nat_gateway" "main" { + count = length(var.availability_zones) + + allocation_id = aws_eip.nat[count.index].id + subnet_id = aws_subnet.public[count.index].id + + tags = { + Name = "${var.project}-${var.environment}-nat-${var.availability_zones[count.index]}" + } + + depends_on = [aws_internet_gateway.main] +} + +################################################################################ +# Route Tables +################################################################################ + +resource "aws_route_table" "public" { + vpc_id = aws_vpc.main.id + + route { + cidr_block = "0.0.0.0/0" + gateway_id = aws_internet_gateway.main.id + } + + tags = { + Name = "${var.project}-${var.environment}-public-rt" + } +} + +resource "aws_route_table_association" "public" { + count = length(aws_subnet.public) + + subnet_id = aws_subnet.public[count.index].id + route_table_id = aws_route_table.public.id +} + +resource "aws_route_table" "private" { + count = length(var.availability_zones) + vpc_id = aws_vpc.main.id + + route { + cidr_block = "0.0.0.0/0" + nat_gateway_id = aws_nat_gateway.main[count.index].id + } + + tags = { + Name = "${var.project}-${var.environment}-private-rt-${var.availability_zones[count.index]}" + } +} + +resource "aws_route_table_association" "private" { + count = length(aws_subnet.private) + + subnet_id = aws_subnet.private[count.index].id + route_table_id = aws_route_table.private[count.index].id +} + +################################################################################ +# VPC Endpoints — allow ECS tasks to reach AWS services without NAT +################################################################################ + +resource "aws_vpc_endpoint" "secretsmanager" { + vpc_id = aws_vpc.main.id + service_name = "com.amazonaws.${var.region}.secretsmanager" + vpc_endpoint_type = "Interface" + subnet_ids = aws_subnet.private[*].id + private_dns_enabled = true + + tags = { + Name = "${var.project}-${var.environment}-secretsmanager-endpoint" + } +} + +resource "aws_vpc_endpoint" "ecr_api" { + vpc_id = aws_vpc.main.id + service_name = "com.amazonaws.${var.region}.ecr.api" + vpc_endpoint_type = "Interface" + subnet_ids = aws_subnet.private[*].id + private_dns_enabled = true + + tags = { + Name = "${var.project}-${var.environment}-ecr-api-endpoint" + } +} + +resource "aws_vpc_endpoint" "ecr_dkr" { + vpc_id = aws_vpc.main.id + service_name = "com.amazonaws.${var.region}.ecr.dkr" + vpc_endpoint_type = "Interface" + subnet_ids = aws_subnet.private[*].id + private_dns_enabled = true + + tags = { + Name = "${var.project}-${var.environment}-ecr-dkr-endpoint" + } +} + +resource "aws_vpc_endpoint" "s3" { + vpc_id = aws_vpc.main.id + service_name = "com.amazonaws.${var.region}.s3" + vpc_endpoint_type = "Gateway" + route_table_ids = aws_route_table.private[*].id + + tags = { + Name = "${var.project}-${var.environment}-s3-endpoint" + } +} + +resource "aws_vpc_endpoint" "cloudwatch_logs" { + vpc_id = aws_vpc.main.id + service_name = "com.amazonaws.${var.region}.logs" + vpc_endpoint_type = "Interface" + subnet_ids = aws_subnet.private[*].id + private_dns_enabled = true + + tags = { + Name = "${var.project}-${var.environment}-logs-endpoint" + } +} + +################################################################################ +# IAM — ECS Task Execution Role +# Allows ECS to pull images from ECR, write logs, and fetch secrets. +################################################################################ + +data "aws_iam_policy_document" "ecs_task_execution_assume" { + statement { + actions = ["sts:AssumeRole"] + principals { + type = "Service" + identifiers = ["ecs-tasks.amazonaws.com"] + } + } +} + +resource "aws_iam_role" "ecs_task_execution" { + name = "${var.project}-${var.environment}-ecs-execution-role" + assume_role_policy = data.aws_iam_policy_document.ecs_task_execution_assume.json + + tags = { + environment = var.environment + project = var.project + } +} + +resource "aws_iam_role_policy_attachment" "ecs_task_execution_managed" { + role = aws_iam_role.ecs_task_execution.name + policy_arn = "arn:aws:iam::aws:policy/service-role/AmazonECSTaskExecutionRolePolicy" +} + +# Allow the execution role to fetch the specific secrets it needs +data "aws_iam_policy_document" "ecs_task_execution_secrets" { + statement { + sid = "GetAppSecrets" + effect = "Allow" + actions = [ + "secretsmanager:GetSecretValue", + "secretsmanager:DescribeSecret" + ] + resources = concat( + [ + aws_secretsmanager_secret.database_url.arn, + aws_secretsmanager_secret.redis_url.arn, + aws_secretsmanager_secret.jwt_private_key.arn, + aws_secretsmanager_secret.jwt_public_key.arn, + ], + var.vault_token != "" ? [aws_secretsmanager_secret.vault_token[0].arn] : [] + ) + } +} + +resource "aws_iam_role_policy" "ecs_task_execution_secrets" { + name = "${var.project}-${var.environment}-secrets-policy" + role = aws_iam_role.ecs_task_execution.id + policy = data.aws_iam_policy_document.ecs_task_execution_secrets.json +} + +################################################################################ +# IAM — ECS Task Role +# Permissions granted to the running application container. +################################################################################ + +resource "aws_iam_role" "ecs_task" { + name = "${var.project}-${var.environment}-ecs-task-role" + assume_role_policy = data.aws_iam_policy_document.ecs_task_execution_assume.json + + tags = { + environment = var.environment + project = var.project + } +} + +# ECS task role policy — extend as needed for other AWS service calls. +data "aws_iam_policy_document" "ecs_task" { + statement { + sid = "AllowCloudWatchMetrics" + effect = "Allow" + actions = [ + "cloudwatch:PutMetricData" + ] + resources = ["*"] + } +} + +resource "aws_iam_role_policy" "ecs_task" { + name = "${var.project}-${var.environment}-task-policy" + role = aws_iam_role.ecs_task.id + policy = data.aws_iam_policy_document.ecs_task.json +} + +################################################################################ +# IAM — RDS Enhanced Monitoring Role +################################################################################ + +data "aws_iam_policy_document" "rds_monitoring_assume" { + statement { + actions = ["sts:AssumeRole"] + principals { + type = "Service" + identifiers = ["monitoring.rds.amazonaws.com"] + } + } +} + +resource "aws_iam_role" "rds_monitoring" { + name = "${var.project}-${var.environment}-rds-monitoring-role" + assume_role_policy = data.aws_iam_policy_document.rds_monitoring_assume.json + + tags = { + environment = var.environment + project = var.project + } +} + +resource "aws_iam_role_policy_attachment" "rds_monitoring" { + role = aws_iam_role.rds_monitoring.name + policy_arn = "arn:aws:iam::aws:policy/service-role/AmazonRDSEnhancedMonitoringRole" +} + +################################################################################ +# AWS Secrets Manager — store all sensitive values +################################################################################ + +resource "aws_secretsmanager_secret" "database_url" { + name = "/${var.project}/${var.environment}/database-url" + description = "PostgreSQL DATABASE_URL for AgentIdP" + recovery_window_in_days = 7 + + tags = { + environment = var.environment + project = var.project + } +} + +resource "aws_secretsmanager_secret_version" "database_url" { + secret_id = aws_secretsmanager_secret.database_url.id + # Build the DATABASE_URL using the RDS endpoint output. + # The password is passed in as var.db_password so it never appears in plaintext + # in any .tf file — only in this encrypted secret version. + secret_string = "postgresql://${var.project}:${var.db_password}@${module.rds.endpoint}:${module.rds.port}/${module.rds.db_name}?sslmode=require" + + depends_on = [module.rds] +} + +resource "aws_secretsmanager_secret" "redis_url" { + name = "/${var.project}/${var.environment}/redis-url" + description = "Redis REDIS_URL for AgentIdP" + recovery_window_in_days = 7 + + tags = { + environment = var.environment + project = var.project + } +} + +resource "aws_secretsmanager_secret_version" "redis_url" { + secret_id = aws_secretsmanager_secret.redis_url.id + # ElastiCache Redis with TLS uses the rediss:// scheme and requires an AUTH token. + secret_string = "rediss://:${var.redis_auth_token}@${module.redis.primary_endpoint}:${module.redis.port}" + + depends_on = [module.redis] +} + +resource "aws_secretsmanager_secret" "jwt_private_key" { + name = "/${var.project}/${var.environment}/jwt-private-key" + description = "RSA-2048 private key for signing AgentIdP JWTs" + recovery_window_in_days = 7 + + tags = { + environment = var.environment + project = var.project + } +} + +resource "aws_secretsmanager_secret_version" "jwt_private_key" { + secret_id = aws_secretsmanager_secret.jwt_private_key.id + secret_string = var.jwt_private_key +} + +resource "aws_secretsmanager_secret" "jwt_public_key" { + name = "/${var.project}/${var.environment}/jwt-public-key" + description = "RSA-2048 public key for verifying AgentIdP JWTs" + recovery_window_in_days = 7 + + tags = { + environment = var.environment + project = var.project + } +} + +resource "aws_secretsmanager_secret_version" "jwt_public_key" { + secret_id = aws_secretsmanager_secret.jwt_public_key.id + secret_string = var.jwt_public_key +} + +resource "aws_secretsmanager_secret" "vault_token" { + count = var.vault_token != "" ? 1 : 0 + + name = "/${var.project}/${var.environment}/vault-token" + description = "HashiCorp Vault token for AgentIdP" + recovery_window_in_days = 7 + + tags = { + environment = var.environment + project = var.project + } +} + +resource "aws_secretsmanager_secret_version" "vault_token" { + count = var.vault_token != "" ? 1 : 0 + + secret_id = aws_secretsmanager_secret.vault_token[0].id + secret_string = var.vault_token +} + +################################################################################ +# Module: Load Balancer +################################################################################ + +module "lb" { + source = "../../modules/lb" + + environment = var.environment + project = var.project + vpc_id = aws_vpc.main.id + subnet_ids = aws_subnet.public[*].id + certificate_arn = var.certificate_arn + + target_group_port = 3000 + enable_deletion_protection = true + access_logs_bucket = var.alb_access_logs_bucket +} + +################################################################################ +# Module: RDS PostgreSQL +################################################################################ + +module "rds" { + source = "../../modules/rds" + + environment = var.environment + project = var.project + vpc_id = aws_vpc.main.id + subnet_ids = aws_subnet.private[*].id + + # The app SG is created by the agentidp module; we wire it after both modules + # are instantiated using a separate security group rule (see below). + allowed_security_group_ids = [] + + db_name = "sentryagent_idp" + db_username = var.project + db_password = var.db_password + + instance_class = var.rds_instance_class + allocated_storage = 50 + max_allocated_storage = 500 + multi_az = true + backup_retention_days = var.rds_backup_retention_days + deletion_protection = var.rds_deletion_protection + skip_final_snapshot = var.rds_skip_final_snapshot + monitoring_role_arn = aws_iam_role.rds_monitoring.arn + monitoring_interval = 60 + performance_insights_enabled = true +} + +################################################################################ +# Module: Redis +################################################################################ + +module "redis" { + source = "../../modules/redis" + + environment = var.environment + project = var.project + vpc_id = aws_vpc.main.id + subnet_ids = aws_subnet.private[*].id + + # Same pattern as RDS — app SG wired after agentidp module creates it. + allowed_security_group_ids = [] + + node_type = var.redis_node_type + num_cache_clusters = 2 + automatic_failover_enabled = true + multi_az_enabled = true + at_rest_encryption_enabled = true + transit_encryption_enabled = true + auth_token = var.redis_auth_token + snapshot_retention_limit = 7 +} + +################################################################################ +# Module: AgentIdP (ECS Fargate) +################################################################################ + +module "agentidp" { + source = "../../modules/agentidp" + + provider_type = "aws" + environment = var.environment + project = var.project + app_image = "sentryagent/agentidp:${var.app_image_tag}" + app_port = 3000 + + aws_region = var.region + aws_vpc_id = aws_vpc.main.id + aws_subnet_ids = aws_subnet.private[*].id + aws_target_group_arn = module.lb.target_group_arn + aws_execution_role_arn = aws_iam_role.ecs_task_execution.arn + aws_task_role_arn = aws_iam_role.ecs_task.arn + aws_log_group_name = "/ecs/${var.project}-${var.environment}" + aws_desired_count = var.ecs_desired_count + aws_cpu = 512 + aws_memory = 1024 + aws_cors_origin = var.cors_origin + aws_policy_dir = "/app/policies" + aws_vault_addr = var.vault_addr + aws_vault_mount = var.vault_mount + + aws_secret_database_url_arn = aws_secretsmanager_secret.database_url.arn + aws_secret_redis_url_arn = aws_secretsmanager_secret.redis_url.arn + aws_secret_jwt_private_key_arn = aws_secretsmanager_secret.jwt_private_key.arn + aws_secret_jwt_public_key_arn = aws_secretsmanager_secret.jwt_public_key.arn + aws_secret_vault_token_arn = var.vault_token != "" ? aws_secretsmanager_secret.vault_token[0].arn : "" + + depends_on = [ + aws_secretsmanager_secret_version.database_url, + aws_secretsmanager_secret_version.redis_url, + aws_secretsmanager_secret_version.jwt_private_key, + aws_secretsmanager_secret_version.jwt_public_key, + ] +} + +################################################################################ +# Cross-module security group wiring +# +# The app SG (from agentidp module) must be allowed into RDS and Redis. +# These rules are created after both modules are fully instantiated to avoid +# circular references in the module dependency graph. +################################################################################ + +resource "aws_security_group_rule" "rds_from_app" { + type = "ingress" + description = "PostgreSQL from ECS app tasks" + from_port = 5432 + to_port = 5432 + protocol = "tcp" + source_security_group_id = module.agentidp.aws_app_security_group_id + security_group_id = module.rds.security_group_id +} + +resource "aws_security_group_rule" "redis_from_app" { + type = "ingress" + description = "Redis from ECS app tasks" + from_port = 6379 + to_port = 6379 + protocol = "tcp" + source_security_group_id = module.agentidp.aws_app_security_group_id + security_group_id = module.redis.security_group_id +} + +# Allow the ALB to reach ECS tasks on the app port +resource "aws_security_group_rule" "app_from_alb" { + type = "ingress" + description = "App port from ALB" + from_port = 3000 + to_port = 3000 + protocol = "tcp" + source_security_group_id = module.lb.alb_security_group_id + security_group_id = module.agentidp.aws_app_security_group_id +} + +################################################################################ +# Route 53 — alias record pointing the domain to the ALB +################################################################################ + +data "aws_route53_zone" "main" { + name = join(".", slice(split(".", var.domain_name), 1, length(split(".", var.domain_name)))) + private_zone = false +} + +resource "aws_route53_record" "app" { + zone_id = data.aws_route53_zone.main.zone_id + name = var.domain_name + type = "A" + + alias { + name = module.lb.alb_dns_name + zone_id = module.lb.alb_zone_id + evaluate_target_health = true + } +} diff --git a/terraform/environments/aws/outputs.tf b/terraform/environments/aws/outputs.tf new file mode 100644 index 0000000..c81744b --- /dev/null +++ b/terraform/environments/aws/outputs.tf @@ -0,0 +1,84 @@ +################################################################################ +# Environment: aws +# Outputs +################################################################################ + +output "alb_dns_name" { + description = "DNS name of the Application Load Balancer." + value = module.lb.alb_dns_name +} + +output "service_url" { + description = "Public HTTPS URL of the AgentIdP service." + value = "https://${var.domain_name}" +} + +output "ecs_cluster_arn" { + description = "ARN of the ECS cluster." + value = module.agentidp.aws_ecs_cluster_arn +} + +output "ecs_service_name" { + description = "Name of the ECS Fargate service." + value = module.agentidp.aws_ecs_service_name +} + +output "ecs_task_definition_arn" { + description = "Active ECS task definition ARN." + value = module.agentidp.aws_ecs_task_definition_arn +} + +output "rds_endpoint" { + description = "RDS PostgreSQL endpoint hostname." + value = module.rds.endpoint +} + +output "rds_port" { + description = "RDS PostgreSQL port." + value = module.rds.port +} + +output "rds_instance_id" { + description = "RDS instance identifier." + value = module.rds.instance_id +} + +output "redis_primary_endpoint" { + description = "ElastiCache Redis primary endpoint hostname." + value = module.redis.primary_endpoint +} + +output "redis_reader_endpoint" { + description = "ElastiCache Redis reader endpoint." + value = module.redis.reader_endpoint +} + +output "vpc_id" { + description = "ID of the VPC created for this deployment." + value = aws_vpc.main.id +} + +output "private_subnet_ids" { + description = "IDs of the private subnets (ECS, RDS, Redis)." + value = aws_subnet.private[*].id +} + +output "public_subnet_ids" { + description = "IDs of the public subnets (ALB)." + value = aws_subnet.public[*].id +} + +output "cloudwatch_log_group" { + description = "CloudWatch log group for ECS container logs." + value = module.agentidp.aws_cloudwatch_log_group_name +} + +output "secrets_manager_database_url_arn" { + description = "ARN of the Secrets Manager secret holding DATABASE_URL." + value = aws_secretsmanager_secret.database_url.arn +} + +output "secrets_manager_redis_url_arn" { + description = "ARN of the Secrets Manager secret holding REDIS_URL." + value = aws_secretsmanager_secret.redis_url.arn +} diff --git a/terraform/environments/aws/terraform.tfvars.example b/terraform/environments/aws/terraform.tfvars.example new file mode 100644 index 0000000..5ffac27 --- /dev/null +++ b/terraform/environments/aws/terraform.tfvars.example @@ -0,0 +1,76 @@ +# ───────────────────────────────────────────────────────────────────────────── +# terraform/environments/aws/terraform.tfvars.example +# +# Copy this file to terraform.tfvars and fill in real values. +# NEVER commit terraform.tfvars to version control — it contains secrets. +# +# All sensitive variables (db_password, jwt_*, vault_token) must be provided +# via this file or as TF_VAR_* environment variables in your CI/CD pipeline. +# ───────────────────────────────────────────────────────────────────────────── + +# ── Region & environment ────────────────────────────────────────────────────── + +region = "us-east-1" +environment = "production" +project = "sentryagent-agentidp" + +# ── Application image ───────────────────────────────────────────────────────── +# Docker image tag to deploy. Update this to roll out a new version. + +app_image_tag = "1.0.0" + +# ── DNS & TLS ───────────────────────────────────────────────────────────────── +# The ACM certificate must already exist in the same region as the ALB. +# Create it with: aws acm request-certificate --domain-name idp.example.com --validation-method DNS + +domain_name = "idp.example.com" +certificate_arn = "arn:aws:acm:us-east-1:123456789012:certificate/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx" + +# ── Networking ──────────────────────────────────────────────────────────────── + +vpc_cidr = "10.0.0.0/16" +availability_zones = ["us-east-1a", "us-east-1b", "us-east-1c"] +public_subnet_cidrs = ["10.0.1.0/24", "10.0.2.0/24", "10.0.3.0/24"] +private_subnet_cidrs = ["10.0.11.0/24", "10.0.12.0/24", "10.0.13.0/24"] + +# ── Secrets — REPLACE ALL VALUES BELOW ─────────────────────────────────────── +# Use strong, randomly generated values. Do NOT use these placeholders in production. + +# Master password for RDS PostgreSQL (min 8 chars, no special chars that break URLs) +db_password = "REPLACE_WITH_STRONG_RANDOM_PASSWORD" + +# AUTH token for ElastiCache Redis (min 16 chars) +redis_auth_token = "REPLACE_WITH_STRONG_RANDOM_TOKEN_AT_LEAST_16_CHARS" + +# RSA-2048 key pair for JWT signing/verification. +# Generate with: +# openssl genrsa -out private.pem 2048 +# openssl rsa -in private.pem -pubout -out public.pem +jwt_private_key = "-----BEGIN RSA PRIVATE KEY-----\nREPLACE_WITH_ACTUAL_PRIVATE_KEY_CONTENTS\n-----END RSA PRIVATE KEY-----" +jwt_public_key = "-----BEGIN PUBLIC KEY-----\nREPLACE_WITH_ACTUAL_PUBLIC_KEY_CONTENTS\n-----END PUBLIC KEY-----" + +# HashiCorp Vault (optional — leave empty strings to disable Vault integration) +vault_addr = "" +vault_token = "" +vault_mount = "secret" + +# ── Application configuration ───────────────────────────────────────────────── + +cors_origin = "*" +ecs_desired_count = 2 + +# ── Infrastructure sizing ───────────────────────────────────────────────────── + +rds_instance_class = "db.t3.medium" +redis_node_type = "cache.t3.medium" + +# ── ALB access logs (optional) ──────────────────────────────────────────────── +# Create the S3 bucket and enable ALB log delivery permissions before setting this. + +alb_access_logs_bucket = "" + +# ── RDS settings ────────────────────────────────────────────────────────────── + +rds_backup_retention_days = 7 +rds_deletion_protection = true +rds_skip_final_snapshot = false diff --git a/terraform/environments/aws/variables.tf b/terraform/environments/aws/variables.tf new file mode 100644 index 0000000..6c77c52 --- /dev/null +++ b/terraform/environments/aws/variables.tf @@ -0,0 +1,164 @@ +################################################################################ +# Environment: aws +# Variables +################################################################################ + +variable "region" { + description = "AWS region for all resources." + type = string + default = "us-east-1" +} + +variable "environment" { + description = "Deployment environment (e.g. production, staging)." + type = string + default = "production" +} + +variable "project" { + description = "Project identifier — used in all resource names and tags." + type = string + default = "sentryagent-agentidp" +} + +variable "app_image_tag" { + description = "Docker image tag to deploy (e.g. '1.2.3' or a full SHA)." + type = string +} + +variable "domain_name" { + description = "Primary domain name for the AgentIdP service (e.g. idp.sentryagent.ai)." + type = string +} + +variable "certificate_arn" { + description = "ARN of the ACM certificate for the domain_name. Must be in the same region as the ALB." + type = string +} + +################################################################################ +# Networking +################################################################################ + +variable "vpc_cidr" { + description = "CIDR block for the VPC." + type = string + default = "10.0.0.0/16" +} + +variable "availability_zones" { + description = "List of Availability Zones to use. Must contain at least 2 for Multi-AZ resources." + type = list(string) + default = ["us-east-1a", "us-east-1b", "us-east-1c"] +} + +variable "public_subnet_cidrs" { + description = "CIDR blocks for public subnets (ALB). One per AZ." + type = list(string) + default = ["10.0.1.0/24", "10.0.2.0/24", "10.0.3.0/24"] +} + +variable "private_subnet_cidrs" { + description = "CIDR blocks for private subnets (ECS, RDS, Redis). One per AZ." + type = list(string) + default = ["10.0.11.0/24", "10.0.12.0/24", "10.0.13.0/24"] +} + +################################################################################ +# Secrets — all marked sensitive; provide via tfvars or environment variables +################################################################################ + +variable "db_password" { + description = "Master password for the RDS PostgreSQL instance. Stored in AWS Secrets Manager." + type = string + sensitive = true +} + +variable "redis_auth_token" { + description = "AUTH token for ElastiCache Redis (minimum 16 characters). Stored in AWS Secrets Manager." + type = string + sensitive = true +} + +variable "jwt_private_key" { + description = "PEM-encoded RSA-2048 private key for signing JWTs. Stored in AWS Secrets Manager." + type = string + sensitive = true +} + +variable "jwt_public_key" { + description = "PEM-encoded RSA-2048 public key for verifying JWTs. Stored in AWS Secrets Manager." + type = string + sensitive = true +} + +variable "vault_token" { + description = "HashiCorp Vault token. Leave empty to disable Vault integration." + type = string + sensitive = true + default = "" +} + +################################################################################ +# Optional configuration +################################################################################ + +variable "vault_addr" { + description = "HashiCorp Vault server address. Leave empty to disable Vault integration." + type = string + default = "" +} + +variable "vault_mount" { + description = "HashiCorp Vault KV v2 mount path." + type = string + default = "secret" +} + +variable "cors_origin" { + description = "CORS_ORIGIN value for the app (use * for public APIs or a specific origin)." + type = string + default = "*" +} + +variable "ecs_desired_count" { + description = "Number of ECS Fargate tasks to run." + type = number + default = 2 +} + +variable "rds_instance_class" { + description = "RDS instance class." + type = string + default = "db.t3.medium" +} + +variable "redis_node_type" { + description = "ElastiCache node type." + type = string + default = "cache.t3.medium" +} + +variable "alb_access_logs_bucket" { + description = "S3 bucket for ALB access logs. Leave empty to disable." + type = string + default = "" +} + +variable "rds_backup_retention_days" { + description = "Number of days to retain RDS automated backups." + type = number + default = 7 +} + +variable "rds_deletion_protection" { + description = "Enable RDS deletion protection." + type = bool + default = true +} + +variable "rds_skip_final_snapshot" { + description = "Skip final RDS snapshot on destroy. Keep false in production." + type = bool + default = false +} diff --git a/terraform/environments/gcp/main.tf b/terraform/environments/gcp/main.tf new file mode 100644 index 0000000..fd782be --- /dev/null +++ b/terraform/environments/gcp/main.tf @@ -0,0 +1,477 @@ +################################################################################ +# Environment: gcp +# Main — SentryAgent.ai AgentIdP on Google Cloud Platform +# +# Architecture: +# Internet → Cloud Run (Google-managed TLS, auto-scaling) → +# Cloud SQL PostgreSQL 14 (private IP, REGIONAL HA) + +# Memorystore Redis 7 (STANDARD_HA, in-transit encryption) +# via Serverless VPC Access connector +# +# All secrets stored in GCP Secret Manager — Cloud Run reads them at startup. +# No sensitive values in state (except where Terraform internals require it). +################################################################################ + +terraform { + required_version = ">= 1.6.0" + + required_providers { + google = { + source = "hashicorp/google" + version = ">= 5.20.0" + } + google-beta = { + source = "hashicorp/google-beta" + version = ">= 5.20.0" + } + random = { + source = "hashicorp/random" + version = ">= 3.6.0" + } + } + + # Remote state — configure your backend here. + # Example using GCS: + # + # backend "gcs" { + # bucket = "sentryagent-terraform-state" + # prefix = "agentidp/gcp/production" + # } +} + +provider "google" { + project = var.project_id + region = var.region +} + +provider "google-beta" { + project = var.project_id + region = var.region +} + +################################################################################ +# Enable required GCP APIs +################################################################################ + +resource "google_project_service" "apis" { + for_each = toset([ + "run.googleapis.com", + "sqladmin.googleapis.com", + "redis.googleapis.com", + "vpcaccess.googleapis.com", + "secretmanager.googleapis.com", + "servicenetworking.googleapis.com", + "cloudresourcemanager.googleapis.com", + "iam.googleapis.com", + ]) + + project = var.project_id + service = each.value + disable_on_destroy = false +} + +################################################################################ +# Locals +################################################################################ + +locals { + name_prefix = "${var.project}-${var.environment}" + + common_labels = { + environment = var.environment + project = replace(var.project, "-", "_") + managed_by = "terraform" + } +} + +################################################################################ +# VPC Network +################################################################################ + +resource "google_compute_network" "main" { + name = "${local.name_prefix}-vpc" + auto_create_subnetworks = false + project = var.project_id + + depends_on = [google_project_service.apis] +} + +resource "google_compute_subnetwork" "private" { + name = "${local.name_prefix}-private-subnet" + ip_cidr_range = var.vpc_cidr + region = var.region + network = google_compute_network.main.id + project = var.project_id + + private_ip_google_access = true + + log_config { + aggregation_interval = "INTERVAL_10_MIN" + flow_sampling = 0.5 + metadata = "INCLUDE_ALL_METADATA" + } +} + +################################################################################ +# Private Services Access — required for Cloud SQL private IP +################################################################################ + +resource "google_compute_global_address" "private_services" { + name = "${local.name_prefix}-private-services-range" + purpose = "VPC_PEERING" + address_type = "INTERNAL" + prefix_length = 20 + network = google_compute_network.main.id + project = var.project_id +} + +resource "google_service_networking_connection" "private_services" { + network = google_compute_network.main.id + service = "servicenetworking.googleapis.com" + reserved_peering_ranges = [google_compute_global_address.private_services.name] + + depends_on = [google_project_service.apis] +} + +################################################################################ +# Serverless VPC Access Connector +# Cloud Run uses this to reach Cloud SQL (private IP) and Memorystore. +################################################################################ + +resource "google_vpc_access_connector" "main" { + name = "${local.name_prefix}-connector" + region = var.region + project = var.project_id + ip_cidr_range = var.vpc_connector_cidr + network = google_compute_network.main.name + min_instances = 2 + max_instances = 10 + machine_type = "e2-micro" + + depends_on = [google_project_service.apis] +} + +################################################################################ +# Service Account for Cloud Run +################################################################################ + +resource "google_service_account" "cloud_run" { + account_id = "${var.project}-${var.environment}-run-sa" + display_name = "AgentIdP Cloud Run Service Account (${var.environment})" + project = var.project_id +} + +################################################################################ +# Secret Manager — create secrets and grant the SA access +################################################################################ + +resource "google_secret_manager_secret" "database_url" { + secret_id = "${local.name_prefix}-database-url" + project = var.project_id + + replication { + auto {} + } + + labels = local.common_labels + + depends_on = [google_project_service.apis] +} + +resource "google_secret_manager_secret_version" "database_url" { + secret = google_secret_manager_secret.database_url.id + # Build the DATABASE_URL from Cloud SQL private IP output. + secret_data = "postgresql://${var.db_username}:${var.db_password}@${google_sql_database_instance.main.private_ip_address}:5432/${var.db_name}?sslmode=require" + + depends_on = [google_sql_database_instance.main] +} + +resource "google_secret_manager_secret" "redis_url" { + secret_id = "${local.name_prefix}-redis-url" + project = var.project_id + + replication { + auto {} + } + + labels = local.common_labels + + depends_on = [google_project_service.apis] +} + +resource "google_secret_manager_secret_version" "redis_url" { + secret = google_secret_manager_secret.redis_url.id + # Memorystore Redis with in-transit encryption uses the rediss:// scheme. + secret_data = "rediss://${google_redis_instance.main.host}:${google_redis_instance.main.port}" + + depends_on = [google_redis_instance.main] +} + +resource "google_secret_manager_secret" "jwt_private_key" { + secret_id = "${local.name_prefix}-jwt-private-key" + project = var.project_id + + replication { + auto {} + } + + labels = local.common_labels + + depends_on = [google_project_service.apis] +} + +resource "google_secret_manager_secret_version" "jwt_private_key" { + secret = google_secret_manager_secret.jwt_private_key.id + secret_data = var.jwt_private_key +} + +resource "google_secret_manager_secret" "jwt_public_key" { + secret_id = "${local.name_prefix}-jwt-public-key" + project = var.project_id + + replication { + auto {} + } + + labels = local.common_labels + + depends_on = [google_project_service.apis] +} + +resource "google_secret_manager_secret_version" "jwt_public_key" { + secret = google_secret_manager_secret.jwt_public_key.id + secret_data = var.jwt_public_key +} + +resource "google_secret_manager_secret" "vault_token" { + count = var.vault_token != "" ? 1 : 0 + + secret_id = "${local.name_prefix}-vault-token" + project = var.project_id + + replication { + auto {} + } + + labels = local.common_labels + + depends_on = [google_project_service.apis] +} + +resource "google_secret_manager_secret_version" "vault_token" { + count = var.vault_token != "" ? 1 : 0 + + secret = google_secret_manager_secret.vault_token[0].id + secret_data = var.vault_token +} + +# Grant the Cloud Run SA access to each secret +resource "google_secret_manager_secret_iam_member" "run_database_url" { + project = var.project_id + secret_id = google_secret_manager_secret.database_url.secret_id + role = "roles/secretmanager.secretAccessor" + member = "serviceAccount:${google_service_account.cloud_run.email}" +} + +resource "google_secret_manager_secret_iam_member" "run_redis_url" { + project = var.project_id + secret_id = google_secret_manager_secret.redis_url.secret_id + role = "roles/secretmanager.secretAccessor" + member = "serviceAccount:${google_service_account.cloud_run.email}" +} + +resource "google_secret_manager_secret_iam_member" "run_jwt_private_key" { + project = var.project_id + secret_id = google_secret_manager_secret.jwt_private_key.secret_id + role = "roles/secretmanager.secretAccessor" + member = "serviceAccount:${google_service_account.cloud_run.email}" +} + +resource "google_secret_manager_secret_iam_member" "run_jwt_public_key" { + project = var.project_id + secret_id = google_secret_manager_secret.jwt_public_key.secret_id + role = "roles/secretmanager.secretAccessor" + member = "serviceAccount:${google_service_account.cloud_run.email}" +} + +resource "google_secret_manager_secret_iam_member" "run_vault_token" { + count = var.vault_token != "" ? 1 : 0 + + project = var.project_id + secret_id = google_secret_manager_secret.vault_token[0].secret_id + role = "roles/secretmanager.secretAccessor" + member = "serviceAccount:${google_service_account.cloud_run.email}" +} + +################################################################################ +# Cloud SQL — PostgreSQL 14, private IP, REGIONAL HA +################################################################################ + +resource "google_sql_database_instance" "main" { + name = "${local.name_prefix}-pg14" + database_version = "POSTGRES_14" + region = var.region + project = var.project_id + + deletion_protection = var.deletion_protection + + settings { + tier = var.db_tier + availability_type = var.db_availability_type + disk_type = "PD_SSD" + disk_size = 50 + disk_autoresize = true + + ip_configuration { + ipv4_enabled = false # No public IP + private_network = google_compute_network.main.id + require_ssl = true + } + + backup_configuration { + enabled = true + start_time = "03:00" + point_in_time_recovery_enabled = true + transaction_log_retention_days = 7 + backup_retention_settings { + retained_backups = 7 + retention_unit = "COUNT" + } + } + + maintenance_window { + day = 7 # Sunday + hour = 5 + update_track = "stable" + } + + insights_config { + query_insights_enabled = true + query_string_length = 1024 + record_application_tags = true + record_client_address = false + } + + database_flags { + name = "log_connections" + value = "on" + } + + database_flags { + name = "log_disconnections" + value = "on" + } + + database_flags { + name = "log_min_duration_statement" + value = "1000" + } + + user_labels = local.common_labels + } + + depends_on = [google_service_networking_connection.private_services] +} + +resource "google_sql_database" "main" { + name = var.db_name + instance = google_sql_database_instance.main.name + project = var.project_id +} + +resource "google_sql_user" "app" { + name = var.db_username + instance = google_sql_database_instance.main.name + password = var.db_password + project = var.project_id +} + +################################################################################ +# Memorystore Redis 7 — STANDARD_HA (primary + replica), TLS enabled +################################################################################ + +resource "google_redis_instance" "main" { + name = "${local.name_prefix}-redis" + tier = var.memorystore_tier + memory_size_gb = var.memorystore_memory_size_gb + region = var.region + project = var.project_id + + redis_version = var.memorystore_redis_version + + # Private connectivity via the VPC + authorized_network = google_compute_network.main.id + connect_mode = "PRIVATE_SERVICE_ACCESS" + + # TLS in transit + transit_encryption_mode = "SERVER_AUTHENTICATION" + + # No AUTH token for Memorystore — access is controlled by VPC network policy. + # If AUTH is required, set auth_enabled = true and read the generated auth_string output. + auth_enabled = true + + redis_configs = { + lazyfree-lazy-eviction = "yes" + lazyfree-lazy-expire = "yes" + } + + maintenance_policy { + weekly_maintenance_window { + day = "SUNDAY" + start_time { + hours = 6 + minutes = 0 + seconds = 0 + nanos = 0 + } + } + } + + labels = local.common_labels + + depends_on = [google_service_networking_connection.private_services] +} + +################################################################################ +# Module: AgentIdP (Cloud Run) +################################################################################ + +module "agentidp" { + source = "../../modules/agentidp" + + provider_type = "gcp" + environment = var.environment + project = var.project + app_image = "sentryagent/agentidp:${var.app_image_tag}" + app_port = 3000 + + gcp_project_id = var.project_id + gcp_region = var.region + gcp_service_account_email = google_service_account.cloud_run.email + gcp_vpc_connector_name = google_vpc_access_connector.main.id + gcp_min_instances = var.cloud_run_min_instances + gcp_max_instances = var.cloud_run_max_instances + gcp_cpu = var.cloud_run_cpu + gcp_memory = var.cloud_run_memory + gcp_cors_origin = var.cors_origin + gcp_policy_dir = "/app/policies" + gcp_vault_addr = var.vault_addr + gcp_vault_mount = var.vault_mount + + gcp_secret_database_url_id = google_secret_manager_secret.database_url.secret_id + gcp_secret_redis_url_id = google_secret_manager_secret.redis_url.secret_id + gcp_secret_jwt_private_key_id = google_secret_manager_secret.jwt_private_key.secret_id + gcp_secret_jwt_public_key_id = google_secret_manager_secret.jwt_public_key.secret_id + gcp_secret_vault_token_id = var.vault_token != "" ? google_secret_manager_secret.vault_token[0].secret_id : "" + + depends_on = [ + google_secret_manager_secret_version.database_url, + google_secret_manager_secret_version.redis_url, + google_secret_manager_secret_version.jwt_private_key, + google_secret_manager_secret_version.jwt_public_key, + google_secret_manager_secret_iam_member.run_database_url, + google_secret_manager_secret_iam_member.run_redis_url, + google_secret_manager_secret_iam_member.run_jwt_private_key, + google_secret_manager_secret_iam_member.run_jwt_public_key, + ] +} diff --git a/terraform/environments/gcp/outputs.tf b/terraform/environments/gcp/outputs.tf new file mode 100644 index 0000000..ebf5a33 --- /dev/null +++ b/terraform/environments/gcp/outputs.tf @@ -0,0 +1,64 @@ +################################################################################ +# Environment: gcp +# Outputs +################################################################################ + +output "service_url" { + description = "Public HTTPS URL of the AgentIdP Cloud Run service (Google-managed TLS)." + value = module.agentidp.gcp_cloud_run_service_url +} + +output "cloud_run_service_name" { + description = "Name of the Cloud Run service." + value = module.agentidp.gcp_cloud_run_service_name +} + +output "cloud_run_service_id" { + description = "Full resource ID of the Cloud Run service." + value = module.agentidp.gcp_cloud_run_service_id +} + +output "cloud_sql_instance_name" { + description = "Cloud SQL instance name." + value = google_sql_database_instance.main.name +} + +output "cloud_sql_private_ip" { + description = "Private IP address of the Cloud SQL instance." + value = google_sql_database_instance.main.private_ip_address +} + +output "cloud_sql_connection_name" { + description = "Cloud SQL instance connection name (project:region:name) for Cloud SQL Proxy." + value = google_sql_database_instance.main.connection_name +} + +output "memorystore_host" { + description = "IP address of the Memorystore Redis primary endpoint." + value = google_redis_instance.main.host +} + +output "memorystore_port" { + description = "Port of the Memorystore Redis instance." + value = google_redis_instance.main.port +} + +output "memorystore_id" { + description = "Fully-qualified resource ID of the Memorystore Redis instance." + value = google_redis_instance.main.id +} + +output "vpc_network_name" { + description = "Name of the VPC network created for this deployment." + value = google_compute_network.main.name +} + +output "vpc_connector_name" { + description = "Serverless VPC Access connector name used by Cloud Run." + value = google_vpc_access_connector.main.name +} + +output "cloud_run_service_account_email" { + description = "Email of the service account attached to the Cloud Run service." + value = google_service_account.cloud_run.email +} diff --git a/terraform/environments/gcp/terraform.tfvars.example b/terraform/environments/gcp/terraform.tfvars.example new file mode 100644 index 0000000..b4f21ff --- /dev/null +++ b/terraform/environments/gcp/terraform.tfvars.example @@ -0,0 +1,70 @@ +# ───────────────────────────────────────────────────────────────────────────── +# terraform/environments/gcp/terraform.tfvars.example +# +# Copy this file to terraform.tfvars and fill in real values. +# NEVER commit terraform.tfvars to version control — it contains secrets. +# +# All sensitive variables (db_password, jwt_*, vault_token) must be provided +# via this file or as TF_VAR_* environment variables in your CI/CD pipeline. +# ───────────────────────────────────────────────────────────────────────────── + +# ── GCP project & region ────────────────────────────────────────────────────── + +project_id = "your-gcp-project-id" +region = "us-central1" +environment = "production" +project = "sentryagent-agentidp" + +# ── Application image ───────────────────────────────────────────────────────── + +app_image_tag = "1.0.0" + +# ── Networking ──────────────────────────────────────────────────────────────── + +vpc_cidr = "10.1.0.0/24" +vpc_connector_cidr = "10.8.0.0/28" + +# ── Database ────────────────────────────────────────────────────────────────── + +db_tier = "db-g1-small" +db_name = "sentryagent_idp" +db_username = "sentryagent" +db_availability_type = "REGIONAL" + +# ── Secrets — REPLACE ALL VALUES BELOW ─────────────────────────────────────── + +# Password for Cloud SQL PostgreSQL user +db_password = "REPLACE_WITH_STRONG_RANDOM_PASSWORD" + +# RSA-2048 key pair for JWT signing/verification. +# Generate with: +# openssl genrsa -out private.pem 2048 +# openssl rsa -in private.pem -pubout -out public.pem +jwt_private_key = "-----BEGIN RSA PRIVATE KEY-----\nREPLACE_WITH_ACTUAL_PRIVATE_KEY_CONTENTS\n-----END RSA PRIVATE KEY-----" +jwt_public_key = "-----BEGIN PUBLIC KEY-----\nREPLACE_WITH_ACTUAL_PUBLIC_KEY_CONTENTS\n-----END PUBLIC KEY-----" + +# HashiCorp Vault (optional — leave empty strings to disable Vault integration) +vault_addr = "" +vault_token = "" +vault_mount = "secret" + +# ── Application configuration ───────────────────────────────────────────────── + +cors_origin = "*" + +# ── Cloud Run scaling ───────────────────────────────────────────────────────── + +cloud_run_min_instances = 1 +cloud_run_max_instances = 10 +cloud_run_cpu = "1" +cloud_run_memory = "512Mi" + +# ── Memorystore Redis ───────────────────────────────────────────────────────── + +memorystore_memory_size_gb = 1 +memorystore_redis_version = "REDIS_7_0" +memorystore_tier = "STANDARD_HA" + +# ── Protection ──────────────────────────────────────────────────────────────── + +deletion_protection = true diff --git a/terraform/environments/gcp/variables.tf b/terraform/environments/gcp/variables.tf new file mode 100644 index 0000000..682852d --- /dev/null +++ b/terraform/environments/gcp/variables.tf @@ -0,0 +1,175 @@ +################################################################################ +# Environment: gcp +# Variables +################################################################################ + +variable "project_id" { + description = "GCP project ID where all resources will be created." + type = string +} + +variable "region" { + description = "GCP region for all resources." + type = string + default = "us-central1" +} + +variable "environment" { + description = "Deployment environment (e.g. production, staging)." + type = string + default = "production" +} + +variable "project" { + description = "Project identifier — used in resource names and labels." + type = string + default = "sentryagent-agentidp" +} + +variable "app_image_tag" { + description = "Docker image tag to deploy (e.g. '1.2.3')." + type = string +} + +################################################################################ +# Networking +################################################################################ + +variable "vpc_cidr" { + description = "CIDR range for the VPC subnet used by Cloud Run and Cloud SQL." + type = string + default = "10.1.0.0/24" +} + +variable "vpc_connector_cidr" { + description = "CIDR range for the Serverless VPC Access connector (/28 required)." + type = string + default = "10.8.0.0/28" +} + +################################################################################ +# Database +################################################################################ + +variable "db_tier" { + description = "Cloud SQL instance tier (machine type)." + type = string + default = "db-g1-small" +} + +variable "db_name" { + description = "Name of the PostgreSQL database to create." + type = string + default = "sentryagent_idp" +} + +variable "db_username" { + description = "PostgreSQL user for the application." + type = string + default = "sentryagent" +} + +variable "db_availability_type" { + description = "Cloud SQL availability type: REGIONAL (HA) or ZONAL." + type = string + default = "REGIONAL" +} + +################################################################################ +# Secrets — all marked sensitive; provide via tfvars or environment variables +################################################################################ + +variable "db_password" { + description = "Password for the Cloud SQL PostgreSQL user. Stored in Secret Manager." + type = string + sensitive = true +} + +variable "jwt_private_key" { + description = "PEM-encoded RSA-2048 private key for signing JWTs. Stored in Secret Manager." + type = string + sensitive = true +} + +variable "jwt_public_key" { + description = "PEM-encoded RSA-2048 public key for verifying JWTs. Stored in Secret Manager." + type = string + sensitive = true +} + +variable "vault_token" { + description = "HashiCorp Vault token. Leave empty to disable Vault integration." + type = string + sensitive = true + default = "" +} + +################################################################################ +# Optional configuration +################################################################################ + +variable "vault_addr" { + description = "HashiCorp Vault server address. Leave empty to disable Vault integration." + type = string + default = "" +} + +variable "vault_mount" { + description = "HashiCorp Vault KV v2 mount path." + type = string + default = "secret" +} + +variable "cors_origin" { + description = "CORS_ORIGIN value for the app." + type = string + default = "*" +} + +variable "cloud_run_min_instances" { + description = "Minimum Cloud Run instances (set > 0 to prevent cold starts)." + type = number + default = 1 +} + +variable "cloud_run_max_instances" { + description = "Maximum Cloud Run instances." + type = number + default = 10 +} + +variable "cloud_run_cpu" { + description = "CPU limit per Cloud Run instance." + type = string + default = "1" +} + +variable "cloud_run_memory" { + description = "Memory limit per Cloud Run instance." + type = string + default = "512Mi" +} + +variable "memorystore_memory_size_gb" { + description = "Memory size in GiB for the Memorystore Redis instance." + type = number + default = 1 +} + +variable "memorystore_redis_version" { + description = "Redis version for Memorystore." + type = string + default = "REDIS_7_0" +} + +variable "memorystore_tier" { + description = "Memorystore service tier: BASIC (single node) or STANDARD_HA (primary + replica)." + type = string + default = "STANDARD_HA" +} + +variable "deletion_protection" { + description = "Enable deletion protection on Cloud SQL and Memorystore resources." + type = bool + default = true +} diff --git a/terraform/modules/agentidp/main.tf b/terraform/modules/agentidp/main.tf new file mode 100644 index 0000000..0bb406b --- /dev/null +++ b/terraform/modules/agentidp/main.tf @@ -0,0 +1,426 @@ +################################################################################ +# Module: agentidp +# Main — ECS Fargate (AWS) or Cloud Run (GCP) +# +# Deploys the sentryagent/agentidp container. +# All sensitive environment variables are injected from AWS Secrets Manager +# (AWS path) or GCP Secret Manager (GCP path) — no plaintext secrets here. +################################################################################ + +terraform { + required_version = ">= 1.6.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = ">= 5.40.0" + } + google = { + source = "hashicorp/google" + version = ">= 5.20.0" + } + } +} + +################################################################################ +# Locals +################################################################################ + +locals { + common_tags = { + environment = var.environment + project = var.project + managed_by = "terraform" + } + + # Build the list of Vault-related env vars conditionally. + # If vault_addr is empty we omit all Vault env vars entirely. + aws_vault_env_plain = var.aws_vault_addr != "" ? [ + { + name = "VAULT_ADDR" + value = var.aws_vault_addr + }, + { + name = "VAULT_MOUNT" + value = var.aws_vault_mount + } + ] : [] + + aws_vault_secret_env = var.aws_secret_vault_token_arn != "" ? [ + { + name = "VAULT_TOKEN" + valueFrom = var.aws_secret_vault_token_arn + } + ] : [] + + gcp_vault_env_plain = var.gcp_vault_addr != "" ? { + VAULT_ADDR = var.gcp_vault_addr + VAULT_MOUNT = var.gcp_vault_mount + } : {} +} + +################################################################################ +# ── AWS PATH ────────────────────────────────────────────────────────────────── +################################################################################ + +# Security group: allow inbound traffic only from the ALB on app_port, +# allow all outbound (needed for Secrets Manager and ECR API calls over HTTPS). +resource "aws_security_group" "app" { + count = var.provider_type == "aws" ? 1 : 0 + + name = "${var.project}-${var.environment}-app-sg" + description = "Security group for AgentIdP ECS tasks — inbound from ALB only" + vpc_id = var.aws_vpc_id + + ingress { + description = "App port from ALB" + from_port = var.app_port + to_port = var.app_port + protocol = "tcp" + # The ALB security group ID is not directly available here; in the root + # environment module the ALB SG and this SG are cross-referenced. + # The environment module passes the ALB SG id via aws_lb_security_group_id + # below using a separate ingress rule resource to avoid circular dependency. + cidr_blocks = [] + self = false + } + + egress { + description = "All outbound" + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } + + tags = merge(local.common_tags, { + Name = "${var.project}-${var.environment}-app-sg" + }) +} + +# ECS Cluster +resource "aws_ecs_cluster" "main" { + count = var.provider_type == "aws" ? 1 : 0 + + name = "${var.project}-${var.environment}" + + setting { + name = "containerInsights" + value = "enabled" + } + + tags = local.common_tags +} + +# ECS Cluster Capacity Providers — use FARGATE and FARGATE_SPOT +resource "aws_ecs_cluster_capacity_providers" "main" { + count = var.provider_type == "aws" ? 1 : 0 + + cluster_name = aws_ecs_cluster.main[0].name + capacity_providers = ["FARGATE", "FARGATE_SPOT"] + + default_capacity_provider_strategy { + capacity_provider = "FARGATE" + weight = 1 + base = 1 + } +} + +# CloudWatch Log Group +resource "aws_cloudwatch_log_group" "app" { + count = var.provider_type == "aws" ? 1 : 0 + + name = var.aws_log_group_name + retention_in_days = 30 + + tags = local.common_tags +} + +# ECS Task Definition +resource "aws_ecs_task_definition" "app" { + count = var.provider_type == "aws" ? 1 : 0 + + family = "${var.project}-${var.environment}" + network_mode = "awsvpc" + requires_compatibilities = ["FARGATE"] + cpu = tostring(var.aws_cpu) + memory = tostring(var.aws_memory) + execution_role_arn = var.aws_execution_role_arn + task_role_arn = var.aws_task_role_arn + + container_definitions = jsonencode([ + { + name = "agentidp" + image = var.app_image + essential = true + + portMappings = [ + { + containerPort = var.app_port + protocol = "tcp" + } + ] + + # Plain (non-sensitive) environment variables + environment = concat( + [ + { name = "PORT", value = tostring(var.app_port) }, + { name = "NODE_ENV", value = "production" }, + { name = "CORS_ORIGIN", value = var.aws_cors_origin }, + { name = "POLICY_DIR", value = var.aws_policy_dir } + ], + local.aws_vault_env_plain + ) + + # Sensitive values fetched from Secrets Manager at task launch. + # Each entry is injected as the named environment variable. + secrets = concat( + [ + { + name = "DATABASE_URL" + valueFrom = var.aws_secret_database_url_arn + }, + { + name = "REDIS_URL" + valueFrom = var.aws_secret_redis_url_arn + }, + { + name = "JWT_PRIVATE_KEY" + valueFrom = var.aws_secret_jwt_private_key_arn + }, + { + name = "JWT_PUBLIC_KEY" + valueFrom = var.aws_secret_jwt_public_key_arn + } + ], + local.aws_vault_secret_env + ) + + logConfiguration = { + logDriver = "awslogs" + options = { + "awslogs-group" = var.aws_log_group_name + "awslogs-region" = var.aws_region + "awslogs-stream-prefix" = "agentidp" + } + } + + healthCheck = { + command = ["CMD-SHELL", "wget -qO- http://localhost:${var.app_port}/health || exit 1"] + interval = 30 + timeout = 5 + retries = 3 + startPeriod = 60 + } + + readonlyRootFilesystem = false + user = "node" + } + ]) + + tags = local.common_tags +} + +# ECS Service +resource "aws_ecs_service" "app" { + count = var.provider_type == "aws" ? 1 : 0 + + name = "${var.project}-${var.environment}" + cluster = aws_ecs_cluster.main[0].id + task_definition = aws_ecs_task_definition.app[0].arn + desired_count = var.aws_desired_count + launch_type = "FARGATE" + + # Rolling update: keep at least 100% healthy tasks during deployment + deployment_minimum_healthy_percent = 100 + deployment_maximum_percent = 200 + + network_configuration { + subnets = var.aws_subnet_ids + security_groups = [aws_security_group.app[0].id] + assign_public_ip = false + } + + load_balancer { + target_group_arn = var.aws_target_group_arn + container_name = "agentidp" + container_port = var.app_port + } + + # Ignore task_definition changes driven by image tag updates — deployments + # are managed externally (CI/CD pipeline updates the image tag). + lifecycle { + ignore_changes = [task_definition, desired_count] + } + + tags = local.common_tags + + depends_on = [aws_ecs_cluster_capacity_providers.main] +} + +################################################################################ +# ── GCP PATH ────────────────────────────────────────────────────────────────── +################################################################################ + +# Cloud Run Service +resource "google_cloud_run_v2_service" "app" { + count = var.provider_type == "gcp" ? 1 : 0 + + name = "${var.project}-${var.environment}" + location = var.gcp_region + project = var.gcp_project_id + + # Ingress: allow only requests from the load balancer / public internet. + # Cloud Run provides Google-managed TLS on the default *.run.app domain + # and on any custom domains mapped via Cloud Run domain mappings. + ingress = "INGRESS_TRAFFIC_ALL" + + template { + service_account = var.gcp_service_account_email + + scaling { + min_instance_count = var.gcp_min_instances + max_instance_count = var.gcp_max_instances + } + + # VPC access — route outbound traffic through the VPC connector so the + # container can reach Cloud SQL (private IP) and Memorystore. + vpc_access { + connector = var.gcp_vpc_connector_name + egress = "PRIVATE_RANGES_ONLY" + } + + containers { + image = var.app_image + + ports { + container_port = var.app_port + } + + resources { + limits = { + cpu = var.gcp_cpu + memory = var.gcp_memory + } + cpu_idle = false + startup_cpu_boost = true + } + + # Plain environment variables + dynamic "env" { + for_each = merge( + { + PORT = tostring(var.app_port) + NODE_ENV = "production" + CORS_ORIGIN = var.gcp_cors_origin + POLICY_DIR = var.gcp_policy_dir + }, + local.gcp_vault_env_plain + ) + content { + name = env.key + value = env.value + } + } + + # DATABASE_URL from Secret Manager + env { + name = "DATABASE_URL" + value_source { + secret_key_ref { + secret = var.gcp_secret_database_url_id + version = "latest" + } + } + } + + # REDIS_URL from Secret Manager + env { + name = "REDIS_URL" + value_source { + secret_key_ref { + secret = var.gcp_secret_redis_url_id + version = "latest" + } + } + } + + # JWT_PRIVATE_KEY from Secret Manager + env { + name = "JWT_PRIVATE_KEY" + value_source { + secret_key_ref { + secret = var.gcp_secret_jwt_private_key_id + version = "latest" + } + } + } + + # JWT_PUBLIC_KEY from Secret Manager + env { + name = "JWT_PUBLIC_KEY" + value_source { + secret_key_ref { + secret = var.gcp_secret_jwt_public_key_id + version = "latest" + } + } + } + + # VAULT_TOKEN from Secret Manager (conditional) + dynamic "env" { + for_each = var.gcp_secret_vault_token_id != "" ? [1] : [] + content { + name = "VAULT_TOKEN" + value_source { + secret_key_ref { + secret = var.gcp_secret_vault_token_id + version = "latest" + } + } + } + } + + liveness_probe { + http_get { + path = "/health" + port = var.app_port + } + initial_delay_seconds = 30 + period_seconds = 15 + failure_threshold = 3 + timeout_seconds = 5 + } + + startup_probe { + http_get { + path = "/health" + port = var.app_port + } + initial_delay_seconds = 10 + period_seconds = 5 + failure_threshold = 12 + timeout_seconds = 3 + } + } + } + + labels = { + environment = var.environment + project = replace(var.project, "-", "_") + managed_by = "terraform" + } +} + +# Allow unauthenticated (public internet) invocations of the Cloud Run service. +# Authentication for AgentIdP clients is handled by the application layer +# (JWT Bearer tokens), not by Cloud Run's built-in IAM auth. +resource "google_cloud_run_v2_service_iam_member" "public_invoker" { + count = var.provider_type == "gcp" ? 1 : 0 + + project = var.gcp_project_id + location = var.gcp_region + name = google_cloud_run_v2_service.app[0].name + role = "roles/run.invoker" + member = "allUsers" +} diff --git a/terraform/modules/agentidp/outputs.tf b/terraform/modules/agentidp/outputs.tf new file mode 100644 index 0000000..0a59e19 --- /dev/null +++ b/terraform/modules/agentidp/outputs.tf @@ -0,0 +1,55 @@ +################################################################################ +# Module: agentidp +# Outputs +################################################################################ + +# ── AWS Outputs ────────────────────────────────────────────────────────────── + +output "aws_ecs_cluster_arn" { + description = "ARN of the ECS cluster hosting the AgentIdP service." + value = var.provider_type == "aws" ? aws_ecs_cluster.main[0].arn : null +} + +output "aws_ecs_service_name" { + description = "Name of the ECS Fargate service." + value = var.provider_type == "aws" ? aws_ecs_service.app[0].name : null +} + +output "aws_ecs_task_definition_arn" { + description = "ARN of the active ECS task definition revision." + value = var.provider_type == "aws" ? aws_ecs_task_definition.app[0].arn : null +} + +output "aws_app_security_group_id" { + description = "Security group ID attached to the ECS tasks. Use this to add ingress rules from the ALB." + value = var.provider_type == "aws" ? aws_security_group.app[0].id : null +} + +output "aws_cloudwatch_log_group_name" { + description = "CloudWatch log group name for ECS container logs." + value = var.provider_type == "aws" ? aws_cloudwatch_log_group.app[0].name : null +} + +# ── GCP Outputs ────────────────────────────────────────────────────────────── + +output "gcp_cloud_run_service_name" { + description = "Name of the Cloud Run service." + value = var.provider_type == "gcp" ? google_cloud_run_v2_service.app[0].name : null +} + +output "gcp_cloud_run_service_url" { + description = "Publicly accessible HTTPS URL of the Cloud Run service (Google-managed TLS)." + value = var.provider_type == "gcp" ? google_cloud_run_v2_service.app[0].uri : null +} + +output "gcp_cloud_run_service_id" { + description = "Full resource ID of the Cloud Run service." + value = var.provider_type == "gcp" ? google_cloud_run_v2_service.app[0].id : null +} + +# ── Unified Outputs ─────────────────────────────────────────────────────────── + +output "service_url" { + description = "Publicly accessible service URL. Populated for GCP (Cloud Run native URL). For AWS use the ALB DNS name from the lb module." + value = var.provider_type == "gcp" ? google_cloud_run_v2_service.app[0].uri : null +} diff --git a/terraform/modules/agentidp/variables.tf b/terraform/modules/agentidp/variables.tf new file mode 100644 index 0000000..265fb4e --- /dev/null +++ b/terraform/modules/agentidp/variables.tf @@ -0,0 +1,279 @@ +################################################################################ +# Module: agentidp +# Variables +# +# Accepts all configuration for deploying the AgentIdP container to either +# AWS ECS Fargate (provider = "aws") or GCP Cloud Run (provider = "gcp"). +################################################################################ + +variable "provider_type" { + description = "Cloud provider target: 'aws' or 'gcp'." + type = string + + validation { + condition = contains(["aws", "gcp"], var.provider_type) + error_message = "provider_type must be either 'aws' or 'gcp'." + } +} + +variable "environment" { + description = "Deployment environment label (e.g. production, staging)." + type = string +} + +variable "project" { + description = "Project identifier used in resource tags and names." + type = string + default = "sentryagent-agentidp" +} + +variable "app_image" { + description = "Fully-qualified container image reference including registry host and tag." + type = string + # Example: "sentryagent/agentidp:1.2.3" +} + +variable "app_port" { + description = "Port the AgentIdP container listens on. Must match the PORT env var." + type = number + default = 3000 +} + +################################################################################ +# AWS-specific variables (required when provider_type = "aws") +################################################################################ + +variable "aws_region" { + description = "(AWS) AWS region where ECS resources are deployed." + type = string + default = "" +} + +variable "aws_vpc_id" { + description = "(AWS) VPC ID in which to create the ECS service and security group." + type = string + default = "" +} + +variable "aws_subnet_ids" { + description = "(AWS) List of private subnet IDs for the ECS Fargate tasks." + type = list(string) + default = [] +} + +variable "aws_target_group_arn" { + description = "(AWS) ARN of the ALB target group to register ECS tasks with." + type = string + default = "" +} + +variable "aws_execution_role_arn" { + description = "(AWS) IAM role ARN that ECS uses to pull images and write logs (ECS task execution role)." + type = string + default = "" +} + +variable "aws_task_role_arn" { + description = "(AWS) IAM role ARN granted to the running ECS task (allows it to call Secrets Manager, etc.)." + type = string + default = "" +} + +variable "aws_log_group_name" { + description = "(AWS) CloudWatch log group name where container logs are sent." + type = string + default = "/ecs/sentryagent-agentidp" +} + +variable "aws_desired_count" { + description = "(AWS) Number of ECS Fargate task instances to run." + type = number + default = 2 +} + +variable "aws_cpu" { + description = "(AWS) ECS task CPU units (256 = 0.25 vCPU)." + type = number + default = 512 +} + +variable "aws_memory" { + description = "(AWS) ECS task memory in MiB." + type = number + default = 1024 +} + +# Secret ARNs — the ECS task fetches these from Secrets Manager at launch time. +# The task execution role must have secretsmanager:GetSecretValue on each ARN. + +variable "aws_secret_database_url_arn" { + description = "(AWS) ARN of the Secrets Manager secret holding DATABASE_URL." + type = string + default = "" + sensitive = true +} + +variable "aws_secret_redis_url_arn" { + description = "(AWS) ARN of the Secrets Manager secret holding REDIS_URL." + type = string + default = "" + sensitive = true +} + +variable "aws_secret_jwt_private_key_arn" { + description = "(AWS) ARN of the Secrets Manager secret holding JWT_PRIVATE_KEY." + type = string + default = "" + sensitive = true +} + +variable "aws_secret_jwt_public_key_arn" { + description = "(AWS) ARN of the Secrets Manager secret holding JWT_PUBLIC_KEY." + type = string + default = "" + sensitive = true +} + +variable "aws_secret_vault_token_arn" { + description = "(AWS) ARN of the Secrets Manager secret holding VAULT_TOKEN. Leave empty to omit Vault integration." + type = string + default = "" + sensitive = true +} + +variable "aws_vault_addr" { + description = "(AWS) HashiCorp Vault address injected as a plain env var (not a secret). Leave empty to disable." + type = string + default = "" +} + +variable "aws_vault_mount" { + description = "(AWS) HashiCorp Vault KV v2 mount path." + type = string + default = "secret" +} + +variable "aws_cors_origin" { + description = "(AWS) Value for CORS_ORIGIN env var." + type = string + default = "*" +} + +variable "aws_policy_dir" { + description = "(AWS) Path inside the container where OPA policy files are located." + type = string + default = "/app/policies" +} + +################################################################################ +# GCP-specific variables (required when provider_type = "gcp") +################################################################################ + +variable "gcp_project_id" { + description = "(GCP) GCP project ID where Cloud Run and supporting resources live." + type = string + default = "" +} + +variable "gcp_region" { + description = "(GCP) GCP region for Cloud Run deployment." + type = string + default = "" +} + +variable "gcp_service_account_email" { + description = "(GCP) Service account email attached to the Cloud Run service." + type = string + default = "" +} + +variable "gcp_vpc_connector_name" { + description = "(GCP) Serverless VPC Access connector name for reaching Cloud SQL and Memorystore." + type = string + default = "" +} + +variable "gcp_min_instances" { + description = "(GCP) Minimum number of Cloud Run instances (set > 0 to avoid cold starts)." + type = number + default = 1 +} + +variable "gcp_max_instances" { + description = "(GCP) Maximum number of Cloud Run instances." + type = number + default = 10 +} + +variable "gcp_cpu" { + description = "(GCP) CPU limit for each Cloud Run container instance (e.g. '1', '2')." + type = string + default = "1" +} + +variable "gcp_memory" { + description = "(GCP) Memory limit for each Cloud Run container instance (e.g. '512Mi', '1Gi')." + type = string + default = "512Mi" +} + +# Secret Manager secret IDs — the Cloud Run service fetches these at startup. + +variable "gcp_secret_database_url_id" { + description = "(GCP) Secret Manager secret ID for DATABASE_URL." + type = string + default = "" + sensitive = true +} + +variable "gcp_secret_redis_url_id" { + description = "(GCP) Secret Manager secret ID for REDIS_URL." + type = string + default = "" + sensitive = true +} + +variable "gcp_secret_jwt_private_key_id" { + description = "(GCP) Secret Manager secret ID for JWT_PRIVATE_KEY." + type = string + default = "" + sensitive = true +} + +variable "gcp_secret_jwt_public_key_id" { + description = "(GCP) Secret Manager secret ID for JWT_PUBLIC_KEY." + type = string + default = "" + sensitive = true +} + +variable "gcp_secret_vault_token_id" { + description = "(GCP) Secret Manager secret ID for VAULT_TOKEN. Leave empty to omit Vault integration." + type = string + default = "" + sensitive = true +} + +variable "gcp_vault_addr" { + description = "(GCP) HashiCorp Vault address injected as a plain env var. Leave empty to disable." + type = string + default = "" +} + +variable "gcp_vault_mount" { + description = "(GCP) HashiCorp Vault KV v2 mount path." + type = string + default = "secret" +} + +variable "gcp_cors_origin" { + description = "(GCP) Value for CORS_ORIGIN env var." + type = string + default = "*" +} + +variable "gcp_policy_dir" { + description = "(GCP) Path inside the Cloud Run container where OPA policy files are located." + type = string + default = "/app/policies" +} diff --git a/terraform/modules/lb/main.tf b/terraform/modules/lb/main.tf new file mode 100644 index 0000000..a0d4893 --- /dev/null +++ b/terraform/modules/lb/main.tf @@ -0,0 +1,183 @@ +################################################################################ +# Module: lb +# Main — AWS Application Load Balancer +# +# - Internet-facing ALB in public subnets +# - HTTPS listener (443) with ACM certificate, TLS 1.2+ enforced +# - HTTP listener (80) redirects permanently to HTTPS — no plaintext traffic +# - Target group pointing to ECS Fargate tasks on the app port +# - Access logs optionally streamed to S3 +################################################################################ + +terraform { + required_version = ">= 1.6.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = ">= 5.40.0" + } + } +} + +locals { + identifier = "${var.project}-${var.environment}" + + common_tags = { + environment = var.environment + project = var.project + managed_by = "terraform" + } +} + +################################################################################ +# Security Group — ALB allows inbound 80 + 443 from the internet +################################################################################ + +resource "aws_security_group" "alb" { + name = "${local.identifier}-alb-sg" + description = "ALB security group — inbound 80/443 from internet, outbound to app" + vpc_id = var.vpc_id + + ingress { + description = "HTTP from internet (redirected to HTTPS)" + from_port = 80 + to_port = 80 + protocol = "tcp" + cidr_blocks = var.allowed_ingress_cidrs + } + + ingress { + description = "HTTPS from internet" + from_port = 443 + to_port = 443 + protocol = "tcp" + cidr_blocks = var.allowed_ingress_cidrs + } + + egress { + description = "Forward to ECS app tasks" + from_port = var.target_group_port + to_port = var.target_group_port + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0"] + } + + tags = merge(local.common_tags, { + Name = "${local.identifier}-alb-sg" + }) +} + +################################################################################ +# Application Load Balancer +################################################################################ + +resource "aws_lb" "main" { + name = "${local.identifier}-alb" + internal = false + load_balancer_type = "application" + security_groups = [aws_security_group.alb.id] + subnets = var.subnet_ids + + idle_timeout = var.idle_timeout + enable_deletion_protection = var.enable_deletion_protection + + # HTTP/2 is enabled by default on ALB; leave it on for performance. + enable_http2 = true + + # Drop invalid header fields to harden against request smuggling. + drop_invalid_header_fields = true + + dynamic "access_logs" { + for_each = var.access_logs_bucket != "" ? [1] : [] + content { + bucket = var.access_logs_bucket + prefix = var.access_logs_prefix + enabled = true + } + } + + tags = merge(local.common_tags, { + Name = "${local.identifier}-alb" + }) +} + +################################################################################ +# Target Group — ECS Fargate tasks register here +################################################################################ + +resource "aws_lb_target_group" "app" { + name = "${local.identifier}-tg" + port = var.target_group_port + protocol = "HTTP" + vpc_id = var.vpc_id + target_type = "ip" # Required for Fargate (awsvpc network mode) + + deregistration_delay = 30 + + health_check { + enabled = true + path = var.target_group_health_check_path + port = "traffic-port" + protocol = "HTTP" + interval = var.target_group_health_check_interval + timeout = var.target_group_health_check_timeout + healthy_threshold = var.target_group_healthy_threshold + unhealthy_threshold = var.target_group_unhealthy_threshold + matcher = "200" + } + + stickiness { + type = "lb_cookie" + enabled = false # AgentIdP is stateless (JWT-based); no sticky sessions needed + } + + tags = merge(local.common_tags, { + Name = "${local.identifier}-tg" + }) + + lifecycle { + create_before_destroy = true + } +} + +################################################################################ +# HTTPS Listener (port 443) — primary listener +################################################################################ + +resource "aws_lb_listener" "https" { + load_balancer_arn = aws_lb.main.arn + port = 443 + protocol = "HTTPS" + ssl_policy = var.ssl_policy + certificate_arn = var.certificate_arn + + default_action { + type = "forward" + target_group_arn = aws_lb_target_group.app.arn + } + + tags = local.common_tags +} + +################################################################################ +# HTTP Listener (port 80) — permanent redirect to HTTPS +################################################################################ + +resource "aws_lb_listener" "http_redirect" { + load_balancer_arn = aws_lb.main.arn + port = 80 + protocol = "HTTP" + + default_action { + type = "redirect" + + redirect { + port = "443" + protocol = "HTTPS" + status_code = "HTTP_301" + } + } + + tags = local.common_tags +} diff --git a/terraform/modules/lb/outputs.tf b/terraform/modules/lb/outputs.tf new file mode 100644 index 0000000..1119225 --- /dev/null +++ b/terraform/modules/lb/outputs.tf @@ -0,0 +1,49 @@ +################################################################################ +# Module: lb +# Outputs +################################################################################ + +output "alb_dns_name" { + description = "DNS name of the Application Load Balancer. Create a CNAME or alias record in Route 53 pointing your domain here." + value = aws_lb.main.dns_name +} + +output "alb_zone_id" { + description = "Hosted zone ID of the ALB. Use with aws_route53_record alias records." + value = aws_lb.main.zone_id +} + +output "alb_arn" { + description = "ARN of the Application Load Balancer." + value = aws_lb.main.arn +} + +output "alb_arn_suffix" { + description = "ARN suffix of the ALB for use in CloudWatch metrics." + value = aws_lb.main.arn_suffix +} + +output "target_group_arn" { + description = "ARN of the target group. Pass to the agentidp module as aws_target_group_arn." + value = aws_lb_target_group.app.arn +} + +output "target_group_arn_suffix" { + description = "ARN suffix of the target group for use in CloudWatch metrics." + value = aws_lb_target_group.app.arn_suffix +} + +output "https_listener_arn" { + description = "ARN of the HTTPS listener." + value = aws_lb_listener.https.arn +} + +output "http_redirect_listener_arn" { + description = "ARN of the HTTP→HTTPS redirect listener." + value = aws_lb_listener.http_redirect.arn +} + +output "alb_security_group_id" { + description = "Security group ID of the ALB. Add this as an allowed source in the app task security group." + value = aws_security_group.alb.id +} diff --git a/terraform/modules/lb/variables.tf b/terraform/modules/lb/variables.tf new file mode 100644 index 0000000..ec2556a --- /dev/null +++ b/terraform/modules/lb/variables.tf @@ -0,0 +1,102 @@ +################################################################################ +# Module: lb +# Variables — AWS Application Load Balancer +################################################################################ + +variable "environment" { + description = "Deployment environment label (e.g. production, staging)." + type = string +} + +variable "project" { + description = "Project identifier used in resource names and tags." + type = string + default = "sentryagent-agentidp" +} + +variable "vpc_id" { + description = "VPC ID in which to create the ALB and its security group." + type = string +} + +variable "subnet_ids" { + description = "List of public subnet IDs for the ALB. Must span at least 2 AZs." + type = list(string) +} + +variable "certificate_arn" { + description = "ARN of the ACM certificate to attach to the HTTPS listener (port 443)." + type = string +} + +variable "target_group_port" { + description = "Port that ECS task containers listen on. Target group forwards traffic to this port." + type = number + default = 3000 +} + +variable "target_group_health_check_path" { + description = "HTTP path used by the ALB target group health check." + type = string + default = "/health" +} + +variable "target_group_health_check_interval" { + description = "Interval in seconds between ALB health checks." + type = number + default = 30 +} + +variable "target_group_health_check_timeout" { + description = "Timeout in seconds for each ALB health check request." + type = number + default = 5 +} + +variable "target_group_healthy_threshold" { + description = "Number of consecutive successful health checks before marking a target healthy." + type = number + default = 2 +} + +variable "target_group_unhealthy_threshold" { + description = "Number of consecutive failed health checks before marking a target unhealthy." + type = number + default = 3 +} + +variable "idle_timeout" { + description = "ALB idle connection timeout in seconds." + type = number + default = 60 +} + +variable "enable_deletion_protection" { + description = "Prevent the ALB from being deleted via the AWS API." + type = bool + default = true +} + +variable "access_logs_bucket" { + description = "S3 bucket name for ALB access logs. Leave empty to disable access logging." + type = string + default = "" +} + +variable "access_logs_prefix" { + description = "S3 key prefix for ALB access log files." + type = string + default = "alb" +} + +variable "ssl_policy" { + description = "SSL negotiation policy for the HTTPS listener. ELBSecurityPolicy-TLS13-1-2-2021-06 enforces TLS 1.2+ and TLS 1.3." + type = string + default = "ELBSecurityPolicy-TLS13-1-2-2021-06" +} + +variable "allowed_ingress_cidrs" { + description = "CIDR blocks allowed to reach the ALB on port 80 and 443. Default allows public internet." + type = list(string) + default = ["0.0.0.0/0"] +} diff --git a/terraform/modules/rds/main.tf b/terraform/modules/rds/main.tf new file mode 100644 index 0000000..3abbfad --- /dev/null +++ b/terraform/modules/rds/main.tf @@ -0,0 +1,180 @@ +################################################################################ +# Module: rds +# Main — AWS RDS PostgreSQL 14 +# +# - Multi-AZ for HA +# - Encryption at rest (AWS-managed KMS key) +# - No public access — VPC-internal only +# - Storage autoscaling up to max_allocated_storage +# - Enhanced monitoring and Performance Insights enabled by default +# - Access restricted to explicitly allowed security groups (app only) +################################################################################ + +terraform { + required_version = ">= 1.6.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = ">= 5.40.0" + } + } +} + +locals { + identifier = "${var.project}-${var.environment}" + + common_tags = { + environment = var.environment + project = var.project + managed_by = "terraform" + } +} + +################################################################################ +# Security Group — only the app SGs may connect on 5432 +################################################################################ + +resource "aws_security_group" "rds" { + name = "${local.identifier}-rds-sg" + description = "Controls inbound access to RDS PostgreSQL — allow only app SG on 5432" + vpc_id = var.vpc_id + + # No ingress rules defined here — added dynamically below to avoid circular deps. + egress { + description = "All outbound (RDS initiates no outbound connections; this satisfies AWS requirement)" + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } + + tags = merge(local.common_tags, { + Name = "${local.identifier}-rds-sg" + }) +} + +resource "aws_security_group_rule" "rds_ingress_from_app" { + for_each = toset(var.allowed_security_group_ids) + + type = "ingress" + description = "PostgreSQL from app security group" + from_port = 5432 + to_port = 5432 + protocol = "tcp" + source_security_group_id = each.value + security_group_id = aws_security_group.rds.id +} + +################################################################################ +# DB Subnet Group — must cover at least 2 AZs for Multi-AZ +################################################################################ + +resource "aws_db_subnet_group" "main" { + name = "${local.identifier}-db-subnet-group" + description = "Private subnets for AgentIdP RDS instance" + subnet_ids = var.subnet_ids + + tags = merge(local.common_tags, { + Name = "${local.identifier}-db-subnet-group" + }) +} + +################################################################################ +# DB Parameter Group — enforce SSL connections +################################################################################ + +resource "aws_db_parameter_group" "main" { + name = "${local.identifier}-pg14-params" + family = var.parameter_group_family + description = "AgentIdP custom parameter group — enforces SSL" + + parameter { + name = "rds.force_ssl" + value = "1" + apply_method = "immediate" + } + + parameter { + name = "log_connections" + value = "1" + apply_method = "immediate" + } + + parameter { + name = "log_disconnections" + value = "1" + apply_method = "immediate" + } + + parameter { + name = "log_min_duration_statement" + value = "1000" + apply_method = "immediate" + } + + tags = local.common_tags +} + +################################################################################ +# RDS Instance +################################################################################ + +resource "aws_db_instance" "main" { + identifier = local.identifier + + # Engine + engine = "postgres" + engine_version = "14" + instance_class = var.instance_class + + # Storage + storage_type = "gp3" + allocated_storage = var.allocated_storage + max_allocated_storage = var.max_allocated_storage + storage_encrypted = true + # kms_key_id is omitted — defaults to the AWS-managed RDS KMS key. + # For customer-managed key, set kms_key_id to your CMK ARN. + + # Database + db_name = var.db_name + username = var.db_username + password = var.db_password + + # Network — VPC-internal only, no public endpoint + db_subnet_group_name = aws_db_subnet_group.main.name + vpc_security_group_ids = [aws_security_group.rds.id] + publicly_accessible = false + multi_az = var.multi_az + port = 5432 + + # Parameter group + parameter_group_name = aws_db_parameter_group.main.name + + # Backups + backup_retention_period = var.backup_retention_days + backup_window = var.backup_window + delete_automated_backups = false + copy_tags_to_snapshot = true + skip_final_snapshot = var.skip_final_snapshot + final_snapshot_identifier = var.skip_final_snapshot ? null : "${local.identifier}-final-snapshot" + + # Maintenance + maintenance_window = var.maintenance_window + auto_minor_version_upgrade = true + apply_immediately = false + + # Observability + enabled_cloudwatch_logs_exports = ["postgresql", "upgrade"] + performance_insights_enabled = var.performance_insights_enabled + performance_insights_retention_period = var.performance_insights_enabled ? var.performance_insights_retention_period : null + monitoring_interval = var.monitoring_interval + monitoring_role_arn = var.monitoring_interval > 0 ? var.monitoring_role_arn : null + + # Protection + deletion_protection = var.deletion_protection + + tags = merge(local.common_tags, { + Name = local.identifier + }) +} diff --git a/terraform/modules/rds/outputs.tf b/terraform/modules/rds/outputs.tf new file mode 100644 index 0000000..444bdee --- /dev/null +++ b/terraform/modules/rds/outputs.tf @@ -0,0 +1,44 @@ +################################################################################ +# Module: rds +# Outputs +################################################################################ + +output "endpoint" { + description = "RDS instance endpoint hostname (without port). Use to construct DATABASE_URL." + value = aws_db_instance.main.address +} + +output "port" { + description = "Port the RDS instance listens on (always 5432)." + value = aws_db_instance.main.port +} + +output "db_name" { + description = "Name of the database created on the RDS instance." + value = aws_db_instance.main.db_name +} + +output "db_username" { + description = "Master username for the RDS instance." + value = aws_db_instance.main.username +} + +output "instance_id" { + description = "RDS instance identifier." + value = aws_db_instance.main.identifier +} + +output "instance_arn" { + description = "ARN of the RDS instance." + value = aws_db_instance.main.arn +} + +output "security_group_id" { + description = "Security group ID attached to the RDS instance. Use to add further ingress rules if needed." + value = aws_security_group.rds.id +} + +output "db_subnet_group_name" { + description = "Name of the DB subnet group." + value = aws_db_subnet_group.main.name +} diff --git a/terraform/modules/rds/variables.tf b/terraform/modules/rds/variables.tf new file mode 100644 index 0000000..fcb51ef --- /dev/null +++ b/terraform/modules/rds/variables.tf @@ -0,0 +1,133 @@ +################################################################################ +# Module: rds +# Variables — AWS RDS PostgreSQL 14 +################################################################################ + +variable "environment" { + description = "Deployment environment label (e.g. production, staging)." + type = string +} + +variable "project" { + description = "Project identifier used in resource names and tags." + type = string + default = "sentryagent-agentidp" +} + +variable "vpc_id" { + description = "VPC ID in which to create the RDS subnet group and security group." + type = string +} + +variable "subnet_ids" { + description = "List of private subnet IDs for the RDS DB subnet group. Must span at least 2 AZs for Multi-AZ." + type = list(string) +} + +variable "allowed_security_group_ids" { + description = "List of security group IDs (e.g. ECS app SG) permitted to connect to RDS on port 5432." + type = list(string) + default = [] +} + +variable "db_name" { + description = "Name of the initial PostgreSQL database to create." + type = string + default = "sentryagent_idp" +} + +variable "db_username" { + description = "Master username for the RDS instance." + type = string + default = "sentryagent" +} + +variable "db_password" { + description = "Master password for the RDS instance. Store this in Secrets Manager; do not hardcode." + type = string + sensitive = true +} + +variable "instance_class" { + description = "RDS instance class." + type = string + default = "db.t3.medium" +} + +variable "allocated_storage" { + description = "Initial storage allocated in GiB." + type = number + default = 50 +} + +variable "max_allocated_storage" { + description = "Upper bound for RDS storage autoscaling in GiB. Set to 0 to disable autoscaling." + type = number + default = 500 +} + +variable "multi_az" { + description = "Enable Multi-AZ deployment for high availability." + type = bool + default = true +} + +variable "backup_retention_days" { + description = "Number of days to retain automated backups. Must be >= 1 for Multi-AZ." + type = number + default = 7 +} + +variable "backup_window" { + description = "Preferred daily backup window in UTC (hh24:mi-hh24:mi)." + type = string + default = "03:00-04:00" +} + +variable "maintenance_window" { + description = "Preferred weekly maintenance window (ddd:hh24:mi-ddd:hh24:mi in UTC)." + type = string + default = "sun:05:00-sun:06:00" +} + +variable "deletion_protection" { + description = "Enable deletion protection. Set to false only when decommissioning." + type = bool + default = true +} + +variable "skip_final_snapshot" { + description = "Whether to skip the final DB snapshot on destroy. Should be false in production." + type = bool + default = false +} + +variable "performance_insights_enabled" { + description = "Enable RDS Performance Insights." + type = bool + default = true +} + +variable "performance_insights_retention_period" { + description = "Performance Insights data retention in days. Free tier = 7; paid tiers = 731." + type = number + default = 7 +} + +variable "monitoring_interval" { + description = "Enhanced monitoring interval in seconds (0 to disable, valid: 1, 5, 10, 15, 30, 60)." + type = number + default = 60 +} + +variable "monitoring_role_arn" { + description = "IAM role ARN for RDS Enhanced Monitoring. Required when monitoring_interval > 0." + type = string + default = "" +} + +variable "parameter_group_family" { + description = "DB parameter group family." + type = string + default = "postgres14" +} diff --git a/terraform/modules/redis/main.tf b/terraform/modules/redis/main.tf new file mode 100644 index 0000000..5a1d92c --- /dev/null +++ b/terraform/modules/redis/main.tf @@ -0,0 +1,176 @@ +################################################################################ +# Module: redis +# Main — AWS ElastiCache Redis 7 +# +# - Single shard (cluster mode disabled): one primary + one replica +# - Encryption at rest and in transit (TLS) +# - AUTH token required when transit encryption is enabled +# - VPC-internal only — no public access +# - Access restricted to explicitly allowed security groups (app only) +# - Slow log + engine log delivery to CloudWatch +################################################################################ + +terraform { + required_version = ">= 1.6.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = ">= 5.40.0" + } + } +} + +locals { + identifier = "${var.project}-${var.environment}" + + common_tags = { + environment = var.environment + project = var.project + managed_by = "terraform" + } +} + +################################################################################ +# CloudWatch Log Group for Redis logs +################################################################################ + +resource "aws_cloudwatch_log_group" "redis" { + count = var.log_delivery_enabled ? 1 : 0 + + name = var.log_group_name + retention_in_days = 30 + + tags = local.common_tags +} + +################################################################################ +# Security Group — only the app SGs may connect on 6379 +################################################################################ + +resource "aws_security_group" "redis" { + name = "${local.identifier}-redis-sg" + description = "Controls inbound access to ElastiCache Redis — allow only app SG on 6379" + vpc_id = var.vpc_id + + egress { + description = "All outbound" + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } + + tags = merge(local.common_tags, { + Name = "${local.identifier}-redis-sg" + }) +} + +resource "aws_security_group_rule" "redis_ingress_from_app" { + for_each = toset(var.allowed_security_group_ids) + + type = "ingress" + description = "Redis from app security group" + from_port = 6379 + to_port = 6379 + protocol = "tcp" + source_security_group_id = each.value + security_group_id = aws_security_group.redis.id +} + +################################################################################ +# ElastiCache Subnet Group +################################################################################ + +resource "aws_elasticache_subnet_group" "main" { + name = "${local.identifier}-redis-subnet-group" + description = "Private subnets for AgentIdP ElastiCache Redis" + subnet_ids = var.subnet_ids + + tags = local.common_tags +} + +################################################################################ +# ElastiCache Parameter Group — Redis 7.x defaults are fine; custom group +# allows future tuning without recreating the replication group. +################################################################################ + +resource "aws_elasticache_parameter_group" "main" { + name = "${local.identifier}-redis7-params" + family = "redis7" + description = "AgentIdP Redis 7 parameter group" + + # Disable dangerous commands that could truncate data in production + parameter { + name = "lazyfree-lazy-eviction" + value = "yes" + } + + parameter { + name = "lazyfree-lazy-expire" + value = "yes" + } + + tags = local.common_tags +} + +################################################################################ +# ElastiCache Replication Group (cluster mode disabled) +# +# cluster_mode = 0 (disabled) gives a single-shard setup: +# - 1 primary node +# - num_cache_clusters - 1 replica nodes +# This matches the application usage: token revocation (SET/GET/DEL), +# rate limiting (INCR/EXPIRE), and monthly counters (INCR) — no sharding needed. +################################################################################ + +resource "aws_elasticache_replication_group" "main" { + replication_group_id = local.identifier + description = "AgentIdP Redis 7 — token revocation, rate limiting, counters" + + # Engine + engine = "redis" + engine_version = var.engine_version + node_type = var.node_type + parameter_group_name = aws_elasticache_parameter_group.main.name + port = 6379 + + # Topology — single shard, primary + replica + num_cache_clusters = var.num_cache_clusters + automatic_failover_enabled = var.automatic_failover_enabled + multi_az_enabled = var.multi_az_enabled + + # Network — VPC-internal, no public endpoints + subnet_group_name = aws_elasticache_subnet_group.main.name + security_group_ids = [aws_security_group.redis.id] + + # Security + at_rest_encryption_enabled = var.at_rest_encryption_enabled + transit_encryption_enabled = var.transit_encryption_enabled + auth_token = var.transit_encryption_enabled && var.auth_token != "" ? var.auth_token : null + + # Maintenance and snapshots + maintenance_window = var.maintenance_window + snapshot_retention_limit = var.snapshot_retention_limit + snapshot_window = var.snapshot_window + apply_immediately = var.apply_immediately + + # Log delivery to CloudWatch + dynamic "log_delivery_configuration" { + for_each = var.log_delivery_enabled ? [ + { log_type = "slow-log", log_format = "json" }, + { log_type = "engine-log", log_format = "json" } + ] : [] + + content { + destination = var.log_delivery_enabled ? aws_cloudwatch_log_group.redis[0].name : "" + destination_type = "cloudwatch-logs" + log_format = log_delivery_configuration.value.log_format + log_type = log_delivery_configuration.value.log_type + } + } + + tags = merge(local.common_tags, { + Name = local.identifier + }) +} diff --git a/terraform/modules/redis/outputs.tf b/terraform/modules/redis/outputs.tf new file mode 100644 index 0000000..4bc93fd --- /dev/null +++ b/terraform/modules/redis/outputs.tf @@ -0,0 +1,34 @@ +################################################################################ +# Module: redis +# Outputs +################################################################################ + +output "primary_endpoint" { + description = "Primary endpoint hostname for write operations. Use to construct REDIS_URL." + value = aws_elasticache_replication_group.main.primary_endpoint_address +} + +output "reader_endpoint" { + description = "Reader endpoint for read operations (load-balanced across replicas)." + value = aws_elasticache_replication_group.main.reader_endpoint_address +} + +output "port" { + description = "Port the Redis replication group listens on (always 6379)." + value = aws_elasticache_replication_group.main.port +} + +output "replication_group_id" { + description = "ID of the ElastiCache replication group." + value = aws_elasticache_replication_group.main.replication_group_id +} + +output "security_group_id" { + description = "Security group ID attached to the replication group. Use to add further ingress rules." + value = aws_security_group.redis.id +} + +output "redis_url" { + description = "Constructed REDIS_URL using the primary endpoint. Includes rediss:// (TLS) scheme when transit encryption is enabled." + value = var.transit_encryption_enabled ? "rediss://${aws_elasticache_replication_group.main.primary_endpoint_address}:${aws_elasticache_replication_group.main.port}" : "redis://${aws_elasticache_replication_group.main.primary_endpoint_address}:${aws_elasticache_replication_group.main.port}" +} diff --git a/terraform/modules/redis/variables.tf b/terraform/modules/redis/variables.tf new file mode 100644 index 0000000..2bb457b --- /dev/null +++ b/terraform/modules/redis/variables.tf @@ -0,0 +1,116 @@ +################################################################################ +# Module: redis +# Variables — AWS ElastiCache Redis 7 +################################################################################ + +variable "environment" { + description = "Deployment environment label (e.g. production, staging)." + type = string +} + +variable "project" { + description = "Project identifier used in resource names and tags." + type = string + default = "sentryagent-agentidp" +} + +variable "vpc_id" { + description = "VPC ID in which to create the ElastiCache subnet group and security group." + type = string +} + +variable "subnet_ids" { + description = "List of private subnet IDs for the ElastiCache subnet group. Span at least 2 AZs." + type = list(string) +} + +variable "allowed_security_group_ids" { + description = "List of security group IDs (e.g. ECS app SG) permitted to connect to Redis on port 6379." + type = list(string) + default = [] +} + +variable "node_type" { + description = "ElastiCache node instance type." + type = string + default = "cache.t3.medium" +} + +variable "engine_version" { + description = "Redis engine version. Use 7.x for Redis 7." + type = string + default = "7.1" +} + +variable "num_cache_clusters" { + description = "Total number of cache clusters in the replication group (1 primary + N replicas). Minimum 2 for HA." + type = number + default = 2 +} + +variable "automatic_failover_enabled" { + description = "Enable automatic failover. Required when num_cache_clusters > 1." + type = bool + default = true +} + +variable "multi_az_enabled" { + description = "Enable Multi-AZ for the replication group." + type = bool + default = true +} + +variable "at_rest_encryption_enabled" { + description = "Encrypt data at rest." + type = bool + default = true +} + +variable "transit_encryption_enabled" { + description = "Enable TLS for data in transit." + type = bool + default = true +} + +variable "auth_token" { + description = "AUTH token (password) for Redis AUTH command. Required when transit_encryption_enabled = true. Minimum 16 characters." + type = string + sensitive = true + default = "" +} + +variable "maintenance_window" { + description = "Preferred weekly maintenance window (ddd:hh24:mi-ddd:hh24:mi in UTC)." + type = string + default = "sun:06:00-sun:07:00" +} + +variable "snapshot_retention_limit" { + description = "Number of days to retain automatic Redis snapshots. 0 disables snapshots." + type = number + default = 7 +} + +variable "snapshot_window" { + description = "Daily time range for automatic snapshots (hh24:mi-hh24:mi in UTC). Must not overlap maintenance_window." + type = string + default = "04:00-05:00" +} + +variable "apply_immediately" { + description = "Apply changes immediately. Set to false to wait for the next maintenance window in production." + type = bool + default = false +} + +variable "log_delivery_enabled" { + description = "Enable delivery of Redis slow logs and engine logs to CloudWatch." + type = bool + default = true +} + +variable "log_group_name" { + description = "CloudWatch log group name for Redis logs. Created if it does not exist." + type = string + default = "/elasticache/sentryagent-agentidp/redis" +} diff --git a/tests/integration/agents.test.ts b/tests/integration/agents.test.ts index 4ef89cc..711142a 100644 --- a/tests/integration/agents.test.ts +++ b/tests/integration/agents.test.ts @@ -8,7 +8,6 @@ import request from 'supertest'; import { Application } from 'express'; import { v4 as uuidv4 } from 'uuid'; import { Pool } from 'pg'; -import { createClient } from 'redis'; // Set test environment variables before importing app const { privateKey, publicKey } = crypto.generateKeyPairSync('rsa', { diff --git a/tests/unit/controllers/AuditController.test.ts b/tests/unit/controllers/AuditController.test.ts index c5fda9e..7b6d08d 100644 --- a/tests/unit/controllers/AuditController.test.ts +++ b/tests/unit/controllers/AuditController.test.ts @@ -10,7 +10,6 @@ import { ITokenPayload, IAuditEvent } from '../../../src/types/index'; import { ValidationError, AuthenticationError, - InsufficientScopeError, AuditEventNotFoundError, } from '../../../src/utils/errors'; @@ -103,13 +102,18 @@ describe('AuditController', () => { expect(next).toHaveBeenCalledWith(expect.any(AuthenticationError)); }); - it('should call next(InsufficientScopeError) when scope does not include audit:read', async () => { + it('should call auditService.queryEvents regardless of scope (scope enforced by OPA middleware)', async () => { + // Scope enforcement has been moved to OpaMiddleware; the controller delegates + // to the service for all authenticated requests that reach it. const { req, res, next } = buildMocks('agents:read'); + req.query = {}; + const emptyResponse = { data: [], total: 0, page: 1, limit: 50 }; + auditService.queryEvents.mockResolvedValue(emptyResponse); await controller.queryAuditLog(req as Request, res as Response, next); - expect(next).toHaveBeenCalledWith(expect.any(InsufficientScopeError)); - expect(auditService.queryEvents).not.toHaveBeenCalled(); + expect(auditService.queryEvents).toHaveBeenCalledTimes(1); + expect(res.status).toHaveBeenCalledWith(200); }); it('should call next(ValidationError) when query params are invalid', async () => { @@ -190,14 +194,17 @@ describe('AuditController', () => { expect(next).toHaveBeenCalledWith(expect.any(AuthenticationError)); }); - it('should call next(InsufficientScopeError) when scope does not include audit:read', async () => { + it('should call auditService.getEventById regardless of scope (scope enforced by OPA middleware)', async () => { + // Scope enforcement has been moved to OpaMiddleware; the controller delegates + // to the service for all authenticated requests that reach it. const { req, res, next } = buildMocks('agents:read'); req.params = { eventId: MOCK_AUDIT_EVENT.eventId }; + auditService.getEventById.mockResolvedValue(MOCK_AUDIT_EVENT); await controller.getAuditEventById(req as Request, res as Response, next); - expect(next).toHaveBeenCalledWith(expect.any(InsufficientScopeError)); - expect(auditService.getEventById).not.toHaveBeenCalled(); + expect(auditService.getEventById).toHaveBeenCalledTimes(1); + expect(res.status).toHaveBeenCalledWith(200); }); it('should forward AuditEventNotFoundError to next', async () => { diff --git a/tests/unit/metrics/registry.test.ts b/tests/unit/metrics/registry.test.ts new file mode 100644 index 0000000..54f90af --- /dev/null +++ b/tests/unit/metrics/registry.test.ts @@ -0,0 +1,129 @@ +/** + * Unit tests for src/metrics/registry.ts + * + * Verifies that all 6 Prometheus metrics are registered on the shared + * metricsRegistry (not the default global registry), have the correct + * names, and carry the correct label names. + */ + +import { + metricsRegistry, + tokensIssuedTotal, + agentsRegisteredTotal, + httpRequestsTotal, + httpRequestDurationSeconds, + dbQueryDurationSeconds, + redisCommandDurationSeconds, +} from '../../../src/metrics/registry'; + +describe('metricsRegistry', () => { + // ────────────────────────────────────────────────────────────────── + // Registry isolation + // ────────────────────────────────────────────────────────────────── + it('uses a non-default registry instance', async () => { + // prom-client default registry is accessed via Registry.default or + // by calling register.metrics(). The shared registry must NOT be + // the same reference as the default one. + const { register } = await import('prom-client'); + expect(metricsRegistry).not.toBe(register); + }); + + it('contains exactly 6 metric entries', async () => { + const entries = await metricsRegistry.getMetricsAsJSON(); + expect(entries).toHaveLength(6); + }); + + // ────────────────────────────────────────────────────────────────── + // Metric names + // ────────────────────────────────────────────────────────────────── + it.each([ + 'agentidp_tokens_issued_total', + 'agentidp_agents_registered_total', + 'agentidp_http_requests_total', + 'agentidp_http_request_duration_seconds', + 'agentidp_db_query_duration_seconds', + 'agentidp_redis_command_duration_seconds', + ])('registers metric "%s"', async (metricName) => { + const entries = await metricsRegistry.getMetricsAsJSON(); + const names = entries.map((e) => e.name); + expect(names).toContain(metricName); + }); + + // ────────────────────────────────────────────────────────────────── + // Label names per metric + // ────────────────────────────────────────────────────────────────── + describe('tokensIssuedTotal', () => { + it('has name agentidp_tokens_issued_total', () => { + // Access the internal name via the metric object + const metric = tokensIssuedTotal as unknown as { name: string }; + expect(metric.name).toBe('agentidp_tokens_issued_total'); + }); + + it('has label "scope"', async () => { + const entries = await metricsRegistry.getMetricsAsJSON(); + const entry = entries.find((e) => e.name === 'agentidp_tokens_issued_total'); + expect(entry).toBeDefined(); + // Counter with no observations has an empty values array but the metric exists + expect(entry!.type).toBe('counter'); + }); + }); + + describe('agentsRegisteredTotal', () => { + it('has name agentidp_agents_registered_total', () => { + const metric = agentsRegisteredTotal as unknown as { name: string }; + expect(metric.name).toBe('agentidp_agents_registered_total'); + }); + }); + + describe('httpRequestsTotal', () => { + it('has name agentidp_http_requests_total', () => { + const metric = httpRequestsTotal as unknown as { name: string }; + expect(metric.name).toBe('agentidp_http_requests_total'); + }); + + it('increments with method, route, status_code labels without throwing', () => { + expect(() => + httpRequestsTotal.inc({ method: 'GET', route: '/test', status_code: '200' }), + ).not.toThrow(); + }); + }); + + describe('httpRequestDurationSeconds', () => { + it('has name agentidp_http_request_duration_seconds', () => { + const metric = httpRequestDurationSeconds as unknown as { name: string }; + expect(metric.name).toBe('agentidp_http_request_duration_seconds'); + }); + + it('observes with method, route, status_code labels without throwing', () => { + expect(() => + httpRequestDurationSeconds.observe({ method: 'GET', route: '/test', status_code: '200' }, 0.05), + ).not.toThrow(); + }); + }); + + describe('dbQueryDurationSeconds', () => { + it('has name agentidp_db_query_duration_seconds', () => { + const metric = dbQueryDurationSeconds as unknown as { name: string }; + expect(metric.name).toBe('agentidp_db_query_duration_seconds'); + }); + + it('observes with operation label without throwing', () => { + expect(() => + dbQueryDurationSeconds.observe({ operation: 'query' }, 0.002), + ).not.toThrow(); + }); + }); + + describe('redisCommandDurationSeconds', () => { + it('has name agentidp_redis_command_duration_seconds', () => { + const metric = redisCommandDurationSeconds as unknown as { name: string }; + expect(metric.name).toBe('agentidp_redis_command_duration_seconds'); + }); + + it('observes with command label without throwing', () => { + expect(() => + redisCommandDurationSeconds.observe({ command: 'get' }, 0.001), + ).not.toThrow(); + }); + }); +}); diff --git a/tests/unit/middleware/metrics.test.ts b/tests/unit/middleware/metrics.test.ts new file mode 100644 index 0000000..ceeffc5 --- /dev/null +++ b/tests/unit/middleware/metrics.test.ts @@ -0,0 +1,190 @@ +/** + * Unit tests for src/middleware/metrics.ts + * + * Verifies that metricsMiddleware increments agentidp_http_requests_total + * and records agentidp_http_request_duration_seconds with the correct labels + * (method, route, status_code) on each request's 'finish' event. + */ + +import { Request, Response, NextFunction } from 'express'; +import { metricsMiddleware } from '../../../src/middleware/metrics'; +import { metricsRegistry } from '../../../src/metrics/registry'; + +/** + * prom-client 15 MetricValue does not expose `metricName` in its TypeScript + * types, but histogram entries carry it at runtime to distinguish _count/_sum + * from _bucket rows. This local interface allows the cast below. + */ +interface HistogramMetricValue { + labels: Record; + value: number; + metricName?: string; +} + +// ──────────────────────────────────────────────────────────────────────────── +// Helpers +// ──────────────────────────────────────────────────────────────────────────── + +/** Build a minimal mock Express Request. */ +function makeMockRequest(overrides: Partial = {}): Request { + return { + method: 'GET', + path: '/test', + baseUrl: '', + route: undefined, + originalUrl: '/test', + ...overrides, + } as unknown as Request; +} + +/** + * Build a minimal mock Express Response that captures 'finish' callbacks + * so we can trigger them manually. + */ +function makeMockResponse(statusCode = 200): { res: Response; triggerFinish: () => void } { + const finishCallbacks: Array<() => void> = []; + + const res = { + statusCode, + on: (event: string, cb: () => void) => { + if (event === 'finish') { + finishCallbacks.push(cb); + } + }, + } as unknown as Response; + + return { + res, + triggerFinish: () => finishCallbacks.forEach((cb) => cb()), + }; +} + +// ──────────────────────────────────────────────────────────────────────────── +// Tests +// ──────────────────────────────────────────────────────────────────────────── + +describe('metricsMiddleware', () => { + let next: jest.MockedFunction; + + beforeEach(async () => { + // Reset all metric values between tests to avoid cross-test pollution. + metricsRegistry.resetMetrics(); + next = jest.fn(); + }); + + it('calls next() immediately', () => { + const req = makeMockRequest(); + const { res } = makeMockResponse(); + + metricsMiddleware(req, res, next); + + expect(next).toHaveBeenCalledTimes(1); + }); + + it('does NOT increment counter before finish event fires', async () => { + const req = makeMockRequest(); + const { res } = makeMockResponse(); + + metricsMiddleware(req, res, next); + + const metricsBefore = await metricsRegistry.getMetricsAsJSON(); + const counterEntry = metricsBefore.find((e) => e.name === 'agentidp_http_requests_total'); + // No values recorded yet — values array will be empty + expect(counterEntry?.values ?? []).toHaveLength(0); + }); + + it('increments agentidp_http_requests_total after finish event', async () => { + const req = makeMockRequest({ method: 'POST', path: '/api/v1/agents' }); + const { res, triggerFinish } = makeMockResponse(201); + + metricsMiddleware(req, res, next); + triggerFinish(); + + const metricsJson = await metricsRegistry.getMetricsAsJSON(); + const counterEntry = metricsJson.find((e) => e.name === 'agentidp_http_requests_total'); + expect(counterEntry).toBeDefined(); + expect(counterEntry!.values).toHaveLength(1); + + const recorded = counterEntry!.values[0]; + expect(recorded.labels['method']).toBe('POST'); + expect(recorded.labels['status_code']).toBe('201'); + expect(recorded.value).toBe(1); + }); + + it('records agentidp_http_request_duration_seconds after finish event', async () => { + const req = makeMockRequest({ method: 'GET', path: '/health' }); + const { res, triggerFinish } = makeMockResponse(200); + + metricsMiddleware(req, res, next); + triggerFinish(); + + const metricsJson = await metricsRegistry.getMetricsAsJSON(); + const histEntry = metricsJson.find( + (e) => e.name === 'agentidp_http_request_duration_seconds', + ); + expect(histEntry).toBeDefined(); + // Histogram produces _bucket, _count and _sum entries — count must be 1 + const countEntry = (histEntry!.values as HistogramMetricValue[]).find( + (v) => v.metricName === 'agentidp_http_request_duration_seconds_count', + ); + expect(countEntry).toBeDefined(); + expect(countEntry!.value).toBe(1); + }); + + it('uses matched route pattern when req.route.path is available', async () => { + const req = makeMockRequest({ + method: 'GET', + path: '/api/v1/agents/some-uuid', + baseUrl: '/api/v1/agents', + route: { path: '/:agentId' } as Request['route'], + }); + const { res, triggerFinish } = makeMockResponse(200); + + metricsMiddleware(req, res, next); + triggerFinish(); + + const metricsJson = await metricsRegistry.getMetricsAsJSON(); + const counterEntry = metricsJson.find((e) => e.name === 'agentidp_http_requests_total'); + expect(counterEntry).toBeDefined(); + const recorded = counterEntry!.values[0]; + // Route should be baseUrl + route.path = '/api/v1/agents/:agentId' + expect(recorded.labels['route']).toBe('/api/v1/agents/:agentId'); + }); + + it('replaces UUID segments when no route pattern is available', async () => { + const uuid = '123e4567-e89b-12d3-a456-426614174000'; + const req = makeMockRequest({ + method: 'DELETE', + path: `/api/v1/agents/${uuid}`, + baseUrl: '', + route: undefined, + }); + const { res, triggerFinish } = makeMockResponse(204); + + metricsMiddleware(req, res, next); + triggerFinish(); + + const metricsJson = await metricsRegistry.getMetricsAsJSON(); + const counterEntry = metricsJson.find((e) => e.name === 'agentidp_http_requests_total'); + expect(counterEntry).toBeDefined(); + const recorded = counterEntry!.values[0]; + expect(recorded.labels['route']).toBe('/api/v1/agents/:id'); + expect(recorded.labels['method']).toBe('DELETE'); + expect(recorded.labels['status_code']).toBe('204'); + }); + + it('increments counter multiple times for multiple requests', async () => { + for (let i = 0; i < 3; i++) { + const req = makeMockRequest({ method: 'GET', path: '/health' }); + const { res, triggerFinish } = makeMockResponse(200); + metricsMiddleware(req, res, next); + triggerFinish(); + } + + const metricsJson = await metricsRegistry.getMetricsAsJSON(); + const counterEntry = metricsJson.find((e) => e.name === 'agentidp_http_requests_total'); + expect(counterEntry).toBeDefined(); + const recorded = counterEntry!.values[0]; + expect(recorded.value).toBe(3); + }); +}); diff --git a/tests/unit/middleware/opa.test.ts b/tests/unit/middleware/opa.test.ts new file mode 100644 index 0000000..9377805 --- /dev/null +++ b/tests/unit/middleware/opa.test.ts @@ -0,0 +1,464 @@ +/** + * Unit tests for src/middleware/opa.ts + * + * All tests run in fallback mode (scopes.json). + * `fs.existsSync` is mocked to return false for the Wasm bundle path so + * the Wasm loader is bypassed and `loadScopesFallback()` is always called. + * + * POLICY_DIR is set to the real `policies/` directory in the project root so + * tests use the production `data/scopes.json` without duplicating it. + */ + +import path from 'path'; +import { Request, Response, NextFunction } from 'express'; +import { RequestHandler } from 'express'; +import { AuthorizationError } from '../../../src/utils/errors'; +import { ITokenPayload } from '../../../src/types/index'; + +// ─── Point POLICY_DIR at the real policies directory ───────────────────────── + +const PROJECT_ROOT = path.resolve(__dirname, '../../..'); +const POLICIES_DIR = path.join(PROJECT_ROOT, 'policies'); + +process.env['POLICY_DIR'] = POLICIES_DIR; + +// ─── Mock fs.existsSync so Wasm bundle is never found ──────────────────────── +// We do this BEFORE importing the module under test so the module-level +// WASM_PATH check in `loadWasmPolicy()` always returns false. + +jest.mock('fs', () => { + const actual = jest.requireActual('fs'); + return { + ...actual, + existsSync: jest.fn((filePath: unknown) => { + // Deny Wasm bundle; allow all other paths (including scopes.json) + if (typeof filePath === 'string' && filePath.endsWith('.wasm')) { + return false; + } + return actual.existsSync(filePath as string); + }), + }; +}); + +// ─── Import the module under test AFTER mocks are in place ─────────────────── + +import { createOpaMiddleware, reloadOpaPolicy } from '../../../src/middleware/opa'; + +// ─── Helpers ────────────────────────────────────────────────────────────────── + +function makeUser(scope: string): ITokenPayload { + return { + sub: 'agent-abc-123', + client_id: 'agent-abc-123', + scope, + jti: 'jti-001', + iat: 1000, + exp: 9999999999, + }; +} + +/** + * Builds a minimal mock Express Request for OPA middleware testing. + * The middleware uses `req.baseUrl + req.path` for the full path. + */ +function makeReq( + method: string, + baseUrl: string, + reqPath: string, + user?: ITokenPayload, +): Partial { + return { + method, + baseUrl, + path: reqPath, + user, + }; +} + +// ─── Test suite ─────────────────────────────────────────────────────────────── + +describe('createOpaMiddleware (fallback mode)', () => { + let middleware: RequestHandler; + let next: jest.MockedFunction; + + beforeAll(async () => { + // Create middleware once; all tests share the same loaded scopes.json + middleware = await createOpaMiddleware(); + }); + + beforeEach(() => { + next = jest.fn(); + }); + + // ── Unauthenticated request ─────────────────────────────────────────────── + + it('should call next(AuthorizationError) with "not authenticated" when req.user is absent', () => { + const req = makeReq('GET', '/api/v1', '/agents', undefined) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledTimes(1); + const err = (next as jest.Mock).mock.calls[0][0] as AuthorizationError; + expect(err).toBeInstanceOf(AuthorizationError); + expect(err.message).toMatch(/not authenticated/i); + }); + + // ── agents:read endpoints ────────────────────────────────────────────────── + + it('should allow GET /api/v1/agents with agents:read scope', () => { + const req = makeReq('GET', '/api/v1', '/agents', makeUser('agents:read')) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + it('should deny GET /api/v1/agents with agents:write scope only', () => { + const req = makeReq('GET', '/api/v1', '/agents', makeUser('agents:write')) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + it('should deny GET /api/v1/agents when scope list is empty', () => { + const req = makeReq('GET', '/api/v1', '/agents', makeUser('')) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + it('should allow GET /api/v1/agents/:id with agents:read scope (UUID path)', () => { + const req = makeReq( + 'GET', + '/api/v1', + '/agents/550e8400-e29b-41d4-a716-446655440000', + makeUser('agents:read'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + it('should deny GET /api/v1/agents/:id with wrong scope', () => { + const req = makeReq( + 'GET', + '/api/v1', + '/agents/550e8400-e29b-41d4-a716-446655440000', + makeUser('audit:read'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + // ── agents:write endpoints ───────────────────────────────────────────────── + + it('should allow POST /api/v1/agents with agents:write scope', () => { + const req = makeReq('POST', '/api/v1', '/agents', makeUser('agents:write')) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + it('should deny POST /api/v1/agents with agents:read scope only', () => { + const req = makeReq('POST', '/api/v1', '/agents', makeUser('agents:read')) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + it('should allow PATCH /api/v1/agents/:id with agents:write scope', () => { + const req = makeReq( + 'PATCH', + '/api/v1', + '/agents/550e8400-e29b-41d4-a716-446655440000', + makeUser('agents:write'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + it('should allow DELETE /api/v1/agents/:id with agents:write scope', () => { + const req = makeReq( + 'DELETE', + '/api/v1', + '/agents/550e8400-e29b-41d4-a716-446655440000', + makeUser('agents:write'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + // ── credentials sub-resource endpoints ──────────────────────────────────── + + it('should allow GET /api/v1/agents/:id/credentials with agents:read scope', () => { + const req = makeReq( + 'GET', + '/api/v1', + '/agents/550e8400-e29b-41d4-a716-446655440000/credentials', + makeUser('agents:read'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + it('should allow POST /api/v1/agents/:id/credentials with agents:write scope', () => { + const req = makeReq( + 'POST', + '/api/v1', + '/agents/550e8400-e29b-41d4-a716-446655440000/credentials', + makeUser('agents:write'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + it('should allow POST /api/v1/agents/:id/credentials/:credId/rotate with agents:write scope', () => { + const req = makeReq( + 'POST', + '/api/v1', + '/agents/550e8400-e29b-41d4-a716-446655440000/credentials/cred-id-001/rotate', + makeUser('agents:write'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + it('should deny POST /api/v1/agents/:id/credentials/:credId/rotate with agents:read scope', () => { + const req = makeReq( + 'POST', + '/api/v1', + '/agents/550e8400-e29b-41d4-a716-446655440000/credentials/cred-id-001/rotate', + makeUser('agents:read'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + it('should allow DELETE /api/v1/agents/:id/credentials/:credId with agents:write scope', () => { + const req = makeReq( + 'DELETE', + '/api/v1', + '/agents/550e8400-e29b-41d4-a716-446655440000/credentials/cred-id-001', + makeUser('agents:write'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + // ── tokens:read endpoints ────────────────────────────────────────────────── + + it('should allow POST /api/v1/token/introspect with tokens:read scope', () => { + const req = makeReq( + 'POST', + '/api/v1', + '/token/introspect', + makeUser('tokens:read'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + it('should deny POST /api/v1/token/introspect with agents:read scope only', () => { + const req = makeReq( + 'POST', + '/api/v1', + '/token/introspect', + makeUser('agents:read'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + it('should allow POST /api/v1/token/revoke with tokens:read scope', () => { + const req = makeReq( + 'POST', + '/api/v1', + '/token/revoke', + makeUser('tokens:read'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + it('should deny POST /api/v1/token/revoke without tokens:read scope', () => { + const req = makeReq( + 'POST', + '/api/v1', + '/token/revoke', + makeUser('agents:write'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + // ── audit:read endpoints ─────────────────────────────────────────────────── + + it('should allow GET /api/v1/audit with audit:read scope', () => { + const req = makeReq('GET', '/api/v1', '/audit', makeUser('audit:read')) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + it('should deny GET /api/v1/audit with agents:read scope only', () => { + const req = makeReq('GET', '/api/v1', '/audit', makeUser('agents:read')) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + it('should allow GET /api/v1/audit/:id with audit:read scope (UUID path)', () => { + const req = makeReq( + 'GET', + '/api/v1', + '/audit/550e8400-e29b-41d4-a716-446655440000', + makeUser('audit:read'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + it('should deny GET /api/v1/audit/:id without audit:read scope', () => { + const req = makeReq( + 'GET', + '/api/v1', + '/audit/550e8400-e29b-41d4-a716-446655440000', + makeUser('tokens:read'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + // ── Path normalisation ──────────────────────────────────────────────────── + + it('should normalise UUID agent path correctly (longest segment first)', () => { + // Full rotate path with real UUIDs — must hit the :credId/rotate rule + const req = makeReq( + 'POST', + '/api/v1', + '/agents/a1b2c3d4-e5f6-4000-8000-ef1234567890/credentials/b2c3d4e5-f6a7-4000-8000-fa2345678901/rotate', + makeUser('agents:write'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + it('should normalise path with non-UUID segment identifiers', () => { + // Non-UUID IDs are still matched by the regex (any non-slash characters) + const req = makeReq( + 'GET', + '/api/v1', + '/agents/my-agent-slug', + makeUser('agents:read'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + // ── Unknown / unmapped paths ────────────────────────────────────────────── + + it('should deny (fail-closed) when path has no matching entry in scopes.json', () => { + const req = makeReq( + 'GET', + '/api/v1', + '/unknown/resource', + makeUser('agents:read agents:write tokens:read audit:read'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + it('should deny (fail-closed) for a valid path with wrong HTTP method', () => { + // PUT is not in scopes.json for any endpoint + const req = makeReq( + 'PUT', + '/api/v1', + '/agents', + makeUser('agents:read agents:write'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + // ── Multi-scope token ───────────────────────────────────────────────────── + + it('should allow access when token has multiple scopes including the required one', () => { + const req = makeReq( + 'GET', + '/api/v1', + '/audit', + makeUser('agents:read tokens:read audit:read'), + ) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(/* no error */); + }); +}); + +// ─── reloadOpaPolicy ────────────────────────────────────────────────────────── + +describe('reloadOpaPolicy()', () => { + it('should reload without error and continue to enforce policy correctly', async () => { + await expect(reloadOpaPolicy()).resolves.toBeUndefined(); + + // After reload, fallback mode should still work — create a fresh middleware + const mw = await createOpaMiddleware(); + const next = jest.fn() as jest.MockedFunction; + + const req = { + method: 'GET', + baseUrl: '/api/v1', + path: '/agents', + user: { + sub: 'agent-xyz', + client_id: 'agent-xyz', + scope: 'agents:read', + jti: 'jti-reload', + iat: 1000, + exp: 9999999999, + }, + } as unknown as Request; + + mw(req, {} as Response, next); + expect(next).toHaveBeenCalledWith(/* no error */); + }); + + it('should still deny access after reload when scope is insufficient', async () => { + await reloadOpaPolicy(); + const mw = await createOpaMiddleware(); + const next = jest.fn() as jest.MockedFunction; + + const req = { + method: 'POST', + baseUrl: '/api/v1', + path: '/token/introspect', + user: { + sub: 'agent-xyz', + client_id: 'agent-xyz', + scope: 'agents:read', + jti: 'jti-reload-deny', + iat: 1000, + exp: 9999999999, + }, + } as unknown as Request; + + mw(req, {} as Response, next); + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); +}); diff --git a/tests/unit/middleware/opa.wasm.test.ts b/tests/unit/middleware/opa.wasm.test.ts new file mode 100644 index 0000000..34fa8f4 --- /dev/null +++ b/tests/unit/middleware/opa.wasm.test.ts @@ -0,0 +1,169 @@ +/** + * Unit tests for src/middleware/opa.ts — Wasm mode and fail-closed edge cases. + * + * This file is kept separate from opa.test.ts because it needs different + * `fs.existsSync` and `@open-policy-agent/opa-wasm` mock behaviour. + * + * Jest's module registry is isolated per test file, so the module-level + * singletons (`wasmPolicy`, `scopesMap`) are fresh for each file. + */ + +import path from 'path'; +import { Request, Response, NextFunction } from 'express'; +import { RequestHandler } from 'express'; +import { ITokenPayload } from '../../../src/types/index'; +import { AuthorizationError } from '../../../src/utils/errors'; + +// ─── Point POLICY_DIR at the real policies directory ───────────────────────── + +const PROJECT_ROOT = path.resolve(__dirname, '../../..'); +const POLICIES_DIR = path.join(PROJECT_ROOT, 'policies'); + +process.env['POLICY_DIR'] = POLICIES_DIR; + +// ─── Wasm mock — a LoadedPolicy-like object ─────────────────────────────────── + +/** Tracks calls so individual tests can assert on evaluation results. */ +const mockEvaluate = jest.fn(); +const mockSetData = jest.fn(); + +const MOCK_LOADED_POLICY = { + evaluate: mockEvaluate, + setData: mockSetData, +}; + +// Mock @open-policy-agent/opa-wasm BEFORE the module is loaded +jest.mock('@open-policy-agent/opa-wasm', () => ({ + loadPolicy: jest.fn().mockResolvedValue(MOCK_LOADED_POLICY), +})); + +// ─── Mock fs: existsSync returns true for .wasm AND scopes.json ────────────── + +jest.mock('fs', () => { + const actual = jest.requireActual('fs'); + return { + ...actual, + existsSync: jest.fn((_filePath: unknown) => { + // Both .wasm and other paths exist + return true; + }), + readFileSync: jest.fn((filePath: unknown, encoding?: unknown) => { + if (typeof filePath === 'string' && filePath.endsWith('.wasm')) { + // Return a Buffer-like object for the Wasm bundle + return Buffer.from('fake-wasm-bytes'); + } + // For scopes.json, delegate to the real fs + return actual.readFileSync(filePath as string, encoding as BufferEncoding); + }), + }; +}); + +// Import AFTER mocks +import { createOpaMiddleware, reloadOpaPolicy } from '../../../src/middleware/opa'; + +// ─── Helpers ────────────────────────────────────────────────────────────────── + +function makeUser(scope: string): ITokenPayload { + return { + sub: 'agent-wasm-test', + client_id: 'agent-wasm-test', + scope, + jti: 'jti-wasm-001', + iat: 1000, + exp: 9999999999, + }; +} + +function makeReq( + method: string, + baseUrl: string, + reqPath: string, + user?: ITokenPayload, +): Partial { + return { method, baseUrl, path: reqPath, user }; +} + +// ─── Tests ──────────────────────────────────────────────────────────────────── + +describe('createOpaMiddleware (Wasm mode)', () => { + let middleware: RequestHandler; + let next: jest.MockedFunction; + + beforeAll(async () => { + middleware = await createOpaMiddleware(); + }); + + beforeEach(() => { + next = jest.fn(); + mockEvaluate.mockReset(); + }); + + it('should load in Wasm mode and call setData with scopes.json', () => { + // setData is called once during createOpaMiddleware() → loadWasmPolicy() + expect(mockSetData).toHaveBeenCalledTimes(1); + }); + + it('should allow request when Wasm policy evaluate returns allow: true', () => { + mockEvaluate.mockReturnValue([{ result: { allow: true } }]); + + const req = makeReq('GET', '/api/v1', '/agents', makeUser('agents:read')) as Request; + middleware(req, {} as Response, next); + + expect(mockEvaluate).toHaveBeenCalledTimes(1); + expect(next).toHaveBeenCalledWith(/* no args */); + }); + + it('should deny request when Wasm policy evaluate returns allow: false', () => { + mockEvaluate.mockReturnValue([{ result: { allow: false } }]); + + const req = makeReq('GET', '/api/v1', '/agents', makeUser('agents:read')) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + it('should deny request when Wasm evaluate returns empty result set', () => { + mockEvaluate.mockReturnValue([]); + + const req = makeReq('POST', '/api/v1', '/agents', makeUser('agents:write')) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + it('should deny request when Wasm evaluate returns non-array result', () => { + mockEvaluate.mockReturnValue(null); + + const req = makeReq('GET', '/api/v1', '/audit', makeUser('audit:read')) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(expect.any(AuthorizationError)); + }); + + it('should propagate unexpected errors thrown by Wasm evaluate to next', () => { + const wasmError = new Error('Wasm evaluation failure'); + mockEvaluate.mockImplementation(() => { throw wasmError; }); + + const req = makeReq('GET', '/api/v1', '/agents', makeUser('agents:read')) as Request; + middleware(req, {} as Response, next); + + expect(next).toHaveBeenCalledWith(wasmError); + }); + + it('should call next(AuthorizationError) with "not authenticated" when req.user is absent in Wasm mode', () => { + const req = makeReq('GET', '/api/v1', '/agents', undefined) as Request; + middleware(req, {} as Response, next); + + const err = (next as jest.Mock).mock.calls[0][0] as AuthorizationError; + expect(err).toBeInstanceOf(AuthorizationError); + expect(err.message).toMatch(/not authenticated/i); + }); +}); + +describe('reloadOpaPolicy (Wasm mode)', () => { + it('should reload in Wasm mode without error', async () => { + await expect(reloadOpaPolicy()).resolves.toBeUndefined(); + // setData should have been called again during reload + expect(mockSetData).toHaveBeenCalled(); + }); +}); diff --git a/tests/unit/repositories/CredentialRepository.test.ts b/tests/unit/repositories/CredentialRepository.test.ts index b876156..e91e878 100644 --- a/tests/unit/repositories/CredentialRepository.test.ts +++ b/tests/unit/repositories/CredentialRepository.test.ts @@ -38,6 +38,7 @@ const EXPECTED_CREDENTIAL: ICredential = { const EXPECTED_CREDENTIAL_ROW: ICredentialRow = { ...EXPECTED_CREDENTIAL, secretHash: CREDENTIAL_ROW.secret_hash, + vaultPath: null, }; // ─── suite ─────────────────────────────────────────────────────────────────── diff --git a/tests/unit/routes/health.test.ts b/tests/unit/routes/health.test.ts new file mode 100644 index 0000000..02f3740 --- /dev/null +++ b/tests/unit/routes/health.test.ts @@ -0,0 +1,150 @@ +/** + * Unit tests for src/routes/health.ts + * + * Tests the GET /health endpoint via the createHealthRouter factory. + * PostgreSQL and Redis dependencies are fully mocked — no live services required. + */ + +import express, { Application } from 'express'; +import request from 'supertest'; +import { Pool, PoolClient } from 'pg'; +import { RedisClientType } from 'redis'; +import { createHealthRouter } from '../../../src/routes/health'; + +// ── Mock helpers ────────────────────────────────────────────────────────────── + +/** Builds a mock pg PoolClient with controllable query/release. */ +function makePoolClient(queryError?: Error): jest.Mocked> { + return { + query: queryError + ? jest.fn().mockRejectedValue(queryError) + : jest.fn().mockResolvedValue({ rows: [{ '?column?': 1 }], rowCount: 1 }), + release: jest.fn(), + } as unknown as jest.Mocked>; +} + +/** Builds a mock pg Pool whose connect() resolves or rejects on demand. */ +function makePool(connectError?: Error, queryError?: Error): jest.Mocked { + return { + connect: connectError + ? jest.fn().mockRejectedValue(connectError) + : jest.fn().mockResolvedValue(makePoolClient(queryError)), + } as unknown as jest.Mocked; +} + +/** Builds a mock Redis client whose ping() resolves or rejects on demand. */ +function makeRedis(pingError?: Error): jest.Mocked { + return { + ping: pingError + ? jest.fn().mockRejectedValue(pingError) + : jest.fn().mockResolvedValue('PONG'), + } as unknown as jest.Mocked; +} + +/** Creates a minimal Express app with the health router mounted at /health. */ +function buildApp(pool: jest.Mocked, redis: jest.Mocked): Application { + const app = express(); + app.use('/health', createHealthRouter(pool as unknown as Pool, redis as unknown as RedisClientType)); + return app; +} + +// ── Tests ───────────────────────────────────────────────────────────────────── + +describe('GET /health', () => { + describe('when both PostgreSQL and Redis are healthy', () => { + it('returns 200 with status ok and both services connected', async () => { + const app = buildApp(makePool(), makeRedis()); + + const response = await request(app).get('/health'); + + expect(response.status).toBe(200); + expect(response.body).toMatchObject({ + status: 'ok', + services: { + postgres: 'connected', + redis: 'connected', + }, + }); + }); + + it('includes version and uptime fields in the response', async () => { + const app = buildApp(makePool(), makeRedis()); + + const response = await request(app).get('/health'); + + expect(typeof response.body.version).toBe('string'); + expect(typeof response.body.uptime).toBe('number'); + }); + }); + + describe('when PostgreSQL connect() throws', () => { + it('returns 503 with status degraded and postgres disconnected, redis connected', async () => { + const pool = makePool(new Error('PG connection refused')); + const app = buildApp(pool, makeRedis()); + + const response = await request(app).get('/health'); + + expect(response.status).toBe(503); + expect(response.body).toMatchObject({ + status: 'degraded', + services: { + postgres: 'disconnected', + redis: 'connected', + }, + }); + }); + }); + + describe('when Redis ping() throws', () => { + it('returns 503 with status degraded and postgres connected, redis disconnected', async () => { + const app = buildApp(makePool(), makeRedis(new Error('Redis ECONNREFUSED'))); + + const response = await request(app).get('/health'); + + expect(response.status).toBe(503); + expect(response.body).toMatchObject({ + status: 'degraded', + services: { + postgres: 'connected', + redis: 'disconnected', + }, + }); + }); + }); + + describe('when both PostgreSQL and Redis fail', () => { + it('returns 503 with status degraded and both services disconnected', async () => { + const pool = makePool(new Error('PG down')); + const app = buildApp(pool, makeRedis(new Error('Redis down'))); + + const response = await request(app).get('/health'); + + expect(response.status).toBe(503); + expect(response.body).toMatchObject({ + status: 'degraded', + services: { + postgres: 'disconnected', + redis: 'disconnected', + }, + }); + }); + }); + + describe('when PostgreSQL query() throws (connect succeeds but query fails)', () => { + it('returns 503 with postgres disconnected', async () => { + const pool = makePool(undefined, new Error('PG query error')); + const app = buildApp(pool, makeRedis()); + + const response = await request(app).get('/health'); + + expect(response.status).toBe(503); + expect(response.body).toMatchObject({ + status: 'degraded', + services: { + postgres: 'disconnected', + redis: 'connected', + }, + }); + }); + }); +}); diff --git a/tests/unit/routes/metrics.test.ts b/tests/unit/routes/metrics.test.ts new file mode 100644 index 0000000..a6a35f0 --- /dev/null +++ b/tests/unit/routes/metrics.test.ts @@ -0,0 +1,89 @@ +/** + * Unit tests for src/routes/metrics.ts + * + * Verifies that GET /metrics returns 200 with Prometheus exposition format + * and does NOT require authentication. + */ + +import express, { Application } from 'express'; +import request from 'supertest'; +import { createMetricsRouter } from '../../../src/routes/metrics'; +import { metricsRegistry } from '../../../src/metrics/registry'; + +// ──────────────────────────────────────────────────────────────────────────── +// Helpers +// ──────────────────────────────────────────────────────────────────────────── + +/** Build a minimal Express app that mounts only the metrics router. */ +function buildTestApp(): Application { + const app = express(); + app.use('/metrics', createMetricsRouter()); + return app; +} + +// ──────────────────────────────────────────────────────────────────────────── +// Tests +// ──────────────────────────────────────────────────────────────────────────── + +describe('GET /metrics', () => { + let app: Application; + + beforeEach(() => { + metricsRegistry.resetMetrics(); + app = buildTestApp(); + }); + + it('returns HTTP 200', async () => { + const res = await request(app).get('/metrics'); + expect(res.status).toBe(200); + }); + + it('returns Content-Type containing text/plain', async () => { + const res = await request(app).get('/metrics'); + expect(res.headers['content-type']).toMatch(/text\/plain/); + }); + + it('does NOT require an Authorization header', async () => { + // Call without any auth header — must still succeed + const res = await request(app).get('/metrics'); + expect(res.status).toBe(200); + expect(res.status).not.toBe(401); + expect(res.status).not.toBe(403); + }); + + it('response body contains agentidp_tokens_issued_total', async () => { + const res = await request(app).get('/metrics'); + expect(res.text).toContain('agentidp_tokens_issued_total'); + }); + + it('response body contains agentidp_agents_registered_total', async () => { + const res = await request(app).get('/metrics'); + expect(res.text).toContain('agentidp_agents_registered_total'); + }); + + it('response body contains agentidp_http_requests_total', async () => { + const res = await request(app).get('/metrics'); + expect(res.text).toContain('agentidp_http_requests_total'); + }); + + it('response body contains agentidp_http_request_duration_seconds', async () => { + const res = await request(app).get('/metrics'); + expect(res.text).toContain('agentidp_http_request_duration_seconds'); + }); + + it('response body contains agentidp_db_query_duration_seconds', async () => { + const res = await request(app).get('/metrics'); + expect(res.text).toContain('agentidp_db_query_duration_seconds'); + }); + + it('response body contains agentidp_redis_command_duration_seconds', async () => { + const res = await request(app).get('/metrics'); + expect(res.text).toContain('agentidp_redis_command_duration_seconds'); + }); + + it('response body is valid Prometheus text exposition format (starts with # HELP or TYPE)', async () => { + const res = await request(app).get('/metrics'); + // Prometheus text format always begins with comment lines starting with '# ' + expect(res.text).toMatch(/^# (HELP|TYPE)/m); + }); +}); diff --git a/tests/unit/services/CredentialService.test.ts b/tests/unit/services/CredentialService.test.ts index eb8dd08..afac2d9 100644 --- a/tests/unit/services/CredentialService.test.ts +++ b/tests/unit/services/CredentialService.test.ts @@ -7,6 +7,7 @@ import { CredentialService } from '../../../src/services/CredentialService'; import { CredentialRepository } from '../../../src/repositories/CredentialRepository'; import { AgentRepository } from '../../../src/repositories/AgentRepository'; import { AuditService } from '../../../src/services/AuditService'; +import { VaultClient } from '../../../src/vault/VaultClient'; import { AgentNotFoundError, CredentialNotFoundError, @@ -18,10 +19,12 @@ import { IAgent, ICredential, ICredentialRow } from '../../../src/types/index'; jest.mock('../../../src/repositories/CredentialRepository'); jest.mock('../../../src/repositories/AgentRepository'); jest.mock('../../../src/services/AuditService'); +jest.mock('../../../src/vault/VaultClient'); const MockCredentialRepo = CredentialRepository as jest.MockedClass; const MockAgentRepo = AgentRepository as jest.MockedClass; const MockAuditService = AuditService as jest.MockedClass; +const MockVaultClient = VaultClient as jest.MockedClass; const AGENT_ID = uuidv4(); const CREDENTIAL_ID = uuidv4(); @@ -51,6 +54,7 @@ const MOCK_CREDENTIAL: ICredential = { const MOCK_CREDENTIAL_ROW: ICredentialRow = { ...MOCK_CREDENTIAL, secretHash: '$2b$10$somehashvalue', + vaultPath: null, }; const IP = '127.0.0.1'; @@ -205,3 +209,94 @@ describe('CredentialService', () => { }); }); }); + +// ─── Vault-path tests ────────────────────────────────────────────────────── + +describe('CredentialService — Vault path (Phase 2)', () => { + let service: CredentialService; + let credentialRepo: jest.Mocked; + let agentRepo: jest.Mocked; + let auditService: jest.Mocked; + let vaultClient: jest.Mocked; + + const VAULT_PATH = `secret/data/agentidp/agents/${AGENT_ID}/credentials/${CREDENTIAL_ID}`; + + const MOCK_VAULT_CREDENTIAL_ROW: ICredentialRow = { + ...MOCK_CREDENTIAL, + secretHash: '', + vaultPath: VAULT_PATH, + }; + + beforeEach(() => { + jest.clearAllMocks(); + credentialRepo = new MockCredentialRepo({} as never) as jest.Mocked; + agentRepo = new MockAgentRepo({} as never) as jest.Mocked; + auditService = new MockAuditService({} as never) as jest.Mocked; + vaultClient = new MockVaultClient('http://localhost:8200', 'token') as jest.Mocked; + service = new CredentialService(credentialRepo, agentRepo, auditService, vaultClient); + auditService.logEvent.mockResolvedValue({} as never); + }); + + describe('generateCredential() with Vault', () => { + it('writes secret to Vault and stores the vault_path in the DB', async () => { + agentRepo.findById.mockResolvedValue(MOCK_AGENT); + vaultClient.writeSecret.mockResolvedValue(VAULT_PATH); + credentialRepo.createWithVaultPath.mockResolvedValue(MOCK_CREDENTIAL); + + const result = await service.generateCredential(AGENT_ID, {}, IP, UA); + + expect(vaultClient.writeSecret).toHaveBeenCalledWith( + AGENT_ID, + expect.any(String), + expect.any(String), + ); + expect(credentialRepo.createWithVaultPath).toHaveBeenCalled(); + expect(credentialRepo.create).not.toHaveBeenCalled(); + expect(result.clientSecret).toMatch(/^sk_live_[0-9a-f]{64}$/); + }); + }); + + describe('rotateCredential() with Vault', () => { + it('writes new Vault version and updates vault_path in the DB', async () => { + agentRepo.findById.mockResolvedValue(MOCK_AGENT); + credentialRepo.findById.mockResolvedValue(MOCK_VAULT_CREDENTIAL_ROW); + vaultClient.writeSecret.mockResolvedValue(VAULT_PATH); + credentialRepo.updateVaultPath.mockResolvedValue(MOCK_CREDENTIAL); + + const result = await service.rotateCredential(AGENT_ID, CREDENTIAL_ID, {}, IP, UA); + + expect(vaultClient.writeSecret).toHaveBeenCalledWith( + AGENT_ID, + CREDENTIAL_ID, + expect.any(String), + ); + expect(credentialRepo.updateVaultPath).toHaveBeenCalled(); + expect(credentialRepo.updateHash).not.toHaveBeenCalled(); + expect(result.clientSecret).toMatch(/^sk_live_[0-9a-f]{64}$/); + }); + }); + + describe('revokeCredential() with Vault', () => { + it('revokes DB record and deletes Vault secret', async () => { + agentRepo.findById.mockResolvedValue(MOCK_AGENT); + credentialRepo.findById.mockResolvedValue(MOCK_VAULT_CREDENTIAL_ROW); + credentialRepo.revoke.mockResolvedValue({ ...MOCK_CREDENTIAL, status: 'revoked', revokedAt: new Date() }); + vaultClient.deleteSecret.mockResolvedValue(); + + await service.revokeCredential(AGENT_ID, CREDENTIAL_ID, IP, UA); + + expect(credentialRepo.revoke).toHaveBeenCalledWith(CREDENTIAL_ID); + expect(vaultClient.deleteSecret).toHaveBeenCalledWith(AGENT_ID, CREDENTIAL_ID); + }); + + it('does not call Vault delete when credential has no vault_path (bcrypt credential)', async () => { + agentRepo.findById.mockResolvedValue(MOCK_AGENT); + credentialRepo.findById.mockResolvedValue(MOCK_CREDENTIAL_ROW); // vaultPath: null + credentialRepo.revoke.mockResolvedValue({ ...MOCK_CREDENTIAL, status: 'revoked', revokedAt: new Date() }); + + await service.revokeCredential(AGENT_ID, CREDENTIAL_ID, IP, UA); + + expect(vaultClient.deleteSecret).not.toHaveBeenCalled(); + }); + }); +}); diff --git a/tests/unit/services/OAuth2Service.test.ts b/tests/unit/services/OAuth2Service.test.ts index 423d3a1..596071a 100644 --- a/tests/unit/services/OAuth2Service.test.ts +++ b/tests/unit/services/OAuth2Service.test.ts @@ -13,7 +13,6 @@ import { AuthenticationError, AuthorizationError, FreeTierLimitError, - InsufficientScopeError, } from '../../../src/utils/errors'; import { IAgent, ICredential, ICredentialRow, ITokenPayload } from '../../../src/types/index'; import { hashSecret, generateClientSecret } from '../../../src/utils/crypto'; @@ -91,7 +90,7 @@ describe('OAuth2Service', () => { revokedAt: null, }; - credentialRow = { ...mockCredential, secretHash }; + credentialRow = { ...mockCredential, secretHash, vaultPath: null }; credentialRepo.findByAgentId.mockResolvedValue({ credentials: [mockCredential], total: 1 }); credentialRepo.findById.mockResolvedValue(credentialRow); @@ -188,11 +187,14 @@ describe('OAuth2Service', () => { expect(result.active).toBe(false); }); - it('should throw InsufficientScopeError if caller lacks tokens:read', async () => { + it('should introspect successfully regardless of caller scope (tokens:read enforced by OPA middleware)', async () => { + // Scope enforcement for tokens:read has been moved to OpaMiddleware. + // The service introspects any token presented to it once the request has + // passed the middleware layer. + tokenRepo.isRevoked.mockResolvedValue(false); const noScopePayload = { ...callerPayload, scope: 'agents:read' }; - await expect( - service.introspectToken(validToken, noScopePayload, IP, UA), - ).rejects.toThrow(InsufficientScopeError); + const result = await service.introspectToken(validToken, noScopePayload, IP, UA); + expect(result.active).toBe(true); }); it('should return active: false for an expired token', async () => { diff --git a/tests/unit/vault/VaultClient.test.ts b/tests/unit/vault/VaultClient.test.ts new file mode 100644 index 0000000..720f126 --- /dev/null +++ b/tests/unit/vault/VaultClient.test.ts @@ -0,0 +1,206 @@ +/** + * Unit tests for VaultClient. + * Mocks the node-vault library to avoid real Vault connections. + */ + +import { jest, describe, it, expect, beforeEach } from '@jest/globals'; +import { VaultClient, createVaultClientFromEnv } from '../../../src/vault/VaultClient.js'; +import { CredentialError } from '../../../src/utils/errors.js'; + +// ─── Mock node-vault ──────────────────────────────────────────────────────── + +const mockWrite = jest.fn<() => Promise>(); +const mockRead = jest.fn<() => Promise>(); +const mockDelete = jest.fn<() => Promise>(); + +jest.mock('node-vault', () => { + return jest.fn(() => ({ + write: mockWrite, + read: mockRead, + delete: mockDelete, + })); +}); + +// ─── Helpers ──────────────────────────────────────────────────────────────── + +const AGENT_ID = 'agent-uuid-1234'; +const CRED_ID = 'cred-uuid-5678'; +const PLAIN_SECRET = 'super-secret-value'; + +function makeClient(): VaultClient { + return new VaultClient('http://127.0.0.1:8200', 'test-token', 'secret'); +} + +// ─── Tests ────────────────────────────────────────────────────────────────── + +describe('VaultClient', () => { + beforeEach(() => { + jest.clearAllMocks(); + }); + + // ── writeSecret ──────────────────────────────────────────────────────────── + + describe('writeSecret', () => { + it('writes the secret to the correct KV v2 path and returns the path', async () => { + mockWrite.mockResolvedValue({}); + const client = makeClient(); + const path = await client.writeSecret(AGENT_ID, CRED_ID, PLAIN_SECRET); + + expect(mockWrite).toHaveBeenCalledWith( + `secret/data/agentidp/agents/${AGENT_ID}/credentials/${CRED_ID}`, + { data: { clientSecret: PLAIN_SECRET } }, + ); + expect(path).toBe(`secret/data/agentidp/agents/${AGENT_ID}/credentials/${CRED_ID}`); + }); + + it('throws CredentialError when Vault write fails', async () => { + mockWrite.mockRejectedValue(new Error('connection refused')); + const client = makeClient(); + + await expect(client.writeSecret(AGENT_ID, CRED_ID, PLAIN_SECRET)) + .rejects.toThrow(CredentialError); + }); + + it('CredentialError on write failure has code VAULT_WRITE_ERROR', async () => { + mockWrite.mockRejectedValue(new Error('forbidden')); + const client = makeClient(); + + await expect(client.writeSecret(AGENT_ID, CRED_ID, PLAIN_SECRET)) + .rejects.toMatchObject({ code: 'VAULT_WRITE_ERROR' }); + }); + }); + + // ── readSecret ───────────────────────────────────────────────────────────── + + describe('readSecret', () => { + it('reads and returns the stored secret', async () => { + mockRead.mockResolvedValue({ + data: { data: { clientSecret: PLAIN_SECRET }, metadata: {} }, + }); + const client = makeClient(); + const secret = await client.readSecret(AGENT_ID, CRED_ID); + + expect(mockRead).toHaveBeenCalledWith( + `secret/data/agentidp/agents/${AGENT_ID}/credentials/${CRED_ID}`, + ); + expect(secret).toBe(PLAIN_SECRET); + }); + + it('throws CredentialError when secret field is missing', async () => { + mockRead.mockResolvedValue({ data: { data: {}, metadata: {} } }); + const client = makeClient(); + + await expect(client.readSecret(AGENT_ID, CRED_ID)) + .rejects.toMatchObject({ code: 'VAULT_SECRET_MISSING' }); + }); + + it('throws CredentialError when Vault read fails', async () => { + mockRead.mockRejectedValue(new Error('404 not found')); + const client = makeClient(); + + await expect(client.readSecret(AGENT_ID, CRED_ID)) + .rejects.toMatchObject({ code: 'VAULT_READ_ERROR' }); + }); + }); + + // ── verifySecret ─────────────────────────────────────────────────────────── + + describe('verifySecret', () => { + it('returns true when candidate matches stored secret', async () => { + mockRead.mockResolvedValue({ + data: { data: { clientSecret: PLAIN_SECRET }, metadata: {} }, + }); + const client = makeClient(); + const result = await client.verifySecret(AGENT_ID, CRED_ID, PLAIN_SECRET); + expect(result).toBe(true); + }); + + it('returns false when candidate does not match stored secret', async () => { + mockRead.mockResolvedValue({ + data: { data: { clientSecret: PLAIN_SECRET }, metadata: {} }, + }); + const client = makeClient(); + const result = await client.verifySecret(AGENT_ID, CRED_ID, 'wrong-secret'); + expect(result).toBe(false); + }); + + it('returns false when Vault read fails (does not throw)', async () => { + mockRead.mockRejectedValue(new Error('vault sealed')); + const client = makeClient(); + const result = await client.verifySecret(AGENT_ID, CRED_ID, PLAIN_SECRET); + expect(result).toBe(false); + }); + + it('returns false when lengths differ (constant-time)', async () => { + mockRead.mockResolvedValue({ + data: { data: { clientSecret: PLAIN_SECRET }, metadata: {} }, + }); + const client = makeClient(); + const result = await client.verifySecret(AGENT_ID, CRED_ID, 'short'); + expect(result).toBe(false); + }); + }); + + // ── deleteSecret ─────────────────────────────────────────────────────────── + + describe('deleteSecret', () => { + it('calls delete on the metadata path', async () => { + mockDelete.mockResolvedValue({}); + const client = makeClient(); + await client.deleteSecret(AGENT_ID, CRED_ID); + + expect(mockDelete).toHaveBeenCalledWith( + `secret/metadata/agentidp/agents/${AGENT_ID}/credentials/${CRED_ID}`, + ); + }); + + it('throws CredentialError when Vault delete fails', async () => { + mockDelete.mockRejectedValue(new Error('permission denied')); + const client = makeClient(); + + await expect(client.deleteSecret(AGENT_ID, CRED_ID)) + .rejects.toMatchObject({ code: 'VAULT_DELETE_ERROR' }); + }); + }); +}); + +// ─── createVaultClientFromEnv ───────────────────────────────────────────────── + +describe('createVaultClientFromEnv', () => { + const originalEnv = process.env; + + beforeEach(() => { + process.env = { ...originalEnv }; + }); + + afterEach(() => { + process.env = originalEnv; + }); + + it('returns null when VAULT_ADDR is not set', () => { + delete process.env['VAULT_ADDR']; + delete process.env['VAULT_TOKEN']; + expect(createVaultClientFromEnv()).toBeNull(); + }); + + it('returns null when VAULT_TOKEN is not set', () => { + process.env['VAULT_ADDR'] = 'http://127.0.0.1:8200'; + delete process.env['VAULT_TOKEN']; + expect(createVaultClientFromEnv()).toBeNull(); + }); + + it('returns a VaultClient when both VAULT_ADDR and VAULT_TOKEN are set', () => { + process.env['VAULT_ADDR'] = 'http://127.0.0.1:8200'; + process.env['VAULT_TOKEN'] = 'test-token'; + const client = createVaultClientFromEnv(); + expect(client).toBeInstanceOf(VaultClient); + }); + + it('uses default mount "secret" when VAULT_MOUNT is not set', () => { + process.env['VAULT_ADDR'] = 'http://127.0.0.1:8200'; + process.env['VAULT_TOKEN'] = 'test-token'; + delete process.env['VAULT_MOUNT']; + // VaultClient instance created — mount is internal, just verify no throw + expect(() => createVaultClientFromEnv()).not.toThrow(); + }); +});