Initial commit: restructure to flat layout with ui/ and web/ at root

2026-03-12 21:33:50 +08:00
commit decba25a08
1708 changed files with 199890 additions and 0 deletions

2
web/.gitignore vendored Normal file
View File

@@ -0,0 +1,2 @@
server/prompt-templates.js
prompt-enhancer-config.json

89
web/README.md Normal file
View File

@@ -0,0 +1,89 @@
# <picture><source media="(prefers-color-scheme: dark)" srcset="https://github.com/btriapitsyn/openchamber/raw/HEAD/docs/references/badges/openchamber-logo-dark.svg"><img src="https://github.com/btriapitsyn/openchamber/raw/HEAD/docs/references/badges/openchamber-logo-light.svg" width="32" height="32" align="absmiddle" /></picture> @openchamber/web
[![GitHub stars](https://img.shields.io/github/stars/btriapitsyn/openchamber?style=flat&logo=data%3Aimage%2Fsvg%2Bxml%3Bbase64%2CPHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIzMiIgaGVpZ2h0PSIzMiIgZmlsbD0iI2YxZWNlYyIgdmlld0JveD0iMCAwIDI1NiAyNTYiPjxwYXRoIGQ9Ik0yMjkuMDYsMTA4Ljc5bC00OC43LDQyLDE0Ljg4LDYyLjc5YTguNCw4LjQsMCwwLDEtMTIuNTIsOS4xN0wxMjgsMTg5LjA5LDczLjI4LDIyMi43NGE4LjQsOC40LDAsMCwxLTEyLjUyLTkuMTdsMTQuODgtNjIuNzktNDguNy00MkE4LjQ2LDguNDYsMCwwLDEsMzEuNzMsOTRMOTUuNjQsODguOGwyNC42Mi01OS42YTguMzYsOC4zNiwwLDAsMSwxNS40OCwwbDI0LjYyLDU5LjZMMjI0LjI3LDk0QTguNDYsOC40NiwwLDAsMSwyMjkuMDYsMTA4Ljc5WiIgb3BhY2l0eT0iMC4yIj48L3BhdGg%2BPHBhdGggZD0iTTIzOS4xOCw5Ny4yNkExNi4zOCwxNi4zOCwwLDAsMCwyMjQuOTIsODZsLTU5LTQuNzZMMTQzLjE0LDI2LjE1YTE2LjM2LDE2LjM2LDAsMCwwLTMwLjI3LDBMOTAuMTEsODEuMjMsMzEuMDgsODZhMTYuNDYsMTYuNDYsMCwwLDAtOS4zNywyOC44Nmw0NSwzOC44M0w1MywyMTEuNzVhMTYuMzgsMTYuMzgsMCwwLDAsMjQuNSwxNy44MkwxMjgsMTk4LjQ5bDUwLjUzLDMxLjA4QTE2LjQsMTYuNCwwLDAsMCwyMDMsMjExLjc1bC0xMy43Ni01OC4wNyw0NS0zOC44M0ExNi40MywxNi40MywwLDAsMCwyMzkuMTgsOTcuMjZabS0xNS4zNCw1LjQ3LTQ4LjcsNDJhOCw4LDAsMCwwLTIuNTYsNy45MWwxNC44OCw2Mi44YS4zNy4zNywwLDAsMS0uMTcuNDhjLS4xOC4xNC0uMjMuMTEtLjM4LDBsLTU0LjcyLTMzLjY1YTgsOCwwLDAsMC04LjM4LDBMNjkuMDksMjE1Ljk0Yy0uMTUuMDktLjE5LjEyLS4zOCwwYS4zNy4zNywwLDAsMS0uMTctLjQ4bDE0Ljg4LTYyLjhhOCw4LDAsMCwwLTIuNTYtNy45MWwtNDguNy00MmMtLjEyLS4xLS4yMy0uMTktLjEzLS41cy4xOC0uMjcuMzMtLjI5bDYzLjkyLTUuMTZBOCw4LDAsMCwwLDEwMyw5MS44NmwyNC42Mi01OS42MWMuMDgtLjE3LjExLS4yNS4zNS0uMjVzLjI3LjA4LjM1LjI1TDE1Myw5MS44NmE4LDgsMCwwLDAsNi43NSw0LjkybDYzLjkyLDUuMTZjLjE1LDAsLjI0LDAsLjMzLjI5UzIyNCwxMDIuNjMsMjIzLjg0LDEwMi43M1oiPjwvcGF0aD48L3N2Zz4%3D&logoColor=FFFCF0&labelColor=100F0F&color=66800B)](https://github.com/btriapitsyn/openchamber/stargazers)
[![GitHub release](https://img.shields.io/github/v/release/btriapitsyn/openchamber?style=flat&logo=data%3Aimage%2Fsvg%2Bxml%3Bbase64%2CPHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSIzMiIgaGVpZ2h0PSIzMiIgZmlsbD0iI2YxZWNlYyIgdmlld0JveD0iMCAwIDI1NiAyNTYiPjxwYXRoIGQ9Ik0xMjgsMTI5LjA5VjIzMmE4LDgsMCwwLDEtMy44NC0xbC04OC00OC4xOGE4LDgsMCwwLDEtNC4xNi03VjgwLjE4YTgsOCwwLDAsMSwuNy0zLjI1WiIgb3BhY2l0eT0iMC4yIj48L3BhdGg%2BPHBhdGggZD0iTTIyMy42OCw2Ni4xNSwxMzUuNjgsMThhMTUuODgsMTUuODgsMCwwLDAtMTUuMzYsMGwtODgsNDguMTdhMTYsMTYsMCwwLDAtOC4zMiwxNHY5NS42NGExNiwxNiwwLDAsMCw4LjMyLDE0bDg4LDQ4LjE3YTE1Ljg4LDE1Ljg4LDAsMCwwLDE1LjM2LDBsODgtNDguMTdhMTYsMTYsMCwwLDAsOC4zMi0xNFY4MC4xOEExNiwxNiwwLDAsMCwyMjMuNjgsNjYuMTVaTTEyOCwzMmw4MC4zNCw0NC0yOS43NywxNi4zLTgwLjM1LTQ0Wk0xMjgsMTIwLDQ3LjY2LDc2bDMzLjktMTguNTYsODAuMzQsNDRaTTQwLDkwbDgwLDQzLjc4djg1Ljc5TDQwLDE3NS44MlptMTc2LDg1Ljc4aDBsLTgwLDQzLjc5VjEzMy44MmwzMi0xNy41MVYxNTJhOCw4LDAsMCwwLDE2LDBWMTA3LjU1TDIxNiw5MHY4NS43N1oiPjwvcGF0aD48L3N2Zz4%3D&logoColor=FFFCF0&labelColor=100F0F&color=205EA6)](https://github.com/btriapitsyn/openchamber/releases/latest)
[![Discord](https://img.shields.io/badge/Discord-join.svg?style=flat&labelColor=100F0F&color=8B7EC8&logo=discord&logoColor=FFFCF0)](https://discord.gg/ZYRSdnwwKA)
Run [OpenCode](https://opencode.ai) in your browser. Install the CLI, open `localhost:3000`, done. Works on desktop browsers, tablets, and phones as a PWA.
Full project overview, screenshots, and all features: [github.com/btriapitsyn/openchamber](https://github.com/btriapitsyn/openchamber)
## Install
```bash
curl -fsSL https://raw.githubusercontent.com/btriapitsyn/openchamber/main/scripts/install.sh | bash
```
Or install manually: `bun add -g @openchamber/web` (or npm, pnpm, yarn).
> **Prerequisites:** [OpenCode CLI](https://opencode.ai) installed, Node.js 20+.
## Usage
```bash
openchamber # Start on port 3000
openchamber --port 8080 # Custom port
openchamber --ui-password secret # Password-protect
openchamber stop # Stop server
openchamber update # Update to latest
```
<details>
<summary>Connect to external OpenCode server</summary>
```bash
OPENCODE_PORT=4096 OPENCODE_SKIP_START=true openchamber
OPENCODE_HOST=https://myhost:4096 OPENCODE_SKIP_START=true openchamber
```
| Variable | Description |
|----------|-------------|
| `OPENCODE_HOST` | Full base URL of external server (overrides `OPENCODE_PORT`) |
| `OPENCODE_PORT` | Port of external server |
| `OPENCODE_SKIP_START` | Skip starting embedded OpenCode server |
</details>
<details>
<summary>Docker</summary>
```bash
docker compose up -d # Available at http://localhost:3000
```
**Optional env vars:**
```yaml
environment:
UI_PASSWORD: your_secure_password
```
**Data directory:** mount `data/` for persistent storage. Ensure permissions:
```bash
mkdir -p data/openchamber data/opencode/share data/opencode/config data/ssh
chown -R 1000:1000 data/
```
</details>
<details>
<summary>Background & daemon mode</summary>
```bash
openchamber --daemon # Run in background
openchamber stop # Stop background server
```
</details>
## What makes the web version special
- **Mobile-first PWA** - optimized chat controls, keyboard-safe layouts, drag-to-reorder projects
- **Background notifications** - know when your agent finishes, even from another tab
- **Self-update** - update and restart from the UI, server settings stay intact
- **Cross-tab tracking** - session activity stays in sync across browser tabs
Plus everything from the shared OpenChamber UI: branchable timeline, Git sidebar, terminal, voice mode, and more.
## License
MIT

1022
web/bin/cli.js Normal file

File diff suppressed because it is too large.

20
web/electron-package.json Normal file
View File

@@ -0,0 +1,20 @@
{
"name": "@openchamber/web",
"version": "1.8.5",
"private": false,
"type": "module",
"main": "./server/index.js",
"types": "./server/index.d.ts",
"bin": {
"openchamber": "./bin/cli.js"
},
"publishConfig": {
"access": "public"
},
"scripts": {
"dev": "bun run build:watch",
"dev:server": "bun server/index.js --port ${OPENCHAMBER_PORT:-3001}",
"dev:server:watch": "nodemon --watch server --ext js --exec \"bun server/index.js --port ${OPENCHAMBER_PORT:-3001}\"",
"build": "vite build",
"build:watch": "vite build --watch",
"type-check": "tsc --noEmit",

68
web/electron/main.js Normal file
View File

@@ -0,0 +1,68 @@
const { app, BrowserWindow } = require('electron');
const path = require('path');
const { spawn } = require('child_process');
let mainWindow = null;
let serverProcess = null;
function startServer() {
console.log('Starting OpenChamber server...');
serverProcess = spawn('bun', ['run', 'start:web'], {
cwd: path.join(__dirname, '..'),
stdio: 'inherit',
shell: true,
detached: false
});
serverProcess.on('error', (err) => {
console.error('Server error:', err);
});
serverProcess.on('exit', (code) => {
console.log('Server exited with code:', code);
});
}
function createWindow() {
mainWindow = new BrowserWindow({
width: 800,
height: 600,
show: false,
webPreferences: {
nodeIntegration: false,
contextIsolation: true
}
});
mainWindow.loadURL('http://localhost:3000');
mainWindow.once('ready-to-show', () => {
mainWindow.show();
});
mainWindow.on('closed', () => {
mainWindow = null;
});
}
app.whenReady().then(() => {
startServer();
setTimeout(() => {
createWindow();
}, 3000);
});
app.on('window-all-closed', () => {
if (serverProcess) {
serverProcess.kill();
}
app.quit();
});
app.on('before-quit', () => {
if (serverProcess) {
serverProcess.kill();
}
});

4885
web/electron/package-lock.json generated Normal file

File diff suppressed because it is too large.

39
web/electron/package.json Normal file
View File

@@ -0,0 +1,39 @@
{
"name": "openchamber",
"version": "1.8.5",
"description": "OpenChamber - AI Agent Web UI",
"author": "OpenChamber",
"main": "main.js",
"private": true,
"scripts": {
"electron:dev": "electron .",
"electron:build": "electron-builder --win --x64"
},
"build": {
"appId": "com.openchamber.app",
"productName": "OpenChamber",
"directories": {
"output": "release"
},
"files": [
"main.js",
"../dist/**/*",
"../server/**/*",
"../bin/**/*",
"../public/**/*",
"package.json"
],
"win": {
"target": "portable"
},
"portable": {
"artifactName": "OpenChamber.exe"
},
"asar": true,
"npmRebuild": true
},
"devDependencies": {
"electron": "^41.0.0",
"electron-builder": "^26.8.1"
}
}

276
web/index.html Normal file
View File

@@ -0,0 +1,276 @@
<!doctype html>
<html lang="en" class="h-full">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0, maximum-scale=1.0, user-scalable=no, viewport-fit=cover" />
<!-- Favicon -->
<link rel="icon" type="image/svg+xml" href="/favicon.svg" />
<link rel="icon" type="image/png" href="/favicon-32.png" sizes="32x32" />
<link rel="icon" type="image/png" href="/favicon-16.png" sizes="16x16" />
<link rel="mask-icon" href="/favicon.svg" color="#edb449" />
<!-- Preload Nerd Fonts for terminal icon display -->
<link rel="preload" href="https://cdn.jsdelivr.net/gh/mshaugh/nerdfont-webfonts@v3.3.0/build/fonts/JetBrainsMonoNerdFont-Regular.woff2"
as="font" type="font/woff2" crossorigin="anonymous">
<link rel="preload" href="https://cdn.jsdelivr.net/gh/mshaugh/nerdfont-webfonts@v3.3.0/build/fonts/FiraCodeNerdFont-Regular.woff2"
as="font" type="font/woff2" crossorigin="anonymous">
<script>
// Blocking script to detect and apply theme before first paint
(function() {
try {
var themeMode = localStorage.getItem('themeMode');
var variant = localStorage.getItem('selectedThemeVariant');
var useSystem = localStorage.getItem('useSystemTheme');
var isDark;
// Check themeMode first (new storage key)
if (themeMode === 'dark') {
isDark = true;
} else if (themeMode === 'light') {
isDark = false;
} else if (themeMode === 'system' || useSystem === null || useSystem === 'true') {
// System preference
isDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
} else if (variant === 'light' || variant === 'dark') {
// Legacy storage key fallback
isDark = variant === 'dark';
} else {
// Default to system
isDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
}
// Apply theme class and data attribute
document.documentElement.classList.add(isDark ? 'dark' : 'light');
document.documentElement.setAttribute('data-splash-variant', isDark ? 'dark' : 'light');
document.documentElement.style.setProperty('color-scheme', isDark ? 'dark' : 'light');
// Splash colors persisted by the app theme system
var splashBgLight = localStorage.getItem('splashBgLight');
var splashFgLight = localStorage.getItem('splashFgLight');
var splashBgDark = localStorage.getItem('splashBgDark');
var splashFgDark = localStorage.getItem('splashFgDark');
if (splashBgLight) document.documentElement.style.setProperty('--splash-background-light', splashBgLight);
if (splashFgLight) document.documentElement.style.setProperty('--splash-stroke-light', splashFgLight);
if (splashBgDark) document.documentElement.style.setProperty('--splash-background-dark', splashBgDark);
if (splashFgDark) document.documentElement.style.setProperty('--splash-stroke-dark', splashFgDark);
} catch (error) {
console.warn('Failed to apply theme:', error);
}
})();
</script>
<!-- Theme color - Safari iOS 26+ prioritizes CSS background-color over this, but keep as fallback -->
<meta name="theme-color" content="#151313" />
<meta name="theme-color" content="#151313" media="(prefers-color-scheme: dark)" />
<title>OpenChamber - AI Coding Assistant</title>
<meta name="description" content="Web interface companion for OpenCode AI coding agent" />
<meta name="application-name" content="OpenChamber" />
<!-- Inline CSS for loading screen and Nerd Fonts (before Tailwind loads) -->
<style>
/* Nerd Font @font-face declarations for terminal icon support */
@font-face {
font-family: 'JetBrainsMono Nerd Font';
src:
local('JetBrainsMono Nerd Font'),
url('https://cdn.jsdelivr.net/gh/mshaugh/nerdfont-webfonts@v3.3.0/build/fonts/JetBrainsMonoNerdFont-Regular.woff2') format('woff2'),
url('https://cdn.jsdelivr.net/gh/mshaugh/nerdfont-webfonts@v3.3.0/build/fonts/JetBrainsMonoNerdFont-Regular.woff') format('woff');
font-weight: normal;
font-style: normal;
font-display: swap;
unicode-range: U+E000-F8FF, U+F0000-FFFFF;
}
@font-face {
font-family: 'FiraCode Nerd Font';
src:
local('FiraCode Nerd Font'),
url('https://cdn.jsdelivr.net/gh/mshaugh/nerdfont-webfonts@v3.3.0/build/fonts/FiraCodeNerdFont-Regular.woff2') format('woff2'),
url('https://cdn.jsdelivr.net/gh/mshaugh/nerdfont-webfonts@v3.3.0/build/fonts/FiraCodeNerdFont-Regular.woff') format('woff');
font-weight: normal;
font-style: normal;
font-display: swap;
unicode-range: U+E000-F8FF, U+F0000-FFFFF;
}
:root {
--splash-background-dark: #151313;
--splash-stroke-dark: #CECDC3;
--splash-background-light: #FFFCF0;
--splash-stroke-light: #100F0F;
--splash-background: var(--splash-background-dark);
--splash-stroke: var(--splash-stroke-dark);
/* Fallback fills (overridden below when supported) */
--splash-face-fill: rgba(255, 255, 255, 0.15);
--splash-cell-fill: rgba(255, 255, 255, 0.35);
--splash-logo-fill: var(--splash-stroke);
}
html[data-splash-variant='light'] {
--splash-background: var(--splash-background-light);
--splash-stroke: var(--splash-stroke-light);
--splash-face-fill: rgba(0, 0, 0, 0.15);
--splash-cell-fill: rgba(0, 0, 0, 0.4);
--splash-logo-fill: var(--splash-stroke);
}
html[data-splash-variant='dark'] {
--splash-background: var(--splash-background-dark);
--splash-stroke: var(--splash-stroke-dark);
--splash-logo-fill: var(--splash-stroke);
}
@supports (color: color-mix(in srgb, white 50%, transparent)) {
:root {
--splash-face-fill: color-mix(in srgb, var(--splash-stroke) 15%, transparent);
--splash-cell-fill: color-mix(in srgb, var(--splash-stroke) 35%, transparent);
}
}
@keyframes logo-pulse {
0%, 100% { opacity: 1; }
50% { opacity: 0.4; }
}
.logo-pulse {
animation: logo-pulse 3s ease-in-out infinite;
}
#initial-loading {
background-color: var(--splash-background);
color: var(--splash-foreground);
display: flex;
align-items: center;
justify-content: center;
height: 100vh;
font-family: system-ui, -apple-system, sans-serif;
transition: opacity 0.3s ease-out;
position: absolute;
width: 100%;
z-index: 9999;
}
#initial-loading.fade-out {
opacity: 0;
pointer-events: none;
}
</style>
</head>
<body class="h-full bg-background text-foreground">
<div id="root" class="h-full">
<!-- Loading fallback while React initializes -->
<div id="initial-loading">
<div style="display: flex; align-items: center; justify-content: center;">
<svg width="120" height="120" viewBox="0 0 100 100" fill="none" xmlns="http://www.w3.org/2000/svg" role="img" aria-label="OpenChamber loading icon">
<!-- Isometric cube: edge=48, centerY=50 -->
<!-- Points: top(50,2), left(8.432,26), right(91.568,26), center(50,50), bottomLeft(8.432,74), bottomRight(91.568,74), bottom(50,98) -->
<!-- Left face - base fill with stroke -->
<path d="M50 50 L8.432 26 L8.432 74 L50 98 Z" fill="var(--splash-face-fill)" stroke="var(--splash-stroke)" stroke-width="2" stroke-linejoin="round"/>
<!-- Left face grid cells (4x4) with varying opacity -->
<path d="M50 50 L39.608 44 L39.608 56 L50 62 Z" fill="var(--splash-cell-fill)" opacity="0.2"/>
<path d="M39.608 44 L29.216 38 L29.216 50 L39.608 56 Z" fill="var(--splash-cell-fill)" opacity="0.45"/>
<path d="M29.216 38 L18.824 32 L18.824 44 L29.216 50 Z" fill="var(--splash-cell-fill)" opacity="0.15"/>
<path d="M18.824 32 L8.432 26 L8.432 38 L18.824 44 Z" fill="var(--splash-cell-fill)" opacity="0.55"/>
<path d="M50 62 L39.608 56 L39.608 68 L50 74 Z" fill="var(--splash-cell-fill)" opacity="0.35"/>
<path d="M39.608 56 L29.216 50 L29.216 62 L39.608 68 Z" fill="var(--splash-cell-fill)" opacity="0.1"/>
<path d="M29.216 50 L18.824 44 L18.824 56 L29.216 62 Z" fill="var(--splash-cell-fill)" opacity="0.5"/>
<path d="M18.824 44 L8.432 38 L8.432 50 L18.824 56 Z" fill="var(--splash-cell-fill)" opacity="0.25"/>
<path d="M50 74 L39.608 68 L39.608 80 L50 86 Z" fill="var(--splash-cell-fill)" opacity="0.4"/>
<path d="M39.608 68 L29.216 62 L29.216 74 L39.608 80 Z" fill="var(--splash-cell-fill)" opacity="0.3"/>
<path d="M29.216 62 L18.824 56 L18.824 68 L29.216 74 Z" fill="var(--splash-cell-fill)" opacity="0.45"/>
<path d="M18.824 56 L8.432 50 L8.432 62 L18.824 68 Z" fill="var(--splash-cell-fill)" opacity="0.15"/>
<path d="M50 86 L39.608 80 L39.608 92 L50 98 Z" fill="var(--splash-cell-fill)" opacity="0.55"/>
<path d="M39.608 80 L29.216 74 L29.216 86 L39.608 92 Z" fill="var(--splash-cell-fill)" opacity="0.2"/>
<path d="M29.216 74 L18.824 68 L18.824 80 L29.216 86 Z" fill="var(--splash-cell-fill)" opacity="0.35"/>
<path d="M18.824 68 L8.432 62 L8.432 74 L18.824 80 Z" fill="var(--splash-cell-fill)" opacity="0.1"/>
<!-- Right face - base fill with stroke -->
<path d="M50 50 L91.568 26 L91.568 74 L50 98 Z" fill="var(--splash-face-fill)" stroke="var(--splash-stroke)" stroke-width="2" stroke-linejoin="round"/>
<!-- Right face grid cells (4x4) with varying opacity -->
<path d="M50 50 L60.392 44 L60.392 56 L50 62 Z" fill="var(--splash-cell-fill)" opacity="0.3"/>
<path d="M60.392 44 L70.784 38 L70.784 50 L60.392 56 Z" fill="var(--splash-cell-fill)" opacity="0.15"/>
<path d="M70.784 38 L81.176 32 L81.176 44 L70.784 50 Z" fill="var(--splash-cell-fill)" opacity="0.45"/>
<path d="M81.176 32 L91.568 26 L91.568 38 L81.176 44 Z" fill="var(--splash-cell-fill)" opacity="0.25"/>
<path d="M50 62 L60.392 56 L60.392 68 L50 74 Z" fill="var(--splash-cell-fill)" opacity="0.5"/>
<path d="M60.392 56 L70.784 50 L70.784 62 L60.392 68 Z" fill="var(--splash-cell-fill)" opacity="0.35"/>
<path d="M70.784 50 L81.176 44 L81.176 56 L70.784 62 Z" fill="var(--splash-cell-fill)" opacity="0.1"/>
<path d="M81.176 44 L91.568 38 L91.568 50 L81.176 56 Z" fill="var(--splash-cell-fill)" opacity="0.4"/>
<path d="M50 74 L60.392 68 L60.392 80 L50 86 Z" fill="var(--splash-cell-fill)" opacity="0.2"/>
<path d="M60.392 68 L70.784 62 L70.784 74 L60.392 80 Z" fill="var(--splash-cell-fill)" opacity="0.55"/>
<path d="M70.784 62 L81.176 56 L81.176 68 L70.784 74 Z" fill="var(--splash-cell-fill)" opacity="0.3"/>
<path d="M81.176 56 L91.568 50 L91.568 62 L81.176 68 Z" fill="var(--splash-cell-fill)" opacity="0.15"/>
<path d="M50 86 L60.392 80 L60.392 92 L50 98 Z" fill="var(--splash-cell-fill)" opacity="0.45"/>
<path d="M60.392 80 L70.784 74 L70.784 86 L60.392 92 Z" fill="var(--splash-cell-fill)" opacity="0.25"/>
<path d="M70.784 74 L81.176 68 L81.176 80 L70.784 86 Z" fill="var(--splash-cell-fill)" opacity="0.4"/>
<path d="M81.176 68 L91.568 62 L91.568 74 L81.176 80 Z" fill="var(--splash-cell-fill)" opacity="0.2"/>
<!-- Top face - open (no fill), only stroke -->
<path d="M50 2 L8.432 26 L50 50 L91.568 26 Z" fill="none" stroke="var(--splash-stroke)" stroke-width="2" stroke-linejoin="round"/>
<!-- OpenCode logo on top face -->
<g class="logo-pulse" transform="matrix(0.866, 0.5, -0.866, 0.5, 50, 26) scale(0.75)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M-16 -20 L16 -20 L16 20 L-16 20 Z M-8 -12 L-8 12 L8 12 L8 -12 Z" fill="var(--splash-logo-fill)"/>
<path d="M-8 -4 L8 -4 L8 12 L-8 12 Z" fill="var(--splash-logo-fill)" fill-opacity="0.4"/>
</g>
</svg>
</div>
</div>
</div>
<script>
// Fallback: hide loading screen after 10 seconds if React fails to load
setTimeout(function() {
const loading = document.getElementById('initial-loading');
if (loading) {
console.warn('Loading screen timeout - forcing hide after 10s');
loading.classList.add('fade-out');
setTimeout(function() {
loading.remove();
}, 300);
}
}, 10000);
</script>
<!-- CSS Font Loading API for reliable Nerd Font loading -->
<script>
(function() {
const fonts = [
{
name: 'JetBrainsMono Nerd Font',
url: 'https://cdn.jsdelivr.net/gh/mshaugh/nerdfont-webfonts@v3.3.0/build/fonts/JetBrainsMonoNerdFont-Regular.woff2'
},
{
name: 'FiraCode Nerd Font',
url: 'https://cdn.jsdelivr.net/gh/mshaugh/nerdfont-webfonts@v3.3.0/build/fonts/FiraCodeNerdFont-Regular.woff2'
}
];
const fontPromises = fonts.map(font => {
const fontFace = new FontFace(font.name, `url(${font.url}) format('woff2')`);
document.fonts.add(fontFace);
return fontFace.load().catch(err => {
console.warn(`Failed to load font: ${font.name}`, err);
});
});
Promise.allSettled(fontPromises).then(() => {
document.documentElement.classList.add('fonts-loaded');
});
})();
</script>
<!-- Polyfill for process before loading React -->
<script>
if (typeof process === 'undefined') {
window.process = { env: {} };
}
</script>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>

38
web/package-electron.json Normal file
View File

@@ -0,0 +1,38 @@
{
"name": "openchamber",
"version": "1.8.5",
"description": "OpenChamber - AI Agent Web UI",
"main": "electron/main.js",
"author": "OpenChamber",
"license": "MIT",
"scripts": {
"electron:dev": "electron .",
"electron:build": "electron-builder --win --x64",
"electron:start": "electron . --no-sandbox"
},
"build": {
"appId": "com.openchamber.app",
"productName": "OpenChamber",
"directories": {
"output": "release"
},
"files": [
"dist/**/*",
"server/**/*",
"bin/**/*",
"public/**/*",
"package.json"
],
"win": {
"target": "portable"
},
"portable": {
"artifactName": "OpenChamber.exe"
}
},
"dependencies": {},
"devDependencies": {
"electron": "^38.2.0",
"electron-builder": "^24.13.3"
}
}

104
web/package.json Normal file
View File

@@ -0,0 +1,104 @@
{
"name": "@openchamber/web",
"version": "1.8.5",
"private": false,
"type": "module",
"main": "./server/index.js",
"types": "./server/index.d.ts",
"bin": {
"openchamber": "./bin/cli.js"
},
"publishConfig": {
"access": "public"
},
"scripts": {
"dev": "bun run build:watch",
"dev:server": "bun server/index.js --port ${OPENCHAMBER_PORT:-3001}",
"dev:server:watch": "nodemon --watch server --ext js --exec \"bun server/index.js --port ${OPENCHAMBER_PORT:-3001}\"",
"build": "vite build",
"build:watch": "vite build --watch",
"type-check": "tsc --noEmit",
"lint": "eslint \"./src/**/*.{ts,tsx}\" --config ../eslint.config.js",
"start": "node bin/cli.js serve",
"bundle": "pkg bin/cli.js --public --targets node18-win-x64 --output ../OpenChamber.exe"
},
"dependencies": {
"@codemirror/lang-cpp": "^6.0.3",
"@codemirror/lang-go": "^6.0.1",
"@fontsource/ibm-plex-mono": "^5.2.7",
"@fontsource/ibm-plex-sans": "^5.1.1",
"@ibm/plex": "^6.4.1",
"@octokit/rest": "^22.0.1",
"@opencode-ai/sdk": "^1.2.20",
"@radix-ui/react-collapsible": "^1.1.12",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-dropdown-menu": "^2.1.16",
"@radix-ui/react-scroll-area": "^1.2.10",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.7",
"@radix-ui/react-slot": "^1.2.3",
"@radix-ui/react-toggle": "^1.1.10",
"@radix-ui/react-tooltip": "^1.2.8",
"@remixicon/react": "^4.7.0",
"@types/react-syntax-highlighter": "^15.5.13",
"adm-zip": "^0.5.16",
"bun-pty": "^0.4.5",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"cmdk": "^1.1.1",
"express": "^5.1.0",
"ghostty-web": "0.3.0",
"http-proxy-middleware": "^3.0.5",
"jose": "^6.1.3",
"jsonc-parser": "^3.3.1",
"next-themes": "^0.4.6",
"node-pty": "^1.1.0",
"openai": "^4.79.0",
"qrcode-terminal": "^0.12.0",
"react": "^19.1.1",
"react-dom": "^19.1.1",
"react-markdown": "^10.1.0",
"react-syntax-highlighter": "^15.6.6",
"remark-gfm": "^4.0.1",
"simple-git": "^3.28.0",
"sonner": "^2.0.7",
"strip-json-comments": "^5.0.3",
"tailwind-merge": "^3.3.1",
"web-push": "^3.6.7",
"ws": "^8.18.3",
"yaml": "^2.8.1",
"zustand": "^5.0.8"
},
"devDependencies": {
"@eslint/js": "^9.33.0",
"@tailwindcss/postcss": "^4.0.0",
"@types/adm-zip": "^0.5.7",
"@types/node": "^24.3.1",
"@types/react": "^19.1.10",
"@types/react-dom": "^19.1.7",
"@vitejs/plugin-react": "^5.0.0",
"autoprefixer": "^10.4.21",
"concurrently": "^9.2.1",
"cors": "^2.8.5",
"cross-env": "^7.0.3",
"eslint": "^9.33.0",
"eslint-plugin-react-hooks": "^5.2.0",
"eslint-plugin-react-refresh": "^0.4.20",
"globals": "^16.3.0",
"nodemon": "^3.1.7",
"tailwindcss": "^4.0.0",
"tsx": "^4.20.6",
"tw-animate-css": "^1.3.8",
"typescript": "~5.8.3",
"typescript-eslint": "^8.39.1",
"vite": "^7.1.2"
},
"files": [
"dist",
"server",
"bin",
"public",
"package.json",
"README.md"
]
}

Binary files not shown (5 image assets added: 3.2 KiB, 4.2 KiB, 4.7 KiB, 5.0 KiB, 5.0 KiB).

View File

@@ -0,0 +1,16 @@
<svg width="180" height="180" viewBox="0 0 180 180" xmlns="http://www.w3.org/2000/svg">
<!-- OpenChamber logo - simplified, no background (for iOS) -->
<g transform="translate(90, 90) scale(1.4)">
<!-- Left face - simplified, no grid cells -->
<path d="M0 0 L-41.568 -24 L-41.568 24 L0 48 Z" fill="white" fill-opacity="0.2" stroke="white" stroke-width="3" stroke-linejoin="round"/>
<!-- Right face - simplified, no grid cells -->
<path d="M0 0 L41.568 -24 L41.568 24 L0 48 Z" fill="white" fill-opacity="0.35" stroke="white" stroke-width="3" stroke-linejoin="round"/>
<!-- Top face - open -->
<path d="M0 -48 L-41.568 -24 L0 0 L41.568 -24 Z" fill="none" stroke="white" stroke-width="3" stroke-linejoin="round"/>
<!-- OpenCode logo on top face -->
<g transform="matrix(0.866, 0.5, -0.866, 0.5, 0, -24) scale(0.75)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M-16 -20 L16 -20 L16 20 L-16 20 Z M-8 -12 L-8 12 L8 12 L8 -12 Z" fill="white"/>
<path d="M-8 -4 L8 -4 L8 12 L-8 12 Z" fill="white" fill-opacity="0.4"/>
</g>
</g>
</svg>


BIN
web/public/favicon-16.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 437 B

BIN
web/public/favicon-32.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 919 B

BIN
web/public/favicon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 KiB

26
web/public/favicon.svg Normal file
View File

@@ -0,0 +1,26 @@
<svg width="32" height="32" viewBox="0 0 32 32" xmlns="http://www.w3.org/2000/svg">
<style>
:root {
color: #111111;
}
@media (prefers-color-scheme: dark) {
:root {
color: #f5f5f5;
}
}
</style>
<!-- OpenChamber logo - simplified for favicon -->
<g transform="translate(16, 16) scale(0.48)">
<!-- Left face - with fill -->
<path d="M0 0 L-26 -15 L-26 15 L0 30 Z" fill="currentColor" fill-opacity="0.25" stroke="currentColor" stroke-width="4" stroke-linejoin="round"/>
<!-- Right face - with fill -->
<path d="M0 0 L26 -15 L26 15 L0 30 Z" fill="currentColor" fill-opacity="0.4" stroke="currentColor" stroke-width="4" stroke-linejoin="round"/>
<!-- Top face - open -->
<path d="M0 -30 L-26 -15 L0 0 L26 -15 Z" fill="none" stroke="currentColor" stroke-width="4" stroke-linejoin="round"/>
<!-- O logo - hollow rectangle frame on top face with gray fill inside -->
<g transform="matrix(0.866, 0.5, -0.866, 0.5, 0, -15) scale(0.55)">
<rect x="-12" y="-14" width="24" height="28" fill="currentColor" fill-opacity="0.4" stroke="currentColor" stroke-width="8"/>
</g>
</g>
</svg>

Binary file not shown (image asset, 3.8 KiB).

View File

@@ -0,0 +1,16 @@
<svg width="512" height="512" viewBox="0 0 512 512" xmlns="http://www.w3.org/2000/svg">
<!-- OpenChamber logo - simplified, no background (for dark backgrounds) -->
<g transform="translate(256, 256) scale(4)">
<!-- Left face - simplified, no grid cells -->
<path d="M0 0 L-41.568 -24 L-41.568 24 L0 48 Z" fill="white" fill-opacity="0.2" stroke="white" stroke-width="3" stroke-linejoin="round"/>
<!-- Right face - simplified, no grid cells -->
<path d="M0 0 L41.568 -24 L41.568 24 L0 48 Z" fill="white" fill-opacity="0.35" stroke="white" stroke-width="3" stroke-linejoin="round"/>
<!-- Top face - open -->
<path d="M0 -48 L-41.568 -24 L0 0 L41.568 -24 Z" fill="none" stroke="white" stroke-width="3" stroke-linejoin="round"/>
<!-- OpenCode logo on top face -->
<g transform="matrix(0.866, 0.5, -0.866, 0.5, 0, -24) scale(0.75)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M-16 -20 L16 -20 L16 20 L-16 20 Z M-8 -12 L-8 12 L8 12 L8 -12 Z" fill="white"/>
<path d="M-8 -4 L8 -4 L8 12 L-8 12 Z" fill="white" fill-opacity="0.4"/>
</g>
</g>
</svg>

Binary file not shown (image asset, 3.7 KiB).

View File

@@ -0,0 +1,16 @@
<svg width="512" height="512" viewBox="0 0 512 512" xmlns="http://www.w3.org/2000/svg">
<!-- OpenChamber logo - simplified, no background (for light backgrounds) -->
<g transform="translate(256, 256) scale(4)">
<!-- Left face - simplified, no grid cells -->
<path d="M0 0 L-41.568 -24 L-41.568 24 L0 48 Z" fill="black" fill-opacity="0.2" stroke="black" stroke-width="3" stroke-linejoin="round"/>
<!-- Right face - simplified, no grid cells -->
<path d="M0 0 L41.568 -24 L41.568 24 L0 48 Z" fill="black" fill-opacity="0.35" stroke="black" stroke-width="3" stroke-linejoin="round"/>
<!-- Top face - open -->
<path d="M0 -48 L-41.568 -24 L0 0 L41.568 -24 Z" fill="none" stroke="black" stroke-width="3" stroke-linejoin="round"/>
<!-- OpenCode logo on top face -->
<g transform="matrix(0.866, 0.5, -0.866, 0.5, 0, -24) scale(0.75)">
<path fill-rule="evenodd" clip-rule="evenodd" d="M-16 -20 L16 -20 L16 20 L-16 20 Z M-8 -12 L-8 12 L8 12 L8 -12 Z" fill="black"/>
<path d="M-8 -4 L8 -4 L8 12 L-8 12 Z" fill="black" fill-opacity="0.4"/>
</g>
</g>
</svg>


View File

@@ -0,0 +1,44 @@
# Terminal Input WS Protocol
## Goal
Reduce terminal input latency by replacing per-keystroke HTTP requests with a persistent WebSocket input channel, while keeping SSE output and HTTP endpoints as compatibility fallback.
## Scope
- Input path: WebSocket (`/api/terminal/input-ws`)
- Output path: SSE (`/api/terminal/:sessionId/stream`)
- HTTP input fallback remains: `POST /api/terminal/:sessionId/input`
## Framing
- Text frame: terminal keystroke payload (hot path)
- Examples: `"\r"`, `"\u001b[A"`, `"\u0003"`
- Binary frame: control envelope
- Byte 0: tag (`0x01` = JSON control)
- Bytes 1..N: UTF-8 JSON payload
## Control Messages
- Bind active socket to terminal session:
- client -> server: `{"t":"b","s":"<sessionId>","v":1}`
- Keepalive ping:
- client -> server: `{"t":"p","v":1}`
- server -> client: `{"t":"po","v":1}`
- Server control responses:
- ready: `{"t":"ok","v":1}`
- bind ok: `{"t":"bok","v":1}`
- error: `{"t":"e","c":"<code>","f":true|false}`
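For illustration, a minimal client-side sketch of this framing, assuming the browser `WebSocket` API against the input path above (session id and timings are placeholders):
```js
const ws = new WebSocket(`wss://${location.host}/api/terminal/input-ws`);
ws.binaryType = 'arraybuffer';
// Binary frame: byte 0 is the tag (0x01 = JSON control), the rest is UTF-8 JSON.
function sendControl(message) {
  const json = new TextEncoder().encode(JSON.stringify(message));
  const frame = new Uint8Array(1 + json.length);
  frame[0] = 0x01;
  frame.set(json, 1);
  ws.send(frame);
}
ws.addEventListener('open', () => {
  sendControl({ t: 'b', s: 'ses_123', v: 1 });              // bind to the active session (placeholder id)
  setInterval(() => sendControl({ t: 'p', v: 1 }), 30_000); // keepalive; server replies {"t":"po","v":1}
});
// Hot path: keystrokes are plain text frames.
function sendKeystroke(data) {
  ws.send(data); // e.g. "\r", "\u001b[A", "\u0003"
}
```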
## Multiplexing Model
- Single shared socket per client runtime.
- Socket has one mutable `boundSessionId`.
- Client sends bind control when active terminal changes.
- Keystroke frames apply to currently bound session.
- Client keeps socket open and sends periodic keepalive pings so the channel stays ready for next input.
- Client primes/opens this socket when the Terminal tab is opened (not per keystroke).
## Security
- UI auth session required when UI password is enabled.
- Origin validation enforced for cookie-authenticated browser upgrades.
- Invalid/malformed frames are rate-limited and may close socket.
## Fallback Behavior
- On WS unavailable/error/close, client falls back to HTTP input immediately.
- Existing terminal behavior remains functional during mixed-version rollout.
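As a sketch, the fallback could look like this (the HTTP endpoint is the one listed under Scope; the request body shape is an assumption):
```js
// Send a keystroke, preferring the WS channel and falling back to HTTP input.
async function sendInput(ws, sessionId, data) {
  if (ws && ws.readyState === WebSocket.OPEN) {
    ws.send(data);
    return;
  }
  await fetch(`/api/terminal/${sessionId}/input`, {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ data }), // body shape is an assumption, not the documented contract
  });
}
```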

28
web/server/index.d.ts vendored Normal file
View File

@@ -0,0 +1,28 @@
import type { Express } from "express";
import type { Server } from "http";
export interface WebUiServerController {
expressApp: Express;
httpServer: Server;
getPort: () => number | null;
getOpenCodePort: () => number | null;
isReady: () => boolean;
restartOpenCode: () => Promise<void>;
stop: (options?: { exitProcess?: boolean }) => Promise<void>;
}
export interface StartWebUiServerOptions {
port?: number;
attachSignals?: boolean;
exitOnShutdown?: boolean;
uiPassword?: string | null;
}
export declare function startWebUiServer(
options?: StartWebUiServerOptions
): Promise<WebUiServerController>;
export declare function gracefulShutdown(options?: { exitProcess?: boolean }): Promise<void>;
export declare function setupProxy(app: Express): void;
export declare function restartOpenCode(): Promise<void>;
export declare function parseArgs(argv?: string[]): { port: number; uiPassword: string | null };

12329
web/server/index.js Normal file

File diff suppressed because it is too large.

View File

@@ -0,0 +1,145 @@
# Git Module Documentation
## Purpose
This module provides Git repository operations for the web server runtime, including repository management, branch/worktree operations, status/diff queries, commit handling, and merge/rebase workflows.
## Entrypoints and structure
- `packages/web/server/lib/git/`: Git module directory containing all Git-related functionality.
- `index.js`: Public API entry point imported by `packages/web/server/index.js`.
- `service.js`: Core Git operations (repository, branch, worktree, commit, merge/rebase, status/diff, log).
- `credentials.js`: Git credentials management.
- `identity-storage.js`: Git identity (user.name, user.email) storage.
## Public API
The following functions are exported and used by the web server:
### Repository Operations
- `isGitRepository(directory)`: Check if a directory is a Git repository.
- `getGlobalIdentity()`: Get global Git user.name, user.email, and core.sshCommand.
- `getCurrentIdentity(directory)`: Get local Git identity (fallback to global if not set locally).
- `hasLocalIdentity(directory)`: Check if local Git identity is configured.
- `setLocalIdentity(directory, profile)`: Set local Git identity (userName, userEmail, authType, sshKey/host).
- `getRemoteUrl(directory, remoteName)`: Get URL for a specific remote.
### Status and Diff Operations
- `getStatus(directory)`: Get comprehensive Git status including current branch, tracking, ahead/behind, file changes, diff stats, merge/rebase state.
- `getDiff(directory, { path, staged, contextLines })`: Get diff output for files or entire working tree.
- `getRangeDiff(directory, { base, head, path, contextLines })`: Get diff between two refs.
- `getRangeFiles(directory, { base, head })`: Get list of changed files between two refs.
- `getFileDiff(directory, { path, staged })`: Get original and modified file contents for a single file (handles images as data URLs).
- `collectDiffs(directory, files)`: Collect diff output for multiple files.
- `revertFile(directory, filePath)`: Revert a file to HEAD state.
### Branch Operations
- `getBranches(directory)`: Get list of local and remote branches (filtered to active remote branches).
- `createBranch(directory, branchName, options)`: Create and checkout a new branch.
- `checkoutBranch(directory, branchName)`: Checkout an existing branch.
- `deleteBranch(directory, branch, options)`: Delete a branch (supports force flag).
- `renameBranch(directory, oldName, newName)`: Rename a branch and preserve upstream tracking.
- `getRemotes(directory)`: Get list of configured remotes.
### Worktree Operations
- `getWorktrees(directory)`: List all git worktrees for a repository.
- `validateWorktreeCreate(directory, input)`: Validate worktree creation parameters (mode, branchName, startRef, upstream config).
- `createWorktree(directory, input)`: Create a new worktree (supports 'new' and 'existing' modes, upstream setup).
- `removeWorktree(directory, input)`: Remove a worktree (optionally delete local branch).
- `isLinkedWorktree(directory)`: Check if directory is a linked worktree (not primary).
### Commit and Remote Operations
- `commit(directory, message, options)`: Create a commit (supports addAll or specific files).
- `pull(directory, options)`: Pull changes from remote.
- `push(directory, options)`: Push changes to remote (auto-sets upstream if needed).
- `fetch(directory, options)`: Fetch changes from remote.
- `deleteRemoteBranch(directory, options)`: Delete a remote branch.
### Log Operations
- `getLog(directory, options)`: Get commit history with stats (supports maxCount, from, to, file filters).
- `getCommitFiles(directory, commitHash)`: Get file changes for a specific commit.
### Merge and Rebase Operations
- `rebase(directory, options)`: Start a rebase onto a target branch.
- `abortRebase(directory)`: Abort an in-progress rebase.
- `continueRebase(directory)`: Continue a rebase after conflict resolution.
- `merge(directory, options)`: Merge a branch into current branch.
- `abortMerge(directory)`: Abort an in-progress merge.
- `continueMerge(directory)`: Continue a merge after conflict resolution.
- `getConflictDetails(directory)`: Get detailed conflict information including operation type, unmerged files, and diff.
### Stash Operations
- `stash(directory, options)`: Stash changes (supports message and includeUntracked options).
- `stashPop(directory)`: Pop and apply the most recent stash.
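For orientation, a minimal usage sketch of this API (import path per the entrypoints above; the functions are assumed to be promise-based since they wrap simple-git, and the repository path is a placeholder):
```js
import { isGitRepository, getStatus, commit, push } from './lib/git/index.js';

const dir = '/home/user/projects/demo'; // placeholder repository path
if (await isGitRepository(dir)) {
  const status = await getStatus(dir);
  if (!status.isClean) {
    await commit(dir, 'chore: sync local changes', { addAll: true });
    await push(dir, {}); // auto-sets upstream when needed
  }
}
```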
## Internal Helpers
The following functions are internal helpers used by exported functions:
- `buildSshCommand(sshKeyPath)`: Build SSH command string for git config.
- `buildGitEnv()`: Build Git environment with SSH_AUTH_SOCK resolution.
- `createGit(directory)`: Create simple-git instance with environment.
- `normalizeDirectoryPath(value)`: Normalize directory paths (supports ~ expansion).
- `cleanBranchName(branch)`: Remove refs/heads/ or refs/ prefixes.
- `parseWorktreePorcelain(raw)`: Parse `git worktree list --porcelain` output.
- `resolveWorktreeProjectContext(directory)`: Resolve project context (projectID, primaryWorktree, worktreeRoot).
- `resolveCandidateDirectory(...)`: Generate unique worktree directory candidates.
- `resolveBranchForExistingMode(...)`: Resolve branch for existing-mode worktree creation.
- `applyUpstreamConfiguration(...)`: Set upstream tracking for new branches.
- And various other internal helpers for Git command execution and parsing.
## Response Contracts
### Status Response
- `current`: Current branch name.
- `tracking`: Upstream branch (e.g., 'origin/main').
- `ahead`: Number of commits ahead of upstream.
- `behind`: Number of commits behind upstream.
- `files`: Array of file objects with `path`, `index`, `working_dir` status codes.
- `isClean`: Boolean indicating if working tree is clean.
- `diffStats`: Object mapping file paths to `{ insertions, deletions }`.
- `mergeInProgress`: Object with `{ head, message }` if merge in progress.
- `rebaseInProgress`: Object with `{ headName, onto }` if rebase in progress.
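An illustrative status payload with these fields (values are invented):
```js
const exampleStatus = {
  current: 'feature/login',
  tracking: 'origin/feature/login',
  ahead: 2,
  behind: 0,
  files: [
    { path: 'src/auth.ts', index: 'M', working_dir: ' ' },
    { path: 'src/new-page.tsx', index: '?', working_dir: '?' },
  ],
  isClean: false,
  diffStats: { 'src/auth.ts': { insertions: 14, deletions: 3 } },
  mergeInProgress: null,  // { head, message } while a merge is in progress
  rebaseInProgress: null, // { headName, onto } while a rebase is in progress
};
```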
### Worktree Create/Remove Response
- `head`: HEAD commit SHA.
- `name`: Worktree name.
- `branch`: Local branch name.
- `path`: Absolute path to worktree directory.
### Log Response
- `all`: Array of commit objects with hash, date, message, author info, stats.
- `latest`: Latest commit object or null.
- `total`: Total number of commits.
## Notes for Contributors
### Adding a New Git Operation
1. Add the function to `packages/web/server/lib/git/service.js`.
2. Export the function if it's part of the public API.
3. Use `createGit(directory)` to get a simple-git instance with the correct environment.
4. Use `runGitCommand(cwd, args)` for direct git command execution with better error handling.
5. Use `runGitCommandOrThrow(cwd, args, fallbackMessage)` for commands that must succeed.
6. Return consistent error messages; use `parseGitErrorText(error)` to extract meaningful git errors.
7. Update this file with the new function in the appropriate API section.
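As a sketch only, a hypothetical new operation following these steps (the helper signatures mirror the descriptions above; the operation itself and its async behavior are assumptions):
```js
// Hypothetical addition to service.js: list tags in a repository.
export async function getTags(directory) {
  const cwd = normalizeDirectoryPath(directory); // handles ~ and path separators
  const output = await runGitCommandOrThrow(cwd, ['tag', '--list'], 'Failed to list tags');
  return output.split('\n').map((line) => line.trim()).filter(Boolean);
}
```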
### SSH Key Handling
- SSH keys are escaped and validated via `escapeSshKeyPath` to prevent command injection.
- On Windows, paths are converted to MSYS format (`C:/path` → `/c/path`).
- SSH_AUTH_SOCK is automatically resolved via `resolveSshAuthSock` (checks GPG agent, gpgconf).
### Worktree Naming
- Worktree names are slugified via `slugWorktreeName`.
- Random names use adjectives/nouns from `OPENCODE_ADJECTIVES` and `OPENCODE_NOUNS` lists.
- Branches created for new worktrees use `openchamber/<worktree-name>` pattern.
### Cross-Platform Considerations
- Use `normalizeDirectoryPath` for all directory inputs to handle `~` and path separators.
- Use `canonicalPath` for path comparisons to handle case-insensitive filesystems (Windows).
- Windows Git commands use MSYS/MinGW paths; avoid direct Windows paths in git commands.
### Error Handling
- All exported functions should throw errors with descriptive messages.
- Use `console.error` for logging Git operation failures.
- Return structured objects for operations that need partial success reporting (e.g., merge/rebase conflicts).
### Testing
- Run `bun run type-check`, `bun run lint`, and `bun run build` before finalizing changes.
- Consider edge cases: non-Git directories, missing remotes, conflict states, concurrent worktree operations.

View File

@@ -0,0 +1,74 @@
import fs from 'fs';
import path from 'path';
import os from 'os';
const GIT_CREDENTIALS_PATH = path.join(os.homedir(), '.git-credentials');
export function discoverGitCredentials() {
const credentials = [];
if (!fs.existsSync(GIT_CREDENTIALS_PATH)) {
return credentials;
}
try {
const content = fs.readFileSync(GIT_CREDENTIALS_PATH, 'utf8');
const lines = content.split('\n').filter(line => line.trim());
for (const line of lines) {
try {
const url = new URL(line.trim());
const hostname = url.hostname;
const pathname = url.pathname && url.pathname !== '/' ? url.pathname : '';
const host = hostname + pathname;
const username = url.username || '';
if (host && username) {
const exists = credentials.some(c => c.host === host && c.username === username);
if (!exists) {
credentials.push({ host, username });
}
}
} catch {
continue;
}
}
} catch (error) {
console.error('Failed to read .git-credentials:', error);
}
return credentials;
}
export function getCredentialForHost(host) {
if (!fs.existsSync(GIT_CREDENTIALS_PATH)) {
return null;
}
try {
const content = fs.readFileSync(GIT_CREDENTIALS_PATH, 'utf8');
const lines = content.split('\n').filter(line => line.trim());
for (const line of lines) {
try {
const url = new URL(line.trim());
const hostname = url.hostname;
const pathname = url.pathname && url.pathname !== '/' ? url.pathname : '';
const credHost = hostname + pathname;
if (credHost === host) {
return {
username: url.username || '',
token: url.password || ''
};
}
} catch {
continue;
}
}
} catch (error) {
console.error('Failed to read .git-credentials for host lookup:', error);
}
return null;
}

View File

@@ -0,0 +1,110 @@
import fs from 'fs';
import path from 'path';
import os from 'os';
const STORAGE_DIR = path.join(os.homedir(), '.config', 'openchamber');
const STORAGE_FILE = path.join(STORAGE_DIR, 'git-identities.json');
function ensureStorageDir() {
if (!fs.existsSync(STORAGE_DIR)) {
fs.mkdirSync(STORAGE_DIR, { recursive: true });
}
}
export function loadProfiles() {
ensureStorageDir();
if (!fs.existsSync(STORAGE_FILE)) {
return { profiles: [] };
}
try {
const content = fs.readFileSync(STORAGE_FILE, 'utf8');
const data = JSON.parse(content);
return data;
} catch (error) {
console.error('Failed to load git identity profiles:', error);
return { profiles: [] };
}
}
export function saveProfiles(data) {
ensureStorageDir();
try {
fs.writeFileSync(STORAGE_FILE, JSON.stringify(data, null, 2), 'utf8');
return true;
} catch (error) {
console.error('Failed to save git identity profiles:', error);
throw error;
}
}
export function getProfiles() {
const data = loadProfiles();
return data.profiles || [];
}
export function getProfile(id) {
const profiles = getProfiles();
return profiles.find(p => p.id === id) || null;
}
export function createProfile(profileData) {
const profiles = getProfiles();
if (profiles.some(p => p.id === profileData.id)) {
throw new Error(`Profile with ID "${profileData.id}" already exists`);
}
if (!profileData.id || !profileData.userName || !profileData.userEmail) {
throw new Error('Profile must have id, userName, and userEmail');
}
const newProfile = {
id: profileData.id,
name: profileData.name || profileData.userName,
userName: profileData.userName,
userEmail: profileData.userEmail,
authType: profileData.authType || 'ssh',
sshKey: profileData.sshKey || null,
host: profileData.host || null,
color: profileData.color || 'keyword',
icon: profileData.icon || 'branch'
};
profiles.push(newProfile);
saveProfiles({ profiles });
return newProfile;
}
export function updateProfile(id, updates) {
const profiles = getProfiles();
const index = profiles.findIndex(p => p.id === id);
if (index === -1) {
throw new Error(`Profile with ID "${id}" not found`);
}
profiles[index] = {
...profiles[index],
...updates,
id: profiles[index].id
};
saveProfiles({ profiles });
return profiles[index];
}
export function deleteProfile(id) {
const profiles = getProfiles();
const filteredProfiles = profiles.filter(p => p.id !== id);
if (filteredProfiles.length === profiles.length) {
throw new Error(`Profile with ID "${id}" not found`);
}
saveProfiles({ profiles: filteredProfiles });
return true;
}

View File

@@ -0,0 +1,6 @@
// Git library public entrypoint
// Re-exports all Git operations, credentials, and identity storage functions
export * from './service.js';
export * from './credentials.js';
export * from './identity-storage.js';

File diff suppressed because it is too large.

View File

@@ -0,0 +1,170 @@
# GitHub Module Documentation
## Purpose
- This module owns GitHub auth, Octokit access, repo resolution, and Pull Request status resolution for OpenChamber.
- From the user's perspective, this is the layer that lets the app know which PR belongs to a local branch and keeps that UI feeling current.
## Entrypoints and structure
- `packages/web/server/lib/github/index.js`: public server entrypoint.
- `packages/web/server/lib/github/auth.js`: auth storage, multi-account support, client id, scope config.
- `packages/web/server/lib/github/device-flow.js`: OAuth device flow.
- `packages/web/server/lib/github/octokit.js`: Octokit factory for the current auth.
- `packages/web/server/lib/github/repo/index.js`: remote URL parsing and directory-to-repo resolution.
- `packages/web/server/lib/github/pr-status.js`: PR lookup across remotes, forks, and upstreams.
- `packages/web/server/index.js`: API route layer that calls this module.
- `packages/web/src/api/github.ts`: web client wrapper for GitHub endpoints.
## Public exports
### Auth
- `getGitHubAuth()`: current auth entry.
- `getGitHubAuthAccounts()`: all configured accounts.
- `setGitHubAuth({ accessToken, scope, tokenType, user, accountId })`: save or update account.
- `activateGitHubAuth(accountId)`: switch active account.
- `clearGitHubAuth()`: clear current account.
- `getGitHubClientId()`: resolve client id.
- `getGitHubScopes()`: resolve scopes.
- `GITHUB_AUTH_FILE`: auth file path.
### Device flow
- `startDeviceFlow({ clientId, scope })`: request device code.
- `exchangeDeviceCode({ clientId, deviceCode })`: poll for access token.
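A rough sketch of how the two calls fit together (the returned field names are assumed to follow GitHub's standard device-flow response; retrying on `authorization_pending` is the caller's job, per the notes at the end of this document):
```js
import { startDeviceFlow, exchangeDeviceCode, getGitHubClientId, getGitHubScopes } from './lib/github/index.js';

const clientId = getGitHubClientId();
const device = await startDeviceFlow({ clientId, scope: getGitHubScopes() });
// Assumed fields: device.user_code, device.verification_uri, device.device_code, device.interval.
// Show user_code / verification_uri to the user, then poll until GitHub stops
// reporting authorization_pending (retry handling is up to the caller).
const auth = await exchangeDeviceCode({ clientId, deviceCode: device.device_code });
```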
### Octokit
- `getOctokitOrNull()`: current Octokit or `null`.
### Repo
- `parseGitHubRemoteUrl(raw)`: parse SSH or HTTPS remote URL into `{ owner, repo, url }`.
- `resolveGitHubRepoFromDirectory(directory, remoteName)`: resolve GitHub repo from a local git remote.
## Auth storage and config
- Auth storage: `~/.config/openchamber/github-auth.json`
- Writes are atomic and file mode is `0o600`.
- Client ID resolution order: `OPENCHAMBER_GITHUB_CLIENT_ID` -> `settings.json` -> default.
- Scope resolution order: `OPENCHAMBER_GITHUB_SCOPES` -> `settings.json` -> default.
- Account id resolution order: explicit `accountId` -> user login -> user id -> token prefix.
## PR integration overview
- The UI asks `github.prStatus(directory, branch, remote?)` from `packages/web/src/api/github.ts`.
- That hits `GET /api/github/pr/status` in `packages/web/server/index.js`.
- The route calls `resolveGitHubPrStatus(...)` in `packages/web/server/lib/github/pr-status.js`.
- The resolver finds the most likely repo and PR for a local branch.
- The route then enriches that result with checks, mergeability, and permission-related fields.
- The client caches and shares the result between sidebar and Git view.
## Consumers of PR data
- `packages/ui/src/components/session/SessionSidebar.tsx` reads all PR entries and maps them to `directory::branch`.
- `packages/ui/src/components/session/sidebar/SessionGroupSection.tsx` renders the compact badge, PR number, title, checks summary, and GitHub link.
- `packages/ui/src/components/views/git/PullRequestSection.tsx` uses the same shared entry for the full PR workflow.
- `packages/ui/src/components/ui/MemoryDebugPanel.tsx` reads request counters for debugging.
## How PR resolution works
- It reads local git status and remotes first.
- It ranks remotes in this order: explicit remote, tracking remote, `origin`, `upstream`, then the rest.
- It resolves those remotes into GitHub repos.
- It expands each repo through `parent` and `source` so PRs in upstream repos can still be found.
- It skips PR lookup when the current branch matches that repo's default branch.
- It first searches for PRs by likely source owner plus exact head branch.
- If that fails, it falls back to broader GitHub search for the branch name.
- `403` and `404` during repo lookups are treated as expected gaps, not hard errors.
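A condensed sketch of the remote-ranking step (helper and field names are hypothetical; the real logic lives in `pr-status.js`):
```js
// Order remotes by preference: explicit, tracking, origin, upstream, then the rest.
function rankRemotes(remotes, { explicitRemote, trackingRemote }) {
  const priority = (name) => {
    if (name === explicitRemote) return 0;
    if (name === trackingRemote) return 1;
    if (name === 'origin') return 2;
    if (name === 'upstream') return 3;
    return 4;
  };
  return [...remotes].sort((a, b) => priority(a.name) - priority(b.name));
}
```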
## Shared client state model
- Client key is effectively `directory::branch`.
- One entry stores last known status, loading state, error, timestamps, watcher count, identity, and resolved remote.
- Requests are deduplicated by branch signature, not by component instance.
- This keeps sidebar and Git view aligned and avoids duplicated fetches.
## Persistence
- PR state is persisted in local storage under `openchamber.github-pr-status`.
- Persisted fields include status, timestamps, identity, and resolved remote.
- Runtime-only details are not persisted.
- Persisted entries expire after 12 hours.
- On reload, users get last known state first, then background refresh resumes.
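A sketch of the load side of this persistence, using the key and 12-hour expiry above (the entry's timestamp field name is an assumption):
```js
const STORAGE_KEY = 'openchamber.github-pr-status';
const MAX_AGE_MS = 12 * 60 * 60 * 1000; // 12 hours

function loadPersistedPrStatus() {
  try {
    const entries = JSON.parse(localStorage.getItem(STORAGE_KEY) ?? '{}');
    const now = Date.now();
    return Object.fromEntries(
      Object.entries(entries).filter(([, entry]) => now - (entry.updatedAt ?? 0) < MAX_AGE_MS)
    );
  } catch {
    return {}; // corrupted storage: start fresh
  }
}
```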
## Polling and refresh model
- There are two layers: entry-level polling in `useGitHubPrStatusStore` and repo scanning in `useGitHubPrBackgroundTracking`.
- Entry-level polling decides when a known branch should revalidate PR state.
- Background tracking decides which directories and branches should even be watched.
## Entry-level polling rules
- Start watching -> immediate refresh.
- If no PR is found yet -> retry after `2s` and `5s`.
- Still no PR -> discovery refresh every `5m`.
- Open PR with pending checks -> refresh about every `1m`.
- Open PR with non-pending checks -> refresh about every `5m`.
- Open PR without a stable checks signal -> refresh about every `2m`.
- Closed or merged PR -> stop regular polling.
- Hidden tab -> skip polling.
- Non-forced refreshes use a `90s` TTL.
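Reduced to a sketch, the interval selection could look like this (entry field names are assumptions, not the actual store shape):
```js
// Pick the next refresh delay (ms) for a watched branch, or null to stop/skip polling.
function nextPollInterval(entry) {
  if (document.hidden) return null;                 // hidden tab: skip polling
  if (!entry.pr) return 5 * 60 * 1000;              // no PR yet: discovery refresh every 5m
  if (entry.pr.state !== 'open') return null;       // closed or merged: stop regular polling
  if (entry.checks === 'pending') return 60 * 1000; // pending checks: ~1m
  if (entry.checks) return 5 * 60 * 1000;           // settled checks: ~5m
  return 2 * 60 * 1000;                             // no stable checks signal: ~2m
}
```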
## Background tracking rules
- Track up to `50` likely directories.
- Sources are current directory, projects, worktrees, active sessions, and archived sessions.
- Active directory branch TTL is `15s`.
- Background directory branch TTL is `2m`.
- Background scan wakes every `15s`, but only fetches directories whose TTL expired.
- Each scan reads `branch`, `tracking`, `ahead`, and `behind` from git status.
- If any of those branch signals change, that branch's PR status refreshes immediately.
- After that, one more delayed refresh runs after `5s` to catch GitHub eventual consistency.
## UI refresh triggers
- App or tab becomes visible.
- Window regains focus.
- Current branch changes.
- Tracking branch changes.
- Ahead or behind changes.
- User selects a different remote in Git view.
- GitHub auth state changes.
## Action-based refreshes in Git view
- After `Create PR` -> refresh now, then after `2s` and `5s`.
- After `Merge PR` -> refresh now, then after `2s` and `5s`.
- After `Mark ready for review` -> refresh now, then after `2s` and `5s`.
- After `Update PR` -> refresh now, then after `2s` and `5s`.
## Sidebar behavior
- Sidebar shows only compact PR state.
- Aggregation is by `directory::branch`, so multiple sessions on one branch share one signal.
- If multiple entries exist, sidebar keeps the strongest visible PR state.
- Visual state is based on PR health, not merge permissions.
## Git view behavior
- Git view watches one branch directly.
- It supports create, edit, mark ready, and merge.
- It can probe alternate remotes so fork-heavy setups still find the right PR.
- It uses the same shared store as the sidebar.
## Failure handling
- If GitHub is disconnected, API returns `connected: false`.
- If a repo is private or inaccessible, resolver calls may quietly return no PR.
- Sidebar stays quiet on missing or inaccessible PR state.
- Git view is where explicit PR-level problems should be shown.
## Notes for contributors
- Keep the UI calm. Do not add noisy diagnostics to the sidebar.
- Prefer shared state over per-component fetches.
- Prefer event-shaped refreshes over blind frequent polling.
- Prefer correctness for fork and multi-remote setups over assuming `origin` is enough.
- Device flow handles GitHub `authorization_pending` at caller level.
- Repo parser supports `git@github.com:`, `ssh://git@github.com/`, and `https://github.com/`.

View File

@@ -0,0 +1,307 @@
import fs from 'fs';
import path from 'path';
import os from 'os';
const OPENCHAMBER_DATA_DIR = process.env.OPENCHAMBER_DATA_DIR
? path.resolve(process.env.OPENCHAMBER_DATA_DIR)
: path.join(os.homedir(), '.config', 'openchamber');
const STORAGE_DIR = OPENCHAMBER_DATA_DIR;
const STORAGE_FILE = path.join(STORAGE_DIR, 'github-auth.json');
const SETTINGS_FILE = path.join(OPENCHAMBER_DATA_DIR, 'settings.json');
const DEFAULT_GITHUB_CLIENT_ID = 'Ov23liNd8TxDcMXtAHHM';
const DEFAULT_GITHUB_SCOPES = 'repo read:org workflow read:user user:email';
function ensureStorageDir() {
if (!fs.existsSync(STORAGE_DIR)) {
fs.mkdirSync(STORAGE_DIR, { recursive: true });
}
}
function readJsonFile() {
ensureStorageDir();
if (!fs.existsSync(STORAGE_FILE)) {
return null;
}
try {
const raw = fs.readFileSync(STORAGE_FILE, 'utf8');
const trimmed = raw.trim();
if (!trimmed) {
return null;
}
const parsed = JSON.parse(trimmed);
if (!parsed || typeof parsed !== 'object') {
return null;
}
return parsed;
} catch (error) {
console.error('Failed to read GitHub auth file:', error);
return null;
}
}
function writeJsonFile(payload) {
ensureStorageDir();
// Atomic write so multiple OpenChamber instances can safely share the same file.
const tmpFile = `${STORAGE_FILE}.${process.pid}.${Date.now()}.tmp`;
fs.writeFileSync(tmpFile, JSON.stringify(payload, null, 2), 'utf8');
try {
fs.chmodSync(tmpFile, 0o600);
} catch {
// best-effort
}
fs.renameSync(tmpFile, STORAGE_FILE);
try {
fs.chmodSync(STORAGE_FILE, 0o600);
} catch {
// best-effort
}
}
function resolveAccountId({ user, accessToken, accountId }) {
if (typeof accountId === 'string' && accountId.trim()) {
return accountId.trim();
}
if (user && typeof user.login === 'string' && user.login.trim()) {
return user.login.trim();
}
if (user && typeof user.id === 'number') {
return String(user.id);
}
if (typeof accessToken === 'string' && accessToken.trim()) {
return `token:${accessToken.slice(0, 8)}`;
}
return '';
}
function normalizeAuthEntry(entry) {
if (!entry || typeof entry !== 'object') return null;
const accessToken = typeof entry.accessToken === 'string' ? entry.accessToken : '';
if (!accessToken) return null;
const user = entry.user && typeof entry.user === 'object'
? {
login: typeof entry.user.login === 'string' ? entry.user.login : null,
avatarUrl: typeof entry.user.avatarUrl === 'string' ? entry.user.avatarUrl : null,
id: typeof entry.user.id === 'number' ? entry.user.id : null,
name: typeof entry.user.name === 'string' ? entry.user.name : null,
email: typeof entry.user.email === 'string' ? entry.user.email : null,
}
: null;
const accountId = resolveAccountId({
user,
accessToken,
accountId: typeof entry.accountId === 'string' ? entry.accountId : '',
});
return {
accessToken,
scope: typeof entry.scope === 'string' ? entry.scope : '',
tokenType: typeof entry.tokenType === 'string' ? entry.tokenType : 'bearer',
createdAt: typeof entry.createdAt === 'number' ? entry.createdAt : null,
user,
current: Boolean(entry.current),
accountId,
};
}
function normalizeAuthList(raw) {
const list = (Array.isArray(raw) ? raw : [raw])
.map((entry) => normalizeAuthEntry(entry))
.filter(Boolean);
if (!list.length) {
return { list: [], changed: false };
}
let changed = false;
let currentFound = false;
list.forEach((entry) => {
if (entry.current && !currentFound) {
currentFound = true;
} else if (entry.current && currentFound) {
entry.current = false;
changed = true;
}
});
if (!currentFound && list[0]) {
list[0].current = true;
changed = true;
}
list.forEach((entry) => {
if (!entry.accountId) {
entry.accountId = resolveAccountId(entry);
changed = true;
}
});
return { list, changed };
}
function readAuthList() {
const data = readJsonFile();
if (!data) {
return [];
}
const { list, changed } = normalizeAuthList(data);
if (changed) {
writeJsonFile(list);
}
return list;
}
function writeAuthList(list) {
writeJsonFile(list);
}
export function getGitHubAuth() {
const list = readAuthList();
if (!list.length) {
return null;
}
const current = list.find((entry) => entry.current) || list[0];
if (!current?.accessToken) {
return null;
}
return current;
}
export function getGitHubAuthAccounts() {
const list = readAuthList();
return list
.filter((entry) => entry?.user && entry.accountId)
.map((entry) => ({
id: entry.accountId,
user: entry.user,
scope: entry.scope || '',
current: Boolean(entry.current),
}));
}
export function setGitHubAuth({ accessToken, scope, tokenType, user, accountId }) {
if (!accessToken || typeof accessToken !== 'string') {
throw new Error('accessToken is required');
}
const normalizedUser = user && typeof user === 'object'
? {
login: typeof user.login === 'string' ? user.login : undefined,
avatarUrl: typeof user.avatarUrl === 'string' ? user.avatarUrl : undefined,
id: typeof user.id === 'number' ? user.id : undefined,
name: typeof user.name === 'string' ? user.name : undefined,
email: typeof user.email === 'string' ? user.email : undefined,
}
: undefined;
const resolvedAccountId = resolveAccountId({
user: normalizedUser,
accessToken,
accountId,
});
const list = readAuthList();
const existingIndex = list.findIndex((entry) => entry.accountId === resolvedAccountId);
const nextEntry = {
accessToken,
scope: typeof scope === 'string' ? scope : '',
tokenType: typeof tokenType === 'string' ? tokenType : 'bearer',
createdAt: Date.now(),
user: normalizedUser || null,
current: true,
accountId: resolvedAccountId,
};
if (existingIndex >= 0) {
list[existingIndex] = nextEntry;
} else {
list.push(nextEntry);
}
list.forEach((entry, index) => {
entry.current = index === (existingIndex >= 0 ? existingIndex : list.length - 1);
});
writeAuthList(list);
return nextEntry;
}
export function activateGitHubAuth(accountId) {
if (typeof accountId !== 'string' || !accountId.trim()) {
return false;
}
const list = readAuthList();
const index = list.findIndex((entry) => entry.accountId === accountId.trim());
if (index === -1) {
return false;
}
list.forEach((entry, idx) => {
entry.current = idx === index;
});
writeAuthList(list);
return true;
}
export function clearGitHubAuth() {
try {
const list = readAuthList();
if (!list.length) {
return true;
}
const remaining = list.filter((entry) => !entry.current);
if (!remaining.length) {
if (fs.existsSync(STORAGE_FILE)) {
fs.unlinkSync(STORAGE_FILE);
}
return true;
}
remaining.forEach((entry, index) => {
entry.current = index === 0;
});
writeAuthList(remaining);
return true;
} catch (error) {
console.error('Failed to clear GitHub auth file:', error);
return false;
}
}
export function getGitHubClientId() {
const raw = process.env.OPENCHAMBER_GITHUB_CLIENT_ID;
const clientId = typeof raw === 'string' ? raw.trim() : '';
if (clientId) return clientId;
try {
if (fs.existsSync(SETTINGS_FILE)) {
const parsed = JSON.parse(fs.readFileSync(SETTINGS_FILE, 'utf8'));
const stored = typeof parsed?.githubClientId === 'string' ? parsed.githubClientId.trim() : '';
if (stored) return stored;
}
} catch {
// ignore
}
return DEFAULT_GITHUB_CLIENT_ID;
}
export function getGitHubScopes() {
const raw = process.env.OPENCHAMBER_GITHUB_SCOPES;
const fromEnv = typeof raw === 'string' ? raw.trim() : '';
if (fromEnv) return fromEnv;
try {
if (fs.existsSync(SETTINGS_FILE)) {
const parsed = JSON.parse(fs.readFileSync(SETTINGS_FILE, 'utf8'));
const stored = typeof parsed?.githubScopes === 'string' ? parsed.githubScopes.trim() : '';
if (stored) return stored;
}
} catch {
// ignore
}
return DEFAULT_GITHUB_SCOPES;
}
export const GITHUB_AUTH_FILE = STORAGE_FILE;

View File

@@ -0,0 +1,50 @@
const DEVICE_CODE_URL = 'https://github.com/login/device/code';
const ACCESS_TOKEN_URL = 'https://github.com/login/oauth/access_token';
const DEVICE_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:device_code';
const encodeForm = (params) => {
const body = new URLSearchParams();
for (const [key, value] of Object.entries(params)) {
if (value == null) continue;
body.set(key, String(value));
}
return body.toString();
};
async function postForm(url, params) {
const response = await fetch(url, {
method: 'POST',
headers: {
'Content-Type': 'application/x-www-form-urlencoded',
Accept: 'application/json',
},
body: encodeForm(params),
});
const payload = await response.json().catch(() => null);
if (!response.ok) {
const message = payload?.error_description || payload?.error || response.statusText;
const error = new Error(message || 'GitHub request failed');
error.status = response.status;
error.payload = payload;
throw error;
}
return payload;
}
export async function startDeviceFlow({ clientId, scope }) {
return postForm(DEVICE_CODE_URL, {
client_id: clientId,
scope,
});
}
export async function exchangeDeviceCode({ clientId, deviceCode }) {
// GitHub returns 200 with {error: 'authorization_pending'|...} for non-success states.
const payload = await postForm(ACCESS_TOKEN_URL, {
client_id: clientId,
device_code: deviceCode,
grant_type: DEVICE_GRANT_TYPE,
});
return payload;
}

View File

@@ -0,0 +1,24 @@
export {
getGitHubAuth,
getGitHubAuthAccounts,
setGitHubAuth,
activateGitHubAuth,
clearGitHubAuth,
getGitHubClientId,
getGitHubScopes,
GITHUB_AUTH_FILE,
} from './auth.js';
export {
startDeviceFlow,
exchangeDeviceCode,
} from './device-flow.js';
export {
getOctokitOrNull,
} from './octokit.js';
export {
parseGitHubRemoteUrl,
resolveGitHubRepoFromDirectory,
} from './repo/index.js';

View File

@@ -0,0 +1,10 @@
import { Octokit } from '@octokit/rest';
import { getGitHubAuth } from './auth.js';
export function getOctokitOrNull() {
const auth = getGitHubAuth();
if (!auth?.accessToken) {
return null;
}
return new Octokit({ auth: auth.accessToken });
}

View File

@@ -0,0 +1,478 @@
import { getRemotes, getStatus } from '../git/index.js';
import { resolveGitHubRepoFromDirectory } from './repo/index.js';
const REPO_DEFAULT_BRANCH_TTL_MS = 5 * 60_000;
const defaultBranchCache = new Map();
const repoMetadataCache = new Map();
const normalizeText = (value) => typeof value === 'string' ? value.trim() : '';
const normalizeLower = (value) => normalizeText(value).toLowerCase();
const normalizeRepoKey = (owner, repo) => {
const normalizedOwner = normalizeLower(owner);
const normalizedRepo = normalizeLower(repo);
if (!normalizedOwner || !normalizedRepo) {
return '';
}
return `${normalizedOwner}/${normalizedRepo}`;
};
const parseTrackingRemoteName = (trackingBranch) => {
const normalized = normalizeText(trackingBranch);
if (!normalized) {
return '';
}
const slashIndex = normalized.indexOf('/');
if (slashIndex <= 0) {
return '';
}
return normalized.slice(0, slashIndex).trim();
};
const pushUnique = (collection, value, keyFn = normalizeLower) => {
const normalizedValue = normalizeText(value);
if (!normalizedValue) {
return;
}
const nextKey = keyFn(normalizedValue);
if (!nextKey) {
return;
}
if (collection.some((item) => keyFn(item) === nextKey)) {
return;
}
collection.push(normalizedValue);
};
const rankRemoteNames = (remoteNames, explicitRemoteName, trackingRemoteName) => {
const ranked = [];
pushUnique(ranked, explicitRemoteName);
if (trackingRemoteName) {
pushUnique(ranked, trackingRemoteName);
}
pushUnique(ranked, 'origin');
pushUnique(ranked, 'upstream');
remoteNames.forEach((name) => pushUnique(ranked, name));
return ranked;
};
const getHeadOwner = (pr) => {
const repoOwner = normalizeText(pr?.head?.repo?.owner?.login);
if (repoOwner) {
return repoOwner;
}
const userOwner = normalizeText(pr?.head?.user?.login);
if (userOwner) {
return userOwner;
}
const headLabel = normalizeText(pr?.head?.label);
const separatorIndex = headLabel.indexOf(':');
if (separatorIndex > 0) {
return headLabel.slice(0, separatorIndex).trim();
}
return '';
};
const getHeadRepoKey = (pr, fallbackRepoName) => {
const repoOwner = normalizeText(pr?.head?.repo?.owner?.login);
const repoName = normalizeText(pr?.head?.repo?.name);
if (repoOwner && repoName) {
return normalizeRepoKey(repoOwner, repoName);
}
const headLabel = normalizeText(pr?.head?.label);
const separatorIndex = headLabel.indexOf(':');
if (separatorIndex > 0) {
const labelOwner = headLabel.slice(0, separatorIndex).trim();
if (labelOwner && fallbackRepoName) {
return normalizeRepoKey(labelOwner, fallbackRepoName);
}
}
return '';
};
const buildSourceMatcher = (sourceCandidates) => {
const repoRank = new Map();
const ownerRank = new Map();
sourceCandidates.forEach((candidate, index) => {
const repoKey = normalizeRepoKey(candidate.repo?.owner, candidate.repo?.repo);
if (repoKey && !repoRank.has(repoKey)) {
repoRank.set(repoKey, index);
}
const owner = normalizeLower(candidate.repo?.owner);
if (owner && !ownerRank.has(owner)) {
ownerRank.set(owner, index);
}
});
const matches = (pr, fallbackRepoName) => {
const repoKey = getHeadRepoKey(pr, fallbackRepoName);
if (repoKey && repoRank.has(repoKey)) {
return true;
}
const owner = normalizeLower(getHeadOwner(pr));
return Boolean(owner) && ownerRank.has(owner);
};
const compare = (left, right, fallbackRepoName) => {
const leftRepoRank = repoRank.get(getHeadRepoKey(left, fallbackRepoName));
const rightRepoRank = repoRank.get(getHeadRepoKey(right, fallbackRepoName));
const leftRepoScore = typeof leftRepoRank === 'number' ? leftRepoRank : Number.POSITIVE_INFINITY;
const rightRepoScore = typeof rightRepoRank === 'number' ? rightRepoRank : Number.POSITIVE_INFINITY;
if (leftRepoScore !== rightRepoScore) {
return leftRepoScore - rightRepoScore;
}
const leftOwnerRank = ownerRank.get(normalizeLower(getHeadOwner(left)));
const rightOwnerRank = ownerRank.get(normalizeLower(getHeadOwner(right)));
const leftOwnerScore = typeof leftOwnerRank === 'number' ? leftOwnerRank : Number.POSITIVE_INFINITY;
const rightOwnerScore = typeof rightOwnerRank === 'number' ? rightOwnerRank : Number.POSITIVE_INFINITY;
if (leftOwnerScore !== rightOwnerScore) {
return leftOwnerScore - rightOwnerScore;
}
return 0;
};
return { matches, compare };
};
const getRepoDefaultBranch = async (octokit, repo) => {
const repoKey = normalizeRepoKey(repo?.owner, repo?.repo);
if (!repoKey) {
return null;
}
const cached = defaultBranchCache.get(repoKey);
if (cached && Date.now() - cached.fetchedAt < REPO_DEFAULT_BRANCH_TTL_MS) {
return cached.defaultBranch;
}
try {
const response = await octokit.rest.repos.get({
owner: repo.owner,
repo: repo.repo,
});
const defaultBranch = normalizeText(response?.data?.default_branch) || null;
defaultBranchCache.set(repoKey, {
defaultBranch,
fetchedAt: Date.now(),
});
return defaultBranch;
} catch {
return null;
}
};
const getRepoMetadata = async (octokit, repo) => {
const repoKey = normalizeRepoKey(repo?.owner, repo?.repo);
if (!repoKey) {
return null;
}
const cached = repoMetadataCache.get(repoKey);
if (cached && Date.now() - cached.fetchedAt < REPO_DEFAULT_BRANCH_TTL_MS) {
return cached.data;
}
try {
const response = await octokit.rest.repos.get({
owner: repo.owner,
repo: repo.repo,
});
const data = response?.data ?? null;
repoMetadataCache.set(repoKey, {
data,
fetchedAt: Date.now(),
});
return data;
} catch (error) {
if (error?.status === 403 || error?.status === 404) {
repoMetadataCache.set(repoKey, {
data: null,
fetchedAt: Date.now(),
});
return null;
}
throw error;
}
};
const resolveRemoteCandidates = async (directory, rankedRemoteNames) => {
const results = [];
const seenRepoKeys = new Set();
for (const remoteName of rankedRemoteNames) {
const resolved = await resolveGitHubRepoFromDirectory(directory, remoteName).catch(() => ({ repo: null }));
const repo = resolved?.repo || null;
const repoKey = normalizeRepoKey(repo?.owner, repo?.repo);
if (!repo || !repoKey || seenRepoKeys.has(repoKey)) {
continue;
}
seenRepoKeys.add(repoKey);
results.push({
remoteName,
repo,
});
}
return results;
};
const expandRepoNetwork = async (octokit, candidates) => {
const expanded = [];
const seenRepoKeys = new Set();
const pushCandidate = (repo, remoteName, priority) => {
const repoKey = normalizeRepoKey(repo?.owner, repo?.repo);
if (!repoKey || seenRepoKeys.has(repoKey)) {
return;
}
seenRepoKeys.add(repoKey);
expanded.push({ repo, remoteName, priority });
};
for (const candidate of candidates) {
const metadata = await getRepoMetadata(octokit, candidate.repo);
if (!metadata) {
continue;
}
pushCandidate(candidate.repo, candidate.remoteName, candidate.priority);
const parent = metadata?.parent;
if (parent?.owner?.login && parent?.name) {
pushCandidate({
owner: parent.owner.login,
repo: parent.name,
url: parent.html_url || `https://github.com/${parent.owner.login}/${parent.name}`,
}, candidate.remoteName, candidate.priority + 0.1);
}
const source = metadata?.source;
if (source?.owner?.login && source?.name) {
pushCandidate({
owner: source.owner.login,
repo: source.name,
url: source.html_url || `https://github.com/${source.owner.login}/${source.name}`,
}, candidate.remoteName, candidate.priority + 0.2);
}
}
return expanded.sort((left, right) => left.priority - right.priority);
};
const safeListPulls = async (octokit, options) => {
try {
const response = await octokit.rest.pulls.list(options);
return Array.isArray(response?.data) ? response.data : [];
} catch (error) {
if (error?.status === 404 || error?.status === 403) {
return [];
}
throw error;
}
};
const parseRepoFromApiUrl = (value) => {
const normalized = normalizeText(value);
if (!normalized) {
return null;
}
try {
const url = new URL(normalized);
const parts = url.pathname.replace(/^\/+/, '').split('/').filter(Boolean);
if (parts.length < 2 || parts[0] !== 'repos') {
return null;
}
const owner = parts[1];
const repo = parts[2];
if (!owner || !repo) {
return null;
}
return { owner, repo };
} catch {
return null;
}
};
const searchFallbackPr = async ({ octokit, branch, repoNames }) => {
const normalizedRepoNames = new Set(repoNames.map((name) => normalizeLower(name)).filter(Boolean));
for (const state of ['open', 'closed']) {
let response;
try {
response = await octokit.rest.search.issuesAndPullRequests({
q: `is:pr state:${state} head:${branch}`,
per_page: 20,
});
} catch (error) {
if (error?.status === 403 || error?.status === 404) {
continue;
}
throw error;
}
const items = Array.isArray(response?.data?.items) ? response.data.items : [];
for (const item of items) {
const repo = parseRepoFromApiUrl(item?.repository_url);
if (!repo) {
continue;
}
if (normalizedRepoNames.size > 0 && !normalizedRepoNames.has(normalizeLower(repo.repo))) {
continue;
}
try {
const prResponse = await octokit.rest.pulls.get({
owner: repo.owner,
repo: repo.repo,
pull_number: item.number,
});
const pr = prResponse?.data;
if (!pr || normalizeText(pr.head?.ref) !== branch) {
continue;
}
return {
repo: {
owner: repo.owner,
repo: repo.repo,
url: `https://github.com/${repo.owner}/${repo.repo}`,
},
pr,
};
} catch (error) {
if (error?.status === 403 || error?.status === 404) {
continue;
}
throw error;
}
}
}
return null;
};
const findFirstMatchingPr = async ({ octokit, target, branch, sourceCandidates }) => {
const matcher = buildSourceMatcher(sourceCandidates);
const sourceOwners = [];
sourceCandidates.forEach((candidate) => pushUnique(sourceOwners, candidate.repo?.owner));
const pickPreferred = (prs) => prs
.filter((pr) => normalizeText(pr?.head?.ref) === branch)
.filter((pr) => matcher.matches(pr, target.repo.repo))
.sort((left, right) => matcher.compare(left, right, target.repo.repo))[0] ?? null;
for (const state of ['open', 'closed']) {
for (const owner of sourceOwners) {
const directCandidates = await safeListPulls(octokit, {
owner: target.repo.owner,
repo: target.repo.repo,
state,
head: `${owner}:${branch}`,
per_page: 100,
});
const direct = pickPreferred(directCandidates);
if (direct) {
return direct;
}
}
const fallbackCandidates = await safeListPulls(octokit, {
owner: target.repo.owner,
repo: target.repo.repo,
state,
per_page: 100,
});
const fallback = pickPreferred(fallbackCandidates);
if (fallback) {
return fallback;
}
}
return null;
};
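/**
 * Resolve PR status for a local branch: rank candidate remotes, expand the fork
 * network (parent/source repos), probe each candidate repo for a PR whose head
 * matches the branch, and finally fall back to the GitHub search API.
 */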
export async function resolveGitHubPrStatus({ octokit, directory, branch, remoteName }) {
const normalizedBranch = normalizeText(branch);
const normalizedRemoteName = normalizeText(remoteName) || 'origin';
const [status, remotes] = await Promise.all([
getStatus(directory).catch(() => null),
getRemotes(directory).catch(() => []),
]);
const trackingRemoteName = parseTrackingRemoteName(status?.tracking);
const rankedRemoteNames = rankRemoteNames(
Array.isArray(remotes) ? remotes.map((remote) => remote?.name).filter(Boolean) : [],
normalizedRemoteName,
trackingRemoteName,
);
const resolvedRemoteTargets = await resolveRemoteCandidates(directory, rankedRemoteNames.slice(0, 3));
const resolvedTargets = await expandRepoNetwork(
octokit,
resolvedRemoteTargets.map((target, index) => ({ ...target, priority: index })),
);
if (resolvedTargets.length === 0) {
return {
repo: null,
pr: null,
defaultBranch: null,
resolvedRemoteName: null,
};
}
const sourceCandidates = resolvedTargets.slice();
let fallbackRepo = resolvedTargets[0].repo;
let fallbackRemoteName = resolvedTargets[0].remoteName;
let fallbackDefaultBranch = await getRepoDefaultBranch(octokit, fallbackRepo);
for (const target of resolvedTargets) {
const defaultBranch = await getRepoDefaultBranch(octokit, target.repo);
if (!fallbackRepo) {
fallbackRepo = target.repo;
fallbackRemoteName = target.remoteName;
fallbackDefaultBranch = defaultBranch;
}
if (defaultBranch && defaultBranch === normalizedBranch) {
continue;
}
const pr = await findFirstMatchingPr({
octokit,
target,
branch: normalizedBranch,
sourceCandidates,
});
if (pr) {
return {
repo: target.repo,
pr,
defaultBranch,
resolvedRemoteName: target.remoteName,
};
}
}
const fallbackSearch = await searchFallbackPr({
octokit,
branch: normalizedBranch,
repoNames: resolvedTargets.map((target) => target.repo.repo),
});
if (fallbackSearch) {
return {
repo: fallbackSearch.repo,
pr: fallbackSearch.pr,
defaultBranch: await getRepoDefaultBranch(octokit, fallbackSearch.repo),
resolvedRemoteName: null,
};
}
return {
repo: fallbackRepo,
pr: null,
defaultBranch: fallbackDefaultBranch,
resolvedRemoteName: fallbackRemoteName,
};
}

View File

@@ -0,0 +1,55 @@
import { getRemoteUrl } from '../../git/index.js';
export const parseGitHubRemoteUrl = (raw) => {
if (typeof raw !== 'string') {
return null;
}
const value = raw.trim();
if (!value) {
return null;
}
// git@github.com:OWNER/REPO.git
if (value.startsWith('git@github.com:')) {
const rest = value.slice('git@github.com:'.length);
const cleaned = rest.endsWith('.git') ? rest.slice(0, -4) : rest;
const [owner, repo] = cleaned.split('/');
if (!owner || !repo) return null;
return { owner, repo, url: `https://github.com/${owner}/${repo}` };
}
// ssh://git@github.com/OWNER/REPO.git
if (value.startsWith('ssh://git@github.com/')) {
const rest = value.slice('ssh://git@github.com/'.length);
const cleaned = rest.endsWith('.git') ? rest.slice(0, -4) : rest;
const [owner, repo] = cleaned.split('/');
if (!owner || !repo) return null;
return { owner, repo, url: `https://github.com/${owner}/${repo}` };
}
// https://github.com/OWNER/REPO(.git)
try {
const url = new URL(value);
if (url.hostname !== 'github.com') {
return null;
}
const path = url.pathname.replace(/^\/+/, '').replace(/\/+$/, '');
const cleaned = path.endsWith('.git') ? path.slice(0, -4) : path;
const [owner, repo] = cleaned.split('/');
if (!owner || !repo) return null;
return { owner, repo, url: `https://github.com/${owner}/${repo}` };
} catch {
return null;
}
};
export async function resolveGitHubRepoFromDirectory(directory, remoteName = 'origin') {
const remoteUrl = await getRemoteUrl(directory, remoteName).catch(() => null);
if (!remoteUrl) {
return { repo: null, remoteUrl: null };
}
return {
repo: parseGitHubRemoteUrl(remoteUrl),
remoteUrl,
};
}

View File

@@ -0,0 +1,61 @@
# Notifications Module Documentation
## Purpose
This module provides notification message preparation utilities for the web server runtime, including text truncation and optional message summarization for system notifications.
## Entrypoints and structure
- `packages/web/server/lib/notifications/index.js`: public entrypoint imported by `packages/web/server/index.js`.
- `packages/web/server/lib/notifications/message.js`: helper implementation module.
- `packages/web/server/lib/notifications/message.test.js`: unit tests for notification message helpers.
## Public exports
### Notifications API (re-exported from message.js)
- `truncateNotificationText(text, maxLength)`: Truncates text to the specified max length, appending `...` if truncated.
- `prepareNotificationLastMessage({ message, settings, summarize })`: Prepares the last message for notification display, with optional summarization support.
## Constants
### Default values
- `DEFAULT_NOTIFICATION_MESSAGE_MAX_LENGTH`: 250 (default max length for notification text).
- `DEFAULT_NOTIFICATION_SUMMARY_THRESHOLD`: 200 (minimum message length to trigger summarization).
- `DEFAULT_NOTIFICATION_SUMMARY_LENGTH`: 100 (target length for summarized messages).
## Settings object format
The `settings` parameter for `prepareNotificationLastMessage` supports:
- `summarizeLastMessage` (boolean): Whether to enable summarization for long messages.
- `summaryThreshold` (number): Minimum message length to trigger summarization (default: 200).
- `summaryLength` (number): Target length for summarized messages (default: 100).
- `maxLastMessageLength` (number): Maximum length for the final notification text (default: 250).
## Response contracts
### `truncateNotificationText`
- Returns an empty string for non-string input.
- Returns the original text when its length is at or under the max length.
- Returns `${text.slice(0, maxLength)}...` for truncated text.
### `prepareNotificationLastMessage`
- Returns an empty string for an empty or null message.
- Returns the truncated original message when summarization is disabled, the message is at or under the threshold, or summarization fails.
- Returns the truncated summary when summarization succeeds and yields a non-empty string.
- Always applies `maxLastMessageLength` truncation to final result.
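A minimal usage sketch (the relative import and the placeholder `summarize` callback are illustrative, not part of this module):
```js
import { prepareNotificationLastMessage } from './message.js';

const message = 'A long assistant reply. '.repeat(20);
const text = await prepareNotificationLastMessage({
  message,
  settings: { summarizeLastMessage: true, summaryThreshold: 200, summaryLength: 100, maxLastMessageLength: 250 },
  // Placeholder summarizer; a real caller would pass an LLM-backed function.
  summarize: async (input, targetLength) => input.slice(0, targetLength),
});
// `text` is at most 250 characters and ends with '...' when truncated.
```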
## Notes for contributors
### Adding new notification helpers
1. Add new helper functions to `packages/web/server/lib/notifications/message.js`.
2. Export functions that are intended for public use.
3. Follow existing patterns for input validation (e.g., type checking for strings).
4. Use `resolvePositiveNumber` for numeric parameters with fallbacks to maintain safe defaults.
5. Add corresponding unit tests in `packages/web/server/lib/notifications/message.test.js`.
### Error handling
- `prepareNotificationLastMessage` catches summarization errors and falls back to original message.
- Invalid numeric parameters default to safe fallback values.
- Non-string inputs are handled gracefully (return empty string).
### Testing
- Run `bun run type-check`, `bun run lint`, and `bun run build` before finalizing changes.
- Unit tests should cover truncation behavior, summarization success/failure, and edge cases (empty strings, invalid inputs).

View File

@@ -0,0 +1 @@
export { truncateNotificationText, prepareNotificationLastMessage } from './message.js';

View File

@@ -0,0 +1,49 @@
const DEFAULT_NOTIFICATION_MESSAGE_MAX_LENGTH = 250;
const DEFAULT_NOTIFICATION_SUMMARY_THRESHOLD = 200;
const DEFAULT_NOTIFICATION_SUMMARY_LENGTH = 100;
const resolvePositiveNumber = (value, fallback) => {
if (typeof value !== 'number' || !Number.isFinite(value) || value <= 0) {
return fallback;
}
return value;
};
export const truncateNotificationText = (text, maxLength = DEFAULT_NOTIFICATION_MESSAGE_MAX_LENGTH) => {
if (typeof text !== 'string') {
return '';
}
const safeMaxLength = resolvePositiveNumber(maxLength, DEFAULT_NOTIFICATION_MESSAGE_MAX_LENGTH);
if (text.length <= safeMaxLength) {
return text;
}
return `${text.slice(0, safeMaxLength)}...`;
};
export const prepareNotificationLastMessage = async ({ message, settings, summarize }) => {
const originalMessage = typeof message === 'string' ? message : '';
if (!originalMessage) {
return '';
}
const shouldSummarize = settings?.summarizeLastMessage === true && typeof summarize === 'function';
const summaryThreshold = resolvePositiveNumber(settings?.summaryThreshold, DEFAULT_NOTIFICATION_SUMMARY_THRESHOLD);
const summaryLength = resolvePositiveNumber(settings?.summaryLength, DEFAULT_NOTIFICATION_SUMMARY_LENGTH);
const maxLastMessageLength = resolvePositiveNumber(settings?.maxLastMessageLength, DEFAULT_NOTIFICATION_MESSAGE_MAX_LENGTH);
let messageForNotification = originalMessage;
if (shouldSummarize && originalMessage.length > summaryThreshold) {
try {
const summary = await summarize(originalMessage, summaryLength);
if (typeof summary === 'string' && summary.trim().length > 0) {
messageForNotification = summary;
}
} catch {
messageForNotification = originalMessage;
}
}
return truncateNotificationText(messageForNotification, maxLastMessageLength);
};

View File

@@ -0,0 +1,59 @@
import { describe, expect, it } from 'bun:test';
import { prepareNotificationLastMessage, truncateNotificationText } from './message.js';
describe('notification message helpers', () => {
it('truncates oversized notification text', () => {
expect(truncateNotificationText('abcdef', 3)).toBe('abc...');
});
it('falls back to original message when summarization fails', async () => {
const message = '0123456789';
const summarize = async () => {
throw new Error('summarization failed');
};
const result = await prepareNotificationLastMessage({
message,
summarize,
settings: {
summarizeLastMessage: true,
summaryThreshold: 5,
summaryLength: 3,
maxLastMessageLength: 4,
},
});
expect(result).toBe('0123...');
});
it('falls back to original message when summary is empty', async () => {
const result = await prepareNotificationLastMessage({
message: '0123456789',
summarize: async () => ' ',
settings: {
summarizeLastMessage: true,
summaryThreshold: 5,
summaryLength: 3,
maxLastMessageLength: 4,
},
});
expect(result).toBe('0123...');
});
it('uses summary when summarization succeeds', async () => {
const result = await prepareNotificationLastMessage({
message: '0123456789',
summarize: async () => 'short summary',
settings: {
summarizeLastMessage: true,
summaryThreshold: 5,
summaryLength: 3,
maxLastMessageLength: 100,
},
});
expect(result).toBe('short summary');
});
});

View File

@@ -0,0 +1,58 @@
# OpenCode Module Documentation
## Purpose
This module provides OpenCode server integration utilities for the web server runtime, including configuration management, provider authentication, and UI authentication with rate limiting.
## Entrypoints and structure
- `packages/web/server/lib/opencode/index.js`: public entrypoint (currently a baseline placeholder).
- `packages/web/server/lib/opencode/auth.js`: provider authentication file operations.
- `packages/web/server/lib/opencode/shared.js`: shared utilities for config, markdown, skills, and git helpers.
- `packages/web/server/lib/opencode/ui-auth.js`: UI session authentication with rate limiting.
## Public exports (auth.js)
- `readAuthFile()`: Reads and parses `~/.local/share/opencode/auth.json`.
- `writeAuthFile(auth)`: Writes auth file with automatic backup.
- `removeProviderAuth(providerId)`: Removes a provider's auth entry.
- `getProviderAuth(providerId)`: Returns auth for a specific provider or null.
- `listProviderAuths()`: Returns list of provider IDs with configured auth.
- `AUTH_FILE`: Auth file path constant.
- `OPENCODE_DATA_DIR`: OpenCode data directory path constant.
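A minimal inspection sketch (the relative import and the commented-out provider id are illustrative):
```js
import { listProviderAuths, getProviderAuth, removeProviderAuth } from './auth.js';

for (const providerId of listProviderAuths()) {
  console.log(providerId, getProviderAuth(providerId) ? 'configured' : 'missing');
}
// removeProviderAuth('some-provider'); // would back up auth.json, then drop that entry
```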
## Public exports (shared.js)
- `OPENCODE_CONFIG_DIR`, `AGENT_DIR`, `COMMAND_DIR`, `SKILL_DIR`, `CONFIG_FILE`, `CUSTOM_CONFIG_FILE`: Path constants.
- `AGENT_SCOPE`, `COMMAND_SCOPE`, `SKILL_SCOPE`: Scope constants with USER and PROJECT values.
- `ensureDirs()`: Creates required OpenCode directories.
- `parseMdFile(filePath)`, `writeMdFile(filePath, frontmatter, body)`: Markdown file operations with YAML frontmatter.
- `getConfigPaths(workingDirectory)`, `readConfigLayers(workingDirectory)`, `readConfig(workingDirectory)`: Config file operations with layer merging (user, project, custom).
- `writeConfig(config, filePath)`: Writes config with automatic backup.
- `getJsonEntrySource(layers, sectionKey, entryName)`: Resolves which config layer provides an entry.
- `getJsonWriteTarget(layers, preferredScope)`: Determines write target for config updates.
- `getAncestors(startDir, stopDir)`, `findWorktreeRoot(startDir)`: Git worktree helpers.
- `isPromptFileReference(value)`, `resolvePromptFilePath(reference)`, `writePromptFile(filePath, content)`: Prompt file reference handling.
- `walkSkillMdFiles(rootDir)`: Recursively finds all SKILL.md files.
- `addSkillFromMdFile(skillsMap, skillMdPath, scope, source)`: Parses and indexes a skill file.
- `resolveSkillSearchDirectories(workingDirectory)`: Returns skill search path order (config, project, home, custom).
- `listSkillSupportingFiles(skillDir)`, `readSkillSupportingFile(skillDir, relativePath)`, `writeSkillSupportingFile(skillDir, relativePath, content)`, `deleteSkillSupportingFile(skillDir, relativePath)`: Skill supporting file management.
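A minimal sketch of the layered config helpers (the project path is a placeholder):
```js
import { readConfigLayers, getJsonWriteTarget, AGENT_SCOPE } from './shared.js';

const layers = readConfigLayers('/path/to/project');
// Merge priority when reading: custom > project > user.
const target = getJsonWriteTarget(layers, AGENT_SCOPE.PROJECT);
console.log(target.path); // file a project-scoped JSON update would be written to
```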
## Public exports (ui-auth.js)
- `createUiAuth({ password, cookieName, sessionTtlMs })`: Creates a UI auth instance exposing:
- `enabled`: Boolean indicating if auth is configured.
- `requireAuth(req, res, next)`: Express middleware to enforce authentication.
- `handleSessionStatus(req, res)`: Returns authentication status.
- `handleSessionCreate(req, res)`: Handles login with rate limiting.
- `ensureSessionToken(req, res)`: Returns or creates session token.
- `dispose()`: Cleans up timers and state.
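A minimal Express wiring sketch (route paths, cookie name, and the password env var are assumptions for illustration, not part of this module):
```js
import express from 'express';
import { createUiAuth } from './ui-auth.js';

const app = express();
app.use(express.json());

const uiAuth = createUiAuth({
  password: process.env.OPENCHAMBER_UI_PASSWORD, // assumed env var for illustration
  cookieName: 'openchamber_session',
  sessionTtlMs: 24 * 60 * 60 * 1000,
});

// Session endpoints stay reachable without auth so the login flow can run.
app.get('/api/session', (req, res) => uiAuth.handleSessionStatus(req, res));
app.post('/api/session', (req, res) => uiAuth.handleSessionCreate(req, res));
if (uiAuth.enabled) {
  app.use('/api', (req, res, next) => uiAuth.requireAuth(req, res, next));
}
```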
## Storage and configuration
- Provider auth: `~/.local/share/opencode/auth.json`.
- User config: `~/.config/opencode/opencode.json`.
- Project config: `<workingDirectory>/.opencode/opencode.json` or `opencode.json`.
- Custom config: `OPENCODE_CONFIG` env var path.
- Rate limit config: `OPENCHAMBER_RATE_LIMIT_MAX_ATTEMPTS`, `OPENCHAMBER_RATE_LIMIT_NO_IP_MAX_ATTEMPTS` env vars.
## Notes for contributors
- This module serves as the foundation for OpenCode-related server utilities.
- `index.js` is currently a baseline placeholder; consumers import the submodule paths directly.
- All file writes include automatic backup before modification.
- Config merging follows priority: custom > project > user.
- UI auth uses scrypt for password hashing with constant-time comparison.

View File

@@ -0,0 +1,634 @@
import fs from 'fs';
import path from 'path';
import {
CONFIG_FILE,
AGENT_DIR,
AGENT_SCOPE,
ensureDirs,
parseMdFile,
writeMdFile,
readConfigLayers,
readConfigFile,
writeConfig,
getJsonEntrySource,
getJsonWriteTarget,
isPromptFileReference,
resolvePromptFilePath,
writePromptFile,
} from './shared.js';
// ============== AGENT SCOPE HELPERS ==============
/**
* Ensure project-level agent directory exists
*/
function ensureProjectAgentDir(workingDirectory) {
const projectAgentDir = path.join(workingDirectory, '.opencode', 'agents');
if (!fs.existsSync(projectAgentDir)) {
fs.mkdirSync(projectAgentDir, { recursive: true });
}
const legacyProjectAgentDir = path.join(workingDirectory, '.opencode', 'agent');
if (!fs.existsSync(legacyProjectAgentDir)) {
fs.mkdirSync(legacyProjectAgentDir, { recursive: true });
}
return projectAgentDir;
}
/**
* Get project-level agent path
*/
function getProjectAgentPath(workingDirectory, agentName) {
const pluralPath = path.join(workingDirectory, '.opencode', 'agents', `${agentName}.md`);
const legacyPath = path.join(workingDirectory, '.opencode', 'agent', `${agentName}.md`);
if (fs.existsSync(legacyPath) && !fs.existsSync(pluralPath)) return legacyPath;
return pluralPath;
}
/**
* Create a per-request lookup cache for user-level agent path resolution.
*/
function createAgentLookupCache() {
return {
userAgentIndexByName: new Map(),
userAgentLookupByName: new Map(),
userAgentIndexReady: false,
};
}
function buildUserAgentIndex(cache) {
if (cache.userAgentIndexReady) return;
cache.userAgentIndexReady = true;
if (!fs.existsSync(AGENT_DIR)) return;
const dirsToVisit = [AGENT_DIR];
while (dirsToVisit.length > 0) {
const dir = dirsToVisit.pop();
let entries;
try {
entries = fs.readdirSync(dir, { withFileTypes: true });
} catch {
continue;
}
entries.sort((a, b) => a.name.localeCompare(b.name));
for (const entry of entries) {
if (!entry.isFile() || !entry.name.endsWith('.md')) continue;
const agentName = entry.name.slice(0, -3);
if (!cache.userAgentIndexByName.has(agentName)) {
cache.userAgentIndexByName.set(agentName, path.join(dir, entry.name));
}
}
for (let i = entries.length - 1; i >= 0; i -= 1) {
const entry = entries[i];
if (entry.isDirectory()) {
dirsToVisit.push(path.join(dir, entry.name));
}
}
}
}
function getIndexedUserAgentPath(agentName, cache) {
if (cache.userAgentLookupByName.has(agentName)) {
return cache.userAgentLookupByName.get(agentName);
}
buildUserAgentIndex(cache);
const found = cache.userAgentIndexByName.get(agentName) || null;
cache.userAgentLookupByName.set(agentName, found);
return found;
}
/**
* Get user-level agent path — walks subfolders to support grouped layouts.
* e.g. ~/.config/opencode/agents/business/ceo-diginno.md
*/
function getUserAgentPath(agentName, lookupCache = null) {
// 1. Check flat path first (legacy / newly created agents)
const pluralPath = path.join(AGENT_DIR, `${agentName}.md`);
if (fs.existsSync(pluralPath)) return pluralPath;
const legacyPath = path.join(AGENT_DIR, '..', 'agent', `${agentName}.md`);
if (fs.existsSync(legacyPath)) return legacyPath;
// 2. Lookup subfolders for grouped layout
const cache = lookupCache || createAgentLookupCache();
const found = getIndexedUserAgentPath(agentName, cache);
if (found) return found;
// 3. Return expected flat path as default (for new agent creation)
return pluralPath;
}
/**
* Determine agent scope based on where the .md file exists
* Priority: project level > user level > null (built-in only)
*/
function getAgentScope(agentName, workingDirectory, lookupCache = null) {
if (workingDirectory) {
const projectPath = getProjectAgentPath(workingDirectory, agentName);
if (fs.existsSync(projectPath)) {
return { scope: AGENT_SCOPE.PROJECT, path: projectPath };
}
}
const userPath = getUserAgentPath(agentName, lookupCache);
if (fs.existsSync(userPath)) {
return { scope: AGENT_SCOPE.USER, path: userPath };
}
return { scope: null, path: null };
}
/**
* Get the path where an agent should be written based on scope
*/
function getAgentWritePath(agentName, workingDirectory, requestedScope, lookupCache = null) {
// For updates: check existing location first (project takes precedence)
const existing = getAgentScope(agentName, workingDirectory, lookupCache);
if (existing.path) {
return existing;
}
// For new agents or built-in overrides: use requested scope or default to user
const scope = requestedScope || AGENT_SCOPE.USER;
if (scope === AGENT_SCOPE.PROJECT && workingDirectory) {
return {
scope: AGENT_SCOPE.PROJECT,
path: getProjectAgentPath(workingDirectory, agentName)
};
}
return {
scope: AGENT_SCOPE.USER,
path: getUserAgentPath(agentName, lookupCache)
};
}
/**
* Detect where an agent's permission field is currently defined
* Priority: project .md > user .md > project JSON > user JSON
* Returns: { source: 'md'|'json'|null, scope: 'project'|'user'|null, path: string|null }
*/
function getAgentPermissionSource(agentName, workingDirectory, lookupCache = null) {
// Check project-level .md first
if (workingDirectory) {
const projectMdPath = getProjectAgentPath(workingDirectory, agentName);
if (fs.existsSync(projectMdPath)) {
const { frontmatter } = parseMdFile(projectMdPath);
if (frontmatter.permission !== undefined) {
return { source: 'md', scope: AGENT_SCOPE.PROJECT, path: projectMdPath };
}
}
}
// Check user-level .md
const userMdPath = getUserAgentPath(agentName, lookupCache);
if (fs.existsSync(userMdPath)) {
const { frontmatter } = parseMdFile(userMdPath);
if (frontmatter.permission !== undefined) {
return { source: 'md', scope: AGENT_SCOPE.USER, path: userMdPath };
}
}
// Check JSON layers (project > user)
const layers = readConfigLayers(workingDirectory);
// Project opencode.json
const projectJsonPermission = layers.projectConfig?.agent?.[agentName]?.permission;
if (projectJsonPermission !== undefined && layers.paths.projectPath) {
return { source: 'json', scope: AGENT_SCOPE.PROJECT, path: layers.paths.projectPath };
}
// User opencode.json
const userJsonPermission = layers.userConfig?.agent?.[agentName]?.permission;
if (userJsonPermission !== undefined) {
return { source: 'json', scope: AGENT_SCOPE.USER, path: layers.paths.userPath };
}
// Custom config (env var)
const customJsonPermission = layers.customConfig?.agent?.[agentName]?.permission;
if (customJsonPermission !== undefined && layers.paths.customPath) {
return { source: 'json', scope: 'custom', path: layers.paths.customPath };
}
return { source: null, scope: null, path: null };
}
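/**
 * Merge a new permission payload with any non-wildcard patterns already present in
 * the existing config, so per-pattern rules (e.g. a specific bash command) are not
 * lost when a wildcard-level permission is updated.
 */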
function mergePermissionWithNonWildcards(newPermission, permissionSource, agentName) {
if (!permissionSource.source || !permissionSource.path) {
return newPermission;
}
let existingPermission = null;
if (permissionSource.source === 'md') {
const { frontmatter } = parseMdFile(permissionSource.path);
existingPermission = frontmatter.permission;
} else if (permissionSource.source === 'json') {
const config = readConfigFile(permissionSource.path);
existingPermission = config?.agent?.[agentName]?.permission;
}
if (!existingPermission || typeof existingPermission === 'string') {
return newPermission;
}
if (newPermission == null) {
return null;
}
if (typeof newPermission === 'string') {
return newPermission;
}
const nonWildcardPatterns = {};
for (const [permKey, permValue] of Object.entries(existingPermission)) {
if (permKey === '*') continue;
if (typeof permValue === 'object' && permValue !== null && !Array.isArray(permValue)) {
const nonWildcards = {};
for (const [pattern, action] of Object.entries(permValue)) {
if (pattern !== '*') {
nonWildcards[pattern] = action;
}
}
if (Object.keys(nonWildcards).length > 0) {
nonWildcardPatterns[permKey] = nonWildcards;
}
}
}
if (Object.keys(nonWildcardPatterns).length === 0) {
return newPermission;
}
const merged = { ...newPermission };
for (const [permKey, patterns] of Object.entries(nonWildcardPatterns)) {
const newValue = merged[permKey];
if (typeof newValue === 'string') {
merged[permKey] = { '*': newValue, ...patterns };
} else if (typeof newValue === 'object' && newValue !== null) {
merged[permKey] = { ...patterns, ...newValue };
} else {
const existingValue = existingPermission[permKey];
if (typeof existingValue === 'object' && existingValue !== null) {
const wildcard = existingValue['*'];
merged[permKey] = wildcard ? { '*': wildcard, ...patterns } : patterns;
}
}
}
return merged;
}
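/**
 * Report where an agent is defined across the .md files (project/user) and the
 * opencode.json layers, including which fields each source contributes.
 */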
function getAgentSources(agentName, workingDirectory, lookupCache = createAgentLookupCache()) {
const projectPath = workingDirectory ? getProjectAgentPath(workingDirectory, agentName) : null;
const projectExists = projectPath && fs.existsSync(projectPath);
const userPath = getUserAgentPath(agentName, lookupCache);
const userExists = fs.existsSync(userPath);
const mdPath = projectExists ? projectPath : (userExists ? userPath : null);
const mdExists = !!mdPath;
const mdScope = projectExists ? AGENT_SCOPE.PROJECT : (userExists ? AGENT_SCOPE.USER : null);
const layers = readConfigLayers(workingDirectory);
const jsonSource = getJsonEntrySource(layers, 'agent', agentName);
const jsonSection = jsonSource.section;
const jsonPath = jsonSource.path || layers.paths.customPath || layers.paths.projectPath || layers.paths.userPath;
const jsonScope = jsonSource.path === layers.paths.projectPath ? AGENT_SCOPE.PROJECT : AGENT_SCOPE.USER;
const sources = {
md: {
exists: mdExists,
path: mdPath,
scope: mdScope,
fields: []
},
json: {
exists: jsonSource.exists,
path: jsonPath,
scope: jsonSource.exists ? jsonScope : null,
fields: []
},
projectMd: {
exists: projectExists,
path: projectPath
},
userMd: {
exists: userExists,
path: userPath
}
};
if (mdExists) {
const { frontmatter, body } = parseMdFile(mdPath);
sources.md.fields = Object.keys(frontmatter);
if (body) {
sources.md.fields.push('prompt');
}
}
if (jsonSection) {
sources.json.fields = Object.keys(jsonSection);
}
return sources;
}
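/**
 * Load the effective agent configuration, preferring a project or user .md file
 * over an opencode.json entry; returns { source, scope, config }.
 */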
function getAgentConfig(agentName, workingDirectory, lookupCache = createAgentLookupCache()) {
const projectPath = workingDirectory ? getProjectAgentPath(workingDirectory, agentName) : null;
const projectExists = projectPath && fs.existsSync(projectPath);
const userPath = getUserAgentPath(agentName, lookupCache);
const userExists = fs.existsSync(userPath);
if (projectExists || userExists) {
const mdPath = projectExists ? projectPath : userPath;
const { frontmatter, body } = parseMdFile(mdPath);
return {
source: 'md',
scope: projectExists ? AGENT_SCOPE.PROJECT : AGENT_SCOPE.USER,
config: {
...frontmatter,
...(typeof body === 'string' && body.length > 0 ? { prompt: body } : {}),
},
};
}
const layers = readConfigLayers(workingDirectory);
const jsonSource = getJsonEntrySource(layers, 'agent', agentName);
if (jsonSource.exists && jsonSource.section) {
const scope = jsonSource.path === layers.paths.projectPath ? AGENT_SCOPE.PROJECT : AGENT_SCOPE.USER;
return {
source: 'json',
scope,
config: { ...jsonSource.section },
};
}
return {
source: 'none',
scope: null,
config: {},
};
}
function createAgent(agentName, config, workingDirectory, scope) {
ensureDirs();
const lookupCache = createAgentLookupCache();
const projectPath = workingDirectory ? getProjectAgentPath(workingDirectory, agentName) : null;
const userPath = getUserAgentPath(agentName, lookupCache);
if (projectPath && fs.existsSync(projectPath)) {
throw new Error(`Agent ${agentName} already exists as project-level .md file`);
}
if (fs.existsSync(userPath)) {
throw new Error(`Agent ${agentName} already exists as user-level .md file`);
}
const layers = readConfigLayers(workingDirectory);
const jsonSource = getJsonEntrySource(layers, 'agent', agentName);
if (jsonSource.exists) {
throw new Error(`Agent ${agentName} already exists in opencode.json`);
}
let targetPath;
let targetScope;
if (scope === AGENT_SCOPE.PROJECT && workingDirectory) {
ensureProjectAgentDir(workingDirectory);
targetPath = projectPath;
targetScope = AGENT_SCOPE.PROJECT;
} else {
targetPath = userPath;
targetScope = AGENT_SCOPE.USER;
}
const { prompt, scope: _scopeFromConfig, ...frontmatter } = config;
writeMdFile(targetPath, frontmatter, prompt || '');
console.log(`Created new agent: ${agentName} (scope: ${targetScope}, path: ${targetPath})`);
}
function updateAgent(agentName, updates, workingDirectory) {
ensureDirs();
const lookupCache = createAgentLookupCache();
const { scope, path: mdPath } = getAgentWritePath(agentName, workingDirectory, undefined, lookupCache);
const mdExists = mdPath && fs.existsSync(mdPath);
const layers = readConfigLayers(workingDirectory);
const jsonSource = getJsonEntrySource(layers, 'agent', agentName);
const jsonSection = jsonSource.section;
const hasJsonFields = jsonSource.exists && jsonSection && Object.keys(jsonSection).length > 0;
const jsonTarget = jsonSource.exists
? { config: jsonSource.config, path: jsonSource.path }
: getJsonWriteTarget(layers, AGENT_SCOPE.USER);
let config = jsonTarget.config || {};
const isBuiltinOverride = !mdExists && !hasJsonFields;
let targetPath = mdPath;
let targetScope = scope;
if (!mdExists && isBuiltinOverride) {
targetPath = getUserAgentPath(agentName, lookupCache);
targetScope = AGENT_SCOPE.USER;
}
let mdData = mdExists ? parseMdFile(mdPath) : (isBuiltinOverride ? { frontmatter: {}, body: '' } : null);
let mdModified = false;
let jsonModified = false;
const creatingNewMd = isBuiltinOverride;
for (const [field, value] of Object.entries(updates)) {
if (field === 'prompt') {
const normalizedValue = typeof value === 'string' ? value : (value == null ? '' : String(value));
if (mdExists || creatingNewMd) {
if (mdData) {
mdData.body = normalizedValue;
mdModified = true;
}
continue;
} else if (isPromptFileReference(jsonSection?.prompt)) {
const promptFilePath = resolvePromptFilePath(jsonSection.prompt);
if (!promptFilePath) {
throw new Error(`Invalid prompt file reference for agent ${agentName}`);
}
writePromptFile(promptFilePath, normalizedValue);
continue;
} else if (isPromptFileReference(normalizedValue)) {
if (!config.agent) config.agent = {};
if (!config.agent[agentName]) config.agent[agentName] = {};
config.agent[agentName].prompt = normalizedValue;
jsonModified = true;
continue;
}
if (!config.agent) config.agent = {};
if (!config.agent[agentName]) config.agent[agentName] = {};
config.agent[agentName].prompt = normalizedValue;
jsonModified = true;
continue;
}
if (field === 'permission') {
const permissionSource = getAgentPermissionSource(agentName, workingDirectory, lookupCache);
const newPermission = mergePermissionWithNonWildcards(value, permissionSource, agentName);
if (permissionSource.source === 'md') {
const existingMdData = parseMdFile(permissionSource.path);
existingMdData.frontmatter.permission = newPermission;
writeMdFile(permissionSource.path, existingMdData.frontmatter, existingMdData.body);
console.log(`Updated permission in .md file: ${permissionSource.path}`);
} else if (permissionSource.source === 'json') {
const existingConfig = readConfigFile(permissionSource.path);
if (!existingConfig.agent) existingConfig.agent = {};
if (!existingConfig.agent[agentName]) existingConfig.agent[agentName] = {};
existingConfig.agent[agentName].permission = newPermission;
writeConfig(existingConfig, permissionSource.path);
console.log(`Updated permission in JSON: ${permissionSource.path}`);
} else {
if ((mdExists || creatingNewMd) && mdData) {
mdData.frontmatter.permission = newPermission;
mdModified = true;
} else if (hasJsonFields) {
if (!config.agent) config.agent = {};
if (!config.agent[agentName]) config.agent[agentName] = {};
config.agent[agentName].permission = newPermission;
jsonModified = true;
} else {
const writeTarget = workingDirectory
? { config: layers.projectConfig || {}, path: layers.paths.projectPath || layers.paths.userPath }
: { config: layers.userConfig || {}, path: layers.paths.userPath };
if (!writeTarget.config.agent) writeTarget.config.agent = {};
if (!writeTarget.config.agent[agentName]) writeTarget.config.agent[agentName] = {};
writeTarget.config.agent[agentName].permission = newPermission;
writeConfig(writeTarget.config, writeTarget.path);
console.log(`Created permission in JSON: ${writeTarget.path}`);
}
}
continue;
}
const inMd = mdData?.frontmatter?.[field] !== undefined;
const inJson = jsonSection?.[field] !== undefined;
if (value === null) {
if (mdData && inMd) {
delete mdData.frontmatter[field];
mdModified = true;
}
if (inJson && config.agent?.[agentName]) {
delete config.agent[agentName][field];
if (Object.keys(config.agent[agentName]).length === 0) {
delete config.agent[agentName];
}
if (Object.keys(config.agent).length === 0) {
delete config.agent;
}
jsonModified = true;
}
continue;
}
if (inJson) {
if (!config.agent) config.agent = {};
if (!config.agent[agentName]) config.agent[agentName] = {};
config.agent[agentName][field] = value;
jsonModified = true;
} else if (inMd || creatingNewMd) {
if (mdData) {
mdData.frontmatter[field] = value;
mdModified = true;
}
} else {
if ((mdExists || creatingNewMd) && mdData) {
mdData.frontmatter[field] = value;
mdModified = true;
} else {
if (!config.agent) config.agent = {};
if (!config.agent[agentName]) config.agent[agentName] = {};
config.agent[agentName][field] = value;
jsonModified = true;
}
}
}
if (mdModified && mdData) {
writeMdFile(targetPath, mdData.frontmatter, mdData.body);
}
if (jsonModified) {
writeConfig(config, jsonTarget.path || CONFIG_FILE);
}
console.log(`Updated agent: ${agentName} (scope: ${targetScope}, md: ${mdModified}, json: ${jsonModified})`);
}
function deleteAgent(agentName, workingDirectory) {
const lookupCache = createAgentLookupCache();
let deleted = false;
if (workingDirectory) {
const projectPath = getProjectAgentPath(workingDirectory, agentName);
if (fs.existsSync(projectPath)) {
fs.unlinkSync(projectPath);
console.log(`Deleted project-level agent .md file: ${projectPath}`);
deleted = true;
}
}
const userPath = getUserAgentPath(agentName, lookupCache);
if (fs.existsSync(userPath)) {
fs.unlinkSync(userPath);
console.log(`Deleted user-level agent .md file: ${userPath}`);
deleted = true;
}
const layers = readConfigLayers(workingDirectory);
const jsonSource = getJsonEntrySource(layers, 'agent', agentName);
if (jsonSource.exists && jsonSource.config && jsonSource.path) {
if (!jsonSource.config.agent) jsonSource.config.agent = {};
delete jsonSource.config.agent[agentName];
writeConfig(jsonSource.config, jsonSource.path);
console.log(`Removed agent from opencode.json: ${agentName}`);
deleted = true;
}
if (!deleted) {
const jsonTarget = getJsonWriteTarget(layers, workingDirectory ? AGENT_SCOPE.PROJECT : AGENT_SCOPE.USER);
const targetConfig = jsonTarget.config || {};
if (!targetConfig.agent) targetConfig.agent = {};
targetConfig.agent[agentName] = { disable: true };
writeConfig(targetConfig, jsonTarget.path || CONFIG_FILE);
console.log(`Disabled built-in agent: ${agentName}`);
}
}
export {
ensureProjectAgentDir,
getProjectAgentPath,
getUserAgentPath,
getAgentScope,
getAgentWritePath,
getAgentPermissionSource,
getAgentSources,
getAgentConfig,
createAgent,
updateAgent,
deleteAgent,
};

View File

@@ -0,0 +1,81 @@
import fs from 'fs';
import path from 'path';
import os from 'os';
const OPENCODE_DATA_DIR = path.join(os.homedir(), '.local', 'share', 'opencode');
const AUTH_FILE = path.join(OPENCODE_DATA_DIR, 'auth.json');
function readAuthFile() {
if (!fs.existsSync(AUTH_FILE)) {
return {};
}
try {
const content = fs.readFileSync(AUTH_FILE, 'utf8');
const trimmed = content.trim();
if (!trimmed) {
return {};
}
return JSON.parse(trimmed);
} catch (error) {
console.error('Failed to read auth file:', error);
throw new Error('Failed to read OpenCode auth configuration');
}
}
function writeAuthFile(auth) {
try {
if (!fs.existsSync(OPENCODE_DATA_DIR)) {
fs.mkdirSync(OPENCODE_DATA_DIR, { recursive: true });
}
if (fs.existsSync(AUTH_FILE)) {
const backupFile = `${AUTH_FILE}.openchamber.backup`;
fs.copyFileSync(AUTH_FILE, backupFile);
console.log(`Created auth backup: ${backupFile}`);
}
fs.writeFileSync(AUTH_FILE, JSON.stringify(auth, null, 2), 'utf8');
console.log('Successfully wrote auth file');
} catch (error) {
console.error('Failed to write auth file:', error);
throw new Error('Failed to write OpenCode auth configuration');
}
}
function removeProviderAuth(providerId) {
if (!providerId || typeof providerId !== 'string') {
throw new Error('Provider ID is required');
}
const auth = readAuthFile();
if (!auth[providerId]) {
console.log(`Provider ${providerId} not found in auth file, nothing to remove`);
return false;
}
delete auth[providerId];
writeAuthFile(auth);
console.log(`Removed provider auth: ${providerId}`);
return true;
}
function getProviderAuth(providerId) {
const auth = readAuthFile();
return auth[providerId] || null;
}
function listProviderAuths() {
const auth = readAuthFile();
return Object.keys(auth);
}
export {
readAuthFile,
writeAuthFile,
removeProviderAuth,
getProviderAuth,
listProviderAuths,
AUTH_FILE,
OPENCODE_DATA_DIR
};

View File

@@ -0,0 +1,339 @@
import fs from 'fs';
import path from 'path';
import {
CONFIG_FILE,
OPENCODE_CONFIG_DIR,
COMMAND_DIR,
COMMAND_SCOPE,
ensureDirs,
parseMdFile,
writeMdFile,
readConfigLayers,
writeConfig,
getJsonEntrySource,
getJsonWriteTarget,
isPromptFileReference,
resolvePromptFilePath,
writePromptFile,
} from './shared.js';
// ============== COMMAND SCOPE HELPERS ==============
/**
* Ensure project-level command directory exists
*/
function ensureProjectCommandDir(workingDirectory) {
const projectCommandDir = path.join(workingDirectory, '.opencode', 'commands');
if (!fs.existsSync(projectCommandDir)) {
fs.mkdirSync(projectCommandDir, { recursive: true });
}
const legacyProjectCommandDir = path.join(workingDirectory, '.opencode', 'command');
if (!fs.existsSync(legacyProjectCommandDir)) {
fs.mkdirSync(legacyProjectCommandDir, { recursive: true });
}
return projectCommandDir;
}
/**
* Get project-level command path
*/
function getProjectCommandPath(workingDirectory, commandName) {
const pluralPath = path.join(workingDirectory, '.opencode', 'commands', `${commandName}.md`);
const legacyPath = path.join(workingDirectory, '.opencode', 'command', `${commandName}.md`);
if (fs.existsSync(legacyPath) && !fs.existsSync(pluralPath)) return legacyPath;
return pluralPath;
}
/**
* Get user-level command path
*/
function getUserCommandPath(commandName) {
const pluralPath = path.join(COMMAND_DIR, `${commandName}.md`);
const legacyPath = path.join(OPENCODE_CONFIG_DIR, 'command', `${commandName}.md`);
if (fs.existsSync(legacyPath) && !fs.existsSync(pluralPath)) return legacyPath;
return pluralPath;
}
/**
* Determine command scope based on where the .md file exists
* Priority: project level > user level > null (built-in only)
*/
function getCommandScope(commandName, workingDirectory) {
if (workingDirectory) {
const projectPath = getProjectCommandPath(workingDirectory, commandName);
if (fs.existsSync(projectPath)) {
return { scope: COMMAND_SCOPE.PROJECT, path: projectPath };
}
}
const userPath = getUserCommandPath(commandName);
if (fs.existsSync(userPath)) {
return { scope: COMMAND_SCOPE.USER, path: userPath };
}
return { scope: null, path: null };
}
/**
* Get the path where a command should be written based on scope
*/
function getCommandWritePath(commandName, workingDirectory, requestedScope) {
// For updates: check existing location first (project takes precedence)
const existing = getCommandScope(commandName, workingDirectory);
if (existing.path) {
return existing;
}
// For new commands or built-in overrides: use requested scope or default to user
const scope = requestedScope || COMMAND_SCOPE.USER;
if (scope === COMMAND_SCOPE.PROJECT && workingDirectory) {
return {
scope: COMMAND_SCOPE.PROJECT,
path: getProjectCommandPath(workingDirectory, commandName)
};
}
return {
scope: COMMAND_SCOPE.USER,
path: getUserCommandPath(commandName)
};
}
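/**
 * Report where a command is defined across the .md files (project/user) and the
 * opencode.json layers, including which fields each source contributes.
 */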
function getCommandSources(commandName, workingDirectory) {
const projectPath = workingDirectory ? getProjectCommandPath(workingDirectory, commandName) : null;
const projectExists = projectPath && fs.existsSync(projectPath);
const userPath = getUserCommandPath(commandName);
const userExists = fs.existsSync(userPath);
const mdPath = projectExists ? projectPath : (userExists ? userPath : null);
const mdExists = !!mdPath;
const mdScope = projectExists ? COMMAND_SCOPE.PROJECT : (userExists ? COMMAND_SCOPE.USER : null);
const layers = readConfigLayers(workingDirectory);
const jsonSource = getJsonEntrySource(layers, 'command', commandName);
const jsonSection = jsonSource.section;
const jsonPath = jsonSource.path || layers.paths.customPath || layers.paths.projectPath || layers.paths.userPath;
const jsonScope = jsonSource.path === layers.paths.projectPath ? COMMAND_SCOPE.PROJECT : COMMAND_SCOPE.USER;
const sources = {
md: {
exists: mdExists,
path: mdPath,
scope: mdScope,
fields: []
},
json: {
exists: jsonSource.exists,
path: jsonPath,
scope: jsonSource.exists ? jsonScope : null,
fields: []
},
projectMd: {
exists: projectExists,
path: projectPath
},
userMd: {
exists: userExists,
path: userPath
}
};
if (mdExists) {
const { frontmatter, body } = parseMdFile(mdPath);
sources.md.fields = Object.keys(frontmatter);
if (body) {
sources.md.fields.push('template');
}
}
if (jsonSection) {
sources.json.fields = Object.keys(jsonSection);
}
return sources;
}
function createCommand(commandName, config, workingDirectory, scope) {
ensureDirs();
const projectPath = workingDirectory ? getProjectCommandPath(workingDirectory, commandName) : null;
const userPath = getUserCommandPath(commandName);
if (projectPath && fs.existsSync(projectPath)) {
throw new Error(`Command ${commandName} already exists as project-level .md file`);
}
if (fs.existsSync(userPath)) {
throw new Error(`Command ${commandName} already exists as user-level .md file`);
}
const layers = readConfigLayers(workingDirectory);
const jsonSource = getJsonEntrySource(layers, 'command', commandName);
if (jsonSource.exists) {
throw new Error(`Command ${commandName} already exists in opencode.json`);
}
let targetPath;
let targetScope;
if (scope === COMMAND_SCOPE.PROJECT && workingDirectory) {
ensureProjectCommandDir(workingDirectory);
targetPath = projectPath;
targetScope = COMMAND_SCOPE.PROJECT;
} else {
targetPath = userPath;
targetScope = COMMAND_SCOPE.USER;
}
const { template, scope: _scopeFromConfig, ...frontmatter } = config;
writeMdFile(targetPath, frontmatter, template || '');
console.log(`Created new command: ${commandName} (scope: ${targetScope}, path: ${targetPath})`);
}
function updateCommand(commandName, updates, workingDirectory) {
ensureDirs();
const { scope, path: mdPath } = getCommandWritePath(commandName, workingDirectory);
const mdExists = mdPath && fs.existsSync(mdPath);
const layers = readConfigLayers(workingDirectory);
const jsonSource = getJsonEntrySource(layers, 'command', commandName);
const jsonSection = jsonSource.section;
const hasJsonFields = jsonSource.exists && jsonSection && Object.keys(jsonSection).length > 0;
const jsonTarget = jsonSource.exists
? { config: jsonSource.config, path: jsonSource.path }
: getJsonWriteTarget(layers, workingDirectory ? COMMAND_SCOPE.PROJECT : COMMAND_SCOPE.USER);
let config = jsonTarget.config || {};
const isBuiltinOverride = !mdExists && !hasJsonFields;
let targetPath = mdPath;
let targetScope = scope;
if (!mdExists && isBuiltinOverride) {
targetPath = getUserCommandPath(commandName);
targetScope = COMMAND_SCOPE.USER;
}
const mdData = mdExists ? parseMdFile(mdPath) : (isBuiltinOverride ? { frontmatter: {}, body: '' } : null);
let mdModified = false;
let jsonModified = false;
const creatingNewMd = isBuiltinOverride;
for (const [field, value] of Object.entries(updates)) {
if (field === 'template') {
const normalizedValue = typeof value === 'string' ? value : (value == null ? '' : String(value));
if (mdExists || creatingNewMd) {
if (mdData) {
mdData.body = normalizedValue;
mdModified = true;
}
continue;
} else if (isPromptFileReference(jsonSection?.template)) {
const templateFilePath = resolvePromptFilePath(jsonSection.template);
if (!templateFilePath) {
throw new Error(`Invalid template file reference for command ${commandName}`);
}
writePromptFile(templateFilePath, normalizedValue);
continue;
}
if (!config.command) config.command = {};
if (!config.command[commandName]) config.command[commandName] = {};
config.command[commandName].template = normalizedValue;
jsonModified = true;
continue;
}
const inMd = mdData?.frontmatter?.[field] !== undefined;
const inJson = jsonSection?.[field] !== undefined;
if (inJson) {
if (!config.command) config.command = {};
if (!config.command[commandName]) config.command[commandName] = {};
config.command[commandName][field] = value;
jsonModified = true;
} else if (inMd || creatingNewMd) {
if (mdData) {
mdData.frontmatter[field] = value;
mdModified = true;
}
} else {
if ((mdExists || creatingNewMd) && mdData) {
mdData.frontmatter[field] = value;
mdModified = true;
} else {
if (!config.command) config.command = {};
if (!config.command[commandName]) config.command[commandName] = {};
config.command[commandName][field] = value;
jsonModified = true;
}
}
}
if (mdModified && mdData) {
writeMdFile(targetPath, mdData.frontmatter, mdData.body);
}
if (jsonModified) {
writeConfig(config, jsonTarget.path || CONFIG_FILE);
}
console.log(`Updated command: ${commandName} (scope: ${targetScope}, md: ${mdModified}, json: ${jsonModified})`);
}
function deleteCommand(commandName, workingDirectory) {
let deleted = false;
if (workingDirectory) {
const projectPath = getProjectCommandPath(workingDirectory, commandName);
if (fs.existsSync(projectPath)) {
fs.unlinkSync(projectPath);
console.log(`Deleted project-level command .md file: ${projectPath}`);
deleted = true;
}
}
const userPath = getUserCommandPath(commandName);
if (fs.existsSync(userPath)) {
fs.unlinkSync(userPath);
console.log(`Deleted user-level command .md file: ${userPath}`);
deleted = true;
}
const layers = readConfigLayers(workingDirectory);
const jsonSource = getJsonEntrySource(layers, 'command', commandName);
if (jsonSource.exists && jsonSource.config && jsonSource.path) {
if (!jsonSource.config.command) jsonSource.config.command = {};
delete jsonSource.config.command[commandName];
writeConfig(jsonSource.config, jsonSource.path);
console.log(`Removed command from opencode.json: ${commandName}`);
deleted = true;
}
if (!deleted) {
throw new Error(`Command "${commandName}" not found`);
}
}
export {
ensureProjectCommandDir,
getProjectCommandPath,
getUserCommandPath,
getCommandScope,
getCommandWritePath,
getCommandSources,
createCommand,
updateCommand,
deleteCommand,
};
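
A minimal usage sketch for the command helpers above, assuming this module is importable as ./commands.js; the command name, fields, and working directory are hypothetical:

// Hypothetical example: create a project-scoped command, then tweak one field.
import { createCommand, updateCommand, getCommandSources } from './commands.js';

const cwd = '/path/to/project'; // hypothetical working directory
createCommand('review-pr', {
  description: 'Review a pull request',
  template: 'Review the current diff carefully.',
}, cwd, 'project');
updateCommand('review-pr', { description: 'Review a pull request for regressions' }, cwd);
console.log(getCommandSources('review-pr', cwd).md.fields); // e.g. ['description', 'template']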

View File

@@ -0,0 +1,66 @@
export {
AGENT_DIR,
COMMAND_DIR,
SKILL_DIR,
CONFIG_FILE,
AGENT_SCOPE,
COMMAND_SCOPE,
SKILL_SCOPE,
readConfig,
writeConfig,
readSkillSupportingFile,
writeSkillSupportingFile,
deleteSkillSupportingFile,
} from './shared.js';
export {
getAgentScope,
getAgentPermissionSource,
getAgentSources,
getAgentConfig,
createAgent,
updateAgent,
deleteAgent,
} from './agents.js';
export {
getCommandScope,
getCommandSources,
createCommand,
updateCommand,
deleteCommand,
} from './commands.js';
export {
getSkillSources,
getSkillScope,
discoverSkills,
createSkill,
updateSkill,
deleteSkill,
} from './skills.js';
export {
getProviderSources,
removeProviderConfig,
} from './providers.js';
export {
readAuthFile,
writeAuthFile,
removeProviderAuth,
getProviderAuth,
listProviderAuths,
AUTH_FILE,
OPENCODE_DATA_DIR,
} from './auth.js';
export { createUiAuth } from './ui-auth.js';
export {
listMcpConfigs,
getMcpConfig,
createMcpConfig,
updateMcpConfig,
deleteMcpConfig,
} from './mcp.js';
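
If this barrel module is the package's config entry point (an assumption based on its re-exports), consumers can reach everything through a single import, for example:

// Hypothetical consumer of the barrel module above; the relative path is assumed.
import { readConfig, discoverSkills, listMcpConfigs } from './index.js';

const cwd = process.cwd();
console.log(Object.keys(readConfig(cwd)));
console.log(discoverSkills(cwd).map((skill) => skill.name));
console.log(listMcpConfigs(cwd).map((mcp) => `${mcp.name} (${mcp.scope})`));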

View File

@@ -0,0 +1,206 @@
import fs from 'fs';
import path from 'path';
import {
CONFIG_FILE,
AGENT_SCOPE,
readConfigFile,
readConfigLayers,
getJsonEntrySource,
getJsonWriteTarget,
writeConfig,
} from './shared.js';
// ============== MCP CONFIG HELPERS ==============
/**
* Validate MCP server name
*/
function validateMcpName(name) {
if (!name || typeof name !== 'string') {
throw new Error('MCP server name is required');
}
if (!/^[a-z0-9][a-z0-9_-]*[a-z0-9]$|^[a-z0-9]$/.test(name)) {
throw new Error('MCP server name must be lowercase alphanumeric with hyphens/underscores, and must start and end with a letter or digit');
}
}
/**
 * Resolve whether an MCP entry's config path belongs to the project or user scope
 */
function resolveMcpScopeFromPath(layers, sourcePath) {
if (!sourcePath) return null;
return sourcePath === layers.paths.projectPath ? AGENT_SCOPE.PROJECT : AGENT_SCOPE.USER;
}
function ensureProjectMcpConfigPath(workingDirectory) {
const configDir = path.join(workingDirectory, '.opencode');
if (!fs.existsSync(configDir)) {
fs.mkdirSync(configDir, { recursive: true });
}
return path.join(configDir, 'opencode.json');
}
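/**
 * List all MCP server configs across the merged config layers (user, project, custom)
 */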
function listMcpConfigs(workingDirectory) {
const layers = readConfigLayers(workingDirectory);
const mcp = layers?.mergedConfig?.mcp || {};
return Object.entries(mcp)
.filter(([, entry]) => entry && typeof entry === 'object' && !Array.isArray(entry))
.map(([name, entry]) => {
const source = getJsonEntrySource(layers, 'mcp', name);
return {
name,
...buildMcpEntry(entry),
scope: resolveMcpScopeFromPath(layers, source.path),
};
});
}
/**
* Get a single MCP server config by name
*/
function getMcpConfig(name, workingDirectory) {
const layers = readConfigLayers(workingDirectory);
const entry = layers?.mergedConfig?.mcp?.[name];
if (!entry) {
return null;
}
const source = getJsonEntrySource(layers, 'mcp', name);
return {
name,
...buildMcpEntry(entry),
scope: resolveMcpScopeFromPath(layers, source.path),
};
}
/**
* Create a new MCP server config entry
*/
function createMcpConfig(name, mcpConfig, workingDirectory, scope) {
validateMcpName(name);
const layers = readConfigLayers(workingDirectory);
const source = getJsonEntrySource(layers, 'mcp', name);
if (source.exists) {
throw new Error(`MCP server "${name}" already exists`);
}
let targetPath = CONFIG_FILE;
let config = {};
if (scope === AGENT_SCOPE.PROJECT) {
if (!workingDirectory) {
throw new Error('Project scope requires working directory');
}
targetPath = ensureProjectMcpConfigPath(workingDirectory);
config = fs.existsSync(targetPath) ? readConfigFile(targetPath) : {};
} else {
const jsonTarget = getJsonWriteTarget(layers, AGENT_SCOPE.USER);
targetPath = jsonTarget.path || CONFIG_FILE;
config = jsonTarget.config || {};
}
if (!config.mcp || typeof config.mcp !== 'object' || Array.isArray(config.mcp)) {
config.mcp = {};
}
const { name: _ignoredName, ...entryData } = mcpConfig;
config.mcp[name] = buildMcpEntry(entryData);
writeConfig(config, targetPath);
console.log(`Created MCP server config: ${name}`);
}
/**
* Update an existing MCP server config entry
*/
function updateMcpConfig(name, updates, workingDirectory) {
const layers = readConfigLayers(workingDirectory);
const source = getJsonEntrySource(layers, 'mcp', name);
const targetPath = source.path || CONFIG_FILE;
const config = source.config || (fs.existsSync(targetPath) ? readConfigFile(targetPath) : {});
if (!config.mcp || typeof config.mcp !== 'object' || Array.isArray(config.mcp)) {
config.mcp = {};
}
const existing = config.mcp[name] ?? {};
const { name: _ignoredName, ...updateData } = updates;
config.mcp[name] = buildMcpEntry({ ...existing, ...updateData });
writeConfig(config, targetPath);
console.log(`Updated MCP server config: ${name}`);
}
/**
* Delete an MCP server config entry
*/
function deleteMcpConfig(name, workingDirectory) {
const layers = readConfigLayers(workingDirectory);
const source = getJsonEntrySource(layers, 'mcp', name);
const targetPath = source.path || CONFIG_FILE;
const config = source.config || (fs.existsSync(targetPath) ? readConfigFile(targetPath) : {});
if (!config.mcp || typeof config.mcp !== 'object' || config.mcp[name] === undefined) {
throw new Error(`MCP server "${name}" not found`);
}
delete config.mcp[name];
if (Object.keys(config.mcp).length === 0) {
delete config.mcp;
}
writeConfig(config, targetPath);
console.log(`Deleted MCP server config: ${name}`);
}
/**
* Build a clean MCP entry object, omitting undefined/null values
*/
function buildMcpEntry(data) {
const entry = {};
// type is required
entry.type = data.type === 'remote' ? 'remote' : 'local';
if (entry.type === 'local') {
// command must be a non-empty array of strings
if (Array.isArray(data.command) && data.command.length > 0) {
entry.command = data.command.map(String);
}
} else {
// remote: url required
if (data.url && typeof data.url === 'string') {
entry.url = data.url.trim();
}
}
// environment: flat Record<string, string>
if (data.environment && typeof data.environment === 'object' && !Array.isArray(data.environment)) {
const cleaned = {};
for (const [k, v] of Object.entries(data.environment)) {
if (k && v !== undefined && v !== null) {
cleaned[k] = String(v);
}
}
if (Object.keys(cleaned).length > 0) {
entry.environment = cleaned;
}
}
// enabled defaults to true
entry.enabled = data.enabled !== false;
return entry;
}
export {
listMcpConfigs,
getMcpConfig,
createMcpConfig,
updateMcpConfig,
deleteMcpConfig,
};
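
A hedged sketch of registering a local MCP server with the helpers above; the server name, command, and working directory are invented for illustration:

// Hypothetical example: register a local MCP server at project scope, then disable it.
import { createMcpConfig, updateMcpConfig, listMcpConfigs } from './mcp.js';

const cwd = '/path/to/project'; // hypothetical
createMcpConfig('docs-search', {
  type: 'local',
  command: ['npx', '-y', 'some-mcp-server'], // hypothetical server command
  environment: { LOG_LEVEL: 'info' },
}, cwd, 'project');
updateMcpConfig('docs-search', { enabled: false }, cwd);
console.log(listMcpConfigs(cwd));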

View File

@@ -0,0 +1,96 @@
import {
CONFIG_FILE,
readConfigLayers,
isPlainObject,
getConfigForPath,
writeConfig,
} from './shared.js';
function getProviderSources(providerId, workingDirectory) {
const layers = readConfigLayers(workingDirectory);
const { userConfig, projectConfig, customConfig, paths } = layers;
const customProviders = isPlainObject(customConfig?.provider) ? customConfig.provider : {};
const customProvidersAlias = isPlainObject(customConfig?.providers) ? customConfig.providers : {};
const projectProviders = isPlainObject(projectConfig?.provider) ? projectConfig.provider : {};
const projectProvidersAlias = isPlainObject(projectConfig?.providers) ? projectConfig.providers : {};
const userProviders = isPlainObject(userConfig?.provider) ? userConfig.provider : {};
const userProvidersAlias = isPlainObject(userConfig?.providers) ? userConfig.providers : {};
const customExists =
Object.prototype.hasOwnProperty.call(customProviders, providerId) ||
Object.prototype.hasOwnProperty.call(customProvidersAlias, providerId);
const projectExists =
Object.prototype.hasOwnProperty.call(projectProviders, providerId) ||
Object.prototype.hasOwnProperty.call(projectProvidersAlias, providerId);
const userExists =
Object.prototype.hasOwnProperty.call(userProviders, providerId) ||
Object.prototype.hasOwnProperty.call(userProvidersAlias, providerId);
return {
sources: {
auth: { exists: false },
user: { exists: userExists, path: paths.userPath },
project: { exists: projectExists, path: paths.projectPath || null },
custom: { exists: customExists, path: paths.customPath }
}
};
}
function removeProviderConfig(providerId, workingDirectory, scope = 'user') {
if (!providerId || typeof providerId !== 'string') {
throw new Error('Provider ID is required');
}
const layers = readConfigLayers(workingDirectory);
let targetPath = layers.paths.userPath;
if (scope === 'project') {
if (!workingDirectory) {
throw new Error('Working directory is required for project scope');
}
targetPath = layers.paths.projectPath || targetPath;
} else if (scope === 'custom') {
if (!layers.paths.customPath) {
return false;
}
targetPath = layers.paths.customPath;
}
const targetConfig = getConfigForPath(layers, targetPath);
const providerConfig = isPlainObject(targetConfig.provider) ? targetConfig.provider : {};
const providersConfig = isPlainObject(targetConfig.providers) ? targetConfig.providers : {};
const removedProvider = Object.prototype.hasOwnProperty.call(providerConfig, providerId);
const removedProviders = Object.prototype.hasOwnProperty.call(providersConfig, providerId);
if (!removedProvider && !removedProviders) {
return false;
}
if (removedProvider) {
delete providerConfig[providerId];
if (Object.keys(providerConfig).length === 0) {
delete targetConfig.provider;
} else {
targetConfig.provider = providerConfig;
}
}
if (removedProviders) {
delete providersConfig[providerId];
if (Object.keys(providersConfig).length === 0) {
delete targetConfig.providers;
} else {
targetConfig.providers = providersConfig;
}
}
writeConfig(targetConfig, targetPath || CONFIG_FILE);
console.log(`Removed provider ${providerId} from config: ${targetPath}`);
return true;
}
export {
getProviderSources,
removeProviderConfig,
};
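
For example (a sketch with a hypothetical provider id), checking where a provider is defined before removing it from the user layer might look like:

// Hypothetical example: inspect and remove a provider entry from the user config.
import { getProviderSources, removeProviderConfig } from './providers.js';

const cwd = '/path/to/project'; // hypothetical
const { sources } = getProviderSources('openrouter', cwd); // provider id is illustrative
if (sources.user.exists) {
  const removed = removeProviderConfig('openrouter', cwd, 'user');
  console.log(removed ? 'removed from user config' : 'nothing to remove');
}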

View File

@@ -0,0 +1,530 @@
import fs from 'fs';
import path from 'path';
import os from 'os';
import yaml from 'yaml';
import { parse as parseJsonc } from 'jsonc-parser';
// ============== PATH CONSTANTS ==============
const OPENCODE_CONFIG_DIR = path.join(os.homedir(), '.config', 'opencode');
const AGENT_DIR = path.join(OPENCODE_CONFIG_DIR, 'agents');
const COMMAND_DIR = path.join(OPENCODE_CONFIG_DIR, 'commands');
const SKILL_DIR = path.join(OPENCODE_CONFIG_DIR, 'skills');
const CONFIG_FILE = path.join(OPENCODE_CONFIG_DIR, 'opencode.json');
const CUSTOM_CONFIG_FILE = process.env.OPENCODE_CONFIG
? path.resolve(process.env.OPENCODE_CONFIG)
: null;
const PROMPT_FILE_PATTERN = /^\{file:(.+)\}$/i;
// ============== SCOPE TYPE CONSTANTS ==============
const AGENT_SCOPE = {
USER: 'user',
PROJECT: 'project'
};
const COMMAND_SCOPE = {
USER: 'user',
PROJECT: 'project'
};
const SKILL_SCOPE = {
USER: 'user',
PROJECT: 'project'
};
// ============== DIRECTORY OPERATIONS ==============
function ensureDirs() {
if (!fs.existsSync(OPENCODE_CONFIG_DIR)) {
fs.mkdirSync(OPENCODE_CONFIG_DIR, { recursive: true });
}
if (!fs.existsSync(AGENT_DIR)) {
fs.mkdirSync(AGENT_DIR, { recursive: true });
}
if (!fs.existsSync(COMMAND_DIR)) {
fs.mkdirSync(COMMAND_DIR, { recursive: true });
}
if (!fs.existsSync(SKILL_DIR)) {
fs.mkdirSync(SKILL_DIR, { recursive: true });
}
}
// ============== MARKDOWN FILE OPERATIONS ==============
function parseMdFile(filePath) {
const content = fs.readFileSync(filePath, 'utf8');
const match = content.match(/^---\r?\n([\s\S]*?)\r?\n---\r?\n([\s\S]*)$/);
if (!match) {
return { frontmatter: {}, body: content.trim() };
}
let frontmatter = {};
try {
frontmatter = yaml.parse(match[1]) || {};
} catch (error) {
console.warn(`Failed to parse markdown frontmatter ${filePath}, treating as empty:`, error);
frontmatter = {};
}
const body = match[2].trim();
return { frontmatter, body };
}
function writeMdFile(filePath, frontmatter, body) {
try {
const cleanedFrontmatter = Object.fromEntries(
Object.entries(frontmatter).filter(([, value]) => value != null)
);
const yamlStr = yaml.stringify(cleanedFrontmatter);
const content = `---\n${yamlStr}---\n\n${body}`;
fs.writeFileSync(filePath, content, 'utf8');
console.log(`Successfully wrote markdown file: ${filePath}`);
} catch (error) {
console.error(`Failed to write markdown file ${filePath}:`, error);
throw new Error('Failed to write markdown file');
}
}
// ============== CONFIG FILE OPERATIONS ==============
function getProjectConfigCandidates(workingDirectory) {
if (!workingDirectory) return [];
return [
path.join(workingDirectory, 'opencode.json'),
path.join(workingDirectory, 'opencode.jsonc'),
path.join(workingDirectory, '.opencode', 'opencode.json'),
path.join(workingDirectory, '.opencode', 'opencode.jsonc'),
];
}
function getProjectConfigPath(workingDirectory) {
if (!workingDirectory) return null;
const candidates = getProjectConfigCandidates(workingDirectory);
for (const candidate of candidates) {
if (fs.existsSync(candidate)) {
return candidate;
}
}
return candidates[0];
}
function getConfigPaths(workingDirectory) {
return {
userPath: CONFIG_FILE,
projectPath: getProjectConfigPath(workingDirectory),
customPath: CUSTOM_CONFIG_FILE
};
}
function readConfigFile(filePath) {
if (!filePath || !fs.existsSync(filePath)) {
return {};
}
try {
const content = fs.readFileSync(filePath, 'utf8');
const normalized = content.trim();
if (!normalized) {
return {};
}
return parseJsonc(normalized, [], { allowTrailingComma: true });
} catch (error) {
console.error(`Failed to read config file: ${filePath}`, error);
throw new Error('Failed to read OpenCode configuration');
}
}
function isPlainObject(value) {
return value && typeof value === 'object' && !Array.isArray(value);
}
function mergeConfigs(base, override) {
if (!isPlainObject(base) || !isPlainObject(override)) {
return override;
}
const result = { ...base };
for (const [key, value] of Object.entries(override)) {
if (key in result) {
const baseValue = result[key];
if (isPlainObject(baseValue) && isPlainObject(value)) {
result[key] = mergeConfigs(baseValue, value);
} else {
result[key] = value;
}
} else {
result[key] = value;
}
}
return result;
}
function readConfigLayers(workingDirectory) {
const { userPath, projectPath, customPath } = getConfigPaths(workingDirectory);
const userConfig = readConfigFile(userPath);
const projectConfig = readConfigFile(projectPath);
const customConfig = readConfigFile(customPath);
const mergedConfig = mergeConfigs(mergeConfigs(userConfig, projectConfig), customConfig);
return {
userConfig,
projectConfig,
customConfig,
mergedConfig,
paths: { userPath, projectPath, customPath }
};
}
function readConfig(workingDirectory) {
return readConfigLayers(workingDirectory).mergedConfig;
}
function getConfigForPath(layers, targetPath) {
if (!targetPath) {
return layers.userConfig;
}
if (layers.paths.customPath && targetPath === layers.paths.customPath) {
return layers.customConfig;
}
if (layers.paths.projectPath && targetPath === layers.paths.projectPath) {
return layers.projectConfig;
}
return layers.userConfig;
}
function writeConfig(config, filePath = CONFIG_FILE) {
try {
if (fs.existsSync(filePath)) {
const backupFile = `${filePath}.openchamber.backup`;
fs.copyFileSync(filePath, backupFile);
console.log(`Created config backup: ${backupFile}`);
}
fs.mkdirSync(path.dirname(filePath), { recursive: true });
fs.writeFileSync(filePath, JSON.stringify(config, null, 2), 'utf8');
console.log(`Successfully wrote config file: ${filePath}`);
} catch (error) {
console.error(`Failed to write config file: ${filePath}`, error);
throw new Error('Failed to write OpenCode configuration');
}
}
function getJsonEntrySource(layers, sectionKey, entryName) {
const { userConfig, projectConfig, customConfig, paths } = layers;
const customSection = customConfig?.[sectionKey]?.[entryName];
if (customSection !== undefined) {
return { section: customSection, config: customConfig, path: paths.customPath, exists: true };
}
const projectSection = projectConfig?.[sectionKey]?.[entryName];
if (projectSection !== undefined) {
return { section: projectSection, config: projectConfig, path: paths.projectPath, exists: true };
}
const userSection = userConfig?.[sectionKey]?.[entryName];
if (userSection !== undefined) {
return { section: userSection, config: userConfig, path: paths.userPath, exists: true };
}
return { section: null, config: null, path: null, exists: false };
}
function getJsonWriteTarget(layers, preferredScope) {
const { userConfig, projectConfig, customConfig, paths } = layers;
if (paths.customPath) {
return { config: customConfig, path: paths.customPath };
}
if (preferredScope === AGENT_SCOPE.PROJECT && paths.projectPath) {
return { config: projectConfig, path: paths.projectPath };
}
if (paths.projectPath) {
return { config: projectConfig, path: paths.projectPath };
}
return { config: userConfig, path: paths.userPath };
}
// ============== GIT/WORKTREE HELPERS ==============
function getAncestors(startDir, stopDir) {
if (!startDir) return [];
const result = [];
let current = path.resolve(startDir);
const resolvedStop = stopDir ? path.resolve(stopDir) : null;
while (true) {
result.push(current);
if (resolvedStop && current === resolvedStop) {
break;
}
const parent = path.dirname(current);
if (parent === current) {
break;
}
current = parent;
}
return result;
}
function findWorktreeRoot(startDir) {
if (!startDir) return null;
let current = path.resolve(startDir);
while (true) {
if (fs.existsSync(path.join(current, '.git'))) {
return current;
}
const parent = path.dirname(current);
if (parent === current) {
return null;
}
current = parent;
}
}
// ============== PROMPT FILE HELPERS ==============
function isPromptFileReference(value) {
if (typeof value !== 'string') {
return false;
}
return PROMPT_FILE_PATTERN.test(value.trim());
}
function resolvePromptFilePath(reference) {
const match = typeof reference === 'string' ? reference.trim().match(PROMPT_FILE_PATTERN) : null;
if (!match) {
return null;
}
let target = match[1].trim();
if (!target) {
return null;
}
if (target.startsWith('./')) {
target = target.slice(2);
target = path.join(OPENCODE_CONFIG_DIR, target);
} else if (!path.isAbsolute(target)) {
target = path.join(OPENCODE_CONFIG_DIR, target);
}
return target;
}
function writePromptFile(filePath, content) {
const dir = path.dirname(filePath);
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(filePath, content ?? '', 'utf8');
console.log(`Updated prompt file: ${filePath}`);
}
// ============== SKILL FILE OPERATIONS ==============
function walkSkillMdFiles(rootDir) {
if (!rootDir || !fs.existsSync(rootDir)) return [];
const results = [];
const walk = (dir) => {
let entries = [];
try {
entries = fs.readdirSync(dir, { withFileTypes: true });
} catch {
return;
}
for (const entry of entries) {
const fullPath = path.join(dir, entry.name);
if (entry.isDirectory()) {
walk(fullPath);
continue;
}
if (entry.isFile() && entry.name === 'SKILL.md') {
results.push(fullPath);
}
}
};
walk(rootDir);
return results;
}
function addSkillFromMdFile(skillsMap, skillMdPath, scope, source) {
let parsed;
try {
parsed = parseMdFile(skillMdPath);
} catch {
return;
}
const name = typeof parsed.frontmatter?.name === 'string'
? parsed.frontmatter.name.trim()
: '';
const description = typeof parsed.frontmatter?.description === 'string'
? parsed.frontmatter.description
: '';
if (!name) {
return;
}
skillsMap.set(name, {
name,
path: skillMdPath,
scope,
source,
description,
});
}
function resolveSkillSearchDirectories(workingDirectory) {
const directories = [];
const pushDir = (dir) => {
if (!dir) return;
const resolved = path.resolve(dir);
if (!directories.includes(resolved)) {
directories.push(resolved);
}
};
pushDir(OPENCODE_CONFIG_DIR);
if (workingDirectory) {
const worktreeRoot = findWorktreeRoot(workingDirectory) || path.resolve(workingDirectory);
const projectDirs = getAncestors(workingDirectory, worktreeRoot)
.map((dir) => path.join(dir, '.opencode'));
projectDirs.forEach(pushDir);
}
pushDir(path.join(os.homedir(), '.opencode'));
const customConfigDir = process.env.OPENCODE_CONFIG_DIR
? path.resolve(process.env.OPENCODE_CONFIG_DIR)
: null;
pushDir(customConfigDir);
return directories;
}
function listSkillSupportingFiles(skillDir) {
if (!fs.existsSync(skillDir)) {
return [];
}
const files = [];
function walkDir(dir, relativePath = '') {
const entries = fs.readdirSync(dir, { withFileTypes: true });
for (const entry of entries) {
const fullPath = path.join(dir, entry.name);
const relPath = relativePath ? path.join(relativePath, entry.name) : entry.name;
if (entry.isDirectory()) {
walkDir(fullPath, relPath);
} else if (entry.name !== 'SKILL.md') {
files.push({
name: entry.name,
path: relPath,
fullPath: fullPath
});
}
}
}
walkDir(skillDir);
return files;
}
function assertPathWithinSkillDir(skillDir, relativePath) {
const root = fs.realpathSync(skillDir);
const target = path.resolve(root, relativePath);
const relative = path.relative(root, target);
const isWithin = relative === '' || (!relative.startsWith('..') && !path.isAbsolute(relative));
if (!isWithin) {
const error = new Error('Access to file denied');
error.code = 'EACCES';
throw error;
}
return target;
}
function readSkillSupportingFile(skillDir, relativePath) {
const fullPath = assertPathWithinSkillDir(skillDir, relativePath);
if (!fs.existsSync(fullPath)) {
return null;
}
return fs.readFileSync(fullPath, 'utf8');
}
function writeSkillSupportingFile(skillDir, relativePath, content) {
const fullPath = assertPathWithinSkillDir(skillDir, relativePath);
const dir = path.dirname(fullPath);
fs.mkdirSync(dir, { recursive: true });
fs.writeFileSync(fullPath, content, 'utf8');
}
function deleteSkillSupportingFile(skillDir, relativePath) {
const root = fs.realpathSync(skillDir);
const fullPath = assertPathWithinSkillDir(skillDir, relativePath);
if (fs.existsSync(fullPath)) {
fs.unlinkSync(fullPath);
let parentDir = path.dirname(fullPath);
while (parentDir !== root) {
try {
const entries = fs.readdirSync(parentDir);
if (entries.length === 0) {
fs.rmdirSync(parentDir);
parentDir = path.dirname(parentDir);
} else {
break;
}
} catch {
break;
}
}
}
}
export {
OPENCODE_CONFIG_DIR,
AGENT_DIR,
COMMAND_DIR,
SKILL_DIR,
CONFIG_FILE,
CUSTOM_CONFIG_FILE,
PROMPT_FILE_PATTERN,
AGENT_SCOPE,
COMMAND_SCOPE,
SKILL_SCOPE,
ensureDirs,
parseMdFile,
writeMdFile,
getProjectConfigCandidates,
getProjectConfigPath,
getConfigPaths,
readConfigFile,
isPlainObject,
mergeConfigs,
readConfigLayers,
readConfig,
getConfigForPath,
writeConfig,
getJsonEntrySource,
getJsonWriteTarget,
getAncestors,
findWorktreeRoot,
isPromptFileReference,
resolvePromptFilePath,
writePromptFile,
walkSkillMdFiles,
addSkillFromMdFile,
resolveSkillSearchDirectories,
listSkillSupportingFiles,
readSkillSupportingFile,
writeSkillSupportingFile,
deleteSkillSupportingFile,
};
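
The layering order matters: readConfigLayers merges user, then project, then custom (OPENCODE_CONFIG), so later layers win on conflicting keys while nested objects merge deeply. A small sketch of that precedence, using in-memory objects instead of real files:

// Illustration of mergeConfigs precedence only; these objects stand in for the
// user/project/custom layers that readConfigLayers would load from disk.
import { mergeConfigs } from './shared.js';

const userLayer = { theme: 'light', mcp: { docs: { enabled: true } } };
const projectLayer = { theme: 'dark' };
const customLayer = { mcp: { docs: { enabled: false } } };

const merged = mergeConfigs(mergeConfigs(userLayer, projectLayer), customLayer);
console.log(merged); // { theme: 'dark', mcp: { docs: { enabled: false } } }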

View File

@@ -0,0 +1,480 @@
import fs from 'fs';
import path from 'path';
import os from 'os';
import {
SKILL_DIR,
OPENCODE_CONFIG_DIR,
SKILL_SCOPE,
ensureDirs,
parseMdFile,
writeMdFile,
readConfigLayers,
readConfig,
walkSkillMdFiles,
addSkillFromMdFile,
resolveSkillSearchDirectories,
listSkillSupportingFiles,
readSkillSupportingFile,
writeSkillSupportingFile,
deleteSkillSupportingFile,
getAncestors,
findWorktreeRoot,
} from './shared.js';
function ensureProjectSkillDir(workingDirectory) {
const projectSkillDir = path.join(workingDirectory, '.opencode', 'skills');
if (!fs.existsSync(projectSkillDir)) {
fs.mkdirSync(projectSkillDir, { recursive: true });
}
const legacyProjectSkillDir = path.join(workingDirectory, '.opencode', 'skill');
if (!fs.existsSync(legacyProjectSkillDir)) {
fs.mkdirSync(legacyProjectSkillDir, { recursive: true });
}
return projectSkillDir;
}
function getProjectSkillDir(workingDirectory, skillName) {
const pluralPath = path.join(workingDirectory, '.opencode', 'skills', skillName);
const legacyPath = path.join(workingDirectory, '.opencode', 'skill', skillName);
if (fs.existsSync(legacyPath) && !fs.existsSync(pluralPath)) return legacyPath;
return pluralPath;
}
function getProjectSkillPath(workingDirectory, skillName) {
const pluralPath = path.join(workingDirectory, '.opencode', 'skills', skillName, 'SKILL.md');
const legacyPath = path.join(workingDirectory, '.opencode', 'skill', skillName, 'SKILL.md');
if (fs.existsSync(legacyPath) && !fs.existsSync(pluralPath)) return legacyPath;
return pluralPath;
}
function getUserSkillDir(skillName) {
const pluralPath = path.join(SKILL_DIR, skillName);
const legacyPath = path.join(OPENCODE_CONFIG_DIR, 'skill', skillName);
if (fs.existsSync(legacyPath) && !fs.existsSync(pluralPath)) return legacyPath;
return pluralPath;
}
function getUserSkillPath(skillName) {
const pluralPath = path.join(SKILL_DIR, skillName, 'SKILL.md');
const legacyPath = path.join(OPENCODE_CONFIG_DIR, 'skill', skillName, 'SKILL.md');
if (fs.existsSync(legacyPath) && !fs.existsSync(pluralPath)) return legacyPath;
return pluralPath;
}
function getClaudeSkillDir(workingDirectory, skillName) {
return path.join(workingDirectory, '.claude', 'skills', skillName);
}
function getClaudeSkillPath(workingDirectory, skillName) {
return path.join(getClaudeSkillDir(workingDirectory, skillName), 'SKILL.md');
}
function getUserAgentsSkillDir(skillName) {
return path.join(os.homedir(), '.agents', 'skills', skillName);
}
function getUserAgentsSkillPath(skillName) {
return path.join(getUserAgentsSkillDir(skillName), 'SKILL.md');
}
function getProjectAgentsSkillDir(workingDirectory, skillName) {
return path.join(workingDirectory, '.agents', 'skills', skillName);
}
function getProjectAgentsSkillPath(workingDirectory, skillName) {
return path.join(getProjectAgentsSkillDir(workingDirectory, skillName), 'SKILL.md');
}
function getSkillScope(skillName, workingDirectory) {
const discovered = discoverSkills(workingDirectory).find((skill) => skill.name === skillName);
if (discovered?.path) {
return { scope: discovered.scope || null, path: discovered.path, source: discovered.source || null };
}
if (workingDirectory) {
const projectPath = getProjectSkillPath(workingDirectory, skillName);
if (fs.existsSync(projectPath)) {
return { scope: SKILL_SCOPE.PROJECT, path: projectPath, source: 'opencode' };
}
const claudePath = getClaudeSkillPath(workingDirectory, skillName);
if (fs.existsSync(claudePath)) {
return { scope: SKILL_SCOPE.PROJECT, path: claudePath, source: 'claude' };
}
}
const userPath = getUserSkillPath(skillName);
if (fs.existsSync(userPath)) {
return { scope: SKILL_SCOPE.USER, path: userPath, source: 'opencode' };
}
return { scope: null, path: null, source: null };
}
function getSkillWritePath(skillName, workingDirectory, requestedScope) {
const existing = getSkillScope(skillName, workingDirectory);
if (existing.path) {
return existing;
}
const scope = requestedScope || SKILL_SCOPE.USER;
if (scope === SKILL_SCOPE.PROJECT && workingDirectory) {
return {
scope: SKILL_SCOPE.PROJECT,
path: getProjectSkillPath(workingDirectory, skillName),
source: 'opencode'
};
}
return {
scope: SKILL_SCOPE.USER,
path: getUserSkillPath(skillName),
source: 'opencode'
};
}
function discoverSkills(workingDirectory) {
const skills = new Map();
for (const externalRootName of ['.claude', '.agents']) {
const homeRoot = path.join(os.homedir(), externalRootName, 'skills');
const source = externalRootName === '.agents' ? 'agents' : 'claude';
for (const skillMdPath of walkSkillMdFiles(homeRoot)) {
addSkillFromMdFile(skills, skillMdPath, SKILL_SCOPE.USER, source);
}
}
if (workingDirectory) {
const worktreeRoot = findWorktreeRoot(workingDirectory) || path.resolve(workingDirectory);
const ancestors = getAncestors(workingDirectory, worktreeRoot);
for (const ancestor of ancestors) {
for (const externalRootName of ['.claude', '.agents']) {
const source = externalRootName === '.agents' ? 'agents' : 'claude';
const externalSkillsRoot = path.join(ancestor, externalRootName, 'skills');
for (const skillMdPath of walkSkillMdFiles(externalSkillsRoot)) {
addSkillFromMdFile(skills, skillMdPath, SKILL_SCOPE.PROJECT, source);
}
}
}
}
const configDirectories = resolveSkillSearchDirectories(workingDirectory);
const homeOpencodeDir = path.resolve(path.join(os.homedir(), '.opencode'));
const customConfigDir = process.env.OPENCODE_CONFIG_DIR
? path.resolve(process.env.OPENCODE_CONFIG_DIR)
: null;
for (const dir of configDirectories) {
for (const subDir of ['skill', 'skills']) {
const root = path.join(dir, subDir);
for (const skillMdPath of walkSkillMdFiles(root)) {
const isUserConfigDir = dir === OPENCODE_CONFIG_DIR
|| dir === homeOpencodeDir
|| (customConfigDir && dir === customConfigDir);
const scope = isUserConfigDir ? SKILL_SCOPE.USER : SKILL_SCOPE.PROJECT;
addSkillFromMdFile(skills, skillMdPath, scope, 'opencode');
}
}
}
let configuredPaths = [];
try {
const config = readConfig(workingDirectory);
configuredPaths = Array.isArray(config?.skills?.paths) ? config.skills.paths : [];
} catch {
configuredPaths = [];
}
for (const skillPath of configuredPaths) {
if (typeof skillPath !== 'string' || !skillPath.trim()) continue;
const expanded = skillPath.startsWith('~/')
? path.join(os.homedir(), skillPath.slice(2))
: skillPath;
const resolved = path.isAbsolute(expanded)
? path.resolve(expanded)
: path.resolve(workingDirectory || process.cwd(), expanded);
for (const skillMdPath of walkSkillMdFiles(resolved)) {
addSkillFromMdFile(skills, skillMdPath, SKILL_SCOPE.PROJECT, 'opencode');
}
}
const cacheCandidates = [];
if (process.env.XDG_CACHE_HOME) {
cacheCandidates.push(path.join(process.env.XDG_CACHE_HOME, 'opencode', 'skills'));
}
cacheCandidates.push(path.join(os.homedir(), '.cache', 'opencode', 'skills'));
cacheCandidates.push(path.join(os.homedir(), 'Library', 'Caches', 'opencode', 'skills'));
for (const cacheRoot of cacheCandidates) {
if (!fs.existsSync(cacheRoot)) continue;
const entries = fs.readdirSync(cacheRoot, { withFileTypes: true });
for (const entry of entries) {
if (!entry.isDirectory()) continue;
const skillRoot = path.join(cacheRoot, entry.name);
for (const skillMdPath of walkSkillMdFiles(skillRoot)) {
addSkillFromMdFile(skills, skillMdPath, SKILL_SCOPE.USER, 'opencode');
}
}
}
return Array.from(skills.values());
}
function getSkillSources(skillName, workingDirectory, discoveredSkill = null) {
const projectPath = workingDirectory ? getProjectSkillPath(workingDirectory, skillName) : null;
const projectExists = projectPath && fs.existsSync(projectPath);
const projectDir = projectExists ? path.dirname(projectPath) : null;
const claudePath = workingDirectory ? getClaudeSkillPath(workingDirectory, skillName) : null;
const claudeExists = claudePath && fs.existsSync(claudePath);
const claudeDir = claudeExists ? path.dirname(claudePath) : null;
const userPath = getUserSkillPath(skillName);
const userExists = fs.existsSync(userPath);
const userDir = userExists ? path.dirname(userPath) : null;
const matchedDiscovered = discoveredSkill && discoveredSkill.name === skillName
? discoveredSkill
: discoverSkills(workingDirectory).find((skill) => skill.name === skillName);
let mdPath = null;
let mdScope = null;
let mdSource = null;
let mdDir = null;
if (projectExists) {
mdPath = projectPath;
mdScope = SKILL_SCOPE.PROJECT;
mdSource = 'opencode';
mdDir = projectDir;
} else if (claudeExists) {
mdPath = claudePath;
mdScope = SKILL_SCOPE.PROJECT;
mdSource = 'claude';
mdDir = claudeDir;
} else if (userExists) {
mdPath = userPath;
mdScope = SKILL_SCOPE.USER;
mdSource = 'opencode';
mdDir = userDir;
} else if (matchedDiscovered?.path) {
mdPath = matchedDiscovered.path;
mdScope = matchedDiscovered.scope || null;
mdSource = matchedDiscovered.source || null;
mdDir = path.dirname(matchedDiscovered.path);
}
const mdExists = !!mdPath;
const sources = {
md: {
exists: mdExists,
path: mdPath,
dir: mdDir,
scope: mdScope,
source: mdSource,
fields: [],
supportingFiles: []
},
projectMd: {
exists: projectExists,
path: projectPath,
dir: projectDir
},
claudeMd: {
exists: claudeExists,
path: claudePath,
dir: claudeDir
},
userMd: {
exists: userExists,
path: userPath,
dir: userDir
}
};
if (mdExists && mdDir) {
const { frontmatter, body } = parseMdFile(mdPath);
sources.md.fields = Object.keys(frontmatter);
sources.md.description = frontmatter.description || '';
sources.md.name = frontmatter.name || skillName;
if (body) {
sources.md.fields.push('instructions');
sources.md.instructions = body;
} else {
sources.md.instructions = '';
}
sources.md.supportingFiles = listSkillSupportingFiles(mdDir);
}
return sources;
}
function createSkill(skillName, config, workingDirectory, scope) {
ensureDirs();
if (!/^[a-z0-9][a-z0-9-]*[a-z0-9]$|^[a-z0-9]$/.test(skillName) || skillName.length > 64) {
throw new Error(`Invalid skill name "${skillName}". Must be 1-64 lowercase alphanumeric characters with hyphens, cannot start or end with hyphen.`);
}
const existing = getSkillScope(skillName, workingDirectory);
if (existing.path) {
throw new Error(`Skill ${skillName} already exists at ${existing.path}`);
}
let targetDir;
let targetPath;
let targetScope;
const requestedScope = scope === SKILL_SCOPE.PROJECT ? SKILL_SCOPE.PROJECT : SKILL_SCOPE.USER;
const requestedSource = config?.source === 'agents' ? 'agents' : 'opencode';
if (requestedScope === SKILL_SCOPE.PROJECT && workingDirectory) {
ensureProjectSkillDir(workingDirectory);
if (requestedSource === 'agents') {
targetDir = getProjectAgentsSkillDir(workingDirectory, skillName);
targetPath = getProjectAgentsSkillPath(workingDirectory, skillName);
} else {
targetDir = getProjectSkillDir(workingDirectory, skillName);
targetPath = getProjectSkillPath(workingDirectory, skillName);
}
targetScope = SKILL_SCOPE.PROJECT;
} else {
if (requestedSource === 'agents') {
targetDir = getUserAgentsSkillDir(skillName);
targetPath = getUserAgentsSkillPath(skillName);
} else {
targetDir = getUserSkillDir(skillName);
targetPath = getUserSkillPath(skillName);
}
targetScope = SKILL_SCOPE.USER;
}
fs.mkdirSync(targetDir, { recursive: true });
const { instructions, scope: _scopeFromConfig, source: _sourceFromConfig, supportingFiles, ...frontmatter } = config;
void _scopeFromConfig;
void _sourceFromConfig;
if (!frontmatter.name) {
frontmatter.name = skillName;
}
if (!frontmatter.description) {
throw new Error('Skill description is required');
}
writeMdFile(targetPath, frontmatter, instructions || '');
if (supportingFiles && Array.isArray(supportingFiles)) {
for (const file of supportingFiles) {
if (file.path && file.content !== undefined) {
writeSkillSupportingFile(targetDir, file.path, file.content);
}
}
}
console.log(`Created new skill: ${skillName} (scope: ${targetScope}, path: ${targetPath})`);
}
function updateSkill(skillName, updates, workingDirectory) {
ensureDirs();
const existing = getSkillScope(skillName, workingDirectory);
if (!existing.path) {
throw new Error(`Skill "${skillName}" not found`);
}
const mdPath = existing.path;
const mdDir = path.dirname(mdPath);
const mdData = parseMdFile(mdPath);
let mdModified = false;
for (const [field, value] of Object.entries(updates)) {
if (field === 'scope') {
continue;
}
if (field === 'instructions') {
const normalizedValue = typeof value === 'string' ? value : (value == null ? '' : String(value));
mdData.body = normalizedValue;
mdModified = true;
continue;
}
if (field === 'supportingFiles') {
if (Array.isArray(value)) {
for (const file of value) {
if (file.delete && file.path) {
deleteSkillSupportingFile(mdDir, file.path);
} else if (file.path && file.content !== undefined) {
writeSkillSupportingFile(mdDir, file.path, file.content);
}
}
}
continue;
}
mdData.frontmatter[field] = value;
mdModified = true;
}
if (mdModified) {
writeMdFile(mdPath, mdData.frontmatter, mdData.body);
}
console.log(`Updated skill: ${skillName} (path: ${mdPath})`);
}
function deleteSkill(skillName, workingDirectory) {
let deleted = false;
if (workingDirectory) {
const projectDir = getProjectSkillDir(workingDirectory, skillName);
if (fs.existsSync(projectDir)) {
fs.rmSync(projectDir, { recursive: true, force: true });
console.log(`Deleted project-level skill directory: ${projectDir}`);
deleted = true;
}
const claudeDir = getClaudeSkillDir(workingDirectory, skillName);
if (fs.existsSync(claudeDir)) {
fs.rmSync(claudeDir, { recursive: true, force: true });
console.log(`Deleted claude-compat skill directory: ${claudeDir}`);
deleted = true;
}
const projectAgentsDir = getProjectAgentsSkillDir(workingDirectory, skillName);
if (fs.existsSync(projectAgentsDir)) {
fs.rmSync(projectAgentsDir, { recursive: true, force: true });
console.log(`Deleted project-level agents skill directory: ${projectAgentsDir}`);
deleted = true;
}
}
const userDir = getUserSkillDir(skillName);
if (fs.existsSync(userDir)) {
fs.rmSync(userDir, { recursive: true, force: true });
console.log(`Deleted user-level skill directory: ${userDir}`);
deleted = true;
}
const userAgentsDir = getUserAgentsSkillDir(skillName);
if (fs.existsSync(userAgentsDir)) {
fs.rmSync(userAgentsDir, { recursive: true, force: true });
console.log(`Deleted user-level agents skill directory: ${userAgentsDir}`);
deleted = true;
}
if (!deleted) {
throw new Error(`Skill "${skillName}" not found`);
}
}
export {
getSkillSources,
getSkillScope,
getSkillWritePath,
discoverSkills,
createSkill,
updateSkill,
deleteSkill,
};
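
A hedged example of creating a user-scoped skill with one supporting file; the skill name and contents are invented:

// Hypothetical example: create a skill, then revise its instructions.
import { createSkill, updateSkill, getSkillSources } from './skills.js';

createSkill('release-notes', {
  description: 'Draft release notes from merged PRs',
  instructions: 'Summarize merged changes since the last tag.',
  supportingFiles: [{ path: 'templates/notes.md', content: '# Release Notes\n' }],
}, undefined, 'user');
updateSkill('release-notes', { instructions: 'Group changes by area before summarizing.' });
console.log(getSkillSources('release-notes').md.supportingFiles);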

View File

@@ -0,0 +1,510 @@
import crypto from 'crypto';
import { SignJWT, jwtVerify } from 'jose';
import fs from 'fs';
import path from 'path';
import os from 'os';
const SESSION_COOKIE_NAME = 'oc_ui_session';
const SESSION_TTL_MS = 12 * 60 * 60 * 1000;
const RATE_LIMIT_WINDOW_MS = 5 * 60 * 1000;
const RATE_LIMIT_MAX_ATTEMPTS = Number(process.env.OPENCHAMBER_RATE_LIMIT_MAX_ATTEMPTS) || 10;
const RATE_LIMIT_LOCKOUT_MS = 15 * 60 * 1000;
const RATE_LIMIT_CLEANUP_MS = 60 * 60 * 1000;
const RATE_LIMIT_NO_IP_MAX_ATTEMPTS = Number(process.env.OPENCHAMBER_RATE_LIMIT_NO_IP_MAX_ATTEMPTS) || 3;
const loginRateLimiter = new Map();
let rateLimitCleanupTimer = null;
const rateLimitLocks = new Map();
const getClientIp = (req) => {
const forwarded = req.headers['x-forwarded-for'];
if (typeof forwarded === 'string') {
const ip = forwarded.split(',')[0].trim();
if (ip.startsWith('::ffff:')) {
return ip.substring(7);
}
return ip;
}
const ip = req.ip || req.connection?.remoteAddress;
if (ip) {
if (ip.startsWith('::ffff:')) {
return ip.substring(7);
}
return ip;
}
return null;
};
const getRateLimitKey = (req) => {
const ip = getClientIp(req);
if (ip) return ip;
return 'rate-limit:no-ip';
};
const getRateLimitConfig = (key) => {
if (key === 'rate-limit:no-ip') {
return {
maxAttempts: RATE_LIMIT_NO_IP_MAX_ATTEMPTS,
windowMs: RATE_LIMIT_WINDOW_MS
};
}
return {
maxAttempts: RATE_LIMIT_MAX_ATTEMPTS,
windowMs: RATE_LIMIT_WINDOW_MS
};
};
const acquireRateLimitLock = async (key) => {
const prev = rateLimitLocks.get(key) || Promise.resolve();
const curr = prev.then(() => rateLimitLocks.delete(key));
rateLimitLocks.set(key, curr);
await curr;
};
const checkRateLimit = async (req) => {
const key = getRateLimitKey(req);
await acquireRateLimitLock(key);
const now = Date.now();
const { maxAttempts } = getRateLimitConfig(key);
let record;
try {
record = loginRateLimiter.get(key);
} catch (err) {
console.error('[RateLimit] Failed to get record', { key, error: err.message });
return {
allowed: true,
limit: maxAttempts,
remaining: maxAttempts,
reset: Math.ceil((now + RATE_LIMIT_WINDOW_MS) / 1000)
};
}
if (record?.lockedUntil && now < record.lockedUntil) {
return {
allowed: false,
retryAfter: Math.ceil((record.lockedUntil - now) / 1000),
locked: true,
limit: maxAttempts,
remaining: 0,
reset: Math.ceil(record.lockedUntil / 1000)
};
}
if (record?.lockedUntil && now >= record.lockedUntil) {
try {
loginRateLimiter.delete(key);
} catch (err) {
console.error('[RateLimit] Failed to delete expired record', { key, error: err.message });
}
}
if (!record || now - record.lastAttempt > RATE_LIMIT_WINDOW_MS) {
return {
allowed: true,
limit: maxAttempts,
remaining: maxAttempts,
reset: Math.ceil((now + RATE_LIMIT_WINDOW_MS) / 1000)
};
}
if (record.count >= maxAttempts) {
const lockedUntil = now + RATE_LIMIT_LOCKOUT_MS;
try {
loginRateLimiter.set(key, { count: record.count + 1, lastAttempt: now, lockedUntil });
} catch (err) {
console.error('[RateLimit] Failed to set lockout', { key, error: err.message });
}
return {
allowed: false,
retryAfter: Math.ceil(RATE_LIMIT_LOCKOUT_MS / 1000),
locked: true,
limit: maxAttempts,
remaining: 0,
reset: Math.ceil(lockedUntil / 1000)
};
}
const remaining = maxAttempts - record.count;
const reset = Math.ceil((record.lastAttempt + RATE_LIMIT_WINDOW_MS) / 1000);
return {
allowed: true,
limit: maxAttempts,
remaining,
reset
};
};
const recordFailedAttempt = async (req) => {
const key = getRateLimitKey(req);
await acquireRateLimitLock(key);
const now = Date.now();
const { maxAttempts } = getRateLimitConfig(key);
const record = loginRateLimiter.get(key);
if (!record || now - record.lastAttempt > RATE_LIMIT_WINDOW_MS) {
try {
loginRateLimiter.set(key, { count: 1, lastAttempt: now });
} catch (err) {
console.error('[RateLimit] Failed to record attempt', { key, error: err.message });
}
} else {
const newCount = record.count + 1;
try {
loginRateLimiter.set(key, { count: newCount, lastAttempt: now });
} catch (err) {
console.error('[RateLimit] Failed to record attempt', { key, error: err.message });
}
}
};
const clearRateLimit = async (req) => {
const key = getRateLimitKey(req);
await acquireRateLimitLock(key);
try {
loginRateLimiter.delete(key);
} catch (err) {
console.error('[RateLimit] Failed to clear', { key, error: err.message });
}
};
const cleanupRateLimitRecords = () => {
const now = Date.now();
for (const [key, record] of loginRateLimiter.entries()) {
const isExpired = record.lockedUntil && now >= record.lockedUntil;
const isStale = now - record.lastAttempt > RATE_LIMIT_CLEANUP_MS;
if (isExpired || isStale) {
try {
loginRateLimiter.delete(key);
} catch (err) {
console.error('[RateLimit] Cleanup failed', { key, error: err.message });
}
}
}
};
const startRateLimitCleanup = () => {
if (!rateLimitCleanupTimer) {
rateLimitCleanupTimer = setInterval(cleanupRateLimitRecords, RATE_LIMIT_CLEANUP_MS);
if (rateLimitCleanupTimer && typeof rateLimitCleanupTimer.unref === 'function') {
rateLimitCleanupTimer.unref();
}
}
};
const stopRateLimitCleanup = () => {
if (rateLimitCleanupTimer) {
clearInterval(rateLimitCleanupTimer);
rateLimitCleanupTimer = null;
}
};
const isSecureRequest = (req) => {
if (req.secure) {
return true;
}
const forwardedProto = req.headers['x-forwarded-proto'];
if (typeof forwardedProto === 'string') {
const firstProto = forwardedProto.split(',')[0]?.trim().toLowerCase();
return firstProto === 'https';
}
return false;
};
const parseCookies = (cookieHeader) => {
if (!cookieHeader || typeof cookieHeader !== 'string') {
return {};
}
return cookieHeader.split(';').reduce((acc, segment) => {
const [name, ...rest] = segment.split('=');
if (!name) {
return acc;
}
const key = name.trim();
if (!key) {
return acc;
}
const value = rest.join('=').trim();
acc[key] = decodeURIComponent(value || '');
return acc;
}, {});
};
const buildCookie = ({
name,
value,
maxAge,
secure,
}) => {
const attributes = [
`${name}=${value}`,
'Path=/',
'HttpOnly',
'SameSite=Strict',
];
if (typeof maxAge === 'number') {
attributes.push(`Max-Age=${Math.max(0, Math.floor(maxAge))}`);
}
const expires = maxAge === 0
? 'Thu, 01 Jan 1970 00:00:00 GMT'
: new Date(Date.now() + maxAge * 1000).toUTCString();
attributes.push(`Expires=${expires}`);
if (secure) {
attributes.push('Secure');
}
return attributes.join('; ');
};
const normalizePassword = (candidate) => {
if (typeof candidate !== 'string') {
return '';
}
return candidate.normalize().trim();
};
const OPENCHAMBER_DATA_DIR = process.env.OPENCHAMBER_DATA_DIR
? path.resolve(process.env.OPENCHAMBER_DATA_DIR)
: path.join(os.homedir(), '.config', 'openchamber');
const JWT_SECRET_FILE = path.join(OPENCHAMBER_DATA_DIR, 'jwt-secret');
function getOrCreateJwtSecret() {
const envSecret = process.env.OPENCODE_JWT_SECRET;
if (envSecret) {
return new TextEncoder().encode(envSecret);
}
try {
if (fs.existsSync(JWT_SECRET_FILE)) {
return new TextEncoder().encode(fs.readFileSync(JWT_SECRET_FILE, 'utf8').trim());
}
} catch (e) {
console.warn('[JWT] Failed to read secret file:', e.message);
}
const secret = crypto.randomBytes(32).toString('hex');
try {
fs.mkdirSync(OPENCHAMBER_DATA_DIR, { recursive: true });
fs.writeFileSync(JWT_SECRET_FILE, secret, { mode: 0o600 });
console.log('[JWT] Generated and persisted new secret to', JWT_SECRET_FILE);
} catch (e) {
console.warn('[JWT] Failed to persist secret:', e.message);
}
return new TextEncoder().encode(secret);
}
export const createUiAuth = ({
password,
cookieName = SESSION_COOKIE_NAME,
sessionTtlMs = SESSION_TTL_MS,
} = {}) => {
const normalizedPassword = normalizePassword(password);
if (!normalizedPassword) {
const setSessionCookie = (req, res, token) => {
const secure = isSecureRequest(req);
const maxAgeSeconds = Math.floor(sessionTtlMs / 1000);
const header = buildCookie({
name: cookieName,
value: encodeURIComponent(token),
maxAge: maxAgeSeconds,
secure,
});
res.setHeader('Set-Cookie', header);
};
const ensureSessionToken = async (req, res) => {
const cookies = parseCookies(req.headers.cookie);
if (cookies[cookieName]) {
return cookies[cookieName];
}
const token = crypto.randomBytes(32).toString('base64url');
setSessionCookie(req, res, token);
return token;
};
return {
enabled: false,
requireAuth: (_req, _res, next) => next(),
handleSessionStatus: (_req, res) => {
res.json({ authenticated: true, disabled: true });
},
handleSessionCreate: (_req, res) => {
res.status(400).json({ error: 'UI password not configured' });
},
ensureSessionToken,
dispose: () => {
},
};
}
const salt = crypto.randomBytes(16);
const expectedHash = crypto.scryptSync(normalizedPassword, salt, 64);
const JWT_SECRET = getOrCreateJwtSecret();
const getTokenFromRequest = (req) => {
const cookies = parseCookies(req.headers.cookie);
if (cookies[cookieName]) {
return cookies[cookieName];
}
return null;
};
const setSessionCookie = (req, res, token) => {
const secure = isSecureRequest(req);
const maxAgeSeconds = Math.floor(sessionTtlMs / 1000);
const header = buildCookie({
name: cookieName,
value: encodeURIComponent(token),
maxAge: maxAgeSeconds,
secure,
});
res.setHeader('Set-Cookie', header);
};
const clearSessionCookie = (req, res) => {
const secure = isSecureRequest(req);
const header = buildCookie({
name: cookieName,
value: '',
maxAge: 0,
secure,
});
res.setHeader('Set-Cookie', header);
};
const verifyPassword = (candidate) => {
if (!candidate) {
return false;
}
const normalizedCandidate = normalizePassword(candidate);
if (!normalizedCandidate) {
return false;
}
try {
const candidateHash = crypto.scryptSync(normalizedCandidate, salt, 64);
return crypto.timingSafeEqual(candidateHash, expectedHash);
} catch {
return false;
}
};
const isSessionValid = async (token) => {
if (!token) {
return false;
}
try {
await jwtVerify(token, JWT_SECRET);
return true;
} catch {
return false;
}
};
const issueSession = async (req, res) => {
const token = await new SignJWT({ type: 'ui-session' })
.setProtectedHeader({ alg: 'HS256' })
.setIssuedAt()
.setExpirationTime(sessionTtlMs / 1000 + 's')
.sign(JWT_SECRET);
setSessionCookie(req, res, token);
return token;
};
startRateLimitCleanup();
const respondUnauthorized = (req, res) => {
res.status(401);
const acceptsJson = req.headers.accept?.includes('application/json');
if (acceptsJson || req.path.startsWith('/api')) {
res.json({ error: 'UI authentication required', locked: true });
} else {
res.type('text/plain').send('Authentication required');
}
};
const requireAuth = async (req, res, next) => {
if (req.method === 'OPTIONS') {
return next();
}
const token = getTokenFromRequest(req);
if (await isSessionValid(token)) {
return next();
}
clearSessionCookie(req, res);
return respondUnauthorized(req, res);
};
const handleSessionStatus = async (req, res) => {
const token = getTokenFromRequest(req);
if (await isSessionValid(token)) {
res.json({ authenticated: true });
return;
}
clearSessionCookie(req, res);
res.status(401).json({ authenticated: false, locked: true });
};
const handleSessionCreate = async (req, res) => {
const rateLimitResult = await checkRateLimit(req);
res.setHeader('X-RateLimit-Limit', rateLimitResult.limit);
res.setHeader('X-RateLimit-Remaining', rateLimitResult.remaining);
res.setHeader('X-RateLimit-Reset', rateLimitResult.reset);
if (!rateLimitResult.allowed) {
res.setHeader('Retry-After', rateLimitResult.retryAfter);
res.status(429).json({
error: 'Too many login attempts, please try again later',
retryAfter: rateLimitResult.retryAfter
});
return;
}
const candidate = typeof req.body?.password === 'string' ? req.body.password : '';
if (!verifyPassword(candidate)) {
await recordFailedAttempt(req);
clearSessionCookie(req, res);
res.status(401).json({ error: 'Invalid credentials' });
return;
}
await clearRateLimit(req);
await issueSession(req, res);
res.json({ authenticated: true });
};
const dispose = () => {
loginRateLimiter.clear();
if (rateLimitCleanupTimer) {
clearInterval(rateLimitCleanupTimer);
rateLimitCleanupTimer = null;
}
};
return {
enabled: true,
requireAuth,
handleSessionStatus,
handleSessionCreate,
ensureSessionToken: async (req, _res) => {
const token = getTokenFromRequest(req);
return (await isSessionValid(token)) ? token : null;
},
dispose,
};
};
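
A minimal wiring sketch, assuming an Express-style app (the handlers above only rely on req, res, and next in the Express shape); the password environment variable name is hypothetical:

// Hypothetical Express wiring for the createUiAuth factory above.
import express from 'express';
import { createUiAuth } from './ui-auth.js';

const app = express();
app.use(express.json());

const uiAuth = createUiAuth({ password: process.env.OPENCHAMBER_UI_PASSWORD }); // env var name assumed
app.get('/api/session', uiAuth.handleSessionStatus);
app.post('/api/session', uiAuth.handleSessionCreate);
app.use('/api', uiAuth.requireAuth); // routes registered after this point require a valid session

app.listen(3000, () => console.log('listening on :3000'));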

View File

@@ -0,0 +1,362 @@
import { spawnSync } from 'child_process';
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);
const PACKAGE_NAME = '@openchamber/web';
const NPM_REGISTRY_URL = `https://registry.npmjs.org/${PACKAGE_NAME}`;
const CHANGELOG_URL = 'https://raw.githubusercontent.com/btriapitsyn/openchamber/main/CHANGELOG.md';
/**
 * Detect which package manager was used to install this package.
 * Strategy (in order):
 * 1. Honor OPENCHAMBER_PACKAGE_MANAGER when it names an available package manager
 * 2. Detect from the runtime executable path (process.execPath)
 * 3. Collect hints from npm_config_user_agent and npm_execpath (set during install)
 * 4. Detect from the invoked binary path (covers bun global symlink installs)
 * 5. Analyze the package install location for PM-specific path patterns
 * 6. Use a hinted PM only if it actually owns the global install
 * 7. Probe available PM binaries for the package, then fall back to npm
 */
export function detectPackageManager() {
const forcedPm = process.env.OPENCHAMBER_PACKAGE_MANAGER?.trim();
if (forcedPm && ['npm', 'pnpm', 'yarn', 'bun'].includes(forcedPm)) {
const forcedPmCommand = resolvePackageManagerCommand(forcedPm);
if (isCommandAvailable(forcedPmCommand)) {
return forcedPm;
}
}
// Strategy 1: Detect from runtime executable path (reliable for server-side updates)
const runtimePm = detectPackageManagerFromRuntimePath(process.execPath);
if (runtimePm && isCommandAvailable(resolvePackageManagerCommand(runtimePm))) {
return runtimePm;
}
// Strategy 2: Check user agent (most reliable during install)
const userAgent = process.env.npm_config_user_agent || '';
let hintedPm = null;
if (userAgent.startsWith('pnpm')) hintedPm = 'pnpm';
else if (userAgent.startsWith('yarn')) hintedPm = 'yarn';
else if (userAgent.startsWith('bun')) hintedPm = 'bun';
else if (userAgent.startsWith('npm')) hintedPm = 'npm';
// Strategy 3: Check execpath
const execPath = process.env.npm_execpath || '';
if (!hintedPm) {
if (execPath.includes('pnpm')) hintedPm = 'pnpm';
else if (execPath.includes('yarn')) hintedPm = 'yarn';
else if (execPath.includes('bun')) hintedPm = 'bun';
else if (execPath.includes('npm')) hintedPm = 'npm';
}
// Strategy 4: Detect from invoked binary path (works for bun global symlink installs)
const invokedPm = detectPackageManagerFromInvocationPath(process.argv?.[1]);
if (invokedPm && isCommandAvailable(resolvePackageManagerCommand(invokedPm))) {
return invokedPm;
}
if (!hintedPm) {
hintedPm = invokedPm;
}
// Strategy 5: Analyze package location for PM-specific patterns
try {
const pkgPath = path.resolve(__dirname, '..', '..');
const pmFromPath = detectPackageManagerFromInstallPath(pkgPath);
if (pmFromPath && isCommandAvailable(resolvePackageManagerCommand(pmFromPath))) {
return pmFromPath;
}
if (!hintedPm) {
hintedPm = pmFromPath;
}
} catch {
// Ignore path resolution errors
}
// Validate the hinted PM actually owns the global install.
// This avoids false positives (for example running via bunx while installed with npm).
if (hintedPm && isCommandAvailable(resolvePackageManagerCommand(hintedPm)) && isPackageInstalledWith(hintedPm)) {
return hintedPm;
}
// Strategy 6: Check which PM binaries are available and preferred
const pmChecks = [
{ name: 'pnpm', check: () => isCommandAvailable(resolvePackageManagerCommand('pnpm')) },
{ name: 'yarn', check: () => isCommandAvailable(resolvePackageManagerCommand('yarn')) },
{ name: 'bun', check: () => isCommandAvailable(resolvePackageManagerCommand('bun')) },
{ name: 'npm', check: () => isCommandAvailable(resolvePackageManagerCommand('npm')) },
];
for (const { name, check } of pmChecks) {
if (check()) {
// Verify this PM actually has the package installed globally
if (isPackageInstalledWith(name)) {
return name;
}
}
}
return 'npm';
}
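// Usage sketch (illustrative only): pick the update command for whichever PM owns this install.
//   import { detectPackageManager, getUpdateCommand } from './update.js'; // path assumed
//   const pm = detectPackageManager();
//   console.log(`Update via: ${getUpdateCommand(pm)}`);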
function detectPackageManagerFromInstallPath(pkgPath) {
if (!pkgPath) return null;
const normalized = pkgPath.replace(/\\/g, '/').toLowerCase();
if (normalized.includes('/.pnpm/') || normalized.includes('/pnpm/')) return 'pnpm';
if (normalized.includes('/.yarn/')) return 'yarn';
if (normalized.includes('/.bun/') || normalized.includes('/bun/install/')) return 'bun';
if (normalized.includes('/node_modules/')) return 'npm';
return null;
}
function detectPackageManagerFromRuntimePath(runtimePath) {
if (!runtimePath || typeof runtimePath !== 'string') return null;
const normalized = runtimePath.replace(/\\/g, '/').toLowerCase();
if (normalized.includes('/.bun/bin/bun') || normalized.endsWith('/bun') || normalized.endsWith('/bun.exe')) {
return 'bun';
}
if (normalized.includes('/pnpm/')) return 'pnpm';
if (normalized.includes('/yarn/')) return 'yarn';
if (normalized.includes('/node') || normalized.endsWith('/node.exe')) return 'npm';
return null;
}
function detectPackageManagerFromInvocationPath(invokedPath) {
if (!invokedPath || typeof invokedPath !== 'string') return null;
const normalized = invokedPath.replace(/\\/g, '/').toLowerCase();
if (normalized.includes('/.bun/bin/')) return 'bun';
if (normalized.includes('/.pnpm/')) return 'pnpm';
if (normalized.includes('/.yarn/')) return 'yarn';
return null;
}
function getPackageManagerCommandCandidates(pm) {
const candidates = [];
if (pm === 'bun') {
const bunExecutable = process.platform === 'win32' ? 'bun.exe' : 'bun';
if (process.env.BUN_INSTALL) {
candidates.push(path.join(process.env.BUN_INSTALL, 'bin', bunExecutable));
}
if (process.env.HOME) {
candidates.push(path.join(process.env.HOME, '.bun', 'bin', bunExecutable));
}
if (process.env.USERPROFILE) {
candidates.push(path.join(process.env.USERPROFILE, '.bun', 'bin', bunExecutable));
}
}
candidates.push(pm);
return [...new Set(candidates.filter(Boolean))];
}
function resolvePackageManagerCommand(pm) {
const candidates = getPackageManagerCommandCandidates(pm);
for (const candidate of candidates) {
if (isCommandAvailable(candidate)) {
return candidate;
}
}
return pm;
}
function quoteCommand(command) {
if (!command) return command;
if (!/\s/.test(command)) return command;
if (process.platform === 'win32') {
return `"${command.replace(/"/g, '""')}"`;
}
return `'${command.replace(/'/g, "'\\''")}'`;
}
function isCommandAvailable(command) {
try {
const result = spawnSync(command, ['--version'], {
encoding: 'utf8',
stdio: ['ignore', 'pipe', 'pipe'],
timeout: 5000,
});
return result.status === 0;
} catch {
return false;
}
}
function isPackageInstalledWith(pm) {
try {
const pmCommand = resolvePackageManagerCommand(pm);
let args;
switch (pm) {
case 'pnpm':
args = ['list', '-g', '--depth=0', PACKAGE_NAME];
break;
case 'yarn':
args = ['global', 'list', '--depth=0'];
break;
case 'bun':
args = ['pm', 'ls', '-g'];
break;
default:
args = ['list', '-g', '--depth=0', PACKAGE_NAME];
}
const result = spawnSync(pmCommand, args, {
encoding: 'utf8',
stdio: ['ignore', 'pipe', 'pipe'],
timeout: 10000,
});
if (result.status !== 0) return false;
return result.stdout.includes(PACKAGE_NAME) || result.stdout.includes('openchamber');
} catch {
return false;
}
}
/**
* Get the update command for the detected package manager
*/
export function getUpdateCommand(pm = detectPackageManager()) {
const pmCommand = quoteCommand(resolvePackageManagerCommand(pm));
switch (pm) {
case 'pnpm':
return `${pmCommand} add -g ${PACKAGE_NAME}@latest`;
case 'yarn':
return `${pmCommand} global add ${PACKAGE_NAME}@latest`;
case 'bun':
return `${pmCommand} add -g ${PACKAGE_NAME}@latest`;
default:
return `${pmCommand} install -g ${PACKAGE_NAME}@latest`;
}
}
/**
* Get current installed version from package.json
*/
export function getCurrentVersion() {
try {
const pkgPath = path.resolve(__dirname, '..', '..', 'package.json');
const pkg = JSON.parse(fs.readFileSync(pkgPath, 'utf8'));
return pkg.version || 'unknown';
} catch {
return 'unknown';
}
}
/**
* Fetch latest version from npm registry
*/
export async function getLatestVersion() {
try {
const response = await fetch(NPM_REGISTRY_URL, {
headers: { Accept: 'application/json' },
signal: AbortSignal.timeout(10000),
});
if (!response.ok) {
throw new Error(`Registry responded with ${response.status}`);
}
const data = await response.json();
return data['dist-tags']?.latest || null;
} catch (error) {
console.warn('Failed to fetch latest version from npm:', error.message);
return null;
}
}
/**
 * Parse a semver version into a single number for comparison.
 * Assumes minor and patch components stay below 100.
 */
function parseVersion(version) {
const parts = version.replace(/^v/, '').split('.').map(Number);
return (parts[0] || 0) * 10000 + (parts[1] || 0) * 100 + (parts[2] || 0);
}
/**
* Fetch changelog notes between versions
*/
export async function fetchChangelogNotes(fromVersion, toVersion) {
try {
const response = await fetch(CHANGELOG_URL, {
signal: AbortSignal.timeout(10000),
});
if (!response.ok) return undefined;
const changelog = await response.text();
const sections = changelog.split(/^## /m).slice(1);
const fromNum = parseVersion(fromVersion);
const toNum = parseVersion(toVersion);
const relevantSections = sections.filter((section) => {
const match = section.match(/^\[(\d+\.\d+\.\d+)\]/);
if (!match) return false;
const ver = parseVersion(match[1]);
return ver > fromNum && ver <= toNum;
});
if (relevantSections.length === 0) return undefined;
return relevantSections
.map((s) => '## ' + s.trim())
.join('\n\n');
} catch {
return undefined;
}
}
/**
* Check for updates and return update info
*/
export async function checkForUpdates() {
const currentVersion = getCurrentVersion();
const latestVersion = await getLatestVersion();
if (!latestVersion || currentVersion === 'unknown') {
return {
available: false,
currentVersion,
error: 'Unable to determine versions',
};
}
const currentNum = parseVersion(currentVersion);
const latestNum = parseVersion(latestVersion);
const available = latestNum > currentNum;
const pm = detectPackageManager();
let changelog;
if (available) {
changelog = await fetchChangelogNotes(currentVersion, latestVersion);
}
return {
available,
version: latestVersion,
currentVersion,
body: changelog,
packageManager: pm,
// Show our CLI command, not raw package manager command
updateCommand: 'openchamber update',
};
}
/**
* Execute the update (used by CLI)
*/
export function executeUpdate(pm = detectPackageManager()) {
const command = getUpdateCommand(pm);
console.log(`Updating ${PACKAGE_NAME} using ${pm}...`);
console.log(`Running: ${command}`);
const result = spawnSync(command, {
stdio: 'inherit',
shell: true,
});
return {
success: result.status === 0,
exitCode: result.status,
};
}

View File

@@ -0,0 +1,55 @@
# Quota Module Documentation
## Purpose
This module fetches quota and usage signals for supported providers in the web server runtime.
## Entrypoints and structure
- `packages/web/server/lib/quota/index.js`: public entrypoint imported by `packages/web/server/index.js`.
- `packages/web/server/lib/quota/providers/index.js`: provider registry, configured-provider list, and provider dispatcher.
- `packages/web/server/lib/quota/providers/interface.js`: JSDoc provider contract used as implementation reference.
- `packages/web/server/lib/quota/providers/google/`: Google-specific auth, API, and transform modules.
- `packages/web/server/lib/quota/utils/`: shared auth, transform, and formatting helpers.
## Supported provider IDs (dispatcher)
These provider IDs are currently dispatchable via `fetchQuotaForProvider(providerId)` in `packages/web/server/lib/quota/providers/index.js`.
| Provider ID | Display name | Module | Auth aliases/keys |
| --- | --- | --- | --- |
| `claude` | Claude | `providers/claude.js` | `anthropic`, `claude` |
| `codex` | Codex | `providers/codex.js` | `openai`, `codex`, `chatgpt` |
| `google` | Google | `providers/google/index.js` | `google`, `google.oauth`, Antigravity accounts file |
| `github-copilot` | GitHub Copilot | `providers/copilot.js` | `github-copilot`, `copilot` |
| `github-copilot-addon` | GitHub Copilot Add-on | `providers/copilot.js` | `github-copilot`, `copilot` |
| `kimi-for-coding` | Kimi for Coding | `providers/kimi.js` | `kimi-for-coding`, `kimi` |
| `nano-gpt` | NanoGPT | `providers/nanogpt.js` | `nano-gpt`, `nanogpt`, `nano_gpt` |
| `openrouter` | OpenRouter | `providers/openrouter.js` | `openrouter` |
| `zai-coding-plan` | z.ai | `providers/zai.js` | `zai-coding-plan`, `zai`, `z.ai` |
| `minimax-coding-plan` | MiniMax Coding Plan (minimax.io) | `providers/minimax-coding-plan.js` | `minimax-coding-plan` |
| `minimax-cn-coding-plan` | MiniMax Coding Plan (minimaxi.com) | `providers/minimax-cn-coding-plan.js` | `minimax-cn-coding-plan` |
| `ollama-cloud` | Ollama Cloud | `providers/ollama-cloud.js` | Cookie file at `~/.config/ollama-quota/cookie` (raw session cookie string) |
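A minimal usage sketch of the dispatcher (illustrative only; the import path assumes the caller sits next to `packages/web/server/index.js`):

```js
// Sketch: list configured providers, then fetch quota for each registered ID.
import {
  listConfiguredQuotaProviders,
  fetchQuotaForProvider
} from './lib/quota/index.js';

for (const providerId of listConfiguredQuotaProviders()) {
  const result = await fetchQuotaForProvider(providerId);
  if (result.ok) {
    console.log(providerId, result.usage?.windows);
  } else {
    console.warn(providerId, result.error);
  }
}
```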
## Internal-only provider module
- `providers/openai.js` exists for logic parity/reuse but is intentionally not registered for dispatcher ID routing.
## Response contract
All providers should return results via shared helpers to preserve API shape:
- Required fields: `providerId`, `providerName`, `ok`, `configured`, `usage`, `fetchedAt`
- Optional field: `error`
- Unsupported provider requests should return `ok: false`, `configured: false`, and `error: 'Unsupported provider'`
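For illustration, a successful result assembled through `buildResult` and `toUsageWindow` looks roughly like this (field values are invented):

```js
{
  providerId: 'claude',
  providerName: 'Claude',
  ok: true,
  configured: true,
  usage: {
    windows: {
      '5h': {
        usedPercent: 42,
        remainingPercent: 58,
        windowSeconds: null,
        resetAfterSeconds: 5400,
        resetAt: 1767225600000,
        resetAtFormatted: '3:00 PM',
        resetAfterFormatted: '3:00 PM'
      }
    }
  },
  fetchedAt: 1767220200000
}
```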
## Add a new provider (quick steps)
1. Choose module shape based on complexity:
- Simple providers: create `packages/web/server/lib/quota/providers/<provider>.js`.
- Complex providers (multi-source auth, multiple API calls, non-trivial transforms): create `packages/web/server/lib/quota/providers/<provider>/` with split modules like Google (`index.js`, `auth.js`, `api.js`, `transforms.js`).
2. Export `providerId`, `providerName`, `aliases`, `isConfigured`, and `fetchQuota` (a sketch follows this list).
3. Use shared helpers from `packages/web/server/lib/quota/utils/index.js` (`buildResult`, `toUsageWindow`, auth/conversion helpers) to keep payload shape consistent.
4. Register the provider in `packages/web/server/lib/quota/providers/index.js`.
5. If needed for direct use, export a named fetcher from `packages/web/server/lib/quota/providers/index.js` and `packages/web/server/lib/quota/index.js`.
6. Update this file with the new provider ID, module path, and alias/auth details.
7. Validate with `bun run type-check`, `bun run lint`, and `bun run build`.
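A minimal sketch of a simple provider module covering steps 1–4 (the `example-provider` ID, usage endpoint, and payload fields are placeholders, not a real integration):

```js
// packages/web/server/lib/quota/providers/example-provider.js (hypothetical)
import { readAuthFile } from '../../opencode/auth.js';
import {
  getAuthEntry,
  normalizeAuthEntry,
  buildResult,
  toUsageWindow,
  toNumber,
  toTimestamp
} from '../utils/index.js';

export const providerId = 'example-provider';
export const providerName = 'Example Provider';
export const aliases = ['example-provider', 'example'];

export const isConfigured = () => {
  const entry = normalizeAuthEntry(getAuthEntry(readAuthFile(), aliases));
  return Boolean(entry?.key || entry?.token);
};

export const fetchQuota = async () => {
  const entry = normalizeAuthEntry(getAuthEntry(readAuthFile(), aliases));
  const apiKey = entry?.key ?? entry?.token;
  if (!apiKey) {
    return buildResult({ providerId, providerName, ok: false, configured: false, error: 'Not configured' });
  }
  try {
    // Placeholder endpoint; substitute the provider's real usage API.
    const response = await fetch('https://example.invalid/v1/usage', {
      headers: { Authorization: `Bearer ${apiKey}` }
    });
    if (!response.ok) {
      return buildResult({ providerId, providerName, ok: false, configured: true, error: `API error: ${response.status}` });
    }
    const payload = await response.json();
    const windows = {
      daily: toUsageWindow({
        usedPercent: toNumber(payload?.used_percent),
        windowSeconds: 86400,
        resetAt: toTimestamp(payload?.reset_at)
      })
    };
    return buildResult({ providerId, providerName, ok: true, configured: true, usage: { windows } });
  } catch (error) {
    return buildResult({ providerId, providerName, ok: false, configured: true, error: error instanceof Error ? error.message : 'Request failed' });
  }
};
```

Once it is registered in the registry object in `providers/index.js` (step 4), `fetchQuotaForProvider('example-provider')` can dispatch to it.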
## Notes for contributors
- Keep provider IDs stable; clients use them directly.
- Avoid adding alias-based dispatch in `fetchQuotaForProvider`; dispatch currently expects exact provider IDs.
- Keep Google behavior changes isolated and review `providers/google/*` together.

View File

@@ -0,0 +1,24 @@
/**
* Quota module
*
* Provides quota usage tracking for various AI provider services.
* @module quota
*/
export {
listConfiguredQuotaProviders,
fetchQuotaForProvider,
fetchClaudeQuota,
fetchOpenaiQuota,
fetchGoogleQuota,
fetchCodexQuota,
fetchCopilotQuota,
fetchCopilotAddonQuota,
fetchKimiQuota,
fetchOpenRouterQuota,
fetchZaiQuota,
fetchNanoGptQuota,
fetchMinimaxCodingPlanQuota,
fetchMinimaxCnCodingPlanQuota,
fetchOllamaCloudQuota
} from './providers/index.js';

View File

@@ -0,0 +1,107 @@
import { readAuthFile } from '../../opencode/auth.js';
import {
getAuthEntry,
normalizeAuthEntry,
buildResult,
toUsageWindow,
toNumber,
toTimestamp
} from '../utils/index.js';
export const providerId = 'claude';
export const providerName = 'Claude';
export const aliases = ['anthropic', 'claude'];
export const isConfigured = () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
return Boolean(entry?.access || entry?.token);
};
export const fetchQuota = async () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
const accessToken = entry?.access ?? entry?.token;
if (!accessToken) {
return buildResult({
providerId,
providerName,
ok: false,
configured: false,
error: 'Not configured'
});
}
try {
const response = await fetch('https://api.anthropic.com/api/oauth/usage', {
method: 'GET',
headers: {
Authorization: `Bearer ${accessToken}`,
'anthropic-beta': 'oauth-2025-04-20'
}
});
if (!response.ok) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: `API error: ${response.status}`
});
}
const payload = await response.json();
const windows = {};
const fiveHour = payload?.five_hour ?? null;
const sevenDay = payload?.seven_day ?? null;
const sevenDaySonnet = payload?.seven_day_sonnet ?? null;
const sevenDayOpus = payload?.seven_day_opus ?? null;
if (fiveHour) {
windows['5h'] = toUsageWindow({
usedPercent: toNumber(fiveHour.utilization),
windowSeconds: null,
resetAt: toTimestamp(fiveHour.resets_at)
});
}
if (sevenDay) {
windows['7d'] = toUsageWindow({
usedPercent: toNumber(sevenDay.utilization),
windowSeconds: null,
resetAt: toTimestamp(sevenDay.resets_at)
});
}
if (sevenDaySonnet) {
windows['7d-sonnet'] = toUsageWindow({
usedPercent: toNumber(sevenDaySonnet.utilization),
windowSeconds: null,
resetAt: toTimestamp(sevenDaySonnet.resets_at)
});
}
if (sevenDayOpus) {
windows['7d-opus'] = toUsageWindow({
usedPercent: toNumber(sevenDayOpus.utilization),
windowSeconds: null,
resetAt: toTimestamp(sevenDayOpus.resets_at)
});
}
return buildResult({
providerId,
providerName,
ok: true,
configured: true,
usage: { windows }
});
} catch (error) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: error instanceof Error ? error.message : 'Request failed'
});
}
};

View File

@@ -0,0 +1,113 @@
import { readAuthFile } from '../../opencode/auth.js';
import {
getAuthEntry,
normalizeAuthEntry,
buildResult,
toUsageWindow,
toNumber,
toTimestamp,
formatMoney
} from '../utils/index.js';
export const providerId = 'codex';
export const providerName = 'Codex';
export const aliases = ['openai', 'codex', 'chatgpt'];
export const isConfigured = () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
return Boolean(entry?.access || entry?.token);
};
export const fetchQuota = async () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
const accessToken = entry?.access ?? entry?.token;
const accountId = entry?.accountId;
if (!accessToken) {
return buildResult({
providerId,
providerName,
ok: false,
configured: false,
error: 'Not configured'
});
}
try {
const headers = {
Authorization: `Bearer ${accessToken}`,
'Content-Type': 'application/json',
...(accountId ? { 'ChatGPT-Account-Id': accountId } : {})
};
const response = await fetch('https://chatgpt.com/backend-api/wham/usage', {
method: 'GET',
headers
});
if (!response.ok) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: response.status === 401
? 'Session expired \u2014 please re-authenticate with OpenAI'
: `API error: ${response.status}`
});
}
const payload = await response.json();
const primary = payload?.rate_limit?.primary_window ?? null;
const secondary = payload?.rate_limit?.secondary_window ?? null;
const credits = payload?.credits ?? null;
const windows = {};
if (primary) {
windows['5h'] = toUsageWindow({
usedPercent: toNumber(primary.used_percent),
windowSeconds: toNumber(primary.limit_window_seconds),
resetAt: toTimestamp(primary.reset_at)
});
}
if (secondary) {
windows['weekly'] = toUsageWindow({
usedPercent: toNumber(secondary.used_percent),
windowSeconds: toNumber(secondary.limit_window_seconds),
resetAt: toTimestamp(secondary.reset_at)
});
}
if (credits) {
const balance = toNumber(credits.balance);
const unlimited = Boolean(credits.unlimited);
const label = unlimited
? 'Unlimited'
: balance !== null
? `$${formatMoney(balance)} remaining`
: null;
windows.credits = toUsageWindow({
usedPercent: null,
windowSeconds: null,
resetAt: null,
valueLabel: label
});
}
return buildResult({
providerId,
providerName,
ok: true,
configured: true,
usage: { windows }
});
} catch (error) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: error instanceof Error ? error.message : 'Request failed'
});
}
};

View File

@@ -0,0 +1,165 @@
import { readAuthFile } from '../../opencode/auth.js';
import {
getAuthEntry,
normalizeAuthEntry,
buildResult,
toUsageWindow,
toNumber,
toTimestamp
} from '../utils/index.js';
const buildCopilotWindows = (payload) => {
const quota = payload?.quota_snapshots ?? {};
const resetAt = toTimestamp(payload?.quota_reset_date);
const windows = {};
const addWindow = (label, snapshot) => {
if (!snapshot) return;
const entitlement = toNumber(snapshot.entitlement);
const remaining = toNumber(snapshot.remaining);
const usedPercent = entitlement && remaining !== null
? Math.max(0, Math.min(100, 100 - (remaining / entitlement) * 100))
: null;
const valueLabel = entitlement !== null && remaining !== null
? `${remaining.toFixed(0)} / ${entitlement.toFixed(0)} left`
: null;
windows[label] = toUsageWindow({
usedPercent,
windowSeconds: null,
resetAt,
valueLabel
});
};
addWindow('chat', quota.chat);
addWindow('completions', quota.completions);
addWindow('premium', quota.premium_interactions);
return windows;
};
export const providerId = 'github-copilot';
export const providerName = 'GitHub Copilot';
export const aliases = ['github-copilot', 'copilot'];
export const isConfigured = () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
return Boolean(entry?.access || entry?.token);
};
export const fetchQuota = async () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
const accessToken = entry?.access ?? entry?.token;
if (!accessToken) {
return buildResult({
providerId,
providerName,
ok: false,
configured: false,
error: 'Not configured'
});
}
try {
const response = await fetch('https://api.github.com/copilot_internal/user', {
method: 'GET',
headers: {
Authorization: `token ${accessToken}`,
Accept: 'application/json',
'Editor-Version': 'vscode/1.96.2',
'X-Github-Api-Version': '2025-04-01'
}
});
if (!response.ok) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: `API error: ${response.status}`
});
}
const payload = await response.json();
return buildResult({
providerId,
providerName,
ok: true,
configured: true,
usage: { windows: buildCopilotWindows(payload) }
});
} catch (error) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: error instanceof Error ? error.message : 'Request failed'
});
}
};
export const providerIdAddon = 'github-copilot-addon';
export const providerNameAddon = 'GitHub Copilot Add-on';
export const fetchQuotaAddon = async () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
const accessToken = entry?.access ?? entry?.token;
if (!accessToken) {
return buildResult({
providerId: providerIdAddon,
providerName: providerNameAddon,
ok: false,
configured: false,
error: 'Not configured'
});
}
try {
const response = await fetch('https://api.github.com/copilot_internal/user', {
method: 'GET',
headers: {
Authorization: `token ${accessToken}`,
Accept: 'application/json',
'Editor-Version': 'vscode/1.96.2',
'X-Github-Api-Version': '2025-04-01'
}
});
if (!response.ok) {
return buildResult({
providerId: providerIdAddon,
providerName: providerNameAddon,
ok: false,
configured: true,
error: `API error: ${response.status}`
});
}
const payload = await response.json();
const windows = buildCopilotWindows(payload);
const premium = windows.premium ? { premium: windows.premium } : windows;
return buildResult({
providerId: providerIdAddon,
providerName: providerNameAddon,
ok: true,
configured: true,
usage: { windows: premium }
});
} catch (error) {
return buildResult({
providerId: providerIdAddon,
providerName: providerNameAddon,
ok: false,
configured: true,
error: error instanceof Error ? error.message : 'Request failed'
});
}
};

View File

@@ -0,0 +1,92 @@
/**
* Google Provider - API
*
* API calls for Google quota providers.
* @module quota/providers/google/api
*/
const GOOGLE_PRIMARY_ENDPOINT = 'https://cloudcode-pa.googleapis.com';
const GOOGLE_ENDPOINTS = [
'https://daily-cloudcode-pa.sandbox.googleapis.com',
'https://autopush-cloudcode-pa.sandbox.googleapis.com',
GOOGLE_PRIMARY_ENDPOINT
];
const GOOGLE_HEADERS = {
'User-Agent': 'antigravity/1.11.5 windows/amd64',
'X-Goog-Api-Client': 'google-cloud-sdk vscode_cloudshelleditor/0.1',
'Client-Metadata':
'{"ideType":"IDE_UNSPECIFIED","platform":"PLATFORM_UNSPECIFIED","pluginType":"GEMINI"}'
};
export const refreshGoogleAccessToken = async (refreshToken, clientId, clientSecret) => {
const response = await fetch('https://oauth2.googleapis.com/token', {
method: 'POST',
headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
body: new URLSearchParams({
client_id: clientId,
client_secret: clientSecret,
refresh_token: refreshToken,
grant_type: 'refresh_token'
})
});
if (!response.ok) {
return null;
}
const data = await response.json();
return typeof data?.access_token === 'string' ? data.access_token : null;
};
export const fetchGoogleQuotaBuckets = async (accessToken, projectId) => {
const body = projectId ? { project: projectId } : {};
try {
const response = await fetch(`${GOOGLE_PRIMARY_ENDPOINT}/v1internal:retrieveUserQuota`, {
method: 'POST',
headers: {
Authorization: `Bearer ${accessToken}`,
'Content-Type': 'application/json'
},
body: JSON.stringify(body),
signal: AbortSignal.timeout(15000)
});
if (!response.ok) {
return null;
}
return await response.json();
} catch {
return null;
}
};
export const fetchGoogleModels = async (accessToken, projectId) => {
const body = projectId ? { project: projectId } : {};
for (const endpoint of GOOGLE_ENDPOINTS) {
try {
const response = await fetch(`${endpoint}/v1internal:fetchAvailableModels`, {
method: 'POST',
headers: {
Authorization: `Bearer ${accessToken}`,
'Content-Type': 'application/json',
...GOOGLE_HEADERS
},
body: JSON.stringify(body),
signal: AbortSignal.timeout(15000)
});
if (response.ok) {
return await response.json();
}
} catch {
continue;
}
}
return null;
};

View File

@@ -0,0 +1,108 @@
/**
* Google Provider - Auth
*
* Authentication resolution logic for Google quota providers.
* @module quota/providers/google/auth
*/
import {
ANTIGRAVITY_ACCOUNTS_PATHS,
readJsonFile,
getAuthEntry,
normalizeAuthEntry,
asObject,
asNonEmptyString,
toTimestamp
} from '../../utils/index.js';
import { readAuthFile } from '../../../opencode/auth.js';
import { parseGoogleRefreshToken } from './transforms.js';
const ANTIGRAVITY_GOOGLE_CLIENT_ID =
'1071006060591-tmhssin2h21lcre235vtolojh4g403ep.apps.googleusercontent.com';
const ANTIGRAVITY_GOOGLE_CLIENT_SECRET = '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';
const GEMINI_GOOGLE_CLIENT_ID =
'681255809395-oo8ft2oprdrnp9e3aqf6av3hmdib135j.apps.googleusercontent.com';
const GEMINI_GOOGLE_CLIENT_SECRET = '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';
export const DEFAULT_PROJECT_ID = 'rising-fact-p41fc';
export const resolveGoogleOAuthClient = (sourceId) => {
if (sourceId === 'gemini') {
return {
clientId: GEMINI_GOOGLE_CLIENT_ID,
clientSecret: GEMINI_GOOGLE_CLIENT_SECRET
};
}
return {
clientId: ANTIGRAVITY_GOOGLE_CLIENT_ID,
clientSecret: ANTIGRAVITY_GOOGLE_CLIENT_SECRET
};
};
export const resolveGeminiCliAuth = (auth) => {
const entry = normalizeAuthEntry(getAuthEntry(auth, ['google', 'google.oauth']));
const entryObject = asObject(entry);
if (!entryObject) {
return null;
}
const oauthObject = asObject(entryObject.oauth) ?? entryObject;
const accessToken = asNonEmptyString(oauthObject.access) ?? asNonEmptyString(oauthObject.token);
const refreshParts = parseGoogleRefreshToken(oauthObject.refresh);
if (!accessToken && !refreshParts.refreshToken) {
return null;
}
return {
sourceId: 'gemini',
sourceLabel: 'Gemini',
accessToken,
refreshToken: refreshParts.refreshToken,
projectId: refreshParts.projectId ?? refreshParts.managedProjectId,
expires: toTimestamp(oauthObject.expires)
};
};
export const resolveAntigravityAuth = () => {
for (const filePath of ANTIGRAVITY_ACCOUNTS_PATHS) {
const data = readJsonFile(filePath);
const accounts = data?.accounts;
if (Array.isArray(accounts) && accounts.length > 0) {
const index = typeof data.activeIndex === 'number' ? data.activeIndex : 0;
const account = accounts[index] ?? accounts[0];
if (account?.refreshToken) {
const refreshParts = parseGoogleRefreshToken(account.refreshToken);
return {
sourceId: 'antigravity',
sourceLabel: 'Antigravity',
refreshToken: refreshParts.refreshToken,
projectId: asNonEmptyString(account.projectId)
?? asNonEmptyString(account.managedProjectId)
?? refreshParts.projectId
?? refreshParts.managedProjectId,
email: account.email
};
}
}
}
return null;
};
export const resolveGoogleAuthSources = () => {
const auth = readAuthFile();
const sources = [];
const geminiAuth = resolveGeminiCliAuth(auth);
if (geminiAuth) {
sources.push(geminiAuth);
}
const antigravityAuth = resolveAntigravityAuth();
if (antigravityAuth) {
sources.push(antigravityAuth);
}
return sources;
};

View File

@@ -0,0 +1,124 @@
/**
* Google Provider
*
* Google quota provider implementation.
* @module quota/providers/google
*/
export {
resolveGoogleOAuthClient,
resolveGeminiCliAuth,
resolveAntigravityAuth,
resolveGoogleAuthSources,
DEFAULT_PROJECT_ID
} from './auth.js';
export {
resolveGoogleWindow,
transformQuotaBucket,
transformModelData
} from './transforms.js';
export {
refreshGoogleAccessToken,
fetchGoogleQuotaBuckets,
fetchGoogleModels
} from './api.js';
import { buildResult } from '../../utils/index.js';
import {
resolveGoogleAuthSources,
resolveGoogleOAuthClient,
DEFAULT_PROJECT_ID
} from './auth.js';
import { transformQuotaBucket, transformModelData } from './transforms.js';
import {
refreshGoogleAccessToken,
fetchGoogleQuotaBuckets,
fetchGoogleModels
} from './api.js';
export const fetchGoogleQuota = async () => {
const authSources = resolveGoogleAuthSources();
if (!authSources.length) {
return buildResult({
providerId: 'google',
providerName: 'Google',
ok: false,
configured: false,
error: 'Not configured'
});
}
const models = {};
const sourceErrors = [];
for (const source of authSources) {
const now = Date.now();
let accessToken = source.accessToken;
if (!accessToken || (typeof source.expires === 'number' && source.expires <= now)) {
if (!source.refreshToken) {
sourceErrors.push(`${source.sourceLabel}: Missing refresh token`);
continue;
}
const { clientId, clientSecret } = resolveGoogleOAuthClient(source.sourceId);
accessToken = await refreshGoogleAccessToken(source.refreshToken, clientId, clientSecret);
}
if (!accessToken) {
sourceErrors.push(`${source.sourceLabel}: Failed to refresh OAuth token`);
continue;
}
const projectId = source.projectId ?? DEFAULT_PROJECT_ID;
let mergedAnyModel = false;
if (source.sourceId === 'gemini') {
const quotaPayload = await fetchGoogleQuotaBuckets(accessToken, projectId);
const buckets = Array.isArray(quotaPayload?.buckets) ? quotaPayload.buckets : [];
for (const bucket of buckets) {
const transformed = transformQuotaBucket(bucket, source.sourceId);
if (transformed) {
Object.assign(models, transformed);
mergedAnyModel = true;
}
}
}
const payload = await fetchGoogleModels(accessToken, projectId);
if (payload) {
for (const [modelName, modelData] of Object.entries(payload.models ?? {})) {
const transformed = transformModelData(modelName, modelData, source.sourceId);
Object.assign(models, transformed);
mergedAnyModel = true;
}
}
if (!mergedAnyModel) {
sourceErrors.push(`${source.sourceLabel}: Failed to fetch models`);
}
}
if (!Object.keys(models).length) {
return buildResult({
providerId: 'google',
providerName: 'Google',
ok: false,
configured: true,
error: sourceErrors[0] ?? 'Failed to fetch models'
});
}
return buildResult({
providerId: 'google',
providerName: 'Google',
ok: true,
configured: true,
usage: {
windows: {},
models: Object.keys(models).length ? models : undefined
}
});
};

View File

@@ -0,0 +1,109 @@
/**
* Google Provider - Transforms
*
* Data transformation functions for Google quota responses.
* @module quota/providers/google/transforms
*/
import {
asNonEmptyString,
toNumber,
toTimestamp,
toUsageWindow
} from '../../utils/index.js';
const GOOGLE_FIVE_HOUR_WINDOW_SECONDS = 5 * 60 * 60;
const GOOGLE_DAILY_WINDOW_SECONDS = 24 * 60 * 60;
export const parseGoogleRefreshToken = (rawRefreshToken) => {
const refreshToken = asNonEmptyString(rawRefreshToken);
if (!refreshToken) {
return { refreshToken: null, projectId: null, managedProjectId: null };
}
const [rawToken = '', rawProject = '', rawManagedProject = ''] = refreshToken.split('|');
return {
refreshToken: asNonEmptyString(rawToken),
projectId: asNonEmptyString(rawProject),
managedProjectId: asNonEmptyString(rawManagedProject)
};
};
export const resolveGoogleWindow = (sourceId, resetAt) => {
if (sourceId === 'gemini') {
return { label: 'daily', seconds: GOOGLE_DAILY_WINDOW_SECONDS };
}
if (sourceId === 'antigravity') {
const remainingSeconds = typeof resetAt === 'number'
? Math.max(0, Math.round((resetAt - Date.now()) / 1000))
: null;
if (remainingSeconds !== null && remainingSeconds > 10 * 60 * 60) {
return { label: 'daily', seconds: GOOGLE_DAILY_WINDOW_SECONDS };
}
return { label: '5h', seconds: GOOGLE_FIVE_HOUR_WINDOW_SECONDS };
}
return { label: 'daily', seconds: GOOGLE_DAILY_WINDOW_SECONDS };
};
export const transformQuotaBucket = (bucket, sourceId) => {
const modelId = asNonEmptyString(bucket?.modelId);
if (!modelId) {
return null;
}
const scopedName = modelId.startsWith(`${sourceId}/`)
? modelId
: `${sourceId}/${modelId}`;
const remainingFraction = toNumber(bucket?.remainingFraction);
const remainingPercent = remainingFraction !== null
? Math.round(remainingFraction * 100)
: null;
const usedPercent = remainingPercent !== null ? Math.max(0, 100 - remainingPercent) : null;
const resetAt = toTimestamp(bucket?.resetTime);
const window = resolveGoogleWindow(sourceId, resetAt);
return {
[scopedName]: {
windows: {
[window.label]: toUsageWindow({
usedPercent,
windowSeconds: window.seconds,
resetAt
})
}
}
};
};
export const transformModelData = (modelName, modelData, sourceId) => {
const scopedName = modelName.startsWith(`${sourceId}/`)
? modelName
: `${sourceId}/${modelName}`;
const remainingFraction = modelData?.quotaInfo?.remainingFraction;
const remainingPercent = typeof remainingFraction === 'number'
? Math.round(remainingFraction * 100)
: null;
const usedPercent = remainingPercent !== null ? Math.max(0, 100 - remainingPercent) : null;
const resetAt = modelData?.quotaInfo?.resetTime
? new Date(modelData.quotaInfo.resetTime).getTime()
: null;
const window = resolveGoogleWindow(sourceId, resetAt);
return {
[scopedName]: {
windows: {
[window.label]: toUsageWindow({
usedPercent,
windowSeconds: window.seconds,
resetAt
})
}
}
};
};

View File

@@ -0,0 +1,152 @@
/**
* Quota Providers Registry
*
* Implements quota fetching for various AI providers using a registry pattern.
* @module quota/providers
*/
import { buildResult } from '../utils/index.js';
import * as claude from './claude.js';
import * as codex from './codex.js';
import * as copilot from './copilot.js';
import * as google from './google/index.js';
import * as kimi from './kimi.js';
import * as nanogpt from './nanogpt.js';
import * as openai from './openai.js';
import * as openrouter from './openrouter.js';
import * as zai from './zai.js';
import * as minimaxCodingPlan from './minimax-coding-plan.js';
import * as minimaxCnCodingPlan from './minimax-cn-coding-plan.js';
import * as ollamaCloud from './ollama-cloud.js';
const registry = {
claude: {
providerId: claude.providerId,
providerName: claude.providerName,
isConfigured: claude.isConfigured,
fetchQuota: claude.fetchQuota
},
codex: {
providerId: codex.providerId,
providerName: codex.providerName,
isConfigured: codex.isConfigured,
fetchQuota: codex.fetchQuota
},
google: {
providerId: 'google',
providerName: 'Google',
isConfigured: () => google.resolveGoogleAuthSources().length > 0,
fetchQuota: google.fetchGoogleQuota
},
'zai-coding-plan': {
providerId: zai.providerId,
providerName: zai.providerName,
isConfigured: zai.isConfigured,
fetchQuota: zai.fetchQuota
},
'kimi-for-coding': {
providerId: kimi.providerId,
providerName: kimi.providerName,
isConfigured: kimi.isConfigured,
fetchQuota: kimi.fetchQuota
},
openrouter: {
providerId: openrouter.providerId,
providerName: openrouter.providerName,
isConfigured: openrouter.isConfigured,
fetchQuota: openrouter.fetchQuota
},
'nano-gpt': {
providerId: nanogpt.providerId,
providerName: nanogpt.providerName,
isConfigured: nanogpt.isConfigured,
fetchQuota: nanogpt.fetchQuota
},
'github-copilot': {
providerId: copilot.providerId,
providerName: copilot.providerName,
isConfigured: copilot.isConfigured,
fetchQuota: copilot.fetchQuota
},
'github-copilot-addon': {
providerId: copilot.providerIdAddon,
providerName: copilot.providerNameAddon,
isConfigured: copilot.isConfigured,
fetchQuota: copilot.fetchQuotaAddon
},
'minimax-coding-plan': {
providerId: minimaxCodingPlan.providerId,
providerName: minimaxCodingPlan.providerName,
isConfigured: minimaxCodingPlan.isConfigured,
fetchQuota: minimaxCodingPlan.fetchQuota
},
'minimax-cn-coding-plan': {
providerId: minimaxCnCodingPlan.providerId,
providerName: minimaxCnCodingPlan.providerName,
isConfigured: minimaxCnCodingPlan.isConfigured,
fetchQuota: minimaxCnCodingPlan.fetchQuota
},
'ollama-cloud': {
providerId: ollamaCloud.providerId,
providerName: ollamaCloud.providerName,
isConfigured: ollamaCloud.isConfigured,
fetchQuota: ollamaCloud.fetchQuota
}
};
export const listConfiguredQuotaProviders = () => {
const configured = [];
for (const [id, provider] of Object.entries(registry)) {
try {
if (provider.isConfigured()) {
configured.push(id);
}
} catch {
// Ignore provider-specific config errors in list API.
}
}
return configured;
};
export const fetchQuotaForProvider = async (providerId) => {
const provider = registry[providerId];
if (!provider) {
return buildResult({
providerId,
providerName: providerId,
ok: false,
configured: false,
error: 'Unsupported provider'
});
}
try {
return await provider.fetchQuota();
} catch (error) {
return buildResult({
providerId: provider.providerId,
providerName: provider.providerName,
ok: false,
configured: true,
error: error instanceof Error ? error.message : 'Request failed'
});
}
};
export const fetchClaudeQuota = claude.fetchQuota;
export const fetchOpenaiQuota = openai.fetchQuota;
export const fetchGoogleQuota = google.fetchGoogleQuota;
export const fetchCodexQuota = codex.fetchQuota;
export const fetchCopilotQuota = copilot.fetchQuota;
export const fetchCopilotAddonQuota = copilot.fetchQuotaAddon;
export const fetchKimiQuota = kimi.fetchQuota;
export const fetchOpenRouterQuota = openrouter.fetchQuota;
export const fetchZaiQuota = zai.fetchQuota;
export const fetchNanoGptQuota = nanogpt.fetchQuota;
export const fetchMinimaxCodingPlanQuota = minimaxCodingPlan.fetchQuota;
export const fetchMinimaxCnCodingPlanQuota = minimaxCnCodingPlan.fetchQuota;
export const fetchOllamaCloudQuota = ollamaCloud.fetchQuota;

View File

@@ -0,0 +1,55 @@
/**
* Quota Provider Interface
*
* Defines the contract for implementing quota providers.
* @module quota/providers
*/
/**
* @typedef {Object} UsageWindow
* @property {number|null} usedPercent - Percentage of usage (0-100)
* @property {number|null} remainingPercent - Percentage remaining (0-100)
* @property {number|null} windowSeconds - Window duration in seconds
* @property {number|null} resetAfterSeconds - Seconds until reset
* @property {number|null} resetAt - Unix timestamp when quota resets
* @property {string|null} resetAtFormatted - Human-readable reset time
* @property {string|null} resetAfterFormatted - Human-readable time until reset
* @property {string|null} valueLabel - Optional label for display (e.g., "$10.00 remaining")
*/
/**
* @typedef {Object} ProviderUsage
* @property {Object.<string, UsageWindow>} windows - Usage windows by key (e.g., '5h', '7d', 'daily')
* @property {Object.<string, Object>} [models] - Model-specific usage (provider-specific)
*/
/**
* @typedef {Object} QuotaProviderResult
* @property {string} providerId - Unique identifier for the provider
* @property {string} providerName - Display name for the provider
* @property {boolean} ok - Whether the fetch was successful
* @property {boolean} configured - Whether the provider is configured
* @property {ProviderUsage|null} usage - Usage data if successful
* @property {string|null} [error] - Error message if not successful
* @property {number} fetchedAt - Unix timestamp when the result was fetched
*/
/**
* @typedef {Function} ProviderQuotaFetcher
* @returns {Promise<QuotaProviderResult>}
*/
/**
* @typedef {Function} ProviderConfigurationChecker
* @param {Object.<string, unknown>} [auth]
* @returns {boolean}
*/
/**
* @typedef {Object} QuotaProvider
* @property {string} providerId
* @property {string} providerName
* @property {string[]} aliases
* @property {ProviderConfigurationChecker} isConfigured
* @property {ProviderQuotaFetcher} fetchQuota
*/

View File

@@ -0,0 +1,108 @@
import { readAuthFile } from '../../opencode/auth.js';
import {
getAuthEntry,
normalizeAuthEntry,
buildResult,
toUsageWindow,
toNumber,
toTimestamp,
durationToLabel,
durationToSeconds
} from '../utils/index.js';
export const providerId = 'kimi-for-coding';
export const providerName = 'Kimi for Coding';
export const aliases = ['kimi-for-coding', 'kimi'];
export const isConfigured = () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
return Boolean(entry?.key || entry?.token);
};
export const fetchQuota = async () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
const apiKey = entry?.key ?? entry?.token;
if (!apiKey) {
return buildResult({
providerId,
providerName,
ok: false,
configured: false,
error: 'Not configured'
});
}
try {
const response = await fetch('https://api.kimi.com/coding/v1/usages', {
method: 'GET',
headers: {
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json'
}
});
if (!response.ok) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: `API error: ${response.status}`
});
}
const payload = await response.json();
const windows = {};
const usage = payload?.usage ?? null;
if (usage) {
const limit = toNumber(usage.limit);
const remaining = toNumber(usage.remaining);
const usedPercent = limit && remaining !== null
? Math.max(0, Math.min(100, 100 - (remaining / limit) * 100))
: null;
windows.weekly = toUsageWindow({
usedPercent,
windowSeconds: null,
resetAt: toTimestamp(usage.resetTime)
});
}
const limits = Array.isArray(payload?.limits) ? payload.limits : [];
for (const limit of limits) {
const window = limit?.window;
const detail = limit?.detail;
const rawLabel = durationToLabel(window?.duration, window?.timeUnit);
const windowSeconds = durationToSeconds(window?.duration, window?.timeUnit);
const label = windowSeconds === 5 * 60 * 60 ? `Rate Limit (${rawLabel})` : rawLabel;
const total = toNumber(detail?.limit);
const remaining = toNumber(detail?.remaining);
const usedPercent = total && remaining !== null
? Math.max(0, Math.min(100, 100 - (remaining / total) * 100))
: null;
windows[label] = toUsageWindow({
usedPercent,
windowSeconds,
resetAt: toTimestamp(detail?.resetTime)
});
}
return buildResult({
providerId,
providerName,
ok: true,
configured: true,
usage: { windows }
});
} catch (error) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: error instanceof Error ? error.message : 'Request failed'
});
}
};

View File

@@ -0,0 +1,131 @@
// MiniMax Coding Plan Provider (minimaxi.com)
import { readAuthFile } from '../../opencode/auth.js';
import {
getAuthEntry,
normalizeAuthEntry,
buildResult,
toUsageWindow,
toNumber,
toTimestamp,
} from '../utils/index.js';
export const providerId = 'minimax-cn-coding-plan';
export const providerName = 'MiniMax Coding Plan (minimaxi.com)';
export const aliases = ['minimax-cn-coding-plan'];
export const isConfigured = () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
return Boolean(entry?.key || entry?.token);
};
export const fetchQuota = async () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
const apiKey = entry?.key ?? entry?.token;
if (!apiKey) {
return buildResult({
providerId,
providerName,
ok: false,
configured: false,
error: 'Not configured',
});
}
try {
const response = await fetch(
'https://www.minimaxi.com/v1/api/openplatform/coding_plan/remains',
{
method: 'GET',
headers: {
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json',
},
},
);
if (!response.ok) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: `API error: ${response.status}`,
});
}
const payload = await response.json();
const baseResp = payload?.base_resp;
if (baseResp && baseResp.status_code !== 0) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: baseResp.status_msg || `API error: ${baseResp.status_code}`,
});
}
const windows = {};
const modelRemains = payload?.model_remains;
if (Array.isArray(modelRemains) && modelRemains.length > 0) {
const firstModel = modelRemains[0];
const total = toNumber(firstModel?.current_interval_total_count);
const usageCount = toNumber(firstModel?.current_interval_usage_count);
// Keep the existing 600-based derivation, but propagate a missing usage count
// as null instead of silently treating it as 600.
const used = usageCount === null ? null : 600 - usageCount;
if (total === null || used === null) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: 'Missing required quota fields',
});
}
const usedPercent =
total > 0 ? Math.max(0, Math.min(100, (used / total) * 100)) : null;
const startTime = toTimestamp(firstModel?.start_time);
const endTime = toTimestamp(firstModel?.end_time);
const windowSeconds =
startTime && endTime && endTime > startTime
? Math.floor((endTime - startTime) / 1000)
: null;
windows['5h'] = toUsageWindow({
usedPercent,
windowSeconds,
resetAt: endTime,
});
} else {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: 'No model quota data available',
});
}
return buildResult({
providerId,
providerName,
ok: true,
configured: true,
usage: { windows },
});
} catch (error) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: error instanceof Error ? error.message : 'Request failed',
});
}
};

View File

@@ -0,0 +1,131 @@
// MiniMax Coding Plan Provider (minimax.io)
import { readAuthFile } from '../../opencode/auth.js';
import {
getAuthEntry,
normalizeAuthEntry,
buildResult,
toUsageWindow,
toNumber,
toTimestamp,
} from '../utils/index.js';
export const providerId = 'minimax-coding-plan';
export const providerName = 'MiniMax Coding Plan (minimax.io)';
export const aliases = ['minimax-coding-plan'];
export const isConfigured = () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
return Boolean(entry?.key || entry?.token);
};
export const fetchQuota = async () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
const apiKey = entry?.key ?? entry?.token;
if (!apiKey) {
return buildResult({
providerId,
providerName,
ok: false,
configured: false,
error: 'Not configured',
});
}
try {
const response = await fetch(
'https://www.minimax.io/v1/api/openplatform/coding_plan/remains',
{
method: 'GET',
headers: {
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json',
},
},
);
if (!response.ok) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: `API error: ${response.status}`,
});
}
const payload = await response.json();
const baseResp = payload?.base_resp;
if (baseResp && baseResp.status_code !== 0) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: baseResp.status_msg || `API error: ${baseResp.status_code}`,
});
}
const windows = {};
const modelRemains = payload?.model_remains;
if (Array.isArray(modelRemains) && modelRemains.length > 0) {
const firstModel = modelRemains[0];
const total = toNumber(firstModel?.current_interval_total_count);
const usageCount = toNumber(firstModel?.current_interval_usage_count);
// Keep the existing 600-based derivation, but propagate a missing usage count
// as null instead of silently treating it as 600.
const used = usageCount === null ? null : 600 - usageCount;
if (total === null || used === null) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: 'Missing required quota fields',
});
}
const usedPercent =
total > 0 ? Math.max(0, Math.min(100, (used / total) * 100)) : null;
const startTime = toTimestamp(firstModel?.start_time);
const endTime = toTimestamp(firstModel?.end_time);
const windowSeconds =
startTime && endTime && endTime > startTime
? Math.floor((endTime - startTime) / 1000)
: null;
windows['5h'] = toUsageWindow({
usedPercent,
windowSeconds,
resetAt: endTime,
});
} else {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: 'No model quota data available',
});
}
return buildResult({
providerId,
providerName,
ok: true,
configured: true,
usage: { windows },
});
} catch (error) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: error instanceof Error ? error.message : 'Request failed',
});
}
};

View File

@@ -0,0 +1,124 @@
import { readAuthFile } from '../../opencode/auth.js';
import {
getAuthEntry,
normalizeAuthEntry,
buildResult,
toUsageWindow,
toNumber,
toTimestamp
} from '../utils/index.js';
const NANO_GPT_DAILY_WINDOW_SECONDS = 86400;
export const providerId = 'nano-gpt';
export const providerName = 'NanoGPT';
export const aliases = ['nano-gpt', 'nanogpt', 'nano_gpt'];
export const isConfigured = () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
return Boolean(entry?.key || entry?.token);
};
export const fetchQuota = async () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
const apiKey = entry?.key ?? entry?.token;
if (!apiKey) {
return buildResult({
providerId,
providerName,
ok: false,
configured: false,
error: 'Not configured'
});
}
try {
const response = await fetch('https://nano-gpt.com/api/subscription/v1/usage', {
method: 'GET',
headers: {
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json'
}
});
if (!response.ok) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: `API error: ${response.status}`
});
}
const payload = await response.json();
const windows = {};
const period = payload?.period ?? null;
const daily = payload?.daily ?? null;
const monthly = payload?.monthly ?? null;
const state = payload?.state ?? 'active';
if (daily) {
let usedPercent = null;
const percentUsed = daily?.percentUsed;
if (typeof percentUsed === 'number') {
usedPercent = Math.max(0, Math.min(100, percentUsed * 100));
} else {
const used = toNumber(daily?.used);
const limit = toNumber(daily?.limit ?? daily?.limits?.daily);
if (used !== null && limit !== null && limit > 0) {
usedPercent = Math.max(0, Math.min(100, (used / limit) * 100));
}
}
const resetAt = toTimestamp(daily?.resetAt);
const valueLabel = state !== 'active' ? `(${state})` : null;
windows['daily'] = toUsageWindow({
usedPercent,
windowSeconds: NANO_GPT_DAILY_WINDOW_SECONDS,
resetAt,
valueLabel
});
}
if (monthly) {
let usedPercent = null;
const percentUsed = monthly?.percentUsed;
if (typeof percentUsed === 'number') {
usedPercent = Math.max(0, Math.min(100, percentUsed * 100));
} else {
const used = toNumber(monthly?.used);
const limit = toNumber(monthly?.limit ?? monthly?.limits?.monthly);
if (used !== null && limit !== null && limit > 0) {
usedPercent = Math.max(0, Math.min(100, (used / limit) * 100));
}
}
const resetAt = toTimestamp(monthly?.resetAt ?? period?.currentPeriodEnd);
const valueLabel = state !== 'active' ? `(${state})` : null;
windows['monthly'] = toUsageWindow({
usedPercent,
windowSeconds: null,
resetAt,
valueLabel
});
}
return buildResult({
providerId,
providerName,
ok: true,
configured: true,
usage: { windows }
});
} catch (error) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: error instanceof Error ? error.message : 'Request failed'
});
}
};

View File

@@ -0,0 +1,112 @@
import { homedir } from 'os';
import { readFileSync, existsSync } from 'fs';
import { join } from 'path';
import { buildResult, toUsageWindow, toNumber } from '../utils/index.js';
const COOKIE_PATH = join(homedir(), '.config', 'ollama-quota', 'cookie');
export const providerId = 'ollama-cloud';
export const providerName = 'Ollama Cloud';
export const aliases = ['ollama-cloud', 'ollamacloud'];
const readCookieFile = () => {
try {
if (!existsSync(COOKIE_PATH)) return null;
const content = readFileSync(COOKIE_PATH, 'utf-8');
const trimmed = content.trim();
return trimmed || null;
} catch {
return null;
}
};
const parseOllamaSettingsHtml = (html) => {
const windows = {};
const sessionMatch = html.match(/Session\s+usage[^0-9]*([0-9.]+)%/i);
if (sessionMatch) {
windows.session = toUsageWindow({
usedPercent: toNumber(sessionMatch[1]),
windowSeconds: null,
resetAt: null
});
}
const weeklyMatch = html.match(/Weekly\s+usage[^0-9]*([0-9.]+)%/i);
if (weeklyMatch) {
windows.weekly = toUsageWindow({
usedPercent: toNumber(weeklyMatch[1]),
windowSeconds: null,
resetAt: null
});
}
const premiumMatch = html.match(/Premium[^0-9]*([0-9]+)\s*\/\s*([0-9]+)/i);
if (premiumMatch) {
const used = toNumber(premiumMatch[1]);
const total = toNumber(premiumMatch[2]);
const usedPercent = total && used !== null ? Math.min(100, (used / total) * 100) : null;
windows.premium = toUsageWindow({
usedPercent,
windowSeconds: null,
resetAt: null,
valueLabel: `${used ?? 0} / ${total ?? 0}`
});
}
return windows;
};
export const isConfigured = () => {
const cookie = readCookieFile();
return Boolean(cookie);
};
export const fetchQuota = async () => {
const cookie = readCookieFile();
if (!cookie) {
return buildResult({
providerId,
providerName,
ok: false,
configured: false,
error: 'Not configured'
});
}
try {
const response = await fetch('https://ollama.com/settings', {
method: 'GET',
headers: {
Cookie: cookie,
'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36'
}
});
if (!response.ok) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: `API error: ${response.status}`
});
}
const html = await response.text();
const windows = parseOllamaSettingsHtml(html);
return buildResult({
providerId,
providerName,
ok: true,
configured: true,
usage: { windows }
});
} catch (error) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: error instanceof Error ? error.message : 'Request failed'
});
}
};

View File

@@ -0,0 +1,91 @@
import { readAuthFile } from '../../opencode/auth.js';
import {
getAuthEntry,
normalizeAuthEntry,
buildResult,
toUsageWindow,
toNumber,
toTimestamp
} from '../utils/index.js';
export const providerId = 'openai';
export const providerName = 'OpenAI';
export const aliases = ['openai', 'codex', 'chatgpt'];
export const isConfigured = () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
return Boolean(entry?.access || entry?.token);
};
export const fetchQuota = async () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
const accessToken = entry?.access ?? entry?.token;
if (!accessToken) {
return buildResult({
providerId,
providerName,
ok: false,
configured: false,
error: 'Not configured'
});
}
try {
const response = await fetch('https://chatgpt.com/backend-api/wham/usage', {
method: 'GET',
headers: {
Authorization: `Bearer ${accessToken}`,
'Content-Type': 'application/json'
}
});
if (!response.ok) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: `API error: ${response.status}`
});
}
const payload = await response.json();
const primary = payload?.rate_limit?.primary_window ?? null;
const secondary = payload?.rate_limit?.secondary_window ?? null;
const windows = {};
if (primary) {
windows['5h'] = toUsageWindow({
usedPercent: primary.used_percent ?? null,
windowSeconds: primary.limit_window_seconds ?? null,
resetAt: primary.reset_at ? primary.reset_at * 1000 : null
});
}
if (secondary) {
windows['weekly'] = toUsageWindow({
usedPercent: secondary.used_percent ?? null,
windowSeconds: secondary.limit_window_seconds ?? null,
resetAt: secondary.reset_at ? secondary.reset_at * 1000 : null
});
}
return buildResult({
providerId,
providerName,
ok: true,
configured: true,
usage: { windows }
});
} catch (error) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: error instanceof Error ? error.message : 'Request failed'
});
}
};

View File

@@ -0,0 +1,92 @@
import { readAuthFile } from '../../opencode/auth.js';
import {
getAuthEntry,
normalizeAuthEntry,
buildResult,
toUsageWindow,
toNumber,
formatMoney
} from '../utils/index.js';
export const providerId = 'openrouter';
export const providerName = 'OpenRouter';
export const aliases = ['openrouter'];
export const isConfigured = () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
return Boolean(entry?.key || entry?.token);
};
export const fetchQuota = async () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
const apiKey = entry?.key ?? entry?.token;
if (!apiKey) {
return buildResult({
providerId,
providerName,
ok: false,
configured: false,
error: 'Not configured'
});
}
try {
const response = await fetch('https://openrouter.ai/api/v1/credits', {
method: 'GET',
headers: {
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json'
}
});
if (!response.ok) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: `API error: ${response.status}`
});
}
const payload = await response.json();
const credits = payload?.data ?? {};
const totalCredits = toNumber(credits.total_credits);
const totalUsage = toNumber(credits.total_usage);
const remaining = totalCredits !== null && totalUsage !== null
? Math.max(0, totalCredits - totalUsage)
: null;
const usedPercent = totalCredits && totalUsage !== null
? Math.max(0, Math.min(100, (totalUsage / totalCredits) * 100))
: null;
const valueLabel = remaining !== null ? `$${formatMoney(remaining)} remaining` : null;
const windows = {
credits: toUsageWindow({
usedPercent,
windowSeconds: null,
resetAt: null,
valueLabel
})
};
return buildResult({
providerId,
providerName,
ok: true,
configured: true,
usage: { windows }
});
} catch (error) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: error instanceof Error ? error.message : 'Request failed'
});
}
};

View File

@@ -0,0 +1,91 @@
import { readAuthFile } from '../../opencode/auth.js';
import {
getAuthEntry,
normalizeAuthEntry,
buildResult,
toUsageWindow,
toNumber,
toTimestamp,
resolveWindowSeconds,
resolveWindowLabel,
normalizeTimestamp
} from '../utils/index.js';
export const providerId = 'zai-coding-plan';
export const providerName = 'z.ai';
export const aliases = ['zai-coding-plan', 'zai', 'z.ai'];
export const isConfigured = () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
return Boolean(entry?.key || entry?.token);
};
export const fetchQuota = async () => {
const auth = readAuthFile();
const entry = normalizeAuthEntry(getAuthEntry(auth, aliases));
const apiKey = entry?.key ?? entry?.token;
if (!apiKey) {
return buildResult({
providerId,
providerName,
ok: false,
configured: false,
error: 'Not configured'
});
}
try {
const response = await fetch('https://api.z.ai/api/monitor/usage/quota/limit', {
method: 'GET',
headers: {
Authorization: `Bearer ${apiKey}`,
'Content-Type': 'application/json'
}
});
if (!response.ok) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: `API error: ${response.status}`
});
}
const payload = await response.json();
const limits = Array.isArray(payload?.data?.limits) ? payload.data.limits : [];
const tokensLimit = limits.find((limit) => limit?.type === 'TOKENS_LIMIT');
const windowSeconds = resolveWindowSeconds(tokensLimit);
const windowLabel = resolveWindowLabel(windowSeconds);
const resetAt = tokensLimit?.nextResetTime ? normalizeTimestamp(tokensLimit.nextResetTime) : null;
const usedPercent = typeof tokensLimit?.percentage === 'number' ? tokensLimit.percentage : null;
const windows = {};
if (tokensLimit) {
windows[windowLabel] = toUsageWindow({
usedPercent,
windowSeconds,
resetAt
});
}
return buildResult({
providerId,
providerName,
ok: true,
configured: true,
usage: { windows }
});
} catch (error) {
return buildResult({
providerId,
providerName,
ok: false,
configured: true,
error: error instanceof Error ? error.message : 'Request failed'
});
}
};

View File

@@ -0,0 +1,46 @@
import fs from 'fs';
import path from 'path';
import os from 'os';
const OPENCODE_CONFIG_DIR = path.join(os.homedir(), '.config', 'opencode');
const OPENCODE_DATA_DIR = path.join(os.homedir(), '.local', 'share', 'opencode');
export const ANTIGRAVITY_ACCOUNTS_PATHS = [
path.join(OPENCODE_CONFIG_DIR, 'antigravity-accounts.json'),
path.join(OPENCODE_DATA_DIR, 'antigravity-accounts.json')
];
export const readJsonFile = (filePath) => {
if (!fs.existsSync(filePath)) {
return null;
}
try {
const raw = fs.readFileSync(filePath, 'utf8');
const trimmed = raw.trim();
if (!trimmed) return null;
return JSON.parse(trimmed);
} catch (error) {
console.warn(`Failed to read JSON file: ${filePath}`, error);
return null;
}
};
export const getAuthEntry = (auth, aliases) => {
for (const alias of aliases) {
if (auth[alias]) {
return auth[alias];
}
}
return null;
};
export const normalizeAuthEntry = (entry) => {
if (!entry) return null;
if (typeof entry === 'string') {
return { token: entry };
}
if (typeof entry === 'object') {
return entry;
}
return null;
};


@@ -0,0 +1,76 @@
export const formatResetTime = (timestamp) => {
try {
const resetDate = new Date(timestamp);
const now = new Date();
const isToday = resetDate.toDateString() === now.toDateString();
if (isToday) {
return resetDate.toLocaleTimeString(undefined, {
hour: 'numeric',
minute: '2-digit'
});
}
return resetDate.toLocaleString(undefined, {
month: 'short',
day: 'numeric',
weekday: 'short',
hour: 'numeric',
minute: '2-digit'
});
} catch {
return null;
}
};
export const calculateResetAfterSeconds = (resetAt) => {
if (!resetAt) return null;
const delta = Math.floor((resetAt - Date.now()) / 1000);
return delta < 0 ? 0 : delta;
};
export const toUsageWindow = ({ usedPercent, windowSeconds, resetAt, valueLabel }) => {
const resetAfterSeconds = calculateResetAfterSeconds(resetAt);
const resetFormatted = resetAt ? formatResetTime(resetAt) : null;
return {
usedPercent,
remainingPercent: usedPercent !== null ? Math.max(0, 100 - usedPercent) : null,
windowSeconds: windowSeconds ?? null,
resetAfterSeconds,
resetAt,
resetAtFormatted: resetFormatted,
resetAfterFormatted: resetFormatted,
...(valueLabel ? { valueLabel } : {})
};
};
export const buildResult = ({ providerId, providerName, ok, configured, usage, error }) => ({
providerId,
providerName,
ok,
configured,
usage: usage ?? null,
...(error ? { error } : {}),
fetchedAt: Date.now()
});
export const durationToLabel = (duration, unit) => {
if (!duration || !unit) return 'limit';
if (unit === 'TIME_UNIT_MINUTE') return `${duration}m`;
if (unit === 'TIME_UNIT_HOUR') return `${duration}h`;
if (unit === 'TIME_UNIT_DAY') return `${duration}d`;
return 'limit';
};
export const durationToSeconds = (duration, unit) => {
if (!duration || !unit) return null;
if (unit === 'TIME_UNIT_MINUTE') return duration * 60;
if (unit === 'TIME_UNIT_HOUR') return duration * 3600;
if (unit === 'TIME_UNIT_DAY') return duration * 86400;
return null;
};
export const formatMoney = (value) => {
if (typeof value !== 'number' || !Number.isFinite(value)) return null;
return value.toFixed(2);
};


@@ -0,0 +1,10 @@
/**
* Quota Utilities
*
* Shared utility functions for quota calculations and formatting.
* @module quota/utils
*/
export * from './auth.js';
export * from './transformers.js';
export * from './formatters.js';


@@ -0,0 +1,55 @@
export const asObject = (value) => (value && typeof value === 'object' ? value : null);
export const asNonEmptyString = (value) => {
if (typeof value !== 'string') return null;
const trimmed = value.trim();
return trimmed ? trimmed : null;
};
export const toNumber = (value) => {
if (typeof value === 'number' && Number.isFinite(value)) {
return value;
}
if (typeof value === 'string') {
const parsed = Number(value);
return Number.isFinite(parsed) ? parsed : null;
}
return null;
};
export const toTimestamp = (value) => {
if (!value) return null;
if (typeof value === 'number') {
return value < 1_000_000_000_000 ? value * 1000 : value;
}
if (typeof value === 'string') {
const parsed = Date.parse(value);
return Number.isNaN(parsed) ? null : parsed;
}
return null;
};
export const normalizeTimestamp = (value) => {
if (typeof value !== 'number') return null;
return value < 1_000_000_000_000 ? value * 1000 : value;
};
export const resolveWindowSeconds = (limit) => {
const ZAI_TOKEN_WINDOW_SECONDS = { 3: 3600 };
if (!limit || !limit.number) return null;
const unitSeconds = ZAI_TOKEN_WINDOW_SECONDS[limit.unit];
if (!unitSeconds) return null;
return unitSeconds * limit.number;
};
export const resolveWindowLabel = (windowSeconds) => {
if (!windowSeconds) return 'tokens';
if (windowSeconds % 86400 === 0) {
const days = windowSeconds / 86400;
return days === 7 ? 'weekly' : `${days}d`;
}
if (windowSeconds % 3600 === 0) {
return `${windowSeconds / 3600}h`;
}
return `${windowSeconds}s`;
};


@@ -0,0 +1,178 @@
# Skills Catalog Module Documentation
## Purpose
This module provides skill discovery, scanning, and installation capabilities for OpenCode. It supports multiple skill sources including GitHub repositories and the ClawdHub registry, with caching and conflict resolution for skill installation.
## Entrypoints and structure
- `packages/web/server/lib/skills-catalog/`: Skills catalog module directory containing all skill-related functionality.
- `cache.js`: In-memory cache for scan results with TTL support.
- `curated-sources.js`: Predefined skill sources (Anthropic, ClawdHub).
- `git.js`: Git operations helpers for cloning and auth error detection.
- `install.js`: Skills installation from GitHub repositories.
- `scan.js`: Skills scanning from GitHub repositories.
- `source.js`: Source string parsing for GitHub repositories.
- `clawdhub/`: ClawdHub registry integration.
- `index.js`: Public API exports for ClawdHub.
- `scan.js`: Scanning ClawdHub registry with pagination.
- `install.js`: Installation from ClawdHub (ZIP download).
- `api.js`: ClawdHub API client with rate limiting.
## Public API
The following functions are exported and used by the web server:
### Cache (`cache.js`)
- `getCacheKey({ normalizedRepo, subpath, identityId })`: Generate cache key for scan results.
- `getCachedScan(key)`: Retrieve cached scan result if not expired.
- `setCachedScan(key, value, ttlMs)`: Store scan result with TTL (default 30 minutes).
- `clearCache()`: Clear all cached scan results.
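A minimal sketch of how the cache helpers compose with `scanSkillsRepository` (the repository and the 45-minute TTL are illustrative, not project defaults):
```js
import { getCacheKey, getCachedScan, setCachedScan } from './cache.js';
import { scanSkillsRepository } from './scan.js';

// Build a key that isolates results per repo, subpath, and git identity.
const key = getCacheKey({ normalizedRepo: 'anthropics/skills', subpath: 'skills', identityId: '' });

// Reuse a recent scan when available; otherwise scan and cache the result.
let result = getCachedScan(key);
if (!result) {
  result = await scanSkillsRepository({ source: 'anthropics/skills', subpath: 'skills' });
  if (result.ok) {
    setCachedScan(key, result, 45 * 60 * 1000); // custom TTL in ms; the default is 30 minutes
  }
}
```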
### Curated Sources (`curated-sources.js`)
- `getCuratedSkillsSources()`: Return list of curated skill sources (Anthropic, ClawdHub).
- `CURATED_SKILLS_SOURCES`: Constant array of predefined sources.
### Source Parsing (`source.js`)
- `parseSkillRepoSource(source, { subpath })`: Parse GitHub repository source string into structured object with SSH/HTTPS clone URLs, normalized repo, and effective subpath. Supports SSH URLs, HTTPS URLs, and shorthand `owner/repo[/subpath]` format.
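A small sketch of the parser's output for the shorthand and SSH forms (the values in the comments follow from the Parse Source Response contract below):
```js
import { parseSkillRepoSource } from './source.js';

// Shorthand with an embedded subpath.
const parsed = parseSkillRepoSource('anthropics/skills/skills');
if (parsed.ok) {
  console.log(parsed.normalizedRepo);   // 'anthropics/skills'
  console.log(parsed.effectiveSubpath); // 'skills'
  console.log(parsed.cloneUrlHttps);    // 'https://github.com/anthropics/skills.git'
}

// For SSH URLs a subpath can only be supplied via the options object.
const viaSsh = parseSkillRepoSource('git@github.com:anthropics/skills.git', { subpath: 'skills' });
console.log(viaSsh.ok && viaSsh.effectiveSubpath); // 'skills'
```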
### Git Repository Scanning (`scan.js`)
- `scanSkillsRepository({ source, subpath, defaultSubpath, identity })`: Scan GitHub repository for skills by cloning and analyzing SKILL.md files. Returns array of skill items with metadata.
### Git Repository Installation (`install.js`)
- `installSkillsFromRepository({ source, subpath, defaultSubpath, identity, scope, targetSource, workingDirectory, userSkillDir, selections, conflictPolicy, conflictDecisions })`: Install skills from GitHub repository. Supports user/project scopes, opencode/agents targets, conflict resolution (prompt/skipAll/overwriteAll), and sparse checkout for efficiency.
### ClawdHub Integration (`clawdhub/index.js`)
- `isClawdHubSource(source)`: Check if source string refers to ClawdHub.
- `scanClawdHub()`: Scan entire ClawdHub registry for all skills (paginated, max 20 pages).
- `scanClawdHubPage({ cursor })`: Scan a single page of ClawdHub results with cursor-based pagination.
- `installSkillsFromClawdHub({ scope, targetSource, workingDirectory, userSkillDir, selections, conflictPolicy, conflictDecisions })`: Install skills from ClawdHub by downloading ZIP files.
- `fetchClawdHubSkills({ cursor })`: Fetch paginated skills list from ClawdHub API.
- `fetchClawdHubSkillVersion(slug, version)`: Fetch specific skill version details.
- `fetchClawdHubSkillInfo(slug)`: Fetch skill metadata without version details.
- `downloadClawdHubSkill(slug, version)`: Download skill package as ZIP buffer.
### ClawdHub Constants (`clawdhub/index.js`)
- `CLAWDHUB_SOURCE_ID`: Source identifier for curated sources.
- `CLAWDHUB_SOURCE_STRING`: Source string format.
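A minimal sketch of paging through the registry with the exported helpers (error handling trimmed for brevity):
```js
import { isClawdHubSource, scanClawdHubPage, CLAWDHUB_SOURCE_STRING } from './clawdhub/index.js';

if (isClawdHubSource(CLAWDHUB_SOURCE_STRING)) {
  // Walk the registry one page at a time using cursor-based pagination.
  let cursor = null;
  do {
    const page = await scanClawdHubPage({ cursor });
    if (!page.ok) break;
    for (const item of page.items) {
      console.log(item.skillName, item.clawdhub.downloads);
    }
    cursor = page.nextCursor;
  } while (cursor);
}
```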
## Internal Helpers
The following functions are internal helpers used by exported functions:
### Git Helpers (`git.js`)
- `runGit(args, options)`: Execute git command with optional SSH identity, timeout, and max buffer. Returns `{ ok, stdout, stderr, message, code, signal }`.
- `looksLikeAuthError(message)`: Detect if error message indicates authentication failure (permission denied, publickey, etc.).
- `assertGitAvailable()`: Check if git is available in PATH.
### Skill Name Validation (used in `install.js`, `scan.js`, `clawdhub/install.js`)
- `validateSkillName(skillName)`: Validate skill name against pattern `/^[a-z0-9][a-z0-9-]*[a-z0-9]$|^[a-z0-9]$/` (1-64 chars, lowercase alphanumeric with hyphens).
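Illustrative checks against that pattern (the helper additionally enforces the 1-64 character length limit):
```js
const SKILL_NAME_PATTERN = /^[a-z0-9][a-z0-9-]*[a-z0-9]$|^[a-z0-9]$/;

console.log(SKILL_NAME_PATTERN.test('pdf-tools')); // true
console.log(SKILL_NAME_PATTERN.test('a'));         // true  (single character)
console.log(SKILL_NAME_PATTERN.test('PDF_Tools')); // false (uppercase and underscore)
console.log(SKILL_NAME_PATTERN.test('-leading'));  // false (must start and end with [a-z0-9])
```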
### File System Helpers (`install.js`, `scan.js`, `clawdhub/install.js`)
- `safeRm(dir)`: Safely remove directory recursively (ignores errors).
- `ensureDir(dirPath)`: Ensure directory exists with recursive creation.
- `copyDirectoryNoSymlinks(srcDir, dstDir)`: Copy directory contents without symlinks, with path traversal protection.
- `normalizeUserSkillDir(userSkillDir)`: Normalize user skill directory path (handles the legacy `~/.config/opencode/skill` → `~/.config/opencode/skills` migration).
### Git Clone Helpers (`install.js`, `scan.js`)
- `cloneRepo({ cloneUrl, identity, tempDir })`: Clone GitHub repository with preferred partial clone (`--filter=blob:none`) and fallback. Uses non-interactive mode.
### SKILL.md Parsing (`scan.js`)
- `parseSkillMd(content)`: Parse YAML frontmatter from SKILL.md content. Returns `{ ok, frontmatter, warnings }`.
### Path Helpers (`install.js`)
- `toFsPath(repoDir, repoRelPosixPath)`: Convert POSIX path to filesystem path.
- `getTargetSkillDir({ scope, targetSource, workingDirectory, userSkillDir, skillName })`: Determine target installation directory based on scope (user/project), targetSource (opencode/agents), and skill name.
### ClawdHub API Helpers (`clawdhub/api.js`)
- `rateLimitedFetch(url, options)`: Fetch with rate limiting (120 req/min budget, 100ms minimum delay between requests, retries with a linearly increasing delay on 429/5xx responses).
- `mapClawdHubItem(item)`: Transform ClawdHub API response to SkillsCatalogItem format.
## Response Contracts
### Scan Skills Repository Response
- `ok`: Boolean indicating success.
- `normalizedRepo`: Normalized GitHub repo string (`owner/repo`).
- `effectiveSubpath`: Effective subpath used for scanning (may be from source string or defaultSubpath).
- `items`: Array of skill items with `{ repoSource, repoSubpath, skillDir, skillName, frontmatterName, description, installable, warnings }`.
- `error`: Error object with `{ kind, message }` on failure.
### Install Skills Response
- `ok`: Boolean indicating success.
- `installed`: Array of installed skills with `{ skillName, scope, source }`.
- `skipped`: Array of skipped skills with `{ skillName, reason }`.
- `error`: Error object with `{ kind, message, conflicts? }` on failure. Kinds: `authRequired`, `networkError`, `conflicts`, `invalidSource`, `unknown`.
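Illustrative (not captured) examples of the two common install result shapes:
```js
// Successful install with one skipped entry.
const installResponse = {
  ok: true,
  installed: [{ skillName: 'pdf-tools', scope: 'user', source: 'opencode' }],
  skipped: [{ skillName: 'broken-skill', reason: 'SKILL.md not found in selected directory' }],
};

// Conflict failure returned before anything is written.
const conflictResponse = {
  ok: false,
  error: {
    kind: 'conflicts',
    message: 'Some skills already exist in the selected scope',
    conflicts: [{ skillName: 'pdf-tools', scope: 'user', source: 'opencode' }],
  },
};
```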
### ClawdHub Scan Response
- `ok`: Boolean indicating success.
- `items`: Array of skill items with ClawdHub-specific metadata in `clawdhub` property.
- `nextCursor`: Pagination cursor for next page (only for `scanClawdHubPage`).
- `error`: Error object with `{ kind, message }` on failure.
### Parse Source Response
- `ok`: Boolean indicating success.
- `host`: GitHub host (`github.com`).
- `owner`: Repository owner.
- `repo`: Repository name.
- `cloneUrlSsh`: SSH clone URL.
- `cloneUrlHttps`: HTTPS clone URL.
- `effectiveSubpath`: Subpath for scanning (from source string or options).
- `normalizedRepo`: Normalized repo string (`owner/repo`).
- `error`: Error object with `{ kind, message }` on failure.
## Notes for Contributors
### Adding a New Skill Source
1. Create a new subdirectory under `packages/web/server/lib/skills-catalog/` (e.g., `newsource/`).
2. Implement `scan.js` with a function that returns `{ ok, items, error? }` matching the SkillsCatalogItem contract.
3. Implement `install.js` with a function that accepts selections and returns `{ ok, installed, skipped, error? }`.
4. Add the source to `CURATED_SKILLS_SOURCES` in `curated-sources.js` if it should appear in the default catalog.
5. Update `packages/web/server/index.js` to import and wire up the new source.
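A minimal sketch of step 2 for a hypothetical `newsource/scan.js`; the registry URL and response fields are placeholders, not an existing API:
```js
// Hypothetical packages/web/server/lib/skills-catalog/newsource/scan.js
export async function scanNewSource() {
  try {
    const response = await fetch('https://example.com/api/skills');
    if (!response.ok) {
      return { ok: false, error: { kind: 'networkError', message: `Registry error: ${response.status}` } };
    }
    const data = await response.json();
    const items = (Array.isArray(data.skills) ? data.skills : []).map((skill) => ({
      sourceId: 'newsource',
      repoSource: 'newsource:registry',
      repoSubpath: null,
      gitIdentityId: null,
      skillDir: skill.slug,
      skillName: skill.slug,
      frontmatterName: skill.displayName || skill.slug,
      description: skill.summary || null,
      installable: true,
      warnings: [],
    }));
    return { ok: true, items };
  } catch (error) {
    return {
      ok: false,
      error: { kind: 'networkError', message: error instanceof Error ? error.message : 'Scan failed' },
    };
  }
}
```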
### Skill Name Validation
- All skill names must match `/^[a-z0-9][a-z0-9-]*[a-z0-9]$|^[a-z0-9]$/` (1-64 chars).
- Skill names are derived from directory basenames for GitHub repos and slugs for ClawdHub.
- Invalid names result in non-installable skills with appropriate warnings.
### Git Cloning Strategy
- Use sparse checkout to minimize clone size: `sparse-checkout init`, `sparse-checkout set`, `checkout HEAD`.
- Preferred clone uses `--depth=1 --filter=blob:none` for partial clone with fallback to `--depth=1`.
- Always use non-interactive mode (`GIT_TERMINAL_PROMPT=0`) to avoid hangs.
- SSH keys are injected via `core.sshCommand` in git config.
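Sketched with the `runGit` helper, the full sequence looks roughly like this (the temp directory, repository, and skill directory are illustrative):
```js
import { runGit } from './git.js';

const tempDir = '/tmp/openchamber-skills-example';
const cloneUrl = 'https://github.com/anthropics/skills.git';

// Preferred partial clone, falling back to a plain shallow clone.
let clone = await runGit(['clone', '--depth', '1', '--filter=blob:none', '--no-checkout', cloneUrl, tempDir]);
if (!clone.ok) {
  clone = await runGit(['clone', '--depth', '1', '--no-checkout', cloneUrl, tempDir]);
}

// Materialize only the requested skill directory, then check it out.
await runGit(['-C', tempDir, 'sparse-checkout', 'init', '--cone']);
await runGit(['-C', tempDir, 'sparse-checkout', 'set', 'skills/pdf-tools']);
await runGit(['-C', tempDir, 'checkout', '--force', 'HEAD']);
```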
### Conflict Resolution
- Installation checks for existing skills before downloading/cloning.
- Three conflict policies: `prompt`, `skipAll`, `overwriteAll`.
- Per-skill decisions override global policy via `conflictDecisions` map.
- Conflict response includes `{ skillName, scope, source }` for each conflict.
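A sketch of how a caller combines the policy and per-skill decisions (paths and skill directories are illustrative):
```js
import { installSkillsFromRepository } from './install.js';

const result = await installSkillsFromRepository({
  source: 'anthropics/skills',
  scope: 'user',
  targetSource: 'opencode',
  userSkillDir: '/home/user/.config/opencode/skills',
  selections: [{ skillDir: 'skills/pdf-tools' }, { skillDir: 'skills/web-scraper' }],
  conflictPolicy: 'skipAll',
  conflictDecisions: { 'pdf-tools': 'overwrite' }, // per-skill override of the global policy
});

if (!result.ok && result.error.kind === 'conflicts') {
  console.warn('Conflicts to resolve:', result.error.conflicts);
}
```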
### ClawdHub Integration
- ClawdHub API base URL: `https://clawdhub.com/api/v1`.
- Pagination uses cursor-based approach with `MAX_PAGES=20` safety limit.
- Rate limiting: 120 req/min with 100ms delay between requests.
- Downloaded skills are extracted from ZIP files using `adm-zip`.
- Always validate `SKILL.md` exists before installation.
### Cache Management
- Cache keys include `normalizedRepo`, `subpath`, and `identityId` for isolation.
- Default TTL is 30 minutes; can be overridden via `ttlMs` parameter.
- Cache is in-memory (not persisted across restarts).
### Security Considerations
- Path traversal protection in `copyDirectoryNoSymlinks`: resolves real paths and checks containment.
- Symlinks are explicitly rejected to prevent escape from skill directory.
- SSH key paths are trimmed but not escaped in `git.js` (assumes safe input from profiles).
- Temporary directories are cleaned up in `finally` blocks.
### Error Handling
- All exported functions return `{ ok, ... }` result objects, not throw.
- Error kinds: `authRequired`, `networkError`, `conflicts`, `invalidSource`, `unknown`.
- Use `looksLikeAuthError` to detect SSH/HTTPS auth failures for better UX.
- Log errors to console for debugging but return structured errors to callers.
### Testing
- Run `bun run type-check`, `bun run lint`, and `bun run build` before finalizing changes.
- Consider edge cases: non-existent repos, private repos without auth, missing SKILL.md files, invalid skill names, conflicts, network failures.
## Verification Commands
- Type-check: `bun run type-check`
- Lint: `bun run lint`
- Build: `bun run build`


@@ -0,0 +1,29 @@
const DEFAULT_TTL_MS = 30 * 60 * 1000;
const cache = new Map();
export function getCacheKey({ normalizedRepo, subpath, identityId }) {
const safeRepo = String(normalizedRepo || '').trim();
const safeSubpath = String(subpath || '').trim();
const safeIdentity = String(identityId || '').trim();
return `${safeRepo}::${safeSubpath}::${safeIdentity}`;
}
export function getCachedScan(key) {
const entry = cache.get(key);
if (!entry) return null;
if (Date.now() >= entry.expiresAt) {
cache.delete(key);
return null;
}
return entry.value;
}
export function setCachedScan(key, value, ttlMs = DEFAULT_TTL_MS) {
const ttl = Number.isFinite(ttlMs) ? ttlMs : DEFAULT_TTL_MS;
cache.set(key, { expiresAt: Date.now() + ttl, value });
}
export function clearCache() {
cache.clear();
}


@@ -0,0 +1,158 @@
/**
* ClawdHub API client
*
* ClawdHub is a public skill registry at https://clawdhub.com
* This client provides methods to fetch skills list and download skill packages.
*/
const CLAWDHUB_API_BASE = 'https://clawdhub.com/api/v1';
const CLAWDHUB_PAGE_LIMIT = 25;
// Rate limiting: ClawdHub allows 120 requests/minute
const RATE_LIMIT_DELAY_MS = 100;
let lastRequestTime = 0;
async function rateLimitedFetch(url, options = {}) {
const maxAttempts = 10;
let lastResponse = null;
for (let attempt = 0; attempt < maxAttempts; attempt++) {
const now = Date.now();
const elapsed = now - lastRequestTime;
if (elapsed < RATE_LIMIT_DELAY_MS) {
await new Promise((resolve) => setTimeout(resolve, RATE_LIMIT_DELAY_MS - elapsed));
}
lastRequestTime = Date.now();
const response = await fetch(url, {
...options,
headers: {
Accept: 'application/json',
'User-Agent': 'OpenChamber/1.0',
...options.headers,
},
});
lastResponse = response;
if (response.status === 429 || response.status >= 500) {
if (attempt < maxAttempts - 1) {
const waitMs = 50 * (attempt + 1);
await new Promise((resolve) => setTimeout(resolve, waitMs));
continue;
}
}
return response;
}
return lastResponse;
}
/**
* Fetch paginated list of skills from ClawdHub
* @param {Object} options
* @param {string} [options.cursor] - Pagination cursor from previous response
* @returns {Promise<{ items: Array, nextCursor?: string }>}
*/
export async function fetchClawdHubSkills({ cursor } = {}) {
const url = cursor
? `${CLAWDHUB_API_BASE}/skills?cursor=${encodeURIComponent(cursor)}&limit=${CLAWDHUB_PAGE_LIMIT}`
: `${CLAWDHUB_API_BASE}/skills?limit=${CLAWDHUB_PAGE_LIMIT}`;
const response = await rateLimitedFetch(url);
if (!response.ok) {
const text = await response.text().catch(() => '');
throw new Error(`ClawdHub API error (${response.status}): ${text || response.statusText}`);
}
const data = await response.json();
const nextCursor =
(typeof data.nextCursor === 'string' && data.nextCursor) ||
(typeof data.next_cursor === 'string' && data.next_cursor) ||
(typeof data.next === 'string' && data.next) ||
(typeof data.cursor === 'string' && data.cursor) ||
null;
return {
items: data.items || [],
nextCursor,
};
}
/**
* Fetch details for a specific skill version
* @param {string} slug - Skill slug/identifier
* @param {string} [version='latest'] - Version string or 'latest'
* @returns {Promise<{ skill: Object, version: Object }>}
*/
export async function fetchClawdHubSkillVersion(slug, version = 'latest') {
// For 'latest', we need to first get the skill metadata to find the latest version
if (version === 'latest') {
const skillResponse = await rateLimitedFetch(`${CLAWDHUB_API_BASE}/skills/${encodeURIComponent(slug)}`);
if (!skillResponse.ok) {
throw new Error(`ClawdHub skill not found: ${slug}`);
}
const skillData = await skillResponse.json();
const latestVersion = skillData.skill?.tags?.latest || skillData.latestVersion?.version;
if (!latestVersion) {
throw new Error(`No latest version found for skill: ${slug}`);
}
version = latestVersion;
}
const url = `${CLAWDHUB_API_BASE}/skills/${encodeURIComponent(slug)}/versions/${encodeURIComponent(version)}`;
const response = await rateLimitedFetch(url);
if (!response.ok) {
const text = await response.text().catch(() => '');
throw new Error(`ClawdHub version error (${response.status}): ${text || response.statusText}`);
}
return response.json();
}
/**
* Download a skill package as a ZIP buffer
* @param {string} slug - Skill slug/identifier
* @param {string} version - Specific version string
* @returns {Promise<ArrayBuffer>} - ZIP file contents
*/
export async function downloadClawdHubSkill(slug, version) {
const versionParam = typeof version === 'string' && version !== 'latest'
? `&version=${encodeURIComponent(version)}`
: '&tag=latest';
const url = `${CLAWDHUB_API_BASE}/download?slug=${encodeURIComponent(slug)}${versionParam}`;
const response = await rateLimitedFetch(url, {
headers: {
Accept: 'application/zip',
},
});
if (!response.ok) {
const text = await response.text().catch(() => '');
throw new Error(`ClawdHub download error (${response.status}): ${text || response.statusText}`);
}
return response.arrayBuffer();
}
/**
* Get skill metadata without version details
* @param {string} slug - Skill slug/identifier
* @returns {Promise<Object>}
*/
export async function fetchClawdHubSkillInfo(slug) {
const url = `${CLAWDHUB_API_BASE}/skills/${encodeURIComponent(slug)}`;
const response = await rateLimitedFetch(url);
if (!response.ok) {
const text = await response.text().catch(() => '');
throw new Error(`ClawdHub skill error (${response.status}): ${text || response.statusText}`);
}
return response.json();
}


@@ -0,0 +1,30 @@
/**
* ClawdHub integration module
*
* Provides skill browsing and installation from the ClawdHub registry.
* https://clawdhub.com
*/
export { scanClawdHub, scanClawdHubPage } from './scan.js';
export { installSkillsFromClawdHub } from './install.js';
export {
fetchClawdHubSkills,
fetchClawdHubSkillVersion,
fetchClawdHubSkillInfo,
downloadClawdHubSkill,
} from './api.js';
/**
* Check if a source string refers to ClawdHub
* @param {string} source
* @returns {boolean}
*/
export function isClawdHubSource(source) {
return typeof source === 'string' && source.startsWith('clawdhub:');
}
/**
* ClawdHub source identifier used in curated sources
*/
export const CLAWDHUB_SOURCE_ID = 'clawdhub';
export const CLAWDHUB_SOURCE_STRING = 'clawdhub:registry';


@@ -0,0 +1,238 @@
/**
* ClawdHub skill installation
*
* Downloads skills from ClawdHub as ZIP files and extracts them
* to the appropriate skill directory.
*/
import fs from 'fs';
import os from 'os';
import path from 'path';
import AdmZip from 'adm-zip';
import { downloadClawdHubSkill, fetchClawdHubSkillInfo } from './api.js';
const SKILL_NAME_PATTERN = /^[a-z0-9][a-z0-9-]*[a-z0-9]$|^[a-z0-9]$/;
function normalizeUserSkillDir(userSkillDir) {
if (!userSkillDir) return null;
const legacySkillDir = path.join(os.homedir(), '.config', 'opencode', 'skill');
const pluralSkillDir = path.join(os.homedir(), '.config', 'opencode', 'skills');
if (userSkillDir === legacySkillDir) {
if (fs.existsSync(legacySkillDir) && !fs.existsSync(pluralSkillDir)) return legacySkillDir;
return pluralSkillDir;
}
return userSkillDir;
}
function validateSkillName(skillName) {
if (typeof skillName !== 'string') return false;
if (skillName.length < 1 || skillName.length > 64) return false;
return SKILL_NAME_PATTERN.test(skillName);
}
async function safeRm(dir) {
try {
await fs.promises.rm(dir, { recursive: true, force: true });
} catch {
// ignore
}
}
async function ensureDir(dirPath) {
await fs.promises.mkdir(dirPath, { recursive: true });
}
function getTargetSkillDir({ scope, targetSource, workingDirectory, userSkillDir, skillName }) {
const source = targetSource === 'agents' ? 'agents' : 'opencode';
if (scope === 'user') {
if (source === 'agents') {
return path.join(os.homedir(), '.agents', 'skills', skillName);
}
return path.join(userSkillDir, skillName);
}
if (!workingDirectory) {
throw new Error('workingDirectory is required for project installs');
}
if (source === 'agents') {
return path.join(workingDirectory, '.agents', 'skills', skillName);
}
return path.join(workingDirectory, '.opencode', 'skills', skillName);
}
/**
* Install skills from ClawdHub registry
* @param {Object} options
* @param {string} options.scope - 'user' or 'project'
* @param {string} [options.targetSource] - 'opencode' or 'agents'
* @param {string} [options.workingDirectory] - Required for project scope
* @param {string} options.userSkillDir - User skills directory
* @param {Array} options.selections - Array of { skillDir, clawdhub: { slug, version } }
* @param {string} [options.conflictPolicy] - 'prompt', 'skipAll', or 'overwriteAll'
* @param {Object} [options.conflictDecisions] - Per-skill conflict decisions
* @returns {Promise<{ ok: boolean, installed?: Array, skipped?: Array, error?: Object }>}
*/
export async function installSkillsFromClawdHub({
scope,
targetSource,
workingDirectory,
userSkillDir,
selections,
conflictPolicy,
conflictDecisions,
} = {}) {
if (scope !== 'user' && scope !== 'project') {
return { ok: false, error: { kind: 'invalidSource', message: 'Invalid scope' } };
}
if (targetSource !== undefined && targetSource !== 'opencode' && targetSource !== 'agents') {
return { ok: false, error: { kind: 'invalidSource', message: 'Invalid target source' } };
}
if (!userSkillDir) {
return { ok: false, error: { kind: 'unknown', message: 'userSkillDir is required' } };
}
const normalizedUserSkillDir = normalizeUserSkillDir(userSkillDir);
if (normalizedUserSkillDir) {
userSkillDir = normalizedUserSkillDir;
}
if (scope === 'project' && !workingDirectory) {
return { ok: false, error: { kind: 'invalidSource', message: 'Project installs require a directory parameter' } };
}
const requestedSkills = Array.isArray(selections) ? selections : [];
if (requestedSkills.length === 0) {
return { ok: false, error: { kind: 'invalidSource', message: 'No skills selected for installation' } };
}
// Build installation plans
const skillPlans = requestedSkills.map((sel) => {
const slug = sel.clawdhub?.slug || sel.skillDir;
const version = sel.clawdhub?.version || 'latest';
return {
slug,
version,
installable: validateSkillName(slug),
};
});
// Check for conflicts before downloading
const conflicts = [];
for (const plan of skillPlans) {
if (!plan.installable) {
continue;
}
const targetDir = getTargetSkillDir({ scope, targetSource, workingDirectory, userSkillDir, skillName: plan.slug });
if (fs.existsSync(targetDir)) {
const decision = conflictDecisions?.[plan.slug];
const hasAutoPolicy = conflictPolicy === 'skipAll' || conflictPolicy === 'overwriteAll';
if (!decision && !hasAutoPolicy) {
conflicts.push({ skillName: plan.slug, scope, source: targetSource === 'agents' ? 'agents' : 'opencode' });
}
}
}
if (conflicts.length > 0) {
return {
ok: false,
error: {
kind: 'conflicts',
message: 'Some skills already exist in the selected scope',
conflicts,
},
};
}
const installed = [];
const skipped = [];
for (const plan of skillPlans) {
if (!plan.installable) {
skipped.push({ skillName: plan.slug, reason: 'Invalid skill name' });
continue;
}
try {
// Resolve 'latest' version if needed
let resolvedVersion = plan.version;
if (resolvedVersion === 'latest') {
try {
const info = await fetchClawdHubSkillInfo(plan.slug);
const latest = info.skill?.tags?.latest || info.latestVersion?.version || null;
if (latest) {
resolvedVersion = latest;
}
} catch {
// ignore
}
if (resolvedVersion === 'latest') {
skipped.push({ skillName: plan.slug, reason: 'Unable to resolve latest version' });
continue;
}
}
const targetDir = getTargetSkillDir({ scope, targetSource, workingDirectory, userSkillDir, skillName: plan.slug });
const exists = fs.existsSync(targetDir);
// Determine conflict resolution
let decision = conflictDecisions?.[plan.slug] || null;
if (!decision) {
if (exists && conflictPolicy === 'skipAll') decision = 'skip';
if (exists && conflictPolicy === 'overwriteAll') decision = 'overwrite';
if (!exists) decision = 'overwrite'; // No conflict, proceed
}
if (exists && decision === 'skip') {
skipped.push({ skillName: plan.slug, reason: 'Already installed (skipped)' });
continue;
}
if (exists && decision === 'overwrite') {
await safeRm(targetDir);
}
// Download the skill ZIP
const zipBuffer = await downloadClawdHubSkill(plan.slug, resolvedVersion);
// Extract to a temp directory first for validation
const tempDir = await fs.promises.mkdtemp(path.join(os.tmpdir(), `clawdhub-${plan.slug}-`));
try {
const zip = new AdmZip(Buffer.from(zipBuffer));
zip.extractAllTo(tempDir, true);
// Verify SKILL.md exists
const skillMdPath = path.join(tempDir, 'SKILL.md');
if (!fs.existsSync(skillMdPath)) {
// Clean up the extracted temp directory before skipping this skill.
await safeRm(tempDir);
skipped.push({ skillName: plan.slug, reason: 'SKILL.md not found in downloaded package' });
continue;
}
// Move to target directory
await ensureDir(path.dirname(targetDir));
await fs.promises.rename(tempDir, targetDir);
installed.push({ skillName: plan.slug, scope, source: targetSource === 'agents' ? 'agents' : 'opencode' });
} catch (extractError) {
await safeRm(tempDir);
throw extractError;
}
} catch (error) {
console.error(`Failed to install ClawdHub skill "${plan.slug}":`, error);
skipped.push({
skillName: plan.slug,
reason: error instanceof Error ? error.message : 'Failed to download or extract skill',
});
}
}
return { ok: true, installed, skipped };
}


@@ -0,0 +1,113 @@
/**
* ClawdHub skill scanning
*
* Fetches all available skills from the ClawdHub registry
* and transforms them into SkillsCatalogItem format.
*/
import { fetchClawdHubSkills } from './api.js';
const MAX_PAGES = 20; // Safety limit to prevent infinite loops
const CLAWDHUB_PAGE_LIMIT = 25;
const mapClawdHubItem = (item) => {
const latestVersion = item.tags?.latest || item.latestVersion?.version || '1.0.0';
return {
sourceId: 'clawdhub',
repoSource: 'clawdhub:registry',
repoSubpath: null,
gitIdentityId: null,
skillDir: item.slug,
skillName: item.slug,
frontmatterName: item.displayName || item.slug,
description: item.summary || null,
installable: true,
warnings: [],
// ClawdHub-specific metadata
clawdhub: {
slug: item.slug,
version: latestVersion,
displayName: item.displayName,
owner: item.owner?.handle || null,
downloads: item.stats?.downloads || 0,
stars: item.stats?.stars || 0,
versionsCount: item.stats?.versions || 1,
createdAt: item.createdAt,
updatedAt: item.updatedAt,
},
};
};
/**
* Scan ClawdHub registry for all available skills
* @returns {Promise<{ ok: boolean, items?: Array, error?: Object }>}
*/
export async function scanClawdHub() {
try {
const allItems = [];
let cursor = null;
for (let page = 0; page < MAX_PAGES; page++) {
let items = [];
let nextCursor = null;
try {
const pageResult = await fetchClawdHubSkills({ cursor });
items = pageResult.items || [];
nextCursor = pageResult.nextCursor || null;
} catch (error) {
if (page > 0 && allItems.length > 0) {
console.warn('ClawdHub pagination failed; returning partial results.');
break;
}
throw error;
}
for (const item of items) {
allItems.push(mapClawdHubItem(item));
}
if (!nextCursor) {
break;
}
cursor = nextCursor;
}
// Sort by downloads (most popular first)
allItems.sort((a, b) => (b.clawdhub?.downloads || 0) - (a.clawdhub?.downloads || 0));
return { ok: true, items: allItems };
} catch (error) {
console.error('ClawdHub scan error:', error);
return {
ok: false,
error: {
kind: 'networkError',
message: error instanceof Error ? error.message : 'Failed to fetch skills from ClawdHub',
},
};
}
}
/**
* Scan a single ClawdHub page (cursor-based)
* @returns {Promise<{ ok: boolean, items?: Array, nextCursor?: string | null, error?: Object }>}
*/
export async function scanClawdHubPage({ cursor } = {}) {
try {
const { items, nextCursor } = await fetchClawdHubSkills({ cursor });
const mapped = (items || []).map(mapClawdHubItem).slice(0, CLAWDHUB_PAGE_LIMIT);
mapped.sort((a, b) => (b.clawdhub?.downloads || 0) - (a.clawdhub?.downloads || 0));
return { ok: true, items: mapped, nextCursor: nextCursor || null };
} catch (error) {
console.error('ClawdHub page scan error:', error);
return {
ok: false,
error: {
kind: 'networkError',
message: error instanceof Error ? error.message : 'Failed to fetch skills from ClawdHub',
},
};
}
}


@@ -0,0 +1,21 @@
export const CURATED_SKILLS_SOURCES = [
{
id: 'anthropic',
label: 'Anthropic',
description: "Anthropic's public skills repository",
source: 'anthropics/skills',
defaultSubpath: 'skills',
sourceType: 'github',
},
{
id: 'clawdhub',
label: 'ClawdHub',
description: 'Community skill registry with vector search',
source: 'clawdhub:registry',
sourceType: 'clawdhub',
},
];
export function getCuratedSkillsSources() {
return CURATED_SKILLS_SOURCES.slice();
}


@@ -0,0 +1,76 @@
import { execFile } from 'child_process';
import { promisify } from 'util';
const execFileAsync = promisify(execFile);
const DEFAULT_TIMEOUT_MS = 60_000;
const DEFAULT_MAX_BUFFER = 4 * 1024 * 1024;
export function looksLikeAuthError(message) {
const text = String(message || '');
return (
/permission denied/i.test(text) ||
/publickey/i.test(text) ||
/could not read from remote repository/i.test(text) ||
/authentication failed/i.test(text) ||
/fatal: could not/i.test(text)
);
}
export async function runGit(args, options = {}) {
const cwd = options.cwd;
const timeoutMs = Number.isFinite(options.timeoutMs) ? options.timeoutMs : DEFAULT_TIMEOUT_MS;
const maxBuffer = Number.isFinite(options.maxBuffer) ? options.maxBuffer : DEFAULT_MAX_BUFFER;
const identity = options.identity || null;
const normalizedArgs = Array.isArray(args) ? args.slice() : [];
// Non-interactive git (avoid prompts / hangs)
const env = {
...process.env,
GIT_TERMINAL_PROMPT: '0',
};
if (identity?.sshKey) {
const sshKeyPath = String(identity.sshKey).trim();
if (sshKeyPath) {
// Avoid interactive host key prompts; still safe against changed keys.
const sshCommand = `ssh -i ${sshKeyPath} -o BatchMode=yes -o StrictHostKeyChecking=accept-new`;
normalizedArgs.unshift(`core.sshCommand=${sshCommand}`);
normalizedArgs.unshift('-c');
}
}
try {
const { stdout, stderr } = await execFileAsync('git', normalizedArgs, {
cwd,
env,
timeout: timeoutMs,
maxBuffer,
});
return { ok: true, stdout: stdout || '', stderr: stderr || '' };
} catch (error) {
const err = error;
const stdout = typeof err?.stdout === 'string' ? err.stdout : '';
const stderr = typeof err?.stderr === 'string' ? err.stderr : '';
const message = err instanceof Error ? err.message : String(err);
return {
ok: false,
stdout,
stderr,
message,
code: typeof err?.code === 'number' ? err.code : null,
signal: typeof err?.signal === 'string' ? err.signal : null,
};
}
}
export async function assertGitAvailable() {
const result = await runGit(['--version'], { timeoutMs: 5_000 });
if (!result.ok) {
return { ok: false, error: { kind: 'gitUnavailable', message: 'Git is not available in PATH' } };
}
return { ok: true };
}


@@ -0,0 +1,42 @@
/**
* Skills catalog module
*
* Provides skill scanning, installation, and caching from GitHub repositories and ClawdHub.
*/
export {
CURATED_SKILLS_SOURCES,
getCuratedSkillsSources,
} from './curated-sources.js';
export {
getCacheKey,
getCachedScan,
setCachedScan,
clearCache,
} from './cache.js';
export {
parseSkillRepoSource,
} from './source.js';
export {
scanSkillsRepository,
} from './scan.js';
export {
installSkillsFromRepository,
} from './install.js';
export {
scanClawdHub,
scanClawdHubPage,
installSkillsFromClawdHub,
fetchClawdHubSkills,
fetchClawdHubSkillVersion,
fetchClawdHubSkillInfo,
downloadClawdHubSkill,
isClawdHubSource,
CLAWDHUB_SOURCE_ID,
CLAWDHUB_SOURCE_STRING,
} from './clawdhub/index.js';


@@ -0,0 +1,294 @@
import fs from 'fs';
import os from 'os';
import path from 'path';
import { assertGitAvailable, looksLikeAuthError, runGit } from './git.js';
import { parseSkillRepoSource } from './source.js';
const SKILL_NAME_PATTERN = /^[a-z0-9][a-z0-9-]*[a-z0-9]$|^[a-z0-9]$/;
function normalizeUserSkillDir(userSkillDir) {
if (!userSkillDir) return null;
const legacySkillDir = path.join(os.homedir(), '.config', 'opencode', 'skill');
const pluralSkillDir = path.join(os.homedir(), '.config', 'opencode', 'skills');
if (userSkillDir === legacySkillDir) {
if (fs.existsSync(legacySkillDir) && !fs.existsSync(pluralSkillDir)) return legacySkillDir;
return pluralSkillDir;
}
return userSkillDir;
}
function validateSkillName(skillName) {
if (typeof skillName !== 'string') return false;
if (skillName.length < 1 || skillName.length > 64) return false;
return SKILL_NAME_PATTERN.test(skillName);
}
async function safeRm(dir) {
try {
await fs.promises.rm(dir, { recursive: true, force: true });
} catch {
// ignore
}
}
function toFsPath(repoDir, repoRelPosixPath) {
const parts = String(repoRelPosixPath || '')
.split('/')
.map((p) => p.trim())
.filter(Boolean);
return path.join(repoDir, ...parts);
}
async function ensureDir(dirPath) {
await fs.promises.mkdir(dirPath, { recursive: true });
}
async function copyDirectoryNoSymlinks(srcDir, dstDir) {
const srcReal = await fs.promises.realpath(srcDir);
await ensureDir(dstDir);
const walk = async (currentSrc, currentDst) => {
const entries = await fs.promises.readdir(currentSrc, { withFileTypes: true });
for (const entry of entries) {
const nextSrc = path.join(currentSrc, entry.name);
const nextDst = path.join(currentDst, entry.name);
const stat = await fs.promises.lstat(nextSrc);
if (stat.isSymbolicLink()) {
throw new Error('Symlinks are not supported in skills');
}
// Guard against traversal: ensure source is still under srcReal
const nextRealParent = await fs.promises.realpath(path.dirname(nextSrc));
if (!nextRealParent.startsWith(srcReal)) {
throw new Error('Invalid source path traversal detected');
}
if (stat.isDirectory()) {
await ensureDir(nextDst);
await walk(nextSrc, nextDst);
continue;
}
if (stat.isFile()) {
await ensureDir(path.dirname(nextDst));
await fs.promises.copyFile(nextSrc, nextDst);
try {
await fs.promises.chmod(nextDst, stat.mode & 0o777);
} catch {
// best-effort
}
continue;
}
// Skip other types (sockets, devices, etc.)
}
};
await walk(srcDir, dstDir);
}
async function cloneRepo({ cloneUrl, identity, tempDir }) {
const preferred = ['clone', '--depth', '1', '--filter=blob:none', '--no-checkout', cloneUrl, tempDir];
const fallback = ['clone', '--depth', '1', '--no-checkout', cloneUrl, tempDir];
const result = await runGit(preferred, { identity, timeoutMs: 90_000 });
if (result.ok) return { ok: true };
const fallbackResult = await runGit(fallback, { identity, timeoutMs: 90_000 });
if (fallbackResult.ok) return { ok: true };
return {
ok: false,
error: fallbackResult,
};
}
function getTargetSkillDir({ scope, targetSource, workingDirectory, userSkillDir, skillName }) {
const source = targetSource === 'agents' ? 'agents' : 'opencode';
if (scope === 'user') {
if (source === 'agents') {
return path.join(os.homedir(), '.agents', 'skills', skillName);
}
return path.join(userSkillDir, skillName);
}
if (!workingDirectory) {
throw new Error('workingDirectory is required for project installs');
}
if (source === 'agents') {
return path.join(workingDirectory, '.agents', 'skills', skillName);
}
return path.join(workingDirectory, '.opencode', 'skills', skillName);
}
export async function installSkillsFromRepository({
source,
subpath,
defaultSubpath,
identity,
scope,
targetSource,
workingDirectory,
userSkillDir,
selections,
conflictPolicy,
conflictDecisions,
} = {}) {
const gitCheck = await assertGitAvailable();
if (!gitCheck.ok) {
return { ok: false, error: gitCheck.error };
}
const normalizedUserSkillDir = normalizeUserSkillDir(userSkillDir);
if (normalizedUserSkillDir) {
userSkillDir = normalizedUserSkillDir;
}
if (!userSkillDir) {
return { ok: false, error: { kind: 'unknown', message: 'userSkillDir is required' } };
}
if (scope !== 'user' && scope !== 'project') {
return { ok: false, error: { kind: 'invalidSource', message: 'Invalid scope' } };
}
if (targetSource !== undefined && targetSource !== 'opencode' && targetSource !== 'agents') {
return { ok: false, error: { kind: 'invalidSource', message: 'Invalid target source' } };
}
if (scope === 'project' && !workingDirectory) {
return { ok: false, error: { kind: 'invalidSource', message: 'Project installs require a directory parameter' } };
}
const parsed = parseSkillRepoSource(source, { subpath });
if (!parsed.ok) {
return { ok: false, error: parsed.error };
}
const effectiveSubpath = parsed.effectiveSubpath || (typeof defaultSubpath === 'string' && defaultSubpath.trim() ? defaultSubpath.trim() : null);
void effectiveSubpath;
const cloneUrl = identity?.sshKey ? parsed.cloneUrlSsh : parsed.cloneUrlHttps;
const requestedDirs = Array.isArray(selections) ? selections.map((s) => String(s?.skillDir || '').trim()).filter(Boolean) : [];
if (requestedDirs.length === 0) {
return { ok: false, error: { kind: 'invalidSource', message: 'No skills selected for installation' } };
}
// Validate names early and compute conflicts without mutating.
const skillPlans = requestedDirs.map((skillDirPosix) => {
const skillName = path.posix.basename(skillDirPosix);
return { skillDirPosix, skillName, installable: validateSkillName(skillName) };
});
const conflicts = [];
for (const plan of skillPlans) {
if (!plan.installable) {
continue;
}
const targetDir = getTargetSkillDir({ scope, targetSource, workingDirectory, userSkillDir, skillName: plan.skillName });
if (fs.existsSync(targetDir)) {
const decision = conflictDecisions?.[plan.skillName];
const hasAutoPolicy = conflictPolicy === 'skipAll' || conflictPolicy === 'overwriteAll';
if (!decision && !hasAutoPolicy) {
conflicts.push({ skillName: plan.skillName, scope, source: targetSource === 'agents' ? 'agents' : 'opencode' });
}
}
}
if (conflicts.length > 0) {
return {
ok: false,
error: {
kind: 'conflicts',
message: 'Some skills already exist in the selected scope',
conflicts,
},
};
}
const tempBase = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'openchamber-skills-install-'));
try {
const cloned = await cloneRepo({ cloneUrl, identity, tempDir: tempBase });
if (!cloned.ok) {
const msg = `${cloned.error?.stderr || ''}\n${cloned.error?.message || ''}`.trim();
if (looksLikeAuthError(msg)) {
return { ok: false, error: { kind: 'authRequired', message: 'Authentication required to access this repository', sshOnly: true } };
}
return { ok: false, error: { kind: 'networkError', message: msg || 'Failed to clone repository' } };
}
// Selective checkout for only requested skill dirs.
await runGit(['-C', tempBase, 'sparse-checkout', 'init', '--cone'], { identity, timeoutMs: 15_000 });
const setResult = await runGit(['-C', tempBase, 'sparse-checkout', 'set', ...requestedDirs], { identity, timeoutMs: 30_000 });
if (!setResult.ok) {
return { ok: false, error: { kind: 'unknown', message: setResult.stderr || setResult.message || 'Failed to configure sparse checkout' } };
}
const checkoutResult = await runGit(['-C', tempBase, 'checkout', '--force', 'HEAD'], { identity, timeoutMs: 60_000 });
if (!checkoutResult.ok) {
return { ok: false, error: { kind: 'unknown', message: checkoutResult.stderr || checkoutResult.message || 'Failed to checkout repository' } };
}
const installed = [];
const skipped = [];
for (const plan of skillPlans) {
if (!plan.installable) {
skipped.push({ skillName: plan.skillName, reason: 'Invalid skill name (directory basename)' });
continue;
}
const srcDir = toFsPath(tempBase, plan.skillDirPosix);
const skillMdPath = path.join(srcDir, 'SKILL.md');
if (!fs.existsSync(skillMdPath)) {
skipped.push({ skillName: plan.skillName, reason: 'SKILL.md not found in selected directory' });
continue;
}
const targetDir = getTargetSkillDir({ scope, targetSource, workingDirectory, userSkillDir, skillName: plan.skillName });
const exists = fs.existsSync(targetDir);
let decision = conflictDecisions?.[plan.skillName] || null;
if (!decision) {
if (exists && conflictPolicy === 'skipAll') decision = 'skip';
if (exists && conflictPolicy === 'overwriteAll') decision = 'overwrite';
if (!exists) decision = 'overwrite'; // no conflict, proceed
}
if (exists && decision === 'skip') {
skipped.push({ skillName: plan.skillName, reason: 'Already installed (skipped)' });
continue;
}
if (exists && decision === 'overwrite') {
await safeRm(targetDir);
}
// Ensure project parent directories exist
await ensureDir(path.dirname(targetDir));
try {
await copyDirectoryNoSymlinks(srcDir, targetDir);
installed.push({ skillName: plan.skillName, scope, source: targetSource === 'agents' ? 'agents' : 'opencode' });
} catch (error) {
await safeRm(targetDir);
skipped.push({
skillName: plan.skillName,
reason: error instanceof Error ? error.message : 'Failed to copy skill files',
});
}
}
return { ok: true, installed, skipped };
} finally {
await safeRm(tempBase);
}
}


@@ -0,0 +1,221 @@
import fs from 'fs';
import os from 'os';
import path from 'path';
import yaml from 'yaml';
import { assertGitAvailable, looksLikeAuthError, runGit } from './git.js';
import { parseSkillRepoSource } from './source.js';
const SKILL_NAME_PATTERN = /^[a-z0-9][a-z0-9-]*[a-z0-9]$|^[a-z0-9]$/;
function validateSkillName(skillName) {
if (typeof skillName !== 'string') return false;
if (skillName.length < 1 || skillName.length > 64) return false;
return SKILL_NAME_PATTERN.test(skillName);
}
function parseSkillMd(content) {
const text = typeof content === 'string' ? content : '';
const match = text.match(/^---\r?\n([\s\S]*?)\r?\n---\r?\n([\s\S]*)$/);
if (!match) {
return {
ok: true,
frontmatter: {},
warnings: ['Invalid SKILL.md: missing YAML frontmatter delimiter'],
};
}
try {
const frontmatter = yaml.parse(match[1]) || {};
return { ok: true, frontmatter, warnings: [] };
} catch {
return {
ok: true,
frontmatter: {},
warnings: ['Invalid SKILL.md: failed to parse YAML frontmatter'],
};
}
}
async function safeRm(dir) {
try {
await fs.promises.rm(dir, { recursive: true, force: true });
} catch {
// ignore
}
}
async function cloneRepo({ cloneUrl, identity, tempDir }) {
const preferred = ['clone', '--depth', '1', '--filter=blob:none', '--no-checkout', cloneUrl, tempDir];
const fallback = ['clone', '--depth', '1', '--no-checkout', cloneUrl, tempDir];
const result = await runGit(preferred, { identity, timeoutMs: 60_000 });
if (result.ok) return { ok: true };
const fallbackResult = await runGit(fallback, { identity, timeoutMs: 60_000 });
if (fallbackResult.ok) return { ok: true };
return {
ok: false,
error: fallbackResult,
};
}
export async function scanSkillsRepository({
source,
subpath,
defaultSubpath,
identity,
} = {}) {
const gitCheck = await assertGitAvailable();
if (!gitCheck.ok) {
return { ok: false, error: gitCheck.error };
}
const parsed = parseSkillRepoSource(source, { subpath });
if (!parsed.ok) {
return { ok: false, error: parsed.error };
}
const effectiveSubpath = parsed.effectiveSubpath || (typeof defaultSubpath === 'string' && defaultSubpath.trim() ? defaultSubpath.trim() : null);
const cloneUrl = identity?.sshKey ? parsed.cloneUrlSsh : parsed.cloneUrlHttps;
const tempBase = await fs.promises.mkdtemp(path.join(os.tmpdir(), 'openchamber-skills-scan-'));
try {
const cloned = await cloneRepo({ cloneUrl, identity, tempDir: tempBase });
if (!cloned.ok) {
const msg = `${cloned.error?.stderr || ''}\n${cloned.error?.message || ''}`.trim();
if (looksLikeAuthError(msg)) {
return { ok: false, error: { kind: 'authRequired', message: 'Authentication required to access this repository', sshOnly: true } };
}
return { ok: false, error: { kind: 'networkError', message: msg || 'Failed to clone repository' } };
}
const toFsPath = (posixPath) => path.join(tempBase, ...String(posixPath || '').split('/').filter(Boolean));
const patterns = effectiveSubpath
? [`${effectiveSubpath}/SKILL.md`, `${effectiveSubpath}/**/SKILL.md`]
: ['SKILL.md', '**/SKILL.md'];
let skillMdPaths = null;
// Fast path: sparse checkout only SKILL.md files, then parse from disk.
// This avoids one `git show` per skill.
const sparseInit = await runGit(['-C', tempBase, 'sparse-checkout', 'init', '--no-cone'], { identity, timeoutMs: 15_000 });
if (sparseInit.ok) {
const sparseSet = await runGit(['-C', tempBase, 'sparse-checkout', 'set', ...patterns], { identity, timeoutMs: 30_000 });
if (sparseSet.ok) {
const checkout = await runGit(['-C', tempBase, 'checkout', '--force', 'HEAD'], { identity, timeoutMs: 60_000 });
if (checkout.ok) {
const lsFiles = await runGit(['-C', tempBase, 'ls-files'], { identity, timeoutMs: 15_000 });
if (lsFiles.ok) {
skillMdPaths = lsFiles.stdout
.split(/\r?\n/)
.map((line) => line.trim())
.filter(Boolean)
.filter((p) => p.endsWith('/SKILL.md') || p === 'SKILL.md');
}
}
}
}
// Fallback: list tree and read SKILL.md blobs via git.
if (!Array.isArray(skillMdPaths)) {
const listArgs = ['-C', tempBase, 'ls-tree', '-r', '--name-only', 'HEAD'];
if (effectiveSubpath) {
listArgs.push('--', effectiveSubpath);
}
const listResult = await runGit(listArgs, { identity, timeoutMs: 30_000 });
if (!listResult.ok) {
// If subpath doesn't exist, treat as empty scan.
return {
ok: true,
normalizedRepo: parsed.normalizedRepo,
effectiveSubpath,
items: [],
};
}
skillMdPaths = listResult.stdout
.split(/\r?\n/)
.map((line) => line.trim())
.filter(Boolean)
.filter((p) => p.endsWith('/SKILL.md') || p === 'SKILL.md');
}
// Root-level SKILL.md doesn't map cleanly to OpenCode's "skill name == folder name" convention.
const uniqueSkillDirs = Array.from(
new Set(
skillMdPaths
.filter((p) => p !== 'SKILL.md')
.map((p) => path.posix.dirname(p))
)
);
const items = [];
const maxParallel = 10;
let idx = 0;
const worker = async () => {
while (idx < uniqueSkillDirs.length) {
const skillDir = uniqueSkillDirs[idx++];
const skillName = path.posix.basename(skillDir);
const skillMdPath = path.posix.join(skillDir, 'SKILL.md');
const warnings = [];
let skillMdContent = '';
// Prefer filesystem reads when sparse checkout succeeded.
const filePath = toFsPath(skillMdPath);
try {
skillMdContent = await fs.promises.readFile(filePath, 'utf8');
} catch {
const showResult = await runGit(['-C', tempBase, 'show', `HEAD:${skillMdPath}`], { identity, timeoutMs: 15_000 });
if (!showResult.ok) {
warnings.push('Failed to read SKILL.md');
} else {
skillMdContent = showResult.stdout;
}
}
const parsedMd = parseSkillMd(skillMdContent);
warnings.push(...(parsedMd.warnings || []));
const description = typeof parsedMd.frontmatter?.description === 'string' ? parsedMd.frontmatter.description : undefined;
const frontmatterName = typeof parsedMd.frontmatter?.name === 'string' ? parsedMd.frontmatter.name : undefined;
const installable = validateSkillName(skillName);
if (!installable) {
warnings.push('Skill directory name is not a valid OpenCode skill name');
}
items.push({
repoSource: source,
repoSubpath: effectiveSubpath || undefined,
skillDir,
skillName,
frontmatterName,
description,
installable,
warnings: warnings.length ? warnings : undefined,
});
}
};
await Promise.all(Array.from({ length: Math.min(maxParallel, uniqueSkillDirs.length || 1) }, () => worker()));
// Stable ordering for UX
items.sort((a, b) => a.skillName.localeCompare(b.skillName));
return {
ok: true,
normalizedRepo: parsed.normalizedRepo,
effectiveSubpath,
items,
};
} finally {
await safeRm(tempBase);
}
}


@@ -0,0 +1,85 @@
const GITHUB_HOST = 'github.com';
function normalizeGitHubOwnerRepo(owner, repo) {
const normalizedOwner = String(owner || '').trim();
const normalizedRepo = String(repo || '').trim().replace(/\.git$/i, '');
if (!normalizedOwner || !normalizedRepo) {
return null;
}
return { owner: normalizedOwner, repo: normalizedRepo };
}
export function parseSkillRepoSource(input, options = {}) {
const raw = typeof input === 'string' ? input.trim() : '';
if (!raw) {
return { ok: false, error: { kind: 'invalidSource', message: 'Repository source is required' } };
}
const explicitSubpath = typeof options.subpath === 'string' && options.subpath.trim() ? options.subpath.trim() : null;
// SSH URL: git@github.com:owner/repo(.git)
const sshMatch = raw.match(/^git@github\.com:([^/\s]+)\/([^\s#]+)$/i);
if (sshMatch) {
const parsed = normalizeGitHubOwnerRepo(sshMatch[1], sshMatch[2]);
if (!parsed) {
return { ok: false, error: { kind: 'invalidSource', message: 'Invalid SSH repository URL' } };
}
return {
ok: true,
host: GITHUB_HOST,
owner: parsed.owner,
repo: parsed.repo,
cloneUrlSsh: `git@github.com:${parsed.owner}/${parsed.repo}.git`,
cloneUrlHttps: `https://github.com/${parsed.owner}/${parsed.repo}.git`,
// For SSH URLs, subpath is only accepted via options.subpath
effectiveSubpath: explicitSubpath,
normalizedRepo: `${parsed.owner}/${parsed.repo}`,
};
}
// HTTPS URL: https://github.com/owner/repo(.git)
const httpsMatch = raw.match(/^https?:\/\/github\.com\/([^/\s]+)\/([^\s#]+)$/i);
if (httpsMatch) {
const parsed = normalizeGitHubOwnerRepo(httpsMatch[1], httpsMatch[2]);
if (!parsed) {
return { ok: false, error: { kind: 'invalidSource', message: 'Invalid HTTPS repository URL' } };
}
return {
ok: true,
host: GITHUB_HOST,
owner: parsed.owner,
repo: parsed.repo,
cloneUrlSsh: `git@github.com:${parsed.owner}/${parsed.repo}.git`,
cloneUrlHttps: `https://github.com/${parsed.owner}/${parsed.repo}.git`,
effectiveSubpath: explicitSubpath,
normalizedRepo: `${parsed.owner}/${parsed.repo}`,
};
}
// Shorthand: owner/repo[/subpath...]
const shorthandMatch = raw.match(/^([^/\s]+)\/([^/\s]+)(?:\/(.+))?$/);
if (shorthandMatch) {
const parsed = normalizeGitHubOwnerRepo(shorthandMatch[1], shorthandMatch[2]);
if (!parsed) {
return { ok: false, error: { kind: 'invalidSource', message: 'Invalid repository source' } };
}
const shorthandSubpath = typeof shorthandMatch[3] === 'string' && shorthandMatch[3].trim() ? shorthandMatch[3].trim() : null;
const effectiveSubpath = explicitSubpath || shorthandSubpath;
return {
ok: true,
host: GITHUB_HOST,
owner: parsed.owner,
repo: parsed.repo,
cloneUrlSsh: `git@github.com:${parsed.owner}/${parsed.repo}.git`,
cloneUrlHttps: `https://github.com/${parsed.owner}/${parsed.repo}.git`,
effectiveSubpath,
normalizedRepo: `${parsed.owner}/${parsed.repo}`,
};
}
return { ok: false, error: { kind: 'invalidSource', message: 'Unsupported repository source format' } };
}


@@ -0,0 +1,114 @@
# Terminal Module Documentation
## Purpose
This module provides WebSocket protocol utilities for terminal input handling in the web server runtime, including message normalization, control frame parsing, rate limiting, and pathname resolution for terminal WebSocket connections.
## Entrypoints and structure
- `packages/web/server/lib/terminal/`: Terminal module directory.
- `index.js`: Stable module entrypoint that re-exports protocol helpers/constants.
- `input-ws-protocol.js`: Single-file module containing all terminal input WebSocket protocol utilities.
- `packages/web/server/lib/terminal/input-ws-protocol.test.js`: Test file for protocol utilities.
Public API entry point: imported by `packages/web/server/index.js` from `./lib/terminal/index.js`.
## Public exports
### Constants
- `TERMINAL_INPUT_WS_PATH`: WebSocket endpoint path (`/api/terminal/input-ws`).
- `TERMINAL_INPUT_WS_CONTROL_TAG_JSON`: Control frame tag byte (0x01) indicating JSON payload.
- `TERMINAL_INPUT_WS_MAX_PAYLOAD_BYTES`: Maximum payload size (64KB).
### Request Parsing
- `parseRequestPathname(requestUrl)`: Extracts pathname from request URL string. Returns empty string for invalid inputs.
### Message Normalization
- `normalizeTerminalInputWsMessageToBuffer(rawData)`: Normalizes various data types (Buffer, Uint8Array, ArrayBuffer, string, chunk arrays) to a single Buffer.
- `normalizeTerminalInputWsMessageToText(rawData)`: Normalizes data to UTF-8 text string. Passes through strings directly, converts binary data to text.
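A small sketch of how the two normalizers behave across the supported input shapes:
```js
import {
  normalizeTerminalInputWsMessageToBuffer,
  normalizeTerminalInputWsMessageToText,
} from './input-ws-protocol.js';

// WebSocket 'message' events may deliver strings, Buffers, ArrayBuffers, or arrays of chunks.
const chunks = [Buffer.from('echo '), Buffer.from('hello\n')];
const buffer = normalizeTerminalInputWsMessageToBuffer(chunks);
console.log(buffer.length); // 11

// Strings pass through unchanged; binary data is decoded as UTF-8.
console.log(normalizeTerminalInputWsMessageToText('ls\n')); // 'ls\n'
console.log(normalizeTerminalInputWsMessageToText(buffer)); // 'echo hello\n'
```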
### Control Frame Handling
- `readTerminalInputWsControlFrame(rawData)`: Parses WebSocket message as control frame. Returns parsed JSON object or null if invalid/malformed. Validates control tag prefix and JSON structure.
- `createTerminalInputWsControlFrame(payload)`: Creates a control frame with JSON payload. Prepends control tag byte.
### Rate Limiting
- `pruneRebindTimestamps(timestamps, now, windowMs)`: Filters timestamps to keep only those within the active time window.
- `isRebindRateLimited(timestamps, maxPerWindow)`: Checks if rebind operations have exceeded rate limit threshold.
## Response contracts
### Control Frame
Control frames use binary encoding:
- First byte: `TERMINAL_INPUT_WS_CONTROL_TAG_JSON` (0x01)
- Remaining bytes: UTF-8 encoded JSON object
- Parsed result: Object or null on parse failure
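A roundtrip sketch (the payload fields are illustrative, not a documented protocol message):
```js
import {
  createTerminalInputWsControlFrame,
  readTerminalInputWsControlFrame,
} from './input-ws-protocol.js';

const frame = createTerminalInputWsControlFrame({ type: 'bind', sessionId: 'abc123' });
console.log(frame[0]); // 0x01 control tag byte

const payload = readTerminalInputWsControlFrame(frame);
console.log(payload); // { type: 'bind', sessionId: 'abc123' }

// Plain terminal input (no control tag) decodes to null and is treated as data.
console.log(readTerminalInputWsControlFrame(Buffer.from('ls -la\n'))); // null
```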
### Normalized Buffer
Input types are normalized to Buffer:
- `Buffer`: Returned as-is
- `Uint8Array`/`ArrayBuffer`: Converted to Buffer
- `String`: Converted to UTF-8 Buffer
- `Array<Buffer|string|Uint8Array>`: Concatenated to single Buffer
### Rate Limiting
Rate limiting uses timestamp arrays:
- `pruneRebindTimestamps`: Returns filtered array of active timestamps
- `isRebindRateLimited`: Returns boolean indicating if limit is reached
## Usage in web server
The terminal protocol utilities are used by `packages/web/server/index.js` for:
- WebSocket endpoint path definition (`TERMINAL_INPUT_WS_PATH`)
- Message normalization for input handling
- Control frame parsing for session binding
- Rate limiting for session rebind operations
- Request pathname parsing for WebSocket routing
The web server uses these utilities in combination with `bun-pty` or `node-pty` for PTY session management.
## Notes for contributors
### Adding New Control Frame Types
1. Define new control tag constants (e.g., `TERMINAL_INPUT_WS_CONTROL_TAG_CUSTOM = 0x02`)
2. Update `readTerminalInputWsControlFrame` to handle new tag type
3. Update `createTerminalInputWsControlFrame` or create new frame creation function
4. Add corresponding tests in `input-ws-protocol.test.js` (a short sketch follows)
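A hypothetical sketch of steps 1 and 3; the custom tag and builder below do not exist in the current protocol:
```js
// Hypothetical second control tag; readTerminalInputWsControlFrame would need a matching branch.
export const TERMINAL_INPUT_WS_CONTROL_TAG_CUSTOM = 0x02;

export const createTerminalInputWsCustomFrame = (payload) => {
  const jsonBytes = Buffer.from(JSON.stringify(payload), 'utf8');
  return Buffer.concat([Buffer.from([TERMINAL_INPUT_WS_CONTROL_TAG_CUSTOM]), jsonBytes]);
};
```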
### Message Normalization
- Always normalize incoming WebSocket messages before processing
- Use `normalizeTerminalInputWsMessageToBuffer` for binary data
- Use `normalizeTerminalInputWsMessageToText` for text data (terminal escape sequences)
- Normalize chunked messages from WebSocket fragmentation handling
### Rate Limiting
- Rate limiting is time-window based: tracks timestamps within a rolling window
- Use `pruneRebindTimestamps` to clean up stale timestamps before rate limit checks
- Configure `maxPerWindow` based on operational requirements (prevent abuse)
### Error Handling
- `readTerminalInputWsControlFrame` returns null for invalid/malformed frames
- `parseRequestPathname` returns empty string for invalid URLs
- Callers should handle null/empty returns gracefully
### Testing
- Run `bun run type-check`, `bun run lint`, and `bun run build` before finalizing changes
- Test edge cases: empty payloads, malformed JSON, chunked messages, rate limit boundaries
- Verify control frame roundtrip: create → read → validate payload equality
- Test pathname parsing with relative URLs, absolute URLs, and invalid inputs
## Verification notes
### Manual verification
1. Start web server and create terminal session via `/api/terminal/create`
2. Connect to `/api/terminal/input-ws` WebSocket
3. Send control frames with valid/invalid payloads to verify parsing (see the browser-console sketch after this list)
4. Test message normalization with various data types
5. Verify rate limiting by issuing rapid rebind requests
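For steps 2 and 3, a browser-console sketch (the control payload fields are hypothetical and must match whatever the server expects):

```js
const wsProtocol = location.protocol === 'https:' ? 'wss' : 'ws';
const ws = new WebSocket(`${wsProtocol}://${location.host}/api/terminal/input-ws`);
ws.binaryType = 'arraybuffer';

ws.addEventListener('open', () => {
  // Build a control frame by hand: a 0x01 tag byte followed by UTF-8 JSON.
  const json = new TextEncoder().encode(JSON.stringify({ t: 'bind', s: 'session-id' })); // fields are illustrative
  const frame = new Uint8Array(1 + json.length);
  frame[0] = 0x01;
  frame.set(json, 1);
  ws.send(frame);

  // Plain text (no tag byte) is treated as terminal input.
  ws.send('echo hello\r');
});
```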
### Automated verification
- Run test file: `bun test packages/web/server/lib/terminal/input-ws-protocol.test.js`
- Protocol tests should pass, covering:
  - WebSocket path constant
  - Control frame encoding/decoding
  - Payload validation
  - Message normalization (all data types)
  - Pathname parsing
  - Rate limiting logic

View File

@@ -0,0 +1,12 @@
export {
  TERMINAL_INPUT_WS_PATH,
  TERMINAL_INPUT_WS_CONTROL_TAG_JSON,
  TERMINAL_INPUT_WS_MAX_PAYLOAD_BYTES,
  parseRequestPathname,
  normalizeTerminalInputWsMessageToBuffer,
  normalizeTerminalInputWsMessageToText,
  readTerminalInputWsControlFrame,
  createTerminalInputWsControlFrame,
  pruneRebindTimestamps,
  isRebindRateLimited,
} from './input-ws-protocol.js';

View File

@@ -0,0 +1,66 @@
export const TERMINAL_INPUT_WS_PATH = '/api/terminal/input-ws';
export const TERMINAL_INPUT_WS_CONTROL_TAG_JSON = 0x01;
export const TERMINAL_INPUT_WS_MAX_PAYLOAD_BYTES = 64 * 1024;

export const parseRequestPathname = (requestUrl) => {
  if (typeof requestUrl !== 'string' || requestUrl.length === 0) {
    return '';
  }
  try {
    return new URL(requestUrl, 'http://localhost').pathname;
  } catch {
    return '';
  }
};

export const normalizeTerminalInputWsMessageToBuffer = (rawData) => {
  if (Buffer.isBuffer(rawData)) {
    return rawData;
  }
  if (Array.isArray(rawData)) {
    return Buffer.concat(rawData.map((chunk) => (Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk))));
  }
  return Buffer.from(rawData);
};

export const normalizeTerminalInputWsMessageToText = (rawData) => {
  if (typeof rawData === 'string') {
    return rawData;
  }
  return normalizeTerminalInputWsMessageToBuffer(rawData).toString('utf8');
};

export const readTerminalInputWsControlFrame = (rawData) => {
  if (!rawData) {
    return null;
  }
  const buffer = normalizeTerminalInputWsMessageToBuffer(rawData);
  if (buffer.length < 2 || buffer[0] !== TERMINAL_INPUT_WS_CONTROL_TAG_JSON) {
    return null;
  }
  try {
    const parsed = JSON.parse(buffer.subarray(1).toString('utf8'));
    if (!parsed || typeof parsed !== 'object') {
      return null;
    }
    return parsed;
  } catch {
    return null;
  }
};

export const createTerminalInputWsControlFrame = (payload) => {
  const jsonBytes = Buffer.from(JSON.stringify(payload), 'utf8');
  return Buffer.concat([Buffer.from([TERMINAL_INPUT_WS_CONTROL_TAG_JSON]), jsonBytes]);
};

export const pruneRebindTimestamps = (timestamps, now, windowMs) =>
  timestamps.filter((timestamp) => now - timestamp < windowMs);

export const isRebindRateLimited = (timestamps, maxPerWindow) => timestamps.length >= maxPerWindow;

View File

@@ -0,0 +1,138 @@
import { describe, expect, it } from 'bun:test';
import {
TERMINAL_INPUT_WS_CONTROL_TAG_JSON,
TERMINAL_INPUT_WS_PATH,
createTerminalInputWsControlFrame,
isRebindRateLimited,
normalizeTerminalInputWsMessageToBuffer,
normalizeTerminalInputWsMessageToText,
parseRequestPathname,
pruneRebindTimestamps,
readTerminalInputWsControlFrame,
} from './input-ws-protocol.js';
describe('terminal input websocket protocol', () => {
it('uses fixed websocket path', () => {
expect(TERMINAL_INPUT_WS_PATH).toBe('/api/terminal/input-ws');
});
it('encodes control frames with control tag prefix', () => {
const frame = createTerminalInputWsControlFrame({ t: 'ok', v: 1 });
expect(frame[0]).toBe(TERMINAL_INPUT_WS_CONTROL_TAG_JSON);
});
it('roundtrips control frame payload', () => {
const payload = { t: 'b', s: 'abc123', v: 1 };
const frame = createTerminalInputWsControlFrame(payload);
expect(readTerminalInputWsControlFrame(frame)).toEqual(payload);
});
it('rejects control frame without protocol tag', () => {
const frame = Buffer.from(JSON.stringify({ t: 'b', s: 'abc123' }), 'utf8');
expect(readTerminalInputWsControlFrame(frame)).toBeNull();
});
it('rejects malformed control json', () => {
const frame = Buffer.concat([
Buffer.from([TERMINAL_INPUT_WS_CONTROL_TAG_JSON]),
Buffer.from('{not json', 'utf8'),
]);
expect(readTerminalInputWsControlFrame(frame)).toBeNull();
});
it('rejects empty control payloads', () => {
expect(readTerminalInputWsControlFrame(null)).toBeNull();
expect(readTerminalInputWsControlFrame(undefined)).toBeNull();
expect(readTerminalInputWsControlFrame(Buffer.alloc(0))).toBeNull();
});
it('rejects control json that is not object', () => {
const frame = Buffer.concat([
Buffer.from([TERMINAL_INPUT_WS_CONTROL_TAG_JSON]),
Buffer.from('"str"', 'utf8'),
]);
expect(readTerminalInputWsControlFrame(frame)).toBeNull();
});
it('parses control frame from chunk arrays', () => {
const frame = createTerminalInputWsControlFrame({ t: 'bok', v: 1 });
const chunks = [frame.subarray(0, 2), frame.subarray(2)];
expect(readTerminalInputWsControlFrame(chunks)).toEqual({ t: 'bok', v: 1 });
});
it('normalizes buffer passthrough', () => {
const raw = Buffer.from('abc', 'utf8');
const normalized = normalizeTerminalInputWsMessageToBuffer(raw);
expect(normalized).toBe(raw);
expect(normalized.toString('utf8')).toBe('abc');
});
it('normalizes uint8 arrays', () => {
const normalized = normalizeTerminalInputWsMessageToBuffer(new Uint8Array([97, 98, 99]));
expect(normalized.toString('utf8')).toBe('abc');
});
it('normalizes array buffer payloads', () => {
const source = new Uint8Array([97, 98, 99]).buffer;
const normalized = normalizeTerminalInputWsMessageToBuffer(source);
expect(normalized.toString('utf8')).toBe('abc');
});
it('normalizes chunk array payloads', () => {
const normalized = normalizeTerminalInputWsMessageToBuffer([
Buffer.from('ab', 'utf8'),
Buffer.from('c', 'utf8'),
]);
expect(normalized.toString('utf8')).toBe('abc');
});
it('normalizes text payload from string', () => {
expect(normalizeTerminalInputWsMessageToText('\u001b[A')).toBe('\u001b[A');
});
it('normalizes text payload from binary data', () => {
expect(normalizeTerminalInputWsMessageToText(Buffer.from('\r', 'utf8'))).toBe('\r');
});
it('parses relative request pathname', () => {
expect(parseRequestPathname('/api/terminal/input-ws?x=1')).toBe('/api/terminal/input-ws');
});
it('parses absolute request pathname', () => {
expect(parseRequestPathname('http://localhost:3000/api/terminal/input-ws')).toBe('/api/terminal/input-ws');
});
it('returns empty pathname for non-string request url', () => {
expect(parseRequestPathname(null)).toBe('');
});
it('returns empty pathname for invalid request url', () => {
expect(parseRequestPathname('http://')).toBe('');
expect(parseRequestPathname('')).toBe('');
});
it('prunes stale rebind timestamps', () => {
const now = 1_000;
const pruned = pruneRebindTimestamps([100, 200, 950, 999], now, 100);
expect(pruned).toEqual([950, 999]);
});
it('keeps rebind timestamps within active window', () => {
const now = 1_000;
const pruned = pruneRebindTimestamps([920, 950, 999], now, 100);
expect(pruned).toEqual([920, 950, 999]);
});
it('does not rate limit below threshold', () => {
expect(isRebindRateLimited([1, 2, 3], 4)).toBe(false);
});
it('does not rate limit empty window', () => {
expect(isRebindRateLimited([], 1)).toBe(false);
});
it('rate limits at threshold', () => {
expect(isRebindRateLimited([1, 2, 3, 4], 4)).toBe(true);
});
});

197
web/src/api/files.ts Normal file
View File

@@ -0,0 +1,197 @@
import type {
DirectoryListResult,
FileSearchQuery,
FileSearchResult,
FilesAPI,
} from '@openchamber/ui/lib/api/types';
const normalizePath = (path: string): string => path.replace(/\\/g, '/');
type WebDirectoryEntry = {
name?: string;
path?: string;
isDirectory?: boolean;
isFile?: boolean;
isSymbolicLink?: boolean;
};
type WebDirectoryListResponse = {
directory?: string;
path?: string;
entries?: WebDirectoryEntry[];
};
const toDirectoryListResult = (fallbackDirectory: string, payload: WebDirectoryListResponse): DirectoryListResult => {
const directory = normalizePath(payload?.directory || payload?.path || fallbackDirectory);
const entries = Array.isArray(payload?.entries) ? payload.entries : [];
return {
directory,
entries: entries
.filter((entry): entry is Required<Pick<WebDirectoryEntry, 'name' | 'path'>> & { isDirectory?: boolean } =>
Boolean(entry && typeof entry.name === 'string' && typeof entry.path === 'string')
)
.map((entry) => ({
name: entry.name,
path: normalizePath(entry.path),
isDirectory: Boolean(entry.isDirectory),
})),
};
};
export const createWebFilesAPI = (): FilesAPI => ({
async listDirectory(path: string): Promise<DirectoryListResult> {
const target = normalizePath(path);
const params = new URLSearchParams();
if (target) {
params.set('path', target);
}
const response = await fetch(`/api/fs/list${params.toString() ? `?${params.toString()}` : ''}`);
if (!response.ok) {
const error = await response.json().catch(() => ({ error: response.statusText }));
throw new Error((error as { error?: string }).error || 'Failed to list directory');
}
const result = (await response.json()) as WebDirectoryListResponse;
return toDirectoryListResult(target, result);
},
async search(payload: FileSearchQuery): Promise<FileSearchResult[]> {
const params = new URLSearchParams();
const directory = normalizePath(payload.directory);
if (directory) {
params.set('directory', directory);
}
params.set('query', payload.query);
params.set('dirs', 'false');
params.set('type', 'file');
if (typeof payload.maxResults === 'number' && Number.isFinite(payload.maxResults)) {
params.set('limit', String(payload.maxResults));
}
const response = await fetch(`/api/find/file?${params.toString()}`);
if (!response.ok) {
const error = await response.json().catch(() => ({ error: response.statusText }));
throw new Error((error as { error?: string }).error || 'Failed to search files');
}
const result = (await response.json()) as string[];
const files = Array.isArray(result) ? result : [];
return files.map((relativePath) => ({
path: normalizePath(`${directory}/${relativePath}`),
preview: [normalizePath(relativePath)],
}));
},
async createDirectory(path: string): Promise<{ success: boolean; path: string }> {
const target = normalizePath(path);
const response = await fetch('/api/fs/mkdir', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ path: target }),
});
if (!response.ok) {
const error = await response.json().catch(() => ({ error: response.statusText }));
throw new Error((error as { error?: string }).error || 'Failed to create directory');
}
const result = await response.json();
return {
success: Boolean(result?.success),
path: typeof result?.path === 'string' ? normalizePath(result.path) : target,
};
},
async readFile(path: string): Promise<{ content: string; path: string }> {
const target = normalizePath(path);
const response = await fetch(`/api/fs/read?path=${encodeURIComponent(target)}`);
if (!response.ok) {
const error = await response.json().catch(() => ({ error: response.statusText }));
throw new Error((error as { error?: string }).error || 'Failed to read file');
}
const content = await response.text();
return { content, path: target };
},
async writeFile(path: string, content: string): Promise<{ success: boolean; path: string }> {
const target = normalizePath(path);
const response = await fetch('/api/fs/write', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ path: target, content }),
});
if (!response.ok) {
const error = await response.json().catch(() => ({ error: response.statusText }));
throw new Error((error as { error?: string }).error || 'Failed to write file');
}
const result = await response.json().catch(() => ({}));
return {
success: Boolean((result as { success?: boolean }).success),
path: typeof (result as { path?: string }).path === 'string' ? normalizePath((result as { path: string }).path) : target,
};
},
async delete(path: string): Promise<{ success: boolean }> {
const target = normalizePath(path);
const response = await fetch('/api/fs/delete', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ path: target }),
});
if (!response.ok) {
const error = await response.json().catch(() => ({ error: response.statusText }));
throw new Error((error as { error?: string }).error || 'Failed to delete file');
}
const result = await response.json().catch(() => ({}));
return { success: Boolean((result as { success?: boolean }).success) };
},
async rename(oldPath: string, newPath: string): Promise<{ success: boolean; path: string }> {
const response = await fetch('/api/fs/rename', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ oldPath, newPath }),
});
if (!response.ok) {
const error = await response.json().catch(() => ({ error: response.statusText }));
throw new Error((error as { error?: string }).error || 'Failed to rename file');
}
const result = await response.json().catch(() => ({}));
return {
success: Boolean((result as { success?: boolean }).success),
path: typeof (result as { path?: string }).path === 'string' ? normalizePath((result as { path: string }).path) : newPath,
};
},
async revealPath(targetPath: string): Promise<{ success: boolean }> {
const response = await fetch('/api/fs/reveal', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ path: normalizePath(targetPath) }),
});
if (!response.ok) {
const error = await response.json().catch(() => ({ error: response.statusText }));
throw new Error((error as { error?: string }).error || 'Failed to reveal path');
}
const result = await response.json().catch(() => ({}));
return { success: Boolean((result as { success?: boolean }).success) };
},
});

60
web/src/api/git.ts Normal file
View File

@@ -0,0 +1,60 @@
import * as gitApiHttp from '@openchamber/ui/lib/gitApiHttp';
import type {
GitAPI,
CreateGitCommitOptions,
GitLogOptions,
} from '@openchamber/ui/lib/api/types';
export const createWebGitAPI = (): GitAPI => ({
checkIsGitRepository: gitApiHttp.checkIsGitRepository,
getGitStatus: gitApiHttp.getGitStatus,
getGitDiff: gitApiHttp.getGitDiff,
getGitFileDiff: gitApiHttp.getGitFileDiff,
revertGitFile: gitApiHttp.revertGitFile,
isLinkedWorktree: gitApiHttp.isLinkedWorktree,
getGitBranches: gitApiHttp.getGitBranches,
deleteGitBranch: gitApiHttp.deleteGitBranch as GitAPI['deleteGitBranch'],
deleteRemoteBranch: gitApiHttp.deleteRemoteBranch as GitAPI['deleteRemoteBranch'],
generateCommitMessage: gitApiHttp.generateCommitMessage,
generatePullRequestDescription: gitApiHttp.generatePullRequestDescription,
listGitWorktrees: gitApiHttp.listGitWorktrees,
validateGitWorktree: gitApiHttp.validateGitWorktree,
createGitWorktree: gitApiHttp.createGitWorktree,
deleteGitWorktree: gitApiHttp.deleteGitWorktree,
createGitCommit(directory: string, message: string, options?: CreateGitCommitOptions) {
return gitApiHttp.createGitCommit(directory, message, options);
},
gitPush: gitApiHttp.gitPush,
gitPull: gitApiHttp.gitPull,
gitFetch: gitApiHttp.gitFetch,
checkoutBranch: gitApiHttp.checkoutBranch,
createBranch: gitApiHttp.createBranch,
renameBranch: gitApiHttp.renameBranch,
getGitLog(directory: string, options?: GitLogOptions) {
return gitApiHttp.getGitLog(directory, options);
},
getCommitFiles: gitApiHttp.getCommitFiles,
getCurrentGitIdentity: gitApiHttp.getCurrentGitIdentity,
hasLocalIdentity: gitApiHttp.hasLocalIdentity,
setGitIdentity: gitApiHttp.setGitIdentity,
getGitIdentities: gitApiHttp.getGitIdentities,
createGitIdentity: gitApiHttp.createGitIdentity,
updateGitIdentity: gitApiHttp.updateGitIdentity,
deleteGitIdentity: gitApiHttp.deleteGitIdentity,
getRemotes: gitApiHttp.getRemotes,
rebase: gitApiHttp.rebase,
abortRebase: gitApiHttp.abortRebase,
continueRebase: gitApiHttp.continueRebase,
merge: gitApiHttp.merge,
abortMerge: gitApiHttp.abortMerge,
continueMerge: gitApiHttp.continueMerge,
stash: gitApiHttp.stash,
stashPop: gitApiHttp.stashPop,
getConflictDetails: gitApiHttp.getConflictDetails,
worktree: {
list: gitApiHttp.listGitWorktrees,
validate: gitApiHttp.validateGitWorktree,
create: gitApiHttp.createGitWorktree,
remove: gitApiHttp.deleteGitWorktree,
},
});

233
web/src/api/github.ts Normal file
View File

@@ -0,0 +1,233 @@
import type {
GitHubAPI,
GitHubAuthStatus,
GitHubIssueCommentsResult,
GitHubIssueGetResult,
GitHubIssuesListResult,
GitHubPullRequestContextResult,
GitHubPullRequestsListResult,
GitHubPullRequest,
GitHubPullRequestCreateInput,
GitHubPullRequestMergeInput,
GitHubPullRequestMergeResult,
GitHubPullRequestReadyInput,
GitHubPullRequestReadyResult,
GitHubPullRequestUpdateInput,
GitHubPullRequestStatus,
GitHubDeviceFlowComplete,
GitHubDeviceFlowStart,
GitHubUserSummary,
} from '@openchamber/ui/lib/api/types';
const jsonOrNull = async <T>(response: Response): Promise<T | null> => {
return (await response.json().catch(() => null)) as T | null;
};
export const createWebGitHubAPI = (): GitHubAPI => ({
async authStatus(): Promise<GitHubAuthStatus> {
const response = await fetch('/api/github/auth/status', { method: 'GET', headers: { Accept: 'application/json' } });
const payload = await jsonOrNull<GitHubAuthStatus & { error?: string }>(response);
if (!response.ok || !payload) {
throw new Error(payload?.error || response.statusText || 'Failed to load GitHub status');
}
return payload;
},
async authStart(): Promise<GitHubDeviceFlowStart> {
const response = await fetch('/api/github/auth/start', {
method: 'POST',
headers: { 'Content-Type': 'application/json', Accept: 'application/json' },
body: JSON.stringify({}),
});
const payload = await jsonOrNull<GitHubDeviceFlowStart & { error?: string }>(response);
if (!response.ok || !payload || !('deviceCode' in payload)) {
throw new Error((payload as { error?: string } | null)?.error || response.statusText || 'Failed to start GitHub auth');
}
return payload;
},
async authComplete(deviceCode: string): Promise<GitHubDeviceFlowComplete> {
const response = await fetch('/api/github/auth/complete', {
method: 'POST',
headers: { 'Content-Type': 'application/json', Accept: 'application/json' },
body: JSON.stringify({ deviceCode }),
});
const payload = await jsonOrNull<GitHubDeviceFlowComplete & { error?: string }>(response);
if (!response.ok || !payload) {
throw new Error((payload as { error?: string } | null)?.error || response.statusText || 'Failed to complete GitHub auth');
}
return payload;
},
async authDisconnect(): Promise<{ removed: boolean }> {
const response = await fetch('/api/github/auth', { method: 'DELETE', headers: { Accept: 'application/json' } });
const payload = await jsonOrNull<{ removed?: boolean; error?: string }>(response);
if (!response.ok) {
throw new Error(payload?.error || response.statusText || 'Failed to disconnect GitHub');
}
return { removed: Boolean(payload?.removed) };
},
async authActivate(accountId: string): Promise<GitHubAuthStatus> {
const response = await fetch('/api/github/auth/activate', {
method: 'POST',
headers: { 'Content-Type': 'application/json', Accept: 'application/json' },
body: JSON.stringify({ accountId }),
});
const payload = await jsonOrNull<GitHubAuthStatus & { error?: string }>(response);
if (!response.ok || !payload) {
throw new Error(payload?.error || response.statusText || 'Failed to activate GitHub account');
}
return payload;
},
async me(): Promise<GitHubUserSummary> {
const response = await fetch('/api/github/me', { method: 'GET', headers: { Accept: 'application/json' } });
const payload = await jsonOrNull<GitHubUserSummary & { error?: string }>(response);
if (!response.ok || !payload) {
throw new Error(payload?.error || response.statusText || 'Failed to fetch GitHub user');
}
return payload;
},
async prStatus(directory: string, branch: string, remote?: string): Promise<GitHubPullRequestStatus> {
const params = new URLSearchParams({
directory,
branch,
...(remote ? { remote } : {}),
});
const response = await fetch(
`/api/github/pr/status?${params.toString()}`,
{ method: 'GET', headers: { Accept: 'application/json' } }
);
const payload = await jsonOrNull<GitHubPullRequestStatus & { error?: string }>(response);
if (!response.ok || !payload) {
throw new Error(payload?.error || response.statusText || 'Failed to load PR status');
}
return payload;
},
async prCreate(payload: GitHubPullRequestCreateInput): Promise<GitHubPullRequest> {
const response = await fetch('/api/github/pr/create', {
method: 'POST',
headers: { 'Content-Type': 'application/json', Accept: 'application/json' },
body: JSON.stringify(payload),
});
const body = await jsonOrNull<GitHubPullRequest & { error?: string }>(response);
if (!response.ok || !body) {
throw new Error((body as { error?: string } | null)?.error || response.statusText || 'Failed to create PR');
}
return body;
},
async prUpdate(payload: GitHubPullRequestUpdateInput): Promise<GitHubPullRequest> {
const response = await fetch('/api/github/pr/update', {
method: 'POST',
headers: { 'Content-Type': 'application/json', Accept: 'application/json' },
body: JSON.stringify(payload),
});
const body = await jsonOrNull<GitHubPullRequest & { error?: string }>(response);
if (!response.ok || !body) {
throw new Error((body as { error?: string } | null)?.error || response.statusText || 'Failed to update PR');
}
return body;
},
async prMerge(payload: GitHubPullRequestMergeInput): Promise<GitHubPullRequestMergeResult> {
const response = await fetch('/api/github/pr/merge', {
method: 'POST',
headers: { 'Content-Type': 'application/json', Accept: 'application/json' },
body: JSON.stringify(payload),
});
const body = await jsonOrNull<GitHubPullRequestMergeResult & { error?: string }>(response);
if (!response.ok || !body) {
throw new Error((body as { error?: string } | null)?.error || response.statusText || 'Failed to merge PR');
}
return body;
},
async prReady(payload: GitHubPullRequestReadyInput): Promise<GitHubPullRequestReadyResult> {
const response = await fetch('/api/github/pr/ready', {
method: 'POST',
headers: { 'Content-Type': 'application/json', Accept: 'application/json' },
body: JSON.stringify(payload),
});
const body = await jsonOrNull<GitHubPullRequestReadyResult & { error?: string }>(response);
if (!response.ok || !body) {
throw new Error((body as { error?: string } | null)?.error || response.statusText || 'Failed to mark PR ready');
}
return body;
},
async prsList(directory: string, options?: { page?: number }): Promise<GitHubPullRequestsListResult> {
const page = options?.page ?? 1;
const response = await fetch(
`/api/github/pulls/list?directory=${encodeURIComponent(directory)}&page=${encodeURIComponent(String(page))}`,
{ method: 'GET', headers: { Accept: 'application/json' } }
);
const body = await jsonOrNull<GitHubPullRequestsListResult & { error?: string }>(response);
if (!response.ok || !body) {
throw new Error(body?.error || response.statusText || 'Failed to load pull requests');
}
return body;
},
async prContext(
directory: string,
number: number,
options?: { includeDiff?: boolean; includeCheckDetails?: boolean }
): Promise<GitHubPullRequestContextResult> {
const url = new URL('/api/github/pulls/context', window.location.origin);
url.searchParams.set('directory', directory);
url.searchParams.set('number', String(number));
if (options?.includeDiff) {
url.searchParams.set('diff', '1');
}
if (options?.includeCheckDetails) {
url.searchParams.set('checkDetails', '1');
}
const response = await fetch(url.toString(), { method: 'GET', headers: { Accept: 'application/json' } });
const body = await jsonOrNull<GitHubPullRequestContextResult & { error?: string }>(response);
if (!response.ok || !body) {
throw new Error(body?.error || response.statusText || 'Failed to load pull request context');
}
return body;
},
async issuesList(directory: string, options?: { page?: number }): Promise<GitHubIssuesListResult> {
const page = options?.page ?? 1;
const response = await fetch(
`/api/github/issues/list?directory=${encodeURIComponent(directory)}&page=${encodeURIComponent(String(page))}`,
{ method: 'GET', headers: { Accept: 'application/json' } }
);
const payload = await jsonOrNull<GitHubIssuesListResult & { error?: string }>(response);
if (!response.ok || !payload) {
throw new Error(payload?.error || response.statusText || 'Failed to load issues');
}
return payload;
},
async issueGet(directory: string, number: number): Promise<GitHubIssueGetResult> {
const response = await fetch(
`/api/github/issues/get?directory=${encodeURIComponent(directory)}&number=${encodeURIComponent(String(number))}`,
{ method: 'GET', headers: { Accept: 'application/json' } }
);
const payload = await jsonOrNull<GitHubIssueGetResult & { error?: string }>(response);
if (!response.ok || !payload) {
throw new Error(payload?.error || response.statusText || 'Failed to load issue');
}
return payload;
},
async issueComments(directory: string, number: number): Promise<GitHubIssueCommentsResult> {
const response = await fetch(
`/api/github/issues/comments?directory=${encodeURIComponent(directory)}&number=${encodeURIComponent(String(number))}`,
{ method: 'GET', headers: { Accept: 'application/json' } }
);
const payload = await jsonOrNull<GitHubIssueCommentsResult & { error?: string }>(response);
if (!response.ok || !payload) {
throw new Error(payload?.error || response.statusText || 'Failed to load issue comments');
}
return payload;
},
});

23
web/src/api/index.ts Normal file
View File

@@ -0,0 +1,23 @@
import type { RuntimeAPIs } from '@openchamber/ui/lib/api/types';
import { createWebTerminalAPI } from './terminal';
import { createWebGitAPI } from './git';
import { createWebFilesAPI } from './files';
import { createWebSettingsAPI } from './settings';
import { createWebPermissionsAPI } from './permissions';
import { createWebNotificationsAPI } from './notifications';
import { createWebToolsAPI } from './tools';
import { createWebPushAPI } from './push';
import { createWebGitHubAPI } from './github';
export const createWebAPIs = (): RuntimeAPIs => ({
runtime: { platform: 'web', isDesktop: false, isVSCode: false, label: 'web' },
terminal: createWebTerminalAPI(),
git: createWebGitAPI(),
files: createWebFilesAPI(),
settings: createWebSettingsAPI(),
permissions: createWebPermissionsAPI(),
notifications: createWebNotificationsAPI(),
github: createWebGitHubAPI(),
push: createWebPushAPI(),
tools: createWebToolsAPI(),
});

View File

@@ -0,0 +1,77 @@
import type { NotificationPayload, NotificationsAPI } from '@openchamber/ui/lib/api/types';
const notifyWithWebAPI = async (payload?: NotificationPayload): Promise<boolean> => {
if (typeof Notification === 'undefined') {
console.info('Notifications not supported in this environment', payload);
return false;
}
if (Notification.permission === 'default') {
const permission = await Notification.requestPermission();
if (permission !== 'granted') {
console.warn('Notification permission not granted');
return false;
}
}
if (Notification.permission !== 'granted') {
console.warn('Notification permission not granted');
return false;
}
try {
new Notification(payload?.title ?? 'OpenChamber', {
body: payload?.body,
tag: payload?.tag,
});
return true;
} catch (error) {
console.warn('Failed to send notification', error);
return false;
}
};
const notifyWithTauri = async (payload?: NotificationPayload): Promise<boolean> => {
if (typeof window === 'undefined') {
return false;
}
const tauri = (window as unknown as { __TAURI__?: TauriGlobal }).__TAURI__;
if (!tauri?.core?.invoke) {
return false;
}
try {
await tauri.core.invoke('desktop_notify', {
payload: {
title: payload?.title,
body: payload?.body,
tag: payload?.tag,
},
});
return true;
} catch (error) {
console.warn('Failed to send native notification (tauri)', error);
return false;
}
};
export const createWebNotificationsAPI = (): NotificationsAPI => ({
async notifyAgentCompletion(payload?: NotificationPayload): Promise<boolean> {
return (await notifyWithTauri(payload)) || notifyWithWebAPI(payload);
},
canNotify: () => {
if (typeof window !== 'undefined') {
const tauri = (window as unknown as { __TAURI__?: TauriGlobal }).__TAURI__;
if (tauri?.core?.invoke) {
return true;
}
}
return typeof Notification !== 'undefined' ? Notification.permission === 'granted' : false;
},
});
type TauriGlobal = {
core?: {
invoke?: (cmd: string, args?: Record<string, unknown>) => Promise<unknown>;
};
};

View File

@@ -0,0 +1,15 @@
import type { DirectoryPermissionRequest, PermissionsAPI, StartAccessingResult } from '@openchamber/ui/lib/api/types';
export const createWebPermissionsAPI = (): PermissionsAPI => ({
async requestDirectoryAccess(request: DirectoryPermissionRequest) {
return { success: true, path: request.path };
},
async startAccessingDirectory(path: string): Promise<StartAccessingResult> {
void path;
return { success: true };
},
async stopAccessingDirectory(path: string): Promise<StartAccessingResult> {
void path;
return { success: true };
},
});

59
web/src/api/push.ts Normal file
View File

@@ -0,0 +1,59 @@
import type { PushAPI, PushSubscribePayload, PushUnsubscribePayload } from '@openchamber/ui/lib/api/types';
const fetchJson = async <T>(input: RequestInfo | URL, init?: RequestInit): Promise<T | null> => {
try {
const res = await fetch(input, {
...init,
credentials: 'include',
headers: {
Accept: 'application/json',
...(init?.headers ?? {}),
},
});
if (!res.ok) {
return null;
}
return (await res.json()) as T;
} catch {
return null;
}
};
export const createWebPushAPI = (): PushAPI => ({
async getVapidPublicKey() {
return fetchJson<{ publicKey: string }>('/api/push/vapid-public-key');
},
async subscribe(payload: PushSubscribePayload) {
return fetchJson<{ ok: true }>('/api/push/subscribe', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(payload),
});
},
async unsubscribe(payload: PushUnsubscribePayload) {
return fetchJson<{ ok: true }>('/api/push/subscribe', {
method: 'DELETE',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(payload),
});
},
async setVisibility(payload: { visible: boolean }) {
return fetchJson<{ ok: true }>('/api/push/visibility', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify(payload),
keepalive: true,
});
},
});

Some files were not shown because too many files have changed in this diff Show More