routie dev init since i didn't adhere to any proper guidance up until now
This commit is contained in:
+21
@@ -0,0 +1,21 @@
|
||||
MIT License
|
||||
|
||||
Copyright (c) 2025 Eleanor Berger
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
||||
+994
@@ -0,0 +1,994 @@
|
||||
# Ruler: Centralise Your AI Coding Assistant Instructions
|
||||
|
||||
<table style="width:100%">
|
||||
<tr>
|
||||
<td style="vertical-align: top;">
|
||||
<p>
|
||||
<a href="https://github.com/intellectronica/ruler/actions/workflows/ci.yml"><img src="https://github.com/intellectronica/ruler/actions/workflows/ci.yml/badge.svg" alt="CI"></a>
|
||||
<a href="https://www.npmjs.com/package/@intellectronica/ruler"><img src="https://badge.fury.io/js/%40intellectronica%2Fruler.svg" alt="npm version"></a>
|
||||
<img src="https://img.shields.io/badge/License-MIT-yellow.svg" alt="License: MIT">
|
||||
</p>
|
||||
<ul>
|
||||
<li><strong>GitHub</strong>: <a href="https://github.com/intellectronica/ruler">intellectronica/ruler</a></li>
|
||||
<li><strong>NPM</strong>: <a href="https://www.npmjs.com/package/@intellectronica/ruler">@intellectronica/ruler</a></li>
|
||||
</ul>
|
||||
<hr />
|
||||
<p>
|
||||
<em>Animation by <a href="https://isaacflath.com/">Isaac Flath</a> of <strong><a href="https://elite-ai-assisted-coding.dev/">Elite AI-Assisted Coding</a></strong></em> ➡︎
|
||||
</p>
|
||||
</td>
|
||||
<td style="vertical-align: top; width:33%;">
|
||||
<img src="img/ruler-short.gif" alt="Ruler demo" style="width:300px; height:auto; display:block;" />
|
||||
</td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
---
|
||||
|
||||
> **Beta Research Preview**
|
||||
>
|
||||
> - Please test this version carefully in your environment
|
||||
> - Report issues at https://github.com/intellectronica/ruler/issues
|
||||
|
||||
## Why Ruler?
|
||||
|
||||
Managing instructions across multiple AI coding tools becomes complex as your team grows. Different agents (GitHub Copilot, Claude, Cursor, Aider, etc.) require their own configuration files, leading to:
|
||||
|
||||
- **Inconsistent guidance** across AI tools
|
||||
- **Duplicated effort** maintaining multiple config files
|
||||
- **Context drift** as project requirements evolve
|
||||
- **Onboarding friction** for new AI tools
|
||||
- **Complex project structures** requiring context-specific instructions for different components
|
||||
|
||||
Ruler solves this by providing a **single source of truth** for all your AI agent instructions, automatically distributing them to the right configuration files. With support for **nested rule loading**, Ruler can handle complex project structures with context-specific instructions for different components.
|
||||
|
||||
## Core Features
|
||||
|
||||
- **Centralised Rule Management**: Store all AI instructions in a dedicated `.ruler/` directory using Markdown files
|
||||
- **Nested Rule Loading**: Support complex project structures with multiple `.ruler/` directories for context-specific instructions
|
||||
- **Automatic Distribution**: Ruler applies these rules to configuration files of supported AI agents
|
||||
- **Targeted Agent Configuration**: Fine-tune which agents are affected and their specific output paths via `ruler.toml`
|
||||
- **MCP Server Propagation**: Manage and distribute Model Context Protocol (MCP) server settings
|
||||
- **`.gitignore` Automation**: Keeps generated agent config files out of version control automatically
|
||||
- **Simple CLI**: Easy-to-use commands for initialising and applying configurations
|
||||
|
||||
## Supported AI Agents
|
||||
|
||||
| Agent | Rules File(s) | MCP Configuration / Notes | Skills Support / Location |
|
||||
| ---------------------- | ---------------------------------------------- | ------------------------------------------------ | ------------------------- |
|
||||
| AGENTS.md | `AGENTS.md` | (pseudo-agent ensuring root `AGENTS.md` exists) | - |
|
||||
| GitHub Copilot | `AGENTS.md` | `.vscode/mcp.json` | `.claude/skills/` |
|
||||
| Claude Code | `CLAUDE.md` | `.mcp.json` | `.claude/skills/` |
|
||||
| OpenAI Codex CLI | `AGENTS.md` | `.codex/config.toml` | `.codex/skills/` |
|
||||
| Pi Coding Agent | `AGENTS.md` | - | `.pi/skills/` |
|
||||
| Jules | `AGENTS.md` | - | - |
|
||||
| Cursor | `AGENTS.md` | `.cursor/mcp.json` | `.cursor/skills/` |
|
||||
| Windsurf | `AGENTS.md` | `.windsurf/mcp_config.json` | `.windsurf/skills/` |
|
||||
| Cline | `.clinerules` | - | - |
|
||||
| Crush | `CRUSH.md` | `.crush.json` | - |
|
||||
| Amp | `AGENTS.md` | - | `.agents/skills/` |
|
||||
| Antigravity | `.agent/rules/ruler.md` | - | `.agent/skills/` |
|
||||
| Amazon Q CLI | `.amazonq/rules/ruler_q_rules.md` | `.amazonq/mcp.json` | - |
|
||||
| Aider | `AGENTS.md`, `.aider.conf.yml` | `.mcp.json` | - |
|
||||
| Firebase Studio | `.idx/airules.md` | `.idx/mcp.json` | - |
|
||||
| Open Hands | `.openhands/microagents/repo.md` | `config.toml` | - |
|
||||
| Gemini CLI | `AGENTS.md` | `.gemini/settings.json` | `.gemini/skills/` |
|
||||
| Junie | `.junie/guidelines.md` | `.junie/mcp/mcp.json` | `.junie/skills/` |
|
||||
| AugmentCode | `.augment/rules/ruler_augment_instructions.md` | - | - |
|
||||
| Kilo Code | `AGENTS.md` | `.kilocode/mcp.json` | `.claude/skills/` |
|
||||
| OpenCode | `AGENTS.md` | `opencode.json` | `.opencode/skills/` |
|
||||
| Goose | `.goosehints` | - | `.agents/skills/` |
|
||||
| Qwen Code | `AGENTS.md` | `.qwen/settings.json` | - |
|
||||
| RooCode | `AGENTS.md` | `.roo/mcp.json` | `.roo/skills/` |
|
||||
| Zed | `AGENTS.md` | `.zed/settings.json` (project root, never $HOME) | - |
|
||||
| Trae AI | `.trae/rules/project_rules.md` | - | - |
|
||||
| Warp | `WARP.md` | - | - |
|
||||
| Kiro | `.kiro/steering/ruler_kiro_instructions.md` | `.kiro/settings/mcp.json` | - |
|
||||
| Firebender | `firebender.json` | `firebender.json` (rules and MCP in same file) | - |
|
||||
| Factory Droid | `AGENTS.md` | `.factory/mcp.json` | `.factory/skills/` |
|
||||
| Mistral Vibe | `AGENTS.md` | `.vibe/config.toml` | `.vibe/skills/` |
|
||||
| JetBrains AI Assistant | `.aiassistant/rules/AGENTS.md` | - | - |
|
||||
|
||||
## Getting Started
|
||||
|
||||
### Installation
|
||||
|
||||
Requires Node.js `^20.19.0 || ^22.12.0 || >=23`.
|
||||
|
||||
**Global Installation (Recommended for CLI use):**
|
||||
|
||||
```bash
|
||||
npm install -g @intellectronica/ruler
|
||||
```
|
||||
|
||||
**Using `npx` (for one-off commands):**
|
||||
|
||||
```bash
|
||||
npx @intellectronica/ruler apply
|
||||
```
|
||||
|
||||
### Project Initialisation
|
||||
|
||||
1. Navigate to your project's root directory
|
||||
2. Run `ruler init`
|
||||
3. This creates:
|
||||
|
||||
- `.ruler/` directory
|
||||
- `.ruler/AGENTS.md`: The primary starter Markdown file for your rules
|
||||
- `.ruler/ruler.toml`: The main configuration file for Ruler (now contains sample MCP server sections; legacy `.ruler/mcp.json` no longer scaffolded)
|
||||
- (Optional legacy fallback) If you previously used `.ruler/instructions.md`, it is still respected when `AGENTS.md` is absent. (The prior runtime warning was removed.)
|
||||
|
||||
Additionally, you can create a global configuration to use when no local `.ruler/` directory is found:
|
||||
|
||||
```bash
|
||||
ruler init --global
|
||||
```
|
||||
|
||||
The global configuration will be created in `$XDG_CONFIG_HOME/ruler` (default: `~/.config/ruler`).
|
||||
|
||||
## Core Concepts
|
||||
|
||||
### The `.ruler/` Directory
|
||||
|
||||
This is your central hub for all AI agent instructions:
|
||||
|
||||
- **Primary File Order & Precedence**:
|
||||
1. A repository root `AGENTS.md` (outside `.ruler/`) if present (highest precedence, prepended)
|
||||
2. `.ruler/AGENTS.md` (new default starter file)
|
||||
3. Legacy `.ruler/instructions.md` (only if `.ruler/AGENTS.md` absent; no longer emits a deprecation warning)
|
||||
4. Remaining discovered `.md` files under `.ruler/` (and subdirectories) in sorted order
|
||||
- **Rule Files (`*.md`)**: Discovered recursively from `.ruler/` or `$XDG_CONFIG_HOME/ruler` and concatenated in the order above
|
||||
- **Concatenation Marker**: Each file's content is prepended with `<!-- Source: <relative_path_to_md_file> -->` for traceability
|
||||
- **`ruler.toml`**: Master configuration for Ruler's behavior, agent selection, output paths, and MCP server settings
|
||||
- **`mcp.json`**: (Legacy, deprecated) Shared MCP server settings - no longer scaffolded but still supported for backward compatibility
|
||||
|
||||
This ordering lets you keep a short, high-impact root `AGENTS.md` (e.g. executive project summary) while housing detailed guidance inside `.ruler/`.
|
||||
|
||||
### Nested Rule Loading
|
||||
|
||||
Ruler now supports **nested rule loading** with the `--nested` flag, enabling context-specific instructions for different parts of your project:
|
||||
|
||||
```
|
||||
project/
|
||||
├── .ruler/ # Global project rules
|
||||
│ ├── AGENTS.md
|
||||
│ └── coding_style.md
|
||||
├── src/
|
||||
│ └── .ruler/ # Component-specific rules
|
||||
│ └── api_guidelines.md
|
||||
├── tests/
|
||||
│ └── .ruler/ # Test-specific rules
|
||||
│ └── testing_conventions.md
|
||||
└── docs/
|
||||
└── .ruler/ # Documentation rules
|
||||
└── writing_style.md
|
||||
```
|
||||
|
||||
**How it works:**
|
||||
|
||||
- Discover all `.ruler/` directories in the project hierarchy
|
||||
- Load and concatenate rules from each directory in order
|
||||
- Decide whether nested mode is enabled using the following precedence:
|
||||
1. `ruler apply --nested` (or `--no-nested`) takes top priority
|
||||
2. `nested = true` in `ruler.toml`
|
||||
3. Default to disabled when neither option is provided
|
||||
- When a run is nested, downstream configs are forced to keep `nested = true`. If a child config attempts to disable it, Ruler keeps nested processing active and emits a warning in the logs.
|
||||
- Nested processing carries forward each directory's own MCP bundle and configuration settings so that generated files remain scoped to their source directories while being normalized back to the project root.
|
||||
|
||||
> [!CAUTION]
|
||||
> Nested mode is experimental and may change in future releases. The CLI logs this warning the first time a nested run is detected so you know the behavior may evolve.
|
||||
|
||||
**Perfect for:**
|
||||
|
||||
- Monorepos with multiple services
|
||||
- Projects with distinct components (frontend/backend)
|
||||
- Teams needing different instructions for different areas
|
||||
- Complex codebases with varying standards
|
||||
|
||||
### Best Practices for Rule Files
|
||||
|
||||
**Granularity**: Break down complex instructions into focused `.md` files:
|
||||
|
||||
- `coding_style.md`
|
||||
- `api_conventions.md`
|
||||
- `project_architecture.md`
|
||||
- `security_guidelines.md`
|
||||
|
||||
**Example rule file (`.ruler/python_guidelines.md`):**
|
||||
|
||||
```markdown
|
||||
# Python Project Guidelines
|
||||
|
||||
## General Style
|
||||
|
||||
- Follow PEP 8 for all Python code
|
||||
- Use type hints for all function signatures and complex variables
|
||||
- Keep functions short and focused on a single task
|
||||
|
||||
## Error Handling
|
||||
|
||||
- Use specific exception types rather than generic `Exception`
|
||||
- Log errors effectively with context
|
||||
|
||||
## Security
|
||||
|
||||
- Always validate and sanitize user input
|
||||
- Be mindful of potential injection vulnerabilities
|
||||
```
|
||||
|
||||
## Usage: The `apply` Command
|
||||
|
||||
### Primary Command
|
||||
|
||||
```bash
|
||||
ruler apply [options]
|
||||
```
|
||||
|
||||
The `apply` command searches upward from the current directory and uses the first `.ruler/` directory it finds. If no such directory is found, it falls back to the global configuration in `$XDG_CONFIG_HOME/ruler`.
|
||||
|
||||
### Options
|
||||
|
||||
| Option | Description |
|
||||
| ------------------------------ | ---------------------------------------------------------------------- |
|
||||
| `--project-root <path>` | Project root path (default: current directory). |
|
||||
| `--agents <agent1,agent2,...>` | Comma-separated agent names to target (see supported list below). |
|
||||
| `--config <path>` | Custom `ruler.toml` path. |
|
||||
| `--mcp` / `--with-mcp` | Enable applying MCP server configurations (default: true). |
|
||||
| `--no-mcp` | Disable applying MCP server configurations. |
|
||||
| `--mcp-overwrite` | Overwrite native MCP config instead of merging. |
|
||||
| `--gitignore` | Enable automatic .gitignore updates (default: true). |
|
||||
| `--no-gitignore` | Disable automatic .gitignore updates. |
|
||||
| `--gitignore-local` | Write managed ignore entries to `.git/info/exclude` instead. |
|
||||
| `--nested` | Enable nested rule loading (default: inherit from config or disabled). |
|
||||
| `--no-nested` | Disable nested rule loading even if `nested = true` in config. |
|
||||
| `--backup` | Toggle creation of `.bak` backup files (default: enabled). |
|
||||
| `--skills` | Enable skills support (experimental, default: enabled). |
|
||||
| `--no-skills` | Disable skills support. |
|
||||
| `--dry-run` | Preview changes without writing files. |
|
||||
| `--local-only` | Skip `$XDG_CONFIG_HOME` when looking for configuration. |
|
||||
| `--verbose` / `-v` | Display detailed output during execution. |
|
||||
|
||||
### Common Examples
|
||||
|
||||
**Apply rules to all configured agents:**
|
||||
|
||||
```bash
|
||||
ruler apply
|
||||
```
|
||||
|
||||
**Apply rules only to GitHub Copilot and Claude:**
|
||||
|
||||
```bash
|
||||
ruler apply --agents copilot,claude
|
||||
```
|
||||
|
||||
**Apply rules only to Firebase Studio:**
|
||||
|
||||
```bash
|
||||
ruler apply --agents firebase
|
||||
```
|
||||
|
||||
**Apply rules only to Warp:**
|
||||
|
||||
```bash
|
||||
ruler apply --agents warp
|
||||
```
|
||||
|
||||
**Apply rules only to Trae AI:**
|
||||
|
||||
```bash
|
||||
ruler apply --agents trae
|
||||
```
|
||||
|
||||
**Apply rules only to RooCode:**
|
||||
|
||||
```bash
|
||||
ruler apply --agents roo
|
||||
```
|
||||
|
||||
**Use a specific configuration file:**
|
||||
|
||||
```bash
|
||||
ruler apply --config ./team-configs/ruler.frontend.toml
|
||||
```
|
||||
|
||||
**Apply rules with verbose output:**
|
||||
|
||||
```bash
|
||||
ruler apply --verbose
|
||||
```
|
||||
|
||||
**Apply rules but skip MCP and .gitignore updates:**
|
||||
|
||||
```bash
|
||||
ruler apply --no-mcp --no-gitignore
|
||||
```
|
||||
|
||||
## Usage: The `revert` Command
|
||||
|
||||
The `revert` command safely undoes all changes made by `ruler apply`, restoring your project to its pre-ruler state. It intelligently restores files from backups (`.bak` files) when available, or removes generated files that didn't exist before.
|
||||
|
||||
### Why Revert is Needed
|
||||
|
||||
When experimenting with different rule configurations or switching between projects, you may want to:
|
||||
|
||||
- **Clean slate**: Remove all ruler-generated files to start fresh
|
||||
- **Restore originals**: Revert modified files back to their original state
|
||||
- **Selective cleanup**: Remove configurations for specific agents only
|
||||
- **Safe experimentation**: Try ruler without fear of permanent changes
|
||||
|
||||
### Primary Command
|
||||
|
||||
```bash
|
||||
ruler revert [options]
|
||||
```
|
||||
|
||||
### Options
|
||||
|
||||
| Option | Description |
|
||||
| ------------------------------ | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
|
||||
| `--project-root <path>` | Path to your project's root (default: current directory) |
|
||||
| `--agents <agent1,agent2,...>` | Comma-separated list of agent names to revert (agentsmd, aider, amazonqcli, amp, antigravity, augmentcode, claude, cline, codex, copilot, crush, cursor, factory, firebase, firebender, gemini-cli, goose, jetbrains-ai, jules, junie, kilocode, kiro, mistral, opencode, openhands, pi, qwen, roo, trae, warp, windsurf, zed) |
|
||||
| `--config <path>` | Path to a custom `ruler.toml` configuration file |
|
||||
| `--keep-backups` | Keep backup files (.bak) after restoration (default: false) |
|
||||
| `--dry-run` | Preview changes without actually reverting files |
|
||||
| `--verbose` / `-v` | Display detailed output during execution |
|
||||
| `--local-only` | Only search for local .ruler directories, ignore global config |
|
||||
|
||||
### Common Examples
|
||||
|
||||
**Revert all ruler changes:**
|
||||
|
||||
```bash
|
||||
ruler revert
|
||||
```
|
||||
|
||||
**Preview what would be reverted (dry-run):**
|
||||
|
||||
```bash
|
||||
ruler revert --dry-run
|
||||
```
|
||||
|
||||
**Revert only specific agents:**
|
||||
|
||||
```bash
|
||||
ruler revert --agents claude,copilot
|
||||
```
|
||||
|
||||
**Revert with detailed output:**
|
||||
|
||||
```bash
|
||||
ruler revert --verbose
|
||||
```
|
||||
|
||||
**Keep backup files after reverting:**
|
||||
|
||||
```bash
|
||||
ruler revert --keep-backups
|
||||
```
|
||||
|
||||
## Configuration (`ruler.toml`) in Detail
|
||||
|
||||
### Location
|
||||
|
||||
Defaults to `.ruler/ruler.toml` in the project root. Override with `--config` CLI option.
|
||||
|
||||
### Complete Example
|
||||
|
||||
```toml
|
||||
# Default agents to run when --agents is not specified
|
||||
# Uses case-insensitive substring matching
|
||||
default_agents = ["copilot", "claude", "aider"]
|
||||
|
||||
# --- Global MCP Server Configuration ---
|
||||
[mcp]
|
||||
# Enable/disable MCP propagation globally (default: true)
|
||||
enabled = true
|
||||
# Global merge strategy: 'merge' or 'overwrite' (default: 'merge')
|
||||
merge_strategy = "merge"
|
||||
|
||||
# --- MCP Server Definitions ---
|
||||
[mcp_servers.filesystem]
|
||||
command = "npx"
|
||||
args = ["-y", "@modelcontextprotocol/server-filesystem", "/path/to/project"]
|
||||
|
||||
[mcp_servers.git]
|
||||
command = "npx"
|
||||
args = ["-y", "@modelcontextprotocol/server-git", "--repository", "."]
|
||||
|
||||
[mcp_servers.remote_api]
|
||||
url = "https://api.example.com"
|
||||
|
||||
[mcp_servers.remote_api.headers]
|
||||
Authorization = "Bearer your-token"
|
||||
|
||||
# --- Global .gitignore Configuration ---
|
||||
[gitignore]
|
||||
# Enable/disable automatic .gitignore updates (default: true)
|
||||
enabled = true
|
||||
# Write managed entries to .git/info/exclude instead of .gitignore (default: false)
|
||||
local = false
|
||||
|
||||
# --- Agent-Specific Configurations ---
|
||||
[agents.copilot]
|
||||
enabled = true
|
||||
|
||||
[agents.claude]
|
||||
enabled = true
|
||||
output_path = "CLAUDE.md"
|
||||
|
||||
[agents.aider]
|
||||
enabled = true
|
||||
output_path_instructions = "AGENTS.md"
|
||||
output_path_config = ".aider.conf.yml"
|
||||
|
||||
# OpenAI Codex CLI agent and MCP config
|
||||
[agents.codex]
|
||||
enabled = true
|
||||
output_path = "AGENTS.md"
|
||||
output_path_config = ".codex/config.toml"
|
||||
|
||||
# Agent-specific MCP configuration for Codex CLI
|
||||
[agents.codex.mcp]
|
||||
enabled = true
|
||||
merge_strategy = "merge"
|
||||
|
||||
[agents.firebase]
|
||||
enabled = true
|
||||
output_path = ".idx/airules.md"
|
||||
|
||||
[agents.gemini-cli]
|
||||
enabled = true
|
||||
|
||||
[agents.jules]
|
||||
enabled = true
|
||||
|
||||
[agents.junie]
|
||||
enabled = true
|
||||
output_path = ".junie/guidelines.md"
|
||||
|
||||
[agents.junie.mcp]
|
||||
enabled = true
|
||||
merge_strategy = "merge"
|
||||
|
||||
# Agent-specific MCP configuration
|
||||
[agents.cursor.mcp]
|
||||
enabled = true
|
||||
merge_strategy = "merge"
|
||||
|
||||
# Disable specific agents
|
||||
[agents.windsurf]
|
||||
enabled = false
|
||||
|
||||
[agents.kilocode]
|
||||
enabled = true
|
||||
output_path = "AGENTS.md"
|
||||
|
||||
[agents.warp]
|
||||
enabled = true
|
||||
output_path = "WARP.md"
|
||||
```
|
||||
|
||||
### Configuration Precedence
|
||||
|
||||
1. **CLI flags** (e.g., `--agents`, `--no-mcp`, `--mcp-overwrite`, `--no-gitignore`)
|
||||
2. **Settings in `ruler.toml`** (`default_agents`, specific agent settings, global sections)
|
||||
3. **Ruler's built-in defaults** (all agents enabled, standard output paths, MCP enabled with 'merge')
|
||||
|
||||
## MCP (Model Context Protocol) Server Configuration
|
||||
|
||||
MCP provides broader context to AI models through server configurations. Ruler can manage and distribute these settings across compatible agents.
|
||||
|
||||
### TOML Configuration (Recommended)
|
||||
|
||||
You can now define MCP servers directly in `ruler.toml` using the `[mcp_servers.<name>]` syntax:
|
||||
|
||||
```toml
|
||||
# Global MCP behavior
|
||||
[mcp]
|
||||
enabled = true
|
||||
merge_strategy = "merge" # or "overwrite"
|
||||
|
||||
# Local (stdio) server
|
||||
[mcp_servers.filesystem]
|
||||
command = "npx"
|
||||
args = ["-y", "@modelcontextprotocol/server-filesystem", "/path/to/project"]
|
||||
|
||||
[mcp_servers.filesystem.env]
|
||||
API_KEY = "your-api-key"
|
||||
|
||||
# Remote server
|
||||
[mcp_servers.search]
|
||||
url = "https://mcp.example.com"
|
||||
|
||||
[mcp_servers.search.headers]
|
||||
Authorization = "Bearer your-token"
|
||||
"X-API-Version" = "v1"
|
||||
```
|
||||
|
||||
### Legacy `.ruler/mcp.json` (Deprecated)
|
||||
|
||||
For backward compatibility, you can still use the JSON format; a warning is issued encouraging migration to TOML. The file is no longer created during `ruler init`.
|
||||
|
||||
```json
|
||||
{
|
||||
"mcpServers": {
|
||||
"filesystem": {
|
||||
"command": "npx",
|
||||
"args": [
|
||||
"-y",
|
||||
"@modelcontextprotocol/server-filesystem",
|
||||
"/path/to/project"
|
||||
]
|
||||
},
|
||||
"git": {
|
||||
"command": "npx",
|
||||
"args": ["-y", "@modelcontextprotocol/server-git", "--repository", "."]
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Configuration Precedence
|
||||
|
||||
When both TOML and JSON configurations are present:
|
||||
|
||||
1. **TOML servers take precedence** over JSON servers with the same name
|
||||
2. **Servers are merged** from both sources (unless using overwrite strategy)
|
||||
3. **Deprecation warning** is shown encouraging migration to TOML (warning shown once per run)
|
||||
|
||||
### Server Types
|
||||
|
||||
**Local/stdio servers** require a `command` field:
|
||||
|
||||
```toml
|
||||
[mcp_servers.local_server]
|
||||
command = "node"
|
||||
args = ["server.js"]
|
||||
|
||||
[mcp_servers.local_server.env]
|
||||
DEBUG = "1"
|
||||
```
|
||||
|
||||
**Remote servers** require a `url` field (headers optional; bearer Authorization token auto-extracted for OpenHands when possible):
|
||||
|
||||
```toml
|
||||
[mcp_servers.remote_server]
|
||||
url = "https://api.example.com"
|
||||
|
||||
[mcp_servers.remote_server.headers]
|
||||
Authorization = "Bearer token"
|
||||
```
|
||||
|
||||
Ruler uses this configuration with the `merge` (default) or `overwrite` strategy, controlled by `ruler.toml` or CLI flags.
|
||||
|
||||
**Home Directory Safety:** Ruler never writes MCP configuration files outside your project root. Any historical references to user home directories (e.g. `~/.codeium/windsurf/mcp_config.json` or `~/.zed/settings.json`) have been removed; only project-local paths are targeted.
|
||||
|
||||
**Note for OpenAI Codex CLI:** To apply the local Codex CLI MCP configuration, set the `CODEX_HOME` environment variable to your project’s `.codex` directory:
|
||||
|
||||
```bash
|
||||
export CODEX_HOME="$(pwd)/.codex"
|
||||
```
|
||||
|
||||
## Skills Support (Experimental)
|
||||
|
||||
**⚠️ Experimental Feature**: Skills support is currently experimental. Skills are only propagated to agents with native skills support; other agents are skipped with a warning.
|
||||
|
||||
Ruler can manage and propagate skills to supported AI agents. Skills are stored in `.ruler/skills/` and are automatically distributed to compatible agents when you run `ruler apply`.
|
||||
|
||||
### How It Works
|
||||
|
||||
Skills are specialized knowledge packages that extend AI agent capabilities with domain-specific expertise, workflows, or tool integrations. Ruler discovers skills in your `.ruler/skills/` directory and propagates them to compatible agents:
|
||||
|
||||
- **Agents with native skills support**: Skills are copied directly to each agent's native skills directory:
|
||||
- **Claude Code**: `.claude/skills/`
|
||||
- **GitHub Copilot**: `.claude/skills/` (shared with Claude Code)
|
||||
- **Kilo Code**: `.claude/skills/` (shared with Claude Code)
|
||||
- **OpenAI Codex CLI**: `.codex/skills/`
|
||||
- **OpenCode**: `.opencode/skills/`
|
||||
- **Pi Coding Agent**: `.pi/skills/`
|
||||
- **Goose**: `.agents/skills/`
|
||||
- **Amp**: `.agents/skills/` (shared with Goose)
|
||||
- **Antigravity**: `.agent/skills/`
|
||||
- **Factory Droid**: `.factory/skills/`
|
||||
- **Mistral Vibe**: `.vibe/skills/`
|
||||
- **Roo Code**: `.roo/skills/`
|
||||
- **Gemini CLI**: `.gemini/skills/`
|
||||
- **Junie**: `.junie/skills/`
|
||||
- **Cursor**: `.cursor/skills/`
|
||||
- **Windsurf**: `.windsurf/skills/`
|
||||
|
||||
### Skills Directory Structure
|
||||
|
||||
Skills can be organized flat or nested:
|
||||
|
||||
```
|
||||
.ruler/skills/
|
||||
├── my-skill/
|
||||
│ ├── SKILL.md # Required: skill instructions/knowledge
|
||||
│ ├── helper.py # Optional: additional resources (scripts)
|
||||
│ └── reference.md # Optional: additional resources (docs)
|
||||
└── another-skill/
|
||||
└── SKILL.md
|
||||
```
|
||||
|
||||
Each skill must contain:
|
||||
|
||||
- `SKILL.md` - Primary skill file with instructions or knowledge base
|
||||
|
||||
Skills can optionally include additional resources like:
|
||||
|
||||
- Markdown files with supplementary documentation
|
||||
- Python, JavaScript, or other scripts
|
||||
- Configuration files or data
|
||||
|
||||
### Configuration
|
||||
|
||||
Skills support is **enabled by default** but can be controlled via:
|
||||
|
||||
**CLI flags:**
|
||||
|
||||
```bash
|
||||
# Enable skills (default)
|
||||
ruler apply --skills
|
||||
|
||||
# Disable skills
|
||||
ruler apply --no-skills
|
||||
```
|
||||
|
||||
**Configuration in `ruler.toml`:**
|
||||
|
||||
```toml
|
||||
[skills]
|
||||
enabled = true # or false to disable
|
||||
```
|
||||
|
||||
### Non-native Agents
|
||||
|
||||
If you run Ruler for agents that do not support native skills, Ruler logs a warning and skips skills propagation for those agents.
|
||||
|
||||
### `.gitignore` Integration
|
||||
|
||||
When skills support is enabled and gitignore integration is active, Ruler automatically adds:
|
||||
|
||||
- `.claude/skills/` (for Claude Code, GitHub Copilot, and Kilo Code)
|
||||
- `.codex/skills/` (for OpenAI Codex CLI)
|
||||
- `.opencode/skills/` (for OpenCode)
|
||||
- `.pi/skills/` (for Pi Coding Agent)
|
||||
- `.agents/skills/` (for Goose and Amp)
|
||||
- `.agent/skills/` (for Antigravity)
|
||||
- `.factory/skills/` (for Factory Droid)
|
||||
- `.vibe/skills/` (for Mistral Vibe)
|
||||
- `.roo/skills/` (for Roo Code)
|
||||
- `.gemini/skills/` (for Gemini CLI)
|
||||
- `.junie/skills/` (for Junie)
|
||||
- `.cursor/skills/` (for Cursor)
|
||||
|
||||
to your `.gitignore` file within the managed Ruler block.
|
||||
|
||||
### Requirements
|
||||
|
||||
- **For agents with native skills support** (Claude Code, GitHub Copilot, Kilo Code, OpenAI Codex CLI, OpenCode, Pi Coding Agent, Goose, Amp, Antigravity, Factory Droid, Mistral Vibe, Roo Code, Gemini CLI, Junie, Cursor): No additional requirements.
|
||||
|
||||
### Validation
|
||||
|
||||
Ruler validates discovered skills and issues warnings for:
|
||||
|
||||
- Missing required file (`SKILL.md`)
|
||||
- Invalid directory structures (directories without `SKILL.md` and no sub-skills)
|
||||
|
||||
Warnings don't prevent propagation but help identify potential issues.
|
||||
|
||||
### Dry-Run Mode
|
||||
|
||||
Test skills propagation without making changes:
|
||||
|
||||
```bash
|
||||
ruler apply --dry-run
|
||||
```
|
||||
|
||||
This shows which skills would be copied.
|
||||
|
||||
### Example Workflow
|
||||
|
||||
```bash
|
||||
# 1. Add a skill to your project
|
||||
mkdir -p .ruler/skills/my-skill
|
||||
cat > .ruler/skills/my-skill/SKILL.md << 'EOF'
|
||||
# My Custom Skill
|
||||
|
||||
This skill provides specialized knowledge for...
|
||||
|
||||
## Usage
|
||||
|
||||
When working on this project, always follow these guidelines:
|
||||
- Use TypeScript for all new code
|
||||
- Write tests for all features
|
||||
- Follow the existing code style
|
||||
EOF
|
||||
|
||||
# 2. Apply to all agents (skills enabled by default)
|
||||
ruler apply
|
||||
|
||||
# 3. Skills are now available to compatible agents:
|
||||
# - Claude Code, GitHub Copilot & Kilo Code: .claude/skills/my-skill/
|
||||
# - OpenAI Codex CLI: .codex/skills/my-skill/
|
||||
# - OpenCode: .opencode/skills/my-skill/
|
||||
# - Pi Coding Agent: .pi/skills/my-skill/
|
||||
# - Goose & Amp: .agents/skills/my-skill/
|
||||
# - Antigravity: .agent/skills/my-skill/
|
||||
# - Factory Droid: .factory/skills/my-skill/
|
||||
# - Mistral Vibe: .vibe/skills/my-skill/
|
||||
# - Roo Code: .roo/skills/my-skill/
|
||||
# - Gemini CLI: .gemini/skills/my-skill/
|
||||
# - Junie: .junie/skills/my-skill/
|
||||
# - Cursor: .cursor/skills/my-skill/
|
||||
```
|
||||
|
||||
## `.gitignore` Integration
|
||||
|
||||
Ruler automatically manages your `.gitignore` file to keep generated agent configuration files out of version control.
|
||||
|
||||
### How it Works
|
||||
|
||||
- Creates or updates `.gitignore` in your project root
|
||||
- Adds paths to a managed block marked with `# START Ruler Generated Files` and `# END Ruler Generated Files`
|
||||
- Preserves existing content outside this block
|
||||
- Sorts paths alphabetically and uses relative POSIX-style paths
|
||||
|
||||
### Example `.gitignore` Section (sample - actual list depends on enabled agents)
|
||||
|
||||
```gitignore
|
||||
# Your existing rules
|
||||
node_modules/
|
||||
*.log
|
||||
|
||||
# START Ruler Generated Files
|
||||
.aider.conf.yml
|
||||
.clinerules
|
||||
AGENTS.md
|
||||
CLAUDE.md
|
||||
# END Ruler Generated Files
|
||||
|
||||
dist/
|
||||
```
|
||||
|
||||
### Control Options
|
||||
|
||||
- **CLI flags**: `--gitignore`, `--no-gitignore`, `--gitignore-local`, `--no-gitignore-local`
|
||||
- **Configuration**: `[gitignore].enabled` and `[gitignore].local` in `ruler.toml`
|
||||
- **Default**: enabled
|
||||
|
||||
## Practical Usage Scenarios
|
||||
|
||||
### Scenario 1: Getting Started Quickly
|
||||
|
||||
```bash
|
||||
# Initialize Ruler in your project
|
||||
cd your-project
|
||||
ruler init
|
||||
|
||||
# Edit the generated files
|
||||
# - Add your coding guidelines to .ruler/AGENTS.md (or keep adding additional .md files)
|
||||
# - Customize .ruler/ruler.toml if needed
|
||||
|
||||
# Apply rules to all AI agents
|
||||
ruler apply
|
||||
```
|
||||
|
||||
### Scenario 2: Complex Projects with Nested Rules
|
||||
|
||||
For large projects with multiple components or services, enable nested rule loading so each directory keeps its own rules and MCP bundle:
|
||||
|
||||
```bash
|
||||
# Set up nested .ruler directories
|
||||
mkdir -p src/.ruler tests/.ruler docs/.ruler
|
||||
|
||||
# Add component-specific instructions
|
||||
echo "# API Design Guidelines" > src/.ruler/api_rules.md
|
||||
echo "# Testing Best Practices" > tests/.ruler/test_rules.md
|
||||
echo "# Documentation Standards" > docs/.ruler/docs_rules.md
|
||||
```
|
||||
|
||||
```toml
|
||||
# .ruler/ruler.toml
|
||||
nested = true
|
||||
```
|
||||
|
||||
```bash
|
||||
# The CLI inherits nested mode from ruler.toml
|
||||
ruler apply --verbose
|
||||
|
||||
# Override from the CLI at any time
|
||||
ruler apply --no-nested
|
||||
```
|
||||
|
||||
This creates context-specific instructions for different parts of your project while maintaining global rules in the root `.ruler/` directory. Nested runs automatically keep every nested config enabled even if a child tries to disable it.
|
||||
|
||||
> [!NOTE]
|
||||
> The CLI prints "Nested mode is experimental and may change in future releases." the first time nested processing runs. Expect refinements in future versions.
|
||||
|
||||
### Scenario 3: Team Standardization
|
||||
|
||||
1. Create `.ruler/coding_standards.md`, `.ruler/api_usage.md`
|
||||
2. Commit the `.ruler` directory to your repository
|
||||
3. Team members pull changes and run `ruler apply` to update their local AI agent configurations
|
||||
|
||||
### Scenario 4: Project-Specific Context for AI
|
||||
|
||||
1. Detail your project's architecture in `.ruler/project_overview.md`
|
||||
2. Describe primary data structures in `.ruler/data_models.md`
|
||||
3. Run `ruler apply` to help AI tools provide more relevant suggestions
|
||||
|
||||
### Integration with NPM Scripts
|
||||
|
||||
```json
|
||||
{
|
||||
"scripts": {
|
||||
"ruler:apply": "ruler apply",
|
||||
"dev": "npm run ruler:apply && your_dev_command",
|
||||
"precommit": "npm run ruler:apply"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Integration with GitHub Actions
|
||||
|
||||
```yaml
|
||||
# .github/workflows/ruler-check.yml
|
||||
name: Check Ruler Configuration
|
||||
on:
|
||||
pull_request:
|
||||
paths: ['.ruler/**']
|
||||
|
||||
jobs:
|
||||
check-ruler:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
cache: 'npm'
|
||||
|
||||
- name: Install Ruler
|
||||
run: npm install -g @intellectronica/ruler
|
||||
|
||||
- name: Apply Ruler configuration
|
||||
run: ruler apply --no-gitignore
|
||||
|
||||
- name: Check for uncommitted changes
|
||||
run: |
|
||||
if [[ -n $(git status --porcelain) ]]; then
|
||||
echo "::error::Ruler configuration is out of sync!"
|
||||
echo "Please run 'ruler apply' locally and commit the changes."
|
||||
exit 1
|
||||
fi
|
||||
```
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
**"Cannot find module" errors:**
|
||||
|
||||
- Ensure Ruler is installed globally: `npm install -g @intellectronica/ruler`
|
||||
- Or use `npx @intellectronica/ruler`
|
||||
|
||||
**Permission denied errors:**
|
||||
|
||||
- On Unix systems, you may need `sudo` for global installation
|
||||
|
||||
**Agent files not updating:**
|
||||
|
||||
- Check if the agent is enabled in `ruler.toml`
|
||||
- Verify agent isn't excluded by `--agents` flag
|
||||
- Use `--verbose` to see detailed execution logs
|
||||
|
||||
**Configuration validation errors:**
|
||||
|
||||
- Ruler now validates `ruler.toml` format and will show specific error details
|
||||
- Check that all configuration values match the expected types and formats
|
||||
|
||||
### Debug Mode
|
||||
|
||||
Use `--verbose` flag to see detailed execution logs:
|
||||
|
||||
```bash
|
||||
ruler apply --verbose
|
||||
```
|
||||
|
||||
This shows:
|
||||
|
||||
- Configuration loading details
|
||||
- Agent selection logic
|
||||
- File processing information
|
||||
- MCP configuration steps
|
||||
|
||||
## FAQ
|
||||
|
||||
**Q: Can I use different rules for different agents?**
|
||||
A: Currently, all agents receive the same concatenated rules. For agent-specific instructions, include sections in your rule files like "## GitHub Copilot Specific" or "## Aider Configuration".
|
||||
|
||||
**Q: How do I set up different instructions for different parts of my project?**
|
||||
A: Enable nested mode either by setting `nested = true` in `ruler.toml` or by passing `ruler apply --nested`. The CLI inherits the config setting by default, but `--no-nested` always wins if you need to opt out for a run. Nested mode keeps loading rules (and MCP settings) from every `.ruler/` directory in the hierarchy, forces child configs to remain nested, and logs "Nested mode is experimental and may change in future releases." if any nested processing occurs.
|
||||
|
||||
**Q: How do I temporarily disable Ruler for an agent?**
|
||||
A: Set `enabled = false` in `ruler.toml` under `[agents.agentname]`, or use `--agents` flag to specify only the agents you want.
|
||||
|
||||
**Q: What happens to my existing agent configuration files?**
|
||||
A: Ruler creates backups with `.bak` extension before overwriting any existing files.
|
||||
|
||||
**Q: Can I run Ruler in CI/CD pipelines?**
|
||||
A: Yes! Use `ruler apply --no-gitignore` in CI to avoid modifying `.gitignore`. See the GitHub Actions example above.
|
||||
|
||||
**Q: How do I migrate from older versions using `instructions.md`?**
|
||||
A: Simply rename `.ruler/instructions.md` to `.ruler/AGENTS.md` (recommended). If you keep the legacy file and omit `AGENTS.md`, Ruler will still use it (without emitting the old deprecation warning). If both files exist, `AGENTS.md` takes precedence and the legacy file is concatenated after it.
|
||||
|
||||
**Q: How does OpenHands MCP propagation classify servers?**
|
||||
A: Local stdio servers become `stdio_servers`. Remote URLs containing `/sse` are classified as `sse_servers`; others become `shttp_servers`. Bearer tokens in an `Authorization` header are extracted into `api_key` where possible.
|
||||
|
||||
**Q: Where is Zed configuration written now?**
|
||||
A: Ruler writes a `settings.json` in the project root (not the user home dir) and transforms MCP server definitions to Zed's `context_servers` format including `source: "custom"`.
|
||||
|
||||
**Q: What changed about MCP initialization?**
|
||||
A: `ruler init` now only adds example MCP server sections to `ruler.toml` instead of creating `.ruler/mcp.json`. The JSON file is still consumed if present, but TOML servers win on name conflicts.
|
||||
|
||||
**Q: Is Kiro supported?**
|
||||
A: Yes. Kiro receives concatenated rules at `.kiro/steering/ruler_kiro_instructions.md`.
|
||||
|
||||
## Development
|
||||
|
||||
### Setup
|
||||
|
||||
```bash
|
||||
git clone https://github.com/intellectronica/ruler.git
|
||||
cd ruler
|
||||
npm install
|
||||
npm run build
|
||||
```
|
||||
|
||||
### Testing
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
npm test
|
||||
|
||||
# Run tests with coverage
|
||||
npm run test:coverage
|
||||
|
||||
# Run tests in watch mode
|
||||
npm run test:watch
|
||||
```
|
||||
|
||||
### Code Quality
|
||||
|
||||
```bash
|
||||
# Run linting
|
||||
npm run lint
|
||||
|
||||
# Run formatting
|
||||
npm run format
|
||||
```
|
||||
|
||||
## Contributing
|
||||
|
||||
Contributions are welcome! Please:
|
||||
|
||||
1. Fork the repository
|
||||
2. Create a feature branch
|
||||
3. Make your changes
|
||||
4. Add tests for new functionality
|
||||
5. Ensure all tests pass
|
||||
6. Submit a pull request
|
||||
|
||||
For bugs and feature requests, please [open an issue](https://github.com/intellectronica/ruler/issues).
|
||||
|
||||
## License
|
||||
|
||||
MIT
|
||||
|
||||
---
|
||||
|
||||
© Eleanor Berger
|
||||
[ai.intellectronica.net](https://ai.intellectronica.net/)
|
||||
+97
@@ -0,0 +1,97 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.AbstractAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const FileSystemUtils_1 = require("../core/FileSystemUtils");
|
||||
/**
 * Shared base class for agent adapters whose output is a single generated
 * configuration file. Subclasses supply identity and output-path details;
 * this class centralises the "ensure dir, back up, write" sequence.
 */
class AbstractAgent {
    /**
     * Writes the concatenated ruler rules to the agent's configuration file.
     *
     * Resolves the target path (an explicit `agentConfig.outputPath` wins over
     * the subclass default), creates the parent directory, optionally backs up
     * any existing file, then writes the generated content.
     *
     * @param {string} concatenatedRules - Combined rule text to write.
     * @param {string} projectRoot - Root used to resolve relative output paths.
     * @param {*} _rulerMcpJson - Unused here; kept for interface compatibility.
     * @param {object} [agentConfig] - Optional per-agent overrides.
     * @param {boolean} [backup=true] - Back up an existing file before writing.
     */
    async applyRulerConfig(concatenatedRules, projectRoot, _rulerMcpJson, agentConfig, backup = true) {
        const target = agentConfig?.outputPath ?? this.getDefaultOutputPath(projectRoot);
        const resolved = path.resolve(projectRoot, target);
        await (0, FileSystemUtils_1.ensureDirExists)(path.dirname(resolved));
        if (backup) {
            await (0, FileSystemUtils_1.backupFile)(resolved);
        }
        await (0, FileSystemUtils_1.writeGeneratedFile)(resolved, concatenatedRules);
    }
    /** Key under which MCP servers live in the agent's MCP JSON document. */
    getMcpServerKey() {
        return 'mcpServers';
    }
    /** Whether the agent accepts MCP STDIO servers; subclasses opt in. */
    supportsMcpStdio() {
        return false;
    }
    /** Whether the agent accepts remote MCP servers; subclasses opt in. */
    supportsMcpRemote() {
        return false;
    }
    /** Whether the agent honours MCP server timeout configuration. */
    supportsMcpTimeout() {
        return false;
    }
    /** Whether the agent has native skills support. */
    supportsNativeSkills() {
        return false;
    }
}
|
||||
exports.AbstractAgent = AbstractAgent;
|
||||
+85
@@ -0,0 +1,85 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.AgentsMdAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const fs_1 = require("fs");
|
||||
const AbstractAgent_1 = require("./AbstractAgent");
|
||||
const FileSystemUtils_1 = require("../core/FileSystemUtils");
|
||||
/**
 * Pseudo-agent responsible for the root-level `AGENTS.md` file.
 * Takes no part in MCP propagation, and is idempotent: when the on-disk
 * content already matches, neither a backup nor a write takes place.
 */
class AgentsMdAgent extends AbstractAgent_1.AbstractAgent {
    getIdentifier() {
        return 'agentsmd';
    }
    getName() {
        return 'AgentsMd';
    }
    /** Default target: <projectRoot>/AGENTS.md */
    getDefaultOutputPath(projectRoot) {
        return path.join(projectRoot, 'AGENTS.md');
    }
    /**
     * Writes the rules (prefixed with a generated-by marker) to AGENTS.md,
     * skipping the backup/write entirely when nothing changed.
     */
    async applyRulerConfig(concatenatedRules, projectRoot, _rulerMcpJson, agentConfig, backup = true) {
        const target = agentConfig?.outputPath ?? this.getDefaultOutputPath(projectRoot);
        const resolved = path.resolve(projectRoot, target);
        await (0, FileSystemUtils_1.ensureDirExists)(path.dirname(resolved));
        // Marker lets readers (and Ruler itself) recognise generated output.
        const markedContent = `<!-- Generated by Ruler -->\n${concatenatedRules}`;
        let current = null;
        try {
            current = await fs_1.promises.readFile(resolved, 'utf8');
        }
        catch {
            current = null; // missing/unreadable file — treat as "no previous content"
        }
        if (current !== null && current === markedContent) {
            return; // unchanged: stay idempotent, no backup and no write
        }
        if (backup) {
            await (0, FileSystemUtils_1.backupFile)(resolved);
        }
        await (0, FileSystemUtils_1.writeGeneratedFile)(resolved, markedContent);
    }
    getMcpServerKey() {
        // This pseudo-agent never receives MCP configuration.
        return '';
    }
}
|
||||
exports.AgentsMdAgent = AgentsMdAgent;
|
||||
+108
@@ -0,0 +1,108 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.AiderAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
const FileSystemUtils_1 = require("../core/FileSystemUtils");
|
||||
const fs = __importStar(require("fs/promises"));
|
||||
const yaml = __importStar(require("js-yaml"));
|
||||
/**
 * Aider adapter: instructions go to AGENTS.md (delegated to AgentsMdAgent)
 * and that file is registered in `.aider.conf.yml` under the `read` list.
 */
class AiderAgent {
    constructor() {
        // Composition: reuse the idempotent AGENTS.md writer.
        this.agentsMdAgent = new AgentsMdAgent_1.AgentsMdAgent();
    }
    getIdentifier() {
        return 'aider';
    }
    getName() {
        return 'Aider';
    }
    /**
     * Writes AGENTS.md, then updates `.aider.conf.yml` so Aider reads it.
     */
    async applyRulerConfig(concatenatedRules, projectRoot, rulerMcpJson, agentConfig, backup = true) {
        // Step 1: idempotent AGENTS.md write via the composed pseudo-agent,
        // preserving the caller's output-path precedence (outputPath first).
        await this.agentsMdAgent.applyRulerConfig(concatenatedRules, projectRoot, null, {
            outputPath: agentConfig?.outputPath ||
                agentConfig?.outputPathInstructions ||
                undefined,
        }, backup);
        // Step 2: maintain the .aider.conf.yml `read` list.
        const configPath = agentConfig?.outputPathConfig ??
            this.getDefaultOutputPath(projectRoot).config;
        let configDoc = {};
        try {
            await fs.access(configPath);
            if (backup) {
                await (0, FileSystemUtils_1.backupFile)(configPath);
            }
            const rawYaml = await fs.readFile(configPath, 'utf8');
            configDoc = (yaml.load(rawYaml) || {});
        }
        catch {
            configDoc = {}; // config missing or unreadable — start fresh
        }
        if (!Array.isArray(configDoc.read)) {
            configDoc.read = [];
        }
        // Register the instructions file (by basename) exactly once.
        const instructionsPath = agentConfig?.outputPath ||
            agentConfig?.outputPathInstructions ||
            this.getDefaultOutputPath(projectRoot).instructions;
        const fileName = path.basename(instructionsPath);
        if (!configDoc.read.includes(fileName)) {
            configDoc.read.push(fileName);
        }
        await (0, FileSystemUtils_1.writeGeneratedFile)(configPath, yaml.dump(configDoc));
    }
    /** Default targets for the instructions file and the Aider config file. */
    getDefaultOutputPath(projectRoot) {
        return {
            instructions: path.join(projectRoot, 'AGENTS.md'),
            config: path.join(projectRoot, '.aider.conf.yml'),
        };
    }
    getMcpServerKey() {
        return this.agentsMdAgent.getMcpServerKey();
    }
    supportsMcpStdio() {
        return true;
    }
    supportsMcpRemote() {
        return true;
    }
}
|
||||
exports.AiderAgent = AiderAgent;
|
||||
+103
@@ -0,0 +1,103 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.AmazonQCliAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const fs_1 = require("fs");
|
||||
const FileSystemUtils_1 = require("../core/FileSystemUtils");
|
||||
const merge_1 = require("../mcp/merge");
|
||||
/**
 * Amazon Q CLI adapter: rules land in `.amazonq/rules/` and MCP servers are
 * merged into `.amazonq/mcp.json`.
 */
class AmazonQCliAgent {
    getIdentifier() {
        return 'amazonqcli';
    }
    getName() {
        return 'Amazon Q CLI';
    }
    /**
     * Writes the rules file, then (unless disabled) merges MCP servers into
     * the existing `.amazonq/mcp.json`.
     */
    async applyRulerConfig(concatenatedRules, projectRoot, rulerMcpJson, agentConfig, backup = true) {
        const defaults = this.getDefaultOutputPath(projectRoot);
        // Rules file: explicit outputPath wins, then outputPathInstructions.
        const instructionsPath = path.resolve(projectRoot, agentConfig?.outputPath ||
            agentConfig?.outputPathInstructions ||
            defaults.instructions);
        await (0, FileSystemUtils_1.ensureDirExists)(path.dirname(instructionsPath));
        if (backup) {
            await (0, FileSystemUtils_1.backupFile)(instructionsPath);
        }
        await (0, FileSystemUtils_1.writeGeneratedFile)(instructionsPath, concatenatedRules);
        // MCP propagation: on by default, skipped when disabled or no servers given.
        const mcpEnabled = agentConfig?.mcp?.enabled ?? true;
        if (!mcpEnabled || !rulerMcpJson) {
            return;
        }
        const mcpPath = path.resolve(projectRoot, agentConfig?.outputPathConfig ?? defaults.mcp);
        const strategy = agentConfig?.mcp?.strategy ?? 'merge';
        await (0, FileSystemUtils_1.ensureDirExists)(path.dirname(mcpPath));
        let existingConfig = {};
        try {
            existingConfig = JSON.parse(await fs_1.promises.readFile(mcpPath, 'utf8'));
        }
        catch (err) {
            // A missing file just means "no existing config"; any other failure
            // (permissions, malformed JSON, ...) is propagated to the caller.
            if (err.code !== 'ENOENT') {
                throw err;
            }
        }
        const merged = (0, merge_1.mergeMcp)(existingConfig, rulerMcpJson, strategy, 'mcpServers');
        if (backup) {
            await (0, FileSystemUtils_1.backupFile)(mcpPath);
        }
        await (0, FileSystemUtils_1.writeGeneratedFile)(mcpPath, JSON.stringify(merged, null, 2));
    }
    /** Default targets for the rules file and the MCP config file. */
    getDefaultOutputPath(projectRoot) {
        return {
            instructions: path.join(projectRoot, '.amazonq', 'rules', 'ruler_q_rules.md'),
            mcp: path.join(projectRoot, '.amazonq', 'mcp.json'),
        };
    }
    getMcpServerKey() {
        return 'mcpServers';
    }
    supportsMcpStdio() {
        return true;
    }
    supportsMcpRemote() {
        return true;
    }
}
|
||||
exports.AmazonQCliAgent = AmazonQCliAgent;
|
||||
+16
@@ -0,0 +1,16 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.AmpAgent = void 0;
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
/**
 * Amp adapter. All file handling is inherited from AgentsMdAgent (the shared
 * root-level AGENTS.md writer); only identity and skills support differ.
 */
class AmpAgent extends AgentsMdAgent_1.AgentsMdAgent {
    /** Stable identifier used for agent selection/configuration. */
    getIdentifier() {
        return 'amp';
    }
    /** Human-readable display name. */
    getName() {
        return 'Amp';
    }
    /** Amp opts into native skills propagation. */
    supportsNativeSkills() {
        return true;
    }
}
|
||||
exports.AmpAgent = AmpAgent;
|
||||
+56
@@ -0,0 +1,56 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.AntigravityAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const AbstractAgent_1 = require("./AbstractAgent");
|
||||
/**
 * Antigravity adapter. Relies on AbstractAgent for the write/backup flow
 * and targets `.agent/rules/ruler.md` inside the project.
 */
class AntigravityAgent extends AbstractAgent_1.AbstractAgent {
    getIdentifier() {
        return 'antigravity';
    }
    getName() {
        return 'Antigravity';
    }
    /** Rules file location: <projectRoot>/.agent/rules/ruler.md */
    getDefaultOutputPath(projectRoot) {
        return path.join(projectRoot, '.agent', 'rules', 'ruler.md');
    }
    /** Antigravity opts into native skills propagation. */
    supportsNativeSkills() {
        return true;
    }
}
|
||||
exports.AntigravityAgent = AntigravityAgent;
|
||||
+70
@@ -0,0 +1,70 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.AugmentCodeAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const FileSystemUtils_1 = require("../core/FileSystemUtils");
|
||||
/**
 * AugmentCode agent adapter.
 * Writes the concatenated rules to `.augment/rules/ruler_augment_instructions.md`.
 * MCP propagation is intentionally a no-op: AugmentCode does not support MCP servers.
 */
class AugmentCodeAgent {
    getIdentifier() {
        return 'augmentcode';
    }
    getName() {
        return 'AugmentCode';
    }
    /**
     * Writes the rules file for AugmentCode.
     *
     * Fix: the previous version used the output path verbatim — a relative
     * custom `outputPath` resolved against the process CWD rather than
     * `projectRoot` — and never created the `.augment/rules/` parent
     * directory before writing. Both now mirror AbstractAgent's sequence:
     * resolve against projectRoot, ensure dir, optional backup, write.
     *
     * @param {string} concatenatedRules - Combined rule text to write.
     * @param {string} projectRoot - Root used to resolve relative output paths.
     * @param {*} _rulerMcpJson - Ignored; AugmentCode has no MCP support.
     * @param {object} [agentConfig] - Optional per-agent overrides.
     * @param {boolean} [backup=true] - Back up an existing file before writing.
     */
    async applyRulerConfig(concatenatedRules, projectRoot, _rulerMcpJson, agentConfig, backup = true) {
        const output = agentConfig?.outputPath ?? this.getDefaultOutputPath(projectRoot);
        const absolutePath = path.resolve(projectRoot, output);
        await (0, FileSystemUtils_1.ensureDirExists)(path.dirname(absolutePath));
        if (backup) {
            await (0, FileSystemUtils_1.backupFile)(absolutePath);
        }
        await (0, FileSystemUtils_1.writeGeneratedFile)(absolutePath, concatenatedRules);
        // AugmentCode does not support MCP servers; MCP configuration is ignored.
    }
    /** Rules file location: <projectRoot>/.augment/rules/ruler_augment_instructions.md */
    getDefaultOutputPath(projectRoot) {
        return path.join(projectRoot, '.augment', 'rules', 'ruler_augment_instructions.md');
    }
    // AugmentCode does not support MCP servers.
    supportsMcpStdio() {
        return false;
    }
    supportsMcpRemote() {
        return false;
    }
}
|
||||
exports.AugmentCodeAgent = AugmentCodeAgent;
|
||||
+62
@@ -0,0 +1,62 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ClaudeAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const AbstractAgent_1 = require("./AbstractAgent");
|
||||
/**
 * Claude Code adapter. Output goes to the root-level CLAUDE.md; both STDIO
 * and remote MCP servers are supported, as are native skills.
 */
class ClaudeAgent extends AbstractAgent_1.AbstractAgent {
    getIdentifier() {
        return 'claude';
    }
    getName() {
        return 'Claude Code';
    }
    /** Rules file location: <projectRoot>/CLAUDE.md */
    getDefaultOutputPath(projectRoot) {
        return path.join(projectRoot, 'CLAUDE.md');
    }
    supportsMcpStdio() {
        return true;
    }
    supportsMcpRemote() {
        return true;
    }
    supportsNativeSkills() {
        return true;
    }
}
|
||||
exports.ClaudeAgent = ClaudeAgent;
|
||||
+53
@@ -0,0 +1,53 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ClineAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const AbstractAgent_1 = require("./AbstractAgent");
|
||||
/**
 * Cline agent adapter.
 *
 * Targets the root-level .clinerules file; the instruction write itself is
 * handled by the AbstractAgent base class (defined elsewhere). No MCP
 * capability overrides are declared here, so the base-class defaults apply.
 */
class ClineAgent extends AbstractAgent_1.AbstractAgent {
    // Stable identifier used in ruler configuration to select this agent.
    getIdentifier() {
        return 'cline';
    }
    // Human-readable display name.
    getName() {
        return 'Cline';
    }
    // Cline reads project rules from a .clinerules file at the repo root.
    getDefaultOutputPath(projectRoot) {
        return path.join(projectRoot, '.clinerules');
    }
}
exports.ClineAgent = ClineAgent;
|
||||
+153
@@ -0,0 +1,153 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.CodexCliAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const fs_1 = require("fs");
|
||||
const toml_1 = require("@iarna/toml");
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
const FileSystemUtils_1 = require("../core/FileSystemUtils");
|
||||
const constants_1 = require("../constants");
|
||||
/**
 * OpenAI Codex CLI agent adapter.
 *
 * Composes an AgentsMdAgent for the instructions file and additionally
 * maintains the TOML config at .codex/config.toml, where MCP servers are
 * stored under the `mcp_servers` table.
 */
class CodexCliAgent {
    constructor() {
        // Delegate instruction-file handling to the shared AGENTS.md adapter.
        this.agentsMdAgent = new AgentsMdAgent_1.AgentsMdAgent();
    }
    // Stable identifier used in ruler configuration to select this agent.
    getIdentifier() {
        return 'codex';
    }
    // Human-readable display name.
    getName() {
        return 'OpenAI Codex CLI';
    }
    /**
     * Write ruler instructions and, when enabled, MCP server configuration.
     *
     * @param concatenatedRules Combined ruler instruction text.
     * @param projectRoot Root directory of the target project.
     * @param rulerMcpJson Ruler MCP definitions ({ mcpServers }) or null.
     * @param agentConfig Optional per-agent overrides (output paths, mcp).
     * @param backup Whether files are backed up before overwriting (default true).
     */
    async applyRulerConfig(concatenatedRules, projectRoot, rulerMcpJson, agentConfig, backup = true) {
        // First perform idempotent AGENTS.md write via composed AgentsMdAgent
        await this.agentsMdAgent.applyRulerConfig(concatenatedRules, projectRoot, null, {
            // Preserve explicit outputPath precedence semantics if provided.
            outputPath: agentConfig?.outputPath ||
                agentConfig?.outputPathInstructions ||
                undefined,
        }, backup);
        // Use proper path resolution from getDefaultOutputPath and agentConfig
        const defaults = this.getDefaultOutputPath(projectRoot);
        const mcpEnabled = agentConfig?.mcp?.enabled ?? true;
        if (mcpEnabled && rulerMcpJson) {
            // Apply MCP server filtering and transformation. The capabilities
            // module is imported lazily so it is only loaded when needed.
            const { filterMcpConfigForAgent } = await Promise.resolve().then(() => __importStar(require('../mcp/capabilities')));
            const filteredMcpConfig = filterMcpConfigForAgent(rulerMcpJson, this);
            if (!filteredMcpConfig) {
                return; // No compatible servers found
            }
            const filteredRulerMcpJson = filteredMcpConfig;
            // Determine the config file path using proper precedence
            const configPath = agentConfig?.outputPathConfig ?? defaults.config;
            // Ensure the parent directory exists
            await fs_1.promises.mkdir(path.dirname(configPath), { recursive: true });
            // Get the merge strategy: 'merge' keeps existing servers,
            // 'overwrite' replaces the whole mcp_servers table.
            const strategy = agentConfig?.mcp?.strategy ?? 'merge';
            // Extract MCP servers from filtered ruler config
            const rulerServers = filteredRulerMcpJson.mcpServers || {};
            // Read existing TOML config if it exists
            let existingConfig = {};
            try {
                const existingContent = await fs_1.promises.readFile(configPath, 'utf8');
                existingConfig = (0, toml_1.parse)(existingContent);
            }
            catch {
                // File doesn't exist or can't be parsed, use empty config
            }
            // Create the updated config
            const updatedConfig = { ...existingConfig };
            // Initialize mcp_servers if it doesn't exist
            if (!updatedConfig.mcp_servers) {
                updatedConfig.mcp_servers = {};
            }
            if (strategy === 'overwrite') {
                // For overwrite strategy, replace the entire mcp_servers section
                updatedConfig.mcp_servers = {};
            }
            // Add the ruler servers, copying only the recognised keys
            // (command/url/args/env/headers); other properties are dropped.
            for (const [serverName, serverConfig] of Object.entries(rulerServers)) {
                // Create a properly formatted MCP server entry
                const mcpServer = {};
                if (serverConfig.command) {
                    mcpServer.command = serverConfig.command;
                }
                if (serverConfig.url) {
                    mcpServer.url = serverConfig.url;
                }
                if (serverConfig.args) {
                    mcpServer.args = serverConfig.args;
                }
                // Format env as an inline table
                if (serverConfig.env) {
                    mcpServer.env = serverConfig.env;
                }
                // Handle additional properties from remote server transformation
                if (serverConfig.headers) {
                    mcpServer.headers = serverConfig.headers;
                }
                if (updatedConfig.mcp_servers) {
                    updatedConfig.mcp_servers[serverName] = mcpServer;
                }
            }
            // Convert to TOML using structured objects
            const finalConfig = { ...updatedConfig };
            // @iarna/toml should handle the formatting properly
            const tomlContent = (0, toml_1.stringify)(finalConfig);
            await (0, FileSystemUtils_1.writeGeneratedFile)(configPath, tomlContent);
        }
    }
    // Instructions go to the shared default rules file; the MCP config lives
    // at .codex/config.toml inside the project root.
    getDefaultOutputPath(projectRoot) {
        return {
            instructions: path.join(projectRoot, constants_1.DEFAULT_RULES_FILENAME),
            config: path.join(projectRoot, '.codex', 'config.toml'),
        };
    }
    // TOML table name Codex uses for MCP server entries.
    getMcpServerKey() {
        return 'mcp_servers';
    }
    // Codex CLI can launch stdio MCP servers.
    supportsMcpStdio() {
        return true;
    }
    // Codex CLI can connect to remote MCP servers.
    supportsMcpRemote() {
        return true;
    }
    // Codex CLI has native skills support.
    supportsNativeSkills() {
        return true;
    }
}
exports.CodexCliAgent = CodexCliAgent;
|
||||
+46
@@ -0,0 +1,46 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.CopilotAgent = void 0;
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
/**
 * GitHub Copilot agent adapter.
 * Writes to AGENTS.md for both web-based GitHub Copilot and VS Code extension.
 *
 * NOTE(review): MCP capability accessors are declared below, but this
 * adapter's applyRulerConfig only writes the instructions file; any MCP
 * config emission presumably happens elsewhere — verify against callers.
 */
class CopilotAgent {
    constructor() {
        // Delegate instruction-file handling to the shared AGENTS.md adapter.
        this.agentsMdAgent = new AgentsMdAgent_1.AgentsMdAgent();
    }
    // Stable identifier used in ruler configuration to select this agent.
    getIdentifier() {
        return 'copilot';
    }
    // Human-readable display name.
    getName() {
        return 'GitHub Copilot';
    }
    /**
     * Returns the default output path for AGENTS.md (delegated to the
     * composed AgentsMdAgent).
     */
    getDefaultOutputPath(projectRoot) {
        return this.agentsMdAgent.getDefaultOutputPath(projectRoot);
    }
    /**
     * Write the concatenated ruler instructions to AGENTS.md.
     *
     * @param concatenatedRules Combined ruler instruction text.
     * @param projectRoot Root directory of the target project.
     * @param rulerMcpJson Ignored here; the instructions file needs no MCP data.
     * @param agentConfig Optional per-agent overrides (output paths).
     * @param backup Whether files are backed up before overwriting (default true).
     */
    async applyRulerConfig(concatenatedRules, projectRoot, rulerMcpJson, agentConfig, backup = true) {
        // Write to AGENTS.md using the existing AgentsMdAgent infrastructure
        await this.agentsMdAgent.applyRulerConfig(concatenatedRules, projectRoot, null, // No MCP config needed for the instructions file
        {
            // Preserve explicit outputPath precedence semantics if provided
            outputPath: agentConfig?.outputPath || agentConfig?.outputPathInstructions,
        }, backup);
    }
    // Key Copilot uses for MCP server entries in its config.
    getMcpServerKey() {
        return 'servers';
    }
    // Copilot can launch stdio MCP servers.
    supportsMcpStdio() {
        return true;
    }
    // Copilot can connect to remote MCP servers.
    supportsMcpRemote() {
        return true;
    }
    // Copilot has native skills support.
    supportsNativeSkills() {
        return true;
    }
}
exports.CopilotAgent = CopilotAgent;
|
||||
+128
@@ -0,0 +1,128 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.CrushAgent = void 0;
|
||||
const fs = __importStar(require("fs/promises"));
|
||||
const path = __importStar(require("path"));
|
||||
/**
 * Crush agent adapter.
 *
 * Writes the concatenated ruler instructions to CRUSH.md and maintains the
 * MCP server block (under the "mcp" key) inside .crush.json, merging with
 * any existing configuration found there.
 */
class CrushAgent {
    // Stable identifier used in ruler configuration to select this agent.
    getIdentifier() {
        return 'crush';
    }
    // Human-readable display name.
    getName() {
        return 'Crush';
    }
    // Default file locations: instructions in CRUSH.md, MCP servers in
    // .crush.json, both at the project root.
    getDefaultOutputPath(projectRoot) {
        return {
            instructions: path.join(projectRoot, 'CRUSH.md'),
            mcp: path.join(projectRoot, '.crush.json'),
        };
    }
    /**
     * Transform MCP server types for Crush compatibility.
     * Crush expects "http" for HTTP servers and "sse" for SSE servers, not
     * "remote". A remote server whose URL contains an "/sse" path segment is
     * rewritten to "sse"; other remote servers become "http". Non-object
     * entries and non-remote servers are passed through unchanged.
     */
    transformMcpServersForCrush(mcpServers) {
        const transformedServers = {};
        for (const [name, serverDef] of Object.entries(mcpServers)) {
            if (!serverDef || typeof serverDef !== 'object') {
                // Pass through null / primitive entries untouched.
                transformedServers[name] = serverDef;
                continue;
            }
            const transformedServer = { ...serverDef };
            if (serverDef.type === 'remote' &&
                serverDef.url &&
                typeof serverDef.url === 'string') {
                // URLs with an /sse path segment are treated as SSE endpoints.
                transformedServer.type = /\/sse(\/|$)/i.test(serverDef.url) ? 'sse' : 'http';
            }
            transformedServers[name] = transformedServer;
        }
        return transformedServers;
    }
    /**
     * Write instructions and MCP configuration for Crush.
     *
     * @param concatenatedRules Combined ruler instruction text for CRUSH.md.
     * @param projectRoot Root directory of the target project.
     * @param rulerMcpJson Ruler MCP definitions ({ mcpServers }) or null.
     * @param agentConfig Optional per-agent overrides (output paths, mcp).
     *
     * Fix: previously this adapter ignored `agentConfig.mcp.enabled` (every
     * sibling adapter honours it) and rewrote .crush.json even when ruler
     * supplied no MCP config; MCP handling is now gated like the others.
     */
    async applyRulerConfig(concatenatedRules, projectRoot, rulerMcpJson, agentConfig) {
        const outputPaths = this.getDefaultOutputPath(projectRoot);
        const instructionsPath = agentConfig?.outputPathInstructions ?? outputPaths['instructions'];
        const mcpPath = agentConfig?.outputPathConfig ?? outputPaths['mcp'];
        await fs.writeFile(instructionsPath, concatenatedRules);
        const mcpEnabled = agentConfig?.mcp?.enabled ?? true;
        if (!mcpEnabled || !rulerMcpJson) {
            return;
        }
        // Always transform from ruler shape ({ mcpServers: ... }) to Crush
        // shape ({ mcp: ... }).
        const transformedServers = this.transformMcpServersForCrush(rulerMcpJson.mcpServers ?? {});
        let finalMcpConfig = { mcp: transformedServers };
        try {
            const existingMcpConfig = JSON.parse(await fs.readFile(mcpPath, 'utf-8'));
            if (existingMcpConfig && typeof existingMcpConfig === 'object') {
                // Preserve unrelated keys of the existing file; ruler servers
                // win on name collisions within "mcp".
                finalMcpConfig = {
                    ...existingMcpConfig,
                    mcp: {
                        ...(existingMcpConfig.mcp || {}),
                        ...transformedServers,
                    },
                };
            }
        }
        catch {
            // Missing or unparsable existing config: fall back to ruler-only servers.
        }
        if (Object.keys(finalMcpConfig.mcp).length > 0) {
            await fs.writeFile(mcpPath, JSON.stringify(finalMcpConfig, null, 2));
        }
    }
    // Crush can launch stdio MCP servers.
    supportsMcpStdio() {
        return true;
    }
    // Crush can connect to remote (http/sse) MCP servers.
    supportsMcpRemote() {
        return true;
    }
}
|
||||
exports.CrushAgent = CrushAgent;
|
||||
+37
@@ -0,0 +1,37 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.CursorAgent = void 0;
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
/**
 * Cursor agent adapter.
 * Leverages the standardized AGENTS.md approach supported natively by Cursor.
 * See: https://docs.cursor.com/en/cli/using
 */
class CursorAgent extends AgentsMdAgent_1.AgentsMdAgent {
    // Stable identifier used in ruler configuration to select this agent.
    getIdentifier() {
        return 'cursor';
    }
    // Human-readable display name.
    getName() {
        return 'Cursor';
    }
    /**
     * Write the concatenated ruler instructions to AGENTS.md.
     *
     * @param concatenatedRules Combined ruler instruction text.
     * @param _rulerMcpJson Ignored; Cursor's AGENTS.md carries no MCP data.
     * @param agentConfig Optional per-agent overrides (outputPath).
     * @param backup Whether files are backed up before overwriting (default true).
     */
    async applyRulerConfig(concatenatedRules, projectRoot, _rulerMcpJson, agentConfig, backup = true) {
        // Write AGENTS.md via base class
        // Cursor natively reads AGENTS.md from the project root
        await super.applyRulerConfig(concatenatedRules, projectRoot, null, {
            outputPath: agentConfig?.outputPath,
        }, backup);
    }
    // Key Cursor uses for MCP server entries in its config.
    getMcpServerKey() {
        return 'mcpServers';
    }
    // Cursor can launch stdio MCP servers.
    supportsMcpStdio() {
        return true;
    }
    // Cursor can connect to remote MCP servers.
    supportsMcpRemote() {
        return true;
    }
    // Cursor has native skills support.
    supportsNativeSkills() {
        return true;
    }
}
exports.CursorAgent = CursorAgent;
|
||||
+29
@@ -0,0 +1,29 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.FactoryDroidAgent = void 0;
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
/**
 * Factory Droid agent adapter.
 * Uses the root-level AGENTS.md for instructions (inherited from
 * AgentsMdAgent, which performs the actual write).
 */
class FactoryDroidAgent extends AgentsMdAgent_1.AgentsMdAgent {
    // Stable identifier used in ruler configuration to select this agent.
    getIdentifier() {
        return 'factory';
    }
    // Human-readable display name.
    getName() {
        return 'Factory Droid';
    }
    // Key Factory Droid uses for MCP server entries in its config.
    getMcpServerKey() {
        return 'mcpServers';
    }
    // Factory Droid can launch stdio MCP servers.
    supportsMcpStdio() {
        return true;
    }
    // Factory Droid can connect to remote MCP servers.
    supportsMcpRemote() {
        return true;
    }
    // Factory Droid has native skills support.
    supportsNativeSkills() {
        return true;
    }
}
exports.FactoryDroidAgent = FactoryDroidAgent;
|
||||
+61
@@ -0,0 +1,61 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.FirebaseAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const AbstractAgent_1 = require("./AbstractAgent");
|
||||
/**
 * Firebase Studio agent adapter.
 *
 * Targets .idx/airules.md; the instruction write itself is handled by the
 * AbstractAgent base class (defined elsewhere).
 */
class FirebaseAgent extends AbstractAgent_1.AbstractAgent {
    // Stable identifier used in ruler configuration to select this agent.
    getIdentifier() {
        return 'firebase';
    }
    // Human-readable display name.
    getName() {
        return 'Firebase Studio';
    }
    // Firebase Studio (IDX) reads AI rules from .idx/airules.md.
    getDefaultOutputPath(projectRoot) {
        return path.join(projectRoot, '.idx', 'airules.md');
    }
    // Firebase Studio (IDX) supports stdio MCP servers via .idx/mcp.json
    supportsMcpStdio() {
        return true;
    }
    // Remote MCP over HTTP/SSE is not documented for Firebase Studio yet
    supportsMcpRemote() {
        return false;
    }
}
exports.FirebaseAgent = FirebaseAgent;
|
||||
+205
@@ -0,0 +1,205 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.FirebenderAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const fs = __importStar(require("fs"));
|
||||
const FileSystemUtils_1 = require("../core/FileSystemUtils");
|
||||
/**
 * Firebender agent adapter.
 *
 * Manages firebender.json at the project root, which holds both the rule
 * entries and (optionally) MCP server definitions. Existing content is
 * loaded, new rules are appended and de-duplicated, MCP servers are merged
 * or overwritten per strategy, and the result is written back.
 */
class FirebenderAgent {
    /**
     * Type guard function to safely check if an object is a FirebenderRule,
     * i.e. an object with string `filePathMatches` and `rulesPaths` fields.
     */
    isFirebenderRule(rule) {
        return (typeof rule === 'object' &&
            rule !== null &&
            'filePathMatches' in rule &&
            'rulesPaths' in rule &&
            typeof rule.filePathMatches === 'string' &&
            typeof rule.rulesPaths === 'string');
    }
    // Stable identifier used in ruler configuration to select this agent.
    getIdentifier() {
        return 'firebender';
    }
    // Human-readable display name.
    getName() {
        return 'Firebender';
    }
    /**
     * Merge ruler rules (and optionally MCP servers) into firebender.json.
     *
     * @param concatenatedRules Combined ruler instruction text.
     * @param projectRoot Root directory of the target project.
     * @param rulerMcpJson Ruler MCP definitions ({ mcpServers }) or null.
     * @param agentConfig Optional per-agent overrides (output paths, mcp).
     * @param backup Whether to back up firebender.json before writing (default true).
     */
    async applyRulerConfig(concatenatedRules, projectRoot, rulerMcpJson, agentConfig, backup = true) {
        const rulesPath = this.resolveOutputPath(projectRoot, agentConfig);
        await (0, FileSystemUtils_1.ensureDirExists)(path.dirname(rulesPath));
        const firebenderConfig = await this.loadExistingConfig(rulesPath);
        const newRules = this.createRulesFromConcatenatedRules(concatenatedRules, projectRoot);
        firebenderConfig.rules.push(...newRules);
        // Drop duplicates so repeated runs stay idempotent.
        this.removeDuplicateRules(firebenderConfig);
        const mcpEnabled = agentConfig?.mcp?.enabled ?? true;
        if (mcpEnabled && rulerMcpJson) {
            await this.handleMcpConfiguration(firebenderConfig, rulerMcpJson, agentConfig);
        }
        await this.saveConfig(rulesPath, firebenderConfig, backup);
    }
    // Output path precedence: outputPath, then outputPathInstructions, then
    // the default firebender.json location; resolved against projectRoot.
    resolveOutputPath(projectRoot, agentConfig) {
        const outputPaths = this.getDefaultOutputPath(projectRoot);
        const output = agentConfig?.outputPath ??
            agentConfig?.outputPathInstructions ??
            outputPaths['instructions'];
        return path.resolve(projectRoot, output);
    }
    // Load the existing firebender.json, guaranteeing a `rules` array.
    // ENOENT yields a fresh config silently; any other read/parse failure is
    // logged and also yields a fresh config.
    // NOTE(review): a corrupt existing file is thus replaced on save after
    // only a console warning — confirm this data-loss behavior is intended.
    async loadExistingConfig(rulesPath) {
        try {
            const existingContent = await fs.promises.readFile(rulesPath, 'utf8');
            const config = JSON.parse(existingContent);
            if (!config.rules) {
                config.rules = [];
            }
            return config;
        }
        catch (error) {
            if (error &&
                typeof error === 'object' &&
                'code' in error &&
                error.code === 'ENOENT') {
                return { rules: [] };
            }
            console.warn(`Failed to read/parse existing firebender.json: ${error}`);
            return { rules: [] };
        }
    }
    // Prefer rule objects referencing source files (parsed from the "Source"
    // HTML comments in the concatenated rules); fall back to raw text lines.
    createRulesFromConcatenatedRules(concatenatedRules, projectRoot) {
        const filePaths = this.extractFilePathsFromRules(concatenatedRules, projectRoot);
        if (filePaths.length > 0) {
            return this.createRuleObjectsFromFilePaths(filePaths);
        }
        else {
            return this.createRulesFromPlainText(concatenatedRules);
        }
    }
    // One rule object per source file, each applying to all files ('**/*').
    createRuleObjectsFromFilePaths(filePaths) {
        return filePaths.map((filePath) => ({
            filePathMatches: '**/*',
            rulesPaths: filePath,
        }));
    }
    // Fallback: each non-blank line becomes a rule entry.
    // NOTE(review): these entries are bare strings, not FirebenderRule
    // objects; removeDuplicateRules handles both shapes via String(rule).
    createRulesFromPlainText(concatenatedRules) {
        return concatenatedRules.split('\n').filter((rule) => rule.trim());
    }
    // De-duplicate in place. Rule objects are keyed by
    // "filePathMatches::rulesPaths"; anything else by its string value.
    // First occurrence wins, preserving order.
    removeDuplicateRules(firebenderConfig) {
        const seen = new Set();
        firebenderConfig.rules = firebenderConfig.rules.filter((rule) => {
            let key;
            if (this.isFirebenderRule(rule)) {
                const filePathMatchesPart = rule.filePathMatches;
                const rulesPathsPart = rule.rulesPaths;
                key = `${filePathMatchesPart}::${rulesPathsPart}`;
            }
            else {
                key = String(rule);
            }
            if (seen.has(key)) {
                return false;
            }
            seen.add(key);
            return true;
        });
    }
    // Serialize and write the config, optionally backing up the old file first.
    async saveConfig(rulesPath, config, backup) {
        const updatedContent = JSON.stringify(config, null, 2);
        if (backup) {
            await (0, FileSystemUtils_1.backupFile)(rulesPath);
        }
        await (0, FileSystemUtils_1.writeGeneratedFile)(rulesPath, updatedContent);
    }
    /**
     * Handle MCP server configuration for Firebender.
     * Merges or overwrites MCP servers in the firebender.json configuration
     * based on strategy ('merge' default; incoming servers win on collision).
     */
    async handleMcpConfiguration(firebenderConfig, rulerMcpJson, agentConfig) {
        const strategy = agentConfig?.mcp?.strategy ?? 'merge';
        const incomingServers = rulerMcpJson.mcpServers || {};
        if (!firebenderConfig.mcpServers) {
            firebenderConfig.mcpServers = {};
        }
        if (strategy === 'overwrite') {
            firebenderConfig.mcpServers = { ...incomingServers };
        }
        else if (strategy === 'merge') {
            const existingServers = firebenderConfig.mcpServers || {};
            firebenderConfig.mcpServers = { ...existingServers, ...incomingServers };
        }
    }
    // Both instructions and MCP config live in the single firebender.json.
    getDefaultOutputPath(projectRoot) {
        return {
            instructions: path.join(projectRoot, 'firebender.json'),
            mcp: path.join(projectRoot, 'firebender.json'),
        };
    }
    // Key Firebender uses for MCP server entries in its config.
    getMcpServerKey() {
        return 'mcpServers';
    }
    // Firebender can launch stdio MCP servers.
    supportsMcpStdio() {
        return true;
    }
    // Firebender can connect to remote MCP servers.
    supportsMcpRemote() {
        return true;
    }
    /**
     * Extracts file paths from concatenated rules by parsing HTML source comments.
     * @param concatenatedRules The concatenated rules string with HTML comments
     * @param projectRoot The project root directory
     * @returns Array of file paths relative to project root
     */
    extractFilePathsFromRules(concatenatedRules, projectRoot) {
        const sourceCommentRegex = /<!-- Source: (.+?) -->/g;
        const filePaths = [];
        let match;
        while ((match = sourceCommentRegex.exec(concatenatedRules)) !== null) {
            const relativePath = match[1];
            const absolutePath = path.resolve(projectRoot, relativePath);
            const normalizedProjectRoot = path.resolve(projectRoot);
            // Ensure the absolutePath is within the project root (cross-platform compatible)
            // This prevents path traversal attacks while handling Windows/Unix path differences
            const isWithinProject = absolutePath.startsWith(normalizedProjectRoot) &&
                (absolutePath.length === normalizedProjectRoot.length ||
                    absolutePath[normalizedProjectRoot.length] === path.sep);
            if (isWithinProject) {
                const projectRelativePath = path.relative(projectRoot, absolutePath);
                filePaths.push(projectRelativePath);
            }
        }
        return filePaths;
    }
}
exports.FirebenderAgent = FirebenderAgent;
|
||||
+120
@@ -0,0 +1,120 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.GeminiCliAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const fs_1 = require("fs");
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
class GeminiCliAgent extends AgentsMdAgent_1.AgentsMdAgent {
|
||||
getIdentifier() {
|
||||
return 'gemini-cli';
|
||||
}
|
||||
getName() {
|
||||
return 'Gemini CLI';
|
||||
}
|
||||
async applyRulerConfig(concatenatedRules, projectRoot, rulerMcpJson, agentConfig) {
|
||||
// First, perform idempotent write of AGENTS.md via base class
|
||||
await super.applyRulerConfig(concatenatedRules, projectRoot, null, {
|
||||
outputPath: agentConfig?.outputPath,
|
||||
});
|
||||
// Prepare .gemini/settings.json with contextFileName and MCP configuration
|
||||
const settingsPath = path.join(projectRoot, '.gemini', 'settings.json');
|
||||
let existingSettings = {};
|
||||
try {
|
||||
const raw = await fs_1.promises.readFile(settingsPath, 'utf8');
|
||||
existingSettings = JSON.parse(raw);
|
||||
}
|
||||
catch (err) {
|
||||
if (err.code !== 'ENOENT') {
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
const updated = {
|
||||
...existingSettings,
|
||||
contextFileName: 'AGENTS.md',
|
||||
};
|
||||
// Handle MCP server configuration if provided
|
||||
const mcpEnabled = agentConfig?.mcp?.enabled ?? true;
|
||||
if (mcpEnabled && rulerMcpJson) {
|
||||
const strategy = agentConfig?.mcp?.strategy ?? 'merge';
|
||||
// Gemini CLI (since v0.21.0) no longer accepts the "type" field in MCP server entries.
|
||||
// Following the MCP spec update from Nov 25, 2025, the transport type is now inferred
|
||||
// from the presence of specific keys (command/args -> stdio, url -> sse/http).
|
||||
// Strip 'type' field from all incoming servers before merging.
|
||||
const stripTypeField = (servers) => {
|
||||
const cleaned = {};
|
||||
for (const [name, def] of Object.entries(servers)) {
|
||||
if (def && typeof def === 'object') {
|
||||
const copy = { ...def };
|
||||
delete copy.type;
|
||||
cleaned[name] = copy;
|
||||
}
|
||||
else {
|
||||
cleaned[name] = def;
|
||||
}
|
||||
}
|
||||
return cleaned;
|
||||
};
|
||||
if (strategy === 'overwrite') {
|
||||
// For overwrite, preserve existing settings except MCP servers
|
||||
const incomingServers = rulerMcpJson.mcpServers || {};
|
||||
updated[this.getMcpServerKey()] = stripTypeField(incomingServers);
|
||||
}
|
||||
else {
|
||||
// For merge strategy, merge with existing MCP servers
|
||||
const baseServers = existingSettings[this.getMcpServerKey()] || {};
|
||||
const incomingServers = rulerMcpJson.mcpServers || {};
|
||||
const mergedServers = { ...baseServers, ...incomingServers };
|
||||
updated[this.getMcpServerKey()] = stripTypeField(mergedServers);
|
||||
}
|
||||
}
|
||||
await fs_1.promises.mkdir(path.dirname(settingsPath), { recursive: true });
|
||||
await fs_1.promises.writeFile(settingsPath, JSON.stringify(updated, null, 2));
|
||||
}
|
||||
    /**
     * Key under which MCP servers are stored for Gemini
     * (.gemini/settings.json), so MCP merging targets the right property.
     */
    getMcpServerKey() {
        return 'mcpServers';
    }
|
||||
    /** Stdio MCP transport is supported. */
    supportsMcpStdio() {
        return true;
    }
|
||||
    /** Remote (URL-based) MCP transport is supported. */
    supportsMcpRemote() {
        return true;
    }
|
||||
    /** Native skills are supported by this agent. */
    supportsNativeSkills() {
        return true;
    }
|
||||
}
|
||||
exports.GeminiCliAgent = GeminiCliAgent;
|
||||
+61
@@ -0,0 +1,61 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.GooseAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const AbstractAgent_1 = require("./AbstractAgent");
|
||||
/**
 * Adapter for Block's Goose AI assistant.
 *
 * Rules are propagated to a `.goosehints` file at the project root. Goose
 * doesn't support MCP configuration via local config files, so no MCP
 * settings file is produced.
 */
class GooseAgent extends AbstractAgent_1.AbstractAgent {
    /** Identifier used to refer to this agent in ruler configuration. */
    getIdentifier() {
        return 'goose';
    }
    /** Display name shown to users. */
    getName() {
        return 'Goose';
    }
    /** Rules land in `.goosehints` at the project root. */
    getDefaultOutputPath(projectRoot) {
        return path.join(projectRoot, '.goosehints');
    }
    /**
     * Empty key: Goose has no local MCP configuration file for ruler to
     * write into.
     */
    getMcpServerKey() {
        return '';
    }
    /** Native skills are supported. */
    supportsNativeSkills() {
        return true;
    }
}
|
||||
exports.GooseAgent = GooseAgent;
|
||||
+2
@@ -0,0 +1,2 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
Generated
Vendored
+54
@@ -0,0 +1,54 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.JetBrainsAiAssistantAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const AbstractAgent_1 = require("./AbstractAgent");
|
||||
/**
 * Adapter for the JetBrains AI Assistant.
 *
 * Rules are written to `.aiassistant/rules/AGENTS.md` inside the project.
 */
class JetBrainsAiAssistantAgent extends AbstractAgent_1.AbstractAgent {
    /** Identifier used to refer to this agent in ruler configuration. */
    getIdentifier() {
        return 'jetbrains-ai';
    }
    /** Display name shown to users. */
    getName() {
        return 'JetBrains AI Assistant';
    }
    /** Rules file lives under `.aiassistant/rules/` in the project. */
    getDefaultOutputPath(projectRoot) {
        return path.join(projectRoot, '.aiassistant', 'rules', 'AGENTS.md');
    }
}
|
||||
exports.JetBrainsAiAssistantAgent = JetBrainsAiAssistantAgent;
|
||||
+14
@@ -0,0 +1,14 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.JulesAgent = void 0;
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
/**
 * Jules agent adapter.
 *
 * Inherits AgentsMdAgent behavior unchanged (idempotent AGENTS.md writes)
 * and overrides only the identifying metadata.
 */
class JulesAgent extends AgentsMdAgent_1.AgentsMdAgent {
    /** Identifier used to refer to this agent in ruler configuration. */
    getIdentifier() {
        return 'jules';
    }
    /** Display name shown to users. */
    getName() {
        return 'Jules';
    }
}
|
||||
exports.JulesAgent = JulesAgent;
|
||||
+62
@@ -0,0 +1,62 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.JunieAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const AbstractAgent_1 = require("./AbstractAgent");
|
||||
/**
 * Adapter for JetBrains Junie.
 *
 * Rules are written to `.junie/guidelines.md`. Both stdio and remote MCP
 * transports are reported as supported, as are native skills.
 */
class JunieAgent extends AbstractAgent_1.AbstractAgent {
    /** Identifier used to refer to this agent in ruler configuration. */
    getIdentifier() {
        return 'junie';
    }
    /** Display name shown to users. */
    getName() {
        return 'Junie';
    }
    /** Guidelines file lives under `.junie/` in the project. */
    getDefaultOutputPath(projectRoot) {
        return path.join(projectRoot, '.junie', 'guidelines.md');
    }
    /** Stdio MCP transport is supported. */
    supportsMcpStdio() {
        return true;
    }
    /** Remote MCP transport is supported. */
    supportsMcpRemote() {
        return true;
    }
    /** Native skills are supported. */
    supportsNativeSkills() {
        return true;
    }
}
|
||||
exports.JunieAgent = JunieAgent;
|
||||
+66
@@ -0,0 +1,66 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.KiloCodeAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
/**
 * Kilo Code agent adapter.
 *
 * Uses AGENTS.md for instructions and .kilocode/mcp.json for MCP
 * configuration.
 */
class KiloCodeAgent extends AgentsMdAgent_1.AgentsMdAgent {
    /** Identifier used to refer to this agent in ruler configuration. */
    getIdentifier() {
        return 'kilocode';
    }
    /** Display name shown to users. */
    getName() {
        return 'Kilo Code';
    }
    /** Instructions are written to AGENTS.md at the project root. */
    getDefaultOutputPath(projectRoot) {
        return path.join(projectRoot, 'AGENTS.md');
    }
    /** MCP servers live under the "mcpServers" key. */
    getMcpServerKey() {
        return 'mcpServers';
    }
    /** Stdio MCP transport is supported. */
    supportsMcpStdio() {
        return true;
    }
    /** Remote MCP transport is supported. */
    supportsMcpRemote() {
        return true;
    }
    /** Native skills are supported. */
    supportsNativeSkills() {
        return true;
    }
}
|
||||
exports.KiloCodeAgent = KiloCodeAgent;
|
||||
+56
@@ -0,0 +1,56 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.KiroAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const AbstractAgent_1 = require("./AbstractAgent");
|
||||
/**
 * Adapter for Kiro.
 *
 * Rules are written to `.kiro/steering/ruler_kiro_instructions.md`.
 */
class KiroAgent extends AbstractAgent_1.AbstractAgent {
    /** Identifier used to refer to this agent in ruler configuration. */
    getIdentifier() {
        return 'kiro';
    }
    /** Display name shown to users. */
    getName() {
        return 'Kiro';
    }
    /** Steering file lives under `.kiro/steering/` in the project. */
    getDefaultOutputPath(projectRoot) {
        return path.join(projectRoot, '.kiro', 'steering', 'ruler_kiro_instructions.md');
    }
    /** Stdio MCP transport is supported. */
    supportsMcpStdio() {
        return true;
    }
    /** Remote MCP transport is supported. */
    supportsMcpRemote() {
        return true;
    }
}
|
||||
exports.KiroAgent = KiroAgent;
|
||||
+171
@@ -0,0 +1,171 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.MistralVibeAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const fs_1 = require("fs");
|
||||
const toml_1 = require("@iarna/toml");
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
const FileSystemUtils_1 = require("../core/FileSystemUtils");
|
||||
const constants_1 = require("../constants");
|
||||
/**
 * Mistral Vibe CLI agent adapter.
 * Propagates rules to AGENTS.md (via a composed AgentsMdAgent) and MCP
 * servers to .vibe/config.toml.
 */
class MistralVibeAgent {
    constructor() {
        // Composition (not inheritance): reuse AgentsMdAgent's idempotent
        // AGENTS.md write while keeping MCP handling local to this class.
        this.agentsMdAgent = new AgentsMdAgent_1.AgentsMdAgent();
    }
    /** Identifier used to refer to this agent in ruler configuration. */
    getIdentifier() {
        return 'mistral';
    }
    /** Display name shown to users. */
    getName() {
        return 'Mistral';
    }
    /**
     * Writes the rules file and, when MCP is enabled and config is provided,
     * transforms ruler's MCP servers into Vibe's mcp_servers TOML array and
     * merges (or overwrites) them into .vibe/config.toml.
     *
     * @param concatenatedRules - Combined rule text for AGENTS.md.
     * @param projectRoot - Absolute path of the target project.
     * @param rulerMcpJson - Ruler MCP config ({ mcpServers }) or null.
     * @param agentConfig - Per-agent overrides (output paths, mcp options).
     * @param backup - Passed through to the composed AgentsMdAgent write.
     */
    async applyRulerConfig(concatenatedRules, projectRoot, rulerMcpJson, agentConfig, backup = true) {
        // First perform idempotent AGENTS.md write via composed AgentsMdAgent
        await this.agentsMdAgent.applyRulerConfig(concatenatedRules, projectRoot, null, {
            // outputPath (explicit) takes precedence over outputPathInstructions.
            outputPath: agentConfig?.outputPath ||
                agentConfig?.outputPathInstructions ||
                undefined,
        }, backup);
        // Handle MCP configuration (enabled by default unless switched off).
        const defaults = this.getDefaultOutputPath(projectRoot);
        const mcpEnabled = agentConfig?.mcp?.enabled ?? true;
        if (mcpEnabled && rulerMcpJson) {
            // Dynamic import keeps the capabilities module out of the static
            // dependency graph; filter drops servers this agent can't run.
            const { filterMcpConfigForAgent } = await Promise.resolve().then(() => __importStar(require('../mcp/capabilities')));
            const filteredMcpConfig = filterMcpConfigForAgent(rulerMcpJson, this);
            if (!filteredMcpConfig) {
                return; // No compatible servers found
            }
            const filteredRulerMcpJson = filteredMcpConfig;
            // Determine the config file path (override or .vibe/config.toml).
            const configPath = agentConfig?.outputPathConfig ?? defaults.config;
            // Ensure the parent directory exists
            await fs_1.promises.mkdir(path.dirname(configPath), { recursive: true });
            // Get the merge strategy ('merge' unless explicitly 'overwrite').
            const strategy = agentConfig?.mcp?.strategy ?? 'merge';
            // Transform ruler MCP servers (keyed map) to Vibe format
            // (array of { name, transport, ... } entries).
            const rulerServers = filteredRulerMcpJson.mcpServers || {};
            const vibeServers = [];
            for (const [serverName, serverConfig] of Object.entries(rulerServers)) {
                const vibeServer = {
                    name: serverName,
                    transport: this.determineTransport(serverConfig),
                };
                // Handle stdio servers (command launches the server process).
                if (serverConfig.command) {
                    vibeServer.command = serverConfig.command;
                    if (serverConfig.args) {
                        vibeServer.args = serverConfig.args;
                    }
                }
                // Handle remote servers
                if (serverConfig.url) {
                    vibeServer.url = serverConfig.url;
                }
                // Handle headers
                if (serverConfig.headers) {
                    vibeServer.headers = serverConfig.headers;
                }
                // Handle env
                if (serverConfig.env) {
                    vibeServer.env = serverConfig.env;
                }
                vibeServers.push(vibeServer);
            }
            // Read existing TOML config if it exists
            let existingConfig = {};
            try {
                const existingContent = await fs_1.promises.readFile(configPath, 'utf8');
                existingConfig = (0, toml_1.parse)(existingContent);
            }
            catch {
                // File doesn't exist or can't be parsed, use empty config
            }
            // Create the updated config, preserving unrelated top-level keys.
            const updatedConfig = { ...existingConfig };
            if (strategy === 'overwrite') {
                // For overwrite strategy, replace the entire mcp_servers array
                updatedConfig.mcp_servers = vibeServers;
            }
            else {
                // For merge strategy, merge by server name
                const existingServers = updatedConfig.mcp_servers || [];
                // Keep existing servers that aren't being overwritten by ruler.
                // NOTE(review): rulerServers[s.name] is a plain-object lookup, so a
                // server named like an Object.prototype property (e.g. "toString")
                // would be treated as overridden — consider Object.hasOwn; confirm.
                const mergedServers = existingServers.filter((s) => !rulerServers[s.name]);
                // Add all ruler servers
                mergedServers.push(...vibeServers);
                updatedConfig.mcp_servers = mergedServers;
            }
            // Convert to TOML and write
            // eslint-disable-next-line @typescript-eslint/no-explicit-any
            const tomlContent = (0, toml_1.stringify)(updatedConfig);
            await (0, FileSystemUtils_1.writeGeneratedFile)(configPath, tomlContent);
        }
    }
    /**
     * Determines the transport type based on server configuration:
     * a command means stdio, else a url means http, else default to stdio.
     */
    determineTransport(server) {
        if (server.command) {
            return 'stdio';
        }
        if (server.url) {
            // Default to http for remote servers
            // Could potentially detect streamable-http based on URL patterns if needed
            return 'http';
        }
        return 'stdio';
    }
    /**
     * Default output locations: instructions at the default rules filename,
     * MCP config in .vibe/config.toml.
     */
    getDefaultOutputPath(projectRoot) {
        return {
            instructions: path.join(projectRoot, constants_1.DEFAULT_RULES_FILENAME),
            config: path.join(projectRoot, '.vibe', 'config.toml'),
        };
    }
    /** Stdio MCP transport is supported. */
    supportsMcpStdio() {
        return true;
    }
    supportsMcpRemote() {
        return true; // Mistral Vibe supports http and streamable-http transports
    }
    supportsNativeSkills() {
        // Mistral Vibe supports native skills in .vibe/skills/
        return true;
    }
}
|
||||
exports.MistralVibeAgent = MistralVibeAgent;
|
||||
+105
@@ -0,0 +1,105 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.OpenCodeAgent = void 0;
|
||||
const fs = __importStar(require("fs/promises"));
|
||||
const path = __importStar(require("path"));
|
||||
/**
 * OpenCode agent adapter.
 *
 * Writes instructions to AGENTS.md and maintains an opencode.json config
 * file carrying the JSON schema reference and the merged "mcp" server map.
 */
class OpenCodeAgent {
    /** Identifier used to refer to this agent in ruler configuration. */
    getIdentifier() {
        return 'opencode';
    }
    /** Display name shown to users. */
    getName() {
        return 'OpenCode';
    }
    /**
     * Default output locations relative to the project root.
     * @returns {{instructions: string, mcp: string}} rules file and MCP config paths
     */
    getDefaultOutputPath(projectRoot) {
        return {
            instructions: path.join(projectRoot, 'AGENTS.md'),
            mcp: path.join(projectRoot, 'opencode.json'),
        };
    }
    /**
     * Writes the rules file and opencode.json.
     *
     * Existing opencode.json content is preserved and its "mcp" map is merged
     * with ruler's servers (ruler entries win on name collisions). If the
     * existing file is missing or unparseable, a fresh config is built from
     * ruler's servers alone. The config file is always written, even when
     * there are no MCP servers.
     *
     * @param concatenatedRules - Combined rule text for AGENTS.md.
     * @param projectRoot - Absolute path of the target project.
     * @param rulerMcpJson - Ruler MCP config ({ mcpServers }) or null.
     * @param agentConfig - Optional per-agent output-path overrides.
     */
    async applyRulerConfig(concatenatedRules, projectRoot, rulerMcpJson, agentConfig) {
        const outputPaths = this.getDefaultOutputPath(projectRoot);
        const instructionsPath = path.resolve(projectRoot, agentConfig?.outputPathInstructions ?? outputPaths['instructions']);
        const mcpPath = path.resolve(projectRoot, agentConfig?.outputPathConfig ?? outputPaths['mcp']);
        // Fix: create parent directories first so custom (nested) output paths
        // don't make writeFile fail with ENOENT.
        await fs.mkdir(path.dirname(instructionsPath), { recursive: true });
        await fs.mkdir(path.dirname(mcpPath), { recursive: true });
        await fs.writeFile(instructionsPath, concatenatedRules);
        // Create OpenCode config with schema and MCP configuration.
        let finalMcpConfig = {
            $schema: 'https://opencode.ai/config.json',
            mcp: {},
        };
        try {
            const existingMcpConfig = JSON.parse(await fs.readFile(mcpPath, 'utf-8'));
            if (existingMcpConfig && typeof existingMcpConfig === 'object') {
                // Preserve all existing keys; merge server maps with ruler winning.
                finalMcpConfig = {
                    $schema: 'https://opencode.ai/config.json',
                    ...existingMcpConfig,
                    mcp: {
                        ...(existingMcpConfig.mcp || {}),
                        ...(rulerMcpJson?.mcpServers ?? {}),
                    },
                };
            }
            else if (rulerMcpJson) {
                // Existing file parsed to a non-object (e.g. a bare number/string).
                finalMcpConfig = {
                    $schema: 'https://opencode.ai/config.json',
                    mcp: (rulerMcpJson?.mcpServers ?? {}),
                };
            }
        }
        catch {
            // Missing or invalid JSON config: start from ruler's servers alone.
            if (rulerMcpJson) {
                finalMcpConfig = {
                    $schema: 'https://opencode.ai/config.json',
                    mcp: (rulerMcpJson?.mcpServers ?? {}),
                };
            }
        }
        // Always write the config file, even if MCP is empty.
        await fs.writeFile(mcpPath, JSON.stringify(finalMcpConfig, null, 2));
    }
    /** Stdio MCP transport is supported. */
    supportsMcpStdio() {
        return true;
    }
    /** Remote MCP transport is supported. */
    supportsMcpRemote() {
        return true;
    }
    /** Per-server MCP timeouts are supported. */
    supportsMcpTimeout() {
        return true;
    }
    /** Native skills are supported. */
    supportsNativeSkills() {
        return true;
    }
}
|
||||
exports.OpenCodeAgent = OpenCodeAgent;
|
||||
+56
@@ -0,0 +1,56 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.OpenHandsAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const AbstractAgent_1 = require("./AbstractAgent");
|
||||
/**
 * Adapter for Open Hands.
 *
 * Rules are written to `.openhands/microagents/repo.md`.
 */
class OpenHandsAgent extends AbstractAgent_1.AbstractAgent {
    /** Identifier used to refer to this agent in ruler configuration. */
    getIdentifier() {
        return 'openhands';
    }
    /** Display name shown to users. */
    getName() {
        return 'Open Hands';
    }
    /** Microagent rules file lives under `.openhands/microagents/`. */
    getDefaultOutputPath(projectRoot) {
        return path.join(projectRoot, '.openhands', 'microagents', 'repo.md');
    }
    /** Stdio MCP transport is supported. */
    supportsMcpStdio() {
        return true;
    }
    /** Remote MCP transport is supported. */
    supportsMcpRemote() {
        return true;
    }
}
|
||||
exports.OpenHandsAgent = OpenHandsAgent;
|
||||
+19
@@ -0,0 +1,19 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.PiAgent = void 0;
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
/**
 * Adapter for the Pi Coding Agent.
 *
 * Inherits the idempotent AGENTS.md behavior from AgentsMdAgent.
 */
class PiAgent extends AgentsMdAgent_1.AgentsMdAgent {
    /** Identifier used to refer to this agent in ruler configuration. */
    getIdentifier() {
        return 'pi';
    }
    /** Display name shown to users. */
    getName() {
        return 'Pi Coding Agent';
    }
    /** Native skills are supported. */
    supportsNativeSkills() {
        return true;
    }
}
|
||||
exports.PiAgent = PiAgent;
|
||||
+82
@@ -0,0 +1,82 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.QwenCodeAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const fs_1 = require("fs");
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
/**
 * Qwen Code agent adapter.
 *
 * Writes rules to AGENTS.md (via AgentsMdAgent) and points the Qwen CLI at
 * that file by setting `contextFileName` in .qwen/settings.json.
 */
class QwenCodeAgent extends AgentsMdAgent_1.AgentsMdAgent {
    /** Identifier used to refer to this agent in ruler configuration. */
    getIdentifier() {
        return 'qwen';
    }
    /** Display name shown to users. */
    getName() {
        return 'Qwen Code';
    }
    async applyRulerConfig(concatenatedRules, projectRoot, _rulerMcpJson, agentConfig) {
        // Delegate the idempotent AGENTS.md write to the base class.
        await super.applyRulerConfig(concatenatedRules, projectRoot, null, {
            outputPath: agentConfig?.outputPath,
        });
        // Merge contextFileName into .qwen/settings.json, preserving any
        // other keys the user already has there.
        const settingsPath = path.join(projectRoot, '.qwen', 'settings.json');
        let currentSettings = {};
        try {
            currentSettings = JSON.parse(await fs_1.promises.readFile(settingsPath, 'utf8'));
        }
        catch (err) {
            // A missing settings file is fine; anything else is a real error.
            if (err.code !== 'ENOENT') {
                throw err;
            }
        }
        const mergedSettings = {
            ...currentSettings,
            contextFileName: 'AGENTS.md',
        };
        await fs_1.promises.mkdir(path.dirname(settingsPath), { recursive: true });
        await fs_1.promises.writeFile(settingsPath, JSON.stringify(mergedSettings, null, 2));
    }
    // MCP merging for Qwen Code uses the "mcpServers" key (.qwen/settings.json).
    getMcpServerKey() {
        return 'mcpServers';
    }
    /** Stdio MCP transport is supported. */
    supportsMcpStdio() {
        return true;
    }
    /** Remote MCP transport is supported. */
    supportsMcpRemote() {
        return true;
    }
}
|
||||
exports.QwenCodeAgent = QwenCodeAgent;
|
||||
+142
@@ -0,0 +1,142 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.RooCodeAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const fs_1 = require("fs");
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
const FileSystemUtils_1 = require("../core/FileSystemUtils");
|
||||
/**
|
||||
* Agent for RooCode that writes to AGENTS.md and generates .roo/mcp.json
|
||||
* with project-level MCP server configuration.
|
||||
*/
|
||||
class RooCodeAgent {
|
||||
    constructor() {
        // Compose (rather than extend) AgentsMdAgent to reuse its idempotent
        // AGENTS.md write while handling .roo/mcp.json separately.
        this.agentsMdAgent = new AgentsMdAgent_1.AgentsMdAgent();
    }
|
||||
    /** Identifier used to refer to this agent in ruler configuration. */
    getIdentifier() {
        return 'roo';
    }
|
||||
    /** Display name shown to users. */
    getName() {
        return 'RooCode';
    }
|
||||
    /**
     * Default output locations: instructions in AGENTS.md at the project
     * root, MCP servers in .roo/mcp.json.
     */
    getDefaultOutputPath(projectRoot) {
        return {
            instructions: path.join(projectRoot, 'AGENTS.md'),
            mcp: path.join(projectRoot, '.roo', 'mcp.json'),
        };
    }
|
||||
async applyRulerConfig(concatenatedRules, projectRoot, rulerMcpJson, agentConfig, backup = true) {
|
||||
// First perform idempotent AGENTS.md write via composed AgentsMdAgent
|
||||
await this.agentsMdAgent.applyRulerConfig(concatenatedRules, projectRoot, null, {
|
||||
// Preserve explicit outputPath precedence semantics if provided.
|
||||
outputPath: agentConfig?.outputPath ||
|
||||
agentConfig?.outputPathInstructions ||
|
||||
undefined,
|
||||
}, backup);
|
||||
// Now handle .roo/mcp.json configuration
|
||||
const outputPaths = this.getDefaultOutputPath(projectRoot);
|
||||
const mcpPath = path.resolve(projectRoot, agentConfig?.outputPathConfig ?? outputPaths['mcp']);
|
||||
await (0, FileSystemUtils_1.ensureDirExists)(path.dirname(mcpPath));
|
||||
// Create base structure with mcpServers
|
||||
let finalMcpConfig = {
|
||||
mcpServers: {},
|
||||
};
|
||||
// Try to read existing .roo/mcp.json
|
||||
let existingConfig = {};
|
||||
try {
|
||||
const existingContent = await fs_1.promises.readFile(mcpPath, 'utf-8');
|
||||
const parsed = JSON.parse(existingContent);
|
||||
if (parsed && typeof parsed === 'object') {
|
||||
existingConfig = parsed;
|
||||
}
|
||||
}
|
||||
catch {
|
||||
// File doesn't exist or invalid JSON - start fresh
|
||||
existingConfig = {};
|
||||
}
|
||||
// Merge MCP servers if we have ruler config
|
||||
if (rulerMcpJson?.mcpServers) {
|
||||
const existingServers = existingConfig.mcpServers || {};
|
||||
const newServers = rulerMcpJson.mcpServers;
|
||||
// Shallow merge: new servers override existing with same name
|
||||
finalMcpConfig = {
|
||||
mcpServers: {
|
||||
...existingServers,
|
||||
...newServers,
|
||||
},
|
||||
};
|
||||
}
|
||||
else if (existingConfig.mcpServers) {
|
||||
// Keep existing servers if no new ones to add
|
||||
finalMcpConfig = {
|
||||
mcpServers: existingConfig.mcpServers,
|
||||
};
|
||||
}
|
||||
// If neither condition is met, finalMcpConfig remains { mcpServers: {} }
|
||||
// Write the config file with pretty JSON (2 spaces)
|
||||
const newContent = JSON.stringify(finalMcpConfig, null, 2);
|
||||
// Check if content has changed for idempotency
|
||||
let existingContent = null;
|
||||
try {
|
||||
existingContent = await fs_1.promises.readFile(mcpPath, 'utf8');
|
||||
}
|
||||
catch {
|
||||
existingContent = null;
|
||||
}
|
||||
if (existingContent !== null && existingContent === newContent) {
|
||||
// No change; skip backup/write for idempotency
|
||||
return;
|
||||
}
|
||||
// Backup (only if file existed and backup is enabled) then write new content
|
||||
if (backup) {
|
||||
await (0, FileSystemUtils_1.backupFile)(mcpPath);
|
||||
}
|
||||
await (0, FileSystemUtils_1.writeGeneratedFile)(mcpPath, newContent);
|
||||
}
|
||||
supportsMcpStdio() {
|
||||
return true;
|
||||
}
|
||||
supportsMcpRemote() {
|
||||
return true;
|
||||
}
|
||||
getMcpServerKey() {
|
||||
return 'mcpServers';
|
||||
}
|
||||
supportsNativeSkills() {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
exports.RooCodeAgent = RooCodeAgent;
|
||||
+54
@@ -0,0 +1,54 @@
|
||||
"use strict";
// TypeScript-emitted CommonJS interop helpers (generated code).
// Re-exports property `k` of module `m` on target `o` (renamed to `k2` when
// given), preferring an accessor-based live binding when descriptors allow.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches the `default` property to an ES-module-style namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Wraps a CommonJS module as a namespace object: copies every own property
// except `default`, then points `default` at the module itself.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        // Self-replacing: picks the enumeration strategy on first call.
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.TraeAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const AbstractAgent_1 = require("./AbstractAgent");
|
||||
/**
 * Trae AI agent adapter.
 * Generates project_rules.md configuration file.
 */
class TraeAgent extends AbstractAgent_1.AbstractAgent {
    /** Stable identifier used in ruler.toml and on the CLI. */
    getIdentifier() { return 'trae'; }
    /** Display name shown to users. */
    getName() { return 'Trae AI'; }
    /** Rules file lives at .trae/rules/project_rules.md under the project root. */
    getDefaultOutputPath(projectRoot) { return path.join(projectRoot, '.trae', 'rules', 'project_rules.md'); }
}
|
||||
exports.TraeAgent = TraeAgent;
|
||||
+61
@@ -0,0 +1,61 @@
|
||||
"use strict";
// TypeScript-emitted CommonJS interop helpers (generated code).
// Re-exports property `k` of module `m` on target `o` (renamed to `k2` when
// given), preferring an accessor-based live binding when descriptors allow.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches the `default` property to an ES-module-style namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Wraps a CommonJS module as a namespace object: copies every own property
// except `default`, then points `default` at the module itself.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        // Self-replacing: picks the enumeration strategy on first call.
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.WarpAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const AbstractAgent_1 = require("./AbstractAgent");
|
||||
/**
 * Warp Agent Mode adapter.
 * Generates WARP.md configuration file in the project root.
 */
class WarpAgent extends AbstractAgent_1.AbstractAgent {
    /** Stable identifier used in ruler.toml and on the CLI. */
    getIdentifier() { return 'warp'; }
    /** Display name shown to users. */
    getName() { return 'Warp'; }
    /** Instructions are written to WARP.md directly under the project root. */
    getDefaultOutputPath(projectRoot) { return path.join(projectRoot, 'WARP.md'); }
    /** Warp does not support MCP servers (stdio transport). */
    supportsMcpStdio() { return false; }
    /** Warp does not support MCP servers (remote transport). */
    supportsMcpRemote() { return false; }
}
|
||||
exports.WarpAgent = WarpAgent;
|
||||
+30
@@ -0,0 +1,30 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.WindsurfAgent = void 0;
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
/**
 * Windsurf agent adapter.
 * Now uses AGENTS.md format like other agents.
 */
class WindsurfAgent extends AgentsMdAgent_1.AgentsMdAgent {
    /** Stable identifier used in ruler.toml and on the CLI. */
    getIdentifier() { return 'windsurf'; }
    /** Display name shown to users. */
    getName() { return 'Windsurf'; }
    /** Windsurf stores MCP servers under the standard "mcpServers" key. */
    getMcpServerKey() { return 'mcpServers'; }
    /** Local stdio MCP servers are supported. */
    supportsMcpStdio() { return true; }
    /** Remote (HTTP) MCP servers are supported. */
    supportsMcpRemote() { return true; }
    /** Skills are natively supported. */
    supportsNativeSkills() { return true; }
}
|
||||
exports.WindsurfAgent = WindsurfAgent;
|
||||
+132
@@ -0,0 +1,132 @@
|
||||
"use strict";
// TypeScript-emitted CommonJS interop helpers (generated code).
// Re-exports property `k` of module `m` on target `o` (renamed to `k2` when
// given), preferring an accessor-based live binding when descriptors allow.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches the `default` property to an ES-module-style namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Wraps a CommonJS module as a namespace object: copies every own property
// except `default`, then points `default` at the module itself.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        // Self-replacing: picks the enumeration strategy on first call.
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.ZedAgent = void 0;
|
||||
const path = __importStar(require("path"));
|
||||
const fs_1 = require("fs");
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
/**
 * Zed editor agent adapter.
 * Inherits from AgentsMdAgent to write instructions to AGENTS.md and handles
 * MCP server configuration in .zed/settings.json at the project root.
 */
class ZedAgent extends AgentsMdAgent_1.AgentsMdAgent {
    /** Stable identifier used in ruler.toml and on the CLI. */
    getIdentifier() {
        return 'zed';
    }
    /** Display name shown to users. */
    getName() {
        return 'Zed';
    }
    /**
     * Writes AGENTS.md (via the base class) and, when MCP is enabled and a
     * ruler MCP config is supplied, merges server definitions into
     * .zed/settings.json under the "context_servers" key.
     * @param {string} concatenatedRules - Combined rule markdown.
     * @param {string} projectRoot - Project root directory.
     * @param {{mcpServers?: object}|null} rulerMcpJson - Ruler MCP config, if any.
     * @param {object} [agentConfig] - Overrides (outputPath, mcp.enabled, mcp.strategy).
     * NOTE(review): unlike RooCodeAgent.applyRulerConfig, this override takes
     * no `backup` parameter and never backs up .zed/settings.json — confirm
     * this asymmetry is intended.
     */
    async applyRulerConfig(concatenatedRules, projectRoot, rulerMcpJson, agentConfig) {
        // First, perform idempotent AGENTS.md write via base class
        await super.applyRulerConfig(concatenatedRules, projectRoot, null, {
            outputPath: agentConfig?.outputPath,
        });
        // Handle MCP server configuration if enabled and provided
        const mcpEnabled = agentConfig?.mcp?.enabled ?? true;
        if (mcpEnabled && rulerMcpJson) {
            const zedSettingsPath = path.join(projectRoot, '.zed', 'settings.json');
            // Read existing settings
            let existingSettings = {};
            try {
                const content = await fs_1.promises.readFile(zedSettingsPath, 'utf8');
                existingSettings = JSON.parse(content);
            }
            catch (error) {
                // Only a missing file is tolerated; invalid JSON propagates as a
                // SyntaxError (no `code`) rather than silently clobbering settings.
                if (error.code !== 'ENOENT') {
                    throw error;
                }
                // File doesn't exist, use empty settings
            }
            // Get the merge strategy
            const strategy = agentConfig?.mcp?.strategy ?? 'merge';
            // Handle merging based on strategy
            let mergedSettings;
            if (strategy === 'overwrite') {
                // For overwrite, preserve all existing settings except MCP servers
                mergedSettings = { ...existingSettings };
                // Extract incoming MCP servers and transform them for Zed format
                const incomingServers = rulerMcpJson.mcpServers || {};
                const transformedServers = {};
                for (const [serverName, serverConfig] of Object.entries(incomingServers)) {
                    transformedServers[serverName] = this.transformMcpServerForZed(serverConfig);
                }
                // Replace MCP servers completely
                mergedSettings[this.getMcpServerKey()] = transformedServers;
            }
            else {
                // For merge strategy, preserve all existing settings
                const baseServers = existingSettings[this.getMcpServerKey()] || {};
                const incomingServers = rulerMcpJson.mcpServers || {};
                // Transform incoming servers for Zed format
                const transformedIncomingServers = {};
                for (const [serverName, serverConfig] of Object.entries(incomingServers)) {
                    transformedIncomingServers[serverName] =
                        this.transformMcpServerForZed(serverConfig);
                }
                // Incoming servers win on name collisions.
                const mergedServers = { ...baseServers, ...transformedIncomingServers };
                mergedSettings = {
                    ...existingSettings,
                    [this.getMcpServerKey()]: mergedServers,
                };
            }
            // Write updated settings (unconditionally — no idempotency check here).
            await fs_1.promises.mkdir(path.dirname(zedSettingsPath), { recursive: true });
            await fs_1.promises.writeFile(zedSettingsPath, JSON.stringify(mergedSettings, null, 2));
        }
    }
    /** Zed stores MCP servers under "context_servers" in settings.json. */
    getMcpServerKey() {
        return 'context_servers';
    }
    /** Local stdio MCP servers are supported. */
    supportsMcpStdio() {
        return true;
    }
    /** Remote (HTTP) MCP servers are supported. */
    supportsMcpRemote() {
        return true;
    }
    /**
     * Transform MCP server configuration from ruler format to Zed format.
     * Converts "type": "stdio" to "source": "custom" and preserves other fields.
     */
    transformMcpServerForZed(rulerServer) {
        const transformedServer = { ...rulerServer };
        // Remove "type" field if present
        delete transformedServer.type;
        // Add "source": "custom" as required by Zed
        transformedServer.source = 'custom';
        return transformedServer;
    }
}
|
||||
exports.ZedAgent = ZedAgent;
|
||||
+37
@@ -0,0 +1,37 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getAgentOutputPaths = getAgentOutputPaths;
|
||||
/**
 * Gets all output paths for an agent, taking into account any config overrides.
 * @param {object} agent - Agent adapter exposing getDefaultOutputPath().
 * @param {string} projectRoot - Project root directory.
 * @param {object} [agentConfig] - Optional per-agent overrides.
 * @returns {string[]} Resolved output paths, instructions first.
 */
function getAgentOutputPaths(agent, projectRoot, agentConfig) {
    const defaults = agent.getDefaultOutputPath(projectRoot);
    // Single-path agents (most of them): only the generic override applies.
    if (typeof defaults === 'string') {
        return [agentConfig?.outputPath ?? defaults];
    }
    // Multi-path agents (e.g. AiderAgent): instructions and config each have a
    // dedicated override; any additional named paths pass through unchanged.
    const resolved = [];
    if ('instructions' in defaults) {
        resolved.push(agentConfig?.outputPathInstructions ?? defaults.instructions);
    }
    if ('config' in defaults) {
        resolved.push(agentConfig?.outputPathConfig ?? defaults.config);
    }
    for (const [kind, defaultPath] of Object.entries(defaults)) {
        if (kind !== 'instructions' && kind !== 'config') {
            resolved.push(defaultPath);
        }
    }
    return resolved;
}
|
||||
+87
@@ -0,0 +1,87 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.allAgents = exports.AbstractAgent = void 0;
|
||||
exports.getAgentIdentifiersForCliHelp = getAgentIdentifiersForCliHelp;
|
||||
const AbstractAgent_1 = require("./AbstractAgent");
|
||||
Object.defineProperty(exports, "AbstractAgent", { enumerable: true, get: function () { return AbstractAgent_1.AbstractAgent; } });
|
||||
const CopilotAgent_1 = require("./CopilotAgent");
|
||||
const ClaudeAgent_1 = require("./ClaudeAgent");
|
||||
const CodexCliAgent_1 = require("./CodexCliAgent");
|
||||
const CursorAgent_1 = require("./CursorAgent");
|
||||
const WindsurfAgent_1 = require("./WindsurfAgent");
|
||||
const ClineAgent_1 = require("./ClineAgent");
|
||||
const AiderAgent_1 = require("./AiderAgent");
|
||||
const FirebaseAgent_1 = require("./FirebaseAgent");
|
||||
const OpenHandsAgent_1 = require("./OpenHandsAgent");
|
||||
const GeminiCliAgent_1 = require("./GeminiCliAgent");
|
||||
const JulesAgent_1 = require("./JulesAgent");
|
||||
const JunieAgent_1 = require("./JunieAgent");
|
||||
const AugmentCodeAgent_1 = require("./AugmentCodeAgent");
|
||||
const KiloCodeAgent_1 = require("./KiloCodeAgent");
|
||||
const OpenCodeAgent_1 = require("./OpenCodeAgent");
|
||||
const CrushAgent_1 = require("./CrushAgent");
|
||||
const GooseAgent_1 = require("./GooseAgent");
|
||||
const AmpAgent_1 = require("./AmpAgent");
|
||||
const ZedAgent_1 = require("./ZedAgent");
|
||||
const AgentsMdAgent_1 = require("./AgentsMdAgent");
|
||||
const QwenCodeAgent_1 = require("./QwenCodeAgent");
|
||||
const KiroAgent_1 = require("./KiroAgent");
|
||||
const WarpAgent_1 = require("./WarpAgent");
|
||||
const RooCodeAgent_1 = require("./RooCodeAgent");
|
||||
const TraeAgent_1 = require("./TraeAgent");
|
||||
const AmazonQCliAgent_1 = require("./AmazonQCliAgent");
|
||||
const FirebenderAgent_1 = require("./FirebenderAgent");
|
||||
const FactoryDroidAgent_1 = require("./FactoryDroidAgent");
|
||||
const AntigravityAgent_1 = require("./AntigravityAgent");
|
||||
const MistralVibeAgent_1 = require("./MistralVibeAgent");
|
||||
const PiAgent_1 = require("./PiAgent");
|
||||
const JetBrainsAiAssistantAgent_1 = require("./JetBrainsAiAssistantAgent");
|
||||
// Registry of every supported agent adapter, one instance each. This array
// order is the processing order; CLI help text sorts identifiers separately
// (see getAgentIdentifiersForCliHelp below).
exports.allAgents = [
    new CopilotAgent_1.CopilotAgent(),
    new ClaudeAgent_1.ClaudeAgent(),
    new CodexCliAgent_1.CodexCliAgent(),
    new CursorAgent_1.CursorAgent(),
    new WindsurfAgent_1.WindsurfAgent(),
    new ClineAgent_1.ClineAgent(),
    new AiderAgent_1.AiderAgent(),
    new FirebaseAgent_1.FirebaseAgent(),
    new OpenHandsAgent_1.OpenHandsAgent(),
    new GeminiCliAgent_1.GeminiCliAgent(),
    new JulesAgent_1.JulesAgent(),
    new JunieAgent_1.JunieAgent(),
    new AugmentCodeAgent_1.AugmentCodeAgent(),
    new KiloCodeAgent_1.KiloCodeAgent(),
    new OpenCodeAgent_1.OpenCodeAgent(),
    new GooseAgent_1.GooseAgent(),
    new CrushAgent_1.CrushAgent(),
    new AmpAgent_1.AmpAgent(),
    new ZedAgent_1.ZedAgent(),
    new QwenCodeAgent_1.QwenCodeAgent(),
    new AgentsMdAgent_1.AgentsMdAgent(),
    new KiroAgent_1.KiroAgent(),
    new WarpAgent_1.WarpAgent(),
    new RooCodeAgent_1.RooCodeAgent(),
    new TraeAgent_1.TraeAgent(),
    new AmazonQCliAgent_1.AmazonQCliAgent(),
    new FirebenderAgent_1.FirebenderAgent(),
    new FactoryDroidAgent_1.FactoryDroidAgent(),
    new AntigravityAgent_1.AntigravityAgent(),
    new MistralVibeAgent_1.MistralVibeAgent(),
    new PiAgent_1.PiAgent(),
    new JetBrainsAiAssistantAgent_1.JetBrainsAiAssistantAgent(),
];
|
||||
/**
 * Generates a comma-separated list of agent identifiers for CLI help text.
 * Returns identifiers in alphabetical order, with 'agentsmd' always first.
 */
function getAgentIdentifiersForCliHelp() {
    const sorted = exports.allAgents
        .map((agent) => agent.getIdentifier())
        .sort();
    // Force 'agentsmd' to the front even if another identifier sorts before it.
    const agentsMdIndex = sorted.indexOf('agentsmd');
    if (agentsMdIndex > 0) {
        sorted.unshift(...sorted.splice(agentsMdIndex, 1));
    }
    return sorted.join(', ');
}
|
||||
+136
@@ -0,0 +1,136 @@
|
||||
"use strict";
|
||||
var __importDefault = (this && this.__importDefault) || function (mod) {
|
||||
return (mod && mod.__esModule) ? mod : { "default": mod };
|
||||
};
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.run = run;
|
||||
const yargs_1 = __importDefault(require("yargs"));
|
||||
const helpers_1 = require("yargs/helpers");
|
||||
const handlers_1 = require("./handlers");
|
||||
const index_1 = require("../agents/index");
|
||||
/**
 * Sets up and parses CLI commands.
 * Declares three commands — apply, init, revert — on a single yargs chain,
 * then parses process.argv. Handlers live in ./handlers.
 */
function run() {
    (0, yargs_1.default)((0, helpers_1.hideBin)(process.argv))
        .scriptName('ruler')
        .usage('$0 <command> [options]')
        // `ruler apply`: write instructions/MCP config out to selected agents.
        .command('apply', 'Apply ruler configurations to supported AI agents', (y) => {
        return y
            .option('project-root', {
            type: 'string',
            description: 'Project root directory',
            default: process.cwd(),
        })
            .option('agents', {
            type: 'string',
            description: `Comma-separated list of agent identifiers: ${(0, index_1.getAgentIdentifiersForCliHelp)()}`,
        })
            .option('config', {
            type: 'string',
            description: 'Path to TOML configuration file',
        })
            .option('mcp', {
            type: 'boolean',
            description: 'Enable or disable applying MCP server config',
            default: true,
        })
            .alias('mcp', 'with-mcp')
            .option('mcp-overwrite', {
            type: 'boolean',
            description: 'Replace (not merge) the native MCP config(s)',
            default: false,
        })
            // No default: leaving these undefined lets TOML config decide.
            .option('gitignore', {
            type: 'boolean',
            description: 'Enable/disable automatic .gitignore updates (default: enabled)',
        })
            .option('gitignore-local', {
            type: 'boolean',
            description: 'Write generated ignore entries to .git/info/exclude instead of .gitignore',
        })
            .option('verbose', {
            type: 'boolean',
            description: 'Enable verbose logging',
            default: false,
        })
            .alias('verbose', 'v')
            .option('dry-run', {
            type: 'boolean',
            description: 'Preview changes without writing files',
            default: false,
        })
            .option('local-only', {
            type: 'boolean',
            description: 'Only search for local .ruler directories, ignore global config',
            default: false,
        })
            .option('nested', {
            type: 'boolean',
            description: 'Enable nested rule loading from nested .ruler directories (default: from config or disabled)',
        })
            .option('backup', {
            type: 'boolean',
            description: 'Enable/disable creation of .bak backup files (default: enabled)',
            default: true,
        })
            .option('skills', {
            type: 'boolean',
            description: 'Enable/disable skills support (experimental, default: enabled)',
        });
    }, handlers_1.applyHandler)
        // `ruler init`: scaffold .ruler/ with default AGENTS.md and ruler.toml.
        .command('init', 'Scaffold a .ruler directory with default files', (y) => {
        return y
            .option('project-root', {
            type: 'string',
            description: 'Project root directory',
            default: process.cwd(),
        })
            .option('global', {
            type: 'boolean',
            description: 'Initialize in global config directory (XDG_CONFIG_HOME/ruler)',
            default: false,
        });
    }, handlers_1.initHandler)
        // `ruler revert`: restore agent files from .bak backups.
        .command('revert', 'Revert ruler configurations from supported AI agents', (y) => {
        return y
            .option('project-root', {
            type: 'string',
            description: 'Project root directory',
            default: process.cwd(),
        })
            .option('agents', {
            type: 'string',
            description: `Comma-separated list of agent identifiers: ${(0, index_1.getAgentIdentifiersForCliHelp)()}`,
        })
            .option('config', {
            type: 'string',
            description: 'Path to TOML configuration file',
        })
            .option('keep-backups', {
            type: 'boolean',
            description: 'Keep backup files after revert',
            default: false,
        })
            .option('verbose', {
            type: 'boolean',
            description: 'Enable verbose logging',
            default: false,
        })
            .alias('verbose', 'v')
            .option('dry-run', {
            type: 'boolean',
            description: 'Preview changes without writing files',
            default: false,
        })
            .option('local-only', {
            type: 'boolean',
            description: 'Only search for local .ruler directories, ignore global config',
            default: false,
        });
    }, handlers_1.revertHandler)
        // A command is mandatory; unknown flags/commands are rejected by .strict().
        .demandCommand(1, 'You need to specify a command')
        .help()
        .strict()
        .parse();
}
|
||||
+232
@@ -0,0 +1,232 @@
|
||||
"use strict";
// TypeScript-emitted CommonJS interop helpers (generated code).
// Re-exports property `k` of module `m` on target `o` (renamed to `k2` when
// given), preferring an accessor-based live binding when descriptors allow.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches the `default` property to an ES-module-style namespace object.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Wraps a CommonJS module as a namespace object: copies every own property
// except `default`, then points `default` at the module itself.
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        // Self-replacing: picks the enumeration strategy on first call.
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.applyHandler = applyHandler;
|
||||
exports.initHandler = initHandler;
|
||||
exports.revertHandler = revertHandler;
|
||||
const lib_1 = require("../lib");
|
||||
const revert_1 = require("../revert");
|
||||
const path = __importStar(require("path"));
|
||||
const os = __importStar(require("os"));
|
||||
const fs = __importStar(require("fs/promises"));
|
||||
const constants_1 = require("../constants");
|
||||
const ConfigLoader_1 = require("../core/ConfigLoader");
|
||||
/**
 * Exits the process when the given project root lies inside a .ruler
 * directory: running from there would let ruler clobber its own sources.
 * @param {string} projectRoot - Directory to validate.
 */
function assertNotInsideRulerDir(projectRoot) {
    const segments = path.resolve(projectRoot).split(path.sep);
    if (!segments.includes('.ruler')) {
        return;
    }
    console.error(`${constants_1.ERROR_PREFIX} Cannot run from inside a .ruler directory. Please run from your project root.`);
    process.exit(1);
}
|
||||
/**
 * Handler for the 'apply' command.
 * Collects CLI flags, resolves per-flag precedence (CLI > TOML > default),
 * and delegates to applyAllAgentConfigs. Exits with code 1 on failure.
 * @param {object} argv - Parsed yargs arguments for `ruler apply`.
 */
async function applyHandler(argv) {
    const projectRoot = argv['project-root'];
    assertNotInsideRulerDir(projectRoot);
    const agents = argv.agents
        ? argv.agents.split(',').map((a) => a.trim())
        : undefined;
    const configPath = argv.config;
    const mcpEnabled = argv.mcp;
    const mcpStrategy = argv['mcp-overwrite']
        ? 'overwrite'
        : undefined;
    const verbose = argv.verbose;
    const dryRun = argv['dry-run'];
    const localOnly = argv['local-only'];
    const backup = argv.backup;
    // Determine gitignore preference: CLI > TOML > Default (enabled)
    // yargs handles --no-gitignore by setting gitignore to false
    let gitignorePreference;
    if (argv.gitignore !== undefined) {
        gitignorePreference = argv.gitignore;
    }
    else {
        gitignorePreference = undefined; // Let TOML/default decide
    }
    let gitignoreLocalPreference;
    if (argv['gitignore-local'] !== undefined) {
        gitignoreLocalPreference = argv['gitignore-local'];
    }
    else {
        gitignoreLocalPreference = undefined; // Let TOML/default decide
    }
    // Determine nested preference: CLI > TOML > Default (false).
    // Unlike the flags above, `nested` is resolved here (not downstream), so
    // the TOML config is loaded early when the CLI flag is absent.
    let nested;
    if (argv.nested !== undefined) {
        // CLI explicitly set nested (either --nested or --no-nested)
        nested = argv.nested;
    }
    else {
        // CLI didn't set nested, check TOML configuration
        try {
            const config = await (0, ConfigLoader_1.loadConfig)({
                projectRoot,
                configPath,
            });
            // Use TOML setting if available, otherwise default to false
            nested = config.nested ?? false;
        }
        catch {
            // If config loading fails, use default (false)
            nested = false;
        }
    }
    // Determine skills preference: CLI > TOML > Default (enabled)
    let skillsEnabled;
    if (argv.skills !== undefined) {
        skillsEnabled = argv.skills;
    }
    else {
        skillsEnabled = undefined; // Let config/default decide
    }
    try {
        // NOTE: argument order here is positional and must match the
        // applyAllAgentConfigs signature exactly.
        await (0, lib_1.applyAllAgentConfigs)(projectRoot, agents, configPath, mcpEnabled, mcpStrategy, gitignorePreference, verbose, dryRun, localOnly, nested, backup, skillsEnabled, gitignoreLocalPreference);
        console.log('Ruler apply completed successfully.');
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        console.error(`${constants_1.ERROR_PREFIX} ${message}`);
        process.exit(1);
    }
}
|
||||
/**
 * Handler for the 'init' command.
 * Scaffolds a .ruler directory (project-local, or under XDG_CONFIG_HOME with
 * --global) containing a default AGENTS.md and ruler.toml. Existing files are
 * never overwritten.
 * @param {object} argv - Parsed yargs arguments for `ruler init`.
 */
async function initHandler(argv) {
    const projectRoot = argv['project-root'];
    const isGlobal = argv['global'];
    const rulerDir = isGlobal
        ? path.join(process.env.XDG_CONFIG_HOME || path.join(os.homedir(), '.config'), 'ruler')
        : path.join(projectRoot, '.ruler');
    await fs.mkdir(rulerDir, { recursive: true });
    const instructionsPath = path.join(rulerDir, constants_1.DEFAULT_RULES_FILENAME); // .ruler/AGENTS.md
    const tomlPath = path.join(rulerDir, 'ruler.toml');
    // Small existence probe; fs.access rejects when the path is missing.
    const exists = async (p) => {
        try {
            await fs.access(p);
            return true;
        }
        catch {
            return false;
        }
    };
    const DEFAULT_INSTRUCTIONS = `# AGENTS.md\n\nCentralised AI agent instructions. Add coding guidelines, style guides, and project context here.\n\nRuler concatenates all .md files in this directory (and subdirectories), starting with AGENTS.md (if present), then remaining files in sorted order.\n`;
    const DEFAULT_TOML = `# Ruler Configuration File
# See https://ai.intellectronica.net/ruler for documentation.

# To specify which agents are active by default when --agents is not used,
# uncomment and populate the following line. If omitted, all agents are active.
# default_agents = ["copilot", "claude"]

# Enable nested rule loading from nested .ruler directories
# When enabled, ruler will search for and process .ruler directories throughout the project hierarchy
# nested = false

# [gitignore]
# enabled = true
# local = false # set true to write generated ignores to .git/info/exclude instead

# --- Agent Specific Configurations ---
# You can enable/disable agents and override their default output paths here.
# Use lowercase agent identifiers: aider, amp, claude, cline, codex, copilot, cursor, jetbrains-ai, kilocode, pi, windsurf

# [agents.copilot]
# enabled = true
# output_path = ".github/copilot-instructions.md"

# [agents.aider]
# enabled = true
# output_path_instructions = "AGENTS.md"
# output_path_config = ".aider.conf.yml"

# [agents.gemini-cli]
# enabled = true

# --- MCP Servers ---
# Define Model Context Protocol servers here. Two examples:
# 1. A stdio server (local executable)
# 2. A remote server (HTTP-based)

# [mcp_servers.example_stdio]
# command = "node"
# args = ["scripts/your-mcp-server.js"]
# env = { API_KEY = "replace_me" }

# [mcp_servers.example_remote]
# url = "https://api.example.com/mcp"
# headers = { Authorization = "Bearer REPLACE_ME" }
`;
    if (!(await exists(instructionsPath))) {
        // Create new AGENTS.md regardless of legacy presence.
        await fs.writeFile(instructionsPath, DEFAULT_INSTRUCTIONS);
        console.log(`[ruler] Created ${instructionsPath}`);
    }
    else {
        console.log(`[ruler] ${constants_1.DEFAULT_RULES_FILENAME} already exists, skipping`);
    }
    if (!(await exists(tomlPath))) {
        await fs.writeFile(tomlPath, DEFAULT_TOML);
        console.log(`[ruler] Created ${tomlPath}`);
    }
    else {
        console.log(`[ruler] ruler.toml already exists, skipping`);
    }
}
|
||||
/**
 * Handler for the 'revert' command.
 * Restores agent configuration files from their backups, honouring the
 * same CLI flags used elsewhere (--agents, --config, --keep-backups,
 * --verbose, --dry-run, --local-only). On failure, prints a prefixed
 * error message to stderr and exits the process with code 1.
 */
async function revertHandler(argv) {
    const root = argv['project-root'];
    assertNotInsideRulerDir(root);
    // A comma-separated --agents value becomes an array of trimmed names;
    // when the flag is absent, undefined means "all agents".
    const agentList = argv.agents
        ? argv.agents.split(',').map((name) => name.trim())
        : undefined;
    try {
        await (0, revert_1.revertAllAgentConfigs)(root, agentList, argv.config, argv['keep-backups'], argv.verbose, argv['dry-run'], argv['local-only']);
    }
    catch (err) {
        const message = err instanceof Error ? err.message : String(err);
        console.error(`${constants_1.ERROR_PREFIX} ${message}`);
        process.exit(1);
    }
}
|
||||
+5
@@ -0,0 +1,5 @@
|
||||
#!/usr/bin/env node
// CLI entry point for the ruler executable: loads the command module and
// immediately dispatches to its `run` function. All argument parsing and
// command routing lives in ./commands.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const commands_1 = require("./commands");
// The `(0, fn)()` call form is the TypeScript emit for a namespace-import
// call: it strips the receiver so `this` is undefined inside `run`.
(0, commands_1.run)();
|
||||
+68
@@ -0,0 +1,68 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.SKILL_MD_FILENAME = exports.ANTIGRAVITY_SKILLS_PATH = exports.FACTORY_SKILLS_PATH = exports.WINDSURF_SKILLS_PATH = exports.CURSOR_SKILLS_PATH = exports.JUNIE_SKILLS_PATH = exports.GEMINI_SKILLS_PATH = exports.ROO_SKILLS_PATH = exports.VIBE_SKILLS_PATH = exports.GOOSE_SKILLS_PATH = exports.PI_SKILLS_PATH = exports.OPENCODE_SKILLS_PATH = exports.CODEX_SKILLS_PATH = exports.CLAUDE_SKILLS_PATH = exports.RULER_SKILLS_PATH = exports.SKILLS_DIR = exports.DEFAULT_RULES_FILENAME = exports.ERROR_PREFIX = void 0;
|
||||
exports.actionPrefix = actionPrefix;
|
||||
exports.createRulerError = createRulerError;
|
||||
exports.logVerbose = logVerbose;
|
||||
exports.logInfo = logInfo;
|
||||
exports.logWarn = logWarn;
|
||||
exports.logError = logError;
|
||||
exports.logVerboseInfo = logVerboseInfo;
|
||||
exports.ERROR_PREFIX = '[ruler]';
|
||||
// Centralized default rules filename. Now points to 'AGENTS.md'.
|
||||
// Legacy '.ruler/instructions.md' is still supported as a fallback with a warning.
|
||||
exports.DEFAULT_RULES_FILENAME = 'AGENTS.md';
|
||||
/**
 * Returns the log-line prefix for the current mode:
 * '[ruler:dry-run]' when `dry` is truthy, plain '[ruler]' otherwise.
 */
function actionPrefix(dry) {
    if (dry) {
        return '[ruler:dry-run]';
    }
    return '[ruler]';
}
|
||||
/**
 * Builds an Error whose message carries the standard '[ruler]' prefix
 * and, when `context` is supplied, a trailing '(Context: ...)' suffix.
 */
function createRulerError(message, context) {
    let fullMessage = `${exports.ERROR_PREFIX} ${message}`;
    if (context) {
        fullMessage = `${fullMessage} (Context: ${context})`;
    }
    return new Error(fullMessage);
}
|
||||
/**
 * Emits a '[ruler:verbose]'-prefixed line to stderr, but only when
 * verbose mode is enabled; otherwise a no-op.
 */
function logVerbose(message, isVerbose) {
    if (!isVerbose) {
        return;
    }
    console.error(`[ruler:verbose] ${message}`);
}
|
||||
/**
 * Centralized logging helper with consistent output streams and prefixing:
 * info/verbose output goes to stdout (user-visible progress), while
 * warn/error go to stderr (problems). The prefix reflects dry-run state.
 */
function logInfo(message, dryRun = false) {
    console.log(`${actionPrefix(dryRun)} ${message}`);
}
|
||||
// Warning output: goes to stderr, prefixed per the dry-run state.
function logWarn(message, dryRun = false) {
    console.warn(`${actionPrefix(dryRun)} ${message}`);
}
|
||||
// Error output: goes to stderr, prefixed per the dry-run state.
function logError(message, dryRun = false) {
    console.error(`${actionPrefix(dryRun)} ${message}`);
}
|
||||
// Verbose-gated stdout logging: silent unless isVerbose is set; prefix
// reflects dry-run state when it does log.
function logVerboseInfo(message, isVerbose, dryRun = false) {
    if (!isVerbose) {
        return;
    }
    console.log(`${actionPrefix(dryRun)} ${message}`);
}
|
||||
// Skills-related constants
// Name of the skills subdirectory inside a .ruler directory.
exports.SKILLS_DIR = 'skills';
// Source location for skills managed by ruler, relative to project root.
exports.RULER_SKILLS_PATH = '.ruler/skills';
// Per-agent destination directories for propagated skills (all relative
// to the project root).
exports.CLAUDE_SKILLS_PATH = '.claude/skills';
exports.CODEX_SKILLS_PATH = '.codex/skills';
exports.OPENCODE_SKILLS_PATH = '.opencode/skills';
exports.PI_SKILLS_PATH = '.pi/skills';
exports.GOOSE_SKILLS_PATH = '.agents/skills';
exports.VIBE_SKILLS_PATH = '.vibe/skills';
exports.ROO_SKILLS_PATH = '.roo/skills';
exports.GEMINI_SKILLS_PATH = '.gemini/skills';
exports.JUNIE_SKILLS_PATH = '.junie/skills';
exports.CURSOR_SKILLS_PATH = '.cursor/skills';
exports.WINDSURF_SKILLS_PATH = '.windsurf/skills';
exports.FACTORY_SKILLS_PATH = '.factory/skills';
exports.ANTIGRAVITY_SKILLS_PATH = '.agent/skills';
// Canonical filename of a skill's manifest inside its directory.
exports.SKILL_MD_FILENAME = 'SKILL.md';
|
||||
+229
@@ -0,0 +1,229 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.loadConfig = loadConfig;
|
||||
const fs_1 = require("fs");
|
||||
const path = __importStar(require("path"));
|
||||
const os = __importStar(require("os"));
|
||||
const toml_1 = require("@iarna/toml");
|
||||
const zod_1 = require("zod");
|
||||
const constants_1 = require("../constants");
|
||||
// Schema for an [mcp] table (per-agent or global): optional enable flag
// plus the merge strategy used when combining MCP server definitions.
const mcpConfigSchema = zod_1.z
    .object({
    enabled: zod_1.z.boolean().optional(),
    merge_strategy: zod_1.z.enum(['merge', 'overwrite']).optional(),
})
    .optional();
// Schema for a single [agents.<name>] table: enable flag, output path
// overrides, and an optional nested MCP configuration.
const agentConfigSchema = zod_1.z
    .object({
    enabled: zod_1.z.boolean().optional(),
    output_path: zod_1.z.string().optional(),
    output_path_instructions: zod_1.z.string().optional(),
    output_path_config: zod_1.z.string().optional(),
    mcp: mcpConfigSchema,
})
    .optional();
// Top-level schema for ruler.toml. Every section is optional; loadConfig
// applies defaults for whatever is absent.
const rulerConfigSchema = zod_1.z.object({
    default_agents: zod_1.z.array(zod_1.z.string()).optional(),
    agents: zod_1.z.record(zod_1.z.string(), agentConfigSchema).optional(),
    mcp: zod_1.z
        .object({
        enabled: zod_1.z.boolean().optional(),
        merge_strategy: zod_1.z.enum(['merge', 'overwrite']).optional(),
    })
        .optional(),
    gitignore: zod_1.z
        .object({
        enabled: zod_1.z.boolean().optional(),
        local: zod_1.z.boolean().optional(),
    })
        .optional(),
    skills: zod_1.z
        .object({
        enabled: zod_1.z.boolean().optional(),
    })
        .optional(),
    nested: zod_1.z.boolean().optional(),
});
|
||||
/**
 * Deep-copies a parsed value, keeping only enumerable string keys and
 * thereby dropping all Symbol properties.
 * The @iarna/toml parser attaches Symbol metadata (Symbol(type),
 * Symbol(declared)) to parsed objects, which Zod v4+ validates and
 * rejects as invalid record keys. Rebuilding each object from
 * Object.keys() yields clean data without that metadata.
 */
function stripSymbols(obj) {
    // Primitives (and null) pass through untouched.
    if (obj === null || typeof obj !== 'object') {
        return obj;
    }
    // Arrays are rebuilt element-by-element.
    if (Array.isArray(obj)) {
        return obj.map((item) => stripSymbols(item));
    }
    // Plain objects: keep string keys only, recursing into each value.
    return Object.fromEntries(Object.keys(obj).map((key) => [key, stripSymbols(obj[key])]));
}
|
||||
/**
 * Loads and parses the ruler TOML configuration file, applying defaults.
 * If the file is missing or invalid, returns empty/default config.
 *
 * Resolution order: explicit `configPath` option, then
 * <projectRoot>/.ruler/ruler.toml, then $XDG_CONFIG_HOME/ruler/ruler.toml
 * (falling back to ~/.config). Validation errors (messages containing
 * '[ruler]') are re-thrown; any other read/parse failure degrades to an
 * empty configuration with a warning.
 */
async function loadConfig(options) {
    const { projectRoot, configPath, cliAgents } = options;
    let configFile;
    if (configPath) {
        configFile = path.resolve(configPath);
    }
    else {
        // Try local .ruler/ruler.toml first
        const localConfigFile = path.join(projectRoot, '.ruler', 'ruler.toml');
        try {
            await fs_1.promises.access(localConfigFile);
            configFile = localConfigFile;
        }
        catch {
            // If local config doesn't exist, try global config
            const xdgConfigDir = process.env.XDG_CONFIG_HOME || path.join(os.homedir(), '.config');
            configFile = path.join(xdgConfigDir, 'ruler', 'ruler.toml');
        }
    }
    let raw = {};
    try {
        const text = await fs_1.promises.readFile(configFile, 'utf8');
        // An all-whitespace file parses to an empty config rather than an error.
        const parsed = text.trim() ? (0, toml_1.parse)(text) : {};
        // Strip Symbol properties added by @iarna/toml (required for Zod v4+)
        raw = stripSymbols(parsed);
        // Validate the configuration with zod
        const validationResult = rulerConfigSchema.safeParse(raw);
        if (!validationResult.success) {
            throw (0, constants_1.createRulerError)('Invalid configuration file format', `File: ${configFile}, Errors: ${validationResult.error.issues.map((i) => `${i.path.join('.')}: ${i.message}`).join(', ')}`);
        }
    }
    catch (err) {
        // ENOENT (no config file) is expected and silent; validation errors
        // (identified by the '[ruler]' prefix) are fatal; anything else warns.
        if (err instanceof Error && err.code !== 'ENOENT') {
            if (err.message.includes('[ruler]')) {
                throw err; // Re-throw validation errors
            }
            console.warn(`[ruler] Warning: could not read config file at ${configFile}: ${err.message}`);
        }
        raw = {};
    }
    // default_agents: coerce every entry to a string; undefined when absent.
    const defaultAgents = Array.isArray(raw.default_agents)
        ? raw.default_agents.map((a) => String(a))
        : undefined;
    // [agents.*] tables: convert snake_case TOML keys to camelCase config
    // fields, resolving output paths against the project root.
    const agentsSection = raw.agents && typeof raw.agents === 'object' && !Array.isArray(raw.agents)
        ? raw.agents
        : {};
    const agentConfigs = {};
    for (const [name, section] of Object.entries(agentsSection)) {
        if (section && typeof section === 'object') {
            const sectionObj = section;
            const cfg = {};
            if (typeof sectionObj.enabled === 'boolean') {
                cfg.enabled = sectionObj.enabled;
            }
            if (typeof sectionObj.output_path === 'string') {
                cfg.outputPath = path.resolve(projectRoot, sectionObj.output_path);
            }
            if (typeof sectionObj.output_path_instructions === 'string') {
                cfg.outputPathInstructions = path.resolve(projectRoot, sectionObj.output_path_instructions);
            }
            if (typeof sectionObj.output_path_config === 'string') {
                cfg.outputPathConfig = path.resolve(projectRoot, sectionObj.output_path_config);
            }
            if (sectionObj.mcp && typeof sectionObj.mcp === 'object') {
                const m = sectionObj.mcp;
                const mcpCfg = {};
                if (typeof m.enabled === 'boolean') {
                    mcpCfg.enabled = m.enabled;
                }
                if (typeof m.merge_strategy === 'string') {
                    const ms = m.merge_strategy;
                    // Only the two known strategies are accepted; others are dropped.
                    if (ms === 'merge' || ms === 'overwrite') {
                        mcpCfg.strategy = ms;
                    }
                }
                cfg.mcp = mcpCfg;
            }
            agentConfigs[name] = cfg;
        }
    }
    // Global [mcp] section.
    const rawMcpSection = raw.mcp && typeof raw.mcp === 'object' && !Array.isArray(raw.mcp)
        ? raw.mcp
        : {};
    const globalMcpConfig = {};
    if (typeof rawMcpSection.enabled === 'boolean') {
        globalMcpConfig.enabled = rawMcpSection.enabled;
    }
    if (typeof rawMcpSection.merge_strategy === 'string') {
        const strat = rawMcpSection.merge_strategy;
        if (strat === 'merge' || strat === 'overwrite') {
            globalMcpConfig.strategy = strat;
        }
    }
    // [gitignore] section.
    const rawGitignoreSection = raw.gitignore &&
        typeof raw.gitignore === 'object' &&
        !Array.isArray(raw.gitignore)
        ? raw.gitignore
        : {};
    const gitignoreConfig = {};
    if (typeof rawGitignoreSection.enabled === 'boolean') {
        gitignoreConfig.enabled = rawGitignoreSection.enabled;
    }
    if (typeof rawGitignoreSection.local === 'boolean') {
        gitignoreConfig.local = rawGitignoreSection.local;
    }
    // [skills] section.
    const rawSkillsSection = raw.skills && typeof raw.skills === 'object' && !Array.isArray(raw.skills)
        ? raw.skills
        : {};
    const skillsConfig = {};
    if (typeof rawSkillsSection.enabled === 'boolean') {
        skillsConfig.enabled = rawSkillsSection.enabled;
    }
    // `nestedDefined` lets callers distinguish an explicit `nested = false`
    // from the key being absent (default false either way).
    const nestedDefined = typeof raw.nested === 'boolean';
    const nested = nestedDefined ? raw.nested : false;
    return {
        defaultAgents,
        agentConfigs,
        cliAgents,
        mcp: globalMcpConfig,
        gitignore: gitignoreConfig,
        skills: skillsConfig,
        nested,
        nestedDefined,
    };
}
|
||||
+290
@@ -0,0 +1,290 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.findRulerDir = findRulerDir;
|
||||
exports.readMarkdownFiles = readMarkdownFiles;
|
||||
exports.writeGeneratedFile = writeGeneratedFile;
|
||||
exports.backupFile = backupFile;
|
||||
exports.ensureDirExists = ensureDirExists;
|
||||
exports.findGlobalRulerDir = findGlobalRulerDir;
|
||||
exports.findAllRulerDirs = findAllRulerDirs;
|
||||
const fs_1 = require("fs");
|
||||
const path = __importStar(require("path"));
|
||||
const os = __importStar(require("os"));
|
||||
const constants_1 = require("../constants");
|
||||
/**
 * Resolves the XDG config directory, preferring $XDG_CONFIG_HOME and
 * falling back to ~/.config when the variable is unset or empty.
 */
function getXdgConfigDir() {
    const fromEnv = process.env.XDG_CONFIG_HOME;
    return fromEnv || path.join(os.homedir(), '.config');
}
|
||||
/**
 * Searches upwards from startPath to find a directory named .ruler.
 * If not found locally and checkGlobal is true, checks for global config
 * at XDG_CONFIG_HOME/ruler.
 * Returns the path to the .ruler directory, or null if not found.
 */
async function findRulerDir(startPath, checkGlobal = true) {
    // First, search upwards from startPath for a local .ruler directory.
    let current = startPath;
    while (current) {
        const candidate = path.join(current, '.ruler');
        try {
            const stat = await fs_1.promises.stat(candidate);
            if (stat.isDirectory()) {
                return candidate;
            }
        }
        catch {
            // ignore errors when checking for .ruler directory
        }
        const parent = path.dirname(current);
        if (parent === current) {
            break; // reached the filesystem root
        }
        current = parent;
    }
    // If no local .ruler found and checkGlobal is true, check global config directory
    if (checkGlobal) {
        const globalConfigDir = path.join(getXdgConfigDir(), 'ruler');
        try {
            const stat = await fs_1.promises.stat(globalConfigDir);
            if (stat.isDirectory()) {
                return globalConfigDir;
            }
        }
        catch (err) {
            // Fix: a missing global config dir (ENOENT) is the common case and
            // not an error — the old code logged it on every lookup, spamming
            // stderr and diverging from findGlobalRulerDir's silent miss.
            // Only surface unexpected failures (permissions, I/O errors).
            if (err.code !== 'ENOENT') {
                console.error(`[ruler] Error checking global config directory ${globalConfigDir}:`, err);
            }
        }
    }
    return null;
}
|
||||
/**
 * Recursively reads all Markdown (.md) files in rulerDir, returning their paths and contents.
 * Files are sorted alphabetically by path.
 *
 * Ordering: a repo-root AGENTS.md (outside .ruler) comes first when present
 * and not judged to be ruler-generated output, then .ruler/AGENTS.md (or the
 * legacy instructions.md), then all remaining .md files sorted by path.
 * The .ruler/skills subtree is excluded; symlinks are resolved and broken
 * ones skipped.
 */
async function readMarkdownFiles(rulerDir) {
    const mdFiles = [];
    // Gather all markdown files (recursive) first
    async function walk(dir) {
        const entries = await fs_1.promises.readdir(dir, { withFileTypes: true });
        for (const entry of entries) {
            const fullPath = path.join(dir, entry.name);
            // Resolve symlinks to determine actual type
            let isDir = entry.isDirectory();
            let isFile = entry.isFile();
            if (entry.isSymbolicLink()) {
                try {
                    const stat = await fs_1.promises.stat(fullPath);
                    isDir = stat.isDirectory();
                    isFile = stat.isFile();
                }
                catch {
                    continue; // skip broken symlinks
                }
            }
            if (isDir) {
                // Skip .ruler/skills; skills are propagated separately and should not be concatenated
                const relativeFromRoot = path.relative(rulerDir, fullPath);
                const isSkillsDir = relativeFromRoot === constants_1.SKILLS_DIR ||
                    relativeFromRoot.startsWith(`${constants_1.SKILLS_DIR}${path.sep}`);
                if (isSkillsDir) {
                    continue;
                }
                await walk(fullPath);
            }
            else if (isFile && entry.name.endsWith('.md')) {
                const content = await fs_1.promises.readFile(fullPath, 'utf8');
                mdFiles.push({ path: fullPath, content });
            }
        }
    }
    await walk(rulerDir);
    // Prioritisation logic:
    // 1. Prefer top-level AGENTS.md if present.
    // 2. If AGENTS.md absent but legacy instructions.md present, use it (no longer emits a warning; legacy accepted silently).
    // 3. Include any remaining .md files (excluding whichever of the above was used if present) in
    //    sorted order AFTER the preferred primary file so that new concatenation priority starts with AGENTS.md.
    const topLevelAgents = path.join(rulerDir, 'AGENTS.md');
    const topLevelLegacy = path.join(rulerDir, 'instructions.md');
    // Separate primary candidates from others
    let primaryFile = null;
    const others = [];
    for (const f of mdFiles) {
        if (f.path === topLevelAgents) {
            primaryFile = f; // Highest priority
        }
    }
    if (!primaryFile) {
        for (const f of mdFiles) {
            if (f.path === topLevelLegacy) {
                primaryFile = f;
                break;
            }
        }
    }
    for (const f of mdFiles) {
        if (primaryFile && f.path === primaryFile.path)
            continue;
        others.push(f);
    }
    // Sort the remaining others for stable deterministic concatenation order.
    others.sort((a, b) => a.path.localeCompare(b.path));
    let ordered = primaryFile ? [primaryFile, ...others] : others;
    // NEW: Prepend repository root AGENTS.md (outside .ruler) if it exists and is not identical path.
    try {
        const repoRoot = path.dirname(rulerDir); // .ruler parent
        const rootAgentsPath = path.join(repoRoot, 'AGENTS.md');
        if (path.resolve(rootAgentsPath) !== path.resolve(topLevelAgents)) {
            const stat = await fs_1.promises.stat(rootAgentsPath);
            if (stat.isFile()) {
                const content = await fs_1.promises.readFile(rootAgentsPath, 'utf8');
                // Check if this is a generated file and we have other .ruler files
                const isGenerated = content.startsWith('<!-- Generated by Ruler -->');
                const hasRulerFiles = others.length > 0 || primaryFile !== null;
                // Additional check: if AGENTS.md contains ruler source comments and we have ruler files,
                // it's likely a corrupted generated file that should be skipped
                const containsRulerSources = content.includes('<!-- Source: .ruler/') ||
                    content.includes('<!-- Source: ruler/');
                const isProbablyGenerated = isGenerated || (containsRulerSources && hasRulerFiles);
                // Skip generated AGENTS.md if we have other files in .ruler
                if (!isProbablyGenerated || !hasRulerFiles) {
                    // Prepend so it has highest precedence
                    ordered = [{ path: rootAgentsPath, content }, ...ordered];
                }
            }
        }
    }
    catch {
        // ignore if root AGENTS.md not present
    }
    return ordered;
}
|
||||
/**
 * Writes content to filePath, creating any missing parent directories first.
 */
async function writeGeneratedFile(filePath, content) {
    const parentDir = path.dirname(filePath);
    await fs_1.promises.mkdir(parentDir, { recursive: true });
    await fs_1.promises.writeFile(filePath, content, 'utf8');
}
|
||||
/**
 * Best-effort backup: copies filePath to filePath.bak when the file
 * exists; silently does nothing when it does not (or the copy fails).
 */
async function backupFile(filePath) {
    const backupPath = `${filePath}.bak`;
    try {
        await fs_1.promises.access(filePath);
        await fs_1.promises.copyFile(filePath, backupPath);
    }
    catch {
        // ignore if file does not exist
    }
}
|
||||
/**
 * Creates dirPath (and any missing ancestors) if it does not already exist.
 */
async function ensureDirExists(dirPath) {
    // recursive:true makes this a no-op when the directory is already there.
    await fs_1.promises.mkdir(dirPath, { recursive: true });
}
|
||||
/**
 * Finds the global ruler configuration directory at XDG_CONFIG_HOME/ruler
 * (falling back to ~/.config/ruler).
 * Returns the path if it exists as a directory, null otherwise.
 */
async function findGlobalRulerDir() {
    const configBase = process.env.XDG_CONFIG_HOME || path.join(os.homedir(), '.config');
    const globalConfigDir = path.join(configBase, 'ruler');
    try {
        const stat = await fs_1.promises.stat(globalConfigDir);
        return stat.isDirectory() ? globalConfigDir : null;
    }
    catch {
        // ignore if global config doesn't exist
        return null;
    }
}
|
||||
/**
 * Searches the entire directory tree from startPath to find all .ruler directories.
 * Returns an array of .ruler directory paths from most specific to least specific.
 *
 * Hidden directories (dot-prefixed) are never descended into, and traversal
 * does not cross into nested git repositories (directories containing .git),
 * except the starting root itself. Read errors are ignored.
 */
async function findAllRulerDirs(startPath) {
    const rulerDirs = [];
    const rootPath = path.resolve(startPath);
    // Search the entire directory tree downwards from startPath
    async function findRulerDirs(dir) {
        try {
            const entries = await fs_1.promises.readdir(dir, { withFileTypes: true });
            for (const entry of entries) {
                const fullPath = path.join(dir, entry.name);
                if (entry.isDirectory()) {
                    if (entry.name === '.ruler') {
                        rulerDirs.push(fullPath);
                    }
                    else {
                        // Recursively search subdirectories (but skip hidden directories like .git)
                        if (!entry.name.startsWith('.')) {
                            // Do not cross git repository boundaries (except the starting root)
                            const gitDir = path.join(fullPath, '.git');
                            try {
                                const gitStat = await fs_1.promises.stat(gitDir);
                                if (gitStat.isDirectory() &&
                                    path.resolve(fullPath) !== rootPath) {
                                    continue;
                                }
                            }
                            catch {
                                // no .git boundary, continue traversal
                            }
                            await findRulerDirs(fullPath);
                        }
                    }
                }
            }
        }
        catch {
            // ignore errors when reading directories
        }
    }
    // Start searching from the startPath
    await findRulerDirs(startPath);
    // Sort by depth (most specific first) - deeper paths come first
    rulerDirs.sort((a, b) => {
        const depthA = a.split(path.sep).length;
        const depthB = b.split(path.sep).length;
        if (depthA !== depthB) {
            return depthB - depthA; // Deeper paths first
        }
        return a.localeCompare(b); // Alphabetical for same depth
    });
    return rulerDirs;
}
|
||||
+173
@@ -0,0 +1,173 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.updateGitignore = updateGitignore;
|
||||
const fs_1 = require("fs");
|
||||
const path = __importStar(require("path"));
|
||||
const RULER_START_MARKER = '# START Ruler Generated Files';
|
||||
const RULER_END_MARKER = '# END Ruler Generated Files';
|
||||
/**
 * Updates an ignore file in the project root with paths in a managed Ruler block.
 * Creates the file if it doesn't exist, and creates or updates the Ruler-managed block.
 *
 * @param projectRoot The project root directory
 * @param paths Array of file paths to add to the ignore file (can be absolute or relative)
 * @param ignoreFile Relative path to the ignore file from project root (defaults to .gitignore)
 */
async function updateGitignore(projectRoot, paths, ignoreFile = '.gitignore') {
    const gitignorePath = path.join(projectRoot, ignoreFile);
    // Read existing .gitignore or start with empty content
    let existingContent = '';
    try {
        existingContent = await fs_1.promises.readFile(gitignorePath, 'utf8');
    }
    catch (err) {
        // Only a missing file is acceptable; other read errors propagate.
        if (err.code !== 'ENOENT') {
            throw err;
        }
    }
    // Convert paths to repo-relative POSIX format with leading /
    const relativePaths = paths
        .map((p) => {
        let relative;
        if (path.isAbsolute(p)) {
            relative = path.relative(projectRoot, p);
        }
        else {
            // Handle relative paths that might include the project root prefix
            const normalizedProjectRoot = path.normalize(projectRoot);
            const normalizedPath = path.normalize(p);
            // Get the basename of the project root to match against path prefixes
            const projectBasename = path.basename(normalizedProjectRoot);
            // If the path starts with the project basename, remove it.
            // NOTE(review): this is a heuristic — a genuine top-level directory
            // that happens to share the project root's basename would also be
            // stripped here; verify against callers.
            if (normalizedPath.startsWith(projectBasename + path.sep)) {
                relative = normalizedPath.substring(projectBasename.length + 1);
            }
            else {
                relative = normalizedPath;
            }
        }
        return relative.replace(/\\/g, '/'); // Convert to POSIX format
    })
        .filter((p) => {
        // Never include any path that resides inside a .ruler directory (inputs, not outputs)
        return !p.includes('/.ruler/') && !p.startsWith('.ruler/');
    })
        .map((p) => {
        // Always write full repository-relative paths (prefix with leading /)
        return p.startsWith('/') ? p : `/${p}`;
    });
    // Get all existing paths from .gitignore (excluding Ruler block)
    const existingPaths = getExistingPathsExcludingRulerBlock(existingContent);
    // Filter out paths that already exist outside the Ruler block
    const newPaths = relativePaths.filter((p) => !existingPaths.includes(p));
    // The Ruler block should contain only the new paths (replacement behavior)
    const allRulerPaths = [...new Set(newPaths)].sort();
    // Create new content
    const newContent = updateGitignoreContent(existingContent, allRulerPaths);
    // Write the updated content
    await fs_1.promises.mkdir(path.dirname(gitignorePath), { recursive: true });
    await fs_1.promises.writeFile(gitignorePath, newContent);
}
|
||||
/**
 * Extracts the non-blank, non-comment entries from .gitignore content,
 * skipping everything inside the Ruler-managed block (between the start
 * and end markers).
 */
function getExistingPathsExcludingRulerBlock(content) {
    const paths = [];
    let inRulerBlock = false;
    for (const rawLine of content.split('\n')) {
        const line = rawLine.trim();
        if (line === RULER_START_MARKER) {
            inRulerBlock = true;
        }
        else if (line === RULER_END_MARKER) {
            inRulerBlock = false;
        }
        else if (!inRulerBlock && line !== '' && !line.startsWith('#')) {
            paths.push(line);
        }
    }
    return paths;
}
|
||||
/**
 * Rewrites the .gitignore text: the first Ruler-managed block has its
 * contents replaced with rulerPaths; when no block exists, a new one is
 * appended at the end (preceded by a blank line when the file already has
 * content). Any later duplicate blocks are left untouched. The returned
 * text always ends with a newline.
 */
function updateGitignoreContent(existingContent, rulerPaths) {
    const output = [];
    let skipping = false; // currently discarding lines of the first block
    let replacedFirstBlock = false;
    let sawBlock = false;
    for (const line of existingContent.split('\n')) {
        const trimmed = line.trim();
        if (trimmed === RULER_START_MARKER && !replacedFirstBlock) {
            sawBlock = true;
            skipping = true;
            // Emit the marker followed by the fresh set of managed paths.
            output.push(line, ...rulerPaths);
            continue;
        }
        if (trimmed === RULER_END_MARKER && skipping) {
            skipping = false;
            replacedFirstBlock = true;
            output.push(line);
            continue;
        }
        // Old contents of the first block are dropped; everything else kept.
        if (!skipping) {
            output.push(line);
        }
    }
    if (!sawBlock) {
        // Separate the new block from existing content with a blank line.
        if (existingContent.trim() && !existingContent.endsWith('\n\n')) {
            output.push('');
        }
        output.push(RULER_START_MARKER, ...rulerPaths, RULER_END_MARKER);
    }
    const joined = output.join('\n');
    return joined.endsWith('\n') ? joined : `${joined}\n`;
}
|
||||
+60
@@ -0,0 +1,60 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.concatenateRules = concatenateRules;
|
||||
const path = __importStar(require("path"));
|
||||
/**
 * Concatenates markdown rule files into a single document. Each file's
 * trimmed content is preceded by an HTML comment naming its source path
 * relative to `baseDir` (defaulting to the working directory), always using
 * forward slashes so the output is identical across platforms.
 */
function concatenateRules(files, baseDir) {
    const root = baseDir || process.cwd();
    const parts = [];
    for (const { path: filePath, content } of files) {
        const relative = path.relative(root, filePath).replace(/\\/g, '/');
        // Two leading blank lines, the source marker, one blank line, the
        // trimmed content, and a trailing newline closing the section.
        parts.push(`\n\n<!-- Source: ${relative} -->\n\n${content.trim()}\n`);
    }
    return parts.join('\n');
}
|
||||
+1098
File diff suppressed because it is too large
Load Diff
+161
@@ -0,0 +1,161 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.hasSkillMd = hasSkillMd;
|
||||
exports.isGroupingDir = isGroupingDir;
|
||||
exports.walkSkillsTree = walkSkillsTree;
|
||||
exports.formatValidationWarnings = formatValidationWarnings;
|
||||
exports.copySkillsDirectory = copySkillsDirectory;
|
||||
const path = __importStar(require("path"));
|
||||
const fs = __importStar(require("fs/promises"));
|
||||
const constants_1 = require("../constants");
|
||||
/**
 * Reports whether `dirPath` directly contains a SKILL.md file.
 * Any failure (missing file, unreadable directory) yields false.
 */
async function hasSkillMd(dirPath) {
    try {
        await fs.access(path.join(dirPath, constants_1.SKILL_MD_FILENAME));
        return true;
    }
    catch {
        return false;
    }
}
|
||||
/**
 * Determines whether `dirPath` is a grouping directory: one that has,
 * somewhere beneath it, a subdirectory containing SKILL.md. A read failure
 * is treated as "not a grouping directory".
 */
async function isGroupingDir(dirPath) {
    let children;
    try {
        children = await fs.readdir(dirPath, { withFileTypes: true });
    }
    catch {
        return false;
    }
    for (const child of children) {
        if (!child.isDirectory()) {
            continue;
        }
        const childPath = path.join(dirPath, child.name);
        // A direct skill child or a nested grouping directory both qualify.
        if ((await hasSkillMd(childPath)) || (await isGroupingDir(childPath))) {
            return true;
        }
    }
    return false;
}
|
||||
/**
 * Walks the skills tree rooted at `root` and discovers every skill
 * directory (a directory containing SKILL.md). Grouping directories are
 * descended into; directories that are neither produce a validation
 * warning. Unreadable directories are reported as warnings too.
 *
 * @returns an object with the discovered `skills` and any `warnings`.
 */
async function walkSkillsTree(root) {
    const skills = [];
    const warnings = [];
    const visit = async (dirPath, relPath) => {
        let entries;
        try {
            entries = await fs.readdir(dirPath, { withFileTypes: true });
        }
        catch (err) {
            // If we can't read the directory, record it and keep what we have.
            warnings.push(`Failed to read directory ${relPath || 'root'}: ${err.message}`);
            return;
        }
        for (const entry of entries) {
            if (!entry.isDirectory()) {
                continue;
            }
            const childPath = path.join(dirPath, entry.name);
            const childRel = relPath ? path.join(relPath, entry.name) : entry.name;
            if (await hasSkillMd(childPath)) {
                // A valid skill directory.
                skills.push({
                    name: entry.name,
                    path: childPath,
                    hasSkillMd: true,
                    valid: true,
                });
            }
            else if (await isGroupingDir(childPath)) {
                // A grouping directory: recurse into it.
                await visit(childPath, childRel);
            }
            else {
                // Neither a skill nor a grouping directory.
                warnings.push(`Directory '${childRel}' in .ruler/skills has no SKILL.md and contains no sub-skills. It may be malformed or stray.`);
            }
        }
    };
    await visit(root, '');
    return { skills, warnings };
}
|
||||
/**
 * Renders validation warnings as an indented bullet list, one per line.
 * Returns an empty string when there is nothing to report.
 */
function formatValidationWarnings(warnings) {
    if (warnings.length === 0) {
        return '';
    }
    const bullets = [];
    for (const warning of warnings) {
        bullets.push(`  - ${warning}`);
    }
    return bullets.join('\n');
}
|
||||
/**
 * Recursively copies `src` into `dest`. Directories are recreated
 * (including missing parents) and their entries copied one at a time;
 * anything else is copied as a plain file.
 */
async function copyRecursive(src, dest) {
    const info = await fs.stat(src);
    if (!info.isDirectory()) {
        await fs.copyFile(src, dest);
        return;
    }
    await fs.mkdir(dest, { recursive: true });
    for (const entry of await fs.readdir(src, { withFileTypes: true })) {
        await copyRecursive(path.join(src, entry.name), path.join(dest, entry.name));
    }
}
|
||||
/**
 * Copies the entire skills tree from `srcDir` into `destDir`, preserving
 * its structure. The destination (and any missing parents) is created
 * first so the copy cannot fail on a non-existent target.
 */
async function copySkillsDirectory(srcDir, destDir) {
    // Ensure the target exists before delegating to the recursive copy.
    await fs.mkdir(destDir, { recursive: true });
    await copyRecursive(srcDir, destDir);
}
|
||||
+384
@@ -0,0 +1,384 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.loadUnifiedConfig = loadUnifiedConfig;
|
||||
const fs_1 = require("fs");
|
||||
const path = __importStar(require("path"));
|
||||
const toml_1 = require("@iarna/toml");
|
||||
const hash_1 = require("./hash");
|
||||
const RuleProcessor_1 = require("./RuleProcessor");
|
||||
const FileSystemUtils = __importStar(require("./FileSystemUtils"));
|
||||
/**
 * Loads the unified Ruler configuration for a project: resolves the
 * effective .ruler directory, parses ruler.toml, collects and concatenates
 * the markdown rule files, normalizes MCP server definitions from both
 * ruler.toml and the legacy mcp.json (TOML wins per server name), and
 * resolves the enabled agent set. Non-fatal problems are collected into
 * `diagnostics` rather than thrown.
 *
 * @param options - { projectRoot, configPath?, cliAgents? }
 * @returns the assembled config: { meta, toml, rules, mcp, agents,
 *          diagnostics, hash }
 */
async function loadUnifiedConfig(options) {
    // Resolve the effective .ruler directory (local or global), mirroring the main loader behavior
    const resolvedRulerDir = (await FileSystemUtils.findRulerDir(options.projectRoot, true)) ||
        path.join(options.projectRoot, '.ruler');
    const meta = {
        projectRoot: options.projectRoot,
        rulerDir: resolvedRulerDir,
        loadedAt: new Date(),
        version: '0.0.0-dev',
    };
    const diagnostics = [];
    // Read TOML if available
    let tomlRaw = {};
    const tomlFile = options.configPath
        ? path.resolve(options.configPath)
        : path.join(meta.rulerDir, 'ruler.toml');
    try {
        const text = await fs_1.promises.readFile(tomlFile, 'utf8');
        tomlRaw = text.trim() ? (0, toml_1.parse)(text) : {};
        meta.configFile = tomlFile;
    }
    catch (err) {
        // A missing file is fine (defaults apply); anything else is reported.
        if (err.code !== 'ENOENT') {
            diagnostics.push({
                severity: 'warning',
                code: 'TOML_READ_ERROR',
                message: 'Failed to read ruler.toml',
                file: tomlFile,
                detail: err.message,
            });
        }
    }
    // Extract default_agents (each entry coerced to a string) when declared.
    let defaultAgents;
    if (tomlRaw &&
        typeof tomlRaw === 'object' &&
        tomlRaw.default_agents &&
        Array.isArray(tomlRaw.default_agents)) {
        defaultAgents = tomlRaw.default_agents.map((a) => String(a));
    }
    // Nested mode defaults to false unless ruler.toml sets a boolean.
    let nested = false;
    if (tomlRaw &&
        typeof tomlRaw === 'object' &&
        typeof tomlRaw.nested === 'boolean') {
        nested = tomlRaw.nested;
    }
    // Parse skills configuration
    let skillsConfig;
    if (tomlRaw && typeof tomlRaw === 'object') {
        const skillsSection = tomlRaw.skills;
        if (skillsSection && typeof skillsSection === 'object') {
            const skillsObj = skillsSection;
            if (typeof skillsObj.enabled === 'boolean') {
                skillsConfig = { enabled: skillsObj.enabled };
            }
        }
    }
    // Assembled TOML view; `agents` is populated during resolution below.
    const toml = {
        raw: tomlRaw,
        schemaVersion: 1,
        agents: {},
        defaultAgents,
        nested,
        skills: skillsConfig,
    };
    // Collect rule markdown files
    let ruleFiles = [];
    try {
        const dirEntries = await fs_1.promises.readdir(meta.rulerDir, { withFileTypes: true });
        const mdFiles = dirEntries
            .filter((e) => e.isFile() && e.name.toLowerCase().endsWith('.md'))
            .map((e) => path.join(meta.rulerDir, e.name));
        // Sort lexicographically then ensure AGENTS.md first
        mdFiles.sort((a, b) => a.localeCompare(b));
        mdFiles.sort((a, b) => {
            const aIs = /agents\.md$/i.test(a);
            const bIs = /agents\.md$/i.test(b);
            if (aIs && !bIs)
                return -1;
            if (bIs && !aIs)
                return 1;
            return 0;
        });
        let order = 0;
        // NOTE(review): `order++` executes only after each file's awaited
        // read/stat completes, so under Promise.all the assigned order may
        // follow I/O completion order rather than the sorted array order —
        // verify this is intended.
        ruleFiles = await Promise.all(mdFiles.map(async (file) => {
            const content = await fs_1.promises.readFile(file, 'utf8');
            const stat = await fs_1.promises.stat(file);
            return {
                path: file,
                relativePath: path.basename(file),
                content,
                contentHash: (0, hash_1.sha256)(content),
                mtimeMs: stat.mtimeMs,
                size: stat.size,
                order: order++,
                primary: /agents\.md$/i.test(file),
            };
        }));
    }
    catch (err) {
        diagnostics.push({
            severity: 'warning',
            code: 'RULES_READ_ERROR',
            message: 'Failed reading rule files',
            file: meta.rulerDir,
            detail: err.message,
        });
    }
    const concatenated = (0, RuleProcessor_1.concatenateRules)(ruleFiles.map((f) => ({ path: f.path, content: f.content })), path.dirname(meta.rulerDir));
    const rules = {
        files: ruleFiles,
        concatenated,
        concatenatedHash: (0, hash_1.sha256)(concatenated),
    };
    // Parse TOML MCP servers
    const tomlMcpServers = {};
    if (tomlRaw && typeof tomlRaw === 'object') {
        const tomlObj = tomlRaw;
        if (tomlObj.mcp_servers && typeof tomlObj.mcp_servers === 'object') {
            const mcpServersRaw = tomlObj.mcp_servers;
            for (const [name, def] of Object.entries(mcpServersRaw)) {
                if (!def || typeof def !== 'object')
                    continue;
                const serverDef = def;
                const server = {};
                // Parse command and args
                if (typeof serverDef.command === 'string') {
                    server.command = serverDef.command;
                }
                if (Array.isArray(serverDef.args)) {
                    server.args = serverDef.args.map(String);
                }
                // Parse env
                if (serverDef.env && typeof serverDef.env === 'object') {
                    server.env = Object.fromEntries(Object.entries(serverDef.env).filter(([, v]) => typeof v === 'string'));
                }
                // Parse URL and headers
                if (typeof serverDef.url === 'string') {
                    server.url = serverDef.url;
                }
                if (serverDef.headers && typeof serverDef.headers === 'object') {
                    server.headers = Object.fromEntries(Object.entries(serverDef.headers).filter(([, v]) => typeof v === 'string'));
                }
                if (typeof serverDef.timeout === 'number') {
                    server.timeout = serverDef.timeout;
                }
                // Validate server configuration
                const hasCommand = !!server.command;
                const hasUrl = !!server.url;
                if (!hasCommand && !hasUrl) {
                    diagnostics.push({
                        severity: 'warning',
                        code: 'MCP_TOML_INVALID_SERVER',
                        message: `MCP server '${name}' must have at least one of command or url`,
                        file: tomlFile,
                    });
                    continue;
                }
                if (hasCommand && hasUrl) {
                    diagnostics.push({
                        severity: 'warning',
                        code: 'MCP_TOML_FIELD_CONFLICT',
                        message: `MCP server '${name}' has both command and url - using url (remote)`,
                        file: tomlFile,
                    });
                }
                if (hasCommand && server.headers) {
                    diagnostics.push({
                        severity: 'warning',
                        code: 'MCP_TOML_FIELD_CONFLICT',
                        message: `MCP server '${name}' has headers with command (should be used with url only)`,
                        file: tomlFile,
                    });
                }
                if (hasUrl && server.env) {
                    diagnostics.push({
                        severity: 'warning',
                        code: 'MCP_TOML_FIELD_CONFLICT',
                        message: `MCP server '${name}' has env with url (should be used with command only)`,
                        file: tomlFile,
                    });
                }
                // Derive type - remote takes precedence if both are present
                if (server.url) {
                    server.type = 'remote';
                }
                else if (server.command) {
                    server.type = 'stdio';
                }
                tomlMcpServers[name] = server;
            }
        }
    }
    // Store TOML MCP servers in toml config
    toml.mcpServers = tomlMcpServers;
    // MCP normalization - merge JSON and TOML
    let mcp = null;
    const mcpFile = path.join(meta.rulerDir, 'mcp.json');
    const jsonMcpServers = {};
    let mcpJsonExists = false;
    // Pre-flight existence check so users see warning even if JSON invalid
    try {
        await fs_1.promises.access(mcpFile);
        mcpJsonExists = true;
        // Warning is handled by apply-engine to avoid duplication
    }
    catch {
        // file not present
    }
    // Add deprecation warning if mcp.json exists (regardless of validity)
    if (mcpJsonExists) {
        meta.mcpFile = mcpFile;
        diagnostics.push({
            severity: 'warning',
            code: 'MCP_JSON_DEPRECATED',
            message: 'mcp.json detected: please migrate MCP servers to ruler.toml [mcp_servers.*] sections',
            file: mcpFile,
        });
    }
    try {
        if (mcpJsonExists) {
            const raw = await fs_1.promises.readFile(mcpFile, 'utf8');
            let parsed;
            try {
                parsed = JSON.parse(raw);
            }
            catch (e) {
                // Lenient fallback: strip comments and trailing commas then retry
                const stripped = raw
                    // strip /* */ comments
                    .replace(/\/\*[\s\S]*?\*\//g, '')
                    // strip // comments
                    .replace(/(^|\s+)\/\/.*$/gm, '$1')
                    // remove trailing commas before } or ]
                    .replace(/,\s*([}\]])/g, '$1');
                try {
                    parsed = JSON.parse(stripped);
                }
                catch {
                    throw e; // rethrow original error for diagnostics
                }
            }
            const parsedObj = parsed;
            // Accept both "mcpServers" and "servers" as the top-level key.
            const serversRaw = parsedObj.mcpServers ||
                parsedObj.servers ||
                {};
            if (serversRaw && typeof serversRaw === 'object') {
                for (const [name, def] of Object.entries(serversRaw)) {
                    if (!def || typeof def !== 'object')
                        continue;
                    const server = {};
                    if (typeof def.command === 'string')
                        server.command = def.command;
                    if (Array.isArray(def.command))
                        server.command = def.command[0];
                    if (Array.isArray(def.args))
                        server.args = def.args.map(String);
                    if (def.env && typeof def.env === 'object') {
                        server.env = Object.fromEntries(Object.entries(def.env).filter(([, v]) => typeof v === 'string'));
                    }
                    if (typeof def.url === 'string')
                        server.url = def.url;
                    if (def.headers && typeof def.headers === 'object') {
                        server.headers = Object.fromEntries(Object.entries(def.headers).filter(([, v]) => typeof v === 'string'));
                    }
                    if (typeof def.timeout === 'number') {
                        server.timeout = def.timeout;
                    }
                    // Derive type
                    if (server.url)
                        server.type = 'remote';
                    else if (server.command)
                        server.type = 'stdio';
                    jsonMcpServers[name] = server;
                }
            }
        }
    }
    catch (err) {
        if (mcpJsonExists) {
            diagnostics.push({
                severity: 'warning',
                code: 'MCP_READ_ERROR',
                message: 'Failed to read mcp.json',
                file: mcpFile,
                detail: err.message,
            });
        }
    }
    // Merge servers: start with JSON, overlay TOML (TOML wins per server name)
    const mergedServers = { ...jsonMcpServers, ...tomlMcpServers };
    // Create MCP bundle if we have any servers
    if (Object.keys(mergedServers).length > 0 || mcpJsonExists) {
        mcp = {
            servers: mergedServers,
            raw: mcpJsonExists ? { mcpServers: jsonMcpServers } : {},
            hash: (0, hash_1.sha256)((0, hash_1.stableJson)(mergedServers)),
        };
    }
    const config = {
        meta,
        toml,
        rules,
        mcp,
        agents: {},
        diagnostics,
        hash: '', // placeholder, recompute after agents
    };
    // Agent resolution (basic): enabled set is CLI override or default_agents
    const cliAgents = options.cliAgents && options.cliAgents.length > 0
        ? options.cliAgents
        : undefined;
    const enabledList = cliAgents ?? toml.defaultAgents ?? [];
    for (const name of enabledList) {
        config.agents[name] = {
            identifier: name,
            enabled: true,
            output: {},
            mcp: { enabled: false, strategy: 'merge' },
        };
    }
    // If CLI provided, mark defaults not included as disabled (optional design choice)
    if (cliAgents) {
        for (const name of toml.defaultAgents ?? []) {
            if (!config.agents[name]) {
                config.agents[name] = {
                    identifier: name,
                    enabled: false,
                    output: {},
                    mcp: { enabled: false, strategy: 'merge' },
                };
            }
        }
    }
    // Recompute hash including agents list
    config.hash = (0, hash_1.sha256)((0, hash_1.stableJson)({
        toml: toml.defaultAgents,
        rules: rules.concatenatedHash,
        mcp: mcp ? mcp.hash : null,
        agents: Object.entries(config.agents).map(([k, v]) => [k, v.enabled]),
    }));
    return config;
}
|
||||
+2
@@ -0,0 +1,2 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
+52
@@ -0,0 +1,52 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.resolveSelectedAgents = resolveSelectedAgents;
|
||||
const constants_1 = require("../constants");
|
||||
/**
 * Validates a list of requested agent names (already lower-cased) against
 * the available agents. A name is valid when it equals an agent identifier
 * or is a substring of an agent display name (case-insensitive).
 *
 * @param {string[]} filters - lower-cased names to validate
 * @param {Array} allAgents - all available agents
 * @param {string} sourceLabel - suffix describing where the names came from,
 *   interpolated into the error message ('' or ' in default_agents')
 * @throws a ruler error listing the invalid names and the valid identifiers
 */
function assertAgentsExist(filters, allAgents, sourceLabel) {
    const validAgentIdentifiers = new Set(allAgents.map((agent) => agent.getIdentifier()));
    const validAgentNames = new Set(allAgents.map((agent) => agent.getName().toLowerCase()));
    const invalidAgents = filters.filter((filter) => !validAgentIdentifiers.has(filter) &&
        ![...validAgentNames].some((name) => name.includes(filter)));
    if (invalidAgents.length > 0) {
        throw (0, constants_1.createRulerError)(`Invalid agent specified${sourceLabel}: ${invalidAgents.join(', ')}`, `Valid agents are: ${[...validAgentIdentifiers].join(', ')}`);
    }
}
/**
 * Resolves which agents should be selected based on configuration.
 * Precedence: CLI agents > default_agents > per-agent enabled flags > all agents.
 *
 * @param config Loaded configuration containing CLI agents, default agents, and per-agent configs
 * @param allAgents Array of all available agents
 * @returns Array of agents that should be processed
 * @throws when a CLI- or default-specified agent name matches nothing
 */
function resolveSelectedAgents(config, allAgents) {
    // CLI --agents wins outright: validate, then keep matching agents.
    if (config.cliAgents && config.cliAgents.length > 0) {
        const filters = config.cliAgents.map((n) => n.toLowerCase());
        assertAgentsExist(filters, allAgents, '');
        return allAgents.filter((agent) => filters.some((f) => agent.getIdentifier() === f ||
            agent.getName().toLowerCase().includes(f)));
    }
    // default_agents from config: validated the same way, but a per-agent
    // [agents.X].enabled flag overrides membership in the default list.
    if (config.defaultAgents && config.defaultAgents.length > 0) {
        const defaults = config.defaultAgents.map((n) => n.toLowerCase());
        assertAgentsExist(defaults, allAgents, ' in default_agents');
        return allAgents.filter((agent) => {
            const identifier = agent.getIdentifier();
            const override = config.agentConfigs[identifier]?.enabled;
            if (override !== undefined) {
                return override;
            }
            return defaults.some((d) => identifier === d || agent.getName().toLowerCase().includes(d));
        });
    }
    // No CLI or default selection: all agents except those explicitly disabled.
    return allAgents.filter((agent) => config.agentConfigs[agent.getIdentifier()]?.enabled !== false);
}
|
||||
+682
@@ -0,0 +1,682 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.loadNestedConfigurations = loadNestedConfigurations;
|
||||
exports.loadSingleConfiguration = loadSingleConfiguration;
|
||||
exports.processHierarchicalConfigurations = processHierarchicalConfigurations;
|
||||
exports.processSingleConfiguration = processSingleConfiguration;
|
||||
exports.applyConfigurationsToAgents = applyConfigurationsToAgents;
|
||||
exports.updateGitignore = updateGitignore;
|
||||
const path = __importStar(require("path"));
|
||||
const fs_1 = require("fs");
|
||||
const toml_1 = require("@iarna/toml");
|
||||
const FileSystemUtils = __importStar(require("./FileSystemUtils"));
|
||||
const RuleProcessor_1 = require("./RuleProcessor");
|
||||
const ConfigLoader_1 = require("./ConfigLoader");
|
||||
const GitignoreUtils_1 = require("./GitignoreUtils");
|
||||
const merge_1 = require("../mcp/merge");
|
||||
const mcp_1 = require("../paths/mcp");
|
||||
const propagateOpenHandsMcp_1 = require("../mcp/propagateOpenHandsMcp");
|
||||
const propagateOpenCodeMcp_1 = require("../mcp/propagateOpenCodeMcp");
|
||||
const agent_utils_1 = require("../agents/agent-utils");
|
||||
const capabilities_1 = require("../mcp/capabilities");
|
||||
const constants_1 = require("../constants");
|
||||
/**
 * Discovers every .ruler directory relevant to the project (hierarchical
 * search, optionally including the global config) and builds one
 * hierarchical configuration per directory, in discovery order.
 */
async function loadNestedConfigurations(projectRoot, configPath, localOnly, resolvedNested) {
    const { dirs } = await findRulerDirectories(projectRoot, localOnly, true);
    const perDirFiles = await processIndependentRulerDirs(dirs);
    const configurations = [];
    // Process directories one at a time, preserving discovery order.
    for (const { rulerDir, files } of perDirFiles) {
        const loaded = await loadConfigForRulerDir(rulerDir, configPath, resolvedNested);
        configurations.push(await createHierarchicalConfiguration(rulerDir, files, loaded, configPath));
    }
    return configurations;
}
|
||||
/**
 * Reads the markdown rule files of each .ruler directory independently.
 * Rules are NOT merged across directories: each entry in the result pairs a
 * directory with only its own files.
 */
async function processIndependentRulerDirs(rulerDirs) {
    const perDirectory = [];
    for (const rulerDir of rulerDirs) {
        perDirectory.push({
            rulerDir,
            files: await FileSystemUtils.readMarkdownFiles(rulerDir),
        });
    }
    return perDirectory;
}
|
||||
/**
 * Builds the hierarchical configuration entry for one .ruler directory:
 * warns about a legacy mcp.json, concatenates the directory's markdown
 * rules, loads the unified config rooted at the directory's parent
 * (preferring a local ruler.toml over the CLI config path), and extracts
 * any MCP server definitions.
 *
 * @param rulerDir - absolute path of the .ruler directory
 * @param files - markdown rule files already read from that directory
 * @param config - the loaded config for the directory
 * @param cliConfigPath - config path supplied on the command line, if any
 */
async function createHierarchicalConfiguration(rulerDir, files, config, cliConfigPath) {
    await warnAboutLegacyMcpJson(rulerDir);
    const concatenatedRules = (0, RuleProcessor_1.concatenateRules)(files, path.dirname(rulerDir));
    const directoryRoot = path.dirname(rulerDir);
    const localConfigPath = path.join(rulerDir, 'ruler.toml');
    // Prefer a ruler.toml that lives inside this .ruler directory.
    let configPathToUse = cliConfigPath;
    try {
        await fs_1.promises.access(localConfigPath);
        configPathToUse = localConfigPath;
    }
    catch {
        // fall back to CLI config or default resolution
    }
    // Dynamic import (compiled from `await import(...)`) — presumably to
    // avoid a circular module dependency at load time; TODO confirm.
    const { loadUnifiedConfig } = await Promise.resolve().then(() => __importStar(require('./UnifiedConfigLoader')));
    const unifiedConfig = await loadUnifiedConfig({
        projectRoot: directoryRoot,
        configPath: configPathToUse,
    });
    // Only surface an MCP bundle when at least one server is defined.
    let rulerMcpJson = null;
    if (unifiedConfig.mcp && Object.keys(unifiedConfig.mcp.servers).length > 0) {
        rulerMcpJson = {
            mcpServers: unifiedConfig.mcp.servers,
        };
    }
    return {
        rulerDir,
        config,
        concatenatedRules,
        rulerMcpJson,
    };
}
|
||||
/**
 * Loads the configuration that applies to a single .ruler directory. A
 * ruler.toml inside the directory takes precedence over the CLI-supplied
 * config path. When nested mode has been resolved for the run, the returned
 * (cloned) config is forced to nested = true, warning if the local file
 * explicitly disagrees.
 */
async function loadConfigForRulerDir(rulerDir, cliConfigPath, resolvedNested) {
    const localConfigPath = path.join(rulerDir, 'ruler.toml');
    const localConfigExists = await fs_1.promises
        .access(localConfigPath)
        .then(() => true, () => false);
    const loaded = await (0, ConfigLoader_1.loadConfig)({
        projectRoot: path.dirname(rulerDir),
        configPath: localConfigExists ? localConfigPath : cliConfigPath,
    });
    const result = cloneLoadedConfig(loaded);
    if (resolvedNested) {
        // The run-level nested decision wins over a local nested = false.
        if (localConfigExists && loaded.nestedDefined && loaded.nested === false) {
            (0, constants_1.logWarn)(`Nested mode is enabled but ${localConfigPath} sets nested = false. Continuing with nested processing.`);
        }
        result.nested = true;
        result.nestedDefined = true;
    }
    return result;
}
|
||||
/**
 * Produces an independent copy of a loaded configuration so later mutations
 * (e.g. forcing nested mode) never leak back into the loader's result.
 * Arrays and nested option objects are copied one level deep.
 * @param config Loaded ruler configuration.
 * @returns Clone with copied agentConfigs, arrays and option objects.
 */
function cloneLoadedConfig(config) {
    // Copy an optional plain object / array, preserving undefined.
    const copyObj = (o) => (o ? { ...o } : undefined);
    const copyArr = (a) => (a ? [...a] : undefined);
    const agentConfigs = Object.fromEntries(Object.entries(config.agentConfigs).map(([name, agentCfg]) => [
        name,
        { ...agentCfg, mcp: copyObj(agentCfg.mcp) },
    ]));
    return {
        defaultAgents: copyArr(config.defaultAgents),
        agentConfigs,
        cliAgents: copyArr(config.cliAgents),
        mcp: copyObj(config.mcp),
        gitignore: copyObj(config.gitignore),
        nested: config.nested,
        nestedDefined: config.nestedDefined,
    };
}
|
||||
/**
 * Finds ruler directories based on the specified mode.
 *
 * Hierarchical mode collects every .ruler directory under projectRoot plus,
 * unless localOnly, the user's global one. Single mode resolves exactly one
 * directory. Either way, finding nothing raises a ruler error.
 *
 * @param projectRoot Directory to search from.
 * @param localOnly When true, the global .ruler directory is not considered.
 * @param hierarchical When true, gather all nested .ruler directories.
 * @returns Object with all found dirs and the primary (first) one.
 * @throws Ruler error when no .ruler directory is found.
 */
async function findRulerDirectories(projectRoot, localOnly, hierarchical) {
    if (hierarchical) {
        const dirs = await FileSystemUtils.findAllRulerDirs(projectRoot);
        const allDirs = [...dirs];
        // Add global config if not local-only
        if (!localOnly) {
            const globalDir = await FileSystemUtils.findGlobalRulerDir();
            if (globalDir) {
                allDirs.push(globalDir);
            }
        }
        if (allDirs.length === 0) {
            throw (0, constants_1.createRulerError)(`.ruler directory not found`, `Searched from: ${projectRoot}`);
        }
        return { dirs: allDirs, primaryDir: allDirs[0] };
    }
    else {
        // Single-directory mode. NOTE(review): the second argument presumably
        // enables the global-directory fallback — confirm in FileSystemUtils.
        const dir = await FileSystemUtils.findRulerDir(projectRoot, !localOnly);
        if (!dir) {
            throw (0, constants_1.createRulerError)(`.ruler directory not found`, `Searched from: ${projectRoot}`);
        }
        return { dirs: [dir], primaryDir: dir };
    }
}
|
||||
/**
 * Warns about legacy mcp.json files if they exist.
 *
 * Probes for .ruler/mcp.json and emits a deprecation warning when present;
 * absence (or any access error) is deliberately ignored.
 * @param rulerDir Absolute path of the .ruler directory to check.
 */
async function warnAboutLegacyMcpJson(rulerDir) {
    try {
        const legacyMcpPath = path.join(rulerDir, 'mcp.json');
        await fs_1.promises.access(legacyMcpPath);
        (0, constants_1.logWarn)('Warning: Using legacy .ruler/mcp.json. Please migrate to ruler.toml. This fallback will be removed in a future release.');
    }
    catch {
        // ignore
    }
}
|
||||
/**
 * Loads configuration for single-directory mode (existing behavior).
 *
 * Locates the one .ruler directory, loads ruler.toml, concatenates markdown
 * rule files, and synthesizes a legacy-shaped { mcpServers } bundle from the
 * unified config's merged MCP servers.
 *
 * @param projectRoot Root directory of the project.
 * @param configPath Optional explicit config path from the CLI.
 * @param localOnly When true, the global .ruler directory is not searched.
 * @returns Object with the loaded config, concatenated rules, and MCP JSON (or null).
 */
async function loadSingleConfiguration(projectRoot, configPath, localOnly) {
    // Find the single ruler directory
    const { dirs: rulerDirs, primaryDir } = await findRulerDirectories(projectRoot, localOnly, false);
    // Warn about legacy mcp.json
    await warnAboutLegacyMcpJson(primaryDir);
    // Load the ruler.toml configuration
    const config = await (0, ConfigLoader_1.loadConfig)({
        projectRoot,
        configPath,
    });
    // Read rule files
    const files = await FileSystemUtils.readMarkdownFiles(rulerDirs[0]);
    // Concatenate rules
    const concatenatedRules = (0, RuleProcessor_1.concatenateRules)(files, path.dirname(primaryDir));
    // Load unified config to get merged MCP configuration
    const { loadUnifiedConfig } = await Promise.resolve().then(() => __importStar(require('./UnifiedConfigLoader')));
    const unifiedConfig = await loadUnifiedConfig({ projectRoot, configPath });
    // Synthesize rulerMcpJson from unified MCP bundle for backward compatibility
    let rulerMcpJson = null;
    if (unifiedConfig.mcp && Object.keys(unifiedConfig.mcp.servers).length > 0) {
        rulerMcpJson = {
            mcpServers: unifiedConfig.mcp.servers,
        };
    }
    return {
        config,
        concatenatedRules,
        rulerMcpJson,
    };
}
|
||||
/**
 * Processes hierarchical configurations by applying rules to each .ruler directory independently.
 * Each directory gets its own set of rules and generates its own agent files.
 * @param agents Array of agents to process
 * @param configurations Array of hierarchical configurations for each .ruler directory
 * @param verbose Whether to enable verbose logging
 * @param dryRun Whether to perform a dry run
 * @param cliMcpEnabled Whether MCP is enabled via CLI
 * @param cliMcpStrategy MCP strategy from CLI
 * @param backup Whether .bak backup files are created before overwriting
 * @returns Promise resolving to array of generated file paths
 */
async function processHierarchicalConfigurations(agents, configurations, verbose, dryRun, cliMcpEnabled, cliMcpStrategy, backup = true) {
    const allGeneratedPaths = [];
    for (const config of configurations) {
        (0, constants_1.logVerboseInfo)(`Processing .ruler directory: ${config.rulerDir}`, verbose, dryRun);
        // Each .ruler directory is applied relative to its own parent directory.
        const rulerRoot = path.dirname(config.rulerDir);
        const paths = await applyConfigurationsToAgents(agents, config.concatenatedRules, config.rulerMcpJson, config.config, rulerRoot, verbose, dryRun, cliMcpEnabled, cliMcpStrategy, backup);
        // Agents may return relative paths; normalize to absolute under rulerRoot.
        const normalizedPaths = paths.map((p) => path.isAbsolute(p) ? p : path.join(rulerRoot, p));
        allGeneratedPaths.push(...normalizedPaths);
    }
    return allGeneratedPaths;
}
|
||||
/**
 * Processes a single configuration by applying rules to all selected agents.
 * All rules are concatenated and applied to generate agent files in the project root.
 * @param agents Array of agents to process
 * @param configuration Single ruler configuration with concatenated rules
 * @param projectRoot Root directory of the project
 * @param verbose Whether to enable verbose logging
 * @param dryRun Whether to perform a dry run
 * @param cliMcpEnabled Whether MCP is enabled via CLI
 * @param cliMcpStrategy MCP strategy from CLI
 * @param backup Whether .bak backup files are created before overwriting
 * @returns Promise resolving to array of generated file paths
 */
async function processSingleConfiguration(agents, configuration, projectRoot, verbose, dryRun, cliMcpEnabled, cliMcpStrategy, backup = true) {
    // Thin wrapper: delegates to the shared application routine with projectRoot as the target.
    return await applyConfigurationsToAgents(agents, configuration.concatenatedRules, configuration.rulerMcpJson, configuration.config, projectRoot, verbose, dryRun, cliMcpEnabled, cliMcpStrategy, backup);
}
|
||||
/**
 * Applies configurations to the selected agents (internal function).
 * @param agents Array of agents to process
 * @param concatenatedRules Concatenated rule content
 * @param rulerMcpJson MCP configuration JSON
 * @param config Loaded configuration
 * @param projectRoot Root directory of the project
 * @param verbose Whether to enable verbose logging
 * @param dryRun Whether to perform a dry run
 * @param cliMcpEnabled Whether MCP is enabled via CLI (default true)
 * @param cliMcpStrategy MCP strategy from CLI
 * @param backup Whether .bak backup files are created before overwriting
 * @returns Promise resolving to array of generated file paths
 */
async function applyConfigurationsToAgents(agents, concatenatedRules, rulerMcpJson, config, projectRoot, verbose, dryRun, cliMcpEnabled = true, cliMcpStrategy, backup = true) {
    const generatedPaths = [];
    // Several agents share a single AGENTS.md target; write it only once.
    let agentsMdWritten = false;
    for (const agent of agents) {
        (0, constants_1.logInfo)(`Applying rules for ${agent.getName()}...`, dryRun);
        (0, constants_1.logVerbose)(`Processing agent: ${agent.getName()}`, verbose);
        const agentConfig = config.agentConfigs[agent.getIdentifier()];
        const agentRulerMcpJson = rulerMcpJson;
        // Collect output paths for .gitignore
        const outputPaths = (0, agent_utils_1.getAgentOutputPaths)(agent, projectRoot, agentConfig);
        (0, constants_1.logVerbose)(`Agent ${agent.getName()} output paths: ${outputPaths.join(', ')}`, verbose);
        generatedPaths.push(...outputPaths);
        // Only add the backup file paths to the gitignore list if backups are enabled
        if (backup) {
            const backupPaths = outputPaths.map((p) => `${p}.bak`);
            generatedPaths.push(...backupPaths);
        }
        if (dryRun) {
            (0, constants_1.logVerbose)(`DRY RUN: Would write rules to: ${outputPaths.join(', ')}`, verbose);
        }
        else {
            let skipApplyForThisAgent = false;
            // 'jules' and 'agentsmd' both target AGENTS.md; dedupe the write.
            if (agent.getIdentifier() === 'jules' ||
                agent.getIdentifier() === 'agentsmd') {
                if (agentsMdWritten) {
                    // Skip rewriting AGENTS.md, but still allow MCP handling below
                    skipApplyForThisAgent = true;
                }
                else {
                    agentsMdWritten = true;
                }
            }
            let finalAgentConfig = agentConfig;
            // AugmentCode needs the resolved MCP strategy embedded in its agent config.
            if (agent.getIdentifier() === 'augmentcode' && agentRulerMcpJson) {
                // Precedence: CLI flag > agent-level > global config > 'merge'.
                const resolvedStrategy = cliMcpStrategy ??
                    agentConfig?.mcp?.strategy ??
                    config.mcp?.strategy ??
                    'merge';
                finalAgentConfig = {
                    ...agentConfig,
                    mcp: {
                        ...agentConfig?.mcp,
                        strategy: resolvedStrategy,
                    },
                };
            }
            if (!skipApplyForThisAgent) {
                await agent.applyRulerConfig(concatenatedRules, projectRoot, agentRulerMcpJson, finalAgentConfig, backup);
            }
        }
        // Handle MCP configuration
        await handleMcpConfiguration(agent, agentConfig, config, agentRulerMcpJson, projectRoot, generatedPaths, verbose, dryRun, cliMcpEnabled, cliMcpStrategy, backup);
    }
    return generatedPaths;
}
|
||||
/**
 * Applies MCP (Model Context Protocol) configuration for one agent, if applicable.
 *
 * Skips agents that do not support MCP, agents with no native MCP target path,
 * agents for which MCP is disabled, and agents left with no compatible servers
 * after filtering. Otherwise records the target file for .gitignore bookkeeping
 * and writes the configuration.
 *
 * @param agent Agent adapter being processed.
 * @param agentConfig Per-agent configuration section (may be undefined).
 * @param config Full loaded configuration.
 * @param rulerMcpJson Canonical { mcpServers } bundle, or null when none.
 * @param generatedPaths Accumulator of generated paths, mutated in place.
 * @param cliMcpEnabled CLI-level MCP toggle; false disables MCP regardless of config.
 * @param cliMcpStrategy CLI-level merge strategy override.
 * @param backup Whether .bak files are produced before overwriting.
 */
async function handleMcpConfiguration(agent, agentConfig, config, rulerMcpJson, projectRoot, generatedPaths, verbose, dryRun, cliMcpEnabled = true, cliMcpStrategy, backup = true) {
    if (!(0, capabilities_1.agentSupportsMcp)(agent)) {
        (0, constants_1.logVerbose)(`Agent ${agent.getName()} does not support MCP - skipping MCP configuration`, verbose);
        return;
    }
    const dest = await (0, mcp_1.getNativeMcpPath)(agent.getName(), projectRoot);
    // Precedence: CLI flag, then agent-level setting, then global, default enabled.
    const mcpEnabledForAgent = cliMcpEnabled && (agentConfig?.mcp?.enabled ?? config.mcp?.enabled ?? true);
    if (!dest || !mcpEnabledForAgent) {
        return;
    }
    const filteredMcpJson = rulerMcpJson
        ? (0, capabilities_1.filterMcpConfigForAgent)(rulerMcpJson, agent)
        : null;
    if (!filteredMcpJson) {
        (0, constants_1.logVerbose)(`No compatible MCP servers found for ${agent.getName()} - skipping MCP configuration`, verbose);
        return;
    }
    await updateGitignoreForMcpFile(dest, projectRoot, generatedPaths, backup);
    await applyMcpConfiguration(agent, filteredMcpJson, dest, agentConfig, config, projectRoot, cliMcpStrategy, dryRun, verbose, backup);
}
|
||||
/**
 * Records an MCP destination file (and optionally its .bak sibling) in the
 * list of generated paths used later for .gitignore updates.
 * Paths outside the project root are deliberately not recorded.
 * @param dest Absolute path of the native MCP config file.
 * @param projectRoot Absolute project root.
 * @param generatedPaths Accumulator mutated in place with project-relative paths.
 * @param backup Whether a backup file will also be written.
 */
async function updateGitignoreForMcpFile(dest, projectRoot, generatedPaths, backup = true) {
    // Only project-local files belong in the project's .gitignore.
    if (!dest.startsWith(projectRoot)) {
        return;
    }
    const rel = path.relative(projectRoot, dest);
    const entries = backup ? [rel, `${rel}.bak`] : [rel];
    generatedPaths.push(...entries);
}
|
||||
/**
 * Strips per-server `timeout` entries from an MCP config for agents whose
 * native format does not understand them, warning about what was dropped.
 * The input is returned unchanged when the agent supports timeouts or when
 * there is no usable `mcpServers` object; otherwise a new object is built
 * and the input is never mutated.
 * @param agent Agent adapter (may expose an optional supportsMcpTimeout()).
 * @param mcpJson MCP configuration in ruler's canonical { mcpServers } shape.
 * @param dryRun Forwarded to the warning logger.
 * @returns Config safe to hand to the agent.
 */
function sanitizeMcpTimeoutsForAgent(agent, mcpJson, dryRun) {
    // Agents that natively understand timeouts need no sanitizing.
    if (agent.supportsMcpTimeout?.()) {
        return mcpJson;
    }
    if (!mcpJson.mcpServers || typeof mcpJson.mcpServers !== 'object') {
        return mcpJson;
    }
    const stripped = [];
    const cleaned = {};
    for (const [name, def] of Object.entries(mcpJson.mcpServers)) {
        if (!def || typeof def !== 'object') {
            cleaned[name] = def;
            continue;
        }
        if ('timeout' in def) {
            const { timeout, ...rest } = def;
            stripped.push(name);
            cleaned[name] = rest;
        }
        else {
            cleaned[name] = { ...def };
        }
    }
    if (stripped.length > 0) {
        (0, constants_1.logWarn)(`${agent.getName()} does not support MCP server timeout configuration; ignoring timeout for: ${stripped.join(', ')}`, dryRun);
    }
    return {
        ...mcpJson,
        mcpServers: cleaned,
    };
}
|
||||
/**
 * Routes MCP configuration writing to the agent-appropriate mechanism.
 *
 * Refuses destinations outside the project root, strips unsupported timeout
 * fields, dispatches OpenHands (TOML) and OpenCode to their dedicated
 * handlers, skips agents that manage MCP internally, and otherwise falls
 * through to the standard merge-and-write path.
 *
 * @param agent Agent adapter being processed.
 * @param filteredMcpJson Agent-compatible { mcpServers } bundle.
 * @param dest Absolute path of the agent's native MCP config file.
 * @param cliMcpStrategy CLI-level merge strategy override.
 * @param backup Whether .bak files are produced before overwriting.
 */
async function applyMcpConfiguration(agent, filteredMcpJson, dest, agentConfig, config, projectRoot, cliMcpStrategy, dryRun, verbose, backup = true) {
    // Prevent writing MCP configs outside the project root (e.g., legacy home-directory targets)
    if (!dest.startsWith(projectRoot)) {
        (0, constants_1.logVerbose)(`Skipping MCP config for ${agent.getName()} because target path is outside project: ${dest}`, verbose);
        return;
    }
    // Drop per-server timeouts the agent cannot represent.
    const agentMcpJson = sanitizeMcpTimeoutsForAgent(agent, filteredMcpJson, dryRun);
    if (agent.getIdentifier() === 'openhands') {
        return await applyOpenHandsMcpConfiguration(agentMcpJson, dest, dryRun, verbose, backup);
    }
    if (agent.getIdentifier() === 'opencode') {
        return await applyOpenCodeMcpConfiguration(agentMcpJson, dest, dryRun, verbose, backup);
    }
    // Agents that handle MCP configuration internally should not have external MCP handling
    if (agent.getIdentifier() === 'zed' ||
        agent.getIdentifier() === 'gemini-cli' ||
        agent.getIdentifier() === 'amazon-q-cli' ||
        agent.getIdentifier() === 'crush') {
        (0, constants_1.logVerbose)(`Skipping external MCP config for ${agent.getName()} - handled internally by agent`, verbose);
        return;
    }
    return await applyStandardMcpConfiguration(agent, agentMcpJson, dest, agentConfig, config, cliMcpStrategy, dryRun, verbose, backup);
}
|
||||
/**
 * Applies MCP configuration for OpenHands, whose config lives in a TOML file.
 * @param filteredMcpJson Agent-compatible { mcpServers } bundle.
 * @param dest Path of the OpenHands TOML config to update.
 * @param dryRun When true, only log the intended update.
 * @param verbose Whether verbose logging is enabled.
 * @param backup Whether a .bak copy is made before writing.
 */
async function applyOpenHandsMcpConfiguration(filteredMcpJson, dest, dryRun, verbose, backup = true) {
    if (dryRun) {
        (0, constants_1.logVerbose)(`DRY RUN: Would apply MCP config by updating TOML file: ${dest}`, verbose);
    }
    else {
        await (0, propagateOpenHandsMcp_1.propagateMcpToOpenHands)(filteredMcpJson, dest, backup);
    }
}
|
||||
/**
 * Applies MCP configuration for OpenCode via its dedicated propagation helper.
 * @param filteredMcpJson Agent-compatible { mcpServers } bundle.
 * @param dest Path of the OpenCode config file to update.
 * @param dryRun When true, only log the intended update.
 * @param verbose Whether verbose logging is enabled.
 * @param backup Whether a .bak copy is made before writing.
 */
async function applyOpenCodeMcpConfiguration(filteredMcpJson, dest, dryRun, verbose, backup = true) {
    if (dryRun) {
        (0, constants_1.logVerbose)(`DRY RUN: Would apply MCP config by updating OpenCode config file: ${dest}`, verbose);
    }
    else {
        await (0, propagateOpenCodeMcp_1.propagateMcpToOpenCode)(filteredMcpJson, dest, backup);
    }
}
|
||||
/**
 * Rewrites server `type` fields for Claude Code, which expects "http" or
 * "sse" rather than ruler's generic "remote" marker. SSE is inferred from a
 * "/sse" path segment in the server URL; any other remote URL becomes "http".
 * The input object is not mutated.
 * @param mcpJson MCP config in { mcpServers } shape.
 * @returns New config object with adjusted server types.
 */
function transformMcpForClaude(mcpJson) {
    if (!mcpJson.mcpServers || typeof mcpJson.mcpServers !== 'object') {
        return mcpJson;
    }
    const rewritten = Object.fromEntries(Object.entries(mcpJson.mcpServers).map(([name, def]) => {
        if (!def || typeof def !== 'object') {
            return [name, def];
        }
        const server = { ...def };
        if (server.type === 'remote' && typeof server.url === 'string' && server.url) {
            // A "/sse" path segment marks a Server-Sent-Events endpoint.
            server.type = /\/sse(\/|$)/i.test(server.url) ? 'sse' : 'http';
        }
        return [name, server];
    }));
    return { ...mcpJson, mcpServers: rewritten };
}
|
||||
/**
 * Rewrites server `type` fields for Kilo Code, which expects
 * "streamable-http" for remote HTTP servers instead of ruler's generic
 * "remote" marker. The input object is not mutated.
 * @param mcpJson MCP config in { mcpServers } shape.
 * @returns New config object with adjusted server types.
 */
function transformMcpForKiloCode(mcpJson) {
    if (!mcpJson.mcpServers || typeof mcpJson.mcpServers !== 'object') {
        return mcpJson;
    }
    const servers = {};
    for (const [name, def] of Object.entries(mcpJson.mcpServers)) {
        if (!def || typeof def !== 'object') {
            servers[name] = def;
            continue;
        }
        const copy = { ...def };
        // Only URL-bearing "remote" entries are renamed; anything else passes through.
        if (copy.type === 'remote' && copy.url && typeof copy.url === 'string') {
            copy.type = 'streamable-http';
        }
        servers[name] = copy;
    }
    return { ...mcpJson, mcpServers: servers };
}
|
||||
/**
 * Rewrites server `type` fields for Factory Droid, which expects "http" for
 * remote HTTP servers instead of ruler's generic "remote" marker.
 * The input object is not mutated.
 * @param mcpJson MCP config in { mcpServers } shape.
 * @returns New config object with adjusted server types.
 */
function transformMcpForFactoryDroid(mcpJson) {
    if (!mcpJson.mcpServers || typeof mcpJson.mcpServers !== 'object') {
        return mcpJson;
    }
    // Copy one server definition, renaming remote+url entries to "http".
    const rewrite = (def) => {
        if (!def || typeof def !== 'object') {
            return def;
        }
        const copy = { ...def };
        if (copy.type === 'remote' && copy.url && typeof copy.url === 'string') {
            copy.type = 'http';
        }
        return copy;
    };
    const servers = {};
    for (const name of Object.keys(mcpJson.mcpServers)) {
        servers[name] = rewrite(mcpJson.mcpServers[name]);
    }
    return { ...mcpJson, mcpServers: servers };
}
|
||||
/**
 * Standard merge-and-write path for native MCP configuration files.
 *
 * Resolves the merge strategy (CLI > agent config > global config > 'merge'),
 * applies agent-specific type transforms (Claude / Kilo Code / Factory Droid),
 * reads the existing native config (TOML for Codex, JSON otherwise), merges,
 * applies Firebase/Gemini "type"-stripping sanitizers, and writes only when
 * the serialized content actually changed (idempotent; optional .bak first).
 *
 * @param agent Agent adapter being processed.
 * @param filteredMcpJson Agent-compatible { mcpServers } bundle.
 * @param dest Absolute path of the native MCP config file.
 * @param cliMcpStrategy CLI-level merge strategy override.
 * @param backup Whether a .bak copy is made before writing.
 */
async function applyStandardMcpConfiguration(agent, filteredMcpJson, dest, agentConfig, config, cliMcpStrategy, dryRun, verbose, backup = true) {
    // Strategy precedence: CLI flag > agent-level > global config > 'merge'.
    const strategy = cliMcpStrategy ??
        agentConfig?.mcp?.strategy ??
        config.mcp?.strategy ??
        'merge';
    const serverKey = agent.getMcpServerKey?.() ?? 'mcpServers';
    // Skip agents with empty server keys (e.g., AgentsMdAgent, GooseAgent)
    if (serverKey === '') {
        (0, constants_1.logVerbose)(`Skipping MCP config for ${agent.getName()} - agent has empty server key`, verbose);
        return;
    }
    (0, constants_1.logVerbose)(`Applying filtered MCP config for ${agent.getName()} with strategy: ${strategy} and key: ${serverKey}`, verbose);
    if (dryRun) {
        (0, constants_1.logVerbose)(`DRY RUN: Would apply MCP config to: ${dest}`, verbose);
    }
    else {
        // Transform MCP config for agent-specific compatibility
        let mcpToMerge = filteredMcpJson;
        if (agent.getIdentifier() === 'claude') {
            mcpToMerge = transformMcpForClaude(filteredMcpJson);
        }
        else if (agent.getIdentifier() === 'kilocode') {
            mcpToMerge = transformMcpForKiloCode(filteredMcpJson);
        }
        else if (agent.getIdentifier() === 'factory') {
            mcpToMerge = transformMcpForFactoryDroid(filteredMcpJson);
        }
        const CODEX_AGENT_ID = 'codex';
        // Codex keeps its MCP servers inside a TOML config, not JSON.
        const isCodexToml = agent.getIdentifier() === CODEX_AGENT_ID && dest.endsWith('.toml');
        let existing = await (0, mcp_1.readNativeMcp)(dest);
        if (isCodexToml) {
            try {
                const tomlContent = await fs_1.promises.readFile(dest, 'utf8');
                existing = (0, toml_1.parse)(tomlContent);
            }
            catch (error) {
                (0, constants_1.logVerbose)(`Failed to read Codex MCP TOML at ${dest}: ${error.message}`, verbose);
                // ignore missing or invalid TOML, fall back to previously read value
            }
        }
        let merged = (0, merge_1.mergeMcp)(existing, mcpToMerge, strategy, serverKey);
        if (isCodexToml) {
            const { [serverKey]: servers, ...rest } = merged;
            merged = {
                ...rest,
                // Codex CLI expects MCP servers under mcp_servers in config.toml.
                mcp_servers: servers ?? {},
            };
        }
        // Firebase Studio (IDX) expects no "type" fields in .idx/mcp.json server entries.
        // Sanitize merged config by stripping 'type' from each server when targeting Firebase.
        const sanitizeForFirebase = (obj) => {
            if (agent.getIdentifier() !== 'firebase')
                return obj;
            const out = { ...obj };
            const servers = out[serverKey] || {};
            const cleanedServers = {};
            for (const [name, def] of Object.entries(servers)) {
                if (def && typeof def === 'object') {
                    const copy = { ...def };
                    delete copy.type;
                    cleanedServers[name] = copy;
                }
                else {
                    cleanedServers[name] = def;
                }
            }
            out[serverKey] = cleanedServers;
            return out;
        };
        // Gemini CLI (since v0.21.0) no longer accepts the "type" field in MCP server entries.
        // Following the MCP spec update from Nov 25, 2025, the transport type is now inferred
        // from the presence of specific keys (command/args -> stdio, url -> sse/http).
        // Sanitize merged config by stripping 'type' from each server when targeting Gemini.
        const sanitizeForGemini = (obj) => {
            if (agent.getIdentifier() !== 'gemini-cli')
                return obj;
            const out = { ...obj };
            const servers = out[serverKey] || {};
            const cleanedServers = {};
            for (const [name, def] of Object.entries(servers)) {
                if (def && typeof def === 'object') {
                    const copy = { ...def };
                    delete copy.type;
                    cleanedServers[name] = copy;
                }
                else {
                    cleanedServers[name] = def;
                }
            }
            out[serverKey] = cleanedServers;
            return out;
        };
        let toWrite = sanitizeForFirebase(merged);
        toWrite = sanitizeForGemini(toWrite);
        // Only backup and write if content would actually change (idempotent)
        const currentContent = isCodexToml
            ? (0, toml_1.stringify)(existing)
            : JSON.stringify(existing, null, 2);
        const newContent = isCodexToml
            ? (0, toml_1.stringify)(toWrite)
            : JSON.stringify(toWrite, null, 2);
        if (currentContent !== newContent) {
            if (backup) {
                const { backupFile } = await Promise.resolve().then(() => __importStar(require('../core/FileSystemUtils')));
                await backupFile(dest);
            }
            if (isCodexToml) {
                await FileSystemUtils.writeGeneratedFile(dest, (0, toml_1.stringify)(toWrite));
            }
            else {
                await (0, mcp_1.writeNativeMcp)(dest, toWrite);
            }
        }
        else {
            (0, constants_1.logVerbose)(`MCP config for ${agent.getName()} is already up to date - skipping backup and write`, verbose);
        }
    }
}
|
||||
/**
 * Updates the .gitignore file with generated paths.
 * @param projectRoot Root directory of the project
 * @param generatedPaths Array of generated file paths
 * @param config Loaded configuration
 * @param cliGitignoreEnabled CLI gitignore setting
 * @param dryRun Whether to perform a dry run
 * @param cliGitignoreLocal CLI toggle for .git/info/exclude usage
 */
async function updateGitignore(projectRoot, generatedPaths, config, cliGitignoreEnabled, dryRun, cliGitignoreLocal) {
    // Configuration precedence: CLI > TOML > Default (enabled)
    let gitignoreEnabled;
    if (cliGitignoreEnabled !== undefined) {
        gitignoreEnabled = cliGitignoreEnabled;
    }
    else if (config.gitignore?.enabled !== undefined) {
        gitignoreEnabled = config.gitignore.enabled;
    }
    else {
        gitignoreEnabled = true; // Default enabled
    }
    // Target file: CLI flag wins; otherwise the TOML `gitignore.local` option.
    // "local" means .git/info/exclude (not shared), else the project .gitignore.
    const gitignoreTarget = cliGitignoreLocal !== undefined
        ? cliGitignoreLocal
            ? '.git/info/exclude'
            : '.gitignore'
        : config.gitignore?.local
            ? '.git/info/exclude'
            : '.gitignore';
    if (gitignoreEnabled && generatedPaths.length > 0) {
        // Dedupe: the same path may be collected once per agent.
        const uniquePaths = [...new Set(generatedPaths)];
        // Note: Individual backup patterns are added per-file in the collection phase
        // No need to add a broad *.bak pattern here
        if (uniquePaths.length > 0) {
            if (dryRun) {
                (0, constants_1.logInfo)(`Would update ${gitignoreTarget} with ${uniquePaths.length} unique path(s): ${uniquePaths.join(', ')}`, dryRun);
            }
            else {
                await (0, GitignoreUtils_1.updateGitignore)(projectRoot, uniquePaths, gitignoreTarget);
                (0, constants_1.logInfo)(`Updated ${gitignoreTarget} with ${uniquePaths.length} unique path(s) in the Ruler block.`, dryRun);
            }
        }
    }
}
|
||||
+30
@@ -0,0 +1,30 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.mapRawAgentConfigs = mapRawAgentConfigs;
|
||||
/**
 * Maps raw agent configuration keys to their corresponding agent identifiers.
 *
 * Keys are normalized case-insensitively: a key matches an agent when it
 * equals the agent's identifier, or (for backwards compatibility) when it is
 * a substring of the agent's display name. A single key can therefore map
 * its config onto several agents.
 *
 * @param raw Raw agent configurations with user-provided keys
 * @param agents Array of all available agents
 * @returns Record with agent identifiers as keys and their configurations as values
 */
function mapRawAgentConfigs(raw, agents) {
    const result = {};
    for (const [rawKey, cfg] of Object.entries(raw)) {
        const needle = rawKey.toLowerCase();
        // Exact identifier match, or legacy substring match on the display name.
        const matching = agents.filter((agent) => agent.getIdentifier() === needle ||
            agent.getName().toLowerCase().includes(needle));
        for (const agent of matching) {
            result[agent.getIdentifier()] = cfg;
        }
    }
    return result;
}
|
||||
+24
@@ -0,0 +1,24 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.sha256 = sha256;
|
||||
exports.stableJson = stableJson;
|
||||
const crypto_1 = require("crypto");
|
||||
/**
 * Computes the hex-encoded SHA-256 digest of a UTF-8 string.
 * @param data Text to hash.
 * @returns 64-character lowercase hex digest.
 */
function sha256(data) {
    const hasher = crypto_1.createHash('sha256');
    hasher.update(data, 'utf8');
    return hasher.digest('hex');
}
|
||||
// Stable JSON stringify: object keys are emitted in sorted order at every
// depth, so semantically-equal objects always serialize identically.
function stableJson(value) {
    return JSON.stringify(sortValue(value));
}
/**
 * Recursively rebuilds a value with object keys in ascending order.
 * Arrays keep their element order; primitives pass through untouched.
 */
function sortValue(value) {
    if (Array.isArray(value)) {
        return value.map(sortValue);
    }
    if (value === null || typeof value !== 'object') {
        return value;
    }
    const sorted = {};
    // Default Array.sort compares strings by UTF-16 code units, matching
    // the plain `<` / `>` ordering on keys.
    for (const key of Object.keys(value).sort()) {
        sorted[key] = sortValue(value[key]);
    }
    return sorted;
}
|
||||
+414
@@ -0,0 +1,414 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.revertAgentConfiguration = revertAgentConfiguration;
|
||||
exports.cleanUpAuxiliaryFiles = cleanUpAuxiliaryFiles;
|
||||
const path = __importStar(require("path"));
|
||||
const fs_1 = require("fs");
|
||||
const agent_utils_1 = require("../agents/agent-utils");
|
||||
const mcp_1 = require("../paths/mcp");
|
||||
const constants_1 = require("../constants");
|
||||
const settings_1 = require("../vscode/settings");
|
||||
/**
 * Checks if a file exists.
 * @param filePath Path to probe (files and directories both count).
 * @returns true when fs.access succeeds, false on any error.
 */
async function fileExists(filePath) {
    // Map success/failure of the access probe straight to a boolean.
    return fs_1.promises.access(filePath).then(() => true, () => false);
}
|
||||
/**
 * Restores a file from its backup if the backup exists.
 * @param filePath File to restore (backup is expected at `${filePath}.bak`).
 * @param verbose Whether verbose logging is enabled.
 * @param dryRun When true, only log what would be restored.
 * @returns true when a backup existed (and was, or would be, restored).
 */
async function restoreFromBackup(filePath, verbose, dryRun) {
    const backupPath = `${filePath}.bak`;
    const backupExists = await fileExists(backupPath);
    if (!backupExists) {
        (0, constants_1.logVerbose)(`No backup found for: ${filePath}`, verbose);
        return false;
    }
    const prefix = (0, constants_1.actionPrefix)(dryRun);
    if (dryRun) {
        (0, constants_1.logVerbose)(`${prefix} Would restore: ${filePath} from backup`, verbose);
    }
    else {
        // Copy (not rename) so the .bak remains for later cleanup.
        await fs_1.promises.copyFile(backupPath, filePath);
        (0, constants_1.logVerbose)(`${prefix} Restored: ${filePath} from backup`, verbose);
    }
    return true;
}
|
||||
/**
 * Removes a file if it exists and has no backup (meaning it was generated by ruler).
 * Files with a .bak sibling are left alone: those pre-existed ruler and are
 * handled by restoration instead.
 * @param filePath Candidate file to remove.
 * @param verbose Whether verbose logging is enabled.
 * @param dryRun When true, only log what would be removed.
 * @returns true when the file was (or would be) removed.
 */
async function removeGeneratedFile(filePath, verbose, dryRun) {
    const fileExistsFlag = await fileExists(filePath);
    const backupExists = await fileExists(`${filePath}.bak`);
    if (!fileExistsFlag) {
        (0, constants_1.logVerbose)(`File does not exist: ${filePath}`, verbose);
        return false;
    }
    if (backupExists) {
        (0, constants_1.logVerbose)(`File has backup, skipping removal: ${filePath}`, verbose);
        return false;
    }
    const prefix = (0, constants_1.actionPrefix)(dryRun);
    if (dryRun) {
        (0, constants_1.logVerbose)(`${prefix} Would remove generated file: ${filePath}`, verbose);
    }
    else {
        await fs_1.promises.unlink(filePath);
        (0, constants_1.logVerbose)(`${prefix} Removed generated file: ${filePath}`, verbose);
    }
    return true;
}
|
||||
/**
 * Removes backup files.
 * Deletes `${filePath}.bak` when present; a missing backup is not an error.
 * @param filePath File whose .bak sibling should be deleted.
 * @param verbose Whether verbose logging is enabled.
 * @param dryRun When true, only log what would be removed.
 * @returns true when the backup existed (and was, or would be, removed).
 */
async function removeBackupFile(filePath, verbose, dryRun) {
    const backupPath = `${filePath}.bak`;
    const backupExists = await fileExists(backupPath);
    if (!backupExists) {
        return false;
    }
    const prefix = (0, constants_1.actionPrefix)(dryRun);
    if (dryRun) {
        (0, constants_1.logVerbose)(`${prefix} Would remove backup file: ${backupPath}`, verbose);
    }
    else {
        await fs_1.promises.unlink(backupPath);
        (0, constants_1.logVerbose)(`${prefix} Removed backup file: ${backupPath}`, verbose);
    }
    return true;
}
|
||||
/**
 * Returns true when dirPath contains no files anywhere beneath it
 * (i.e. it is empty or holds only empty directories).
 * Any filesystem error (missing path, permissions) yields false.
 * @param dirPath Directory to inspect recursively.
 */
async function isDirectoryTreeEmpty(dirPath) {
    try {
        const names = await fs_1.promises.readdir(dirPath);
        for (const name of names) {
            const childPath = path.join(dirPath, name);
            const info = await fs_1.promises.stat(childPath);
            // Any regular file makes the whole tree non-empty.
            if (info.isFile()) {
                return false;
            }
            if (info.isDirectory() && !(await isDirectoryTreeEmpty(childPath))) {
                return false;
            }
        }
        return true;
    }
    catch {
        return false;
    }
}
|
||||
/**
 * Helper function to execute directory removal with consistent dry-run handling and logging.
 * @param dirPath Directory to delete (recursively, in both modes).
 * @param action 'remove-tree' only adjusts the log wording.
 * @param verbose Whether verbose logging is enabled.
 * @param dryRun When true, only log the intended removal.
 * @returns Always true (removal either succeeds or throws).
 */
async function executeDirectoryAction(dirPath, action, verbose, dryRun) {
    const prefix = (0, constants_1.actionPrefix)(dryRun);
    const actionText = action === 'remove-tree' ? 'directory tree' : 'directory';
    if (dryRun) {
        (0, constants_1.logVerbose)(`${prefix} Would remove empty ${actionText}: ${dirPath}`, verbose);
    }
    else {
        await fs_1.promises.rm(dirPath, { recursive: true });
        (0, constants_1.logVerbose)(`${prefix} Removed empty ${actionText}: ${dirPath}`, verbose);
    }
    return true;
}
|
||||
/**
 * Attempts to remove a single empty directory if it exists and is empty.
 * @param logMissing When true, log a note if the path is missing/inaccessible.
 * @returns true when the directory was (or would be) removed.
 */
async function removeEmptyDirectory(dirPath, verbose, dryRun, logMissing = false) {
    try {
        const stats = await fs_1.promises.stat(dirPath);
        // Only act on directories whose whole subtree holds no files.
        if (stats.isDirectory() && (await isDirectoryTreeEmpty(dirPath))) {
            return await executeDirectoryAction(dirPath, 'remove-tree', verbose, dryRun);
        }
        return false;
    }
    catch {
        if (logMissing) {
            (0, constants_1.logVerbose)(`Directory ${dirPath} doesn't exist or can't be accessed`, verbose);
        }
        return false;
    }
}
|
||||
/**
 * Handles special cleanup logic for .augment directory and its rules subdirectory.
 * Removes `.augment/rules` first so the parent can then qualify as empty.
 * @returns Number of directories removed (0, 1, or 2).
 */
async function removeAugmentDirectory(projectRoot, verbose, dryRun) {
    const augmentDir = path.join(projectRoot, '.augment');
    let removedCount = 0;
    try {
        const stats = await fs_1.promises.stat(augmentDir);
        if (!stats.isDirectory()) {
            return 0;
        }
        // Order matters: child first, then the parent.
        const candidates = [path.join(augmentDir, 'rules'), augmentDir];
        for (const candidate of candidates) {
            if (await removeEmptyDirectory(candidate, verbose, dryRun)) {
                removedCount++;
            }
        }
    }
    catch {
        // .augment directory doesn't exist, that's fine. leaving comment as catch block can't be kept empty.
    }
    return removedCount;
}
|
||||
/**
 * Removes empty directories that were created by ruler.
 * Only removes directories if they are empty and were likely created by ruler.
 * Special handling for .augment directory to clean up rules subdirectory.
 * @returns Total number of directories removed.
 */
async function removeEmptyDirectories(projectRoot, verbose, dryRun) {
    const rulerCreatedDirs = [
        '.github',
        '.cursor',
        '.windsurf',
        '.junie',
        '.openhands',
        '.idx',
        '.gemini',
        '.vscode',
        '.augmentcode',
        '.kilocode',
    ];
    // .augment gets its two-step (rules subdir, then parent) treatment first.
    let removedCount = await removeAugmentDirectory(projectRoot, verbose, dryRun);
    for (const dirName of rulerCreatedDirs) {
        const candidate = path.join(projectRoot, dirName);
        if (await removeEmptyDirectory(candidate, verbose, dryRun, true)) {
            removedCount++;
        }
    }
    return removedCount;
}
|
||||
/**
 * Removes additional files created by specific agents that aren't covered by their main output paths.
 * Each file is restored from its `.bak` backup when one exists, otherwise deleted.
 * Also strips the AugmentCode `augment.advanced` section from `.vscode/settings.json`
 * (restoring the whole file from backup when one is available).
 * @param projectRoot Root directory of the project.
 * @param verbose Whether to enable verbose logging.
 * @param dryRun When true, only log what would be done.
 * @returns Number of files removed, restored, or edited.
 */
async function removeAdditionalAgentFiles(projectRoot, verbose, dryRun) {
    const additionalFiles = [
        '.gemini/settings.json',
        '.mcp.json',
        '.vscode/mcp.json',
        '.cursor/mcp.json',
        '.junie/mcp/mcp.json',
        '.kilocode/mcp.json',
        'config.toml',
    ];
    let filesRemoved = 0;
    const prefix = (0, constants_1.actionPrefix)(dryRun);
    for (const filePath of additionalFiles) {
        const fullPath = path.join(projectRoot, filePath);
        try {
            const fileExistsFlag = await fileExists(fullPath);
            if (!fileExistsFlag) {
                continue;
            }
            const backupExists = await fileExists(`${fullPath}.bak`);
            if (backupExists) {
                // Prefer restoring the user's original file over deleting.
                const restored = await restoreFromBackup(fullPath, verbose, dryRun);
                if (restored) {
                    filesRemoved++;
                }
            }
            else {
                if (dryRun) {
                    (0, constants_1.logVerbose)(`${prefix} Would remove additional file: ${fullPath}`, verbose);
                }
                else {
                    await fs_1.promises.unlink(fullPath);
                    (0, constants_1.logVerbose)(`${prefix} Removed additional file: ${fullPath}`, verbose);
                }
                filesRemoved++;
            }
        }
        catch {
            (0, constants_1.logVerbose)(`Additional file ${fullPath} doesn't exist or can't be accessed`, verbose);
        }
    }
    // VSCode settings.json needs content-level surgery rather than plain removal.
    const settingsPath = (0, settings_1.getVSCodeSettingsPath)(projectRoot);
    const backupPath = `${settingsPath}.bak`;
    if (await fileExists(backupPath)) {
        const restored = await restoreFromBackup(settingsPath, verbose, dryRun);
        if (restored) {
            filesRemoved++;
            // BUG FIX: previously interpolated `${constants_1.actionPrefix}` — the
            // function object itself (stringifies to its source) — instead of the
            // computed `prefix` string. Same fix applied to the messages below.
            (0, constants_1.logVerbose)(`${prefix} Restored VSCode settings from backup`, verbose);
        }
    }
    else if (await fileExists(settingsPath)) {
        try {
            // Single read path for both dry-run and real execution (the two
            // branches previously duplicated this logic verbatim).
            const settings = await (0, settings_1.readVSCodeSettings)(settingsPath);
            if (settings['augment.advanced']) {
                delete settings['augment.advanced'];
                const isNowEmpty = Object.keys(settings).length === 0;
                if (dryRun) {
                    if (isNowEmpty) {
                        (0, constants_1.logVerbose)(`${prefix} Would remove empty VSCode settings file`, verbose);
                    }
                    else {
                        (0, constants_1.logVerbose)(`${prefix} Would remove augment.advanced section from ${settingsPath}`, verbose);
                    }
                }
                else if (isNowEmpty) {
                    // Nothing left after stripping the section: drop the file entirely.
                    await fs_1.promises.unlink(settingsPath);
                    (0, constants_1.logVerbose)(`${prefix} Removed empty VSCode settings file`, verbose);
                }
                else {
                    await (0, settings_1.writeVSCodeSettings)(settingsPath, settings);
                    (0, constants_1.logVerbose)(`${prefix} Removed augment.advanced section from VSCode settings`, verbose);
                }
                filesRemoved++;
            }
            else {
                (0, constants_1.logVerbose)(`No augment.advanced section found in ${settingsPath}`, verbose);
            }
        }
        catch (error) {
            (0, constants_1.logVerbose)(`Failed to process VSCode settings.json: ${error}`, verbose);
        }
    }
    return filesRemoved;
}
|
||||
/**
 * Reverts configuration for a single agent.
 * For every output path: restore from `.bak` when possible (optionally deleting
 * the backup afterwards), otherwise remove the generated file. The agent's
 * native MCP file gets the same treatment, except AugmentCode's
 * `.vscode/settings.json`, which is handled separately.
 * @param agent The agent to revert
 * @param projectRoot Root directory of the project
 * @param agentConfig Agent-specific configuration
 * @param keepBackups Whether to keep backup files
 * @param verbose Whether to enable verbose logging
 * @param dryRun Whether to perform a dry run
 * @returns Promise resolving to revert statistics
 */
async function revertAgentConfiguration(agent, projectRoot, agentConfig, keepBackups, verbose, dryRun) {
    const stats = {
        restored: 0,
        removed: 0,
        backupsRemoved: 0,
    };
    // Shared restore-or-remove sequence used for output paths and the MCP file.
    const revertOnePath = async (targetPath) => {
        if (await restoreFromBackup(targetPath, verbose, dryRun)) {
            stats.restored++;
            if (!keepBackups && (await removeBackupFile(targetPath, verbose, dryRun))) {
                stats.backupsRemoved++;
            }
        }
        else if (await removeGeneratedFile(targetPath, verbose, dryRun)) {
            stats.removed++;
        }
    };
    const outputPaths = (0, agent_utils_1.getAgentOutputPaths)(agent, projectRoot, agentConfig);
    (0, constants_1.logVerbose)(`Agent ${agent.getName()} output paths: ${outputPaths.join(', ')}`, verbose);
    for (const outputPath of outputPaths) {
        await revertOnePath(outputPath);
    }
    // Handle MCP files
    const mcpPath = await (0, mcp_1.getNativeMcpPath)(agent.getName(), projectRoot);
    if (mcpPath && mcpPath.startsWith(projectRoot)) {
        if (agent.getName() === 'AugmentCode' &&
            mcpPath.endsWith('.vscode/settings.json')) {
            (0, constants_1.logVerbose)(`Skipping MCP handling for AugmentCode settings.json - handled separately`, verbose);
        }
        else {
            await revertOnePath(mcpPath);
        }
    }
    return stats;
}
|
||||
/**
 * Cleans up auxiliary files and directories.
 * Removes agent-specific extra files first, then prunes empty ruler-created
 * directories (file removal may be what empties them).
 * @param projectRoot Root directory of the project
 * @param verbose Whether to enable verbose logging
 * @param dryRun Whether to perform a dry run
 * @returns Promise resolving to cleanup statistics
 */
async function cleanUpAuxiliaryFiles(projectRoot, verbose, dryRun) {
    return {
        additionalFilesRemoved: await removeAdditionalAgentFiles(projectRoot, verbose, dryRun),
        directoriesRemoved: await removeEmptyDirectories(projectRoot, verbose, dryRun),
    };
}
|
||||
+166
@@ -0,0 +1,166 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.allAgents = void 0;
|
||||
exports.applyAllAgentConfigs = applyAllAgentConfigs;
|
||||
const path = __importStar(require("path"));
|
||||
const agents_1 = require("./agents");
|
||||
Object.defineProperty(exports, "allAgents", { enumerable: true, get: function () { return agents_1.allAgents; } });
|
||||
const constants_1 = require("./constants");
|
||||
const apply_engine_1 = require("./core/apply-engine");
|
||||
const config_utils_1 = require("./core/config-utils");
|
||||
const agent_selection_1 = require("./core/agent-selection");
|
||||
const agents = agents_1.allAgents;
|
||||
/**
 * Resolves skills enabled state based on precedence: CLI flag > ruler.toml > default (enabled).
 * Only `undefined` falls through a level; explicit false (or null) is honored.
 */
function resolveSkillsEnabled(cliFlag, configSetting) {
    if (cliFlag !== undefined) {
        return cliFlag;
    }
    if (configSetting !== undefined) {
        return configSetting;
    }
    return true; // default to enabled
}
|
||||
/**
 * Applies ruler configurations for selected AI agents.
 *
 * Two modes: nested (experimental; discovers every `.ruler` directory under the
 * root and processes them hierarchically) and single (one `.ruler` directory).
 * Both resolve the agent selection, optionally propagate skills, generate the
 * per-agent files, and finally update .gitignore with everything generated.
 *
 * @param projectRoot Root directory of the project
 * @param includedAgents Optional list of agent name filters (case-insensitive substrings)
 * @param configPath Optional explicit path to ruler.toml
 * @param cliMcpEnabled Whether MCP propagation is enabled (CLI flag; default true)
 * @param cliMcpStrategy Optional MCP merge strategy override from the CLI
 * @param cliGitignoreEnabled Optional CLI override for .gitignore updating
 * @param verbose Whether to enable verbose logging
 * @param dryRun Whether to only log intended actions
 * @param localOnly Whether to skip non-local rule sources
 * @param nested Whether to use experimental nested .ruler discovery
 * @param backup Whether to write .bak backups before overwriting files
 * @param skillsEnabled Optional CLI override for skills propagation
 * @param cliGitignoreLocal Optional CLI flag passed through to gitignore updating
 */
async function applyAllAgentConfigs(projectRoot, includedAgents, configPath, cliMcpEnabled = true, cliMcpStrategy, cliGitignoreEnabled, verbose = false, dryRun = false, localOnly = false, nested = false, backup = true, skillsEnabled, cliGitignoreLocal) {
    // Load configuration and rules
    (0, constants_1.logVerbose)(`Loading configuration from project root: ${projectRoot}`, verbose);
    if (configPath) {
        (0, constants_1.logVerbose)(`Using custom config path: ${configPath}`, verbose);
    }
    let selectedAgents;
    let generatedPaths;
    let loadedConfig;
    if (nested) {
        const hierarchicalConfigs = await (0, apply_engine_1.loadNestedConfigurations)(projectRoot, configPath, localOnly, nested);
        if (hierarchicalConfigs.length === 0) {
            throw new Error('No .ruler directories found');
        }
        (0, constants_1.logWarn)('Nested mode is experimental and may change in future releases.', dryRun);
        // Use the root config for agent selection (all levels share the same agent settings)
        const rootConfigEntry = selectRootConfiguration(hierarchicalConfigs, projectRoot);
        const rootConfig = rootConfigEntry.config;
        loadedConfig = rootConfig;
        rootConfig.cliAgents = includedAgents;
        (0, constants_1.logVerbose)(`Loaded ${hierarchicalConfigs.length} .ruler directory configurations`, verbose);
        (0, constants_1.logVerbose)(`Root configuration has ${Object.keys(rootConfig.agentConfigs).length} agent configs`, verbose);
        // Normalize agent-config keys at every level before selection.
        for (const configEntry of hierarchicalConfigs) {
            normalizeAgentConfigs(configEntry.config, agents);
        }
        selectedAgents = (0, agent_selection_1.resolveSelectedAgents)(rootConfig, agents);
        (0, constants_1.logVerbose)(`Selected ${selectedAgents.length} agents: ${selectedAgents.map((a) => a.getName()).join(', ')}`, verbose);
        // Propagate skills if enabled - do this for each nested directory
        const skillsEnabledResolved = resolveSkillsEnabled(skillsEnabled, rootConfig.skills?.enabled);
        if (skillsEnabledResolved) {
            // Lazily loaded so the SkillsProcessor module is only paid for when skills are on.
            const { propagateSkills } = await Promise.resolve().then(() => __importStar(require('./core/SkillsProcessor')));
            // Propagate skills for each nested .ruler directory
            for (const configEntry of hierarchicalConfigs) {
                const nestedRoot = path.dirname(configEntry.rulerDir);
                (0, constants_1.logVerbose)(`Propagating skills for nested directory: ${nestedRoot}`, verbose);
                await propagateSkills(nestedRoot, selectedAgents, skillsEnabledResolved, verbose, dryRun);
            }
        }
        generatedPaths = await (0, apply_engine_1.processHierarchicalConfigurations)(selectedAgents, hierarchicalConfigs, verbose, dryRun, cliMcpEnabled, cliMcpStrategy, backup);
    }
    else {
        const singleConfig = await (0, apply_engine_1.loadSingleConfiguration)(projectRoot, configPath, localOnly);
        loadedConfig = singleConfig.config;
        singleConfig.config.cliAgents = includedAgents;
        (0, constants_1.logVerbose)(`Loaded configuration with ${Object.keys(singleConfig.config.agentConfigs).length} agent configs`, verbose);
        (0, constants_1.logVerbose)(`Found .ruler directory with ${singleConfig.concatenatedRules.length} characters of rules`, verbose);
        normalizeAgentConfigs(singleConfig.config, agents);
        selectedAgents = (0, agent_selection_1.resolveSelectedAgents)(singleConfig.config, agents);
        (0, constants_1.logVerbose)(`Selected ${selectedAgents.length} agents: ${selectedAgents.map((a) => a.getName()).join(', ')}`, verbose);
        // Propagate skills if enabled
        const skillsEnabledResolved = resolveSkillsEnabled(skillsEnabled, singleConfig.config.skills?.enabled);
        if (skillsEnabledResolved) {
            const { propagateSkills } = await Promise.resolve().then(() => __importStar(require('./core/SkillsProcessor')));
            await propagateSkills(projectRoot, selectedAgents, skillsEnabledResolved, verbose, dryRun);
        }
        generatedPaths = await (0, apply_engine_1.processSingleConfiguration)(selectedAgents, singleConfig, projectRoot, verbose, dryRun, cliMcpEnabled, cliMcpStrategy, backup);
    }
    // Add skills-generated paths to gitignore if skills are enabled
    let allGeneratedPaths = generatedPaths;
    const skillsEnabledForGitignore = resolveSkillsEnabled(skillsEnabled, loadedConfig.skills?.enabled);
    if (skillsEnabledForGitignore) {
        // Skills enabled by default or explicitly
        const { getSkillsGitignorePaths } = await Promise.resolve().then(() => __importStar(require('./core/SkillsProcessor')));
        const skillsPaths = await getSkillsGitignorePaths(projectRoot, selectedAgents);
        allGeneratedPaths = [...generatedPaths, ...skillsPaths];
    }
    await (0, apply_engine_1.updateGitignore)(projectRoot, allGeneratedPaths, loadedConfig, cliGitignoreEnabled, dryRun, cliGitignoreLocal);
}
|
||||
/**
 * Normalizes per-agent config keys to agent identifiers for consistent lookup.
 * Maps both exact identifier matches and substring matches with agent names.
 * @param config The configuration object to normalize (mutated in place)
 * @param agents Array of available agents
 */
function normalizeAgentConfigs(config, agents) {
    const normalized = (0, config_utils_1.mapRawAgentConfigs)(config.agentConfigs, agents);
    config.agentConfigs = normalized;
}
|
||||
/**
 * Picks the configuration entry whose `.ruler` directory is closest to the
 * project root (shallowest path depth) among entries located inside the root.
 * @param configurations Non-empty array of { rulerDir, config } entries.
 * @param projectRoot Project root used to filter candidates.
 * @returns The shallowest entry under projectRoot; configurations[0] when none qualify.
 * @throws Error when configurations is empty.
 */
function selectRootConfiguration(configurations, projectRoot) {
    if (configurations.length === 0) {
        throw new Error('No hierarchical configurations available');
    }
    const normalizedProjectRoot = path.resolve(projectRoot);
    // BUG FIX: a bare startsWith() prefix test treated a sibling directory such
    // as "<root>-extra" as being inside the root. Require an exact match or a
    // path-separator boundary after the root prefix.
    const isInsideRoot = (dir) => dir === normalizedProjectRoot ||
        dir.startsWith(normalizedProjectRoot + path.sep);
    let bestIndex = -1;
    let bestDepth = Number.POSITIVE_INFINITY;
    for (let i = 0; i < configurations.length; i++) {
        const normalizedDir = path.resolve(configurations[i].rulerDir);
        if (!isInsideRoot(normalizedDir)) {
            continue;
        }
        // Depth = number of path components; strictly-less keeps the first
        // (shallowest) candidate on ties, as before.
        const depth = normalizedDir.split(path.sep).length;
        if (depth < bestDepth) {
            bestDepth = depth;
            bestIndex = i;
        }
    }
    return bestIndex === -1 ? configurations[0] : configurations[bestIndex];
}
|
||||
+65
@@ -0,0 +1,65 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getAgentMcpCapabilities = getAgentMcpCapabilities;
|
||||
exports.agentSupportsMcp = agentSupportsMcp;
|
||||
exports.filterMcpConfigForAgent = filterMcpConfigForAgent;
|
||||
/**
 * Derives MCP capabilities for an agent.
 * Agents expose the optional probes supportsMcpStdio()/supportsMcpRemote();
 * missing probes (or nullish results) are treated as "not supported".
 */
function getAgentMcpCapabilities(agent) {
    const supportsStdio = agent.supportsMcpStdio?.() ?? false;
    const supportsRemote = agent.supportsMcpRemote?.() ?? false;
    return { supportsStdio, supportsRemote };
}
|
||||
/**
 * Checks if an agent supports any MCP functionality (stdio or remote).
 */
function agentSupportsMcp(agent) {
    const { supportsStdio, supportsRemote } = getAgentMcpCapabilities(agent);
    return supportsStdio || supportsRemote;
}
|
||||
/**
 * Filters MCP configuration based on agent capabilities.
 * Keeps stdio servers for stdio-capable agents and remote servers for
 * remote-capable agents; remote servers offered to a stdio-only agent are
 * rewritten to run through the `mcp-remote` bridge. Mixed entries (both
 * `command` and `url`) are dropped.
 * @returns A config with only the usable servers, or null when nothing remains.
 */
function filterMcpConfigForAgent(mcpConfig, agent) {
    const capabilities = getAgentMcpCapabilities(agent);
    if (!agentSupportsMcp(agent)) {
        return null;
    }
    const servers = mcpConfig.mcpServers;
    if (!servers) {
        return null;
    }
    const kept = {};
    for (const [serverName, serverConfig] of Object.entries(servers)) {
        const config = serverConfig;
        const hasCommand = 'command' in config;
        const hasUrl = 'url' in config;
        const isStdio = hasCommand && !hasUrl;
        const isRemote = hasUrl && !hasCommand;
        if ((isStdio && capabilities.supportsStdio) ||
            (isRemote && capabilities.supportsRemote)) {
            kept[serverName] = serverConfig;
            continue;
        }
        if (isRemote && capabilities.supportsStdio) {
            // Remote -> stdio bridge via mcp-remote; the trailing spread lets the
            // original entry's non-url keys (e.g. env) override the defaults.
            kept[serverName] = {
                command: 'npx',
                args: ['-y', 'mcp-remote@latest', config.url],
                ...Object.fromEntries(Object.entries(config).filter(([key]) => key !== 'url')),
            };
        }
        // Note: Mixed servers (both command and url) are excluded
    }
    return Object.keys(kept).length > 0
        ? { mcpServers: kept }
        : null;
}
|
||||
+39
@@ -0,0 +1,39 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.mergeMcp = mergeMcp;
|
||||
/**
 * Merge native and incoming MCP server configurations according to strategy.
 * Servers are looked up under `serverKey`, then the legacy `mcpServers` key,
 * then `mcp`, in that order.
 * @param base Existing native MCP config object.
 * @param incoming Ruler MCP config object.
 * @param strategy Merge strategy: 'merge' to union servers (incoming wins on
 *        name clashes), 'overwrite' to replace entirely.
 * @param serverKey The key to use for servers in the output (e.g. 'servers'
 *        for Copilot, 'mcpServers' for others).
 * @returns Merged MCP config object.
 */
function mergeMcp(base, incoming, strategy, serverKey) {
    // Accept any of the known server-map keys on input.
    const serversOf = (cfg) => cfg[serverKey] || cfg.mcpServers || cfg.mcp || {};
    if (strategy === 'overwrite') {
        return { [serverKey]: serversOf(incoming) };
    }
    const mergedServers = { ...serversOf(base), ...serversOf(incoming) };
    const rest = { ...base };
    // Drop the legacy key so the servers aren't duplicated under two names.
    // NOTE(review): a legacy `mcp` key on base is NOT removed here — confirm
    // whether that is intentional before changing it.
    delete rest.mcpServers;
    return {
        ...rest,
        [serverKey]: mergedServers,
    };
}
|
||||
+122
@@ -0,0 +1,122 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.propagateMcpToOpenCode = propagateMcpToOpenCode;
|
||||
const fs = __importStar(require("fs/promises"));
|
||||
const FileSystemUtils_1 = require("../core/FileSystemUtils");
|
||||
const path = __importStar(require("path"));
|
||||
// True when the entry looks like a local (stdio) MCP server: it carries a
// `command` string or argv array. Falsy inputs are returned unchanged.
function isLocalServer(value) {
    const server = value;
    if (!server) {
        return server;
    }
    return typeof server.command === 'string' || Array.isArray(server.command);
}
|
||||
// True when the entry looks like a remote MCP server: it carries a `url`
// string. Falsy inputs are returned unchanged.
function isRemoteServer(value) {
    const server = value;
    if (!server) {
        return server;
    }
    return typeof server.url === 'string';
}
|
||||
/**
 * Transform ruler MCP configuration to OpenCode's specific format.
 * Remote servers become `{ type: 'remote', url, headers?, timeout? }`; local
 * servers become `{ type: 'local', command: [cmd, ...args], environment?,
 * timeout? }`. Entries that are neither shape are skipped.
 */
function transformToOpenCodeFormat(rulerMcp) {
    const sourceServers = rulerMcp.mcpServers || {};
    const mcp = {};
    for (const [name, serverDef] of Object.entries(sourceServers)) {
        const entry = {
            type: 'local',
            enabled: true,
        };
        if (isRemoteServer(serverDef)) {
            entry.type = 'remote';
            entry.url = serverDef.url;
            if (serverDef.headers) {
                entry.headers = serverDef.headers;
            }
        }
        else if (isLocalServer(serverDef)) {
            // OpenCode takes a single argv array; fold args into the command.
            const commandParts = Array.isArray(serverDef.command)
                ? serverDef.command
                : [serverDef.command];
            entry.command = [...commandParts, ...(serverDef.args || [])];
            if (serverDef.env) {
                entry.environment = serverDef.env;
            }
        }
        else {
            continue; // unrecognized server shape — skip it
        }
        if (typeof serverDef.timeout === 'number') {
            entry.timeout = serverDef.timeout;
        }
        mcp[name] = entry;
    }
    return {
        $schema: 'https://opencode.ai/config.json',
        mcp,
    };
}
|
||||
/**
 * Writes ruler's MCP servers into the OpenCode config file, merging with any
 * existing config (non-MCP settings are preserved; ruler servers win on name
 * clashes). Optionally backs up the target file first.
 */
async function propagateMcpToOpenCode(rulerMcpData, openCodeConfigPath, backup = true) {
    const rulerMcp = rulerMcpData || {};
    // Load the current OpenCode config so unrelated settings survive the merge.
    let existingConfig = {};
    try {
        existingConfig = JSON.parse(await fs.readFile(openCodeConfigPath, 'utf8'));
    }
    catch {
        // File doesn't exist, we'll create it
    }
    const transformed = transformToOpenCodeFormat(rulerMcp);
    const finalConfig = {
        ...existingConfig,
        $schema: transformed.$schema,
        mcp: {
            ...existingConfig.mcp,
            ...transformed.mcp,
        },
    };
    await (0, FileSystemUtils_1.ensureDirExists)(path.dirname(openCodeConfigPath));
    if (backup) {
        const { backupFile } = await Promise.resolve().then(() => __importStar(require('../core/FileSystemUtils')));
        await backupFile(openCodeConfigPath);
    }
    await fs.writeFile(openCodeConfigPath, JSON.stringify(finalConfig, null, 2) + '\n');
}
|
||||
+169
@@ -0,0 +1,169 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.propagateMcpToOpenHands = propagateMcpToOpenHands;
|
||||
const fs = __importStar(require("fs/promises"));
|
||||
const toml_1 = require("@iarna/toml");
|
||||
const FileSystemUtils_1 = require("../core/FileSystemUtils");
|
||||
const path = __importStar(require("path"));
|
||||
// True when the entry is a usable ruler MCP server definition: it has either a
// `command` string (stdio) or a `url` string (remote). Falsy inputs pass through.
function isRulerMcpServer(value) {
    const server = value;
    if (!server) {
        return server;
    }
    return typeof server.command === 'string' || typeof server.url === 'string';
}
|
||||
// Heuristic: URLs containing a "/sse" path segment (case-insensitive) are
// classified as SSE transports; everything else is streamable HTTP.
function classifyRemoteServer(url) {
    const ssePattern = /\/sse(\/|$)/i;
    return ssePattern.test(url) ? 'sse' : 'shttp';
}
|
||||
/**
 * Pulls a Bearer token out of a headers map, but only when it is safe to
 * collapse the headers into a bare api_key: either Authorization is the only
 * header, or it is accompanied by at most one standard content header
 * (Content-Type/Accept). Returns null otherwise.
 */
function extractApiKey(headers) {
    if (!headers) {
        return null;
    }
    const authHeader = headers.Authorization || headers.authorization;
    if (!authHeader || !authHeader.startsWith('Bearer ')) {
        return null;
    }
    const headerNames = Object.keys(headers);
    const standardContentHeader = headers['Content-Type'] ||
        headers['content-type'] ||
        headers['Accept'] ||
        headers['accept'];
    const collapsible = headerNames.length === 1 ||
        (headerNames.length <= 2 && standardContentHeader);
    if (!collapsible) {
        return null;
    }
    return authHeader.substring('Bearer '.length);
}
|
||||
// Builds an OpenHands remote-server entry: a `{ url, api_key }` object when a
// Bearer token can be extracted from the headers, else just the URL string.
function createRemoteServerEntry(url, headers) {
    const apiKey = extractApiKey(headers);
    return apiKey ? { url, api_key: apiKey } : url;
}
|
||||
/**
 * TOML doesn't support mixed types in arrays, so remote-server entries must be
 * consistent: if any entry is an object, plain URL strings are wrapped as
 * { url } objects; an all-string array is returned untouched.
 */
function normalizeRemoteServerArray(entries) {
    const needsObjects = entries.some((entry) => typeof entry === 'object');
    if (!needsObjects) {
        // All entries are strings, keep as is
        return entries;
    }
    return entries.map((entry) => (typeof entry === 'string' ? { url: entry } : entry));
}
|
||||
/**
 * Merge Ruler's MCP server definitions into an Open Hands `config.toml`,
 * preserving any servers already present in that file. Stdio servers are
 * keyed (and deduplicated) by name; remote servers by URL, split into SSE
 * and streamable-HTTP lists via classifyRemoteServer.
 *
 * @param rulerMcpData Parsed Ruler MCP config (legacy shape with a top-level
 *   "mcpServers" map); may be null/undefined.
 * @param openHandsConfigPath Path of the Open Hands TOML config to update.
 * @param backup When true, the existing file is backed up before writing.
 */
async function propagateMcpToOpenHands(rulerMcpData, openHandsConfigPath, backup = true) {
    const rulerMcp = rulerMcpData || {};
    // Always use the legacy Ruler MCP config format as input (top-level "mcpServers" key)
    const rulerServers = rulerMcp.mcpServers || {};
    // Return early if no servers to process
    if (!rulerServers ||
        typeof rulerServers !== 'object' ||
        Object.keys(rulerServers).length === 0) {
        return;
    }
    // Load the existing TOML config; a read/parse failure is treated as
    // "file not there yet" and we start from an empty config.
    let config = {};
    try {
        const tomlContent = await fs.readFile(openHandsConfigPath, 'utf8');
        config = (0, toml_1.parse)(tomlContent);
    }
    catch {
        // File doesn't exist, we'll create it.
    }
    // Ensure the [mcp] table and its three server arrays exist.
    if (!config.mcp) {
        config.mcp = {};
    }
    if (!config.mcp.stdio_servers) {
        config.mcp.stdio_servers = [];
    }
    if (!config.mcp.sse_servers) {
        config.mcp.sse_servers = [];
    }
    if (!config.mcp.shttp_servers) {
        config.mcp.shttp_servers = [];
    }
    // Build maps for merging existing servers
    const existingStdioServers = new Map(config.mcp.stdio_servers.map((s) => [s.name, s]));
    // Remote entries may be bare URL strings or { url, ... } objects; key
    // both forms by URL so Ruler's definitions can override them.
    const existingSseServers = new Map();
    config.mcp.sse_servers.forEach((entry) => {
        const url = typeof entry === 'string' ? entry : entry.url;
        existingSseServers.set(url, entry);
    });
    const existingShttpServers = new Map();
    config.mcp.shttp_servers.forEach((entry) => {
        const url = typeof entry === 'string' ? entry : entry.url;
        existingShttpServers.set(url, entry);
    });
    // Fold Ruler's servers into the maps; later definitions win over the
    // entries loaded from disk.
    for (const [name, serverDef] of Object.entries(rulerServers)) {
        if (isRulerMcpServer(serverDef)) {
            if (serverDef.command) {
                // Stdio server
                const { command, args, env } = serverDef;
                const newServer = { name, command };
                if (args)
                    newServer.args = args;
                if (env)
                    newServer.env = env;
                existingStdioServers.set(name, newServer);
            }
            else if (serverDef.url) {
                // Remote server
                const classification = classifyRemoteServer(serverDef.url);
                const entry = createRemoteServerEntry(serverDef.url, serverDef.headers);
                if (classification === 'sse') {
                    existingSseServers.set(serverDef.url, entry);
                }
                else {
                    existingShttpServers.set(serverDef.url, entry);
                }
            }
        }
    }
    // Convert maps back to arrays and normalize for TOML compatibility
    config.mcp.stdio_servers = Array.from(existingStdioServers.values());
    config.mcp.sse_servers = normalizeRemoteServerArray(Array.from(existingSseServers.values()));
    config.mcp.shttp_servers = normalizeRemoteServerArray(Array.from(existingShttpServers.values()));
    await (0, FileSystemUtils_1.ensureDirExists)(path.dirname(openHandsConfigPath));
    if (backup) {
        // Lazily import the backup helper so it is only loaded when needed.
        const { backupFile } = await Promise.resolve().then(() => __importStar(require('../core/FileSystemUtils')));
        await backupFile(openHandsConfigPath);
    }
    await fs.writeFile(openHandsConfigPath, (0, toml_1.stringify)(config));
}
|
||||
+17
@@ -0,0 +1,17 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.validateMcp = validateMcp;
|
||||
/**
|
||||
* Validate the structure of the Ruler MCP JSON config.
|
||||
* Minimal validation: ensure 'mcpServers' property exists and is an object.
|
||||
* @param data Parsed JSON object from .ruler/mcp.json.
|
||||
* @throws Error if validation fails.
|
||||
*/
|
||||
/**
 * Validate the structure of the Ruler MCP JSON config.
 * Minimal validation: ensure 'mcpServers' property exists and is a non-null,
 * non-array object (a map of server name -> definition).
 * @param data Parsed JSON object from .ruler/mcp.json.
 * @throws Error if validation fails.
 */
function validateMcp(data) {
    // `typeof null === 'object'` and `typeof [] === 'object'`, so a bare
    // typeof check would wrongly accept `"mcpServers": null` or an array;
    // reject both explicitly.
    if (!data ||
        typeof data !== 'object' ||
        !('mcpServers' in data) ||
        typeof data.mcpServers !== 'object' ||
        data.mcpServers === null ||
        Array.isArray(data.mcpServers)) {
        throw new Error('[ruler] Invalid MCP config: must contain an object property "mcpServers" (Ruler style)');
    }
}
|
||||
+129
@@ -0,0 +1,129 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.getNativeMcpPath = getNativeMcpPath;
|
||||
exports.readNativeMcp = readNativeMcp;
|
||||
exports.writeNativeMcp = writeNativeMcp;
|
||||
const path = __importStar(require("path"));
|
||||
const fs_1 = require("fs");
|
||||
/**
 * Determine the native MCP config path for a given agent.
 * Candidates (in priority order) are probed on disk; the first existing path
 * wins, otherwise the preferred (first) candidate is returned. Unknown agent
 * names yield null.
 */
async function getNativeMcpPath(adapterName, projectRoot) {
    // Project-relative path segments for each supported agent's MCP config
    // file(s), in priority order.
    const candidateSegments = {
        'GitHub Copilot': [['.vscode', 'mcp.json']],
        'Visual Studio': [['.mcp.json'], ['.vs', 'mcp.json']],
        Cursor: [['.cursor', 'mcp.json']],
        Windsurf: [['.windsurf', 'mcp_config.json']],
        'Claude Code': [['.mcp.json']],
        'OpenAI Codex CLI': [['.codex', 'config.toml']],
        Aider: [['.mcp.json']],
        // For Open Hands, we target the main config file, not a separate mcp.json
        'Open Hands': [['config.toml']],
        'Gemini CLI': [['.gemini', 'settings.json']],
        Junie: [['.junie', 'mcp', 'mcp.json']],
        'Qwen Code': [['.qwen', 'settings.json']],
        'Kilo Code': [['.kilocode', 'mcp.json']],
        Kiro: [['.kiro', 'settings', 'mcp.json']],
        OpenCode: [['opencode.json']],
        'Firebase Studio': [['.idx', 'mcp.json']],
        'Factory Droid': [['.factory', 'mcp.json']],
        // Only consider project-local Zed settings (avoid writing to user home directory)
        Zed: [['.zed', 'settings.json']],
    };
    // Own-property check so inherited keys (e.g. 'toString') never match.
    if (!Object.hasOwn(candidateSegments, adapterName)) {
        return null;
    }
    const candidates = candidateSegments[adapterName].map((parts) => path.join(projectRoot, ...parts));
    // Prefer the first candidate that already exists on disk.
    for (const candidate of candidates) {
        try {
            await fs_1.promises.access(candidate);
            return candidate;
        }
        catch {
            // not present; try the next candidate
        }
    }
    // default to first candidate if none exist
    return candidates.length > 0 ? candidates[0] : null;
}
|
||||
/** Read native MCP config from disk, or return empty object if missing/invalid. */
async function readNativeMcp(filePath) {
    let parsed;
    try {
        const raw = await fs_1.promises.readFile(filePath, 'utf8');
        parsed = JSON.parse(raw);
    }
    catch {
        // Missing file or malformed JSON both fall back to an empty config.
        parsed = {};
    }
    return parsed;
}
|
||||
/** Write native MCP config to disk, creating parent directories as needed. */
async function writeNativeMcp(filePath, data) {
    // Pretty-print with 2-space indent and a trailing newline.
    const serialized = `${JSON.stringify(data, null, 2)}\n`;
    await fs_1.promises.mkdir(path.dirname(filePath), { recursive: true });
    await fs_1.promises.writeFile(filePath, serialized, 'utf8');
}
|
||||
+186
@@ -0,0 +1,186 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.allAgents = void 0;
|
||||
exports.revertAllAgentConfigs = revertAllAgentConfigs;
|
||||
const path = __importStar(require("path"));
|
||||
const fs_1 = require("fs");
|
||||
const FileSystemUtils = __importStar(require("./core/FileSystemUtils"));
|
||||
const ConfigLoader_1 = require("./core/ConfigLoader");
|
||||
const agents_1 = require("./agents");
|
||||
Object.defineProperty(exports, "allAgents", { enumerable: true, get: function () { return agents_1.allAgents; } });
|
||||
const constants_1 = require("./constants");
|
||||
const revert_engine_1 = require("./core/revert-engine");
|
||||
const agent_selection_1 = require("./core/agent-selection");
|
||||
const config_utils_1 = require("./core/config-utils");
|
||||
const agents = agents_1.allAgents;
|
||||
/**
 * Reverts ruler configurations for selected AI agents: restores files from
 * backups, removes generated files, cleans up auxiliary files/directories,
 * and (when reverting all agents) strips the ruler-managed .gitignore block.
 *
 * @param projectRoot Root of the project whose agent configs are reverted.
 * @param includedAgents Optional CLI-provided agent name filters.
 * @param configPath Optional explicit path to the ruler config file.
 * @param keepBackups When true, backup files are kept after restoring.
 * @param verbose Emit detailed progress logging.
 * @param dryRun Log intended actions without modifying the filesystem.
 * @param localOnly Passed (inverted) to findRulerDir to limit the .ruler
 *   directory search — presumably disabling the upward/global search; verify
 *   against FileSystemUtils.findRulerDir.
 * @throws When no .ruler directory can be found.
 */
async function revertAllAgentConfigs(projectRoot, includedAgents, configPath, keepBackups = false, verbose = false, dryRun = false, localOnly = false) {
    (0, constants_1.logVerbose)(`Loading configuration for revert from project root: ${projectRoot}`, verbose);
    const config = await (0, ConfigLoader_1.loadConfig)({
        projectRoot,
        cliAgents: includedAgents,
        configPath,
    });
    const rulerDir = await FileSystemUtils.findRulerDir(projectRoot, !localOnly);
    if (!rulerDir) {
        throw (0, constants_1.createRulerError)(`.ruler directory not found`, `Searched from: ${projectRoot}`);
    }
    (0, constants_1.logVerbose)(`Found .ruler directory at: ${rulerDir}`, verbose);
    // Normalize per-agent config keys to agent identifiers
    config.agentConfigs = (0, config_utils_1.mapRawAgentConfigs)(config.agentConfigs, agents);
    // Select agents to revert (same logic as apply, but with backward compatibility for invalid agents)
    let selected;
    try {
        selected = (0, agent_selection_1.resolveSelectedAgents)(config, agents);
    }
    catch (error) {
        // For backward compatibility, revert continues with available agents if some are invalid
        // This preserves the original behavior where invalid agents were silently ignored
        if (error instanceof Error &&
            error.message.includes('Invalid agent specified')) {
            (0, constants_1.logVerbose)(`Warning: ${error.message} - continuing with valid agents only`, verbose);
            // Fall back to the old logic without validation
            if (config.cliAgents && config.cliAgents.length > 0) {
                // CLI filters match by exact identifier or name substring.
                const filters = config.cliAgents.map((n) => n.toLowerCase());
                selected = agents.filter((agent) => filters.some((f) => agent.getIdentifier() === f ||
                    agent.getName().toLowerCase().includes(f)));
            }
            else if (config.defaultAgents && config.defaultAgents.length > 0) {
                // Per-agent enabled overrides take precedence over the default list.
                const defaults = config.defaultAgents.map((n) => n.toLowerCase());
                selected = agents.filter((agent) => {
                    const identifier = agent.getIdentifier();
                    const override = config.agentConfigs[identifier]?.enabled;
                    if (override !== undefined) {
                        return override;
                    }
                    return defaults.some((d) => identifier === d || agent.getName().toLowerCase().includes(d));
                });
            }
            else {
                // No filters: include every agent not explicitly disabled.
                selected = agents.filter((agent) => config.agentConfigs[agent.getIdentifier()]?.enabled !== false);
            }
        }
        else {
            throw error;
        }
    }
    (0, constants_1.logVerbose)(`Selected agents: ${selected.map((a) => a.getName()).join(', ')}`, verbose);
    // Revert configurations for each agent
    let totalFilesProcessed = 0;
    let totalFilesRestored = 0;
    let totalFilesRemoved = 0;
    let totalBackupsRemoved = 0;
    for (const agent of selected) {
        const prefix = (0, constants_1.actionPrefix)(dryRun);
        console.log(`${prefix} Reverting ${agent.getName()}...`);
        const agentConfig = config.agentConfigs[agent.getIdentifier()];
        const result = await (0, revert_engine_1.revertAgentConfiguration)(agent, projectRoot, agentConfig, keepBackups, verbose, dryRun);
        totalFilesProcessed += result.restored + result.removed;
        totalFilesRestored += result.restored;
        totalFilesRemoved += result.removed;
        totalBackupsRemoved += result.backupsRemoved;
    }
    // Clean up auxiliary files and directories
    const cleanupResult = await (0, revert_engine_1.cleanUpAuxiliaryFiles)(projectRoot, verbose, dryRun);
    totalFilesRemoved += cleanupResult.additionalFilesRemoved;
    // Clean .gitignore if reverting all agents
    const gitignoreCleaned = !config.cliAgents || config.cliAgents.length === 0
        ? await cleanGitignore(projectRoot, verbose, dryRun)
        : false;
    // Display summary
    const prefix = (0, constants_1.actionPrefix)(dryRun);
    if (dryRun) {
        console.log(`${prefix} Revert summary (dry run):`);
    }
    else {
        console.log(`${prefix} Revert completed successfully.`);
    }
    console.log(` Files processed: ${totalFilesProcessed}`);
    console.log(` Files restored from backup: ${totalFilesRestored}`);
    console.log(` Generated files removed: ${totalFilesRemoved}`);
    if (!keepBackups) {
        console.log(` Backup files removed: ${totalBackupsRemoved}`);
    }
    if (cleanupResult.directoriesRemoved > 0) {
        console.log(` Empty directories removed: ${cleanupResult.directoriesRemoved}`);
    }
    if (gitignoreCleaned) {
        console.log(` .gitignore cleaned: yes`);
    }
}
|
||||
/**
 * Removes the ruler-managed block (between the START/END marker comments)
 * from the project's .gitignore file.
 *
 * @param projectRoot Directory containing the .gitignore to clean.
 * @param verbose Emit detailed progress logging.
 * @param dryRun Report what would change without writing to disk.
 * @returns true when a ruler-managed block was found (and, unless dryRun,
 *   removed); false when there is no .gitignore or no marker block.
 */
async function cleanGitignore(projectRoot, verbose, dryRun) {
    const gitignorePath = path.join(projectRoot, '.gitignore');
    try {
        await fs_1.promises.access(gitignorePath);
    }
    catch {
        (0, constants_1.logVerbose)('No .gitignore file found', verbose);
        return false;
    }
    const content = await fs_1.promises.readFile(gitignorePath, 'utf8');
    const startMarker = '# START Ruler Generated Files';
    const endMarker = '# END Ruler Generated Files';
    const startIndex = content.indexOf(startMarker);
    // Search for the end marker only AFTER the start marker: a stray end
    // marker earlier in the file would otherwise produce endIndex < startIndex
    // and garble the rewritten file.
    const endIndex = startIndex === -1
        ? -1
        : content.indexOf(endMarker, startIndex + startMarker.length);
    if (startIndex === -1 || endIndex === -1) {
        (0, constants_1.logVerbose)('No ruler-managed block found in .gitignore', verbose);
        return false;
    }
    const prefix = (0, constants_1.actionPrefix)(dryRun);
    if (dryRun) {
        (0, constants_1.logVerbose)(`${prefix} Would remove ruler block from .gitignore`, verbose);
    }
    else {
        const beforeBlock = content.substring(0, startIndex);
        const afterBlock = content.substring(endIndex + endMarker.length);
        let newContent = beforeBlock + afterBlock;
        newContent = newContent.replace(/\n{3,}/g, '\n\n'); // Replace 3+ newlines with 2
        if (newContent.trim() === '') {
            // Nothing but the ruler block remained: drop the file entirely.
            await fs_1.promises.unlink(gitignorePath);
            (0, constants_1.logVerbose)(`${prefix} Removed empty .gitignore file`, verbose);
        }
        else {
            await fs_1.promises.writeFile(gitignorePath, newContent);
            (0, constants_1.logVerbose)(`${prefix} Removed ruler block from .gitignore`, verbose);
        }
    }
    return true;
}
|
||||
+2
@@ -0,0 +1,2 @@
|
||||
"use strict";
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
+117
@@ -0,0 +1,117 @@
|
||||
"use strict";
|
||||
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
var desc = Object.getOwnPropertyDescriptor(m, k);
|
||||
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
|
||||
desc = { enumerable: true, get: function() { return m[k]; } };
|
||||
}
|
||||
Object.defineProperty(o, k2, desc);
|
||||
}) : (function(o, m, k, k2) {
|
||||
if (k2 === undefined) k2 = k;
|
||||
o[k2] = m[k];
|
||||
}));
|
||||
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
|
||||
Object.defineProperty(o, "default", { enumerable: true, value: v });
|
||||
}) : function(o, v) {
|
||||
o["default"] = v;
|
||||
});
|
||||
var __importStar = (this && this.__importStar) || (function () {
|
||||
var ownKeys = function(o) {
|
||||
ownKeys = Object.getOwnPropertyNames || function (o) {
|
||||
var ar = [];
|
||||
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
|
||||
return ar;
|
||||
};
|
||||
return ownKeys(o);
|
||||
};
|
||||
return function (mod) {
|
||||
if (mod && mod.__esModule) return mod;
|
||||
var result = {};
|
||||
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
|
||||
__setModuleDefault(result, mod);
|
||||
return result;
|
||||
};
|
||||
})();
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.readVSCodeSettings = readVSCodeSettings;
|
||||
exports.writeVSCodeSettings = writeVSCodeSettings;
|
||||
exports.transformRulerToAugmentMcp = transformRulerToAugmentMcp;
|
||||
exports.mergeAugmentMcpServers = mergeAugmentMcpServers;
|
||||
exports.getVSCodeSettingsPath = getVSCodeSettingsPath;
|
||||
const fs_1 = require("fs");
|
||||
const path = __importStar(require("path"));
|
||||
/**
 * Read VSCode settings.json file.
 * A missing file yields an empty settings object; any other read or parse
 * failure propagates to the caller.
 */
async function readVSCodeSettings(settingsPath) {
    let raw;
    try {
        raw = await fs_1.promises.readFile(settingsPath, 'utf8');
    }
    catch (error) {
        if (error.code !== 'ENOENT') {
            throw error; // unexpected I/O failure
        }
        return {};
    }
    return JSON.parse(raw);
}
|
||||
/**
 * Write VSCode settings.json file.
 * Parent directories are created on demand; output uses 4-space indentation.
 */
async function writeVSCodeSettings(settingsPath, settings) {
    const parentDir = path.dirname(settingsPath);
    await fs_1.promises.mkdir(parentDir, { recursive: true });
    const serialized = JSON.stringify(settings, null, 4);
    await fs_1.promises.writeFile(settingsPath, serialized);
}
|
||||
/**
 * Transform ruler MCP config to Augment MCP server array format.
 * Each entry carries the server name plus its command, with args/env copied
 * through only when present.
 */
function transformRulerToAugmentMcp(rulerMcpJson) {
    const { mcpServers } = rulerMcpJson;
    if (!mcpServers || typeof mcpServers !== 'object') {
        return [];
    }
    return Object.entries(mcpServers).map(([name, serverConfig]) => {
        const augmentServer = { name, command: serverConfig.command };
        if (serverConfig.args) {
            augmentServer.args = serverConfig.args;
        }
        if (serverConfig.env) {
            augmentServer.env = serverConfig.env;
        }
        return augmentServer;
    });
}
|
||||
/**
 * Merge MCP servers into VSCode settings using the specified strategy.
 * 'overwrite' replaces the server list wholesale; any other strategy merges
 * by server name, with new definitions winning over existing ones. The input
 * settings object is never mutated.
 */
function mergeAugmentMcpServers(existingSettings, newServers, strategy) {
    const merged = structuredClone(existingSettings);
    if (!merged['augment.advanced']) {
        merged['augment.advanced'] = {};
    }
    const advanced = merged['augment.advanced'];
    if (strategy === 'overwrite') {
        advanced.mcpServers = newServers;
        return merged;
    }
    // Merge by name: existing servers first, then new ones replace same-named
    // entries while preserving the original ordering.
    const byName = new Map();
    for (const server of advanced.mcpServers || []) {
        byName.set(server.name, server);
    }
    for (const server of newServers) {
        byName.set(server.name, server);
    }
    advanced.mcpServers = [...byName.values()];
    return merged;
}
|
||||
/**
 * Get the VSCode settings.json path for a project (local).
 */
function getVSCodeSettingsPath(projectRoot) {
    const settingsRelative = ['.vscode', 'settings.json'];
    return path.join(projectRoot, ...settingsRelative);
}
|
||||
+72
@@ -0,0 +1,72 @@
|
||||
{
|
||||
"name": "@intellectronica/ruler",
|
||||
"version": "0.3.38",
|
||||
"description": "Ruler — apply the same rules to all coding agents",
|
||||
"main": "dist/lib.js",
|
||||
"scripts": {
|
||||
"lint": "eslint \"src/**/*.{ts,tsx}\"",
|
||||
"format": "prettier --write \"src/**/*.{ts,tsx,json,md}\"",
|
||||
"test": "jest",
|
||||
"test:watch": "jest --watch",
|
||||
"test:coverage": "jest --coverage",
|
||||
"test:integration": "jest tests/e2e/ruler.integration.test.ts --verbose",
|
||||
"build": "tsc",
|
||||
"prepare": "npm run build"
|
||||
},
|
||||
"repository": {
|
||||
"type": "git",
|
||||
"url": "git+https://github.com/intellectronica/ruler.git"
|
||||
},
|
||||
"keywords": [
|
||||
"ai",
|
||||
"developer-tools",
|
||||
"copilot",
|
||||
"codex",
|
||||
"claude",
|
||||
"cursor",
|
||||
"aider",
|
||||
"config",
|
||||
"rules",
|
||||
"automation"
|
||||
],
|
||||
"author": "Eleanor Berger",
|
||||
"license": "MIT",
|
||||
"bugs": {
|
||||
"url": "https://github.com/intellectronica/ruler/issues"
|
||||
},
|
||||
"homepage": "https://ai.intellectronica.net/ruler",
|
||||
"engines": {
|
||||
"node": "^20.19.0 || ^22.12.0 || >=23"
|
||||
},
|
||||
"files": [
|
||||
"dist",
|
||||
"README.md",
|
||||
"LICENSE"
|
||||
],
|
||||
"bin": {
|
||||
"ruler": "dist/cli/index.js"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/iarna__toml": "^2.0.5",
|
||||
"@types/jest": "^29.5.14",
|
||||
"@types/js-yaml": "^4.0.9",
|
||||
"@types/node": "^24.9.2",
|
||||
"@types/yargs": "^17.0.34",
|
||||
"@typescript-eslint/eslint-plugin": "^8.46.2",
|
||||
"@typescript-eslint/parser": "^8.46.2",
|
||||
"eslint": "^9.38.0",
|
||||
"eslint-config-prettier": "^10.1.8",
|
||||
"eslint-plugin-prettier": "^5.5.4",
|
||||
"jest": "^29.7.0",
|
||||
"prettier": "^3.6.2",
|
||||
"ts-jest": "^29.4.5",
|
||||
"typescript": "^5.9.3",
|
||||
"typescript-eslint": "^8.46.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"@iarna/toml": "^2.2.5",
|
||||
"js-yaml": "^4.1.0",
|
||||
"yargs": "^18.0.0",
|
||||
"zod": "^4.1.12"
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user