chore: initialize recovered claude workspace

This commit is contained in:
2026-04-02 15:29:01 +08:00
commit a10efa3b4b
1940 changed files with 506426 additions and 0 deletions

View File

@@ -0,0 +1,71 @@
/**
* Reads plugin-related settings (enabledPlugins, extraKnownMarketplaces)
* from --add-dir directories.
*
* These have the LOWEST priority — callers must spread standard settings
* on top so that user/project/local/flag/policy sources all override.
*/
import { join } from 'path'
import type { z } from 'zod/v4'
import { getAdditionalDirectoriesForClaudeMd } from '../../bootstrap/state.js'
import { parseSettingsFile } from '../settings/settings.js'
import type {
ExtraKnownMarketplaceSchema,
SettingsJson,
} from '../settings/types.js'
// Element type of settings.extraKnownMarketplaces — inferred from the zod
// schema factory so it stays in sync with runtime validation.
type ExtraKnownMarketplace = z.infer<
  ReturnType<typeof ExtraKnownMarketplaceSchema>
>
// Read order within each --add-dir: settings.json first, then
// settings.local.json, so local values win via the later Object.assign.
const SETTINGS_FILES = ['settings.json', 'settings.local.json'] as const
/**
* Returns a merged record of enabledPlugins from all --add-dir directories.
*
* Within each directory, settings.local.json is processed after settings.json
* (local wins within that dir). Across directories, later CLI-order wins on
* conflict.
*
* This has the lowest priority — callers must spread their standard settings
* on top to let user/project/local/flag/policy override.
*/
export function getAddDirEnabledPlugins(): NonNullable<
  SettingsJson['enabledPlugins']
> {
  const merged: NonNullable<SettingsJson['enabledPlugins']> = {}
  // Later assignments win: settings.local.json overrides settings.json
  // within a directory, and later --add-dir entries override earlier ones.
  for (const directory of getAdditionalDirectoriesForClaudeMd()) {
    for (const fileName of SETTINGS_FILES) {
      const settingsPath = join(directory, '.claude', fileName)
      const { settings } = parseSettingsFile(settingsPath)
      const plugins = settings?.enabledPlugins
      if (plugins) Object.assign(merged, plugins)
    }
  }
  return merged
}
/**
* Returns a merged record of extraKnownMarketplaces from all --add-dir directories.
*
* Same priority rules as getAddDirEnabledPlugins: settings.local.json wins
* within each dir, and callers spread standard settings on top.
*/
export function getAddDirExtraMarketplaces(): Record<
  string,
  ExtraKnownMarketplace
> {
  const merged: Record<string, ExtraKnownMarketplace> = {}
  // Same precedence as getAddDirEnabledPlugins: later assignments win
  // (local over base within a dir; later --add-dir over earlier).
  for (const directory of getAdditionalDirectoriesForClaudeMd()) {
    for (const fileName of SETTINGS_FILES) {
      const settingsPath = join(directory, '.claude', fileName)
      const { settings } = parseSettingsFile(settingsPath)
      const marketplaces = settings?.extraKnownMarketplaces
      if (marketplaces) Object.assign(merged, marketplaces)
    }
  }
  return merged
}

View File

@@ -0,0 +1,196 @@
import { readdir, rm, stat, unlink, writeFile } from 'fs/promises'
import { join } from 'path'
import { clearCommandsCache } from '../../commands.js'
import { clearAllOutputStylesCache } from '../../constants/outputStyles.js'
import { clearAgentDefinitionsCache } from '../../tools/AgentTool/loadAgentsDir.js'
import { clearPromptCache } from '../../tools/SkillTool/prompt.js'
import { resetSentSkillNames } from '../attachments.js'
import { logForDebugging } from '../debug.js'
import { getErrnoCode } from '../errors.js'
import { logError } from '../log.js'
import { loadInstalledPluginsFromDisk } from './installedPluginsManager.js'
import { clearPluginAgentCache } from './loadPluginAgents.js'
import { clearPluginCommandCache } from './loadPluginCommands.js'
import {
clearPluginHookCache,
pruneRemovedPluginHooks,
} from './loadPluginHooks.js'
import { clearPluginOutputStyleCache } from './loadPluginOutputStyles.js'
import { clearPluginCache, getPluginCachePath } from './pluginLoader.js'
import { clearPluginOptionsCache } from './pluginOptionsStorage.js'
import { isPluginZipCacheEnabled } from './zipCache.js'
// Marker file written into an orphaned plugin-version directory; its mtime
// records when the version became orphaned (see processOrphanedPluginVersion).
const ORPHANED_AT_FILENAME = '.orphaned_at'
const CLEANUP_AGE_MS = 7 * 24 * 60 * 60 * 1000 // 7 days
/**
 * Clear every plugin-related in-memory cache (loader, commands, agents,
 * hooks, options, output styles) so the next access reloads from disk.
 */
export function clearAllPluginCaches(): void {
  clearPluginCache()
  clearPluginCommandCache()
  clearPluginAgentCache()
  clearPluginHookCache()
  // Prune hooks from plugins no longer in the enabled set so uninstalled/
  // disabled plugins stop firing immediately (gh-36995). Prune-only: hooks
  // from newly-enabled plugins are NOT added here — they wait for
  // /reload-plugins like commands/agents/MCP do. Fire-and-forget: old hooks
  // stay valid until the prune completes (preserves gh-29767). No-op when
  // STATE.registeredHooks is empty (test/preload.ts beforeEach clears it via
  // resetStateForTests before reaching here).
  pruneRemovedPluginHooks().catch(e => logError(e))
  clearPluginOptionsCache()
  clearPluginOutputStyleCache()
  clearAllOutputStylesCache()
}
/**
 * Clear plugin caches plus the non-plugin caches (slash commands, agent
 * definitions, skill prompts, sent-skill session tracking) in one sweep.
 */
export function clearAllCaches(): void {
  clearAllPluginCaches()
  clearCommandsCache()
  clearAgentDefinitionsCache()
  clearPromptCache()
  resetSentSkillNames()
}
/**
 * Stamp a plugin version directory as orphaned by writing the .orphaned_at
 * marker. Called on uninstall or when an update supersedes this version.
 * Best-effort: a failed write only delays eventual cleanup, so it is logged
 * and swallowed.
 */
export async function markPluginVersionOrphaned(
  versionPath: string,
): Promise<void> {
  const markerPath = getOrphanedAtPath(versionPath)
  try {
    await writeFile(markerPath, String(Date.now()), 'utf-8')
  } catch (error) {
    logForDebugging(`Failed to write .orphaned_at: ${versionPath}: ${error}`)
  }
}
/**
* Clean up orphaned plugin versions that have been orphaned for more than 7 days.
*
* Pass 1: Remove .orphaned_at from installed versions (clears stale markers)
* Pass 2: For each cached version not in installed_plugins.json:
* - If no .orphaned_at exists: create it (handles old CC versions, manual edits)
* - If .orphaned_at exists and > 7 days old: delete the version
*/
export async function cleanupOrphanedPluginVersionsInBackground(): Promise<void> {
  // Zip cache mode stores plugins as .zip files, not directories. readSubdirs
  // filters to directories only, so removeIfEmpty would see plugin dirs as empty
  // and delete them (including the ZIPs). Skip cleanup entirely in zip mode.
  if (isPluginZipCacheEnabled()) {
    return
  }
  try {
    const installedVersions = getInstalledVersionPaths()
    // null ⇒ installed_plugins.json could not be loaded; bail rather than
    // risk treating live versions as orphans.
    if (!installedVersions) return
    const cachePath = getPluginCachePath()
    const now = Date.now()
    // Pass 1: Remove .orphaned_at from installed versions
    // This handles cases where a plugin was reinstalled after being orphaned
    await Promise.all(
      [...installedVersions].map(p => removeOrphanedAtMarker(p)),
    )
    // Pass 2: Process orphaned versions. Cache layout is
    // <cache>/<marketplace>/<plugin>/<version>; empty parents are pruned
    // bottom-up after their children are handled.
    for (const marketplace of await readSubdirs(cachePath)) {
      const marketplacePath = join(cachePath, marketplace)
      for (const plugin of await readSubdirs(marketplacePath)) {
        const pluginPath = join(marketplacePath, plugin)
        for (const version of await readSubdirs(pluginPath)) {
          const versionPath = join(pluginPath, version)
          if (installedVersions.has(versionPath)) continue
          await processOrphanedPluginVersion(versionPath, now)
        }
        await removeIfEmpty(pluginPath)
      }
      await removeIfEmpty(marketplacePath)
    }
  } catch (error) {
    // Best-effort background task — never surface cleanup failures.
    logForDebugging(`Plugin cache cleanup failed: ${error}`)
  }
}
/** Path of the orphan marker file inside a plugin version directory. */
function getOrphanedAtPath(versionPath: string): string {
  return join(versionPath, ORPHANED_AT_FILENAME)
}
async function removeOrphanedAtMarker(versionPath: string): Promise<void> {
const orphanedAtPath = getOrphanedAtPath(versionPath)
try {
await unlink(orphanedAtPath)
} catch (error) {
const code = getErrnoCode(error)
if (code === 'ENOENT') return
logForDebugging(`Failed to remove .orphaned_at: ${versionPath}: ${error}`)
}
}
function getInstalledVersionPaths(): Set<string> | null {
try {
const paths = new Set<string>()
const diskData = loadInstalledPluginsFromDisk()
for (const installations of Object.values(diskData.plugins)) {
for (const entry of installations) {
paths.add(entry.installPath)
}
}
return paths
} catch (error) {
logForDebugging(`Failed to load installed plugins: ${error}`)
return null
}
}
async function processOrphanedPluginVersion(
versionPath: string,
now: number,
): Promise<void> {
const orphanedAtPath = getOrphanedAtPath(versionPath)
let orphanedAt: number
try {
orphanedAt = (await stat(orphanedAtPath)).mtimeMs
} catch (error) {
const code = getErrnoCode(error)
if (code === 'ENOENT') {
await markPluginVersionOrphaned(versionPath)
return
}
logForDebugging(`Failed to stat orphaned marker: ${versionPath}: ${error}`)
return
}
if (now - orphanedAt > CLEANUP_AGE_MS) {
try {
await rm(versionPath, { recursive: true, force: true })
} catch (error) {
logForDebugging(
`Failed to delete orphaned version: ${versionPath}: ${error}`,
)
}
}
}
/**
 * Delete dirPath when it contains no subdirectories — prunes empty
 * plugin/marketplace parents after their version dirs are removed.
 */
async function removeIfEmpty(dirPath: string): Promise<void> {
  const subdirs = await readSubdirs(dirPath)
  if (subdirs.length > 0) return
  try {
    await rm(dirPath, { recursive: true, force: true })
  } catch (error) {
    logForDebugging(`Failed to remove empty dir: ${dirPath}: ${error}`)
  }
}
/**
 * Names of the immediate subdirectories of dirPath. A missing or unreadable
 * directory yields [] rather than throwing.
 */
async function readSubdirs(dirPath: string): Promise<string[]> {
  try {
    const entries = await readdir(dirPath, { withFileTypes: true })
    const names: string[] = []
    for (const entry of entries) {
      if (entry.isDirectory()) names.push(entry.name)
    }
    return names
  } catch {
    return []
  }
}

View File

@@ -0,0 +1,305 @@
/**
* Plugin dependency resolution — pure functions, no I/O.
*
* Semantics are `apt`-style: a dependency is a *presence guarantee*, not a
* module graph. Plugin A depending on Plugin B means "B's namespaced
* components (MCP servers, commands, agents) must be available when A runs."
*
* Two entry points:
* - `resolveDependencyClosure` — install-time DFS walk, cycle detection
* - `verifyAndDemote` — load-time fixed-point check, demotes plugins with
* unsatisfied deps (session-local, does NOT write settings)
*/
import type { LoadedPlugin, PluginError } from '../../types/plugin.js'
import type { EditableSettingSource } from '../settings/constants.js'
import { getSettingsForSource } from '../settings/settings.js'
import { parsePluginIdentifier } from './pluginIdentifier.js'
import type { PluginId } from './schemas.js'
/**
 * Sentinel "marketplace" name that pluginLoader.ts assigns to --plugin-dir
 * plugins (`source = "{name}@inline"`). Not a real marketplace; bare deps
 * declared by such plugins cannot meaningfully inherit it.
 */
const INLINE_MARKETPLACE = 'inline'
/**
 * Normalize a dependency reference to fully-qualified "name@marketplace".
 *
 * A bare name (no @) inherits the marketplace of the plugin that declares
 * it — cross-marketplace deps are blocked anyway, so the @-suffix is
 * boilerplate in the common case.
 *
 * Exception: when the declaring plugin's marketplace is absent or the
 * synthetic `inline` sentinel, the bare name is returned untouched —
 * fabricating "dep@inline" would never match anything real, and
 * verifyAndDemote matches such bare deps by name only.
 */
export function qualifyDependency(
  dep: string,
  declaringPluginId: string,
): string {
  const depMarketplace = parsePluginIdentifier(dep).marketplace
  if (depMarketplace) return dep
  const declaringMarketplace =
    parsePluginIdentifier(declaringPluginId).marketplace
  if (!declaringMarketplace || declaringMarketplace === INLINE_MARKETPLACE) {
    return dep
  }
  return `${dep}@${declaringMarketplace}`
}
/**
* Minimal shape the resolver needs from a marketplace lookup. Keeping this
* narrow means the resolver stays testable without constructing full
* PluginMarketplaceEntry objects.
*/
export type DependencyLookupResult = {
// Entries may be bare names; qualifyDependency normalizes them.
dependencies?: string[]
}
export type ResolutionResult =
| { ok: true; closure: PluginId[] }
| { ok: false; reason: 'cycle'; chain: PluginId[] }
| { ok: false; reason: 'not-found'; missing: PluginId; requiredBy: PluginId }
| {
ok: false
reason: 'cross-marketplace'
dependency: PluginId
requiredBy: PluginId
}
/**
* Walk the transitive dependency closure of `rootId` via DFS.
*
* The returned `closure` ALWAYS contains `rootId`, plus every transitive
* dependency that is NOT in `alreadyEnabled`. Already-enabled deps are
* skipped (not recursed into) — this avoids surprise settings writes when a
* dep is already installed at a different scope. The root is never skipped,
* even if already enabled, so re-installing a plugin always re-caches it.
*
* Cross-marketplace dependencies are BLOCKED by default: a plugin in
* marketplace A cannot auto-install a plugin from marketplace B. This is
* a security boundary — installing from a trusted marketplace shouldn't
* silently pull from an untrusted one. Two escapes: (1) install the
* cross-mkt dep yourself first (already-enabled deps are skipped, so the
* closure won't touch it), or (2) the ROOT marketplace's
* `allowCrossMarketplaceDependenciesOn` allowlist — only the root's list
* applies for the whole walk (no transitive trust: if A allows B, B's
* plugin depending on C is still blocked unless A also allows C).
*
* @param rootId Root plugin to resolve from (format: "name@marketplace")
* @param lookup Async lookup returning `{dependencies}` or `null` if not found
* @param alreadyEnabled Plugin IDs to skip (deps only, root is never skipped)
* @param allowedCrossMarketplaces Marketplace names the root trusts for
* auto-install (from the root marketplace's manifest)
* @returns Closure to install, or a cycle/not-found/cross-marketplace error
*/
export async function resolveDependencyClosure(
  rootId: PluginId,
  lookup: (id: PluginId) => Promise<DependencyLookupResult | null>,
  alreadyEnabled: ReadonlySet<PluginId>,
  allowedCrossMarketplaces: ReadonlySet<string> = new Set(),
): Promise<ResolutionResult> {
  const rootMarketplace = parsePluginIdentifier(rootId).marketplace
  // Post-order accumulation: a plugin is pushed only after all of its deps,
  // so `closure` lists dependencies before their dependents.
  const closure: PluginId[] = []
  const visited = new Set<PluginId>()
  // Current DFS path — membership here means a back-edge, i.e. a cycle.
  const stack: PluginId[] = []
  async function walk(
    id: PluginId,
    requiredBy: PluginId,
  ): Promise<ResolutionResult | null> {
    // Skip already-enabled DEPENDENCIES (avoids surprise settings writes),
    // but NEVER skip the root: installing an already-enabled plugin must
    // still cache/register it. Without this guard, re-installing a plugin
    // that's in settings but missing from disk (e.g., cache cleared,
    // installed_plugins.json stale) would return an empty closure and
    // `cacheAndRegisterPlugin` would never fire — user sees
    // "✔ Successfully installed" but nothing materializes.
    if (id !== rootId && alreadyEnabled.has(id)) return null
    // Security: block auto-install across marketplace boundaries. Runs AFTER
    // the alreadyEnabled check — if the user manually installed a cross-mkt
    // dep, it's in alreadyEnabled and we never reach this.
    const idMarketplace = parsePluginIdentifier(id).marketplace
    if (
      idMarketplace !== rootMarketplace &&
      !(idMarketplace && allowedCrossMarketplaces.has(idMarketplace))
    ) {
      return {
        ok: false,
        reason: 'cross-marketplace',
        dependency: id,
        requiredBy,
      }
    }
    // Cycle check must run BEFORE the visited check: every stack entry was
    // added to `visited` when first expanded, so the reverse order would
    // skip the back-edge and miss the cycle.
    if (stack.includes(id)) {
      return { ok: false, reason: 'cycle', chain: [...stack, id] }
    }
    if (visited.has(id)) return null
    visited.add(id)
    const entry = await lookup(id)
    if (!entry) {
      return { ok: false, reason: 'not-found', missing: id, requiredBy }
    }
    stack.push(id)
    for (const rawDep of entry.dependencies ?? []) {
      const dep = qualifyDependency(rawDep, id)
      const err = await walk(dep, id)
      // Propagate the first error up the whole walk unchanged.
      if (err) return err
    }
    stack.pop()
    closure.push(id)
    return null
  }
  const err = await walk(rootId, rootId)
  if (err) return err
  return { ok: true, closure }
}
/**
* Load-time safety net: for each enabled plugin, verify all manifest
* dependencies are also in the enabled set. Demote any that fail.
*
* Fixed-point loop: demoting plugin A may break plugin B that depends on A,
* so we iterate until nothing changes.
*
* The `reason` field distinguishes:
* - `'not-enabled'` — dep exists in the loaded set but is disabled
* - `'not-found'` — dep is entirely absent (not in any marketplace)
*
* Does NOT mutate input. Returns the set of plugin IDs (sources) to demote.
*
* @param plugins All loaded plugins (enabled + disabled)
* @returns Set of pluginIds to demote, plus errors for `/doctor`
*/
export function verifyAndDemote(plugins: readonly LoadedPlugin[]): {
  demoted: Set<string>
  errors: PluginError[]
} {
  const known = new Set(plugins.map(p => p.source))
  const enabled = new Set(plugins.filter(p => p.enabled).map(p => p.source))
  // Name-only indexes for bare deps from --plugin-dir (@inline) plugins:
  // the real marketplace is unknown, so match "B" against any enabled "B@*".
  // enabledByName is a multiset: if B@epic AND B@other are both enabled,
  // demoting one mustn't make "B" disappear from the index.
  const knownByName = new Set(
    plugins.map(p => parsePluginIdentifier(p.source).name),
  )
  const enabledByName = new Map<string, number>()
  for (const id of enabled) {
    const n = parsePluginIdentifier(id).name
    enabledByName.set(n, (enabledByName.get(n) ?? 0) + 1)
  }
  const errors: PluginError[] = []
  let changed = true
  // Fixed point: each demotion can invalidate plugins checked earlier in the
  // same sweep, so repeat full passes until one demotes nothing.
  while (changed) {
    changed = false
    for (const p of plugins) {
      if (!enabled.has(p.source)) continue
      for (const rawDep of p.manifest.dependencies ?? []) {
        const dep = qualifyDependency(rawDep, p.source)
        // Bare dep ← @inline plugin: match by name only (see enabledByName)
        const isBare = !parsePluginIdentifier(dep).marketplace
        const satisfied = isBare
          ? (enabledByName.get(dep) ?? 0) > 0
          : enabled.has(dep)
        if (!satisfied) {
          enabled.delete(p.source)
          // NOTE(review): the index was keyed by
          // parsePluginIdentifier(p.source).name but is decremented by
          // p.name — assumes the manifest name always equals the source's
          // name part; confirm, otherwise the multiset counts can drift.
          const count = enabledByName.get(p.name) ?? 0
          if (count <= 1) enabledByName.delete(p.name)
          else enabledByName.set(p.name, count - 1)
          errors.push({
            type: 'dependency-unsatisfied',
            source: p.source,
            plugin: p.name,
            dependency: dep,
            // 'not-enabled' = dep exists but is disabled;
            // 'not-found' = dep absent from the loaded set entirely.
            reason: (isBare ? knownByName.has(dep) : known.has(dep))
              ? 'not-enabled'
              : 'not-found',
          })
          changed = true
          break // this plugin is no longer enabled; stop checking its deps
        }
      }
    }
  }
  const demoted = new Set(
    plugins.filter(p => p.enabled && !enabled.has(p.source)).map(p => p.source),
  )
  return { demoted, errors }
}
/**
 * Find all enabled plugins that declare `pluginId` as a dependency.
 * Used to warn on uninstall/disable ("required by: X, Y").
 *
 * @param pluginId The plugin being removed/disabled
 * @param plugins All loaded plugins (only enabled ones are checked)
 * @returns Names of plugins that will break if `pluginId` goes away
 */
export function findReverseDependents(
  pluginId: PluginId,
  plugins: readonly LoadedPlugin[],
): string[] {
  const targetName = parsePluginIdentifier(pluginId).name
  const dependsOnTarget = (p: LoadedPlugin): boolean =>
    (p.manifest.dependencies ?? []).some(rawDep => {
      const qualified = qualifyDependency(rawDep, p.source)
      if (parsePluginIdentifier(qualified).marketplace) {
        return qualified === pluginId
      }
      // Bare dep (declared by an @inline plugin): match by name only.
      return qualified === targetName
    })
  const dependents: string[] = []
  for (const p of plugins) {
    if (p.enabled && p.source !== pluginId && dependsOnTarget(p)) {
      dependents.push(p.name)
    }
  }
  return dependents
}
/**
 * Build the set of plugin IDs currently enabled at a given settings scope.
 * Used by install-time resolution to skip already-enabled deps and avoid
 * surprise settings writes.
 *
 * Matches `true` (plain enable) AND array values (version constraints per
 * settings/types.ts:455-463 — a plugin at `"foo@bar": ["^1.0.0"]` IS
 * enabled). Without the array check, a version-pinned dep would be re-added
 * to the closure and the settings write would clobber the constraint with
 * `true`.
 */
export function getEnabledPluginIdsForScope(
  settingSource: EditableSettingSource,
): Set<PluginId> {
  const enabledPlugins =
    getSettingsForSource(settingSource)?.enabledPlugins ?? {}
  const ids = new Set<PluginId>()
  for (const [id, value] of Object.entries(enabledPlugins)) {
    if (value === true || Array.isArray(value)) ids.add(id)
  }
  return ids
}
/**
 * Build the " (+ N dependencies)" suffix for install success messages.
 * Empty string when no dependencies were installed.
 */
export function formatDependencyCountSuffix(installedDeps: string[]): string {
  const count = installedDeps.length
  if (count === 0) return ''
  const noun = count === 1 ? 'dependency' : 'dependencies'
  return ` (+ ${count} ${noun})`
}
/**
 * Build the "warning: required by X, Y" suffix for uninstall/disable result
 * messages (em-dash style for the CLI, not the middot style used in the
 * notification UI). Empty string when there are no dependents.
 */
export function formatReverseDependentsSuffix(
  rdeps: string[] | undefined,
): string {
  const hasDependents = rdeps !== undefined && rdeps.length > 0
  if (!hasDependents) return ''
  return ` — warning: required by ${rdeps.join(', ')}`
}

View File

@@ -0,0 +1,135 @@
/**
* Telemetry for plugin/marketplace fetches that hit the network.
*
* Added for inc-5046 (GitHub complained about claude-plugins-official load).
* Before this, fetch operations only had logForDebugging — no way to measure
* actual network volume. This surfaces what's hitting GitHub vs GCS vs
* user-hosted so we can see the GCS migration take effect and catch future
* hot-path regressions before GitHub emails us again.
*
* Volume: these fire at startup (install-counts 24h-TTL)
* and on explicit user action (install/update). NOT per-interaction. Similar
* envelope to tengu_binary_download_*.
*/
import {
logEvent,
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS as SafeString,
} from '../../services/analytics/index.js'
import { OFFICIAL_MARKETPLACE_NAME } from './officialMarketplace.js'
export type PluginFetchSource =
| 'install_counts'
| 'marketplace_clone'
| 'marketplace_pull'
| 'marketplace_url'
| 'plugin_clone'
| 'mcpb'
export type PluginFetchOutcome = 'success' | 'failure' | 'cache_hit'
// Allowlist of public git/artifact hosts reported by name. Anything else
// (enterprise git, self-hosted, internal) is bucketed as 'other' so private
// hostnames (git.mycorp.internal) never land in telemetry, and the bounded
// cardinality keeps the dashboard host-breakdown tractable.
const KNOWN_PUBLIC_HOSTS = new Set([
  'github.com',
  'raw.githubusercontent.com',
  'objects.githubusercontent.com',
  'gist.githubusercontent.com',
  'gitlab.com',
  'bitbucket.org',
  'codeberg.org',
  'dev.azure.com',
  'ssh.dev.azure.com',
  'storage.googleapis.com', // GCS — where Dickson's migration points
])
/**
 * Bucket a URL or git spec to an allowlisted hostname.
 * Handles `https://host/...`, `git@host:path`, and `ssh://host/...`.
 * Returns the known public host, 'other' (parseable but not allowlisted —
 * private hostnames must not leak), or 'unknown' (unparseable / local path).
 */
function extractHost(urlOrSpec: string): string {
  // scp-style git spec: user@host:path
  const scpStyle = /^[^@/]+@([^:/]+):/.exec(urlOrSpec)
  let rawHost: string | undefined = scpStyle?.[1]
  if (rawHost === undefined) {
    try {
      rawHost = new URL(urlOrSpec).hostname
    } catch {
      return 'unknown'
    }
  }
  const lowered = rawHost.toLowerCase()
  return KNOWN_PUBLIC_HOSTS.has(lowered) ? lowered : 'other'
}
/**
 * Whether the URL/spec targets anthropics/claude-plugins-official — the repo
 * GitHub complained about — so the dashboard can split "our problem" traffic
 * from user-configured marketplaces.
 */
function isOfficialRepo(urlOrSpec: string): boolean {
  const officialPath = `anthropics/${OFFICIAL_MARKETPLACE_NAME}`
  return urlOrSpec.includes(officialPath)
}
export function logPluginFetch(
source: PluginFetchSource,
urlOrSpec: string | undefined,
outcome: PluginFetchOutcome,
durationMs: number,
errorKind?: string,
): void {
// String values are bounded enums / hostname-only — no code, no paths,
// no raw error messages. Same privacy envelope as tengu_web_fetch_host.
logEvent('tengu_plugin_remote_fetch', {
source: source as SafeString,
host: (urlOrSpec ? extractHost(urlOrSpec) : 'unknown') as SafeString,
is_official: urlOrSpec ? isOfficialRepo(urlOrSpec) : false,
outcome: outcome as SafeString,
duration_ms: Math.round(durationMs),
...(errorKind && { error_kind: errorKind as SafeString }),
})
}
/**
 * Bucket an error into a stable error_kind value so dashboard cardinality
 * stays bounded — raw messages would explode grouping.
 *
 * Accepts Error-like objects (Node codes such as ENOTFOUND) and raw git
 * stderr strings ("Could not resolve host"). The buckets are tested in
 * order, and DNS patterns come BEFORE timeout on purpose: gitClone's error
 * enhancement (marketplaceManager.ts ~950) rewrites DNS failures to mention
 * "timeout", so the reverse order would misclassify git DNS as timeout.
 */
export function classifyFetchError(error: unknown): string {
  const message = String((error as { message?: unknown })?.message ?? error)
  const buckets: Array<[RegExp, string]> = [
    [
      /ENOTFOUND|ECONNREFUSED|EAI_AGAIN|Could not resolve host|Connection refused/i,
      'dns_or_refused',
    ],
    [/ETIMEDOUT|timed out|timeout/i, 'timeout'],
    [
      /ECONNRESET|socket hang up|Connection reset by peer|remote end hung up/i,
      'conn_reset',
    ],
    [/403|401|authentication|permission denied/i, 'auth'],
    [/404|not found|repository not found/i, 'not_found'],
    [/certificate|SSL|TLS|unable to get local issuer/i, 'tls'],
    // Schema validation throws "Invalid response format" (install_counts) —
    // kept distinct from true unknowns so the dashboard can see "server
    // sent garbage" separately.
    [/Invalid response format|Invalid marketplace schema/i, 'invalid_schema'],
  ]
  for (const [pattern, kind] of buckets) {
    if (pattern.test(message)) return kind
  }
  return 'other'
}

View File

@@ -0,0 +1,69 @@
/**
* Utility for checking git availability.
*
* Git is required for installing GitHub-based marketplaces. This module
* provides a memoized check to determine if git is available on the system.
*/
import memoize from 'lodash-es/memoize.js'
import { which } from '../which.js'
/**
 * True when `command` resolves to an executable on PATH.
 *
 * Resolution uses `which` rather than executing the command — a security
 * best practice that avoids running arbitrary binaries from untrusted
 * directories just to probe availability.
 *
 * @param command - Executable name to look up
 * @returns True if the command exists and is executable
 */
async function isCommandAvailable(command: string): Promise<boolean> {
  try {
    const resolved = await which(command)
    return Boolean(resolved)
  } catch {
    return false
  }
}
/**
 * Memoized PATH check for git; repeated calls within a session reuse the
 * first result since availability rarely changes mid-session.
 *
 * PATH-only — git is never executed. On macOS the /usr/bin/git xcrun shim
 * passes this check even without Xcode CLT installed; callers that then hit
 * `xcrun: error:` at exec time should call markGitUnavailable() so the rest
 * of the session behaves as though git is absent.
 *
 * @returns True if git is installed and executable
 */
export const checkGitAvailable = memoize(
  async (): Promise<boolean> => isCommandAvailable('git'),
)
/**
 * Pin the memoized git-availability check to false for the rest of the
 * session.
 *
 * Use when a git invocation fails in a way showing the binary exists on
 * PATH but cannot actually run — chiefly the macOS xcrun shim
 * (`xcrun: error: invalid active developer path`). Afterwards
 * checkGitAvailable() short-circuits to false, so git-guarded code skips
 * cleanly instead of repeatedly hitting the same exec error.
 *
 * lodash memoize keys the zero-arg call on undefined.
 */
export function markGitUnavailable(): void {
  const cache = checkGitAvailable.cache
  cache?.set?.(undefined, Promise.resolve(false))
}
/** Reset the memoized git-availability result. Test helper only. */
export function clearGitAvailabilityCache(): void {
  const cache = checkGitAvailable.cache
  cache?.clear?.()
}

View File

@@ -0,0 +1,174 @@
/**
* Plugin installation for headless/CCR mode.
*
* This module provides plugin installation without AppState updates,
* suitable for non-interactive environments like CCR.
*
* When CLAUDE_CODE_PLUGIN_USE_ZIP_CACHE is enabled, plugins are stored as
* ZIPs on a mounted volume. The storage layer (pluginLoader.ts) handles
* ZIP creation on install and extraction on load transparently.
*/
import { logEvent } from '../../services/analytics/index.js'
import { registerCleanup } from '../cleanupRegistry.js'
import { logForDebugging } from '../debug.js'
import { withDiagnosticsTiming } from '../diagLogs.js'
import { getFsImplementation } from '../fsOperations.js'
import { logError } from '../log.js'
import {
clearMarketplacesCache,
getDeclaredMarketplaces,
registerSeedMarketplaces,
} from './marketplaceManager.js'
import { detectAndUninstallDelistedPlugins } from './pluginBlocklist.js'
import { clearPluginCache } from './pluginLoader.js'
import { reconcileMarketplaces } from './reconciler.js'
import {
cleanupSessionPluginCache,
getZipCacheMarketplacesDir,
getZipCachePluginsDir,
isMarketplaceSourceSupportedByZipCache,
isPluginZipCacheEnabled,
} from './zipCache.js'
import { syncMarketplacesToZipCache } from './zipCacheAdapters.js'
/**
* Install plugins for headless/CCR mode.
*
* This is the headless equivalent of performBackgroundPluginInstallations(),
* but without AppState updates (no UI to update in headless mode).
*
* @returns true if any plugins were installed (caller should refresh MCP)
*/
export async function installPluginsForHeadless(): Promise<boolean> {
  const zipCacheMode = isPluginZipCacheEnabled()
  logForDebugging(
    `installPluginsForHeadless: starting${zipCacheMode ? ' (zip cache mode)' : ''}`,
  )
  // Register seed marketplaces (CLAUDE_CODE_PLUGIN_SEED_DIR) before diffing.
  // Idempotent; no-op if seed not configured. Without this, findMissingMarketplaces
  // would see seed entries as missing → clone → defeats seed's purpose.
  //
  // If registration changed state, clear caches so the early plugin-load pass
  // (which runs during CLI startup before this function) doesn't keep stale
  // "marketplace not found" results. Without this clear, a first-boot headless
  // run with a seed-cached plugin would show 0 plugin commands/agents/skills
  // in the init message even though the seed has everything.
  const seedChanged = await registerSeedMarketplaces()
  if (seedChanged) {
    clearMarketplacesCache()
    clearPluginCache('headlessPluginInstall: seed marketplaces registered')
  }
  // Ensure zip cache directory structure exists
  if (zipCacheMode) {
    await getFsImplementation().mkdir(getZipCacheMarketplacesDir())
    await getFsImplementation().mkdir(getZipCachePluginsDir())
  }
  // Declared now includes an implicit claude-plugins-official entry when any
  // enabled plugin references it (see getDeclaredMarketplaces). This routes
  // the official marketplace through the same reconciler path as any other —
  // which composes correctly with CLAUDE_CODE_PLUGIN_SEED_DIR: seed registers
  // it in known_marketplaces.json, reconciler diff sees it as upToDate, no clone.
  const declaredCount = Object.keys(getDeclaredMarketplaces()).length
  // Emitted via tengu_headless_plugin_install in the finally block —
  // reported even when the install path throws.
  const metrics = {
    marketplaces_installed: 0,
    delisted_count: 0,
  }
  // Initialize from seedChanged so the caller (print.ts) calls
  // refreshPluginState() → clearCommandsCache/clearAgentDefinitionsCache
  // when seed registration added marketplaces. Without this, the caller
  // only refreshes when an actual plugin install happened.
  let pluginsChanged = seedChanged
  try {
    if (declaredCount === 0) {
      logForDebugging('installPluginsForHeadless: no marketplaces declared')
    } else {
      // Reconcile declared marketplaces (settings intent + implicit official)
      // with materialized state. Zip cache: skip unsupported source types.
      const reconcileResult = await withDiagnosticsTiming(
        'headless_marketplace_reconcile',
        () =>
          reconcileMarketplaces({
            skip: zipCacheMode
              ? (_name, source) =>
                  !isMarketplaceSourceSupportedByZipCache(source)
              : undefined,
            onProgress: event => {
              if (event.type === 'installed') {
                logForDebugging(
                  `installPluginsForHeadless: installed marketplace ${event.name}`,
                )
              } else if (event.type === 'failed') {
                logForDebugging(
                  `installPluginsForHeadless: failed to install marketplace ${event.name}: ${event.error}`,
                )
              }
            },
          }),
        r => ({
          installed_count: r.installed.length,
          updated_count: r.updated.length,
          failed_count: r.failed.length,
          skipped_count: r.skipped.length,
        }),
      )
      if (reconcileResult.skipped.length > 0) {
        logForDebugging(
          `installPluginsForHeadless: skipped ${reconcileResult.skipped.length} marketplace(s) unsupported by zip cache: ${reconcileResult.skipped.join(', ')}`,
        )
      }
      const marketplacesChanged =
        reconcileResult.installed.length + reconcileResult.updated.length
      // Clear caches so newly-installed marketplace plugins are discoverable.
      // Plugin caching is the loader's job — after caches clear, the caller's
      // refreshPluginState() → loadAllPlugins() will cache any missing plugins
      // from the newly-materialized marketplaces.
      if (marketplacesChanged > 0) {
        clearMarketplacesCache()
        clearPluginCache('headlessPluginInstall: marketplaces reconciled')
        pluginsChanged = true
      }
      metrics.marketplaces_installed = marketplacesChanged
    }
    // Zip cache: save marketplace JSONs for offline access on ephemeral containers.
    // Runs unconditionally so that steady-state containers (all plugins installed)
    // still sync marketplace data that may have been cloned in a previous run.
    if (zipCacheMode) {
      await syncMarketplacesToZipCache()
    }
    // Delisting enforcement
    const newlyDelisted = await detectAndUninstallDelistedPlugins()
    metrics.delisted_count = newlyDelisted.length
    if (newlyDelisted.length > 0) {
      pluginsChanged = true
    }
    if (pluginsChanged) {
      clearPluginCache('headlessPluginInstall: plugins changed')
    }
    // Zip cache: register session cleanup for extracted plugin temp dirs
    if (zipCacheMode) {
      registerCleanup(cleanupSessionPluginCache)
    }
    return pluginsChanged
  } catch (error) {
    // Headless mode: never crash the run over plugin install problems.
    logError(error)
    return false
  } finally {
    logEvent('tengu_headless_plugin_install', metrics)
  }
}

View File

@@ -0,0 +1,164 @@
/**
* Plugin-hint recommendations.
*
* Companion to lspRecommendation.ts: where LSP recommendations are triggered
* by file edits, plugin hints are triggered by CLIs/SDKs emitting a
* `<claude-code-hint />` tag to stderr (detected by the Bash/PowerShell tools).
*
* State persists in GlobalConfig.claudeCodeHints — a show-once record per
* plugin and a disabled flag (user picked "don't show again"). Official-
* marketplace filtering is hardcoded for v1.
*/
import { getFeatureValue_CACHED_MAY_BE_STALE } from '../../services/analytics/growthbook.js'
import {
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
type AnalyticsMetadata_I_VERIFIED_THIS_IS_PII_TAGGED,
logEvent,
} from '../../services/analytics/index.js'
import {
type ClaudeCodeHint,
hasShownHintThisSession,
setPendingHint,
} from '../claudeCodeHints.js'
import { getGlobalConfig, saveGlobalConfig } from '../config.js'
import { logForDebugging } from '../debug.js'
import { isPluginInstalled } from './installedPluginsManager.js'
import { getPluginById } from './marketplaceManager.js'
import {
isOfficialMarketplaceName,
parsePluginIdentifier,
} from './pluginIdentifier.js'
import { isPluginBlockedByPolicy } from './pluginPolicy.js'
/**
 * Hard cap on `claudeCodeHints.plugin[]` — bounds config growth. Each shown
 * plugin appends one slug; past this point we stop prompting (and stop
 * appending) rather than let the config grow without limit.
 */
const MAX_SHOWN_PLUGINS = 100
/**
 * Renderable plugin-install recommendation produced by resolvePluginHint
 * from a stderr `<claude-code-hint />` tag.
 */
export type PluginHintRecommendation = {
  // Full slug, `name@marketplace`.
  pluginId: string
  // Display name from the marketplace entry.
  pluginName: string
  // Marketplace half of the slug ('' if unparseable).
  marketplaceName: string
  pluginDescription?: string
  // Command line whose stderr emitted the hint.
  sourceCommand: string
}
/**
* Pre-store gate called by shell tools when a `type="plugin"` hint is detected.
* Drops the hint if:
*
* - a dialog has already been shown this session
* - user has disabled hints
* - the shown-plugins list has hit the config-growth cap
* - plugin slug doesn't parse as `name@marketplace`
* - marketplace isn't official (hardcoded for v1)
* - plugin is already installed
* - plugin was already shown in a prior session
*
* Synchronous on purpose — shell tools shouldn't await a marketplace lookup
* just to strip a stderr line. The async marketplace-cache check happens
* later in resolvePluginHint (hook side).
*/
export function maybeRecordPluginHint(hint: ClaudeCodeHint): void {
if (!getFeatureValue_CACHED_MAY_BE_STALE('tengu_lapis_finch', false)) return
if (hasShownHintThisSession()) return
const state = getGlobalConfig().claudeCodeHints
if (state?.disabled) return
const shown = state?.plugin ?? []
if (shown.length >= MAX_SHOWN_PLUGINS) return
const pluginId = hint.value
const { name, marketplace } = parsePluginIdentifier(pluginId)
if (!name || !marketplace) return
if (!isOfficialMarketplaceName(marketplace)) return
if (shown.includes(pluginId)) return
if (isPluginInstalled(pluginId)) return
if (isPluginBlockedByPolicy(pluginId)) return
// Bound repeat lookups on the same slug — a CLI that emits on every
// invocation shouldn't trigger N resolve cycles for the same plugin.
if (triedThisSession.has(pluginId)) return
triedThisSession.add(pluginId)
setPendingHint(hint)
}
// Plugin slugs already gated this session — bounds repeat resolve cycles
// when the same hint is emitted on every CLI invocation.
const triedThisSession = new Set<string>()
/** Test-only reset. */
export function _resetHintRecommendationForTesting(): void {
  triedThisSession.clear()
}
/**
 * Resolve the pending hint to a renderable recommendation. Runs the async
 * marketplace lookup that the sync pre-store gate (maybeRecordPluginHint)
 * skipped. Always logs a detection event; returns null if the plugin isn't
 * in the marketplace cache — the hint is discarded.
 */
export async function resolvePluginHint(
  hint: ClaudeCodeHint,
): Promise<PluginHintRecommendation | null> {
  const pluginId = hint.value
  const parsed = parsePluginIdentifier(pluginId)
  const pluginData = await getPluginById(pluginId)
  const outcome = pluginData ? 'passed' : 'not_in_cache'
  logEvent('tengu_plugin_hint_detected', {
    _PROTO_plugin_name: (parsed.name ??
      '') as AnalyticsMetadata_I_VERIFIED_THIS_IS_PII_TAGGED,
    _PROTO_marketplace_name: (parsed.marketplace ??
      '') as AnalyticsMetadata_I_VERIFIED_THIS_IS_PII_TAGGED,
    result:
      outcome as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
  })
  if (!pluginData) {
    logForDebugging(
      `[hintRecommendation] ${pluginId} not found in marketplace cache`,
    )
    return null
  }
  return {
    pluginId,
    pluginName: pluginData.entry.name,
    marketplaceName: parsed.marketplace ?? '',
    pluginDescription: pluginData.entry.description,
    sourceCommand: hint.sourceCommand,
  }
}
/**
 * Record that a prompt for this plugin was surfaced. Called regardless of
 * the user's yes/no response — show-once semantics. Idempotent: returns the
 * config unchanged when the slug is already recorded.
 */
export function markHintPluginShown(pluginId: string): void {
  saveGlobalConfig(current => {
    const shownSoFar = current.claudeCodeHints?.plugin ?? []
    if (shownSoFar.includes(pluginId)) {
      return current
    }
    const claudeCodeHints = {
      ...current.claudeCodeHints,
      plugin: [...shownSoFar, pluginId],
    }
    return { ...current, claudeCodeHints }
  })
}
/**
 * Called when the user picks "don't show plugin installation hints again".
 * Idempotent: no-op write when the flag is already set.
 */
export function disableHintRecommendations(): void {
  saveGlobalConfig(current => {
    if (current.claudeCodeHints?.disabled) {
      return current
    }
    const claudeCodeHints = { ...current.claudeCodeHints, disabled: true }
    return { ...current, claudeCodeHints }
  })
}

View File

@@ -0,0 +1,292 @@
/**
* Plugin install counts data layer
*
* This module fetches and caches plugin install counts from the official
* Claude plugins statistics repository. The cache is refreshed if older
* than 24 hours.
*
* Cache location: ~/.claude/plugins/install-counts-cache.json
*/
import axios from 'axios'
import { randomBytes } from 'crypto'
import { readFile, rename, unlink, writeFile } from 'fs/promises'
import { join } from 'path'
import { logForDebugging } from '../debug.js'
import { errorMessage, getErrnoCode } from '../errors.js'
import { getFsImplementation } from '../fsOperations.js'
import { logError } from '../log.js'
import { jsonParse, jsonStringify } from '../slowOperations.js'
import { classifyFetchError, logPluginFetch } from './fetchTelemetry.js'
import { getPluginsDirectory } from './pluginDirectories.js'
// Bump to invalidate all previously-written cache files on format changes.
const INSTALL_COUNTS_CACHE_VERSION = 1
const INSTALL_COUNTS_CACHE_FILENAME = 'install-counts-cache.json'
// Raw stats JSON published on the official plugins repo's `stats` branch.
const INSTALL_COUNTS_URL =
  'https://raw.githubusercontent.com/anthropics/claude-plugins-official/refs/heads/stats/stats/plugin-installs.json'
const CACHE_TTL_MS = 24 * 60 * 60 * 1000 // 24 hours in milliseconds
/**
 * Structure of the install counts cache file
 */
type InstallCountsCache = {
  version: number
  fetchedAt: string // ISO timestamp
  counts: Array<{
    plugin: string // "pluginName@marketplace"
    unique_installs: number
  }>
}
/**
 * Expected structure of the GitHub stats response
 */
type GitHubStatsResponse = {
  plugins: Array<{
    plugin: string
    unique_installs: number
  }>
}
/**
 * Absolute path of the on-disk install counts cache file, inside the
 * plugins directory.
 */
function getInstallCountsCachePath(): string {
  const pluginsDir = getPluginsDirectory()
  return join(pluginsDir, INSTALL_COUNTS_CACHE_FILENAME)
}
/**
 * Load the install counts cache from disk.
 *
 * Validates the parsed JSON by hand (top-level shape, version, fetchedAt,
 * per-entry fields) rather than trusting it blindly. Returns null if the
 * file doesn't exist, is invalid, or is stale (>24h old) — callers treat
 * null as a cache miss and re-fetch.
 */
async function loadInstallCountsCache(): Promise<InstallCountsCache | null> {
  const cachePath = getInstallCountsCachePath()
  try {
    const content = await readFile(cachePath, { encoding: 'utf-8' })
    const parsed = jsonParse(content) as unknown
    // Validate basic structure
    if (
      typeof parsed !== 'object' ||
      parsed === null ||
      !('version' in parsed) ||
      !('fetchedAt' in parsed) ||
      !('counts' in parsed)
    ) {
      logForDebugging('Install counts cache has invalid structure')
      return null
    }
    const cache = parsed as {
      version: unknown
      fetchedAt: unknown
      counts: unknown
    }
    // Validate version — any mismatch invalidates the cache outright.
    if (cache.version !== INSTALL_COUNTS_CACHE_VERSION) {
      logForDebugging(
        `Install counts cache version mismatch (got ${cache.version}, expected ${INSTALL_COUNTS_CACHE_VERSION})`,
      )
      return null
    }
    // Validate fetchedAt and counts
    if (typeof cache.fetchedAt !== 'string' || !Array.isArray(cache.counts)) {
      logForDebugging('Install counts cache has invalid structure')
      return null
    }
    // Validate fetchedAt is a valid date
    const fetchedAt = new Date(cache.fetchedAt).getTime()
    if (Number.isNaN(fetchedAt)) {
      logForDebugging('Install counts cache has invalid fetchedAt timestamp')
      return null
    }
    // Validate count entries have required fields
    const validCounts = cache.counts.every(
      (entry): entry is { plugin: string; unique_installs: number } =>
        typeof entry === 'object' &&
        entry !== null &&
        typeof entry.plugin === 'string' &&
        typeof entry.unique_installs === 'number',
    )
    if (!validCounts) {
      logForDebugging('Install counts cache has malformed entries')
      return null
    }
    // Check if cache is stale (>24 hours old)
    const now = Date.now()
    if (now - fetchedAt > CACHE_TTL_MS) {
      logForDebugging('Install counts cache is stale (>24h old)')
      return null
    }
    // Return validated cache
    return {
      version: cache.version as number,
      fetchedAt: cache.fetchedAt,
      counts: cache.counts,
    }
  } catch (error) {
    // ENOENT (no cache written yet) is the expected first-run case — stay quiet.
    const code = getErrnoCode(error)
    if (code !== 'ENOENT') {
      logForDebugging(
        `Failed to load install counts cache: ${errorMessage(error)}`,
      )
    }
    return null
  }
}
/**
 * Save the install counts cache to disk atomically.
 * Uses a temp file + rename pattern to prevent corruption: content is
 * written to a random-suffixed temp file (mode 0600) then renamed over the
 * real cache path. Failures are logged, never thrown.
 */
async function saveInstallCountsCache(
  cache: InstallCountsCache,
): Promise<void> {
  const cachePath = getInstallCountsCachePath()
  const tempPath = `${cachePath}.${randomBytes(8).toString('hex')}.tmp`
  try {
    // Ensure the plugins directory exists
    // NOTE(review): assumes getFsImplementation().mkdir tolerates an
    // already-existing directory — confirm against its implementation.
    const pluginsDir = getPluginsDirectory()
    await getFsImplementation().mkdir(pluginsDir)
    // Write to temp file
    const content = jsonStringify(cache, null, 2)
    await writeFile(tempPath, content, {
      encoding: 'utf-8',
      mode: 0o600,
    })
    // Atomic rename
    await rename(tempPath, cachePath)
    logForDebugging('Install counts cache saved successfully')
  } catch (error) {
    logError(error)
    // Clean up temp file if it exists
    try {
      await unlink(tempPath)
    } catch {
      // Ignore cleanup errors
    }
  }
}
/**
 * Fetch install counts from the GitHub stats repository (10s timeout).
 * Logs fetch telemetry for both outcomes; rethrows on failure or on a
 * malformed response body.
 */
async function fetchInstallCountsFromGitHub(): Promise<
  Array<{ plugin: string; unique_installs: number }>
> {
  logForDebugging(`Fetching install counts from ${INSTALL_COUNTS_URL}`)
  const started = performance.now()
  try {
    const response = await axios.get<GitHubStatsResponse>(INSTALL_COUNTS_URL, {
      timeout: 10000,
    })
    const plugins = response.data?.plugins
    if (!Array.isArray(plugins)) {
      throw new Error('Invalid response format from install counts API')
    }
    logPluginFetch(
      'install_counts',
      INSTALL_COUNTS_URL,
      'success',
      performance.now() - started,
    )
    return plugins
  } catch (error) {
    logPluginFetch(
      'install_counts',
      INSTALL_COUNTS_URL,
      'failure',
      performance.now() - started,
      classifyFetchError(error),
    )
    throw error
  }
}
/**
 * Convert raw count entries into a plugin-id → install-count Map.
 * Shared by the cache-hit and fresh-fetch paths of getInstallCounts.
 */
function toCountsMap(
  counts: Array<{ plugin: string; unique_installs: number }>,
): Map<string, number> {
  const map = new Map<string, number>()
  for (const entry of counts) {
    map.set(entry.plugin, entry.unique_installs)
  }
  return map
}
/**
 * Get plugin install counts as a Map.
 * Uses cached data if available and less than 24 hours old.
 * Returns null on errors so UI can hide counts rather than show misleading zeros.
 *
 * @returns Map of plugin ID (name@marketplace) to install count, or null if unavailable
 */
export async function getInstallCounts(): Promise<Map<string, number> | null> {
  // Try to load from cache first
  const cache = await loadInstallCountsCache()
  if (cache) {
    logForDebugging('Using cached install counts')
    logPluginFetch('install_counts', INSTALL_COUNTS_URL, 'cache_hit', 0)
    return toCountsMap(cache.counts)
  }
  // Cache miss or stale - fetch from GitHub
  try {
    const counts = await fetchInstallCountsFromGitHub()
    // Save to cache so subsequent calls within 24h skip the network.
    const newCache: InstallCountsCache = {
      version: INSTALL_COUNTS_CACHE_VERSION,
      fetchedAt: new Date().toISOString(),
      counts,
    }
    await saveInstallCountsCache(newCache)
    return toCountsMap(counts)
  } catch (error) {
    // Log error and return null so UI can hide counts
    logError(error)
    logForDebugging(`Failed to fetch install counts: ${errorMessage(error)}`)
    return null
  }
}
/**
 * Format an install count for display.
 *
 * @param count - The raw install count
 * @returns Formatted string:
 *   - <1000: raw number (e.g., "42")
 *   - >=1000: K suffix with 1 decimal (e.g., "1.2K", "36.2K")
 *   - >=1000000: M suffix with 1 decimal (e.g., "1.2M")
 *   A trailing ".0" is dropped ("1.0K" renders as "1K").
 */
export function formatInstallCount(count: number): string {
  // Render `value` to one decimal place, stripping a redundant ".0".
  const withSuffix = (value: number, suffix: string): string => {
    const fixed = value.toFixed(1)
    const trimmed = fixed.endsWith('.0') ? fixed.slice(0, -2) : fixed
    return `${trimmed}${suffix}`
  }
  if (count < 1000) {
    return String(count)
  }
  if (count < 1000000) {
    return withSuffix(count / 1000, 'K')
  }
  return withSuffix(count / 1000000, 'M')
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,348 @@
import memoize from 'lodash-es/memoize.js'
import { basename } from 'path'
import { isAutoMemoryEnabled } from '../../memdir/paths.js'
import type { AgentColorName } from '../../tools/AgentTool/agentColorManager.js'
import {
type AgentMemoryScope,
loadAgentMemoryPrompt,
} from '../../tools/AgentTool/agentMemory.js'
import type { AgentDefinition } from '../../tools/AgentTool/loadAgentsDir.js'
import { FILE_EDIT_TOOL_NAME } from '../../tools/FileEditTool/constants.js'
import { FILE_READ_TOOL_NAME } from '../../tools/FileReadTool/prompt.js'
import { FILE_WRITE_TOOL_NAME } from '../../tools/FileWriteTool/prompt.js'
import { getPluginErrorMessage } from '../../types/plugin.js'
import { logForDebugging } from '../debug.js'
import { EFFORT_LEVELS, parseEffortValue } from '../effort.js'
import {
coerceDescriptionToString,
parseFrontmatter,
parsePositiveIntFromFrontmatter,
} from '../frontmatterParser.js'
import { getFsImplementation, isDuplicatePath } from '../fsOperations.js'
import {
parseAgentToolsFromFrontmatter,
parseSlashCommandToolsFromFrontmatter,
} from '../markdownConfigLoader.js'
import { loadAllPluginsCacheOnly } from './pluginLoader.js'
import {
loadPluginOptions,
substitutePluginVariables,
substituteUserConfigInContent,
} from './pluginOptionsStorage.js'
import type { PluginManifest } from './schemas.js'
import { walkPluginMarkdown } from './walkPluginMarkdown.js'
// Frontmatter `memory:` values we accept; anything else is logged and ignored.
const VALID_MEMORY_SCOPES: AgentMemoryScope[] = ['user', 'project', 'local']
/**
 * Walk an agents directory and parse every markdown file into an
 * AgentDefinition. Files that fail to parse (or are duplicates) are skipped
 * by loadAgentFromFile returning null.
 */
async function loadAgentsFromDirectory(
  agentsPath: string,
  pluginName: string,
  sourceName: string,
  pluginPath: string,
  pluginManifest: PluginManifest,
  loadedPaths: Set<string>,
): Promise<AgentDefinition[]> {
  const collected: AgentDefinition[] = []
  await walkPluginMarkdown(
    agentsPath,
    async (markdownPath, namespace) => {
      const definition = await loadAgentFromFile(
        markdownPath,
        pluginName,
        namespace,
        sourceName,
        pluginPath,
        pluginManifest,
        loadedPaths,
      )
      if (definition !== null) {
        collected.push(definition)
      }
    },
    { logLabel: 'agents' },
  )
  return collected
}
/**
 * Parse a single plugin agent markdown file into an AgentDefinition.
 *
 * Returns null when the file is a duplicate (already seen in loadedPaths)
 * or cannot be read/parsed; failures are logged, never thrown.
 *
 * @param filePath - absolute path of the agent .md file
 * @param pluginName - plugin name, used as the agentType prefix
 * @param namespace - subdirectory segments between the agents root and the file
 * @param sourceName - plugin source identifier for variable substitution
 * @param pluginPath - plugin root substituted for ${CLAUDE_PLUGIN_ROOT}
 * @param pluginManifest - manifest; userConfig drives ${user_config.X} substitution
 * @param loadedPaths - dedupe set shared across one plugin's agent files
 */
async function loadAgentFromFile(
  filePath: string,
  pluginName: string,
  namespace: string[],
  sourceName: string,
  pluginPath: string,
  pluginManifest: PluginManifest,
  loadedPaths: Set<string>,
): Promise<AgentDefinition | null> {
  const fs = getFsImplementation()
  if (isDuplicatePath(fs, filePath, loadedPaths)) {
    return null
  }
  try {
    const content = await fs.readFile(filePath, { encoding: 'utf-8' })
    const { frontmatter, content: markdownContent } = parseFrontmatter(
      content,
      filePath,
    )
    // Explicit `name:` frontmatter wins; otherwise derive from the filename.
    const baseAgentName =
      (frontmatter.name as string) || basename(filePath).replace(/\.md$/, '')
    // Apply namespace prefixing like we do for commands
    const nameParts = [pluginName, ...namespace, baseAgentName]
    const agentType = nameParts.join(':')
    // Parse agent metadata from frontmatter
    const whenToUse =
      coerceDescriptionToString(frontmatter.description, agentType) ??
      coerceDescriptionToString(frontmatter['when-to-use'], agentType) ??
      `Agent from ${pluginName} plugin`
    let tools = parseAgentToolsFromFrontmatter(frontmatter.tools)
    const skills = parseSlashCommandToolsFromFrontmatter(frontmatter.skills)
    const color = frontmatter.color as AgentColorName | undefined
    const modelRaw = frontmatter.model
    let model: string | undefined
    if (typeof modelRaw === 'string' && modelRaw.trim().length > 0) {
      const trimmed = modelRaw.trim()
      // 'inherit' (any case) is normalized; other values pass through as-is.
      model = trimmed.toLowerCase() === 'inherit' ? 'inherit' : trimmed
    }
    const backgroundRaw = frontmatter.background
    // Accept both YAML boolean true and the string 'true'.
    const background =
      backgroundRaw === 'true' || backgroundRaw === true ? true : undefined
    // Substitute ${CLAUDE_PLUGIN_ROOT} so agents can reference bundled files,
    // and ${user_config.X} (non-sensitive only) so they can embed configured
    // usernames, endpoints, etc. Sensitive refs resolve to a placeholder.
    let systemPrompt = substitutePluginVariables(markdownContent.trim(), {
      path: pluginPath,
      source: sourceName,
    })
    if (pluginManifest.userConfig) {
      systemPrompt = substituteUserConfigInContent(
        systemPrompt,
        loadPluginOptions(sourceName),
        pluginManifest.userConfig,
      )
    }
    // Parse memory scope
    const memoryRaw = frontmatter.memory as string | undefined
    let memory: AgentMemoryScope | undefined
    if (memoryRaw !== undefined) {
      if (VALID_MEMORY_SCOPES.includes(memoryRaw as AgentMemoryScope)) {
        memory = memoryRaw as AgentMemoryScope
      } else {
        logForDebugging(
          `Plugin agent file ${filePath} has invalid memory value '${memoryRaw}'. Valid options: ${VALID_MEMORY_SCOPES.join(', ')}`,
        )
      }
    }
    // Parse isolation mode — only 'worktree' is recognized; anything else is ignored.
    const isolationRaw = frontmatter.isolation as string | undefined
    const isolation =
      isolationRaw === 'worktree' ? ('worktree' as const) : undefined
    // Parse effort (string level or integer)
    const effortRaw = frontmatter.effort
    const effort =
      effortRaw !== undefined ? parseEffortValue(effortRaw) : undefined
    if (effortRaw !== undefined && effort === undefined) {
      logForDebugging(
        `Plugin agent file ${filePath} has invalid effort '${effortRaw}'. Valid options: ${EFFORT_LEVELS.join(', ')} or an integer`,
      )
    }
    // permissionMode, hooks, and mcpServers are intentionally NOT parsed for
    // plugin agents. Plugins are third-party marketplace code; these fields
    // escalate what the agent can do beyond what the user approved at install
    // time. For this level of control, define the agent in .claude/agents/
    // where the user explicitly wrote the frontmatter. (Note: plugins can
    // still ship hooks and MCP servers at the manifest level — that's the
    // install-time trust boundary. Per-agent declarations would let a single
    // agent file buried in agents/ silently add them.) See PR #22558 review.
    for (const field of ['permissionMode', 'hooks', 'mcpServers'] as const) {
      if (frontmatter[field] !== undefined) {
        logForDebugging(
          `Plugin agent file ${filePath} sets ${field}, which is ignored for plugin agents. Use .claude/agents/ for this level of control.`,
          { level: 'warn' },
        )
      }
    }
    // Parse maxTurns
    const maxTurnsRaw = frontmatter.maxTurns
    const maxTurns = parsePositiveIntFromFrontmatter(maxTurnsRaw)
    if (maxTurnsRaw !== undefined && maxTurns === undefined) {
      logForDebugging(
        `Plugin agent file ${filePath} has invalid maxTurns '${maxTurnsRaw}'. Must be a positive integer.`,
      )
    }
    // Parse disallowedTools
    const disallowedTools =
      frontmatter.disallowedTools !== undefined
        ? parseAgentToolsFromFrontmatter(frontmatter.disallowedTools)
        : undefined
    // If memory is enabled, inject Write/Edit/Read tools for memory access
    // (only when an explicit tools list exists — an undefined list already
    // means unrestricted).
    if (isAutoMemoryEnabled() && memory && tools !== undefined) {
      const toolSet = new Set(tools)
      for (const tool of [
        FILE_WRITE_TOOL_NAME,
        FILE_EDIT_TOOL_NAME,
        FILE_READ_TOOL_NAME,
      ]) {
        if (!toolSet.has(tool)) {
          tools = [...tools, tool]
        }
      }
    }
    return {
      agentType,
      whenToUse,
      tools,
      ...(disallowedTools !== undefined ? { disallowedTools } : {}),
      ...(skills !== undefined ? { skills } : {}),
      // Memory prompt is appended lazily so it reflects state at call time.
      getSystemPrompt: () => {
        if (isAutoMemoryEnabled() && memory) {
          const memoryPrompt = loadAgentMemoryPrompt(agentType, memory)
          return systemPrompt + '\n\n' + memoryPrompt
        }
        return systemPrompt
      },
      source: 'plugin' as const,
      color,
      model,
      filename: baseAgentName,
      plugin: sourceName,
      ...(background ? { background } : {}),
      ...(memory ? { memory } : {}),
      ...(isolation ? { isolation } : {}),
      ...(effort !== undefined ? { effort } : {}),
      ...(maxTurns !== undefined ? { maxTurns } : {}),
    } as AgentDefinition
  } catch (error) {
    logForDebugging(`Failed to load agent from ${filePath}: ${error}`, {
      level: 'error',
    })
    return null
  }
}
/**
 * Load agent definitions from all enabled plugins (memoized).
 *
 * Reads the plugin cache only (no installs); each plugin contributes agents
 * from its default agents directory plus any manifest-declared agentsPaths
 * (directories or individual .md files). Per-plugin failures are logged and
 * skipped so one broken plugin cannot block the rest. Invalidate with
 * clearPluginAgentCache().
 */
export const loadPluginAgents = memoize(
  async (): Promise<AgentDefinition[]> => {
    // Only load agents from enabled plugins
    const { enabled, errors } = await loadAllPluginsCacheOnly()
    if (errors.length > 0) {
      logForDebugging(
        `Plugin loading errors: ${errors.map(e => getPluginErrorMessage(e)).join(', ')}`,
      )
    }
    // Process plugins in parallel; each plugin has its own loadedPaths scope
    const perPluginAgents = await Promise.all(
      enabled.map(async (plugin): Promise<AgentDefinition[]> => {
        // Track loaded file paths to prevent duplicates within this plugin
        const loadedPaths = new Set<string>()
        const pluginAgents: AgentDefinition[] = []
        // Load agents from default agents directory
        if (plugin.agentsPath) {
          try {
            const agents = await loadAgentsFromDirectory(
              plugin.agentsPath,
              plugin.name,
              plugin.source,
              plugin.path,
              plugin.manifest,
              loadedPaths,
            )
            pluginAgents.push(...agents)
            if (agents.length > 0) {
              logForDebugging(
                `Loaded ${agents.length} agents from plugin ${plugin.name} default directory`,
              )
            }
          } catch (error) {
            logForDebugging(
              `Failed to load agents from plugin ${plugin.name} default directory: ${error}`,
              { level: 'error' },
            )
          }
        }
        // Load agents from additional paths specified in manifest
        if (plugin.agentsPaths) {
          // Process all agentsPaths in parallel. isDuplicatePath is synchronous
          // (check-and-add), so concurrent access to loadedPaths is safe.
          const pathResults = await Promise.all(
            plugin.agentsPaths.map(
              async (agentPath): Promise<AgentDefinition[]> => {
                try {
                  const fs = getFsImplementation()
                  const stats = await fs.stat(agentPath)
                  if (stats.isDirectory()) {
                    // Load all .md files from directory
                    const agents = await loadAgentsFromDirectory(
                      agentPath,
                      plugin.name,
                      plugin.source,
                      plugin.path,
                      plugin.manifest,
                      loadedPaths,
                    )
                    if (agents.length > 0) {
                      logForDebugging(
                        `Loaded ${agents.length} agents from plugin ${plugin.name} custom path: ${agentPath}`,
                      )
                    }
                    return agents
                  } else if (stats.isFile() && agentPath.endsWith('.md')) {
                    // Load single agent file (no namespace segments)
                    const agent = await loadAgentFromFile(
                      agentPath,
                      plugin.name,
                      [],
                      plugin.source,
                      plugin.path,
                      plugin.manifest,
                      loadedPaths,
                    )
                    if (agent) {
                      logForDebugging(
                        `Loaded agent from plugin ${plugin.name} custom file: ${agentPath}`,
                      )
                      return [agent]
                    }
                  }
                  return []
                } catch (error) {
                  logForDebugging(
                    `Failed to load agents from plugin ${plugin.name} custom path ${agentPath}: ${error}`,
                    { level: 'error' },
                  )
                  return []
                }
              },
            ),
          )
          for (const agents of pathResults) {
            pluginAgents.push(...agents)
          }
        }
        return pluginAgents
      }),
    )
    const allAgents = perPluginAgents.flat()
    logForDebugging(`Total plugin agents loaded: ${allAgents.length}`)
    return allAgents
  },
)
/** Drop the memoized agent list so the next loadPluginAgents() call reloads. */
export function clearPluginAgentCache(): void {
  loadPluginAgents.cache?.clear?.()
}

View File

@@ -0,0 +1,946 @@
import memoize from 'lodash-es/memoize.js'
import { basename, dirname, join } from 'path'
import { getInlinePlugins, getSessionId } from '../../bootstrap/state.js'
import type { Command } from '../../types/command.js'
import { getPluginErrorMessage } from '../../types/plugin.js'
import {
parseArgumentNames,
substituteArguments,
} from '../argumentSubstitution.js'
import { logForDebugging } from '../debug.js'
import { EFFORT_LEVELS, parseEffortValue } from '../effort.js'
import { isBareMode } from '../envUtils.js'
import { isENOENT } from '../errors.js'
import {
coerceDescriptionToString,
type FrontmatterData,
parseBooleanFrontmatter,
parseFrontmatter,
parseShellFrontmatter,
} from '../frontmatterParser.js'
import { getFsImplementation, isDuplicatePath } from '../fsOperations.js'
import {
extractDescriptionFromMarkdown,
parseSlashCommandToolsFromFrontmatter,
} from '../markdownConfigLoader.js'
import { parseUserSpecifiedModel } from '../model/model.js'
import { executeShellCommandsInPrompt } from '../promptShellExecution.js'
import { loadAllPluginsCacheOnly } from './pluginLoader.js'
import {
loadPluginOptions,
substitutePluginVariables,
substituteUserConfigInContent,
} from './pluginOptionsStorage.js'
import type { CommandMetadata, PluginManifest } from './schemas.js'
import { walkPluginMarkdown } from './walkPluginMarkdown.js'
// Similar to MarkdownFile but for plugin sources
type PluginMarkdownFile = {
  filePath: string // absolute path of the .md file
  baseDir: string // commands root used to compute the namespace
  frontmatter: FrontmatterData
  content: string // markdown body with frontmatter stripped
}
// Configuration for loading commands or skills
type LoadConfig = {
  isSkillMode: boolean // true when loading from skills/ directory
}
/**
 * True when the file's basename is exactly SKILL.md (case-insensitive).
 */
function isSkillFile(filePath: string): boolean {
  const fileName = basename(filePath).toLowerCase()
  return fileName === 'skill.md'
}
/**
 * Derive the colon-joined namespace for a directory relative to the
 * commands root, or '' when the directory is the root itself (or lies
 * outside it).
 *
 * NOTE(review): splits on '/' only — assumes paths use forward slashes.
 * Confirm upstream normalization before relying on this on Windows.
 */
function commandNamespaceForDir(dir: string, baseDir: string): string {
  const relativePath = dir.startsWith(baseDir)
    ? dir.slice(baseDir.length).replace(/^\//, '')
    : ''
  return relativePath ? relativePath.split('/').join(':') : ''
}
/**
 * Get command name from file path, handling both regular files and skills.
 *
 * Skills (SKILL.md) are named after their containing directory, with the
 * namespace taken from that directory's parent; regular files are named
 * after the file itself, with the namespace taken from its directory.
 */
function getCommandNameFromFile(
  filePath: string,
  baseDir: string,
  pluginName: string,
): string {
  let commandBaseName: string
  let namespace: string
  if (isSkillFile(filePath)) {
    // For skills, use the parent directory name
    const skillDirectory = dirname(filePath)
    commandBaseName = basename(skillDirectory)
    namespace = commandNamespaceForDir(dirname(skillDirectory), baseDir)
  } else {
    // For regular files, use filename without .md
    commandBaseName = basename(filePath).replace(/\.md$/, '')
    namespace = commandNamespaceForDir(dirname(filePath), baseDir)
  }
  return namespace
    ? `${pluginName}:${namespace}:${commandBaseName}`
    : `${pluginName}:${commandBaseName}`
}
/**
* Recursively collects all markdown files from a directory
*/
async function collectMarkdownFiles(
dirPath: string,
baseDir: string,
loadedPaths: Set<string>,
): Promise<PluginMarkdownFile[]> {
const files: PluginMarkdownFile[] = []
const fs = getFsImplementation()
await walkPluginMarkdown(
dirPath,
async fullPath => {
if (isDuplicatePath(fs, fullPath, loadedPaths)) return
const content = await fs.readFile(fullPath, { encoding: 'utf-8' })
const { frontmatter, content: markdownContent } = parseFrontmatter(
content,
fullPath,
)
files.push({
filePath: fullPath,
baseDir,
frontmatter,
content: markdownContent,
})
},
{ stopAtSkillDir: true, logLabel: 'commands' },
)
return files
}
/**
* Transforms plugin markdown files to handle skill directories
*/
function transformPluginSkillFiles(
files: PluginMarkdownFile[],
): PluginMarkdownFile[] {
const filesByDir = new Map<string, PluginMarkdownFile[]>()
for (const file of files) {
const dir = dirname(file.filePath)
const dirFiles = filesByDir.get(dir) ?? []
dirFiles.push(file)
filesByDir.set(dir, dirFiles)
}
const result: PluginMarkdownFile[] = []
for (const [dir, dirFiles] of filesByDir) {
const skillFiles = dirFiles.filter(f => isSkillFile(f.filePath))
if (skillFiles.length > 0) {
// Use the first skill file if multiple exist
const skillFile = skillFiles[0]!
if (skillFiles.length > 1) {
logForDebugging(
`Multiple skill files found in ${dir}, using ${basename(skillFile.filePath)}`,
)
}
// Directory has a skill - only include the skill file
result.push(skillFile)
} else {
result.push(...dirFiles)
}
}
return result
}
/**
 * Load every command (or skill, per config) under a plugin's commands
 * directory: collect markdown files, collapse skill directories, then build
 * a Command per surviving file. Files createPluginCommand rejects are
 * dropped.
 */
async function loadCommandsFromDirectory(
  commandsPath: string,
  pluginName: string,
  sourceName: string,
  pluginManifest: PluginManifest,
  pluginPath: string,
  config: LoadConfig = { isSkillMode: false },
  loadedPaths: Set<string> = new Set(),
): Promise<Command[]> {
  // Gather every markdown file under the commands root (dedupe via loadedPaths).
  const markdownFiles = await collectMarkdownFiles(
    commandsPath,
    commandsPath,
    loadedPaths,
  )
  // Collapse skill directories down to their single SKILL.md entry.
  const candidates = transformPluginSkillFiles(markdownFiles)
  const commands: Command[] = []
  for (const candidate of candidates) {
    const commandName = getCommandNameFromFile(
      candidate.filePath,
      candidate.baseDir,
      pluginName,
    )
    const command = createPluginCommand(
      commandName,
      candidate,
      sourceName,
      pluginManifest,
      pluginPath,
      isSkillFile(candidate.filePath),
      config,
    )
    if (command !== null) {
      commands.push(command)
    }
  }
  return commands
}
/**
 * Create a Command from a plugin markdown file.
 *
 * Parses frontmatter (description, allowed-tools, arguments, model, effort,
 * invocation flags, shell) and returns a prompt-type Command whose
 * getPromptForCommand performs, in order: argument substitution, plugin
 * variable substitution, user-config substitution, skill-dir / session-id
 * replacement, and inline shell execution. Returns null if construction
 * fails (logged, never thrown).
 *
 * @param commandName - fully-namespaced name (pluginName:ns:base)
 * @param file - parsed markdown file (frontmatter + body)
 * @param sourceName - plugin source identifier
 * @param pluginManifest - manifest; userConfig drives ${user_config.X}
 * @param pluginPath - plugin root for ${CLAUDE_PLUGIN_ROOT}
 * @param isSkill - whether this file is a SKILL.md
 * @param config - isSkillMode when loading from a skills/ directory
 */
function createPluginCommand(
  commandName: string,
  file: PluginMarkdownFile,
  sourceName: string,
  pluginManifest: PluginManifest,
  pluginPath: string,
  isSkill: boolean,
  config: LoadConfig = { isSkillMode: false },
): Command | null {
  try {
    const { frontmatter, content } = file
    const validatedDescription = coerceDescriptionToString(
      frontmatter.description,
      commandName,
    )
    // Fall back to a description extracted from the markdown body.
    const description =
      validatedDescription ??
      extractDescriptionFromMarkdown(
        content,
        isSkill ? 'Plugin skill' : 'Plugin command',
      )
    // Substitute ${CLAUDE_PLUGIN_ROOT} in allowed-tools before parsing
    const rawAllowedTools = frontmatter['allowed-tools']
    const substitutedAllowedTools =
      typeof rawAllowedTools === 'string'
        ? substitutePluginVariables(rawAllowedTools, {
            path: pluginPath,
            source: sourceName,
          })
        : Array.isArray(rawAllowedTools)
          ? rawAllowedTools.map(tool =>
              typeof tool === 'string'
                ? substitutePluginVariables(tool, {
                    path: pluginPath,
                    source: sourceName,
                  })
                : tool,
            )
          : rawAllowedTools
    const allowedTools = parseSlashCommandToolsFromFrontmatter(
      substitutedAllowedTools,
    )
    const argumentHint = frontmatter['argument-hint'] as string | undefined
    const argumentNames = parseArgumentNames(
      frontmatter.arguments as string | string[] | undefined,
    )
    const whenToUse = frontmatter.when_to_use as string | undefined
    const version = frontmatter.version as string | undefined
    const displayName = frontmatter.name as string | undefined
    // Handle model configuration, resolving aliases like 'haiku', 'sonnet', 'opus'
    const model =
      frontmatter.model === 'inherit'
        ? undefined
        : frontmatter.model
          ? parseUserSpecifiedModel(frontmatter.model as string)
          : undefined
    const effortRaw = frontmatter['effort']
    const effort =
      effortRaw !== undefined ? parseEffortValue(effortRaw) : undefined
    if (effortRaw !== undefined && effort === undefined) {
      logForDebugging(
        `Plugin command ${commandName} has invalid effort '${effortRaw}'. Valid options: ${EFFORT_LEVELS.join(', ')} or an integer`,
      )
    }
    const disableModelInvocation = parseBooleanFrontmatter(
      frontmatter['disable-model-invocation'],
    )
    // user-invocable defaults to true when the key is absent.
    const userInvocableValue = frontmatter['user-invocable']
    const userInvocable =
      userInvocableValue === undefined
        ? true
        : parseBooleanFrontmatter(userInvocableValue)
    const shell = parseShellFrontmatter(frontmatter.shell, commandName)
    return {
      type: 'prompt',
      name: commandName,
      description,
      hasUserSpecifiedDescription: validatedDescription !== null,
      allowedTools,
      argumentHint,
      argNames: argumentNames.length > 0 ? argumentNames : undefined,
      whenToUse,
      version,
      model,
      effort,
      disableModelInvocation,
      userInvocable,
      contentLength: content.length,
      source: 'plugin' as const,
      loadedFrom: isSkill || config.isSkillMode ? 'plugin' : undefined,
      pluginInfo: {
        pluginManifest,
        repository: sourceName,
      },
      isHidden: !userInvocable,
      progressMessage: isSkill || config.isSkillMode ? 'loading' : 'running',
      userFacingName(): string {
        return displayName || commandName
      },
      async getPromptForCommand(args, context) {
        // For skills from skills/ directory, include base directory
        let finalContent = config.isSkillMode
          ? `Base directory for this skill: ${dirname(file.filePath)}\n\n${content}`
          : content
        finalContent = substituteArguments(
          finalContent,
          args,
          true,
          argumentNames,
        )
        // Replace ${CLAUDE_PLUGIN_ROOT} and ${CLAUDE_PLUGIN_DATA} with their paths
        finalContent = substitutePluginVariables(finalContent, {
          path: pluginPath,
          source: sourceName,
        })
        // Replace ${user_config.X} with saved option values. Sensitive keys
        // resolve to a descriptive placeholder instead — skill content goes to
        // the model prompt and we don't put secrets there.
        if (pluginManifest.userConfig) {
          finalContent = substituteUserConfigInContent(
            finalContent,
            loadPluginOptions(sourceName),
            pluginManifest.userConfig,
          )
        }
        // Replace ${CLAUDE_SKILL_DIR} with this specific skill's directory.
        // Distinct from ${CLAUDE_PLUGIN_ROOT}: a plugin can contain multiple
        // skills, so CLAUDE_PLUGIN_ROOT points to the plugin root while
        // CLAUDE_SKILL_DIR points to the individual skill's subdirectory.
        if (config.isSkillMode) {
          const rawSkillDir = dirname(file.filePath)
          const skillDir =
            process.platform === 'win32'
              ? rawSkillDir.replace(/\\/g, '/')
              : rawSkillDir
          finalContent = finalContent.replace(
            /\$\{CLAUDE_SKILL_DIR\}/g,
            skillDir,
          )
        }
        // Replace ${CLAUDE_SESSION_ID} with the current session ID
        finalContent = finalContent.replace(
          /\$\{CLAUDE_SESSION_ID\}/g,
          getSessionId(),
        )
        // Inline shell commands run with the command's allowed-tools list
        // injected as the always-allow command rules.
        finalContent = await executeShellCommandsInPrompt(
          finalContent,
          {
            ...context,
            getAppState() {
              const appState = context.getAppState()
              return {
                ...appState,
                toolPermissionContext: {
                  ...appState.toolPermissionContext,
                  alwaysAllowRules: {
                    ...appState.toolPermissionContext.alwaysAllowRules,
                    command: allowedTools,
                  },
                },
              }
            },
          },
          `/${commandName}`,
          shell,
        )
        return [{ type: 'text', text: finalContent }]
      },
    } satisfies Command
  } catch (error) {
    logForDebugging(
      `Failed to create command from ${file.filePath}: ${error}`,
      {
        level: 'error',
      },
    )
    return null
  }
}
/**
 * Loads slash commands contributed by enabled plugins. Memoized — call
 * clearPluginCommandCache() to force a reload on the next call.
 *
 * Per plugin, commands come from three places:
 *   1. plugin.commandsPath — the default commands directory;
 *   2. plugin.commandsPaths — extra files/directories declared in the manifest;
 *   3. plugin.commandsMetadata entries carrying inline `content` (no source file).
 * A per-plugin loadedPaths set deduplicates files seen across these sources.
 * Individual load failures are logged and skipped; they never fail the whole load.
 */
export const getPluginCommands = memoize(async (): Promise<Command[]> => {
  // --bare: skip marketplace plugin auto-load. Explicit --plugin-dir still
  // works — getInlinePlugins() is set by main.tsx from --plugin-dir.
  // loadAllPluginsCacheOnly already short-circuits to inline-only when
  // inlinePlugins.length > 0.
  if (isBareMode() && getInlinePlugins().length === 0) {
    return []
  }
  // Only load commands from enabled plugins
  const { enabled, errors } = await loadAllPluginsCacheOnly()
  if (errors.length > 0) {
    logForDebugging(
      `Plugin loading errors: ${errors.map(e => getPluginErrorMessage(e)).join(', ')}`,
    )
  }
  // Process plugins in parallel; each plugin has its own loadedPaths scope
  const perPluginCommands = await Promise.all(
    enabled.map(async (plugin): Promise<Command[]> => {
      // Track loaded file paths to prevent duplicates within this plugin
      const loadedPaths = new Set<string>()
      const pluginCommands: Command[] = []
      // Load commands from default commands directory
      if (plugin.commandsPath) {
        try {
          const commands = await loadCommandsFromDirectory(
            plugin.commandsPath,
            plugin.name,
            plugin.source,
            plugin.manifest,
            plugin.path,
            { isSkillMode: false },
            loadedPaths,
          )
          pluginCommands.push(...commands)
          if (commands.length > 0) {
            logForDebugging(
              `Loaded ${commands.length} commands from plugin ${plugin.name} default directory`,
            )
          }
        } catch (error) {
          logForDebugging(
            `Failed to load commands from plugin ${plugin.name} default directory: ${error}`,
            { level: 'error' },
          )
        }
      }
      // Load commands from additional paths specified in manifest
      if (plugin.commandsPaths) {
        logForDebugging(
          `Plugin ${plugin.name} has commandsPaths: ${plugin.commandsPaths.join(', ')}`,
        )
        // Process all commandsPaths in parallel. isDuplicatePath is synchronous
        // (check-and-add), so concurrent access to loadedPaths is safe.
        const pathResults = await Promise.all(
          plugin.commandsPaths.map(async (commandPath): Promise<Command[]> => {
            try {
              const fs = getFsImplementation()
              const stats = await fs.stat(commandPath)
              logForDebugging(
                `Checking commandPath ${commandPath} - isDirectory: ${stats.isDirectory()}, isFile: ${stats.isFile()}`,
              )
              if (stats.isDirectory()) {
                // Load all .md files and skill directories from directory
                const commands = await loadCommandsFromDirectory(
                  commandPath,
                  plugin.name,
                  plugin.source,
                  plugin.manifest,
                  plugin.path,
                  { isSkillMode: false },
                  loadedPaths,
                )
                if (commands.length > 0) {
                  logForDebugging(
                    `Loaded ${commands.length} commands from plugin ${plugin.name} custom path: ${commandPath}`,
                  )
                } else {
                  logForDebugging(
                    `Warning: No commands found in plugin ${plugin.name} custom directory: ${commandPath}. Expected .md files or SKILL.md in subdirectories.`,
                    { level: 'warn' },
                  )
                }
                return commands
              } else if (stats.isFile() && commandPath.endsWith('.md')) {
                if (isDuplicatePath(fs, commandPath, loadedPaths)) {
                  return []
                }
                // Load single command file
                const content = await fs.readFile(commandPath, {
                  encoding: 'utf-8',
                })
                const { frontmatter, content: markdownContent } =
                  parseFrontmatter(content, commandPath)
                // Check if there's metadata for this command (object-mapping format)
                let commandName: string | undefined
                let metadataOverride: CommandMetadata | undefined
                if (plugin.commandsMetadata) {
                  // Find metadata by matching the command's absolute path to the metadata source
                  // Convert metadata.source (relative to plugin root) to absolute path for comparison
                  for (const [name, metadata] of Object.entries(
                    plugin.commandsMetadata,
                  )) {
                    if (metadata.source) {
                      const fullMetadataPath = join(
                        plugin.path,
                        metadata.source,
                      )
                      if (commandPath === fullMetadataPath) {
                        commandName = `${plugin.name}:${name}`
                        metadataOverride = metadata
                        break
                      }
                    }
                  }
                }
                // Fall back to filename-based naming if no metadata
                if (!commandName) {
                  commandName = `${plugin.name}:${basename(commandPath).replace(/\.md$/, '')}`
                }
                // Apply metadata overrides to frontmatter.
                // Each override spreads in conditionally, so absent metadata
                // fields leave the file's own frontmatter untouched.
                const finalFrontmatter = metadataOverride
                  ? {
                      ...frontmatter,
                      ...(metadataOverride.description && {
                        description: metadataOverride.description,
                      }),
                      ...(metadataOverride.argumentHint && {
                        'argument-hint': metadataOverride.argumentHint,
                      }),
                      ...(metadataOverride.model && {
                        model: metadataOverride.model,
                      }),
                      ...(metadataOverride.allowedTools && {
                        'allowed-tools':
                          metadataOverride.allowedTools.join(','),
                      }),
                    }
                  : frontmatter
                const file: PluginMarkdownFile = {
                  filePath: commandPath,
                  baseDir: dirname(commandPath),
                  frontmatter: finalFrontmatter,
                  content: markdownContent,
                }
                const command = createPluginCommand(
                  commandName,
                  file,
                  plugin.source,
                  plugin.manifest,
                  plugin.path,
                  false,
                )
                if (command) {
                  logForDebugging(
                    `Loaded command from plugin ${plugin.name} custom file: ${commandPath}${metadataOverride ? ' (with metadata override)' : ''}`,
                  )
                  return [command]
                }
              }
              return []
            } catch (error) {
              logForDebugging(
                `Failed to load commands from plugin ${plugin.name} custom path ${commandPath}: ${error}`,
                { level: 'error' },
              )
              return []
            }
          }),
        )
        for (const commands of pathResults) {
          pluginCommands.push(...commands)
        }
      }
      // Load commands with inline content (no source file)
      // Note: Commands with source files were already loaded in the previous loop
      // when iterating through commandsPaths. This loop handles metadata entries
      // that specify inline content instead of file references.
      if (plugin.commandsMetadata) {
        for (const [name, metadata] of Object.entries(
          plugin.commandsMetadata,
        )) {
          // Only process entries with inline content (no source)
          if (metadata.content && !metadata.source) {
            try {
              // Parse inline content for frontmatter
              const { frontmatter, content: markdownContent } =
                parseFrontmatter(
                  metadata.content,
                  `<inline:${plugin.name}:${name}>`,
                )
              // Apply metadata overrides to frontmatter
              const finalFrontmatter: FrontmatterData = {
                ...frontmatter,
                ...(metadata.description && {
                  description: metadata.description,
                }),
                ...(metadata.argumentHint && {
                  'argument-hint': metadata.argumentHint,
                }),
                ...(metadata.model && {
                  model: metadata.model,
                }),
                ...(metadata.allowedTools && {
                  'allowed-tools': metadata.allowedTools.join(','),
                }),
              }
              const commandName = `${plugin.name}:${name}`
              const file: PluginMarkdownFile = {
                filePath: `<inline:${commandName}>`, // Virtual path for inline content
                baseDir: plugin.path, // Use plugin root as base directory
                frontmatter: finalFrontmatter,
                content: markdownContent,
              }
              const command = createPluginCommand(
                commandName,
                file,
                plugin.source,
                plugin.manifest,
                plugin.path,
                false,
              )
              if (command) {
                pluginCommands.push(command)
                logForDebugging(
                  `Loaded inline content command from plugin ${plugin.name}: ${commandName}`,
                )
              }
            } catch (error) {
              logForDebugging(
                `Failed to load inline content command ${name} from plugin ${plugin.name}: ${error}`,
                { level: 'error' },
              )
            }
          }
        }
      }
      return pluginCommands
    }),
  )
  const allCommands = perPluginCommands.flat()
  logForDebugging(`Total plugin commands loaded: ${allCommands.length}`)
  return allCommands
})
/**
 * Drops the memoized getPluginCommands() result so the next call re-reads
 * commands from disk. Safe to call even when nothing is cached yet.
 */
export function clearPluginCommandCache(): void {
  const commandCache = getPluginCommands.cache
  commandCache?.clear?.()
}
/**
 * Loads skills from plugin skills directories
 * Skills are directories containing SKILL.md files
 *
 * Two layouts are supported:
 *   - skillsPath itself contains SKILL.md (a direct, single-skill directory);
 *   - skillsPath contains subdirectories (or symlinks), each holding its own
 *     SKILL.md.
 *
 * @param skillsPath - Directory to scan for skills
 * @param pluginName - Plugin name, used to namespace skills as `plugin:skill`
 * @param sourceName - Plugin source identifier, forwarded to createPluginCommand
 * @param pluginManifest - Parsed plugin manifest
 * @param pluginPath - Plugin root directory
 * @param loadedPaths - Per-plugin dedupe set; already-seen paths are skipped
 * @returns Commands built from the discovered skills. Failures are logged,
 *   never thrown; a failed skill is simply omitted.
 */
async function loadSkillsFromDirectory(
  skillsPath: string,
  pluginName: string,
  sourceName: string,
  pluginManifest: PluginManifest,
  pluginPath: string,
  loadedPaths: Set<string>,
): Promise<Command[]> {
  const fs = getFsImplementation()
  const skills: Command[] = []
  // First, check if skillsPath itself contains SKILL.md (direct skill directory)
  const directSkillPath = join(skillsPath, 'SKILL.md')
  let directSkillContent: string | null = null
  try {
    directSkillContent = await fs.readFile(directSkillPath, {
      encoding: 'utf-8',
    })
  } catch (e: unknown) {
    if (!isENOENT(e)) {
      // Non-ENOENT read errors (permissions, I/O) abort this directory.
      logForDebugging(`Failed to load skill from ${directSkillPath}: ${e}`, {
        level: 'error',
      })
      return skills
    }
    // ENOENT: no direct SKILL.md, fall through to scan subdirectories
  }
  if (directSkillContent !== null) {
    // This is a direct skill directory, load the skill from here
    if (isDuplicatePath(fs, directSkillPath, loadedPaths)) {
      return skills
    }
    try {
      const { frontmatter, content: markdownContent } = parseFrontmatter(
        directSkillContent,
        directSkillPath,
      )
      // Direct layout: the skill is named after the directory itself.
      const skillName = `${pluginName}:${basename(skillsPath)}`
      const file: PluginMarkdownFile = {
        filePath: directSkillPath,
        baseDir: dirname(directSkillPath),
        frontmatter,
        content: markdownContent,
      }
      const skill = createPluginCommand(
        skillName,
        file,
        sourceName,
        pluginManifest,
        pluginPath,
        true, // isSkill
        { isSkillMode: true }, // config
      )
      if (skill) {
        skills.push(skill)
      }
    } catch (error) {
      logForDebugging(
        `Failed to load skill from ${directSkillPath}: ${error}`,
        {
          level: 'error',
        },
      )
    }
    return skills
  }
  // Otherwise, scan for subdirectories containing SKILL.md files
  let entries
  try {
    entries = await fs.readdir(skillsPath)
  } catch (e: unknown) {
    if (!isENOENT(e)) {
      logForDebugging(
        `Failed to load skills from directory ${skillsPath}: ${e}`,
        { level: 'error' },
      )
    }
    return skills
  }
  await Promise.all(
    entries.map(async entry => {
      // Accept both directories and symlinks (symlinks may point to skill directories)
      if (!entry.isDirectory() && !entry.isSymbolicLink()) {
        return
      }
      const skillDirPath = join(skillsPath, entry.name)
      const skillFilePath = join(skillDirPath, 'SKILL.md')
      // Try to read SKILL.md directly; skip if it doesn't exist
      let content: string
      try {
        content = await fs.readFile(skillFilePath, { encoding: 'utf-8' })
      } catch (e: unknown) {
        if (!isENOENT(e)) {
          logForDebugging(`Failed to load skill from ${skillFilePath}: ${e}`, {
            level: 'error',
          })
        }
        return
      }
      if (isDuplicatePath(fs, skillFilePath, loadedPaths)) {
        return
      }
      try {
        const { frontmatter, content: markdownContent } = parseFrontmatter(
          content,
          skillFilePath,
        )
        // Subdirectory layout: the skill is named after its subdirectory.
        const skillName = `${pluginName}:${entry.name}`
        const file: PluginMarkdownFile = {
          filePath: skillFilePath,
          baseDir: dirname(skillFilePath),
          frontmatter,
          content: markdownContent,
        }
        const skill = createPluginCommand(
          skillName,
          file,
          sourceName,
          pluginManifest,
          pluginPath,
          true, // isSkill
          { isSkillMode: true }, // config
        )
        if (skill) {
          skills.push(skill)
        }
      } catch (error) {
        logForDebugging(
          `Failed to load skill from ${skillFilePath}: ${error}`,
          { level: 'error' },
        )
      }
    }),
  )
  return skills
}
/**
 * Loads skills contributed by enabled plugins. Memoized — call
 * clearPluginSkillsCache() to force a reload on the next call.
 *
 * Per plugin, skills come from the default skills directory
 * (plugin.skillsPath) plus any extra directories declared in the manifest
 * (plugin.skillsPaths); a per-plugin loadedPaths set deduplicates across
 * both sources. Failures are logged per directory, never thrown.
 */
export const getPluginSkills = memoize(async (): Promise<Command[]> => {
  // --bare: same gate as getPluginCommands above — honor explicit
  // --plugin-dir, skip marketplace auto-load.
  if (isBareMode() && getInlinePlugins().length === 0) {
    return []
  }
  // Only load skills from enabled plugins
  const { enabled, errors } = await loadAllPluginsCacheOnly()
  if (errors.length > 0) {
    logForDebugging(
      `Plugin loading errors: ${errors.map(e => getPluginErrorMessage(e)).join(', ')}`,
    )
  }
  logForDebugging(
    `getPluginSkills: Processing ${enabled.length} enabled plugins`,
  )
  // Process plugins in parallel; each plugin has its own loadedPaths scope
  const perPluginSkills = await Promise.all(
    enabled.map(async (plugin): Promise<Command[]> => {
      // Track loaded file paths to prevent duplicates within this plugin
      const loadedPaths = new Set<string>()
      const pluginSkills: Command[] = []
      logForDebugging(
        `Checking plugin ${plugin.name}: skillsPath=${plugin.skillsPath ? 'exists' : 'none'}, skillsPaths=${plugin.skillsPaths ? plugin.skillsPaths.length : 0} paths`,
      )
      // Load skills from default skills directory
      if (plugin.skillsPath) {
        logForDebugging(
          `Attempting to load skills from plugin ${plugin.name} default skillsPath: ${plugin.skillsPath}`,
        )
        try {
          const skills = await loadSkillsFromDirectory(
            plugin.skillsPath,
            plugin.name,
            plugin.source,
            plugin.manifest,
            plugin.path,
            loadedPaths,
          )
          pluginSkills.push(...skills)
          logForDebugging(
            `Loaded ${skills.length} skills from plugin ${plugin.name} default directory`,
          )
        } catch (error) {
          logForDebugging(
            `Failed to load skills from plugin ${plugin.name} default directory: ${error}`,
            { level: 'error' },
          )
        }
      }
      // Load skills from additional paths specified in manifest
      if (plugin.skillsPaths) {
        logForDebugging(
          `Attempting to load skills from plugin ${plugin.name} skillsPaths: ${plugin.skillsPaths.join(', ')}`,
        )
        // Process all skillsPaths in parallel. isDuplicatePath is synchronous
        // (check-and-add), so concurrent access to loadedPaths is safe.
        const pathResults = await Promise.all(
          plugin.skillsPaths.map(async (skillPath): Promise<Command[]> => {
            try {
              logForDebugging(
                `Loading from skillPath: ${skillPath} for plugin ${plugin.name}`,
              )
              const skills = await loadSkillsFromDirectory(
                skillPath,
                plugin.name,
                plugin.source,
                plugin.manifest,
                plugin.path,
                loadedPaths,
              )
              logForDebugging(
                `Loaded ${skills.length} skills from plugin ${plugin.name} custom path: ${skillPath}`,
              )
              return skills
            } catch (error) {
              logForDebugging(
                `Failed to load skills from plugin ${plugin.name} custom path ${skillPath}: ${error}`,
                { level: 'error' },
              )
              return []
            }
          }),
        )
        for (const skills of pathResults) {
          pluginSkills.push(...skills)
        }
      }
      return pluginSkills
    }),
  )
  const allSkills = perPluginSkills.flat()
  logForDebugging(`Total plugin skills loaded: ${allSkills.length}`)
  return allSkills
})
/**
 * Drops the memoized getPluginSkills() result so the next call re-reads
 * skills from disk. Safe to call even when nothing is cached yet.
 */
export function clearPluginSkillsCache(): void {
  const skillsCache = getPluginSkills.cache
  skillsCache?.clear?.()
}

View File

@@ -0,0 +1,287 @@
import memoize from 'lodash-es/memoize.js'
import type { HookEvent } from 'src/entrypoints/agentSdkTypes.js'
import {
clearRegisteredPluginHooks,
getRegisteredHooks,
registerHookCallbacks,
} from '../../bootstrap/state.js'
import type { LoadedPlugin } from '../../types/plugin.js'
import { logForDebugging } from '../debug.js'
import { settingsChangeDetector } from '../settings/changeDetector.js'
import {
getSettings_DEPRECATED,
getSettingsForSource,
} from '../settings/settings.js'
import type { PluginHookMatcher } from '../settings/types.js'
import { jsonStringify } from '../slowOperations.js'
import { clearPluginCache, loadAllPluginsCacheOnly } from './pluginLoader.js'
// Track if hot reload subscription is set up
let hotReloadSubscribed = false
// Snapshot of enabledPlugins for change detection in hot reload
let lastPluginSettingsSnapshot: string | undefined
/**
* Convert plugin hooks configuration to native matchers with plugin context
*/
function convertPluginHooksToMatchers(
plugin: LoadedPlugin,
): Record<HookEvent, PluginHookMatcher[]> {
const pluginMatchers: Record<HookEvent, PluginHookMatcher[]> = {
PreToolUse: [],
PostToolUse: [],
PostToolUseFailure: [],
PermissionDenied: [],
Notification: [],
UserPromptSubmit: [],
SessionStart: [],
SessionEnd: [],
Stop: [],
StopFailure: [],
SubagentStart: [],
SubagentStop: [],
PreCompact: [],
PostCompact: [],
PermissionRequest: [],
Setup: [],
TeammateIdle: [],
TaskCreated: [],
TaskCompleted: [],
Elicitation: [],
ElicitationResult: [],
ConfigChange: [],
WorktreeCreate: [],
WorktreeRemove: [],
InstructionsLoaded: [],
CwdChanged: [],
FileChanged: [],
}
if (!plugin.hooksConfig) {
return pluginMatchers
}
// Process each hook event - pass through all hook types with plugin context
for (const [event, matchers] of Object.entries(plugin.hooksConfig)) {
const hookEvent = event as HookEvent
if (!pluginMatchers[hookEvent]) {
continue
}
for (const matcher of matchers) {
if (matcher.hooks.length > 0) {
pluginMatchers[hookEvent].push({
matcher: matcher.matcher,
hooks: matcher.hooks,
pluginRoot: plugin.path,
pluginName: plugin.name,
pluginId: plugin.source,
})
}
}
}
return pluginMatchers
}
/**
 * Load and register hooks from all enabled plugins
 *
 * Memoized: repeated awaits are no-ops until clearPluginHookCache()
 * invalidates. Registration replaces previously-registered plugin hooks via
 * an atomic clear-then-register pair (see comment below).
 */
export const loadPluginHooks = memoize(async (): Promise<void> => {
  const { enabled } = await loadAllPluginsCacheOnly()
  // Accumulator seeded with an empty matcher list for every known hook event.
  const allPluginHooks: Record<HookEvent, PluginHookMatcher[]> = {
    PreToolUse: [],
    PostToolUse: [],
    PostToolUseFailure: [],
    PermissionDenied: [],
    Notification: [],
    UserPromptSubmit: [],
    SessionStart: [],
    SessionEnd: [],
    Stop: [],
    StopFailure: [],
    SubagentStart: [],
    SubagentStop: [],
    PreCompact: [],
    PostCompact: [],
    PermissionRequest: [],
    Setup: [],
    TeammateIdle: [],
    TaskCreated: [],
    TaskCompleted: [],
    Elicitation: [],
    ElicitationResult: [],
    ConfigChange: [],
    WorktreeCreate: [],
    WorktreeRemove: [],
    InstructionsLoaded: [],
    CwdChanged: [],
    FileChanged: [],
  }
  // Process each enabled plugin
  for (const plugin of enabled) {
    if (!plugin.hooksConfig) {
      continue
    }
    logForDebugging(`Loading hooks from plugin: ${plugin.name}`)
    const pluginMatchers = convertPluginHooksToMatchers(plugin)
    // Merge plugin hooks into the main collection
    for (const event of Object.keys(pluginMatchers) as HookEvent[]) {
      allPluginHooks[event].push(...pluginMatchers[event])
    }
  }
  // Clear-then-register as an atomic pair. Previously the clear lived in
  // clearPluginHookCache(), which meant any clearAllCaches() call (from
  // /plugins UI, pluginInstallationHelpers, thinkback, etc.) wiped plugin
  // hooks from STATE.registeredHooks and left them wiped until someone
  // happened to call loadPluginHooks() again. SessionStart explicitly awaits
  // loadPluginHooks() before firing so it always re-registered; Stop has no
  // such guard, so plugin Stop hooks silently never fired after any plugin
  // management operation (gh-29767). Doing the clear here makes the swap
  // atomic — old hooks stay valid until this point, new hooks take over.
  clearRegisteredPluginHooks()
  registerHookCallbacks(allPluginHooks)
  // Count individual hooks (not matchers) for the debug summary below.
  const totalHooks = Object.values(allPluginHooks).reduce(
    (sum, matchers) => sum + matchers.reduce((s, m) => s + m.hooks.length, 0),
    0,
  )
  logForDebugging(
    `Registered ${totalHooks} hooks from ${enabled.length} plugins`,
  )
})
/**
 * Invalidate the loadPluginHooks memoize so the next call re-reads plugins.
 *
 * Deliberately does NOT wipe STATE.registeredHooks. Wiping here left plugin
 * hooks dead between clearAllCaches() and the next loadPluginHooks() call,
 * which for Stop hooks might never happen (gh-29767). The actual clear now
 * lives inside loadPluginHooks() as an atomic clear-then-register, so the
 * old hooks stay live until a fresh load swaps them out.
 */
export function clearPluginHookCache(): void {
  const hookCache = loadPluginHooks.cache
  hookCache?.clear?.()
}
/**
 * Remove hooks from plugins no longer in the enabled set, without adding
 * hooks from newly-enabled plugins. Called from clearAllCaches() so
 * uninstalled/disabled plugins stop firing hooks immediately (gh-36995),
 * while newly-enabled plugins wait for /reload-plugins — consistent with
 * how commands/agents/MCP behave.
 *
 * The full swap (clear + register all) still happens via loadPluginHooks(),
 * which /reload-plugins awaits.
 *
 * @returns Resolves once the surviving hooks have been re-registered (or
 *   immediately when there is nothing registered to prune).
 */
export async function pruneRemovedPluginHooks(): Promise<void> {
  // Early return when nothing to prune — avoids seeding the loadAllPluginsCacheOnly
  // memoize in test/preload.ts beforeEach (which clears registeredHooks).
  if (!getRegisteredHooks()) return
  const { enabled } = await loadAllPluginsCacheOnly()
  // Survivor test is by plugin root path — the same identifier stamped onto
  // each matcher by convertPluginHooksToMatchers.
  const enabledRoots = new Set(enabled.map(p => p.path))
  // Re-read after the await: a concurrent loadPluginHooks() (hot-reload)
  // could have swapped STATE.registeredHooks during the gap. Holding the
  // pre-await reference would compute survivors from stale data.
  const current = getRegisteredHooks()
  if (!current) return
  // Collect plugin hooks whose pluginRoot is still enabled, then swap via
  // the existing clear+register pair (same atomic-pair pattern as
  // loadPluginHooks above). Callback hooks are preserved by
  // clearRegisteredPluginHooks; we only need to re-register survivors.
  const survivors: Partial<Record<HookEvent, PluginHookMatcher[]>> = {}
  for (const [event, matchers] of Object.entries(current)) {
    const kept = matchers.filter(
      (m): m is PluginHookMatcher =>
        'pluginRoot' in m && enabledRoots.has(m.pluginRoot),
    )
    if (kept.length > 0) survivors[event as HookEvent] = kept
  }
  clearRegisteredPluginHooks()
  registerHookCallbacks(survivors)
}
/**
 * Reset hot reload subscription state. Only for testing.
 *
 * Clears both the subscribe-once guard and the last settings snapshot so a
 * test can exercise setupPluginHookHotReload() from a clean slate.
 */
export function resetHotReloadState(): void {
  hotReloadSubscribed = false
  lastPluginSettingsSnapshot = undefined
}
/**
 * Build a stable string snapshot of the settings that feed into
 * `loadAllPluginsCacheOnly()` for change detection. Record keys are sorted
 * so the comparison is deterministic regardless of insertion order.
 *
 * Covers FOUR fields — not just enabledPlugins — because the memoized
 * loadAllPluginsCacheOnly() also reads strictKnownMarketplaces,
 * blockedMarketplaces (pluginLoader.ts:1933 via getBlockedMarketplaces),
 * and extraKnownMarketplaces. If remote managed settings set only one of
 * these (no enabledPlugins), a snapshot keyed only on enabledPlugins would
 * never diff, the listener would skip, and the memoized result would retain
 * the pre-remote marketplace allow/blocklist.
 * See #23085 / #23152 poisoned-cache discussion (Slack C09N89L3VNJ).
 */
// Exported for testing — the listener at setupPluginHookHotReload uses this
// for change detection; tests verify it diffs on the fields that matter.
export function getPluginAffectingSettingsSnapshot(): string {
  // Key-sort the two Record fields so insertion order doesn't flap the hash.
  // The array fields (strictKnownMarketplaces, blockedMarketplaces) already
  // have schema-stable order, so they pass through untouched.
  const keySorted = (
    record: Record<string, unknown> | undefined,
  ): Record<string, unknown> =>
    record ? Object.fromEntries(Object.entries(record).sort()) : {}
  const merged = getSettings_DEPRECATED()
  const policy = getSettingsForSource('policySettings')
  const snapshot = {
    enabledPlugins: keySorted(merged.enabledPlugins),
    extraKnownMarketplaces: keySorted(merged.extraKnownMarketplaces),
    strictKnownMarketplaces: policy?.strictKnownMarketplaces ?? [],
    blockedMarketplaces: policy?.blockedMarketplaces ?? [],
  }
  return jsonStringify(snapshot)
}
/**
 * Set up hot reload for plugin hooks when remote settings change.
 *
 * Subscribes exactly once (guarded by hotReloadSubscribed). On each
 * policySettings change (e.g. from remote managed settings) the
 * plugin-affecting settings snapshot is recomputed, and hooks are reloaded
 * only when that snapshot actually differs from the last one seen.
 */
export function setupPluginHookHotReload(): void {
  if (hotReloadSubscribed) {
    return
  }
  hotReloadSubscribed = true
  // Capture the initial snapshot so the first policySettings change can compare
  lastPluginSettingsSnapshot = getPluginAffectingSettingsSnapshot()
  settingsChangeDetector.subscribe(source => {
    // Only policy-sourced changes can affect which plugins load.
    if (source !== 'policySettings') {
      return
    }
    const newSnapshot = getPluginAffectingSettingsSnapshot()
    if (newSnapshot === lastPluginSettingsSnapshot) {
      logForDebugging(
        'Plugin hooks: skipping reload, plugin-affecting settings unchanged',
      )
      return
    }
    lastPluginSettingsSnapshot = newSnapshot
    logForDebugging(
      'Plugin hooks: reloading due to plugin-affecting settings change',
    )
    // Clear all plugin-related caches
    clearPluginCache('loadPluginHooks: plugin-affecting settings changed')
    clearPluginHookCache()
    // Reload hooks (fire-and-forget, don't block)
    void loadPluginHooks()
  })
}

View File

@@ -0,0 +1,178 @@
import memoize from 'lodash-es/memoize.js'
import { basename } from 'path'
import type { OutputStyleConfig } from '../../constants/outputStyles.js'
import { getPluginErrorMessage } from '../../types/plugin.js'
import { logForDebugging } from '../debug.js'
import {
coerceDescriptionToString,
parseFrontmatter,
} from '../frontmatterParser.js'
import { getFsImplementation, isDuplicatePath } from '../fsOperations.js'
import { extractDescriptionFromMarkdown } from '../markdownConfigLoader.js'
import { loadAllPluginsCacheOnly } from './pluginLoader.js'
import { walkPluginMarkdown } from './walkPluginMarkdown.js'
/**
 * Collects all output styles under a plugin's output-styles directory by
 * walking its markdown files and loading each one. Files already present in
 * loadedPaths (or that fail to parse) are skipped by loadOutputStyleFromFile.
 */
async function loadOutputStylesFromDirectory(
  outputStylesPath: string,
  pluginName: string,
  loadedPaths: Set<string>,
): Promise<OutputStyleConfig[]> {
  const collected: OutputStyleConfig[] = []
  await walkPluginMarkdown(
    outputStylesPath,
    async markdownPath => {
      const loaded = await loadOutputStyleFromFile(
        markdownPath,
        pluginName,
        loadedPaths,
      )
      if (loaded !== null) {
        collected.push(loaded)
      }
    },
    { logLabel: 'output-styles' },
  )
  return collected
}
/**
 * Loads a single output style from a markdown file.
 *
 * The style name comes from frontmatter `name` (falling back to the file
 * name) and is namespaced as `<plugin>:<name>`, consistent with commands and
 * agents. Returns null for duplicate paths and on any read/parse failure
 * (failures are logged, never thrown).
 */
async function loadOutputStyleFromFile(
  filePath: string,
  pluginName: string,
  loadedPaths: Set<string>,
): Promise<OutputStyleConfig | null> {
  const fs = getFsImplementation()
  // Skip files already loaded through another path for this plugin.
  if (isDuplicatePath(fs, filePath, loadedPaths)) {
    return null
  }
  try {
    const raw = await fs.readFile(filePath, { encoding: 'utf-8' })
    const { frontmatter, content: body } = parseFrontmatter(raw, filePath)
    const fallbackName = basename(filePath, '.md')
    // Namespace output styles with plugin name, consistent with commands and agents
    const name = `${pluginName}:${(frontmatter.name as string) || fallbackName}`
    const description =
      coerceDescriptionToString(frontmatter.description, name) ??
      extractDescriptionFromMarkdown(
        body,
        `Output style from ${pluginName} plugin`,
      )
    // Parse forceForPlugin flag (supports both boolean and string values);
    // anything else leaves it undefined.
    const forceRaw = frontmatter['force-for-plugin']
    let forceForPlugin: boolean | undefined
    if (forceRaw === true || forceRaw === 'true') {
      forceForPlugin = true
    } else if (forceRaw === false || forceRaw === 'false') {
      forceForPlugin = false
    }
    return {
      name,
      description,
      prompt: body.trim(),
      source: 'plugin',
      forceForPlugin,
    }
  } catch (error) {
    logForDebugging(`Failed to load output style from ${filePath}: ${error}`, {
      level: 'error',
    })
    return null
  }
}
/**
 * Loads output styles contributed by enabled plugins. Memoized — call
 * clearPluginOutputStyleCache() to force a reload on the next call.
 *
 * Per plugin, styles come from the default output-styles directory
 * (plugin.outputStylesPath) plus any manifest-declared extra paths
 * (plugin.outputStylesPaths), each of which may be a directory or a single
 * .md file. A per-plugin loadedPaths set deduplicates across sources, and
 * individual failures are logged without aborting the overall load.
 */
export const loadPluginOutputStyles = memoize(
  async (): Promise<OutputStyleConfig[]> => {
    // Only load output styles from enabled plugins
    const { enabled, errors } = await loadAllPluginsCacheOnly()
    const allStyles: OutputStyleConfig[] = []
    if (errors.length > 0) {
      logForDebugging(
        `Plugin loading errors: ${errors.map(e => getPluginErrorMessage(e)).join(', ')}`,
      )
    }
    for (const plugin of enabled) {
      // Track loaded file paths to prevent duplicates within this plugin
      const loadedPaths = new Set<string>()
      // Load output styles from default output-styles directory
      if (plugin.outputStylesPath) {
        try {
          const styles = await loadOutputStylesFromDirectory(
            plugin.outputStylesPath,
            plugin.name,
            loadedPaths,
          )
          allStyles.push(...styles)
          if (styles.length > 0) {
            logForDebugging(
              `Loaded ${styles.length} output styles from plugin ${plugin.name} default directory`,
            )
          }
        } catch (error) {
          logForDebugging(
            `Failed to load output styles from plugin ${plugin.name} default directory: ${error}`,
            { level: 'error' },
          )
        }
      }
      // Load output styles from additional paths specified in manifest
      if (plugin.outputStylesPaths) {
        for (const stylePath of plugin.outputStylesPaths) {
          try {
            const fs = getFsImplementation()
            // Each manifest entry may be a directory or a single .md file;
            // anything else (e.g. a non-.md file) is silently ignored.
            const stats = await fs.stat(stylePath)
            if (stats.isDirectory()) {
              // Load all .md files from directory
              const styles = await loadOutputStylesFromDirectory(
                stylePath,
                plugin.name,
                loadedPaths,
              )
              allStyles.push(...styles)
              if (styles.length > 0) {
                logForDebugging(
                  `Loaded ${styles.length} output styles from plugin ${plugin.name} custom path: ${stylePath}`,
                )
              }
            } else if (stats.isFile() && stylePath.endsWith('.md')) {
              // Load single output style file
              const style = await loadOutputStyleFromFile(
                stylePath,
                plugin.name,
                loadedPaths,
              )
              if (style) {
                allStyles.push(style)
                logForDebugging(
                  `Loaded output style from plugin ${plugin.name} custom file: ${stylePath}`,
                )
              }
            }
          } catch (error) {
            logForDebugging(
              `Failed to load output styles from plugin ${plugin.name} custom path ${stylePath}: ${error}`,
              { level: 'error' },
            )
          }
        }
      }
    }
    logForDebugging(`Total plugin output styles loaded: ${allStyles.length}`)
    return allStyles
  },
)
/**
 * Drops the memoized loadPluginOutputStyles() result so the next call
 * re-reads output styles from disk. Safe when nothing is cached yet.
 */
export function clearPluginOutputStyleCache(): void {
  const styleCache = loadPluginOutputStyles.cache
  styleCache?.clear?.()
}

View File

@@ -0,0 +1,387 @@
import { readFile } from 'fs/promises'
import { join, relative, resolve } from 'path'
import { z } from 'zod/v4'
import type {
LspServerConfig,
ScopedLspServerConfig,
} from '../../services/lsp/types.js'
import { expandEnvVarsInString } from '../../services/mcp/envExpansion.js'
import type { LoadedPlugin, PluginError } from '../../types/plugin.js'
import { logForDebugging } from '../debug.js'
import { isENOENT, toError } from '../errors.js'
import { logError } from '../log.js'
import { jsonParse } from '../slowOperations.js'
import { getPluginDataDir } from './pluginDirectories.js'
import {
getPluginStorageId,
loadPluginOptions,
type PluginOptionValues,
substitutePluginVariables,
substituteUserConfigVariables,
} from './pluginOptionsStorage.js'
import { LspServerConfigSchema } from './schemas.js'
/**
 * Validate that a resolved path stays within the plugin directory.
 * Prevents path traversal attacks via .. or absolute paths.
 *
 * @param pluginPath - Plugin root directory the file must live under
 * @param relativePath - Untrusted path from the manifest, relative to the root
 * @returns The resolved absolute path, or null when it escapes the plugin dir
 */
function validatePathWithinPlugin(
  pluginPath: string,
  relativePath: string,
): string | null {
  // Resolve both paths to absolute paths
  const resolvedPluginPath = resolve(pluginPath)
  const resolvedFilePath = resolve(pluginPath, relativePath)
  // Compute where the candidate sits relative to the plugin root.
  const rel = relative(resolvedPluginPath, resolvedFilePath)
  // The path escapes the plugin dir when the relative form climbs out of it
  // ('..' alone, or a leading '..' *segment* for either separator) or is
  // absolute (relative() yields an absolute path when the two paths share no
  // common root, e.g. different Windows drives; for a normalized absolute
  // path, resolve(rel) === rel). Checking for a '..' segment rather than a
  // bare startsWith('..') avoids wrongly rejecting legitimate entries whose
  // name merely begins with two dots, such as '..config/x.json'.
  const escapes =
    rel === '..' ||
    rel.startsWith('../') ||
    rel.startsWith('..\\') ||
    resolve(rel) === rel
  return escapes ? null : resolvedFilePath
}
/**
 * Load LSP server configurations from a plugin.
 * Checks for:
 * 1. .lsp.json file in plugin directory
 * 2. manifest.lspServers field
 *
 * Manifest-declared servers are merged after .lsp.json, so a same-named
 * server from the manifest overwrites the file's entry (last Object.assign
 * wins). Validation and read failures are recorded into `errors` and logged;
 * this function never throws.
 *
 * @param plugin - The loaded plugin
 * @param errors - Array to collect any errors encountered
 * @returns Record of server name to config, or undefined if no servers
 */
export async function loadPluginLspServers(
  plugin: LoadedPlugin,
  errors: PluginError[] = [],
): Promise<Record<string, LspServerConfig> | undefined> {
  const servers: Record<string, LspServerConfig> = {}
  // 1. Check for .lsp.json file in plugin directory
  const lspJsonPath = join(plugin.path, '.lsp.json')
  try {
    const content = await readFile(lspJsonPath, 'utf-8')
    const parsed = jsonParse(content)
    // Validate the whole file as a name → server-config record.
    const result = z
      .record(z.string(), LspServerConfigSchema())
      .safeParse(parsed)
    if (result.success) {
      Object.assign(servers, result.data)
    } else {
      const errorMsg = `LSP config validation failed for .lsp.json in plugin ${plugin.name}: ${result.error.message}`
      logError(new Error(errorMsg))
      errors.push({
        type: 'lsp-config-invalid',
        plugin: plugin.name,
        serverName: '.lsp.json',
        validationError: result.error.message,
        source: 'plugin',
      })
    }
  } catch (error) {
    // .lsp.json is optional, ignore if it doesn't exist
    if (!isENOENT(error)) {
      const _errorMsg =
        error instanceof Error
          ? `Failed to read/parse .lsp.json in plugin ${plugin.name}: ${error.message}`
          : `Failed to read/parse .lsp.json file in plugin ${plugin.name}`
      logError(toError(error))
      errors.push({
        type: 'lsp-config-invalid',
        plugin: plugin.name,
        serverName: '.lsp.json',
        validationError:
          error instanceof Error
            ? `Failed to parse JSON: ${error.message}`
            : 'Failed to parse JSON file',
        source: 'plugin',
      })
    }
  }
  // 2. Check manifest.lspServers field
  if (plugin.manifest.lspServers) {
    const manifestServers = await loadLspServersFromManifest(
      plugin.manifest.lspServers,
      plugin.path,
      plugin.name,
      errors,
    )
    if (manifestServers) {
      Object.assign(servers, manifestServers)
    }
  }
  return Object.keys(servers).length > 0 ? servers : undefined
}
/**
 * Load LSP servers from manifest declaration (handles multiple formats):
 * a relative file path, an inline Record of server configs, or an array
 * mixing both forms. Later entries override earlier ones on name clash.
 *
 * @param declaration - The manifest.lspServers value
 * @param pluginPath - Absolute plugin root (file paths are validated against it)
 * @param pluginName - Plugin name for error reporting
 * @param errors - Array to collect any errors encountered
 * @returns Record of server name to config, or undefined if none loaded
 */
async function loadLspServersFromManifest(
  declaration:
    | string
    | Record<string, LspServerConfig>
    | Array<string | Record<string, LspServerConfig>>,
  pluginPath: string,
  pluginName: string,
  errors: PluginError[],
): Promise<Record<string, LspServerConfig> | undefined> {
  const servers: Record<string, LspServerConfig> = {}
  // Normalize to array
  const declarations = Array.isArray(declaration) ? declaration : [declaration]
  for (const decl of declarations) {
    if (typeof decl === 'string') {
      // Validate path to prevent directory traversal
      const validatedPath = validatePathWithinPlugin(pluginPath, decl)
      if (!validatedPath) {
        const securityMsg = `Security: Path traversal attempt blocked in plugin ${pluginName}: ${decl}`
        logError(new Error(securityMsg))
        logForDebugging(securityMsg, { level: 'warn' })
        errors.push({
          type: 'lsp-config-invalid',
          plugin: pluginName,
          serverName: decl,
          validationError:
            'Invalid path: must be relative and within plugin directory',
          source: 'plugin',
        })
        continue
      }
      // Load from file
      try {
        const content = await readFile(validatedPath, 'utf-8')
        const parsed = jsonParse(content)
        const result = z
          .record(z.string(), LspServerConfigSchema())
          .safeParse(parsed)
        if (result.success) {
          Object.assign(servers, result.data)
        } else {
          const errorMsg = `LSP config validation failed for ${decl} in plugin ${pluginName}: ${result.error.message}`
          logError(new Error(errorMsg))
          errors.push({
            type: 'lsp-config-invalid',
            plugin: pluginName,
            serverName: decl,
            validationError: result.error.message,
            source: 'plugin',
          })
        }
      } catch (error) {
        // Log the raw error; the detailed parse context goes into `errors`.
        // (Removed a dead `_errorMsg` local that was built but never used.)
        logError(toError(error))
        errors.push({
          type: 'lsp-config-invalid',
          plugin: pluginName,
          serverName: decl,
          validationError:
            error instanceof Error
              ? `Failed to parse JSON: ${error.message}`
              : 'Failed to parse JSON file',
          source: 'plugin',
        })
      }
    } else {
      // Inline configs
      for (const [serverName, config] of Object.entries(decl)) {
        const result = LspServerConfigSchema().safeParse(config)
        if (result.success) {
          servers[serverName] = result.data
        } else {
          const errorMsg = `LSP config validation failed for inline server "${serverName}" in plugin ${pluginName}: ${result.error.message}`
          logError(new Error(errorMsg))
          errors.push({
            type: 'lsp-config-invalid',
            plugin: pluginName,
            serverName,
            validationError: result.error.message,
            source: 'plugin',
          })
        }
      }
    }
  }
  return Object.keys(servers).length > 0 ? servers : undefined
}
/**
 * Resolve environment variables for plugin LSP servers.
 * Handles ${CLAUDE_PLUGIN_ROOT}, ${user_config.X}, and general ${VAR}
 * substitution in command, args, env values, and workspaceFolder.
 * Missing environment variables are collected and logged once at the end.
 */
export function resolvePluginLspEnvironment(
  config: LspServerConfig,
  plugin: { path: string; source: string },
  userConfig?: PluginOptionValues,
  _errors?: PluginError[],
): LspServerConfig {
  const missing: string[] = []
  // Substitution pipeline: plugin vars -> user-config vars -> general env vars.
  const substitute = (raw: string): string => {
    let text = substitutePluginVariables(raw, plugin)
    if (userConfig) {
      text = substituteUserConfigVariables(text, userConfig)
    }
    const { expanded, missingVars } = expandEnvVarsInString(text)
    missing.push(...missingVars)
    return expanded
  }
  const out: LspServerConfig = { ...config }
  // Resolve command path and args
  if (out.command) {
    out.command = substitute(out.command)
  }
  if (out.args) {
    out.args = out.args.map(arg => substitute(arg))
  }
  // Build the env map: the two reserved plugin keys come first so explicit
  // config values can override them, and they are never re-substituted.
  const env: Record<string, string> = {
    CLAUDE_PLUGIN_ROOT: plugin.path,
    CLAUDE_PLUGIN_DATA: getPluginDataDir(plugin.source),
    ...(out.env || {}),
  }
  for (const [key, value] of Object.entries(env)) {
    if (key === 'CLAUDE_PLUGIN_ROOT' || key === 'CLAUDE_PLUGIN_DATA') {
      continue
    }
    env[key] = substitute(value)
  }
  out.env = env
  // workspaceFolder may also contain variables
  if (out.workspaceFolder) {
    out.workspaceFolder = substitute(out.workspaceFolder)
  }
  // Report every distinct missing variable once
  if (missing.length > 0) {
    const uniqueMissingVars = [...new Set(missing)]
    const warnMsg = `Missing environment variables in plugin LSP config: ${uniqueMissingVars.join(', ')}`
    logError(new Error(warnMsg))
    logForDebugging(warnMsg, { level: 'warn' })
  }
  return out
}
/**
 * Add plugin scope to LSP server configs.
 * Prefixes every server name with "plugin:<pluginName>:" so servers from
 * different plugins cannot collide, and tags each config with its source.
 */
export function addPluginScopeToLspServers(
  servers: Record<string, LspServerConfig>,
  pluginName: string,
): Record<string, ScopedLspServerConfig> {
  const scoped: Record<string, ScopedLspServerConfig> = {}
  for (const [serverName, config] of Object.entries(servers)) {
    scoped[`plugin:${pluginName}:${serverName}`] = {
      ...config,
      scope: 'dynamic', // plugin servers always get the dynamic scope
      source: pluginName,
    }
  }
  return scoped
}
/**
 * Get LSP servers from a specific plugin with environment variable
 * resolution and scoping applied. Returns undefined when the plugin is
 * disabled or declares no servers.
 */
export async function getPluginLspServers(
  plugin: LoadedPlugin,
  errors: PluginError[] = [],
): Promise<Record<string, ScopedLspServerConfig> | undefined> {
  if (!plugin.enabled) {
    return undefined
  }
  // Prefer the cached server list; otherwise load from disk.
  const servers =
    plugin.lspServers || (await loadPluginLspServers(plugin, errors))
  if (!servers) {
    return undefined
  }
  // Only load stored plugin options when the manifest actually declares
  // userConfig: loadPluginOptions always returns an object, so an
  // unconditional load would make ${user_config.X} substitution throw on
  // any unresolved key, and would also trigger unneeded keychain reads.
  const userConfig = plugin.manifest.userConfig
    ? loadPluginOptions(getPluginStorageId(plugin))
    : undefined
  // Resolve variables in each server config, then scope by plugin name.
  const resolved: Record<string, LspServerConfig> = {}
  for (const [name, config] of Object.entries(servers)) {
    resolved[name] = resolvePluginLspEnvironment(
      config,
      plugin,
      userConfig,
      errors,
    )
  }
  return addPluginScopeToLspServers(resolved, plugin.name)
}
/**
 * Extract all LSP servers from loaded plugins.
 * Disabled plugins are skipped; each plugin's raw server list is cached
 * on the plugin object, and all servers are returned scoped by plugin name.
 */
export async function extractLspServersFromPlugins(
  plugins: LoadedPlugin[],
  errors: PluginError[] = [],
): Promise<Record<string, ScopedLspServerConfig>> {
  const collected: Record<string, ScopedLspServerConfig> = {}
  for (const plugin of plugins) {
    if (!plugin.enabled) {
      continue
    }
    const servers = await loadPluginLspServers(plugin, errors)
    if (!servers) {
      continue
    }
    // Cache on the plugin so later lookups can skip the disk read.
    plugin.lspServers = servers
    Object.assign(collected, addPluginScopeToLspServers(servers, plugin.name))
    logForDebugging(
      `Loaded ${Object.keys(servers).length} LSP servers from plugin ${plugin.name}`,
    )
  }
  return collected
}

View File

@@ -0,0 +1,374 @@
/**
* LSP Plugin Recommendation Utility
*
* Scans installed marketplaces for LSP plugins and recommends plugins
* based on file extensions, but ONLY when the LSP binary is already
* installed on the system.
*
* Limitation: Can only detect LSP plugins that declare their servers
* inline in the marketplace entry. Plugins with separate .lsp.json files
* are not detectable until after installation.
*/
import { extname } from 'path'
import { isBinaryInstalled } from '../binaryCheck.js'
import { getGlobalConfig, saveGlobalConfig } from '../config.js'
import { logForDebugging } from '../debug.js'
import { isPluginInstalled } from './installedPluginsManager.js'
import {
getMarketplace,
loadKnownMarketplacesConfig,
} from './marketplaceManager.js'
import {
ALLOWED_OFFICIAL_MARKETPLACE_NAMES,
type PluginMarketplaceEntry,
} from './schemas.js'
/**
 * LSP plugin recommendation returned to the caller.
 */
export type LspPluginRecommendation = {
  pluginId: string // "plugin-name@marketplace-name"
  pluginName: string // Human-readable plugin name
  marketplaceName: string // Marketplace name
  description?: string // Plugin description
  isOfficial: boolean // From official marketplace?
  extensions: string[] // File extensions this plugin supports
  command: string // LSP server command (e.g., "typescript-language-server")
}
// Maximum number of times the user can ignore recommendations before we stop
// showing them entirely (enforced by isLspRecommendationsDisabled).
const MAX_IGNORED_COUNT = 5
/**
 * Check if a marketplace is official (from Anthropic).
 * Matching is case-insensitive against the official allowlist.
 */
function isOfficialMarketplace(name: string): boolean {
  const normalized = name.toLowerCase()
  return ALLOWED_OFFICIAL_MARKETPLACE_NAMES.has(normalized)
}
/**
 * Internal type for LSP info extracted from a plugin manifest.
 */
type LspInfo = {
  // Lower-cased file extensions the server supports (keys of extensionToLanguage)
  extensions: Set<string>
  // Command of the first server config found (used for binary availability checks)
  command: string
}
/**
 * Extract LSP info (extensions and command) from inline lspServers config.
 *
 * NOTE: Can only read inline configs, not external .lsp.json files.
 * String paths are skipped as they reference files only available after
 * installation. For array form, the first extractable entry wins.
 *
 * @param lspServers - The lspServers field from PluginMarketplaceEntry
 * @returns LSP info with extensions and command, or null if not extractable
 */
function extractLspInfoFromManifest(
  lspServers: PluginMarketplaceEntry['lspServers'],
): LspInfo | null {
  if (!lspServers) {
    return null
  }
  // A path like "./.lsp.json" is unreadable until the plugin is installed.
  if (typeof lspServers === 'string') {
    logForDebugging(
      '[lspRecommendation] Skipping string path lspServers (not readable from marketplace)',
    )
    return null
  }
  // Inline config object: Record<string, LspServerConfig>
  if (!Array.isArray(lspServers)) {
    return extractFromServerConfigRecord(lspServers)
  }
  // Array form: return the first inline entry that yields info.
  for (const item of lspServers) {
    if (typeof item === 'string') {
      continue // string paths in arrays are equally unreadable
    }
    const info = extractFromServerConfigRecord(item)
    if (info) {
      return info
    }
  }
  return null
}
/*
 * Helpers for reading inline LSP server config records — the extraction
 * itself lives in extractFromServerConfigRecord below.
 */
/**
 * Type guard to check if a value is a record object
 */
// Narrows unknown marketplace data to an indexable object. Note: arrays
// also pass this check (typeof [] === 'object'), matching original behavior.
function isRecord(value: unknown): value is Record<string, unknown> {
  const isObjectLike = typeof value === 'object'
  return isObjectLike && value !== null
}
/**
 * Extract LSP info from a server config record (inline object format).
 * Takes the command from the first config that has one and unions the
 * lower-cased extension keys from every config's extensionToLanguage map.
 */
function extractFromServerConfigRecord(
  serverConfigs: Record<string, unknown>,
): LspInfo | null {
  const extensions = new Set<string>()
  let command: string | null = null
  for (const config of Object.values(serverConfigs)) {
    if (!isRecord(config)) {
      continue
    }
    // First valid command wins
    if (command === null && typeof config.command === 'string') {
      command = config.command
    }
    const extMapping = config.extensionToLanguage
    if (isRecord(extMapping)) {
      for (const ext of Object.keys(extMapping)) {
        extensions.add(ext.toLowerCase())
      }
    }
  }
  // Both pieces are required for a usable recommendation
  if (command === null || extensions.size === 0) {
    return null
  }
  return { extensions, command }
}
/**
 * Internal type pairing a marketplace plugin entry with its LSP metadata.
 */
type LspPluginInfo = {
  entry: PluginMarketplaceEntry // Raw marketplace entry
  marketplaceName: string // Marketplace the entry came from
  extensions: Set<string> // Supported file extensions (lower-cased)
  command: string // LSP server binary/command name
  isOfficial: boolean // True when the marketplace is official (from Anthropic)
}
/**
 * Get all LSP plugins from all installed marketplaces.
 * Individual marketplace load failures are logged and skipped so one bad
 * marketplace cannot hide the rest.
 *
 * @returns Map of pluginId ("name@marketplace") to plugin info with LSP metadata
 */
async function getLspPluginsFromMarketplaces(): Promise<
  Map<string, LspPluginInfo>
> {
  const plugins = new Map<string, LspPluginInfo>()
  try {
    const config = await loadKnownMarketplacesConfig()
    for (const marketplaceName of Object.keys(config)) {
      try {
        const marketplace = await getMarketplace(marketplaceName)
        const isOfficial = isOfficialMarketplace(marketplaceName)
        for (const entry of marketplace.plugins) {
          if (!entry.lspServers) {
            continue // not an LSP plugin
          }
          // Only inline configs are readable pre-install
          const lspInfo = extractLspInfoFromManifest(entry.lspServers)
          if (!lspInfo) {
            continue
          }
          plugins.set(`${entry.name}@${marketplaceName}`, {
            entry,
            marketplaceName,
            extensions: lspInfo.extensions,
            command: lspInfo.command,
            isOfficial,
          })
        }
      } catch (error) {
        logForDebugging(
          `[lspRecommendation] Failed to load marketplace ${marketplaceName}: ${error}`,
        )
      }
    }
  } catch (error) {
    logForDebugging(
      `[lspRecommendation] Failed to load marketplaces config: ${error}`,
    )
  }
  return plugins
}
/**
 * Find matching LSP plugins for a file path.
 *
 * Returns recommendations for plugins that:
 * 1. Support the file's extension
 * 2. Have their LSP binary installed on the system
 * 3. Are not already installed
 * 4. Are not in the user's "never suggest" list
 *
 * Results are sorted with official marketplace plugins first.
 *
 * @param filePath - Path to the file to find LSP plugins for
 * @returns Array of matching plugin recommendations (empty if none or disabled)
 */
export async function getMatchingLspPlugins(
  filePath: string,
): Promise<LspPluginRecommendation[]> {
  // Bail out early when recommendations are globally off
  if (isLspRecommendationsDisabled()) {
    logForDebugging('[lspRecommendation] Recommendations are disabled')
    return []
  }
  const ext = extname(filePath).toLowerCase()
  if (!ext) {
    logForDebugging('[lspRecommendation] No file extension found')
    return []
  }
  logForDebugging(`[lspRecommendation] Looking for LSP plugins for ${ext}`)
  const allLspPlugins = await getLspPluginsFromMarketplaces()
  const neverPlugins =
    getGlobalConfig().lspRecommendationNeverPlugins ?? []
  // Synchronous filters: extension match, never-list, already installed
  type Candidate = { info: LspPluginInfo; pluginId: string }
  const candidates: Candidate[] = []
  for (const [pluginId, info] of allLspPlugins) {
    if (!info.extensions.has(ext)) {
      continue
    }
    if (neverPlugins.includes(pluginId)) {
      logForDebugging(
        `[lspRecommendation] Skipping ${pluginId} (in never suggest list)`,
      )
      continue
    }
    if (isPluginInstalled(pluginId)) {
      logForDebugging(
        `[lspRecommendation] Skipping ${pluginId} (already installed)`,
      )
      continue
    }
    candidates.push({ info, pluginId })
  }
  // Async filter: the LSP binary must already exist on the system
  const withBinary: Candidate[] = []
  for (const candidate of candidates) {
    const { info, pluginId } = candidate
    if (await isBinaryInstalled(info.command)) {
      withBinary.push(candidate)
      logForDebugging(
        `[lspRecommendation] Binary '${info.command}' found for ${pluginId}`,
      )
    } else {
      logForDebugging(
        `[lspRecommendation] Skipping ${pluginId} (binary '${info.command}' not found)`,
      )
    }
  }
  // Official marketplace plugins sort first (stable sort keeps ties in order)
  withBinary.sort(
    (a, b) => Number(b.info.isOfficial) - Number(a.info.isOfficial),
  )
  return withBinary.map(({ info, pluginId }) => ({
    pluginId,
    pluginName: info.entry.name,
    marketplaceName: info.marketplaceName,
    description: info.entry.description,
    isOfficial: info.isOfficial,
    extensions: Array.from(info.extensions),
    command: info.command,
  }))
}
/**
 * Add a plugin to the "never suggest" list.
 * No-op when the plugin is already listed.
 *
 * @param pluginId - Plugin ID to never suggest again
 */
export function addToNeverSuggest(pluginId: string): void {
  saveGlobalConfig(config => {
    const existing = config.lspRecommendationNeverPlugins ?? []
    return existing.includes(pluginId)
      ? config // already present — leave config untouched
      : { ...config, lspRecommendationNeverPlugins: [...existing, pluginId] }
  })
  logForDebugging(`[lspRecommendation] Added ${pluginId} to never suggest`)
}
/**
 * Increment the ignored recommendation count.
 * After MAX_IGNORED_COUNT ignores, recommendations are disabled
 * (see isLspRecommendationsDisabled).
 */
export function incrementIgnoredCount(): void {
  saveGlobalConfig(config => ({
    ...config,
    lspRecommendationIgnoredCount:
      (config.lspRecommendationIgnoredCount ?? 0) + 1,
  }))
  logForDebugging('[lspRecommendation] Incremented ignored count')
}
/**
* Check if LSP recommendations are disabled.
* Disabled when:
* - User explicitly disabled via config
* - User has ignored MAX_IGNORED_COUNT recommendations
*/
export function isLspRecommendationsDisabled(): boolean {
const config = getGlobalConfig()
return (
config.lspRecommendationDisabled === true ||
(config.lspRecommendationIgnoredCount ?? 0) >= MAX_IGNORED_COUNT
)
}
/**
 * Reset the ignored count (useful if user re-enables recommendations).
 * No-op when the count is already zero.
 */
export function resetIgnoredCount(): void {
  saveGlobalConfig(config => {
    if ((config.lspRecommendationIgnoredCount ?? 0) === 0) {
      return config // nothing to reset
    }
    return { ...config, lspRecommendationIgnoredCount: 0 }
  })
  logForDebugging('[lspRecommendation] Reset ignored count')
}

View File

@@ -0,0 +1,27 @@
import { getSettingsForSource } from '../settings/settings.js'
/**
 * Plugin names locked by org policy (policySettings.enabledPlugins).
 *
 * Only "plugin@marketplace" boolean entries (true OR false) are protected;
 * the legacy owner/repo array form is not.
 *
 * Returns null when managed settings declare no plugin entries (common
 * case — no policy in effect).
 */
export function getManagedPluginNames(): Set<string> | null {
  const enabledPlugins = getSettingsForSource('policySettings')?.enabledPlugins
  if (!enabledPlugins) {
    return null
  }
  const names = new Set<string>()
  for (const [pluginId, value] of Object.entries(enabledPlugins)) {
    if (typeof value !== 'boolean') {
      continue
    }
    // Require a non-empty name before the '@' separator
    const atIndex = pluginId.indexOf('@')
    if (atIndex <= 0) {
      continue
    }
    names.add(pluginId.slice(0, atIndex))
  }
  return names.size > 0 ? names : null
}

View File

@@ -0,0 +1,592 @@
import isEqual from 'lodash-es/isEqual.js'
import { toError } from '../errors.js'
import { logError } from '../log.js'
import { getSettingsForSource } from '../settings/settings.js'
import { plural } from '../stringUtils.js'
import { checkGitAvailable } from './gitAvailability.js'
import { getMarketplace } from './marketplaceManager.js'
import type { KnownMarketplace, MarketplaceSource } from './schemas.js'
/**
 * Format plugin failure details for user display.
 * Shows at most two failures, then an "and N more" suffix.
 *
 * @param failures - Array of failures with names and reasons
 * @param includeReasons - Whether to include failure reasons (true for full errors, false for summaries)
 * @returns Formatted string like "plugin-a (reason); plugin-b (reason)" or "plugin-a, plugin-b"
 */
export function formatFailureDetails(
  failures: Array<{ name: string; reason?: string; error?: string }>,
  includeReasons: boolean,
): string {
  const maxShow = 2
  const describe = (failure: {
    name: string
    reason?: string
    error?: string
  }): string => {
    if (!includeReasons) {
      return failure.name
    }
    // `||` (not `??`) so empty-string reasons also fall through
    const reason = failure.reason || failure.error || 'unknown error'
    return `${failure.name} (${reason})`
  }
  const shown = failures.slice(0, maxShow).map(describe)
  const hiddenCount = failures.length - maxShow
  const suffix = hiddenCount > 0 ? ` and ${hiddenCount} more` : ''
  return shown.join(includeReasons ? '; ' : ', ') + suffix
}
/**
 * Extract source display string from marketplace configuration.
 * Unknown source types fall back to "Unknown source".
 */
export function getMarketplaceSourceDisplay(source: MarketplaceSource): string {
  switch (source.source) {
    case 'github':
      return source.repo
    case 'url':
    case 'git':
      return source.url
    case 'directory':
    case 'file':
      return source.path
    case 'settings':
      return `settings:${source.name}`
    default:
      return 'Unknown source'
  }
}
/**
 * Create a plugin ID ("name@marketplace") from plugin and marketplace names.
 */
export function createPluginId(
  pluginName: string,
  marketplaceName: string,
): string {
  return [pluginName, marketplaceName].join('@')
}
/**
 * Load marketplaces with graceful degradation for individual failures.
 * Blocked marketplaces (per enterprise policy) are excluded from the results.
 * A marketplace that fails to load is still listed (with data: null) and
 * also recorded in `failures`.
 */
export async function loadMarketplacesWithGracefulDegradation(
  config: Record<string, KnownMarketplace>,
): Promise<{
  marketplaces: Array<{
    name: string
    config: KnownMarketplace
    data: Awaited<ReturnType<typeof getMarketplace>> | null
  }>
  failures: Array<{ name: string; error: string }>
}> {
  const marketplaces: Array<{
    name: string
    config: KnownMarketplace
    data: Awaited<ReturnType<typeof getMarketplace>> | null
  }> = []
  const failures: Array<{ name: string; error: string }> = []
  for (const [name, marketplaceConfig] of Object.entries(config)) {
    // Enterprise policy: blocked marketplaces never show up at all
    if (!isSourceAllowedByPolicy(marketplaceConfig.source)) {
      continue
    }
    let data: Awaited<ReturnType<typeof getMarketplace>> | null = null
    try {
      data = await getMarketplace(name)
    } catch (err) {
      // Record the failure but keep loading the remaining marketplaces
      failures.push({
        name,
        error: err instanceof Error ? err.message : String(err),
      })
      logError(toError(err)) // log for monitoring
    }
    marketplaces.push({ name, config: marketplaceConfig, data })
  }
  return { marketplaces, failures }
}
/**
 * Format marketplace loading failures into appropriate user messages.
 * Partial failure yields a warning; total failure yields an error;
 * no failures yields null.
 */
export function formatMarketplaceLoadingErrors(
  failures: Array<{ name: string; error: string }>,
  successCount: number,
): { type: 'warning' | 'error'; message: string } | null {
  if (failures.length === 0) {
    return null
  }
  if (successCount === 0) {
    // Nothing loaded at all — treat as critical
    return {
      type: 'error',
      message: `Failed to load all marketplaces. Errors: ${formatFailureErrors(failures)}`,
    }
  }
  // Some marketplaces succeeded — downgrade to a warning
  const first = failures[0]!
  const message =
    failures.length === 1
      ? `Warning: Failed to load marketplace '${first.name}': ${first.error}`
      : `Warning: Failed to load ${failures.length} marketplaces: ${formatFailureNames(failures)}`
  return { type: 'warning', message }
}
// Comma-separated list of failed marketplace names (errors omitted).
function formatFailureNames(
  failures: Array<{ name: string; error: string }>,
): string {
  const names = failures.map(failure => failure.name)
  return names.join(', ')
}
// Semicolon-separated "name: error" pairs for every failure.
function formatFailureErrors(
  failures: Array<{ name: string; error: string }>,
): string {
  const parts = failures.map(failure => `${failure.name}: ${failure.error}`)
  return parts.join('; ')
}
/**
 * Get the strict marketplace source allowlist from policy settings.
 * Returns null if no restriction is in place, or an array of allowed sources.
 */
export function getStrictKnownMarketplaces(): MarketplaceSource[] | null {
  const strict = getSettingsForSource('policySettings')?.strictKnownMarketplaces
  // undefined/missing means "no restrictions"
  return strict ?? null
}
/**
 * Get the marketplace source blocklist from policy settings.
 * Returns null if no blocklist is in place, or an array of blocked sources.
 */
export function getBlockedMarketplaces(): MarketplaceSource[] | null {
  const blocked = getSettingsForSource('policySettings')?.blockedMarketplaces
  // undefined/missing means "no blocklist"
  return blocked ?? null
}
/**
 * Get the custom plugin trust message from policy settings.
 * Returns undefined if not configured.
 */
export function getPluginTrustMessage(): string | undefined {
  const policySettings = getSettingsForSource('policySettings')
  return policySettings?.pluginTrustMessage
}
/**
 * Compare two MarketplaceSource objects for equality.
 * Sources are equal when they share a type and all type-relevant fields
 * match; an empty-string ref/path is treated the same as an absent one.
 */
function areSourcesEqual(a: MarketplaceSource, b: MarketplaceSource): boolean {
  if (a.source !== b.source) return false
  // Collapse '' and undefined into one "unset" value for optional fields
  const norm = (value: string | undefined) => value || undefined
  switch (a.source) {
    case 'url':
      return a.url === (b as typeof a).url
    case 'github': {
      const other = b as typeof a
      return (
        a.repo === other.repo &&
        norm(a.ref) === norm(other.ref) &&
        norm(a.path) === norm(other.path)
      )
    }
    case 'git': {
      const other = b as typeof a
      return (
        a.url === other.url &&
        norm(a.ref) === norm(other.ref) &&
        norm(a.path) === norm(other.path)
      )
    }
    case 'npm':
      return a.package === (b as typeof a).package
    case 'file':
    case 'directory':
      return a.path === (b as typeof a).path
    case 'settings': {
      const other = b as typeof a
      return a.name === other.name && isEqual(a.plugins, other.plugins)
    }
    default:
      return false
  }
}
/**
 * Extract the host/domain from a marketplace source.
 * Used for hostPattern matching in strictKnownMarketplaces.
 *
 * Currently only supports github, git, and url sources.
 * npm, file, and directory sources are not supported for hostPattern matching.
 *
 * @param source - The marketplace source to extract host from
 * @returns The hostname string, or null if extraction fails or source type not supported
 */
export function extractHostFromSource(
  source: MarketplaceSource,
): string | null {
  // Shared URL-parsing fallback; invalid URLs yield null
  const hostnameOf = (url: string): string | null => {
    try {
      return new URL(url).hostname
    } catch {
      return null
    }
  }
  switch (source.source) {
    case 'github':
      // GitHub shorthand always means github.com
      return 'github.com'
    case 'git': {
      // SSH format: user@HOST:path (e.g., git@github.com:owner/repo.git)
      const sshHost = /^[^@]+@([^:]+):/.exec(source.url)?.[1]
      if (sshHost) {
        return sshHost
      }
      // HTTPS format: extract hostname from URL
      return hostnameOf(source.url)
    }
    case 'url':
      return hostnameOf(source.url)
    // npm, file, directory, hostPattern, pathPattern sources are not supported for hostPattern matching
    default:
      return null
  }
}
/**
 * Check if a source matches a hostPattern entry.
 * Extracts the host from the source and tests it against the regex pattern.
 *
 * @param source - The marketplace source to check
 * @param pattern - The hostPattern entry from strictKnownMarketplaces
 * @returns true if the source's host matches the pattern
 */
function doesSourceMatchHostPattern(
  source: MarketplaceSource,
  pattern: MarketplaceSource & { source: 'hostPattern' },
): boolean {
  const host = extractHostFromSource(source)
  if (!host) {
    return false // hostless source types can never match
  }
  try {
    return new RegExp(pattern.hostPattern).test(host)
  } catch {
    // Invalid regex — log and treat as non-matching
    logError(new Error(`Invalid hostPattern regex: ${pattern.hostPattern}`))
    return false
  }
}
/**
 * Check if a source matches a pathPattern entry.
 * Tests the source's .path (file and directory sources only) against the
 * regex pattern.
 *
 * @param source - The marketplace source to check
 * @param pattern - The pathPattern entry from strictKnownMarketplaces
 * @returns true if the source's path matches the pattern
 */
function doesSourceMatchPathPattern(
  source: MarketplaceSource,
  pattern: MarketplaceSource & { source: 'pathPattern' },
): boolean {
  // Only file and directory sources carry a .path
  const isLocal = source.source === 'file' || source.source === 'directory'
  if (!isLocal) {
    return false
  }
  try {
    return new RegExp(pattern.pathPattern).test(source.path)
  } catch {
    // Invalid regex — log and treat as non-matching
    logError(new Error(`Invalid pathPattern regex: ${pattern.pathPattern}`))
    return false
  }
}
/**
 * Get hosts from hostPattern entries in the allowlist.
 * Used to provide helpful error messages. Returns an empty array when no
 * allowlist is configured.
 */
export function getHostPatternsFromAllowlist(): string[] {
  const allowlist = getStrictKnownMarketplaces() ?? []
  const patterns: string[] = []
  for (const entry of allowlist) {
    if (entry.source === 'hostPattern') {
      patterns.push(entry.hostPattern)
    }
  }
  return patterns
}
/**
 * Extract GitHub owner/repo from a git URL if it's a GitHub URL.
 * Returns null if not a GitHub URL.
 *
 * Handles:
 * - git@github.com:owner/repo.git
 * - https://github.com/owner/repo.git
 * - https://github.com/owner/repo
 */
function extractGitHubRepoFromGitUrl(url: string): string | null {
  // SSH form first, then HTTPS; both strip an optional ".git" suffix
  const githubForms = [
    /^git@github\.com:([^/]+\/[^/]+?)(?:\.git)?$/,
    /^https?:\/\/github\.com\/([^/]+\/[^/]+?)(?:\.git)?$/,
  ]
  for (const form of githubForms) {
    const repo = form.exec(url)?.[1]
    if (repo) {
      return repo
    }
  }
  return null
}
/**
 * Check if a blocked ref/path constraint matches a source.
 * If the blocklist entry has no ref/path, it matches ALL refs/paths (wildcard).
 * If the blocklist entry has a specific ref/path, it only matches that exact value.
 */
function blockedConstraintMatches(
  blockedValue: string | undefined,
  sourceValue: string | undefined,
): boolean {
  // No constraint on the blocklist side acts as a wildcard
  if (!blockedValue) {
    return true
  }
  // Otherwise require an exact match ('' on the source side counts as unset)
  return blockedValue === (sourceValue || undefined)
}
/**
 * Check if two sources refer to the same GitHub repository, even if using
 * different source types (github vs git with GitHub URL).
 *
 * Blocklist matching is asymmetric:
 * - If blocklist entry has no ref/path, it blocks ALL refs/paths (wildcard)
 * - If blocklist entry has a specific ref/path, only that exact value is blocked
 *
 * @param source - The source being evaluated against policy
 * @param blocked - A blocklist entry from policy settings
 * @returns true when `blocked` applies to `source`
 */
function areSourcesEquivalentForBlocklist(
  source: MarketplaceSource,
  blocked: MarketplaceSource,
): boolean {
  // Check exact same source type
  if (source.source === blocked.source) {
    switch (source.source) {
      case 'github': {
        // Cast is safe: both discriminants were just checked equal
        const b = blocked as typeof source
        if (source.repo !== b.repo) return false
        return (
          blockedConstraintMatches(b.ref, source.ref) &&
          blockedConstraintMatches(b.path, source.path)
        )
      }
      case 'git': {
        const b = blocked as typeof source
        if (source.url !== b.url) return false
        return (
          blockedConstraintMatches(b.ref, source.ref) &&
          blockedConstraintMatches(b.path, source.path)
        )
      }
      // Remaining types have a single identifying field — exact match only
      case 'url':
        return source.url === (blocked as typeof source).url
      case 'npm':
        return source.package === (blocked as typeof source).package
      case 'file':
        return source.path === (blocked as typeof source).path
      case 'directory':
        return source.path === (blocked as typeof source).path
      case 'settings':
        return source.name === (blocked as typeof source).name
      default:
        return false
    }
  }
  // Check if a git source matches a github blocklist entry
  // (prevents bypassing a github block via git@github.com:... URLs)
  if (source.source === 'git' && blocked.source === 'github') {
    const extractedRepo = extractGitHubRepoFromGitUrl(source.url)
    if (extractedRepo === blocked.repo) {
      return (
        blockedConstraintMatches(blocked.ref, source.ref) &&
        blockedConstraintMatches(blocked.path, source.path)
      )
    }
  }
  // Check if a github source matches a git blocklist entry (GitHub URL)
  if (source.source === 'github' && blocked.source === 'git') {
    const extractedRepo = extractGitHubRepoFromGitUrl(blocked.url)
    if (extractedRepo === source.repo) {
      return (
        blockedConstraintMatches(blocked.ref, source.ref) &&
        blockedConstraintMatches(blocked.path, source.path)
      )
    }
  }
  return false
}
/**
 * Check if a marketplace source is explicitly in the blocklist.
 * Used for error message differentiation.
 *
 * This also catches attempts to bypass a github blocklist entry by using
 * git URLs (e.g., git@github.com:owner/repo.git or https://github.com/owner/repo.git).
 */
export function isSourceInBlocklist(source: MarketplaceSource): boolean {
  const blocklist = getBlockedMarketplaces()
  if (!blocklist) {
    return false // no blocklist configured
  }
  for (const blocked of blocklist) {
    if (areSourcesEquivalentForBlocklist(source, blocked)) {
      return true
    }
  }
  return false
}
/**
 * Check if a marketplace source is allowed by enterprise policy.
 * Returns true if allowed (or no policy), false if blocked.
 * This check happens BEFORE downloading, so blocked sources never touch the filesystem.
 *
 * Policy precedence:
 * 1. blockedMarketplaces (blocklist) - if source matches, it's blocked
 * 2. strictKnownMarketplaces (allowlist) - if set, source must be in the list
 */
export function isSourceAllowedByPolicy(source: MarketplaceSource): boolean {
  // Blocklist always wins
  if (isSourceInBlocklist(source)) {
    return false
  }
  const allowlist = getStrictKnownMarketplaces()
  if (allowlist === null) {
    return true // no restrictions configured
  }
  // Source must match at least one allowlist entry
  for (const allowed of allowlist) {
    // hostPattern entries match by extracted host regex
    if (allowed.source === 'hostPattern') {
      if (doesSourceMatchHostPattern(source, allowed)) return true
      continue
    }
    // pathPattern entries match file/directory .path by regex
    if (allowed.source === 'pathPattern') {
      if (doesSourceMatchPathPattern(source, allowed)) return true
      continue
    }
    // Everything else requires an exact source match
    if (areSourcesEqual(source, allowed)) return true
  }
  return false
}
/**
 * Format a MarketplaceSource for display in error messages.
 * Each variant gets a short "<kind>:<identifier>" label; github/git sources
 * append an "@ref" suffix when a ref is pinned.
 */
export function formatSourceForDisplay(source: MarketplaceSource): string {
  if (source.source === 'github') {
    return `github:${source.repo}${source.ref ? `@${source.ref}` : ''}`
  }
  if (source.source === 'url') {
    return source.url
  }
  if (source.source === 'git') {
    return `git:${source.url}${source.ref ? `@${source.ref}` : ''}`
  }
  if (source.source === 'npm') {
    return `npm:${source.package}`
  }
  if (source.source === 'file') {
    return `file:${source.path}`
  }
  if (source.source === 'directory') {
    return `dir:${source.path}`
  }
  if (source.source === 'hostPattern') {
    return `hostPattern:${source.hostPattern}`
  }
  if (source.source === 'pathPattern') {
    return `pathPattern:${source.pathPattern}`
  }
  if (source.source === 'settings') {
    return `settings:${source.name} (${source.plugins.length} ${plural(source.plugins.length, 'plugin')})`
  }
  // Unrecognized variants (future additions) get a generic label.
  return 'unknown source'
}
/**
 * Reasons why no marketplaces are available in the Discover screen.
 * Produced by detectEmptyMarketplaceReason below, in this priority order.
 */
export type EmptyMarketplaceReason =
  | 'git-not-installed' // git binary missing — required to fetch most marketplace sources
  | 'all-blocked-by-policy' // strictKnownMarketplaces is set but empty: policy forbids everything
  | 'policy-restricts-sources' // allowlist is non-empty, yet nothing is configured
  | 'all-marketplaces-failed' // every configured marketplace failed to load
  | 'no-marketplaces-configured' // nothing configured (and no allowlist in play)
  | 'all-plugins-installed' // marketplaces loaded fine; nothing left to show
/**
* Detect why no marketplaces are available.
* Checks in order of priority: git availability → policy restrictions → config state → failures
*/
export async function detectEmptyMarketplaceReason({
configuredMarketplaceCount,
failedMarketplaceCount,
}: {
configuredMarketplaceCount: number
failedMarketplaceCount: number
}): Promise<EmptyMarketplaceReason> {
// Check if git is installed (required for most marketplace sources)
const gitAvailable = await checkGitAvailable()
if (!gitAvailable) {
return 'git-not-installed'
}
// Check policy restrictions
const allowlist = getStrictKnownMarketplaces()
if (allowlist !== null) {
if (allowlist.length === 0) {
// Policy explicitly blocks all marketplaces
return 'all-blocked-by-policy'
}
// Policy restricts which sources can be used
if (configuredMarketplaceCount === 0) {
return 'policy-restricts-sources'
}
}
// Check if any marketplaces are configured
if (configuredMarketplaceCount === 0) {
return 'no-marketplaces-configured'
}
// Check if all configured marketplaces failed to load
if (
failedMarketplaceCount > 0 &&
failedMarketplaceCount === configuredMarketplaceCount
) {
return 'all-marketplaces-failed'
}
// Marketplaces are configured and loaded, but no plugins available
// This typically means all plugins are already installed
return 'all-plugins-installed'
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,634 @@
import { join } from 'path'
import { expandEnvVarsInString } from '../../services/mcp/envExpansion.js'
import {
type McpServerConfig,
McpServerConfigSchema,
type ScopedMcpServerConfig,
} from '../../services/mcp/types.js'
import type { LoadedPlugin, PluginError } from '../../types/plugin.js'
import { logForDebugging } from '../debug.js'
import { errorMessage, isENOENT } from '../errors.js'
import { getFsImplementation } from '../fsOperations.js'
import { jsonParse } from '../slowOperations.js'
import {
isMcpbSource,
loadMcpbFile,
loadMcpServerUserConfig,
type McpbLoadResult,
type UserConfigSchema,
type UserConfigValues,
validateUserConfig,
} from './mcpbHandler.js'
import { getPluginDataDir } from './pluginDirectories.js'
import {
getPluginStorageId,
loadPluginOptions,
substitutePluginVariables,
substituteUserConfigVariables,
} from './pluginOptionsStorage.js'
/**
* Load MCP servers from an MCPB file
* Handles downloading, extracting, and converting DXT manifest to MCP config
*/
async function loadMcpServersFromMcpb(
plugin: LoadedPlugin,
mcpbPath: string,
errors: PluginError[],
): Promise<Record<string, McpServerConfig> | null> {
try {
logForDebugging(`Loading MCP servers from MCPB: ${mcpbPath}`)
// Use plugin.repository directly - it's already in "plugin@marketplace" format
const pluginId = plugin.repository
const result = await loadMcpbFile(
mcpbPath,
plugin.path,
pluginId,
status => {
logForDebugging(`MCPB [${plugin.name}]: ${status}`)
},
)
// Check if MCPB needs user configuration
if ('status' in result && result.status === 'needs-config') {
// User config needed - this is normal for unconfigured plugins
// Don't load the MCP server yet - user can configure via /plugin menu
logForDebugging(
`MCPB ${mcpbPath} requires user configuration. ` +
`User can configure via: /plugin → Manage plugins → ${plugin.name} → Configure`,
)
// Return null to skip this server for now (not an error)
return null
}
// Type guard passed - result is success type
const successResult = result as McpbLoadResult
// Use the DXT manifest name as the server name
const serverName = successResult.manifest.name
// Check for server name conflicts with existing servers
// This will be checked later when merging all servers, but we log here for debugging
logForDebugging(
`Loaded MCP server "${serverName}" from MCPB (extracted to ${successResult.extractedPath})`,
)
return { [serverName]: successResult.mcpConfig }
} catch (error) {
const errorMsg = errorMessage(error)
logForDebugging(`Failed to load MCPB ${mcpbPath}: ${errorMsg}`, {
level: 'error',
})
// Use plugin@repository as source (consistent with other plugin errors)
const source = `${plugin.name}@${plugin.repository}`
// Determine error type based on error message
const isUrl = mcpbPath.startsWith('http')
if (
isUrl &&
(errorMsg.includes('download') || errorMsg.includes('network'))
) {
errors.push({
type: 'mcpb-download-failed',
source,
plugin: plugin.name,
url: mcpbPath,
reason: errorMsg,
})
} else if (
errorMsg.includes('manifest') ||
errorMsg.includes('user configuration')
) {
errors.push({
type: 'mcpb-invalid-manifest',
source,
plugin: plugin.name,
mcpbPath,
validationError: errorMsg,
})
} else {
errors.push({
type: 'mcpb-extract-failed',
source,
plugin: plugin.name,
mcpbPath,
reason: errorMsg,
})
}
return null
}
}
/**
 * Load MCP servers from a plugin's manifest.
 * This function loads MCP server configurations from various sources within the plugin
 * including manifest entries, .mcp.json files, and .mcpb files.
 *
 * Merge order is last-wins: a `.mcp.json` in the plugin root is loaded first
 * (lowest priority), then manifest-declared entries (string path, MCPB
 * reference, array of specs, or inline configs) are merged on top, so the
 * manifest overrides the file on server-name collision.
 *
 * @param plugin - The loaded plugin whose servers should be collected
 * @param errors - Accumulator for MCPB load failures (forwarded to loadMcpServersFromMcpb)
 * @returns Map of server name → unresolved config, or undefined when none found
 */
export async function loadPluginMcpServers(
  plugin: LoadedPlugin,
  errors: PluginError[] = [],
): Promise<Record<string, McpServerConfig> | undefined> {
  let servers: Record<string, McpServerConfig> = {}
  // Check for .mcp.json in plugin directory first (lowest priority)
  const defaultMcpServers = await loadMcpServersFromFile(
    plugin.path,
    '.mcp.json',
  )
  if (defaultMcpServers) {
    servers = { ...servers, ...defaultMcpServers }
  }
  // Handle manifest mcpServers if present (higher priority)
  if (plugin.manifest.mcpServers) {
    const mcpServersSpec = plugin.manifest.mcpServers
    // Handle different mcpServers formats
    if (typeof mcpServersSpec === 'string') {
      // Check if it's an MCPB file
      if (isMcpbSource(mcpServersSpec)) {
        const mcpbServers = await loadMcpServersFromMcpb(
          plugin,
          mcpServersSpec,
          errors,
        )
        if (mcpbServers) {
          servers = { ...servers, ...mcpbServers }
        }
      } else {
        // Path to JSON file
        const mcpServers = await loadMcpServersFromFile(
          plugin.path,
          mcpServersSpec,
        )
        if (mcpServers) {
          servers = { ...servers, ...mcpServers }
        }
      }
    } else if (Array.isArray(mcpServersSpec)) {
      // Array of paths or inline configs.
      // Load all specs in parallel, then merge in original order so
      // last-wins collision semantics are preserved.
      const results = await Promise.all(
        mcpServersSpec.map(async spec => {
          try {
            if (typeof spec === 'string') {
              // Check if it's an MCPB file
              if (isMcpbSource(spec)) {
                return await loadMcpServersFromMcpb(plugin, spec, errors)
              }
              // Path to JSON file
              return await loadMcpServersFromFile(plugin.path, spec)
            }
            // Inline MCP server configs (sync)
            return spec
          } catch (e) {
            // Defensive: if one spec throws, don't lose results from the
            // others. The previous serial loop implicitly tolerated this.
            logForDebugging(
              `Failed to load MCP servers from spec for plugin ${plugin.name}: ${e}`,
              { level: 'error' },
            )
            return null
          }
        }),
      )
      // Sequential merge of the parallel results preserves spec order.
      for (const result of results) {
        if (result) {
          servers = { ...servers, ...result }
        }
      }
    } else {
      // Direct MCP server configs
      servers = { ...servers, ...mcpServersSpec }
    }
  }
  // Normalize "nothing found" to undefined so callers can early-out.
  return Object.keys(servers).length > 0 ? servers : undefined
}
/**
 * Load MCP servers from a JSON file within a plugin.
 * A simplified loader that does NOT expand environment variables; it is
 * specifically for plugin MCP configs. Invalid individual server entries are
 * logged and dropped rather than failing the whole file.
 */
async function loadMcpServersFromFile(
  pluginPath: string,
  relativePath: string,
): Promise<Record<string, McpServerConfig> | null> {
  const fs = getFsImplementation()
  const filePath = join(pluginPath, relativePath)
  let raw: string
  try {
    raw = await fs.readFile(filePath, { encoding: 'utf-8' })
  } catch (e: unknown) {
    // A missing file simply means the plugin ships no config here.
    if (!isENOENT(e)) {
      logForDebugging(`Failed to load MCP servers from ${filePath}: ${e}`, {
        level: 'error',
      })
    }
    return null
  }
  try {
    const parsed = jsonParse(raw)
    // Accept both the .mcp.json shape ({ mcpServers: {...} }) and a bare map.
    const candidates = parsed.mcpServers || parsed
    const validated: Record<string, McpServerConfig> = {}
    for (const [name, config] of Object.entries(candidates)) {
      const check = McpServerConfigSchema().safeParse(config)
      if (!check.success) {
        logForDebugging(
          `Invalid MCP server config for ${name} in ${filePath}: ${check.error.message}`,
          { level: 'error' },
        )
        continue
      }
      validated[name] = check.data
    }
    return validated
  } catch (error) {
    logForDebugging(`Failed to load MCP servers from ${filePath}: ${error}`, {
      level: 'error',
    })
    return null
  }
}
/**
 * A channel entry from a plugin's manifest whose userConfig has not yet been
 * filled in (required fields are missing from saved settings).
 * Produced by getUnconfiguredChannels below.
 */
export type UnconfiguredChannel = {
  server: string // MCP server name the channel declares
  displayName: string // channel.displayName, falling back to the server name
  configSchema: UserConfigSchema // the fields that still need values
}
/**
 * Find channel entries in a plugin's manifest whose required userConfig
 * fields are not yet saved. Pure function — no React, no prompting.
 * ManagePlugins.tsx calls this after a plugin is enabled to decide whether
 * to show the config dialog.
 *
 * Entries without a `userConfig` schema are skipped (nothing to prompt for).
 * Entries whose saved config already satisfies `validateUserConfig` are
 * skipped. The `configSchema` in the return value is structurally a
 * `UserConfigSchema` because the Zod schema in schemas.ts matches
 * `McpbUserConfigurationOption` field-for-field.
 */
export function getUnconfiguredChannels(
  plugin: LoadedPlugin,
): UnconfiguredChannel[] {
  const channels = plugin.manifest.channels ?? []
  // plugin.repository is already in "plugin@marketplace" format — same key
  // loadMcpServerUserConfig / saveMcpServerUserConfig use.
  const pluginId = plugin.repository
  const needingConfig: UnconfiguredChannel[] = []
  for (const channel of channels) {
    const schema = channel.userConfig
    // No schema (or an empty one) means there is nothing to prompt for.
    if (!schema || Object.keys(schema).length === 0) {
      continue
    }
    const saved = loadMcpServerUserConfig(pluginId, channel.server) ?? {}
    if (validateUserConfig(saved, schema).valid) {
      continue
    }
    needingConfig.push({
      server: channel.server,
      displayName: channel.displayName ?? channel.server,
      configSchema: schema,
    })
  }
  return needingConfig
}
/**
 * Look up saved user config for a server, if this server is declared as a
 * channel in the plugin's manifest. Returns undefined for non-channel servers
 * or channels without a userConfig schema — resolvePluginMcpEnvironment will
 * then skip ${user_config.X} substitution for that server.
 */
function loadChannelUserConfig(
  plugin: LoadedPlugin,
  serverName: string,
): UserConfigValues | undefined {
  const declared = plugin.manifest.channels?.find(
    entry => entry.server === serverName,
  )
  // Only channels that declare a userConfig schema carry saved values.
  if (!declared?.userConfig) {
    return undefined
  }
  return loadMcpServerUserConfig(plugin.repository, serverName) ?? undefined
}
/**
 * Add plugin scope to MCP server configs.
 * Each server name gets a `plugin:<pluginName>:` prefix so two plugins can
 * both ship a server with the same base name without clobbering each other,
 * and every config is tagged with the dynamic scope plus its plugin source.
 */
export function addPluginScopeToServers(
  servers: Record<string, McpServerConfig>,
  pluginName: string,
  pluginSource: string,
): Record<string, ScopedMcpServerConfig> {
  const scoped: Record<string, ScopedMcpServerConfig> = {}
  for (const [name, config] of Object.entries(servers)) {
    // Plugin servers always use the dynamic scope.
    scoped[`plugin:${pluginName}:${name}`] = {
      ...config,
      scope: 'dynamic',
      pluginSource,
    }
  }
  return scoped
}
/**
 * Extract all MCP servers from loaded plugins.
 *
 * Only enabled plugins are consulted. Per plugin: load its servers, resolve
 * plugin/user-config/environment variables, scope the names with a
 * `plugin:<name>:` prefix, then merge everything into one map.
 *
 * NOTE: Resolves environment variables for all servers before returning.
 *
 * @param plugins - All loaded plugins (disabled ones are skipped)
 * @param errors - Accumulator for per-plugin / per-server load failures
 * @returns Map of scoped server name → resolved, scoped config
 */
export async function extractMcpServersFromPlugins(
  plugins: LoadedPlugin[],
  errors: PluginError[] = [],
): Promise<Record<string, ScopedMcpServerConfig>> {
  const allServers: Record<string, ScopedMcpServerConfig> = {}
  const scopedResults = await Promise.all(
    plugins.map(async plugin => {
      if (!plugin.enabled) return null
      const servers = await loadPluginMcpServers(plugin, errors)
      if (!servers) return null
      // Resolve environment variables before scoping. When a saved channel
      // config is missing a key (plugin update added a required field, or a
      // hand-edited settings.json), substituteUserConfigVariables throws
      // inside resolvePluginMcpEnvironment — catch per-server so one bad
      // config doesn't crash the whole plugin load via Promise.all.
      const resolvedServers: Record<string, McpServerConfig> = {}
      for (const [name, config] of Object.entries(servers)) {
        const userConfig = buildMcpUserConfig(plugin, name)
        try {
          resolvedServers[name] = resolvePluginMcpEnvironment(
            config,
            plugin,
            userConfig,
            errors,
            plugin.name,
            name,
          )
        } catch (err) {
          // `errors` defaults to [] so it is never nullish here — the
          // previous `errors?.push` optional chain was dead code.
          errors.push({
            type: 'generic-error',
            source: name,
            plugin: plugin.name,
            error: errorMessage(err),
          })
        }
      }
      // Store the UNRESOLVED servers on the plugin for caching
      // (Environment variables will be resolved fresh each time they're needed)
      plugin.mcpServers = servers
      logForDebugging(
        `Loaded ${Object.keys(servers).length} MCP servers from plugin ${plugin.name}`,
      )
      return addPluginScopeToServers(
        resolvedServers,
        plugin.name,
        plugin.source,
      )
    }),
  )
  for (const scopedServers of scopedResults) {
    if (scopedServers) {
      Object.assign(allServers, scopedServers)
    }
  }
  return allServers
}
/**
 * Build the userConfig map for a single MCP server by merging the plugin's
 * top-level manifest.userConfig values with the channel-specific per-server
 * config (assistant-mode channels). Channel-specific wins on collision so
 * plugins that declare the same key at both levels get the more specific value.
 *
 * Returns undefined when neither source has anything — resolvePluginMcpEnvironment
 * skips substituteUserConfigVariables in that case.
 */
function buildMcpUserConfig(
  plugin: LoadedPlugin,
  serverName: string,
): UserConfigValues | undefined {
  // Only consult loadPluginOptions when the manifest actually declares
  // userConfig. loadPluginOptions always returns at least {} (it spreads two
  // `?? {}` fallbacks), so without this gate we'd hand an empty map to
  // resolvePluginMcpEnvironment — which then throws on any ${user_config.X}
  // reference — and we'd pay the unconditional keychain read (~50-100ms on
  // macOS) even for plugins that don't use options.
  const topLevel = plugin.manifest.userConfig
    ? loadPluginOptions(getPluginStorageId(plugin))
    : undefined
  const channelSpecific = loadChannelUserConfig(plugin, serverName)
  if (topLevel === undefined && channelSpecific === undefined) {
    return undefined
  }
  // Channel-specific values override top-level ones on key collision.
  return { ...topLevel, ...channelSpecific }
}
/**
 * Resolve environment variables for plugin MCP servers.
 * Handles ${CLAUDE_PLUGIN_ROOT}, ${user_config.X}, and general ${VAR} substitution.
 * Tracks missing environment variables for error reporting.
 *
 * Per-string substitution order: plugin-specific variables first, then user
 * config variables (when provided), then general environment expansion — so
 * the earlier, more specific substitutions take precedence.
 *
 * @param config - Unresolved server config; stdio/sse/http/ws variants are rewritten,
 *   other types pass through unchanged
 * @param plugin - Supplies the plugin root path and source id for substitution
 * @param userConfig - Optional ${user_config.X} values; substitution skipped when absent
 * @param errors - When provided (with pluginName and serverName), missing env vars
 *   are reported as an 'mcp-config-invalid' entry
 * @param pluginName - Used to attribute missing-variable errors
 * @param serverName - Used to attribute missing-variable errors
 */
export function resolvePluginMcpEnvironment(
  config: McpServerConfig,
  plugin: { path: string; source: string },
  userConfig?: UserConfigValues,
  errors?: PluginError[],
  pluginName?: string,
  serverName?: string,
): McpServerConfig {
  const allMissingVars: string[] = []
  // Shared resolver applied to every substitutable string in the config.
  const resolveValue = (value: string): string => {
    // First substitute plugin-specific variables
    let resolved = substitutePluginVariables(value, plugin)
    // Then substitute user config variables if provided
    if (userConfig) {
      resolved = substituteUserConfigVariables(resolved, userConfig)
    }
    // Finally expand general environment variables
    // This is done last so plugin-specific and user config vars take precedence
    const { expanded, missingVars } = expandEnvVarsInString(resolved)
    allMissingVars.push(...missingVars)
    return expanded
  }
  let resolved: McpServerConfig
  // Handle different server types
  switch (config.type) {
    case undefined:
    case 'stdio': {
      const stdioConfig = { ...config }
      // Resolve command path
      if (stdioConfig.command) {
        stdioConfig.command = resolveValue(stdioConfig.command)
      }
      // Resolve args
      if (stdioConfig.args) {
        stdioConfig.args = stdioConfig.args.map(arg => resolveValue(arg))
      }
      // Resolve environment variables and add CLAUDE_PLUGIN_ROOT / CLAUDE_PLUGIN_DATA
      const resolvedEnv: Record<string, string> = {
        CLAUDE_PLUGIN_ROOT: plugin.path,
        CLAUDE_PLUGIN_DATA: getPluginDataDir(plugin.source),
        ...(stdioConfig.env || {}),
      }
      // NOTE: the spread above lets a plugin-declared env entry shadow the
      // injected CLAUDE_PLUGIN_* defaults; the loop below skips those two
      // keys, so a shadowing value is kept verbatim (never run through
      // resolveValue).
      for (const [key, value] of Object.entries(resolvedEnv)) {
        if (key !== 'CLAUDE_PLUGIN_ROOT' && key !== 'CLAUDE_PLUGIN_DATA') {
          resolvedEnv[key] = resolveValue(value)
        }
      }
      stdioConfig.env = resolvedEnv
      resolved = stdioConfig
      break
    }
    case 'sse':
    case 'http':
    case 'ws': {
      const remoteConfig = { ...config }
      // Resolve URL
      if (remoteConfig.url) {
        remoteConfig.url = resolveValue(remoteConfig.url)
      }
      // Resolve headers
      if (remoteConfig.headers) {
        const resolvedHeaders: Record<string, string> = {}
        for (const [key, value] of Object.entries(remoteConfig.headers)) {
          resolvedHeaders[key] = resolveValue(value)
        }
        remoteConfig.headers = resolvedHeaders
      }
      resolved = remoteConfig
      break
    }
    // For other types (sse-ide, ws-ide, sdk, claudeai-proxy), pass through unchanged
    case 'sse-ide':
    case 'ws-ide':
    case 'sdk':
    case 'claudeai-proxy':
      resolved = config
      break
  }
  // Log and track missing variables if any were found and errors array provided
  if (errors && allMissingVars.length > 0) {
    const uniqueMissingVars = [...new Set(allMissingVars)]
    const varList = uniqueMissingVars.join(', ')
    logForDebugging(
      `Missing environment variables in plugin MCP config: ${varList}`,
      { level: 'warn' },
    )
    // Add error to the errors array if plugin and server names are provided
    if (pluginName && serverName) {
      errors.push({
        type: 'mcp-config-invalid',
        source: `plugin:${pluginName}`,
        plugin: pluginName,
        serverName,
        validationError: `Missing environment variables: ${varList}`,
      })
    }
  }
  return resolved
}
/**
 * Get MCP servers from a specific plugin with environment variable resolution and scoping.
 * This function is called when the MCP servers need to be activated and ensures they have
 * the proper environment variables and scope applied.
 *
 * @param plugin - The plugin to read servers from; disabled plugins yield undefined
 * @param errors - Accumulator for load/resolution failures
 * @returns Scoped server map, or undefined when disabled or no servers exist
 */
export async function getPluginMcpServers(
  plugin: LoadedPlugin,
  errors: PluginError[] = [],
): Promise<Record<string, ScopedMcpServerConfig> | undefined> {
  if (!plugin.enabled) {
    return undefined
  }
  // Use cached servers if available
  const servers =
    plugin.mcpServers || (await loadPluginMcpServers(plugin, errors))
  if (!servers) {
    return undefined
  }
  // Resolve environment variables. Same per-server try/catch as
  // extractMcpServersFromPlugins above: a partial saved channel config
  // (plugin update added a required field) would make
  // substituteUserConfigVariables throw inside resolvePluginMcpEnvironment,
  // and this function runs inside Promise.all at config.ts:911 — one
  // uncaught throw crashes all plugin MCP loading.
  const resolvedServers: Record<string, McpServerConfig> = {}
  for (const [name, config] of Object.entries(servers)) {
    const userConfig = buildMcpUserConfig(plugin, name)
    try {
      resolvedServers[name] = resolvePluginMcpEnvironment(
        config,
        plugin,
        userConfig,
        errors,
        plugin.name,
        name,
      )
    } catch (err) {
      // `errors` defaults to [] so it is never nullish here — the previous
      // `errors?.push` optional chain was dead code.
      errors.push({
        type: 'generic-error',
        source: name,
        plugin: plugin.name,
        error: errorMessage(err),
      })
    }
  }
  // Add plugin scope
  return addPluginScopeToServers(resolvedServers, plugin.name, plugin.source)
}

View File

@@ -0,0 +1,968 @@
import type {
McpbManifest,
McpbUserConfigurationOption,
} from '@anthropic-ai/mcpb'
import axios from 'axios'
import { createHash } from 'crypto'
import { chmod, writeFile } from 'fs/promises'
import { dirname, join } from 'path'
import type { McpServerConfig } from '../../services/mcp/types.js'
import { logForDebugging } from '../debug.js'
import { parseAndValidateManifestFromBytes } from '../dxt/helpers.js'
import { parseZipModes, unzipFile } from '../dxt/zip.js'
import { errorMessage, getErrnoCode, isENOENT, toError } from '../errors.js'
import { getFsImplementation } from '../fsOperations.js'
import { logError } from '../log.js'
import { getSecureStorage } from '../secureStorage/index.js'
import {
getSettings_DEPRECATED,
updateSettingsForSource,
} from '../settings/settings.js'
import { jsonParse, jsonStringify } from '../slowOperations.js'
import { getSystemDirectories } from '../systemDirectories.js'
import { classifyFetchError, logPluginFetch } from './fetchTelemetry.js'
/**
 * User configuration values for MCPB
 * (keyed by field name from the user_config schema)
 */
export type UserConfigValues = Record<
  string,
  string | number | boolean | string[]
>
/**
 * User configuration schema from DXT manifest
 */
export type UserConfigSchema = Record<string, McpbUserConfigurationOption>
/**
 * Result of loading an MCPB file (success case)
 */
export type McpbLoadResult = {
  manifest: McpbManifest // parsed and validated DXT manifest
  mcpConfig: McpServerConfig // MCP server config generated from the manifest
  extractedPath: string // directory the archive was extracted into
  contentHash: string // first 16 hex chars of the SHA-256 of the archive bytes
}
/**
 * Result when MCPB needs user configuration
 */
export type McpbNeedsConfigResult = {
  status: 'needs-config' // discriminant — callers test `'status' in result`
  manifest: McpbManifest
  extractedPath: string
  contentHash: string
  configSchema: UserConfigSchema // the fields the user must fill in
  existingConfig: UserConfigValues // whatever values were already saved
  validationErrors: string[] // why the saved config was insufficient
}
/**
 * Metadata stored for each cached MCPB
 */
export type McpbCacheMetadata = {
  source: string // the original .mcpb source (path or URL)
  contentHash: string
  extractedPath: string
  cachedAt: string // timestamp string — presumably ISO 8601; TODO confirm
  lastChecked: string // timestamp string — presumably ISO 8601; TODO confirm
}
/**
 * Progress callback for download and extraction operations
 */
export type ProgressCallback = (status: string) => void
/**
 * Check if a source string is an MCPB file reference
 * (either the .mcpb or the legacy .dxt extension).
 */
export function isMcpbSource(source: string): boolean {
  return ['.mcpb', '.dxt'].some(ext => source.endsWith(ext))
}
/**
 * Check if a source is a URL (http:// or https:// only).
 */
function isUrl(source: string): boolean {
  return /^https?:\/\//.test(source)
}
/**
 * Generate content hash for an MCPB file:
 * the first 16 hex characters of the SHA-256 digest of its bytes.
 */
function generateContentHash(data: Uint8Array): string {
  const digest = createHash('sha256').update(data).digest('hex')
  return digest.slice(0, 16)
}
/**
 * Get cache directory for MCPB files — a `.mcpb-cache` folder
 * inside the plugin's own directory.
 */
function getMcpbCacheDir(pluginPath: string): string {
  const cacheDirName = '.mcpb-cache'
  return join(pluginPath, cacheDirName)
}
/**
 * Get metadata file path for a cached MCPB: an 8-hex-char MD5 prefix of the
 * source string names the file, so distinct sources get distinct metadata.
 */
function getMetadataPath(cacheDir: string, source: string): string {
  const digest = createHash('md5').update(source).digest('hex')
  return join(cacheDir, `${digest.slice(0, 8)}.metadata.json`)
}
/**
 * Compose the secureStorage key for a per-server secret bucket.
 * `pluginSecrets` is a flat map shared between per-server secrets and
 * top-level plugin options (pluginOptionsStorage.ts), so entries are keyed
 * by a `${pluginId}/${server}` composite. `/` cannot occur in plugin IDs
 * (`name@marketplace`) or server names (MCP identifier constraints), which
 * makes the composite unambiguous, keeps the SecureStorageData schema
 * unchanged, and stays within the single-keychain-entry size budget
 * (~2KB stdin-safe, see INC-3028) shared across all plugin secrets.
 */
function serverSecretsKey(pluginId: string, serverName: string): string {
  return [pluginId, serverName].join('/')
}
/**
 * Load user configuration for an MCP server, merging non-sensitive values
 * (from settings.json) with sensitive values (from secureStorage keychain).
 * secureStorage wins on collision — schema determines destination so
 * collision shouldn't happen, but if a user hand-edits settings.json we
 * trust the more secure source.
 *
 * Returns null only if NEITHER source has anything — callers skip
 * ${user_config.X} substitution in that case.
 *
 * @param pluginId - Plugin identifier in "plugin@marketplace" format
 * @param serverName - MCP server name from DXT manifest
 */
export function loadMcpServerUserConfig(
  pluginId: string,
  serverName: string,
): UserConfigValues | null {
  try {
    const fromSettings =
      getSettings_DEPRECATED().pluginConfigs?.[pluginId]?.mcpServers?.[
        serverName
      ]
    const fromSecureStorage =
      getSecureStorage().read()?.pluginSecrets?.[
        serverSecretsKey(pluginId, serverName)
      ]
    if (!fromSettings && !fromSecureStorage) {
      return null
    }
    logForDebugging(
      `Loaded user config for ${pluginId}/${serverName} (settings + secureStorage)`,
    )
    // Spread order makes secureStorage the winner on duplicate keys.
    return { ...fromSettings, ...fromSecureStorage }
  } catch (error) {
    logError(toError(error))
    logForDebugging(
      `Failed to load user config for ${pluginId}/${serverName}: ${error}`,
      { level: 'error' },
    )
    return null
  }
}
/**
 * Save user configuration for an MCP server, splitting by `schema[key].sensitive`.
 * Mirrors savePluginOptions (pluginOptionsStorage.ts:90) for top-level options:
 * - `sensitive: true` → secureStorage (keychain on macOS, .credentials.json 0600 elsewhere)
 * - everything else → settings.json pluginConfigs[pluginId].mcpServers[serverName]
 *
 * Without this split, per-channel `sensitive: true` was a false sense of
 * security — the dialog masked the input but the save went to plaintext
 * settings.json anyway. H1 #3617646 (Telegram/Discord bot tokens in
 * world-readable .env) surfaced this as the gap to close.
 *
 * Writes are skipped if nothing in that category is present.
 *
 * @param pluginId - Plugin identifier in "plugin@marketplace" format
 * @param serverName - MCP server name from DXT manifest
 * @param config - User configuration values
 * @param schema - The userConfig schema for this server (manifest.user_config
 *   or channels[].userConfig) — drives the sensitive/non-sensitive split
 * @throws Error (wrapping the original message) if either store write fails;
 *   the secureStorage write runs first, so a failure there leaves the old
 *   settings.json contents untouched
 */
export function saveMcpServerUserConfig(
  pluginId: string,
  serverName: string,
  config: UserConfigValues,
  schema: UserConfigSchema,
): void {
  try {
    // Partition incoming values by the schema's sensitivity flag.
    const nonSensitive: UserConfigValues = {}
    const sensitive: Record<string, string> = {}
    for (const [key, value] of Object.entries(config)) {
      if (schema[key]?.sensitive === true) {
        sensitive[key] = String(value)
      } else {
        nonSensitive[key] = value
      }
    }
    // Scrub ONLY keys we're writing in this call. Covers both directions
    // across schema-version flips:
    // - sensitive→secureStorage ⇒ remove stale plaintext from settings.json
    // - nonSensitive→settings.json ⇒ remove stale entry from secureStorage
    //   (otherwise loadMcpServerUserConfig's {...nonSensitive, ...sensitive}
    //   would let the stale secureStorage value win on next read)
    // Partial `config` (user only re-enters one field) leaves other fields
    // untouched in BOTH stores — defense-in-depth against future callers.
    const sensitiveKeysInThisSave = new Set(Object.keys(sensitive))
    const nonSensitiveKeysInThisSave = new Set(Object.keys(nonSensitive))
    // Sensitive → secureStorage FIRST. If this fails (keychain locked,
    // .credentials.json perms), throw before touching settings.json — the
    // old plaintext stays as a fallback instead of losing BOTH copies.
    //
    // Also scrub non-sensitive keys from secureStorage — schema flipped
    // sensitive→false and they're being written to settings.json now. Without
    // this, loadMcpServerUserConfig's merge would let the stale secureStorage
    // value win on next read.
    const storage = getSecureStorage()
    const k = serverSecretsKey(pluginId, serverName)
    const existingInSecureStorage =
      storage.read()?.pluginSecrets?.[k] ?? undefined
    const secureScrubbed = existingInSecureStorage
      ? Object.fromEntries(
          Object.entries(existingInSecureStorage).filter(
            ([key]) => !nonSensitiveKeysInThisSave.has(key),
          ),
        )
      : undefined
    // A scrub is only "needed" when filtering actually removed entries.
    const needSecureScrub =
      secureScrubbed &&
      existingInSecureStorage &&
      Object.keys(secureScrubbed).length !==
        Object.keys(existingInSecureStorage).length
    if (Object.keys(sensitive).length > 0 || needSecureScrub) {
      const existing = storage.read() ?? {}
      if (!existing.pluginSecrets) {
        existing.pluginSecrets = {}
      }
      // secureStorage keyvault is a flat object — direct replace, no merge
      // semantics to worry about (unlike settings.json's mergeWith).
      existing.pluginSecrets[k] = {
        ...secureScrubbed,
        ...sensitive,
      }
      const result = storage.update(existing)
      if (!result.success) {
        throw new Error(
          `Failed to save sensitive config to secure storage for ${k}`,
        )
      }
      if (result.warning) {
        logForDebugging(`Server secrets save warning: ${result.warning}`, {
          level: 'warn',
        })
      }
      if (needSecureScrub) {
        logForDebugging(
          `saveMcpServerUserConfig: scrubbed ${
            Object.keys(existingInSecureStorage!).length -
            Object.keys(secureScrubbed!).length
          } stale non-sensitive key(s) from secureStorage for ${k}`,
        )
      }
    }
    // Non-sensitive → settings.json. Write whenever there are new non-sensitive
    // values OR existing plaintext sensitive values to scrub — so reconfiguring
    // a sensitive-only schema still cleans up the old settings.json. Runs
    // AFTER the secureStorage write succeeded, so the scrub can't leave you
    // with zero copies of the secret.
    //
    // updateSettingsForSource does mergeWith(diskSettings, ourSettings, ...)
    // which PRESERVES destination keys absent from source — so simply omitting
    // sensitive keys doesn't scrub them, the disk copy merges back in. Instead:
    // set each sensitive key to explicit `undefined` — mergeWith (with the
    // customizer at settings.ts:349) treats explicit undefined as a delete.
    const settings = getSettings_DEPRECATED()
    const existingInSettings =
      settings.pluginConfigs?.[pluginId]?.mcpServers?.[serverName] ?? {}
    const keysToScrubFromSettings = Object.keys(existingInSettings).filter(k =>
      sensitiveKeysInThisSave.has(k),
    )
    if (
      Object.keys(nonSensitive).length > 0 ||
      keysToScrubFromSettings.length > 0
    ) {
      if (!settings.pluginConfigs) {
        settings.pluginConfigs = {}
      }
      if (!settings.pluginConfigs[pluginId]) {
        settings.pluginConfigs[pluginId] = {}
      }
      if (!settings.pluginConfigs[pluginId].mcpServers) {
        settings.pluginConfigs[pluginId].mcpServers = {}
      }
      // Build the scrub-via-undefined map. The UserConfigValues type doesn't
      // include undefined, but updateSettingsForSource's mergeWith customizer
      // needs explicit undefined to delete — cast is deliberate internal
      // plumbing (same rationale as deletePluginOptions in
      // pluginOptionsStorage.ts:184, see CLAUDE.md's 10% case).
      const scrubbed = Object.fromEntries(
        keysToScrubFromSettings.map(k => [k, undefined]),
      ) as Record<string, undefined>
      settings.pluginConfigs[pluginId].mcpServers![serverName] = {
        ...nonSensitive,
        ...scrubbed,
      } as UserConfigValues
      const result = updateSettingsForSource('userSettings', settings)
      if (result.error) {
        throw result.error
      }
      if (keysToScrubFromSettings.length > 0) {
        logForDebugging(
          `saveMcpServerUserConfig: scrubbed ${keysToScrubFromSettings.length} plaintext sensitive key(s) from settings.json for ${pluginId}/${serverName}`,
        )
      }
    }
    logForDebugging(
      `Saved user config for ${pluginId}/${serverName} (${Object.keys(nonSensitive).length} non-sensitive, ${Object.keys(sensitive).length} sensitive)`,
    )
  } catch (error) {
    const errorObj = toError(error)
    logError(errorObj)
    throw new Error(
      `Failed to save user configuration for ${pluginId}/${serverName}: ${errorObj.message}`,
    )
  }
}
/**
 * Validate user configuration values against a DXT user_config schema.
 * Required fields must be present (undefined and '' both count as missing);
 * present values are type-checked, string arrays are only accepted when the
 * field declares `multiple: true`, and numbers are range-checked against
 * min/max when those bounds are declared.
 */
export function validateUserConfig(
  values: UserConfigValues,
  schema: UserConfigSchema,
): { valid: boolean; errors: string[] } {
  const problems: string[] = []
  for (const [key, spec] of Object.entries(schema)) {
    const value = values[key]
    const label = spec.title || key
    const missing = value === undefined || value === ''
    if (missing) {
      // Absent optional fields are fine; absent required ones are not.
      if (spec.required) {
        problems.push(`${label} is required but not provided`)
      }
      continue
    }
    switch (spec.type) {
      case 'string':
        if (Array.isArray(value)) {
          // Arrays are legal for string fields only with multiple: true.
          if (!spec.multiple) {
            problems.push(`${label} must be a string, not an array`)
          } else if (!value.every(v => typeof v === 'string')) {
            problems.push(`${label} must be an array of strings`)
          }
        } else if (typeof value !== 'string') {
          problems.push(`${label} must be a string`)
        }
        break
      case 'number':
        if (typeof value !== 'number') {
          problems.push(`${label} must be a number`)
        } else {
          if (spec.min !== undefined && value < spec.min) {
            problems.push(`${label} must be at least ${spec.min}`)
          }
          if (spec.max !== undefined && value > spec.max) {
            problems.push(`${label} must be at most ${spec.max}`)
          }
        }
        break
      case 'boolean':
        if (typeof value !== 'boolean') {
          problems.push(`${label} must be a boolean`)
        }
        break
      case 'file':
      case 'directory':
        if (typeof value !== 'string') {
          problems.push(`${label} must be a path string`)
        }
        break
    }
  }
  return { valid: problems.length === 0, errors: problems }
}
/**
 * Build the MCP server configuration for an extracted MCPB bundle.
 *
 * @param manifest - validated MCPB manifest
 * @param extractedPath - directory the bundle was extracted into
 * @param userConfig - resolved user configuration values (defaults to empty)
 * @throws when the mcpb helper cannot produce a config for the manifest
 */
async function generateMcpConfig(
  manifest: McpbManifest,
  extractedPath: string,
  userConfig: UserConfigValues = {},
): Promise<McpServerConfig> {
  // Lazy import: the @anthropic-ai/mcpb barrel drags in zod v3 schemas
  // (~700KB of bound closures), so defer loading until actually needed.
  // See dxt/helpers.ts for details.
  const { getMcpConfigForManifest } = await import('@anthropic-ai/mcpb')
  const config = await getMcpConfigForManifest({
    manifest,
    extensionPath: extractedPath,
    systemDirs: getSystemDirectories(),
    userConfig,
    pathSeparator: '/',
  })
  if (config) {
    return config as McpServerConfig
  }
  const error = new Error(
    `Failed to generate MCP server configuration from manifest "${manifest.name}"`,
  )
  logError(error)
  throw error
}
/**
 * Read cached metadata for an MCPB source from disk.
 *
 * Returns null when no metadata file exists yet (ENOENT) or when the file
 * cannot be read/parsed — both mean "treat as uncached".
 */
async function loadCacheMetadata(
  cacheDir: string,
  source: string,
): Promise<McpbCacheMetadata | null> {
  const fs = getFsImplementation()
  const metadataPath = getMetadataPath(cacheDir, source)
  try {
    const raw = await fs.readFile(metadataPath, { encoding: 'utf-8' })
    return jsonParse(raw) as McpbCacheMetadata
  } catch (error) {
    // A missing file simply means nothing has been cached for this source.
    if (getErrnoCode(error) === 'ENOENT') {
      return null
    }
    logError(toError(error))
    logForDebugging(`Failed to load MCPB cache metadata: ${error}`, {
      level: 'error',
    })
    return null
  }
}
/**
 * Persist cache metadata for an MCPB source, creating the cache directory
 * first if it does not already exist.
 */
async function saveCacheMetadata(
  cacheDir: string,
  source: string,
  metadata: McpbCacheMetadata,
): Promise<void> {
  await getFsImplementation().mkdir(cacheDir)
  const metadataPath = getMetadataPath(cacheDir, source)
  await writeFile(metadataPath, jsonStringify(metadata, null, 2), 'utf-8')
}
/**
* Download MCPB file from URL
*/
async function downloadMcpb(
url: string,
destPath: string,
onProgress?: ProgressCallback,
): Promise<Uint8Array> {
logForDebugging(`Downloading MCPB from ${url}`)
if (onProgress) {
onProgress(`Downloading ${url}...`)
}
const started = performance.now()
let fetchTelemetryFired = false
try {
const response = await axios.get(url, {
timeout: 120000, // 2 minute timeout
responseType: 'arraybuffer',
maxRedirects: 5, // Follow redirects (like curl -L)
onDownloadProgress: progressEvent => {
if (progressEvent.total && onProgress) {
const percent = Math.round(
(progressEvent.loaded / progressEvent.total) * 100,
)
onProgress(`Downloading... ${percent}%`)
}
},
})
const data = new Uint8Array(response.data)
// Fire telemetry before writeFile — the event measures the network
// fetch, not disk I/O. A writeFile EACCES would otherwise match
// classifyFetchError's /permission denied/ → misreport as auth.
logPluginFetch('mcpb', url, 'success', performance.now() - started)
fetchTelemetryFired = true
// Save to disk (binary data)
await writeFile(destPath, Buffer.from(data))
logForDebugging(`Downloaded ${data.length} bytes to ${destPath}`)
if (onProgress) {
onProgress('Download complete')
}
return data
} catch (error) {
if (!fetchTelemetryFired) {
logPluginFetch(
'mcpb',
url,
'failure',
performance.now() - started,
classifyFetchError(error),
)
}
const errorMsg = errorMessage(error)
const fullError = new Error(
`Failed to download MCPB file from ${url}: ${errorMsg}`,
)
logError(fullError)
throw fullError
}
}
/**
 * Write the contents of an unzipped MCPB archive into extractPath.
 *
 * @param modes - name→mode map from `parseZipModes`. MCPB bundles can ship
 *   native MCP server binaries, so preserving the exec bit matters here.
 */
async function extractMcpbContents(
  unzipped: Record<string, Uint8Array>,
  extractPath: string,
  modes: Record<string, number>,
  onProgress?: ProgressCallback,
): Promise<void> {
  onProgress?.('Extracting files...')
  const fs = getFsImplementation()
  // Create extraction directory
  await fs.mkdir(extractPath)
  // Directory entries (common in zip -r, Python zipfile, Java ZipOutputStream)
  // are skipped up front — writeFile would create `bin/` as an empty regular
  // file, then mkdir for `bin/server` would fail with ENOTDIR. Filtering them
  // here also keeps the progress denominator consistent with the written
  // count; parent dirs are created implicitly below.
  const fileEntries = Object.entries(unzipped).filter(
    ([name]) => !name.endsWith('/'),
  )
  const total = fileEntries.length
  // Extensions we decode and rewrite as UTF-8 text; everything else is binary.
  const textExtensions = ['.json', '.js', '.ts', '.txt', '.md', '.yml', '.yaml']
  let written = 0
  for (const [relPath, data] of fileEntries) {
    const target = join(extractPath, relPath)
    const parent = dirname(target)
    // Ensure the parent directory exists (recursive handles already-existing).
    if (parent !== extractPath) {
      await fs.mkdir(parent)
    }
    if (textExtensions.some(ext => relPath.endsWith(ext))) {
      await writeFile(target, new TextDecoder().decode(data), 'utf-8')
    } else {
      await writeFile(target, Buffer.from(data))
    }
    const mode = modes[relPath]
    if (mode && mode & 0o111) {
      // Swallow EPERM/ENOTSUP (NFS root_squash, some FUSE mounts) — losing +x
      // is the pre-PR behavior and better than aborting mid-extraction.
      await chmod(target, mode & 0o777).catch(() => {})
    }
    written++
    if (onProgress && written % 10 === 0) {
      onProgress(`Extracted ${written}/${total} files`)
    }
  }
  logForDebugging(`Extracted ${written} files to ${extractPath}`)
  onProgress?.(`Extraction complete (${written} files)`)
}
/**
 * Determine whether an MCPB source needs (re-)extraction.
 *
 * Returns true when there is no cache metadata, the extraction directory is
 * missing/inaccessible, or (for local files) the source file's mtime is newer
 * than the cache timestamp. URL sources are only re-checked on explicit
 * update, which is handled elsewhere.
 */
export async function checkMcpbChanged(
  source: string,
  pluginPath: string,
): Promise<boolean> {
  const fs = getFsImplementation()
  const metadata = await loadCacheMetadata(getMcpbCacheDir(pluginPath), source)
  // No cache metadata at all — nothing has been extracted yet.
  if (!metadata) {
    return true
  }
  // The extraction directory must still be present and accessible.
  try {
    await fs.stat(metadata.extractedPath)
  } catch (error) {
    if (getErrnoCode(error) === 'ENOENT') {
      logForDebugging(`MCPB extraction path missing: ${metadata.extractedPath}`)
    } else {
      logForDebugging(
        `MCPB extraction path inaccessible: ${metadata.extractedPath}: ${error}`,
        { level: 'error' },
      )
    }
    return true
  }
  // For URLs, we'll re-check on explicit update (handled elsewhere).
  if (isUrl(source)) {
    return false
  }
  // Local file: compare its mtime against the cache timestamp.
  const localPath = join(pluginPath, source)
  let stats
  try {
    stats = await fs.stat(localPath)
  } catch (error) {
    if (getErrnoCode(error) === 'ENOENT') {
      logForDebugging(`MCPB source file missing: ${localPath}`)
    } else {
      logForDebugging(
        `MCPB source file inaccessible: ${localPath}: ${error}`,
        { level: 'error' },
      )
    }
    return true
  }
  const cachedTime = new Date(metadata.cachedAt).getTime()
  // Floor to match the ms precision of cachedAt (ISO string). Sub-ms
  // precision on mtimeMs would make a freshly-cached file appear "newer"
  // than its own cache timestamp when both happen in the same millisecond.
  const fileTime = Math.floor(stats.mtimeMs)
  if (fileTime > cachedTime) {
    logForDebugging(
      `MCPB file modified: ${new Date(fileTime)} > ${new Date(cachedTime)}`,
    )
    return true
  }
  return false
}
/**
 * Load and extract an MCPB file, with caching and user configuration support
 *
 * @param source - MCPB file path or URL
 * @param pluginPath - Plugin directory path
 * @param pluginId - Plugin identifier in "plugin@marketplace" format (for config storage)
 * @param onProgress - Progress callback
 * @param providedUserConfig - User configuration values (for initial setup or reconfiguration)
 * @param forceConfigDialog - When true, always return needs-config for cached
 *   bundles that declare user_config (explicit reconfiguration)
 * @returns Success with MCP config, or needs-config status with schema
 */
export async function loadMcpbFile(
  source: string,
  pluginPath: string,
  pluginId: string,
  onProgress?: ProgressCallback,
  providedUserConfig?: UserConfigValues,
  forceConfigDialog?: boolean,
): Promise<McpbLoadResult | McpbNeedsConfigResult> {
  const fs = getFsImplementation()
  const cacheDir = getMcpbCacheDir(pluginPath)
  await fs.mkdir(cacheDir)
  logForDebugging(`Loading MCPB from source: ${source}`)
  // Single construction point for cache metadata. This object literal was
  // previously duplicated at three call sites, which risked the shapes
  // silently drifting apart.
  const persistMetadata = (
    hash: string,
    extractedPath: string,
  ): Promise<void> =>
    saveCacheMetadata(cacheDir, source, {
      source,
      contentHash: hash,
      extractedPath,
      cachedAt: new Date().toISOString(),
      lastChecked: new Date().toISOString(),
    })
  // Check cache first
  const metadata = await loadCacheMetadata(cacheDir, source)
  if (metadata && !(await checkMcpbChanged(source, pluginPath))) {
    logForDebugging(
      `Using cached MCPB from ${metadata.extractedPath} (hash: ${metadata.contentHash})`,
    )
    // Load manifest from cache
    const manifestPath = join(metadata.extractedPath, 'manifest.json')
    let manifestContent: string
    try {
      manifestContent = await fs.readFile(manifestPath, { encoding: 'utf-8' })
    } catch (error) {
      if (isENOENT(error)) {
        const err = new Error(`Cached manifest not found: ${manifestPath}`)
        logError(err)
        throw err
      }
      throw error
    }
    const manifestData = new TextEncoder().encode(manifestContent)
    const manifest = await parseAndValidateManifestFromBytes(manifestData)
    // Check for user_config requirement
    if (manifest.user_config && Object.keys(manifest.user_config).length > 0) {
      // Server name from DXT manifest
      const serverName = manifest.name
      // Try to load existing config from settings.json or use provided config
      const savedConfig = loadMcpServerUserConfig(pluginId, serverName)
      const userConfig = providedUserConfig || savedConfig || {}
      // Validate we have all required fields
      const validation = validateUserConfig(userConfig, manifest.user_config)
      // Return needs-config if: forced (reconfiguration) OR validation failed
      if (forceConfigDialog || !validation.valid) {
        return {
          status: 'needs-config',
          manifest,
          extractedPath: metadata.extractedPath,
          contentHash: metadata.contentHash,
          configSchema: manifest.user_config,
          existingConfig: savedConfig || {},
          validationErrors: validation.valid ? [] : validation.errors,
        }
      }
      // Save config if it was provided (first time or reconfiguration)
      if (providedUserConfig) {
        saveMcpServerUserConfig(
          pluginId,
          serverName,
          providedUserConfig,
          manifest.user_config ?? {},
        )
      }
      // Generate MCP config WITH user config
      const mcpConfig = await generateMcpConfig(
        manifest,
        metadata.extractedPath,
        userConfig,
      )
      return {
        manifest,
        mcpConfig,
        extractedPath: metadata.extractedPath,
        contentHash: metadata.contentHash,
      }
    }
    // No user_config required - generate config without it
    const mcpConfig = await generateMcpConfig(manifest, metadata.extractedPath)
    return {
      manifest,
      mcpConfig,
      extractedPath: metadata.extractedPath,
      contentHash: metadata.contentHash,
    }
  }
  // Not cached or changed - need to download/load and extract
  let mcpbData: Uint8Array
  if (isUrl(source)) {
    // Download from URL. The on-disk archive is keyed by a short hash of the
    // source URL so different URLs never collide in the cache dir.
    const sourceHash = createHash('md5')
      .update(source)
      .digest('hex')
      .substring(0, 8)
    mcpbData = await downloadMcpb(
      source,
      join(cacheDir, `${sourceHash}.mcpb`),
      onProgress,
    )
  } else {
    // Load from local path (relative to the plugin directory)
    const localPath = join(pluginPath, source)
    if (onProgress) {
      onProgress(`Loading ${source}...`)
    }
    try {
      mcpbData = await fs.readFileBytes(localPath)
    } catch (error) {
      if (isENOENT(error)) {
        const err = new Error(`MCPB file not found: ${localPath}`)
        logError(err)
        throw err
      }
      throw error
    }
  }
  // Generate content hash (used as the extraction directory name)
  const contentHash = generateContentHash(mcpbData)
  logForDebugging(`MCPB content hash: ${contentHash}`)
  // Extract ZIP
  if (onProgress) {
    onProgress('Extracting MCPB archive...')
  }
  const unzipped = await unzipFile(Buffer.from(mcpbData))
  // fflate doesn't surface external_attr — parse the central directory so
  // native MCP server binaries keep their exec bit after extraction.
  const modes = parseZipModes(mcpbData)
  // Check for manifest.json
  const manifestData = unzipped['manifest.json']
  if (!manifestData) {
    const error = new Error('No manifest.json found in MCPB file')
    logError(error)
    throw error
  }
  // Parse and validate manifest
  const manifest = await parseAndValidateManifestFromBytes(manifestData)
  logForDebugging(
    `MCPB manifest: ${manifest.name} v${manifest.version} by ${manifest.author.name}`,
  )
  // Check if manifest has server config
  if (!manifest.server) {
    const error = new Error(
      `MCPB manifest for "${manifest.name}" does not define a server configuration`,
    )
    logError(error)
    throw error
  }
  // Extract to cache directory (content-addressed by hash)
  const extractPath = join(cacheDir, contentHash)
  await extractMcpbContents(unzipped, extractPath, modes, onProgress)
  // Check for user_config requirement
  if (manifest.user_config && Object.keys(manifest.user_config).length > 0) {
    // Server name from DXT manifest
    const serverName = manifest.name
    // Try to load existing config from settings.json or use provided config
    const savedConfig = loadMcpServerUserConfig(pluginId, serverName)
    const userConfig = providedUserConfig || savedConfig || {}
    // Validate we have all required fields
    const validation = validateUserConfig(userConfig, manifest.user_config)
    if (!validation.valid) {
      // Save cache metadata even though config is incomplete, so the next
      // call reuses the extraction instead of re-downloading.
      await persistMetadata(contentHash, extractPath)
      // Return "needs configuration" status
      return {
        status: 'needs-config',
        manifest,
        extractedPath: extractPath,
        contentHash,
        configSchema: manifest.user_config,
        existingConfig: savedConfig || {},
        validationErrors: validation.errors,
      }
    }
    // Save config if it was provided (first time or reconfiguration)
    if (providedUserConfig) {
      saveMcpServerUserConfig(
        pluginId,
        serverName,
        providedUserConfig,
        manifest.user_config ?? {},
      )
    }
    // Generate MCP config WITH user config
    if (onProgress) {
      onProgress('Generating MCP server configuration...')
    }
    const mcpConfig = await generateMcpConfig(manifest, extractPath, userConfig)
    // Save cache metadata
    await persistMetadata(contentHash, extractPath)
    return {
      manifest,
      mcpConfig,
      extractedPath: extractPath,
      contentHash,
    }
  }
  // No user_config required - generate config without it
  if (onProgress) {
    onProgress('Generating MCP server configuration...')
  }
  const mcpConfig = await generateMcpConfig(manifest, extractPath)
  // Save cache metadata
  await persistMetadata(contentHash, extractPath)
  logForDebugging(
    `Successfully loaded MCPB: ${manifest.name} (extracted to ${extractPath})`,
  )
  return {
    manifest,
    mcpConfig,
    extractedPath: extractPath,
    contentHash,
  }
}

View File

@@ -0,0 +1,25 @@
/**
* Constants for the official Anthropic plugins marketplace.
*
* The official marketplace is hosted on GitHub and provides first-party
* plugins developed by Anthropic. This file defines the constants needed
* to install and identify this marketplace.
*/
import type { MarketplaceSource } from './schemas.js'
/**
 * Source configuration for the official Anthropic plugins marketplace.
 * Used when auto-installing the marketplace on startup.
 *
 * `as const satisfies MarketplaceSource` keeps the literal property types
 * while still validating the object's shape against the schema type.
 */
export const OFFICIAL_MARKETPLACE_SOURCE = {
  source: 'github',
  repo: 'anthropics/claude-plugins-official',
} as const satisfies MarketplaceSource
/**
 * Display name for the official marketplace.
 * This is the name under which the marketplace will be registered
 * in the known_marketplaces.json file.
 */
export const OFFICIAL_MARKETPLACE_NAME = 'claude-plugins-official'

View File

@@ -0,0 +1,216 @@
/**
* inc-5046: fetch the official marketplace from a GCS mirror instead of
* git-cloning GitHub on every startup.
*
* Backend (anthropic#317037) publishes a marketplace-only zip alongside the
* titanium squashfs, keyed by base repo SHA. This module fetches the `latest`
* pointer, compares against a local sentinel, and downloads+extracts the zip
* when there's a new SHA. Callers decide fallback behavior on failure.
*/
import axios from 'axios'
import { chmod, mkdir, readFile, rename, rm, writeFile } from 'fs/promises'
import { dirname, join, resolve, sep } from 'path'
import { waitForScrollIdle } from '../../bootstrap/state.js'
import type { AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS } from '../../services/analytics/index.js'
import { logEvent } from '../../services/analytics/index.js'
import { logForDebugging } from '../debug.js'
import { parseZipModes, unzipFile } from '../dxt/zip.js'
import { errorMessage, getErrnoCode } from '../errors.js'
// Shorthand for the analytics "verified safe metadata" marker type — values
// tagged with it are asserted to contain no code, file paths, or PII.
type SafeString = AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS
// CDN-fronted domain for the public GCS bucket (same bucket the native
// binary ships from — nativeInstaller/download.ts:24 uses the raw GCS URL).
// `{sha}.zip` is content-addressed so CDN can cache it indefinitely;
// `latest` has Cache-Control: max-age=300 so CDN staleness is bounded.
// Backend (anthropic#317037) populates this prefix.
const GCS_BASE =
  'https://downloads.claude.ai/claude-code-releases/plugins/claude-plugins-official'
// Zip arc paths are seed-dir-relative (marketplaces/claude-plugins-official/…)
// so the titanium seed machinery can use the same zip. Strip this prefix when
// extracting for a laptop install.
const ARC_PREFIX = 'marketplaces/claude-plugins-official/'
/**
 * Fetch the official marketplace from GCS and extract to installLocation.
 * Idempotent — checks a `.gcs-sha` sentinel before downloading the ~3.5MB zip.
 *
 * @param installLocation where to extract (must be inside marketplacesCacheDir)
 * @param marketplacesCacheDir the plugins marketplace cache root — passed in
 *   by callers (rather than imported from pluginDirectories) to break a
 *   circular-dep edge through marketplaceManager
 * @returns the fetched SHA on success (including no-op), null on any failure
 *   (network, 404, zip parse). Caller decides whether to fall through to git.
 */
export async function fetchOfficialMarketplaceFromGcs(
  installLocation: string,
  marketplacesCacheDir: string,
): Promise<string | null> {
  // Defense in depth: this function does `rm(installLocation, {recursive})`
  // during the atomic swap. A corrupted known_marketplaces.json (gh-32793 —
  // Windows path read on WSL, literal tilde, manual edit) could point at the
  // user's project. Refuse any path outside the marketplaces cache dir.
  // Same guard as refreshMarketplace() at marketplaceManager.ts:~2392 but
  // inside the function so ALL callers are covered.
  const cacheDir = resolve(marketplacesCacheDir)
  const resolvedLoc = resolve(installLocation)
  if (resolvedLoc !== cacheDir && !resolvedLoc.startsWith(cacheDir + sep)) {
    logForDebugging(
      `fetchOfficialMarketplaceFromGcs: refusing path outside cache dir: ${installLocation}`,
      { level: 'error' },
    )
    return null
  }
  // Network + zip extraction competes for the event loop with scroll frames.
  // This is a fire-and-forget startup call — delaying by a few hundred ms
  // until scroll settles is invisible to the user.
  await waitForScrollIdle()
  const start = performance.now()
  let outcome: 'noop' | 'updated' | 'failed' = 'failed'
  let sha: string | undefined
  let bytes: number | undefined
  let errKind: string | undefined
  try {
    // 1. Latest pointer — ~40 bytes, backend sets Cache-Control: no-cache,
    //    max-age=300. Cheap enough to hit every startup.
    const latest = await axios.get(`${GCS_BASE}/latest`, {
      responseType: 'text',
      timeout: 10_000,
    })
    sha = String(latest.data).trim()
    if (!sha) {
      // Empty /latest body — backend misconfigured. Bail (null), don't
      // lock into a permanently-broken empty-sentinel state.
      throw new Error('latest pointer returned empty body')
    }
    // 2. Sentinel check — `.gcs-sha` at the install root holds the last
    //    extracted SHA. Matching means we already have this content.
    const sentinelPath = join(installLocation, '.gcs-sha')
    const currentSha = await readFile(sentinelPath, 'utf8').then(
      s => s.trim(),
      () => null, // ENOENT — first fetch, proceed to download
    )
    if (currentSha === sha) {
      outcome = 'noop'
      return sha
    }
    // 3. Download zip and extract to a staging dir, then atomic-swap into
    //    place. Crash mid-extract leaves a .staging dir (next run rm's it)
    //    rather than a half-written installLocation.
    const zipResp = await axios.get(`${GCS_BASE}/${sha}.zip`, {
      responseType: 'arraybuffer',
      timeout: 60_000,
    })
    const zipBuf = Buffer.from(zipResp.data)
    bytes = zipBuf.length
    const files = await unzipFile(zipBuf)
    // fflate doesn't surface external_attr, so parse the central directory
    // ourselves to recover exec bits. Without this, hooks/scripts extract as
    // 0644 and `sh -c "/path/script.sh"` (hooks.ts:~1002) fails with EACCES
    // on Unix. Git-clone preserves +x natively; this keeps GCS at parity.
    const modes = parseZipModes(zipBuf)
    const staging = `${installLocation}.staging`
    const stagingRoot = resolve(staging)
    await rm(staging, { recursive: true, force: true })
    await mkdir(staging, { recursive: true })
    for (const [arcPath, data] of Object.entries(files)) {
      if (!arcPath.startsWith(ARC_PREFIX)) continue
      const rel = arcPath.slice(ARC_PREFIX.length)
      if (!rel || rel.endsWith('/')) continue // prefix dir entry or subdir entry
      const dest = join(staging, rel)
      // Zip-slip guard: a crafted entry like `../../x` would otherwise
      // escape staging via join(). The zip comes from our own CDN, but the
      // same defense-in-depth rationale as the cache-dir check above applies.
      if (!resolve(dest).startsWith(stagingRoot + sep)) continue
      await mkdir(dirname(dest), { recursive: true })
      await writeFile(dest, data)
      const mode = modes[arcPath]
      if (mode && mode & 0o111) {
        // Only chmod when an exec bit is set — skip plain files to save syscalls.
        // Swallow EPERM/ENOTSUP (NFS root_squash, some FUSE mounts) — losing +x
        // is the pre-PR behavior and better than aborting mid-extraction.
        await chmod(dest, mode & 0o777).catch(() => {})
      }
    }
    await writeFile(join(staging, '.gcs-sha'), sha)
    // Atomic swap: rm old, rename staging. Brief window where installLocation
    // doesn't exist — acceptable for a background refresh (caller retries next
    // startup if it crashes here).
    await rm(installLocation, { recursive: true, force: true })
    await rename(staging, installLocation)
    outcome = 'updated'
    return sha
  } catch (e) {
    errKind = classifyGcsError(e)
    logForDebugging(
      `Official marketplace GCS fetch failed: ${errorMessage(e)}`,
      { level: 'warn' },
    )
    return null
  } finally {
    // tengu_plugin_remote_fetch schema shared with the telemetry PR
    // (.daisy/inc-5046/index.md) — adds source:'marketplace_gcs'. All string
    // values below are static enums or a git SHA — not code/filepaths/PII.
    logEvent('tengu_plugin_remote_fetch', {
      source: 'marketplace_gcs' as SafeString,
      host: 'downloads.claude.ai' as SafeString,
      is_official: true,
      outcome: outcome as SafeString,
      duration_ms: Math.round(performance.now() - start),
      ...(bytes !== undefined && { bytes }),
      ...(sha && { sha: sha as SafeString }),
      ...(errKind && { error_kind: errKind as SafeString }),
    })
  }
}
// Bounded set of errno codes we report by name (as `fs_<CODE>` buckets in
// classifyGcsError). Anything else buckets as fs_other to keep dashboard
// cardinality tractable.
const KNOWN_FS_CODES = new Set([
  'ENOSPC',
  'EACCES',
  'EPERM',
  'EXDEV',
  'EBUSY',
  'ENOENT',
  'ENOTDIR',
  'EROFS',
  'EMFILE',
  'ENAMETOOLONG',
])
/**
 * Classify a GCS fetch error into a stable telemetry bucket.
 *
 * Telemetry from v2.1.83+ showed 50% of failures landing in 'other' — and
 * 99.99% of those had both sha+bytes set, meaning download succeeded but
 * extraction/fs failed. This splits that bucket so we can see whether the
 * failures are fixable (wrong staging dir, cross-device rename) or inherent
 * (disk full, permission denied) before flipping the git-fallback kill switch.
 */
export function classifyGcsError(e: unknown): string {
  // Network-layer failures first: axios errors carry either a response
  // (HTTP status) or a transport-level code.
  if (axios.isAxiosError(e)) {
    if (e.code === 'ECONNABORTED') return 'timeout'
    return e.response ? `http_${e.response.status}` : 'network'
  }
  const errno = getErrnoCode(e)
  // Node fs errno codes are E<UPPERCASE> (ENOSPC, EACCES). Axios also sets
  // .code (ERR_NETWORK, ERR_BAD_OPTION, EPROTO) — don't bucket those as fs.
  if (errno && !errno.startsWith('ERR_') && /^E[A-Z]+$/.test(errno)) {
    return KNOWN_FS_CODES.has(errno) ? `fs_${errno}` : 'fs_other'
  }
  // fflate sets numeric .code (0-14) on inflate/unzip errors — catches
  // deflate-level corruption ("unexpected EOF", "invalid block type") that
  // the message regex misses.
  const rawCode = (e as { code?: unknown })?.code
  if (typeof rawCode === 'number') return 'zip_parse'
  const message = errorMessage(e)
  if (/unzip|invalid zip|central directory/i.test(message)) return 'zip_parse'
  return /empty body/.test(message) ? 'empty_latest' : 'other'
}

View File

@@ -0,0 +1,439 @@
/**
* Auto-install logic for the official Anthropic marketplace.
*
* This module handles automatically installing the official marketplace
* on startup for new users, with appropriate checks for:
* - Enterprise policy restrictions
* - Git availability
* - Previous installation attempts
*/
import { join } from 'path'
import { getFeatureValue_CACHED_MAY_BE_STALE } from '../../services/analytics/growthbook.js'
import { logEvent } from '../../services/analytics/index.js'
import { getGlobalConfig, saveGlobalConfig } from '../config.js'
import { logForDebugging } from '../debug.js'
import { isEnvTruthy } from '../envUtils.js'
import { toError } from '../errors.js'
import { logError } from '../log.js'
import { checkGitAvailable, markGitUnavailable } from './gitAvailability.js'
import { isSourceAllowedByPolicy } from './marketplaceHelpers.js'
import {
addMarketplaceSource,
getMarketplacesCacheDir,
loadKnownMarketplacesConfig,
saveKnownMarketplacesConfig,
} from './marketplaceManager.js'
import {
OFFICIAL_MARKETPLACE_NAME,
OFFICIAL_MARKETPLACE_SOURCE,
} from './officialMarketplace.js'
import { fetchOfficialMarketplaceFromGcs } from './officialMarketplaceGcs.js'
/**
 * Reason why the official marketplace was not installed
 */
export type OfficialMarketplaceSkipReason =
  | 'already_attempted' // a prior attempt was recorded and no retry is due
  | 'already_installed' // entry already present in known marketplaces
  | 'policy_blocked' // env-var opt-out or enterprise policy disallows the source
  | 'git_unavailable' // git-based install path unavailable — see checkGitAvailable
  | 'gcs_unavailable' // GCS mirror fetch failed
  | 'unknown' // unexpected failure during installation
/**
 * Check if official marketplace auto-install is disabled via the
 * CLAUDE_CODE_DISABLE_OFFICIAL_MARKETPLACE_AUTOINSTALL environment variable.
 */
export function isOfficialMarketplaceAutoInstallDisabled(): boolean {
  const flag =
    process.env.CLAUDE_CODE_DISABLE_OFFICIAL_MARKETPLACE_AUTOINSTALL
  return isEnvTruthy(flag)
}
/**
 * Configuration for retry logic
 */
export const RETRY_CONFIG = {
  MAX_ATTEMPTS: 10,
  INITIAL_DELAY_MS: 60 * 60 * 1000, // 1 hour
  BACKOFF_MULTIPLIER: 2,
  MAX_DELAY_MS: 7 * 24 * 60 * 60 * 1000, // 1 week
}
/**
 * Calculate the next retry delay using exponential backoff,
 * capped at RETRY_CONFIG.MAX_DELAY_MS.
 */
function calculateNextRetryDelay(retryCount: number): number {
  const uncapped =
    RETRY_CONFIG.INITIAL_DELAY_MS *
    RETRY_CONFIG.BACKOFF_MULTIPLIER ** retryCount
  return Math.min(uncapped, RETRY_CONFIG.MAX_DELAY_MS)
}
/**
 * Determine if installation should be retried based on failure reason and
 * the recorded retry state in the global config.
 */
function shouldRetryInstallation(
  config: ReturnType<typeof getGlobalConfig>,
): boolean {
  // Never attempted before — always try.
  if (!config.officialMarketplaceAutoInstallAttempted) {
    return true
  }
  // Already succeeded — nothing to do.
  if (config.officialMarketplaceAutoInstalled) {
    return false
  }
  const failReason = config.officialMarketplaceAutoInstallFailReason
  // Give up once the retry budget is exhausted.
  const attempts = config.officialMarketplaceAutoInstallRetryCount || 0
  if (attempts >= RETRY_CONFIG.MAX_ATTEMPTS) {
    return false
  }
  // Policy blocks are permanent — retrying will never succeed.
  if (failReason === 'policy_blocked') {
    return false
  }
  // Respect the scheduled backoff window.
  const nextRetryTime = config.officialMarketplaceAutoInstallNextRetryTime
  if (nextRetryTime && Date.now() < nextRetryTime) {
    return false
  }
  // Retry for temporary failures (unknown), semi-permanent
  // (git_unavailable / gcs_unavailable), and legacy state (undefined
  // failReason from before retry logic existed).
  return (
    failReason === 'unknown' ||
    failReason === 'git_unavailable' ||
    failReason === 'gcs_unavailable' ||
    failReason === undefined
  )
}
/**
 * Result of the auto-install check.
 *
 * `installed` and `skipped` are mutually exclusive in practice: a successful
 * install reports `{ installed: true, skipped: false }`; every other outcome
 * reports `skipped: true` with a `reason`.
 */
export type OfficialMarketplaceCheckResult = {
  /** Whether the marketplace was successfully installed */
  installed: boolean
  /** Whether the installation was skipped (and why) */
  skipped: boolean
  /** Reason for skipping, if applicable */
  reason?: OfficialMarketplaceSkipReason
  /** Whether saving retry metadata to config failed */
  configSaveFailed?: boolean
}
/**
* Check and install the official marketplace on startup.
*
* This function is designed to be called as a fire-and-forget operation
* during startup. It will:
* 1. Check if installation was already attempted
* 2. Check if marketplace is already installed
* 3. Check enterprise policy restrictions
* 4. Check git availability
* 5. Attempt installation
* 6. Record the result in GlobalConfig
*
* @returns Result indicating whether installation succeeded or was skipped
*/
export async function checkAndInstallOfficialMarketplace(): Promise<OfficialMarketplaceCheckResult> {
  // Snapshot used for reads below (retry counters, prior fail reason).
  // Writes always go through saveGlobalConfig(current => ...) so they apply
  // on top of the freshest state, not on this snapshot.
  const config = getGlobalConfig()
  // Check if we should retry installation
  if (!shouldRetryInstallation(config)) {
    const reason: OfficialMarketplaceSkipReason =
      config.officialMarketplaceAutoInstallFailReason ?? 'already_attempted'
    logForDebugging(`Official marketplace auto-install skipped: ${reason}`)
    return {
      installed: false,
      skipped: true,
      reason,
    }
  }
  try {
    // Check if auto-install is disabled via env var
    if (isOfficialMarketplaceAutoInstallDisabled()) {
      logForDebugging(
        'Official marketplace auto-install disabled via env var, skipping',
      )
      // Terminal state — no retry metadata, unlike the transient GCS/git
      // failures below: an env-var opt-out is deliberate.
      saveGlobalConfig(current => ({
        ...current,
        officialMarketplaceAutoInstallAttempted: true,
        officialMarketplaceAutoInstalled: false,
        officialMarketplaceAutoInstallFailReason: 'policy_blocked',
      }))
      logEvent('tengu_official_marketplace_auto_install', {
        installed: false,
        skipped: true,
        policy_blocked: true,
      })
      return { installed: false, skipped: true, reason: 'policy_blocked' }
    }
    // Check if marketplace is already installed
    const knownMarketplaces = await loadKnownMarketplacesConfig()
    if (knownMarketplaces[OFFICIAL_MARKETPLACE_NAME]) {
      logForDebugging(
        `Official marketplace '${OFFICIAL_MARKETPLACE_NAME}' already installed, skipping`,
      )
      // Mark as attempted so we don't check again
      saveGlobalConfig(current => ({
        ...current,
        officialMarketplaceAutoInstallAttempted: true,
        officialMarketplaceAutoInstalled: true,
      }))
      return { installed: false, skipped: true, reason: 'already_installed' }
    }
    // Check enterprise policy restrictions
    if (!isSourceAllowedByPolicy(OFFICIAL_MARKETPLACE_SOURCE)) {
      logForDebugging(
        'Official marketplace blocked by enterprise policy, skipping',
      )
      saveGlobalConfig(current => ({
        ...current,
        officialMarketplaceAutoInstallAttempted: true,
        officialMarketplaceAutoInstalled: false,
        officialMarketplaceAutoInstallFailReason: 'policy_blocked',
      }))
      logEvent('tengu_official_marketplace_auto_install', {
        installed: false,
        skipped: true,
        policy_blocked: true,
      })
      return { installed: false, skipped: true, reason: 'policy_blocked' }
    }
    // inc-5046: try GCS mirror first — doesn't need git, doesn't hit GitHub.
    // Backend (anthropic#317037) publishes a marketplace zip to the same
    // bucket as the native binary. If GCS succeeds, register the marketplace
    // with source:'github' (still true — GCS is a mirror) and skip git
    // entirely.
    const cacheDir = getMarketplacesCacheDir()
    const installLocation = join(cacheDir, OFFICIAL_MARKETPLACE_NAME)
    // Returned sha is only used as a success/failure signal here (null on
    // failure); the fetched content itself lives at installLocation.
    const gcsSha = await fetchOfficialMarketplaceFromGcs(
      installLocation,
      cacheDir,
    )
    if (gcsSha !== null) {
      // Re-read rather than reusing knownMarketplaces: the GCS fetch awaited
      // above, so the file may have changed in the meantime.
      const known = await loadKnownMarketplacesConfig()
      known[OFFICIAL_MARKETPLACE_NAME] = {
        source: OFFICIAL_MARKETPLACE_SOURCE,
        installLocation,
        lastUpdated: new Date().toISOString(),
      }
      await saveKnownMarketplacesConfig(known)
      saveGlobalConfig(current => ({
        ...current,
        officialMarketplaceAutoInstallAttempted: true,
        officialMarketplaceAutoInstalled: true,
        // Clear any retry metadata left over from earlier failed attempts
        officialMarketplaceAutoInstallFailReason: undefined,
        officialMarketplaceAutoInstallRetryCount: undefined,
        officialMarketplaceAutoInstallLastAttemptTime: undefined,
        officialMarketplaceAutoInstallNextRetryTime: undefined,
      }))
      logEvent('tengu_official_marketplace_auto_install', {
        installed: true,
        skipped: false,
        via_gcs: true,
      })
      return { installed: true, skipped: false }
    }
    // GCS failed (404 until backend writes, or network). Fall through to git
    // ONLY if the kill-switch allows — same gate as refreshMarketplace().
    if (
      !getFeatureValue_CACHED_MAY_BE_STALE(
        'tengu_plugin_official_mkt_git_fallback',
        true,
      )
    ) {
      logForDebugging(
        'Official marketplace GCS failed; git fallback disabled by flag — skipping install',
      )
      // Same retry-with-backoff metadata as git_unavailable below — transient
      // GCS failures should retry with exponential backoff, not give up.
      const retryCount =
        (config.officialMarketplaceAutoInstallRetryCount || 0) + 1
      const now = Date.now()
      const nextRetryTime = now + calculateNextRetryDelay(retryCount)
      saveGlobalConfig(current => ({
        ...current,
        officialMarketplaceAutoInstallAttempted: true,
        officialMarketplaceAutoInstalled: false,
        officialMarketplaceAutoInstallFailReason: 'gcs_unavailable',
        officialMarketplaceAutoInstallRetryCount: retryCount,
        officialMarketplaceAutoInstallLastAttemptTime: now,
        officialMarketplaceAutoInstallNextRetryTime: nextRetryTime,
      }))
      logEvent('tengu_official_marketplace_auto_install', {
        installed: false,
        skipped: true,
        gcs_unavailable: true,
        retry_count: retryCount,
      })
      return { installed: false, skipped: true, reason: 'gcs_unavailable' }
    }
    // Check git availability
    const gitAvailable = await checkGitAvailable()
    if (!gitAvailable) {
      logForDebugging(
        'Git not available, skipping official marketplace auto-install',
      )
      const retryCount =
        (config.officialMarketplaceAutoInstallRetryCount || 0) + 1
      const now = Date.now()
      const nextRetryDelay = calculateNextRetryDelay(retryCount)
      const nextRetryTime = now + nextRetryDelay
      // Config persistence is best-effort here; surface failure via the
      // configSaveFailed field instead of letting the outer catch record a
      // misleading 'unknown' reason.
      let configSaveFailed = false
      try {
        saveGlobalConfig(current => ({
          ...current,
          officialMarketplaceAutoInstallAttempted: true,
          officialMarketplaceAutoInstalled: false,
          officialMarketplaceAutoInstallFailReason: 'git_unavailable',
          officialMarketplaceAutoInstallRetryCount: retryCount,
          officialMarketplaceAutoInstallLastAttemptTime: now,
          officialMarketplaceAutoInstallNextRetryTime: nextRetryTime,
        }))
      } catch (saveError) {
        configSaveFailed = true
        // Log the error properly so it gets tracked
        const configError = toError(saveError)
        logError(configError)
        logForDebugging(
          `Failed to save marketplace auto-install git_unavailable state: ${saveError}`,
          { level: 'error' },
        )
      }
      logEvent('tengu_official_marketplace_auto_install', {
        installed: false,
        skipped: true,
        git_unavailable: true,
        retry_count: retryCount,
      })
      return {
        installed: false,
        skipped: true,
        reason: 'git_unavailable',
        configSaveFailed,
      }
    }
    // Attempt installation
    logForDebugging('Attempting to auto-install official marketplace')
    await addMarketplaceSource(OFFICIAL_MARKETPLACE_SOURCE)
    // Success
    logForDebugging('Successfully auto-installed official marketplace')
    // Captured before clearing so telemetry reflects how many retries it took
    const previousRetryCount =
      config.officialMarketplaceAutoInstallRetryCount || 0
    saveGlobalConfig(current => ({
      ...current,
      officialMarketplaceAutoInstallAttempted: true,
      officialMarketplaceAutoInstalled: true,
      // Clear retry metadata on success
      officialMarketplaceAutoInstallFailReason: undefined,
      officialMarketplaceAutoInstallRetryCount: undefined,
      officialMarketplaceAutoInstallLastAttemptTime: undefined,
      officialMarketplaceAutoInstallNextRetryTime: undefined,
    }))
    logEvent('tengu_official_marketplace_auto_install', {
      installed: true,
      skipped: false,
      retry_count: previousRetryCount,
    })
    return { installed: true, skipped: false }
  } catch (error) {
    // Handle installation failure
    const errorMessage = error instanceof Error ? error.message : String(error)
    // On macOS, /usr/bin/git is an xcrun shim that always exists on PATH, so
    // checkGitAvailable() (which only does `which git`) passes even without
    // Xcode CLT installed. The shim then fails at clone time with
    // "xcrun: error: invalid active developer path (...)". Poison the memoized
    // availability check so other git callers in this session skip cleanly,
    // then return silently without recording any attempt state — next startup
    // tries fresh (no backoff machinery for what is effectively "git absent").
    if (errorMessage.includes('xcrun: error:')) {
      markGitUnavailable()
      logForDebugging(
        'Official marketplace auto-install: git is a non-functional macOS xcrun shim, treating as git_unavailable',
      )
      logEvent('tengu_official_marketplace_auto_install', {
        installed: false,
        skipped: true,
        git_unavailable: true,
        macos_xcrun_shim: true,
      })
      return {
        installed: false,
        skipped: true,
        reason: 'git_unavailable',
      }
    }
    logForDebugging(
      `Failed to auto-install official marketplace: ${errorMessage}`,
      { level: 'error' },
    )
    logError(toError(error))
    const retryCount =
      (config.officialMarketplaceAutoInstallRetryCount || 0) + 1
    const now = Date.now()
    const nextRetryDelay = calculateNextRetryDelay(retryCount)
    const nextRetryTime = now + nextRetryDelay
    let configSaveFailed = false
    try {
      saveGlobalConfig(current => ({
        ...current,
        officialMarketplaceAutoInstallAttempted: true,
        officialMarketplaceAutoInstalled: false,
        officialMarketplaceAutoInstallFailReason: 'unknown',
        officialMarketplaceAutoInstallRetryCount: retryCount,
        officialMarketplaceAutoInstallLastAttemptTime: now,
        officialMarketplaceAutoInstallNextRetryTime: nextRetryTime,
      }))
    } catch (saveError) {
      configSaveFailed = true
      // Log the error properly so it gets tracked
      const configError = toError(saveError)
      logError(configError)
      logForDebugging(
        `Failed to save marketplace auto-install failure state: ${saveError}`,
        { level: 'error' },
      )
      // Still return the failure result even if config save failed
      // This ensures we report the installation failure correctly
    }
    logEvent('tengu_official_marketplace_auto_install', {
      installed: false,
      skipped: true,
      failed: true,
      retry_count: retryCount,
    })
    return {
      installed: false,
      skipped: true,
      reason: 'unknown',
      configSaveFailed,
    }
  }
}

View File

@@ -0,0 +1,114 @@
/**
* Provides ripgrep glob exclusion patterns for orphaned plugin versions.
*
* When plugin versions are updated, old versions are marked with a
* `.orphaned_at` file but kept on disk for 7 days (since concurrent
* sessions might still reference them). During this window, Grep/Glob
* could return files from orphaned versions, causing Claude to use
* outdated plugin code.
*
* We find `.orphaned_at` markers via a single ripgrep call and generate
* `--glob '!<dir>/**'` patterns for their parent directories. The cache
* is warmed in main.tsx AFTER cleanupOrphanedPluginVersionsInBackground
* settles disk state. Once populated, the exclusion list is frozen for
* the session unless /reload-plugins is called; subsequent disk mutations
* (autoupdate, concurrent sessions) don't affect it.
*/
import { dirname, isAbsolute, join, normalize, relative, sep } from 'path'
import { ripGrep } from '../ripgrep.js'
import { getPluginsDirectory } from './pluginDirectories.js'
// Inlined from cacheUtils.ts to avoid a circular dep through commands.js.
const ORPHANED_AT_FILENAME = '.orphaned_at'
/** Session-scoped cache. Frozen once computed — only cleared by explicit /reload-plugins. */
// null = "not yet computed"; an empty array is a valid (cached) result.
let cachedExclusions: string[] | null = null
/**
 * Compute ripgrep `--glob` exclusion patterns that hide orphaned plugin
 * versions from Grep/Glob results.
 *
 * @param searchPath - Optional search root. When it does not overlap the
 *   plugin cache directory, `[]` is returned without touching the cache or
 *   disk (no point passing --glob args for unrelated searches).
 *
 * The result is cached for the whole session (see cachedExclusions).
 * main.tsx warms this eagerly after orphan GC settles disk state; the lazy
 * path here is only a fallback. Best-effort: any ripgrep failure caches and
 * returns an empty list so core search tools keep working.
 */
export async function getGlobExclusionsForPluginCache(
  searchPath?: string,
): Promise<string[]> {
  const cachePath = normalize(join(getPluginsDirectory(), 'cache'))
  const relevant = !searchPath || pathsOverlap(searchPath, cachePath)
  if (!relevant) {
    return []
  }
  if (cachedExclusions !== null) {
    return cachedExclusions
  }
  try {
    // Locate every .orphaned_at marker under the cache dir.
    // --hidden: the marker is a dotfile. --no-ignore: a stray .gitignore
    // must not hide it. --max-depth 4: markers always live at
    // cache/<marketplace>/<plugin>/<version>/.orphaned_at, so there is no
    // need to descend into plugin contents (node_modules, etc.).
    // The AbortController signal is never aborted — there is no caller
    // signal to thread through.
    const rgArgs = [
      '--files',
      '--hidden',
      '--no-ignore',
      '--max-depth',
      '4',
      '--glob',
      ORPHANED_AT_FILENAME,
    ]
    const markerFiles = await ripGrep(
      rgArgs,
      cachePath,
      new AbortController().signal,
    )
    const patterns: string[] = []
    for (const markerFile of markerFiles) {
      // ripgrep may emit absolute or relative paths — normalize to relative.
      const versionDir = dirname(markerFile)
      const relDir = isAbsolute(versionDir)
        ? relative(cachePath, versionDir)
        : versionDir
      // ripgrep glob patterns always use forward slashes, even on Windows.
      patterns.push(`!**/${relDir.replace(/\\/g, '/')}/**`)
    }
    cachedExclusions = patterns
  } catch {
    // Best-effort — cache the empty result so we don't retry every search.
    cachedExclusions = []
  }
  return cachedExclusions
}
/** Drop the session cache so the next lookup recomputes from disk (invoked by /reload-plugins). */
export function clearPluginCacheExclusions(): void {
  cachedExclusions = null
}
/**
 * True when one path is an ancestor of (or equal to) the other.
 *
 * Root is special-cased by comparing directly against `sep`, since
 * normalize('/') + sep would yield '//'. Comparison is case-insensitive on
 * win32 because normalize() doesn't lowercase drive letters and
 * CLAUDE_CODE_PLUGIN_CACHE_DIR may disagree with the resolved casing.
 */
function pathsOverlap(a: string, b: string): boolean {
  const left = normalizeForCompare(a)
  const right = normalizeForCompare(b)
  if (left === right || left === sep || right === sep) {
    return true
  }
  return left.startsWith(right + sep) || right.startsWith(left + sep)
}
/** normalize(), lowercased on win32 so comparisons ignore drive-letter case. */
function normalizeForCompare(p: string): string {
  const normalized = normalize(p)
  if (process.platform === 'win32') {
    return normalized.toLowerCase()
  }
  return normalized
}

View File

@@ -0,0 +1,162 @@
import { homedir } from 'os'
import { resolve } from 'path'
import { getErrnoCode } from '../errors.js'
import { getFsImplementation } from '../fsOperations.js'
import type { MarketplaceSource } from './schemas.js'
/**
* Parses a marketplace input string and returns the appropriate marketplace source type.
* Handles various input formats:
* - Git SSH URLs (user@host:path or user@host:path.git)
* - Standard: git@github.com:owner/repo.git
* - GitHub Enterprise SSH certificates: org-123456@github.com:owner/repo.git
* - Custom usernames: deploy@gitlab.com:group/project.git
* - Self-hosted: user@192.168.10.123:path/to/repo
* - HTTP/HTTPS URLs
* - GitHub shorthand (owner/repo)
* - Local file paths (.json files)
* - Local directory paths
*
* @param input The marketplace source input string
* @returns MarketplaceSource object, error object, or null if format is unrecognized
*/
export async function parseMarketplaceInput(
  input: string,
): Promise<MarketplaceSource | { error: string } | null> {
  const trimmed = input.trim()
  const fs = getFsImplementation()
  // Handle git SSH URLs with any valid username (not just 'git')
  // Supports: user@host:path, user@host:path.git, and with #ref suffix
  // Username can contain: alphanumeric, dots, underscores, hyphens
  // NOTE: this check runs first, so anything of the form "name@host:path"
  // is treated as SSH before the shorthand/path branches below can see it.
  const sshMatch = trimmed.match(
    /^([a-zA-Z0-9._-]+@[^:]+:.+?(?:\.git)?)(#(.+))?$/,
  )
  if (sshMatch?.[1]) {
    const url = sshMatch[1]
    const ref = sshMatch[3]
    return ref ? { source: 'git', url, ref } : { source: 'git', url }
  }
  // Handle URLs
  if (trimmed.startsWith('http://') || trimmed.startsWith('https://')) {
    // Extract fragment (ref) from URL if present
    const fragmentMatch = trimmed.match(/^([^#]+)(#(.+))?$/)
    const urlWithoutFragment = fragmentMatch?.[1] || trimmed
    const ref = fragmentMatch?.[3]
    // When user explicitly provides an HTTPS/HTTP URL that looks like a git
    // repo, use the git source type so we clone rather than fetch-as-JSON.
    // The .git suffix is a GitHub/GitLab/Bitbucket convention. Azure DevOps
    // uses /_git/ in the path with NO suffix (appending .git breaks ADO:
    // TF401019 "repo does not exist"). Without this check, an ADO URL falls
    // through to source:'url' below, which tries to fetch it as a raw
    // marketplace.json — the HTML response parses as "expected object,
    // received string". (gh-31256 / CC-299)
    if (
      urlWithoutFragment.endsWith('.git') ||
      urlWithoutFragment.includes('/_git/')
    ) {
      return ref
        ? { source: 'git', url: urlWithoutFragment, ref }
        : { source: 'git', url: urlWithoutFragment }
    }
    // Parse URL to check hostname
    let url: URL
    try {
      url = new URL(urlWithoutFragment)
    } catch (_err) {
      // Not a valid URL for parsing, treat as generic URL
      // new URL() throws TypeError for invalid URLs
      return { source: 'url', url: urlWithoutFragment }
    }
    if (url.hostname === 'github.com' || url.hostname === 'www.github.com') {
      // Match the first two path segments (owner/repo), stopping at a
      // trailing slash, a .git suffix, or end of path.
      const match = url.pathname.match(/^\/([^/]+\/[^/]+?)(\/|\.git|$)/)
      if (match?.[1]) {
        // User explicitly provided HTTPS URL - keep it as HTTPS via 'git' type
        // Add .git suffix if not present for proper git clone
        const gitUrl = urlWithoutFragment.endsWith('.git')
          ? urlWithoutFragment
          : `${urlWithoutFragment}.git`
        return ref
          ? { source: 'git', url: gitUrl, ref }
          : { source: 'git', url: gitUrl }
      }
    }
    // Any other http(s) URL: fetched directly as a marketplace.json document
    return { source: 'url', url: urlWithoutFragment }
  }
  // Handle local paths
  // On Windows, also recognize backslash-relative (.\, ..\) and drive letter paths (C:\)
  // These are Windows-only because backslashes are valid filename chars on Unix
  const isWindows = process.platform === 'win32'
  const isWindowsPath =
    isWindows &&
    (trimmed.startsWith('.\\') ||
      trimmed.startsWith('..\\') ||
      /^[a-zA-Z]:[/\\]/.test(trimmed))
  if (
    trimmed.startsWith('./') ||
    trimmed.startsWith('../') ||
    trimmed.startsWith('/') ||
    trimmed.startsWith('~') ||
    isWindowsPath
  ) {
    // ~ expansion: only the leading tilde is replaced with the home dir
    const resolvedPath = resolve(
      trimmed.startsWith('~') ? trimmed.replace(/^~/, homedir()) : trimmed,
    )
    // Stat the path to determine if it's a file or directory. Swallow all stat
    // errors (ENOENT, EACCES, EPERM, etc.) and return an error result instead
    // of throwing — matches the old existsSync behavior which never threw.
    let stats
    try {
      stats = await fs.stat(resolvedPath)
    } catch (e: unknown) {
      const code = getErrnoCode(e)
      return {
        error:
          code === 'ENOENT'
            ? `Path does not exist: ${resolvedPath}`
            : `Cannot access path: ${resolvedPath} (${code ?? e})`,
      }
    }
    if (stats.isFile()) {
      if (resolvedPath.endsWith('.json')) {
        return { source: 'file', path: resolvedPath }
      } else {
        return {
          error: `File path must point to a .json file (marketplace.json), but got: ${resolvedPath}`,
        }
      }
    } else if (stats.isDirectory()) {
      return { source: 'directory', path: resolvedPath }
    } else {
      // e.g. a socket, FIFO, or device node
      return {
        error: `Path is neither a file nor a directory: ${resolvedPath}`,
      }
    }
  }
  // Handle GitHub shorthand (owner/repo, owner/repo#ref, or owner/repo@ref)
  // Accept both # and @ as ref separators — the display formatter uses @, so users
  // naturally type @ when copying from error messages or managed settings.
  if (trimmed.includes('/') && !trimmed.startsWith('@')) {
    // A colon means this was an SSH-ish string the regex above rejected —
    // don't misinterpret it as owner/repo shorthand.
    if (trimmed.includes(':')) {
      return null
    }
    // Extract ref if present (either #ref or @ref)
    const fragmentMatch = trimmed.match(/^([^#@]+)(?:[#@](.+))?$/)
    const repo = fragmentMatch?.[1] || trimmed
    const ref = fragmentMatch?.[2]
    // Assume it's a GitHub repo
    return ref ? { source: 'github', repo, ref } : { source: 'github', repo }
  }
  // NPM packages not yet implemented
  // Returning null for unrecognized input
  return null
}

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,284 @@
/**
* Background plugin autoupdate functionality
*
* At startup, this module:
* 1. First updates marketplaces that have autoUpdate enabled
* 2. Then checks all installed plugins from those marketplaces and updates them
*
* Updates are non-inplace (disk-only), requiring a restart to take effect.
* Official Anthropic marketplaces have autoUpdate enabled by default,
* but users can disable it per-marketplace.
*/
import { updatePluginOp } from '../../services/plugins/pluginOperations.js'
import { shouldSkipPluginAutoupdate } from '../config.js'
import { logForDebugging } from '../debug.js'
import { errorMessage } from '../errors.js'
import { logError } from '../log.js'
import {
getPendingUpdatesDetails,
hasPendingUpdates,
isInstallationRelevantToCurrentProject,
loadInstalledPluginsFromDisk,
} from './installedPluginsManager.js'
import {
getDeclaredMarketplaces,
loadKnownMarketplacesConfig,
refreshMarketplace,
} from './marketplaceManager.js'
import { parsePluginIdentifier } from './pluginIdentifier.js'
import { isMarketplaceAutoUpdate, type PluginScope } from './schemas.js'
/**
 * Callback type for notifying when plugins have been updated
 */
export type PluginAutoUpdateCallback = (updatedPlugins: string[]) => void
// Store callback for plugin update notifications (null until the REPL registers one)
let pluginUpdateCallback: PluginAutoUpdateCallback | null = null
// Store pending updates that occurred before callback was registered
// This handles the race condition where updates complete before REPL mounts
let pendingNotification: string[] | null = null
/**
 * Register a callback invoked when background autoupdate finishes updating
 * plugins. The REPL uses this to surface a "restart to apply" notification.
 *
 * Handles the startup race: if updates completed before any callback was
 * registered, the buffered plugin list is delivered immediately on
 * registration.
 *
 * @returns An unsubscribe function that clears the registered callback.
 */
export function onPluginsAutoUpdated(
  callback: PluginAutoUpdateCallback,
): () => void {
  pluginUpdateCallback = callback
  // Deliver any updates buffered before this registration.
  if (pendingNotification !== null && pendingNotification.length > 0) {
    const buffered = pendingNotification
    callback(buffered)
    pendingNotification = null
  }
  return () => {
    pluginUpdateCallback = null
  }
}
/**
 * Names of plugins with pending (disk-only) updates, for restart
 * notifications. Empty when nothing is pending.
 */
export function getAutoUpdatedPluginNames(): string[] {
  if (!hasPendingUpdates()) {
    return []
  }
  const details = getPendingUpdatesDetails()
  return details.map(detail => parsePluginIdentifier(detail.pluginId).name)
}
/**
 * Collect the marketplaces that should be auto-updated.
 *
 * A settings-declared autoUpdate value takes precedence over the persisted
 * JSON state (isMarketplaceAutoUpdate).
 *
 * @returns Lowercased marketplace names with autoUpdate enabled.
 */
async function getAutoUpdateEnabledMarketplaces(): Promise<Set<string>> {
  const config = await loadKnownMarketplacesConfig()
  const declared = getDeclaredMarketplaces()
  const enabled = new Set<string>()
  for (const [name, entry] of Object.entries(config)) {
    const fromSettings = declared[name]?.autoUpdate
    const autoUpdate =
      fromSettings !== undefined
        ? fromSettings
        : isMarketplaceAutoUpdate(name, entry)
    if (autoUpdate) {
      enabled.add(name.toLowerCase())
    }
  }
  return enabled
}
/**
 * Run updatePluginOp for every given installation of one plugin.
 *
 * Already-up-to-date results are skipped silently; failures and thrown
 * errors are logged at warn level but never propagate.
 *
 * @returns The pluginId when at least one installation actually changed
 *   version, otherwise null.
 */
async function updatePlugin(
  pluginId: string,
  installations: Array<{ scope: PluginScope; projectPath?: string }>,
): Promise<string | null> {
  let anyUpdated = false
  for (const installation of installations) {
    try {
      const result = await updatePluginOp(pluginId, installation.scope)
      if (result.alreadyUpToDate) {
        continue
      }
      if (result.success) {
        anyUpdated = true
        logForDebugging(
          `Plugin autoupdate: updated ${pluginId} from ${result.oldVersion} to ${result.newVersion}`,
        )
      } else {
        logForDebugging(
          `Plugin autoupdate: failed to update ${pluginId}: ${result.message}`,
          { level: 'warn' },
        )
      }
    } catch (error) {
      logForDebugging(
        `Plugin autoupdate: error updating ${pluginId}: ${errorMessage(error)}`,
        { level: 'warn' },
      )
    }
  }
  return anyUpdated ? pluginId : null
}
/**
 * Update all project-relevant installed plugins from the given marketplaces.
 *
 * Walks installed_plugins.json, keeps plugins whose marketplace is in the
 * set, narrows each plugin's installations to those relevant to the current
 * project (user/managed scope, or project/local scope matching cwd — see
 * isInstallationRelevantToCurrentProject), then runs updatePluginOp per
 * installation. Already-up-to-date plugins are silently skipped.
 *
 * Called by:
 * - updatePlugins() below — background autoupdate path (autoUpdate-enabled
 *   marketplaces only; third-party marketplaces default autoUpdate: false)
 * - ManageMarketplaces.tsx applyChanges() — user-initiated /plugin marketplace
 *   update. Before #29512 this path only called refreshMarketplace() (git
 *   pull on the marketplace clone), so the loader would create the new
 *   version cache dir but installed_plugins.json stayed on the old version,
 *   and the orphan GC stamped the NEW dir with .orphaned_at on next startup.
 *
 * @param marketplaceNames - lowercase marketplace names to update plugins from
 * @returns plugin IDs that were actually updated (not already up-to-date)
 */
export async function updatePluginsForMarketplaces(
  marketplaceNames: Set<string>,
): Promise<string[]> {
  const installedPlugins = loadInstalledPluginsFromDisk()
  const pluginIds = Object.keys(installedPlugins.plugins)
  if (pluginIds.length === 0) {
    return []
  }
  // Plugins are checked concurrently; per-plugin errors are contained by
  // allSettled and simply drop out of the result below.
  const settled = await Promise.allSettled(
    pluginIds.map(async pluginId => {
      const { marketplace } = parsePluginIdentifier(pluginId)
      if (!marketplace || !marketplaceNames.has(marketplace.toLowerCase())) {
        return null
      }
      const installations = installedPlugins.plugins[pluginId]
      if (!installations || installations.length === 0) {
        return null
      }
      const relevant = installations.filter(
        isInstallationRelevantToCurrentProject,
      )
      if (relevant.length === 0) {
        return null
      }
      return updatePlugin(pluginId, relevant)
    }),
  )
  const updated: string[] = []
  for (const result of settled) {
    if (result.status === 'fulfilled' && result.value !== null) {
      updated.push(result.value)
    }
  }
  return updated
}
/**
 * Update plugins from marketplaces that have autoUpdate enabled.
 * Returns the list of plugin IDs that were updated.
 *
 * Thin wrapper over updatePluginsForMarketplaces, kept so the background
 * autoupdate path reads symmetrically with the marketplace-refresh step.
 */
async function updatePlugins(
  autoUpdateEnabledMarketplaces: Set<string>,
): Promise<string[]> {
  return updatePluginsForMarketplaces(autoUpdateEnabledMarketplaces)
}
/**
* Auto-update marketplaces and plugins in the background.
*
* This function:
* 1. Checks which marketplaces have autoUpdate enabled
* 2. Refreshes only those marketplaces (git pull/re-download)
* 3. Updates installed plugins from those marketplaces
* 4. If any plugins were updated, notifies via the registered callback
*
* Official Anthropic marketplaces have autoUpdate enabled by default,
* but users can disable it per-marketplace in the UI.
*
* This function runs silently without blocking user interaction.
* Called from main.tsx during startup as a background job.
*/
export function autoUpdateMarketplacesAndPluginsInBackground(): void {
void (async () => {
if (shouldSkipPluginAutoupdate()) {
logForDebugging('Plugin autoupdate: skipped (auto-updater disabled)')
return
}
try {
// Get marketplaces with autoUpdate enabled
const autoUpdateEnabledMarketplaces =
await getAutoUpdateEnabledMarketplaces()
if (autoUpdateEnabledMarketplaces.size === 0) {
return
}
// Refresh only marketplaces with autoUpdate enabled
const refreshResults = await Promise.allSettled(
Array.from(autoUpdateEnabledMarketplaces).map(async name => {
try {
await refreshMarketplace(name, undefined, {
disableCredentialHelper: true,
})
} catch (error) {
logForDebugging(
`Plugin autoupdate: failed to refresh marketplace ${name}: ${errorMessage(error)}`,
{ level: 'warn' },
)
}
}),
)
// Log any refresh failures
const failures = refreshResults.filter(r => r.status === 'rejected')
if (failures.length > 0) {
logForDebugging(
`Plugin autoupdate: ${failures.length} marketplace refresh(es) failed`,
{ level: 'warn' },
)
}
logForDebugging('Plugin autoupdate: checking installed plugins')
const updatedPlugins = await updatePlugins(autoUpdateEnabledMarketplaces)
if (updatedPlugins.length > 0) {
if (pluginUpdateCallback) {
// Callback is already registered, invoke it immediately
pluginUpdateCallback(updatedPlugins)
} else {
// Callback not yet registered (REPL not mounted), store for later delivery
pendingNotification = updatedPlugins
}
}
} catch (error) {
logError(error)
}
})()
}

View File

@@ -0,0 +1,127 @@
/**
* Plugin delisting detection.
*
* Compares installed plugins against marketplace manifests to find plugins
* that have been removed, and auto-uninstalls them.
*
* The security.json fetch was removed (see #25447) — ~29.5M/week GitHub hits
* for UI reason/text only. If re-introduced, serve from downloads.claude.ai.
*/
import { uninstallPluginOp } from '../../services/plugins/pluginOperations.js'
import { logForDebugging } from '../debug.js'
import { errorMessage } from '../errors.js'
import { loadInstalledPluginsV2 } from './installedPluginsManager.js'
import {
getMarketplace,
loadKnownMarketplacesConfigSafe,
} from './marketplaceManager.js'
import {
addFlaggedPlugin,
getFlaggedPlugins,
loadFlaggedPlugins,
} from './pluginFlagging.js'
import type { InstalledPluginsFileV2, PluginMarketplace } from './schemas.js'
/**
 * Detect plugins installed from a marketplace that are no longer listed there.
 *
 * @param installedPlugins All installed plugins
 * @param marketplace The marketplace to check against
 * @param marketplaceName The marketplace name suffix (e.g. "claude-plugins-official")
 * @returns List of delisted plugin IDs in "name@marketplace" format
 */
export function detectDelistedPlugins(
  installedPlugins: InstalledPluginsFileV2,
  marketplace: PluginMarketplace,
  marketplaceName: string,
): string[] {
  const listedNames = new Set(marketplace.plugins.map(plugin => plugin.name))
  const suffix = `@${marketplaceName}`
  return Object.keys(installedPlugins.plugins).filter(pluginId => {
    // Only plugins installed from THIS marketplace are candidates.
    if (!pluginId.endsWith(suffix)) {
      return false
    }
    const bareName = pluginId.slice(0, pluginId.length - suffix.length)
    return !listedNames.has(bareName)
  })
}
/**
 * Detect delisted plugins across all marketplaces, auto-uninstall them,
 * and record them as flagged.
 *
 * This is the core delisting enforcement logic, shared between interactive
 * mode (useManagePlugins) and headless mode (main.tsx print path).
 *
 * Enforcement only applies to marketplaces that opt in via
 * forceRemoveDeletedPlugins, and only to user-controllable scopes
 * (user/project/local) — managed installs are left for enterprise admins.
 *
 * @returns List of newly flagged plugin IDs
 */
export async function detectAndUninstallDelistedPlugins(): Promise<string[]> {
  // Ensure the flagged-plugins store is loaded before reading it below.
  await loadFlaggedPlugins()
  const installedPlugins = loadInstalledPluginsV2()
  const alreadyFlagged = getFlaggedPlugins()
  // Read-only iteration — Safe variant so a corrupted config doesn't throw
  // out of this function (it's called in the same try-block as loadAllPlugins
  // in useManagePlugins, so a throw here would void loadAllPlugins' resilience).
  const knownMarketplaces = await loadKnownMarketplacesConfigSafe()
  const newlyFlagged: string[] = []
  for (const marketplaceName of Object.keys(knownMarketplaces)) {
    try {
      const marketplace = await getMarketplace(marketplaceName)
      if (!marketplace.forceRemoveDeletedPlugins) continue
      const delisted = detectDelistedPlugins(
        installedPlugins,
        marketplace,
        marketplaceName,
      )
      for (const pluginId of delisted) {
        // Already recorded in a previous run — nothing more to do.
        if (pluginId in alreadyFlagged) continue
        // Skip managed-only plugins — enterprise admin should handle those
        const installations = installedPlugins.plugins[pluginId] ?? []
        const hasUserInstall = installations.some(
          i =>
            i.scope === 'user' || i.scope === 'project' || i.scope === 'local',
        )
        if (!hasUserInstall) continue
        // Auto-uninstall the delisted plugin from all user-controllable scopes
        for (const installation of installations) {
          const { scope } = installation
          if (scope !== 'user' && scope !== 'project' && scope !== 'local') {
            continue
          }
          try {
            await uninstallPluginOp(pluginId, scope)
          } catch (error) {
            // Best-effort per scope: a failed uninstall still gets flagged
            // below so the UI can surface it.
            logForDebugging(
              `Failed to auto-uninstall delisted plugin ${pluginId} from ${scope}: ${errorMessage(error)}`,
              { level: 'error' },
            )
          }
        }
        await addFlaggedPlugin(pluginId)
        newlyFlagged.push(pluginId)
      }
    } catch (error) {
      // Marketplace may not be available yet — log and continue
      logForDebugging(
        `Failed to check for delisted plugins in "${marketplaceName}": ${errorMessage(error)}`,
        { level: 'warn' },
      )
    }
  }
  return newlyFlagged
}

View File

@@ -0,0 +1,178 @@
/**
* Centralized plugin directory configuration.
*
* This module provides the single source of truth for the plugins directory path.
* It supports switching between 'plugins' and 'cowork_plugins' directories via:
* - CLI flag: --cowork
* - Environment variable: CLAUDE_CODE_USE_COWORK_PLUGINS
*
* The base directory can be overridden via CLAUDE_CODE_PLUGIN_CACHE_DIR.
*/
import { mkdirSync } from 'fs'
import { readdir, rm, stat } from 'fs/promises'
import { delimiter, join } from 'path'
import { getUseCoworkPlugins } from '../../bootstrap/state.js'
import { logForDebugging } from '../debug.js'
import { getClaudeConfigHomeDir, isEnvTruthy } from '../envUtils.js'
import { errorMessage, isFsInaccessible } from '../errors.js'
import { formatFileSize } from '../format.js'
import { expandTilde } from '../permissions/pathValidation.js'
// Directory names under the Claude config home; the choice between them is
// driven by --cowork / CLAUDE_CODE_USE_COWORK_PLUGINS (see getPluginsDirectoryName).
const PLUGINS_DIR = 'plugins'
const COWORK_PLUGINS_DIR = 'cowork_plugins'
/**
 * Resolve which directory name to use for plugins.
 *
 * Priority:
 * 1. Session state (set by CLI flag --cowork)
 * 2. Environment variable CLAUDE_CODE_USE_COWORK_PLUGINS
 * 3. Default: 'plugins'
 */
function getPluginsDirectoryName(): string {
  // Short-circuit order preserves the priority above: the CLI flag wins,
  // the env var is only consulted when the flag is absent.
  const coworkMode =
    getUseCoworkPlugins() ||
    isEnvTruthy(process.env.CLAUDE_CODE_USE_COWORK_PLUGINS)
  return coworkMode ? COWORK_PLUGINS_DIR : PLUGINS_DIR
}
/**
 * Full path to the plugins directory.
 *
 * Priority:
 * 1. CLAUDE_CODE_PLUGIN_CACHE_DIR env var (explicit override)
 * 2. Default: ~/.claude/plugins or ~/.claude/cowork_plugins
 *
 * The override is tilde-expanded because settings.json `env` values never
 * pass through a shell — without expansion a value like "~/.claude/plugins"
 * becomes a literal `~` directory created in the cwd of every project
 * (gh-30794 / CC-212).
 */
export function getPluginsDirectory(): string {
  const override = process.env.CLAUDE_CODE_PLUGIN_CACHE_DIR
  if (!override) {
    return join(getClaudeConfigHomeDir(), getPluginsDirectoryName())
  }
  return expandTilde(override)
}
/**
 * Read-only plugin seed directories, if configured.
 *
 * Customers can pre-bake a populated plugins directory into their container
 * image and point CLAUDE_CODE_PLUGIN_SEED_DIR at it. CC uses it as a
 * read-only fallback layer under the primary plugins directory —
 * marketplaces and plugin caches found in a seed are used in place without
 * re-cloning.
 *
 * Multiple seeds can be layered with the platform path delimiter (':' on
 * Unix, ';' on Windows) in PATH-like precedence order — the first seed that
 * contains a given marketplace or plugin cache wins.
 *
 * Seed structure mirrors the primary plugins directory:
 *   $CLAUDE_CODE_PLUGIN_SEED_DIR/
 *     known_marketplaces.json
 *     marketplaces/<name>/...
 *     cache/<marketplace>/<plugin>/<version>/...
 *
 * @returns Absolute paths to seed dirs in precedence order (empty if unset)
 */
export function getPluginSeedDirs(): string[] {
  const configured = process.env.CLAUDE_CODE_PLUGIN_SEED_DIR
  if (!configured) {
    return []
  }
  // Tilde-expand each entry for the same reason as getPluginsDirectory
  // (gh-30794: settings.json `env` values bypass the shell).
  const seedDirs: string[] = []
  for (const entry of configured.split(delimiter)) {
    if (entry) {
      seedDirs.push(expandTilde(entry))
    }
  }
  return seedDirs
}
function sanitizePluginId(pluginId: string): string {
  // Mirrors the install-cache sanitizer in pluginLoader.ts: any character
  // outside [A-Za-z0-9_-] collapses to '-' so the id is a safe dir name.
  // \w is [A-Za-z0-9_], so [^\w-] matches the same set as the original
  // [^a-zA-Z0-9\-_] character class.
  return pluginId.replace(/[^\w-]/g, '-')
}
/** Pure path computation — no mkdir. Suitable for display (uninstall dialog). */
export function pluginDataDirPath(pluginId: string): string {
  const safeId = sanitizePluginId(pluginId)
  return join(getPluginsDirectory(), 'data', safeId)
}
/**
 * Persistent per-plugin data directory, exposed to plugins as
 * ${CLAUDE_PLUGIN_DATA}. Unlike the version-scoped install cache
 * (${CLAUDE_PLUGIN_ROOT}), which is orphaned and GC'd on every update,
 * this directory survives updates — it is only removed on last-scope
 * uninstall.
 *
 * Ensures the directory exists (mkdir) on every call. Laziness lives at the
 * substitutePluginVariables call site: the DATA pattern uses function-form
 * .replace(), so this runs only when ${CLAUDE_PLUGIN_DATA} actually appears
 * (ROOT also uses function-form, but for $-pattern safety, not laziness).
 * Env-var export sites (MCP/LSP server env, hook env) call eagerly since
 * subprocesses may expect the dir to exist before writing to it.
 *
 * Deliberately sync: it is called from substitutePluginVariables (sync,
 * inside String.replace) — going async would cascade through 6 call sites
 * and their sync iteration loops. One mkdir on the plugin-load path is cheap.
 */
export function getPluginDataDir(pluginId: string): string {
  const path = pluginDataDirPath(pluginId)
  mkdirSync(path, { recursive: true })
  return path
}
/**
 * Size of the data dir for the uninstall confirmation prompt. Returns null
 * when the dir is absent or empty so callers can skip the prompt entirely.
 * Recursive walk — not hot-path (only runs on uninstall).
 */
export async function getPluginDataDirSize(
  pluginId: string,
): Promise<{ bytes: number; human: string } | null> {
  const root = pluginDataDirPath(pluginId)
  let total = 0
  async function accumulate(dir: string): Promise<void> {
    const entries = await readdir(dir, { withFileTypes: true })
    for (const entry of entries) {
      const entryPath = join(dir, entry.name)
      if (entry.isDirectory()) {
        await accumulate(entryPath)
        continue
      }
      // stat() throws ENOENT for a broken symlink. Catch per entry: letting
      // it bubble to the outer catch would return null, skip the dialog, and
      // silently delete the data.
      try {
        total += (await stat(entryPath)).size
      } catch {
        // Broken symlink / raced delete — skip this entry, keep walking.
      }
    }
  }
  try {
    await accumulate(root)
  } catch (error) {
    if (isFsInaccessible(error)) return null
    throw error
  }
  return total > 0 ? { bytes: total, human: formatFileSize(total) } : null
}
/**
 * Best-effort cleanup on last-scope uninstall. Failure is logged but never
 * thrown — the uninstall itself already succeeded, and a cleanup side-effect
 * must not surface as "uninstall failed". Same rationale as
 * deletePluginOptions (pluginOptionsStorage.ts).
 */
export async function deletePluginDataDir(pluginId: string): Promise<void> {
  const target = pluginDataDirPath(pluginId)
  try {
    await rm(target, { recursive: true, force: true })
  } catch (error) {
    logForDebugging(
      `Failed to delete plugin data dir ${target}: ${errorMessage(error)}`,
      { level: 'warn' },
    )
  }
}

View File

@@ -0,0 +1,208 @@
/**
* Flagged plugin tracking utilities
*
* Tracks plugins that were auto-removed because they were delisted from
* their marketplace. Data is stored in ~/.claude/plugins/flagged-plugins.json.
* Flagged plugins appear in a "Flagged" section in /plugins until the user
* dismisses them.
*
* Uses a module-level cache so that getFlaggedPlugins() can be called
* synchronously during React render. The cache is populated on the first
* async call (loadFlaggedPlugins or addFlaggedPlugin) and kept in sync
* with writes.
*/
import { randomBytes } from 'crypto'
import { readFile, rename, unlink, writeFile } from 'fs/promises'
import { join } from 'path'
import { logForDebugging } from '../debug.js'
import { getFsImplementation } from '../fsOperations.js'
import { logError } from '../log.js'
import { jsonParse, jsonStringify } from '../slowOperations.js'
import { getPluginsDirectory } from './pluginDirectories.js'
// Registry file living directly under the plugins directory.
const FLAGGED_PLUGINS_FILENAME = 'flagged-plugins.json'
/** One flagged-plugin record; keys in the stored map are plugin IDs. */
export type FlaggedPlugin = {
  // ISO timestamp of when the plugin was flagged (auto-removed as delisted).
  flaggedAt: string
  // ISO timestamp of when the user first saw the entry; absent until then.
  seenAt?: string
}
// Entries with a seenAt older than this are auto-cleared on next load.
const SEEN_EXPIRY_MS = 48 * 60 * 60 * 1000 // 48 hours
// Module-level cache — populated by loadFlaggedPlugins(), updated by writes.
// null means "never loaded"; getFlaggedPlugins() then returns {}.
let cache: Record<string, FlaggedPlugin> | null = null
/** Absolute path of the flagged-plugins registry file. */
function getFlaggedPluginsPath(): string {
  const dir = getPluginsDirectory()
  return join(dir, FLAGGED_PLUGINS_FILENAME)
}
/**
 * Validate raw file content into a flagged-plugin map. Expects the shape
 * `{ plugins: { [id]: { flaggedAt, seenAt? } } }`; anything else (or any
 * malformed entry) is dropped rather than throwing.
 */
function parsePluginsData(content: string): Record<string, FlaggedPlugin> {
  const raw = jsonParse(content) as unknown
  if (typeof raw !== 'object' || raw === null || !('plugins' in raw)) {
    return {}
  }
  const pluginsField = (raw as { plugins: unknown }).plugins
  if (typeof pluginsField !== 'object' || pluginsField === null) {
    return {}
  }
  const valid: Record<string, FlaggedPlugin> = {}
  for (const [id, value] of Object.entries(
    pluginsField as Record<string, unknown>,
  )) {
    if (!value || typeof value !== 'object') continue
    const candidate = value as { flaggedAt?: unknown; seenAt?: unknown }
    // flaggedAt is mandatory; entries without it are invalid.
    if (typeof candidate.flaggedAt !== 'string') continue
    const record: FlaggedPlugin = { flaggedAt: candidate.flaggedAt }
    if (typeof candidate.seenAt === 'string') {
      record.seenAt = candidate.seenAt
    }
    valid[id] = record
  }
  return valid
}
/** Read and validate the registry file; missing or unreadable file → {}. */
async function readFromDisk(): Promise<Record<string, FlaggedPlugin>> {
  const filePath = getFlaggedPluginsPath()
  try {
    const raw = await readFile(filePath, { encoding: 'utf-8' })
    // Parse inside the try so unparseable content also degrades to {}.
    return parsePluginsData(raw)
  } catch {
    return {}
  }
}
// Persist the full flagged-plugin map. Best-effort: failures are logged,
// never thrown — flagged tracking is advisory and must not break callers.
async function writeToDisk(
  plugins: Record<string, FlaggedPlugin>,
): Promise<void> {
  const filePath = getFlaggedPluginsPath()
  // Random suffix so concurrent writers never collide on the temp file.
  const tempPath = `${filePath}.${randomBytes(8).toString('hex')}.tmp`
  try {
    await getFsImplementation().mkdir(getPluginsDirectory())
    const content = jsonStringify({ plugins }, null, 2)
    // Write-then-rename so readers never observe a partially written file.
    // mode 0o600: owner-only, matching the private nature of the registry.
    await writeFile(tempPath, content, {
      encoding: 'utf-8',
      mode: 0o600,
    })
    await rename(tempPath, filePath)
    // Sync the in-memory cache only after the rename succeeds, so a failed
    // write does not advance the cache past what this function persisted.
    cache = plugins
  } catch (error) {
    logError(error)
    // Remove the orphaned temp file, if it got as far as being created.
    try {
      await unlink(tempPath)
    } catch {
      // Ignore cleanup errors
    }
  }
}
/**
 * Load flagged plugins from disk into the module cache, dropping any entry
 * whose seenAt is at least SEEN_EXPIRY_MS old. Must be called (and awaited)
 * before getFlaggedPlugins() returns meaningful data. Called by
 * useManagePlugins during plugin refresh.
 */
export async function loadFlaggedPlugins(): Promise<void> {
  const loaded = await readFromDisk()
  const now = Date.now()
  // Collect expired ids first, then delete — keeps iteration simple.
  const expired = Object.entries(loaded)
    .filter(
      ([, entry]) =>
        entry.seenAt && now - new Date(entry.seenAt).getTime() >= SEEN_EXPIRY_MS,
    )
    .map(([id]) => id)
  for (const id of expired) {
    delete loaded[id]
  }
  cache = loaded
  // Persist only when something was actually pruned.
  if (expired.length > 0) {
    await writeToDisk(loaded)
  }
}
/**
 * Synchronous read of the in-memory flagged-plugin cache (safe to call
 * during React render). Empty object until loadFlaggedPlugins() has run.
 */
export function getFlaggedPlugins(): Record<string, FlaggedPlugin> {
  if (cache === null) {
    return {}
  }
  return cache
}
/**
 * Add a plugin to the flagged list and persist the registry.
 *
 * @param pluginId "name@marketplace" format
 */
export async function addFlaggedPlugin(pluginId: string): Promise<void> {
  // Hydrate the cache lazily so a write before loadFlaggedPlugins() does
  // not clobber entries already on disk.
  if (cache === null) {
    cache = await readFromDisk()
  }
  const flaggedAt = new Date().toISOString()
  await writeToDisk({ ...cache, [pluginId]: { flaggedAt } })
  logForDebugging(`Flagged plugin: ${pluginId}`)
}
/**
 * Mark flagged plugins as seen. Called when the Installed view renders
 * flagged plugins. Sets seenAt on entries that lack it; 48 hours after
 * seenAt the entry is auto-cleared on the next load.
 */
export async function markFlaggedPluginsSeen(
  pluginIds: string[],
): Promise<void> {
  if (cache === null) {
    cache = await readFromDisk()
  }
  const timestamp = new Date().toISOString()
  const next = { ...cache }
  let dirty = false
  for (const id of pluginIds) {
    const entry = next[id]
    // Only stamp entries that exist and have no (truthy) seenAt yet.
    if (!entry || entry.seenAt) continue
    next[id] = { ...entry, seenAt: timestamp }
    dirty = true
  }
  if (dirty) {
    await writeToDisk(next)
  }
}
/**
 * Remove a plugin from the flagged list. Called when the user dismisses a
 * flagged plugin notification in /plugins. No-op when the id is not flagged.
 */
export async function removeFlaggedPlugin(pluginId: string): Promise<void> {
  if (cache === null) {
    cache = await readFromDisk()
  }
  if (!(pluginId in cache)) {
    return
  }
  const remaining = { ...cache }
  delete remaining[pluginId]
  cache = remaining
  await writeToDisk(remaining)
}

View File

@@ -0,0 +1,123 @@
import type {
EditableSettingSource,
SettingSource,
} from '../settings/constants.js'
import {
ALLOWED_OFFICIAL_MARKETPLACE_NAMES,
type PluginScope,
} from './schemas.js'
/**
 * Extended scope type that includes 'flag' for session-only plugins.
 * 'flag' scope is NOT persisted to installed_plugins.json.
 */
export type ExtendedPluginScope = PluginScope | 'flag'
/**
 * Scopes that are persisted to installed_plugins.json.
 * Excludes 'flag' which is session-only.
 */
export type PersistablePluginScope = Exclude<ExtendedPluginScope, 'flag'>
/**
 * Map from SettingSource to plugin scope.
 * Note: flagSettings maps to 'flag' which is session-only and not persisted.
 *
 * `as const satisfies` keeps the literal value types (indexing yields the
 * exact scope literal) while still verifying every SettingSource is covered.
 */
export const SETTING_SOURCE_TO_SCOPE = {
  policySettings: 'managed',
  userSettings: 'user',
  projectSettings: 'project',
  localSettings: 'local',
  flagSettings: 'flag',
} as const satisfies Record<SettingSource, ExtendedPluginScope>
/**
 * Parsed plugin identifier with name and optional marketplace
 */
export type ParsedPluginIdentifier = {
  name: string
  marketplace?: string
}
/**
 * Split a plugin identifier into its name and optional marketplace.
 *
 * Only the first two '@'-separated segments are meaningful: for an input
 * like "plugin@market@place", everything after the second '@' is dropped.
 * This is intentional — marketplace names never contain '@'.
 *
 * @param plugin The plugin identifier (name or name@marketplace)
 * @returns Parsed name plus marketplace when one was present
 */
export function parsePluginIdentifier(plugin: string): ParsedPluginIdentifier {
  const separator = plugin.indexOf('@')
  if (separator === -1) {
    return { name: plugin }
  }
  const tail = plugin.slice(separator + 1)
  const nextSeparator = tail.indexOf('@')
  return {
    name: plugin.slice(0, separator),
    marketplace: nextSeparator === -1 ? tail : tail.slice(0, nextSeparator),
  }
}
/**
 * Compose a plugin ID from its parts.
 *
 * @param name The plugin name
 * @param marketplace Optional marketplace name
 * @returns "name@marketplace" when a marketplace is given, otherwise "name"
 */
export function buildPluginId(name: string, marketplace?: string): string {
  if (!marketplace) {
    return name
  }
  return `${name}@${marketplace}`
}
/**
 * Whether a marketplace name refers to an official (Anthropic-controlled)
 * marketplace. Used for telemetry redaction — official plugin identifiers
 * are safe to log to general-access additional_metadata; third-party
 * identifiers go only to the PII-tagged _PROTO_* BQ columns.
 */
export function isOfficialMarketplaceName(
  marketplace: string | undefined,
): boolean {
  if (marketplace === undefined) {
    return false
  }
  // Membership check is case-insensitive.
  return ALLOWED_OFFICIAL_MARKETPLACE_NAMES.has(marketplace.toLowerCase())
}
/**
 * Map from installable plugin scope to editable setting source.
 * This is the inverse of SETTING_SOURCE_TO_SCOPE for editable scopes only.
 * Note: 'managed' scope cannot be installed to, so it's not included here.
 */
// Keyed by scope; values name the settings file plugins of that scope use.
const SCOPE_TO_EDITABLE_SOURCE: Record<
  Exclude<PluginScope, 'managed'>,
  EditableSettingSource
> = {
  user: 'userSettings',
  project: 'projectSettings',
  local: 'localSettings',
}
/**
 * Convert a plugin scope to its corresponding editable setting source.
 * @param scope The plugin installation scope
 * @returns The setting source used for reading/writing settings
 * @throws Error for 'managed' — plugins cannot be installed to that scope
 */
export function scopeToSettingSource(
  scope: PluginScope,
): EditableSettingSource {
  if (scope !== 'managed') {
    // Narrowed to Exclude<PluginScope, 'managed'>, so the lookup is total.
    return SCOPE_TO_EDITABLE_SOURCE[scope]
  }
  throw new Error('Cannot install plugins to managed scope')
}
/**
 * Convert an editable setting source to its corresponding plugin scope.
 * Derived from SETTING_SOURCE_TO_SCOPE to keep a single source of truth.
 * @param source The setting source
 * @returns The corresponding plugin scope
 */
export function settingSourceToScope(
  source: EditableSettingSource,
): Exclude<PluginScope, 'managed'> {
  const scope = SETTING_SOURCE_TO_SCOPE[source]
  // Editable sources never map to 'managed', so this cast is safe.
  return scope as Exclude<PluginScope, 'managed'>
}

View File

@@ -0,0 +1,595 @@
/**
* Shared helper functions for plugin installation
*
* This module contains common utilities used across the plugin installation
* system to reduce code duplication and improve maintainability.
*/
import { randomBytes } from 'crypto'
import { rename, rm } from 'fs/promises'
import { dirname, join, resolve, sep } from 'path'
import {
type AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
type AnalyticsMetadata_I_VERIFIED_THIS_IS_PII_TAGGED,
logEvent,
} from '../../services/analytics/index.js'
import { getCwd } from '../cwd.js'
import { toError } from '../errors.js'
import { getFsImplementation } from '../fsOperations.js'
import { logError } from '../log.js'
import {
getSettingsForSource,
updateSettingsForSource,
} from '../settings/settings.js'
import { buildPluginTelemetryFields } from '../telemetry/pluginTelemetry.js'
import { clearAllCaches } from './cacheUtils.js'
import {
formatDependencyCountSuffix,
getEnabledPluginIdsForScope,
type ResolutionResult,
resolveDependencyClosure,
} from './dependencyResolver.js'
import {
addInstalledPlugin,
getGitCommitSha,
} from './installedPluginsManager.js'
import { getManagedPluginNames } from './managedPlugins.js'
import { getMarketplaceCacheOnly, getPluginById } from './marketplaceManager.js'
import {
isOfficialMarketplaceName,
parsePluginIdentifier,
scopeToSettingSource,
} from './pluginIdentifier.js'
import {
cachePlugin,
getVersionedCachePath,
getVersionedZipCachePath,
} from './pluginLoader.js'
import { isPluginBlockedByPolicy } from './pluginPolicy.js'
import { calculatePluginVersion } from './pluginVersioning.js'
import {
isLocalPluginSource,
type PluginMarketplaceEntry,
type PluginScope,
type PluginSource,
} from './schemas.js'
import {
convertDirectoryToZipInPlace,
isPluginZipCacheEnabled,
} from './zipCache.js'
/**
 * Plugin installation metadata for installed_plugins.json
 */
export type PluginInstallationInfo = {
  // "plugin@marketplace" identifier
  pluginId: string
  // Absolute path where the plugin is installed/cached
  installPath: string
  // Optional version; registerPluginInstallation records 'unknown' if absent
  version?: string
}
/**
 * Current time as an ISO-8601 UTC string
 * (e.g. "2024-01-01T00:00:00.000Z").
 */
export function getCurrentTimestamp(): string {
  const now = new Date()
  return now.toISOString()
}
/**
 * Validate that a resolved path stays within a base directory.
 * Prevents path traversal where a malicious relative path like
 * './../../../etc/passwd' could escape the expected directory.
 *
 * @param basePath - The base directory the resolved path must stay within
 * @param relativePath - The relative path to validate
 * @returns The validated absolute path
 * @throws Error if the path would escape the base directory
 */
export function validatePathWithinBase(
  basePath: string,
  relativePath: string,
): string {
  const target = resolve(basePath, relativePath)
  const base = resolve(basePath)
  // Resolving to the base itself is allowed; otherwise the target must be
  // strictly under `base + sep` — the separator suffix prevents sibling
  // prefixes from matching (/foo/bar must not accept /foo/barbaz).
  const isInside = target === base || target.startsWith(base + sep)
  if (!isInside) {
    throw new Error(
      `Path traversal detected: "${relativePath}" would escape the base directory`,
    )
  }
  return target
}
/**
* Cache a plugin (local or external) and add it to installed_plugins.json
*
* This function combines the common pattern of:
* 1. Caching a plugin to ~/.claude/plugins/cache/
* 2. Adding it to the installed plugins registry
*
* Both local plugins (with string source like "./path") and external plugins
* (with object source like {source: "github", ...}) are cached to the same
* location to ensure consistent behavior.
*
* @param pluginId - Plugin ID in "plugin@marketplace" format
* @param entry - Plugin marketplace entry
* @param scope - Installation scope (user, project, local, or managed). Defaults to 'user'.
* 'managed' scope is used for plugins installed automatically from managed settings.
* @param projectPath - Project path (required for project/local scopes)
* @param localSourcePath - For local plugins, the resolved absolute path to the source directory
* @returns The installation path
*/
export async function cacheAndRegisterPlugin(
  pluginId: string,
  entry: PluginMarketplaceEntry,
  scope: PluginScope = 'user',
  projectPath?: string,
  localSourcePath?: string,
): Promise<string> {
  // For local plugins, we need the resolved absolute path
  // Cast to PluginSource since cachePlugin handles any string path at runtime
  const source: PluginSource =
    typeof entry.source === 'string' && localSourcePath
      ? (localSourcePath as PluginSource)
      : entry.source
  const cacheResult = await cachePlugin(source, {
    manifest: entry as PluginMarketplaceEntry,
  })
  // For local plugins, use the original source path for Git SHA calculation
  // because the cached temp directory doesn't have .git (it's copied from a
  // subdirectory of the marketplace git repo). For external plugins, use the
  // cached path. For git-subdir sources, cachePlugin already captured the SHA
  // before discarding the ephemeral clone (the extracted subdir has no .git).
  const pathForGitSha = localSourcePath || cacheResult.path
  const gitCommitSha =
    cacheResult.gitCommitSha ?? (await getGitCommitSha(pathForGitSha))
  // Same instant for installedAt and lastUpdated on a fresh install.
  const now = getCurrentTimestamp()
  // Resolve the version string that names the versioned cache directory below.
  const version = await calculatePluginVersion(
    pluginId,
    entry.source,
    cacheResult.manifest,
    pathForGitSha,
    entry.version,
    cacheResult.gitCommitSha,
  )
  // Move the cached plugin to the versioned path: cache/marketplace/plugin/version/
  const versionedPath = getVersionedCachePath(pluginId, version)
  let finalPath = cacheResult.path
  // Only move if the paths are different and plugin was cached to a different location
  if (cacheResult.path !== versionedPath) {
    // Create the versioned directory structure
    await getFsImplementation().mkdir(dirname(versionedPath))
    // Remove existing versioned path if present (force: no-op if missing)
    await rm(versionedPath, { recursive: true, force: true })
    // Check if versionedPath is a subdirectory of cacheResult.path
    // This happens when marketplace name equals plugin name (e.g., "exa-mcp-server@exa-mcp-server")
    // In this case, we can't directly rename because we'd be moving a directory into itself
    const normalizedCachePath = cacheResult.path.endsWith(sep)
      ? cacheResult.path
      : cacheResult.path + sep
    const isSubdirectory = versionedPath.startsWith(normalizedCachePath)
    if (isSubdirectory) {
      // Move to a temp location first, then to final destination
      // We can't directly rename/copy a directory into its own subdirectory
      // Use the parent of cacheResult.path (same filesystem) to avoid EXDEV
      // errors when /tmp is on a different filesystem (e.g., tmpfs)
      const tempPath = join(
        dirname(cacheResult.path),
        `.claude-plugin-temp-${Date.now()}-${randomBytes(4).toString('hex')}`,
      )
      await rename(cacheResult.path, tempPath)
      // Re-create the parent: the first rename may have detached it.
      await getFsImplementation().mkdir(dirname(versionedPath))
      await rename(tempPath, versionedPath)
    } else {
      // Move the cached plugin to the versioned location
      await rename(cacheResult.path, versionedPath)
    }
    finalPath = versionedPath
  }
  // Zip cache mode: convert directory to ZIP and remove the directory
  if (isPluginZipCacheEnabled()) {
    const zipPath = getVersionedZipCachePath(pluginId, version)
    await convertDirectoryToZipInPlace(finalPath, zipPath)
    finalPath = zipPath
  }
  // Add to both V1 and V2 installed_plugins files with correct scope
  addInstalledPlugin(
    pluginId,
    {
      version,
      installedAt: now,
      lastUpdated: now,
      installPath: finalPath,
      gitCommitSha,
    },
    scope,
    projectPath,
  )
  return finalPath
}
/**
 * Register a plugin installation without caching.
 *
 * Used for local plugins that already exist on disk and need no remote
 * caching. External plugins should go through cacheAndRegisterPlugin().
 *
 * @param info - Plugin installation information
 * @param scope - Installation scope (user, project, local, or managed);
 *   defaults to 'user'. 'managed' is used for plugins registered from
 *   managed settings.
 * @param projectPath - Project path (required for project/local scopes)
 */
export function registerPluginInstallation(
  info: PluginInstallationInfo,
  scope: PluginScope = 'user',
  projectPath?: string,
): void {
  const timestamp = getCurrentTimestamp()
  addInstalledPlugin(
    info.pluginId,
    {
      // 'unknown' stands in when the caller supplied no version.
      version: info.version || 'unknown',
      installedAt: timestamp,
      lastUpdated: timestamp,
      installPath: info.installPath,
    },
    scope,
    projectPath,
  )
}
/**
 * Parse a plugin ID into its components.
 *
 * @param pluginId - Plugin ID in "plugin@marketplace" format
 * @returns Parsed components, or null unless the ID is exactly
 *   "name@marketplace" with both parts non-empty
 */
export function parsePluginId(
  pluginId: string,
): { name: string; marketplace: string } | null {
  const [name, marketplace, ...extra] = pluginId.split('@')
  if (!name || !marketplace || extra.length > 0) {
    return null
  }
  return { name, marketplace }
}
/**
 * Structured result from the install core. Wrappers format messages and
 * handle analytics/error-catching around this.
 */
export type InstallCoreResult =
  // Success: the installed dependency closure plus a preformatted
  // dependency-count suffix (from formatDependencyCountSuffix) for messages.
  | { ok: true; closure: string[]; depNote: string }
  // Local-source plugin with no marketplace install location — would
  // otherwise silently skip caching (see guard in installResolvedPlugin).
  | { ok: false; reason: 'local-source-no-location'; pluginName: string }
  // updateSettingsForSource reported an error while enabling the closure.
  | { ok: false; reason: 'settings-write-failed'; message: string }
  // Dependency resolution failed; see formatResolutionError for rendering.
  | {
      ok: false
      reason: 'resolution-failed'
      resolution: ResolutionResult & { ok: false }
    }
  // The root plugin itself is blocked by org policy.
  | { ok: false; reason: 'blocked-by-policy'; pluginName: string }
  // A transitive dependency in the closure is blocked by org policy.
  | {
      ok: false
      reason: 'dependency-blocked-by-policy'
      pluginName: string
      blockedDependency: string
    }
/**
 * Format a failed ResolutionResult into a user-facing message. Unified on
 * the richer CLI messages (the "Is the X marketplace added?" hint is useful
 * for UI users too).
 */
export function formatResolutionError(
  r: ResolutionResult & { ok: false },
): string {
  if (r.reason === 'cycle') {
    return `Dependency cycle: ${r.chain.join(' → ')}`
  }
  if (r.reason === 'cross-marketplace') {
    const depMkt = parsePluginIdentifier(r.dependency).marketplace
    const where = depMkt
      ? `marketplace "${depMkt}"`
      : 'a different marketplace'
    const hint = depMkt
      ? ` Add "${depMkt}" to allowCrossMarketplaceDependenciesOn in the ROOT marketplace's marketplace.json (the marketplace of the plugin you're installing — only its allowlist applies; no transitive trust).`
      : ''
    return `Dependency "${r.dependency}" (required by ${r.requiredBy}) is in ${where}, which is not in the allowlist — cross-marketplace dependencies are blocked by default. Install it manually first.${hint}`
  }
  // Remaining case: 'not-found'.
  const { marketplace: depMkt } = parsePluginIdentifier(r.missing)
  return depMkt
    ? `Dependency "${r.missing}" (required by ${r.requiredBy}) not found. Is the "${depMkt}" marketplace added?`
    : `Dependency "${r.missing}" (required by ${r.requiredBy}) not found in any configured marketplace`
}
/**
* Core plugin install logic, shared by the CLI path (`installPluginOp`) and
* the interactive UI path (`installPluginFromMarketplace`). Given a
* pre-resolved marketplace entry, this:
*
* 1. Guards against local-source plugins without a marketplace install
* location (would silently no-op otherwise).
* 2. Resolves the transitive dependency closure (when PLUGIN_DEPENDENCIES
* is on; trivial single-plugin closure otherwise).
* 3. Writes the entire closure to enabledPlugins in one settings update.
* 4. Caches each closure member (downloads/copies sources as needed).
* 5. Clears memoization caches.
*
* Returns a structured result. Message formatting, analytics, and top-level
* error wrapping stay in the caller-specific wrappers.
*
* @param marketplaceInstallLocation Pass this if the caller already has it
* (from a prior marketplace search) to avoid a redundant lookup.
*/
export async function installResolvedPlugin({
  pluginId,
  entry,
  scope,
  marketplaceInstallLocation,
}: {
  pluginId: string
  entry: PluginMarketplaceEntry
  scope: 'user' | 'project' | 'local'
  marketplaceInstallLocation?: string
}): Promise<InstallCoreResult> {
  // Map the install scope to the writable settings source (user/project/local).
  const settingSource = scopeToSettingSource(scope)
  // ── Policy guard ──
  // Org-blocked plugins (managed-settings.json enabledPlugins: false) cannot
  // be installed. Checked here so all install paths (CLI, UI, hint-triggered)
  // are covered in one place.
  if (isPluginBlockedByPolicy(pluginId)) {
    return { ok: false, reason: 'blocked-by-policy', pluginName: entry.name }
  }
  // ── Resolve dependency closure ──
  // depInfo caches marketplace lookups so the materialize loop doesn't
  // re-fetch. Seed the root if the caller gave us its install location.
  const depInfo = new Map<
    string,
    { entry: PluginMarketplaceEntry; marketplaceInstallLocation: string }
  >()
  // Without this guard, a local-source root with undefined
  // marketplaceInstallLocation falls through: depInfo isn't seeded, the
  // materialize loop's `if (!info) continue` skips the root, and the user
  // sees "Successfully installed" while nothing is cached.
  if (isLocalPluginSource(entry.source) && !marketplaceInstallLocation) {
    return {
      ok: false,
      reason: 'local-source-no-location',
      pluginName: entry.name,
    }
  }
  if (marketplaceInstallLocation) {
    depInfo.set(pluginId, { entry, marketplaceInstallLocation })
  }
  // Cross-marketplace dependencies are permitted only via the ROOT
  // marketplace's allowCrossMarketplaceDependenciesOn allowlist.
  const rootMarketplace = parsePluginIdentifier(pluginId).marketplace
  const allowedCrossMarketplaces = new Set(
    (rootMarketplace
      ? (await getMarketplaceCacheOnly(rootMarketplace))
          ?.allowCrossMarketplaceDependenciesOn
      : undefined) ?? [],
  )
  const resolution = await resolveDependencyClosure(
    pluginId,
    async id => {
      if (depInfo.has(id)) return depInfo.get(id)!.entry
      if (id === pluginId) return entry
      const info = await getPluginById(id)
      if (info) depInfo.set(id, info)
      return info?.entry ?? null
    },
    getEnabledPluginIdsForScope(settingSource),
    allowedCrossMarketplaces,
  )
  if (!resolution.ok) {
    return { ok: false, reason: 'resolution-failed', resolution }
  }
  // ── Policy guard for transitive dependencies ──
  // The root plugin was already checked above, but any dependency in the
  // closure could also be policy-blocked. Check before writing to settings
  // so a non-blocked plugin can't pull in a blocked dependency.
  for (const id of resolution.closure) {
    if (id !== pluginId && isPluginBlockedByPolicy(id)) {
      return {
        ok: false,
        reason: 'dependency-blocked-by-policy',
        pluginName: entry.name,
        blockedDependency: id,
      }
    }
  }
  // ── ACTION: write entire closure to settings in one call ──
  const closureEnabled: Record<string, true> = {}
  for (const id of resolution.closure) closureEnabled[id] = true
  const { error } = updateSettingsForSource(settingSource, {
    enabledPlugins: {
      ...getSettingsForSource(settingSource)?.enabledPlugins,
      ...closureEnabled,
    },
  })
  if (error) {
    return {
      ok: false,
      reason: 'settings-write-failed',
      message: error.message,
    }
  }
  // ── Materialize: cache each closure member ──
  const projectPath = scope !== 'user' ? getCwd() : undefined
  for (const id of resolution.closure) {
    let info = depInfo.get(id)
    // Root wasn't pre-seeded (caller didn't pass marketplaceInstallLocation
    // for a non-local source). Fetch now; it's needed for the cache write.
    if (!info && id === pluginId) {
      const mktLocation = (await getPluginById(id))?.marketplaceInstallLocation
      if (mktLocation) info = { entry, marketplaceInstallLocation: mktLocation }
    }
    if (!info) continue
    let localSourcePath: string | undefined
    const { source } = info.entry
    if (isLocalPluginSource(source)) {
      // Guard relative local sources against escaping the marketplace dir.
      localSourcePath = validatePathWithinBase(
        info.marketplaceInstallLocation,
        source,
      )
    }
    await cacheAndRegisterPlugin(
      id,
      info.entry,
      scope,
      projectPath,
      localSourcePath,
    )
  }
  // Drop memoized plugin/marketplace state so the new install is visible.
  clearAllCaches()
  // Suffix describing how many dependencies came along with the root.
  const depNote = formatDependencyCountSuffix(
    resolution.closure.filter(id => id !== pluginId),
  )
  return { ok: true, closure: resolution.closure, depNote }
}
/**
 * Result of a plugin installation operation
 */
export type InstallPluginResult =
  | { success: true; message: string }
  | { success: false; error: string }
/**
 * Parameters for installing a plugin from marketplace
 */
export type InstallPluginParams = {
  // "plugin@marketplace" identifier
  pluginId: string
  entry: PluginMarketplaceEntry
  marketplaceName: string
  // Defaults to 'user' in installPluginFromMarketplace
  scope?: 'user' | 'project' | 'local'
  // 'hint' → analytics install_source 'ui-suggestion'; default 'user' → 'ui-discover'
  trigger?: 'hint' | 'user'
}
/**
* Install a single plugin from a marketplace with the specified scope.
* Interactive-UI wrapper around `installResolvedPlugin` — adds try/catch,
* analytics, and UI-style message formatting.
*/
export async function installPluginFromMarketplace({
  pluginId,
  entry,
  marketplaceName,
  scope = 'user',
  trigger = 'user',
}: InstallPluginParams): Promise<InstallPluginResult> {
  try {
    // Look up the marketplace install location for local-source plugins.
    // Without this, plugins with relative-path sources fail from the
    // interactive UI path (/plugin install) even though the CLI path works.
    const pluginInfo = await getPluginById(pluginId)
    const marketplaceInstallLocation = pluginInfo?.marketplaceInstallLocation
    const result = await installResolvedPlugin({
      pluginId,
      entry,
      scope,
      marketplaceInstallLocation,
    })
    if (!result.ok) {
      // Translate structured failure reasons into UI-facing error strings.
      switch (result.reason) {
        case 'local-source-no-location':
          return {
            success: false,
            error: `Cannot install local plugin "${result.pluginName}" without marketplace install location`,
          }
        case 'settings-write-failed':
          return {
            success: false,
            error: `Failed to update settings: ${result.message}`,
          }
        case 'resolution-failed':
          return {
            success: false,
            error: formatResolutionError(result.resolution),
          }
        case 'blocked-by-policy':
          return {
            success: false,
            error: `Plugin "${result.pluginName}" is blocked by your organization's policy and cannot be installed`,
          }
        case 'dependency-blocked-by-policy':
          return {
            success: false,
            error: `Cannot install "${result.pluginName}": dependency "${result.blockedDependency}" is blocked by your organization's policy`,
          }
      }
    }
    // _PROTO_* routes to PII-tagged plugin_name/marketplace_name BQ columns.
    // plugin_id kept in additional_metadata (redacted to 'third-party' for
    // non-official) because dbt external_claude_code_plugin_installs.sql
    // extracts $.plugin_id for official-marketplace install tracking. Other
    // plugin lifecycle events drop the blob key — no downstream consumers.
    logEvent('tengu_plugin_installed', {
      _PROTO_plugin_name:
        entry.name as AnalyticsMetadata_I_VERIFIED_THIS_IS_PII_TAGGED,
      _PROTO_marketplace_name:
        marketplaceName as AnalyticsMetadata_I_VERIFIED_THIS_IS_PII_TAGGED,
      plugin_id: (isOfficialMarketplaceName(marketplaceName)
        ? pluginId
        : 'third-party') as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
      trigger:
        trigger as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
      install_source: (trigger === 'hint'
        ? 'ui-suggestion'
        : 'ui-discover') as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
      ...buildPluginTelemetryFields(
        entry.name,
        marketplaceName,
        getManagedPluginNames(),
      ),
      // Version is attached only when the marketplace entry declares one.
      ...(entry.version && {
        version:
          entry.version as AnalyticsMetadata_I_VERIFIED_THIS_IS_NOT_CODE_OR_FILEPATHS,
      }),
    })
    return {
      success: true,
      message: `✓ Installed ${entry.name}${result.depNote}. Run /reload-plugins to activate.`,
    }
  } catch (err) {
    // Normalize unexpected throws into the UI-facing failure shape.
    const errorMessage = err instanceof Error ? err.message : String(err)
    logError(toError(err))
    return { success: false, error: `Failed to install: ${errorMessage}` }
  }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,400 @@
/**
* Plugin option storage and substitution.
*
* Plugins declare user-configurable options in `manifest.userConfig` — a record
* of field schemas matching `McpbUserConfigurationOption`. At enable time the
* user is prompted for values. Storage splits by `sensitive`:
* - `sensitive: true` → secureStorage (keychain on macOS, .credentials.json elsewhere)
* - everything else → settings.json `pluginConfigs[pluginId].options`
*
* `loadPluginOptions` reads and merges both. The substitution helpers are also
* here (moved from mcpPluginIntegration.ts) so hooks/LSP/skills don't all
* import from MCP-specific code.
*/
import memoize from 'lodash-es/memoize.js'
import type { LoadedPlugin } from '../../types/plugin.js'
import { logForDebugging } from '../debug.js'
import { logError } from '../log.js'
import { getSecureStorage } from '../secureStorage/index.js'
import {
getSettings_DEPRECATED,
updateSettingsForSource,
} from '../settings/settings.js'
import {
type UserConfigSchema,
type UserConfigValues,
validateUserConfig,
} from './mcpbHandler.js'
import { getPluginDataDir } from './pluginDirectories.js'
// Aliases over the MCPB user-config shapes so hooks/LSP/skills can depend on
// this module instead of the MCP-specific mcpbHandler.
export type PluginOptionValues = UserConfigValues
export type PluginOptionSchema = UserConfigSchema
/**
* Canonical storage key for a plugin's options in both `settings.pluginConfigs`
* and `secureStorage.pluginSecrets`. Today this is `plugin.source` — always
* `"${name}@${marketplace}"` (pluginLoader.ts:1400). `plugin.repository` is
* a backward-compat alias that's set to the same string (1401); don't use it
* for storage. UI code that manually constructs `` `${name}@${marketplace}` ``
* produces the same key by convention — see PluginOptionsFlow, ManagePlugins.
*
* Exists so there's exactly one place to change if the key format ever drifts.
*/
export function getPluginStorageId(plugin: LoadedPlugin): string {
  // Single source of truth for the storage key: plugin.source, which is
  // always `${name}@${marketplace}`.
  const { source } = plugin
  return source
}
/**
* Load saved option values for a plugin, merging non-sensitive (from settings)
* with sensitive (from secureStorage). SecureStorage wins on key collision.
*
* Memoized per-pluginId because hooks can fire per-tool-call and each call
* would otherwise do a settings read + keychain spawn. Cache cleared via
* `clearPluginOptionsCache` when settings change or plugins reload.
*/
export const loadPluginOptions = memoize(
  (pluginId: string): PluginOptionValues => {
    // Non-sensitive values live in settings.json under pluginConfigs.
    const fromSettings: PluginOptionValues =
      getSettings_DEPRECATED().pluginConfigs?.[pluginId]?.options ??
      ({} as PluginOptionValues)
    // Sensitive values live in secureStorage. NOTE: storage.read() spawns
    // `security find-generic-password` on macOS (~50-100ms, synchronous);
    // the memoize wrapper plus keychain's own 30s TTL cache keep this to one
    // blocking spawn per session per plugin-with-options. /reload-plugins
    // clears the memoize, so the next load after that pays a fresh spawn.
    const fromSecureStorage: Record<string, string> =
      getSecureStorage().read()?.pluginSecrets?.[pluginId] ?? {}
    // Merge with secureStorage winning on collision: the schema routes each
    // key to exactly one store, so a clash only happens if a user hand-edits
    // settings.json — prefer the more secure source in that case.
    return { ...fromSettings, ...fromSecureStorage }
  },
)
export function clearPluginOptionsCache(): void {
  // lodash memoize exposes its Map-like cache; guard defensively in case the
  // cache shape ever changes.
  const cache = loadPluginOptions.cache
  cache?.clear?.()
}
/**
* Save option values, splitting by `schema[key].sensitive`. Non-sensitive go
* to userSettings; sensitive go to secureStorage. Writes are skipped if nothing
* in that category is present.
*
* Clears the load cache on success so the next `loadPluginOptions` sees fresh.
*/
export function savePluginOptions(
  pluginId: string,
  values: PluginOptionValues,
  schema: PluginOptionSchema,
): void {
  // Split incoming values by destination: schema[key].sensitive routes a key
  // to the keychain; everything else goes to settings.json. Sensitive values
  // are stringified because secureStorage only holds string records.
  const nonSensitive: PluginOptionValues = {}
  const sensitive: Record<string, string> = {}
  for (const [key, value] of Object.entries(values)) {
    if (schema[key]?.sensitive === true) {
      sensitive[key] = String(value)
    } else {
      nonSensitive[key] = value
    }
  }
  // Scrub sets — see saveMcpServerUserConfig (mcpbHandler.ts) for the
  // rationale. Only keys in THIS save are scrubbed from the other store,
  // so partial reconfigures don't lose data.
  const sensitiveKeysInThisSave = new Set(Object.keys(sensitive))
  const nonSensitiveKeysInThisSave = new Set(Object.keys(nonSensitive))
  // secureStorage FIRST — if keychain fails, throw before touching
  // settings.json so old plaintext (if any) stays as fallback.
  const storage = getSecureStorage()
  const existingInSecureStorage =
    storage.read()?.pluginSecrets?.[pluginId] ?? undefined
  // Drop any keychain key this save reclassified as non-sensitive — its new
  // value is written to settings.json below.
  const secureScrubbed = existingInSecureStorage
    ? Object.fromEntries(
        Object.entries(existingInSecureStorage).filter(
          ([k]) => !nonSensitiveKeysInThisSave.has(k),
        ),
      )
    : undefined
  // A keychain write is needed even with no new sensitive values when the
  // scrub above actually removed something.
  const needSecureScrub =
    secureScrubbed &&
    existingInSecureStorage &&
    Object.keys(secureScrubbed).length !==
      Object.keys(existingInSecureStorage).length
  if (Object.keys(sensitive).length > 0 || needSecureScrub) {
    const existing = storage.read() ?? {}
    if (!existing.pluginSecrets) {
      existing.pluginSecrets = {}
    }
    // Scrubbed remainder first, then this save's values on top.
    existing.pluginSecrets[pluginId] = {
      ...secureScrubbed,
      ...sensitive,
    }
    const result = storage.update(existing)
    if (!result.success) {
      const err = new Error(
        `Failed to save sensitive plugin options for ${pluginId} to secure storage`,
      )
      logError(err)
      throw err
    }
    if (result.warning) {
      logForDebugging(`Plugin secrets save warning: ${result.warning}`, {
        level: 'warn',
      })
    }
  }
  // settings.json AFTER secureStorage — scrub sensitive keys via explicit
  // undefined (mergeWith deletion pattern).
  //
  // TODO: getSettings_DEPRECATED returns MERGED settings across all scopes.
  // Mutating that and writing to userSettings can leak project-scope
  // pluginConfigs into ~/.claude/settings.json. Same pattern exists in
  // saveMcpServerUserConfig. Safe today since pluginConfigs is only ever
  // written here (user-scope), but will bite if we add project-scoped
  // plugin options.
  const settings = getSettings_DEPRECATED()
  const existingInSettings = settings.pluginConfigs?.[pluginId]?.options ?? {}
  // Keys currently in settings.json that this save reclassified as
  // sensitive; they must be deleted from the plaintext store.
  const keysToScrubFromSettings = Object.keys(existingInSettings).filter(k =>
    sensitiveKeysInThisSave.has(k),
  )
  if (
    Object.keys(nonSensitive).length > 0 ||
    keysToScrubFromSettings.length > 0
  ) {
    if (!settings.pluginConfigs) {
      settings.pluginConfigs = {}
    }
    if (!settings.pluginConfigs[pluginId]) {
      settings.pluginConfigs[pluginId] = {}
    }
    // Explicit-undefined entries trigger key removal in the mergeWith-based
    // settings writer; absent keys would be left alone.
    const scrubbed = Object.fromEntries(
      keysToScrubFromSettings.map(k => [k, undefined]),
    ) as Record<string, undefined>
    settings.pluginConfigs[pluginId].options = {
      ...nonSensitive,
      ...scrubbed,
    } as PluginOptionValues
    const result = updateSettingsForSource('userSettings', settings)
    if (result.error) {
      logError(result.error)
      throw new Error(
        `Failed to save plugin options for ${pluginId}: ${result.error.message}`,
      )
    }
  }
  // Invalidate the memoized loader so the next read reflects this save.
  clearPluginOptionsCache()
}
/**
* Delete all stored option values for a plugin — both the non-sensitive
* `settings.pluginConfigs[pluginId]` entry and the sensitive
* `secureStorage.pluginSecrets[pluginId]` entry.
*
* Call this when the LAST installation of a plugin is uninstalled (i.e.,
* alongside `markPluginVersionOrphaned`). Don't call on every uninstall —
* a plugin can be installed in multiple scopes and the user's config should
* survive removing it from one scope while it remains in another.
*
* Best-effort: keychain write failure is logged but doesn't throw, since
* the uninstall itself succeeded and we don't want to surface a confusing
* "uninstall failed" message for a cleanup side-effect.
*/
export function deletePluginOptions(pluginId: string): void {
  // -- settings.json side --------------------------------------------------
  // Removal goes through the mergeWith-deletion contract: an explicit
  // `undefined` value deletes the key, while an absent key is ignored. The
  // cast is deliberate — widening the Zod schema with z.undefined() instead
  // would leak `| {[k: string]: unknown}` into the public SDK type and kill
  // excess-property checks; the deletion contract is internal plumbing and
  // shouldn't shape the schema. Wiping the whole pluginConfigs[pluginId]
  // entry also clears the legacy mcpServers sub-key.
  const settings = getSettings_DEPRECATED()
  type PluginConfigs = NonNullable<typeof settings.pluginConfigs>
  if (settings.pluginConfigs?.[pluginId]) {
    // Partial<Record<K,V>> = Record<K, V | undefined> admits the undefined
    // value, and Partial-of-X overlaps X, so the cast below is a narrowing
    // TS accepts (same approach as marketplaceManager.ts:1795).
    const deletion: Partial<PluginConfigs> = { [pluginId]: undefined }
    const { error } = updateSettingsForSource('userSettings', {
      pluginConfigs: deletion as PluginConfigs,
    })
    if (error) {
      logForDebugging(
        `deletePluginOptions: failed to clear settings.pluginConfigs[${pluginId}]: ${error.message}`,
        { level: 'warn' },
      )
    }
  }
  // -- secure-storage side -------------------------------------------------
  // Drop both the top-level pluginSecrets[pluginId] entry and any per-server
  // composite keys `${pluginId}/${server}` (written by saveMcpServerUserConfig's
  // sensitive split). Plugin IDs are `name@marketplace` and never contain '/',
  // so the prefix match cannot hit a different plugin.
  const storage = getSecureStorage()
  const stored = storage.read()
  const secrets = stored?.pluginSecrets
  if (secrets) {
    const serverKeyPrefix = `${pluginId}/`
    const kept = Object.entries(secrets).filter(
      ([key]) => key !== pluginId && !key.startsWith(serverKeyPrefix),
    )
    const removedAnything = kept.length !== Object.keys(secrets).length
    if (removedAnything) {
      const result = storage.update({
        ...stored,
        pluginSecrets:
          kept.length > 0 ? Object.fromEntries(kept) : undefined,
      })
      // Best-effort: the uninstall itself already succeeded, so a keychain
      // failure here is logged rather than thrown.
      if (!result.success) {
        logForDebugging(
          `deletePluginOptions: failed to clear pluginSecrets for ${pluginId} from keychain`,
          { level: 'warn' },
        )
      }
    }
  }
  clearPluginOptionsCache()
}
/**
* Find option keys whose saved values don't satisfy the schema — i.e., what to
* prompt for. Returns the schema slice for those keys, or empty if everything
* validates. Empty manifest.userConfig → empty result.
*
* Used by PluginOptionsFlow to decide whether to show the prompt after enable.
*/
export function getUnconfiguredOptions(
  plugin: LoadedPlugin,
): PluginOptionSchema {
  const declared = plugin.manifest.userConfig
  if (!declared || Object.keys(declared).length === 0) {
    return {}
  }
  const stored = loadPluginOptions(getPluginStorageId(plugin))
  // Fast path: everything validates, nothing to prompt for.
  if (validateUserConfig(stored, declared).valid) {
    return {}
  }
  // Re-validate field-by-field to find the failures. validateUserConfig
  // reports errors as strings keyed by title/key, so re-checking each field
  // is simpler than parsing error strings.
  const needsPrompt: PluginOptionSchema = {}
  for (const [key, fieldSchema] of Object.entries(declared)) {
    const check = validateUserConfig(
      { [key]: stored[key] } as PluginOptionValues,
      { [key]: fieldSchema },
    )
    if (!check.valid) {
      needsPrompt[key] = fieldSchema
    }
  }
  return needsPrompt
}
/**
* Substitute ${CLAUDE_PLUGIN_ROOT} and ${CLAUDE_PLUGIN_DATA} with their paths.
* On Windows, normalizes backslashes to forward slashes so shell commands
* don't interpret them as escape characters.
*
* ${CLAUDE_PLUGIN_ROOT} — version-scoped install dir (recreated on update)
* ${CLAUDE_PLUGIN_DATA} — persistent state dir (survives updates)
*
* Both patterns use the function-replacement form of .replace(): ROOT so
* `$`-patterns in NTFS paths ($$, $', $`, $&) aren't interpreted; DATA so
* getPluginDataDir (which lazily mkdirs) only runs when actually present.
*
* Used in MCP/LSP server command/args/env, hook commands, skill/agent content.
*/
export function substitutePluginVariables(
  value: string,
  plugin: { path: string; source?: string },
): string {
  // Backslashes in Windows paths would be treated as escapes by shell
  // commands, so normalize them to forward slashes there.
  const toShellSafe = (p: string) =>
    process.platform === 'win32' ? p.replace(/\\/g, '/') : p
  // Function-replacement form on purpose: a plain string replacement would
  // interpret $-sequences ($$, $&, $', $`) inside NTFS paths, and for DATA it
  // defers getPluginDataDir (which lazily mkdirs) until actually referenced.
  let result = value.replace(/\$\{CLAUDE_PLUGIN_ROOT\}/g, () =>
    toShellSafe(plugin.path),
  )
  const source = plugin.source
  if (source) {
    result = result.replace(/\$\{CLAUDE_PLUGIN_DATA\}/g, () =>
      toShellSafe(getPluginDataDir(source)),
    )
  }
  // Without a plugin source (e.g. hooks whose pluginRoot is a bare skill
  // root) ${CLAUDE_PLUGIN_DATA} is left literal.
  return result
}
/**
* Substitute ${user_config.KEY} with saved option values.
*
* Throws on missing keys — callers pass this only after `validateUserConfig`
* succeeded, so a miss here means a plugin references a key it never declared
* in its schema. That's a plugin authoring bug; failing loud surfaces it.
*
* Use `substituteUserConfigInContent` for skill/agent prose — it handles
* missing keys and sensitive-filtering instead of throwing.
*/
export function substituteUserConfigVariables(
  value: string,
  userConfig: PluginOptionValues,
): string {
  const substituteOne = (_full: string, key: string): string => {
    const resolved = userConfig[key]
    if (resolved === undefined) {
      // Callers validate first, so a miss here means the plugin referenced a
      // key it never declared in its schema — a plugin authoring bug that we
      // surface by failing loudly.
      throw new Error(
        `Missing required user configuration value: ${key}. ` +
          `This should have been validated before variable substitution.`,
      )
    }
    return String(resolved)
  }
  return value.replace(/\$\{user_config\.([^}]+)\}/g, substituteOne)
}
/**
* Content-safe variant for skill/agent prose. Differences from
* `substituteUserConfigVariables`:
*
* - Sensitive-marked keys substitute to a descriptive placeholder instead of
* the actual value — skill/agent content goes to the model prompt, and
* we don't put secrets in the model's context.
* - Unknown keys stay literal (no throw) — matches how `${VAR}` env refs
* behave today when the var is unset.
*
* A ref to a sensitive key produces obvious-looking output so plugin authors
* notice and move the ref into a hook/MCP env instead.
*/
export function substituteUserConfigInContent(
  content: string,
  options: PluginOptionValues,
  schema: PluginOptionSchema,
): string {
  return content.replace(/\$\{user_config\.([^}]+)\}/g, (whole, key) => {
    // Never inline secrets into model-visible prose — emit a loud placeholder
    // so plugin authors notice and move the ref into a hook/MCP env instead.
    if (schema[key]?.sensitive === true) {
      return `[sensitive option '${key}' not available in skill content]`
    }
    const resolved = options[key]
    // Unknown keys stay literal, mirroring unset ${VAR} env references.
    return resolved === undefined ? whole : String(resolved)
  })
}

View File

@@ -0,0 +1,20 @@
/**
* Plugin policy checks backed by managed settings (policySettings).
*
* Kept as a leaf module (only imports settings) to avoid circular dependencies
* — marketplaceHelpers.ts imports marketplaceManager.ts which transitively
* reaches most of the plugin subsystem.
*/
import { getSettingsForSource } from '../settings/settings.js'
/**
* Check if a plugin is force-disabled by org policy (managed-settings.json).
* Policy-blocked plugins cannot be installed or enabled by the user at any
* scope. Used as the single source of truth for policy blocking across the
* install chokepoint, enable op, and UI filters.
*/
export function isPluginBlockedByPolicy(pluginId: string): boolean {
  const managed = getSettingsForSource('policySettings')
  // Only an explicit `false` in managed settings blocks a plugin; absence or
  // any other value leaves the decision to the user's own settings.
  return managed?.enabledPlugins?.[pluginId] === false
}

View File

@@ -0,0 +1,341 @@
import { join } from 'path'
import { getCwd } from '../cwd.js'
import { logForDebugging } from '../debug.js'
import { logError } from '../log.js'
import type { SettingSource } from '../settings/constants.js'
import {
getInitialSettings,
getSettingsForSource,
updateSettingsForSource,
} from '../settings/settings.js'
import { getAddDirEnabledPlugins } from './addDirPluginSettings.js'
import {
getInMemoryInstalledPlugins,
migrateFromEnabledPlugins,
} from './installedPluginsManager.js'
import { getPluginById } from './marketplaceManager.js'
import {
type ExtendedPluginScope,
type PersistablePluginScope,
SETTING_SOURCE_TO_SCOPE,
scopeToSettingSource,
} from './pluginIdentifier.js'
import {
cacheAndRegisterPlugin,
registerPluginInstallation,
} from './pluginInstallationHelpers.js'
import { isLocalPluginSource, type PluginScope } from './schemas.js'
/**
* Checks for enabled plugins across all settings sources, including --add-dir.
*
* Uses getInitialSettings() which merges all sources with policy as
* highest priority, then layers --add-dir plugins underneath. This is the
* authoritative "is this plugin enabled?" check — don't delegate to
* getPluginEditableScopes() which serves a different purpose (scope tracking).
*
* @returns Array of plugin IDs (plugin@marketplace format) that are enabled
*/
export async function checkEnabledPlugins(): Promise<string[]> {
  const settings = getInitialSettings()
  // Map preserves insertion order, reproducing the original push/splice
  // semantics exactly: re-enabling after an explicit disable appends at the
  // end of the result.
  const enabled = new Map<string, true>()
  // Seed with --add-dir plugins (lowest priority). Truthy values count as
  // enabled; IDs must be in plugin@marketplace form.
  for (const [pluginId, value] of Object.entries(getAddDirEnabledPlugins())) {
    if (pluginId.includes('@') && value) {
      enabled.set(pluginId, true)
    }
  }
  // Merged settings (policy > local > project > user) override --add-dir.
  if (settings.enabledPlugins) {
    for (const [pluginId, value] of Object.entries(settings.enabledPlugins)) {
      if (!pluginId.includes('@')) {
        continue
      }
      if (value) {
        if (!enabled.has(pluginId)) {
          enabled.set(pluginId, true)
        }
      } else {
        // Explicit disable wins even over an --add-dir enable.
        enabled.delete(pluginId)
      }
    }
  }
  return Array.from(enabled.keys())
}
/**
* Gets the user-editable scope that "owns" each enabled plugin.
*
* Used for scope tracking: determining where to write back when a user
* enables/disables a plugin. Managed (policy) settings are processed first
* (lowest priority) because the user cannot edit them — the scope should
* resolve to the highest user-controllable source.
*
* NOTE: This is NOT the authoritative "is this plugin enabled?" check.
* Use checkEnabledPlugins() for that — it uses merged settings where
* policy has highest priority and can block user-enabled plugins.
*
* Precedence (lowest to highest):
* 0. addDir (--add-dir directories) - session-only, lowest priority
* 1. managed (policySettings) - not user-editable
* 2. user (userSettings)
* 3. project (projectSettings)
* 4. local (localSettings)
* 5. flag (flagSettings) - session-only, not persisted
*
* @returns Map of plugin ID to the user-editable scope that owns it
*/
export function getPluginEditableScopes(): Map<string, ExtendedPluginScope> {
  const scopes = new Map<string, ExtendedPluginScope>()
  const addDirPlugins = getAddDirEnabledPlugins()
  // --add-dir entries seed the map at lowest priority. They get the 'flag'
  // scope because both are session-only: nothing is written back.
  for (const [pluginId, value] of Object.entries(addDirPlugins)) {
    if (!pluginId.includes('@')) {
      continue
    }
    if (value === true) {
      scopes.set(pluginId, 'flag')
    } else if (value === false) {
      scopes.delete(pluginId)
    }
  }
  // Standard sources, lowest to highest priority — a later source overwrites
  // the scope recorded by an earlier one.
  const orderedSources: Array<{
    scope: ExtendedPluginScope
    source: SettingSource
  }> = [
    { scope: 'managed', source: 'policySettings' },
    { scope: 'user', source: 'userSettings' },
    { scope: 'project', source: 'projectSettings' },
    { scope: 'local', source: 'localSettings' },
    { scope: 'flag', source: 'flagSettings' },
  ]
  for (const { scope, source } of orderedSources) {
    const enabledPlugins = getSettingsForSource(source)?.enabledPlugins
    if (!enabledPlugins) {
      continue
    }
    for (const [pluginId, value] of Object.entries(enabledPlugins)) {
      // Skip IDs that aren't in plugin@marketplace form.
      if (!pluginId.includes('@')) {
        continue
      }
      // Surface when a standard source overrides an --add-dir plugin.
      if (pluginId in addDirPlugins && addDirPlugins[pluginId] !== value) {
        logForDebugging(
          `Plugin ${pluginId} from --add-dir (${addDirPlugins[pluginId]}) overridden by ${source} (${value})`,
        )
      }
      if (value === true) {
        scopes.set(pluginId, scope)
      } else if (value === false) {
        // Explicitly disabled at this scope — drop it entirely.
        scopes.delete(pluginId)
      }
      // Non-boolean values (e.g. future version strings) are ignored for now.
    }
  }
  logForDebugging(
    `Found ${scopes.size} enabled plugins with scopes: ${Array.from(
      scopes.entries(),
    )
      .map(([id, scope]) => `${id}(${scope})`)
      .join(', ')}`,
  )
  return scopes
}
/**
* Check if a scope is persistable (not session-only).
* @param scope The scope to check
* @returns true if the scope should be persisted to installed_plugins.json
*/
export function isPersistableScope(
  scope: ExtendedPluginScope,
): scope is PersistablePluginScope {
  // 'flag' is session-only; every other scope persists to
  // installed_plugins.json.
  if (scope === 'flag') {
    return false
  }
  return true
}
/**
* Convert SettingSource to plugin scope.
* @param source The settings source
* @returns The corresponding plugin scope
*/
export function settingSourceToScope(
  source: SettingSource,
): ExtendedPluginScope {
  // Pure table lookup — the mapping lives in one place (pluginIdentifier.ts).
  const scope = SETTING_SOURCE_TO_SCOPE[source]
  return scope
}
/**
* Gets the list of currently installed plugins
* Reads from installed_plugins.json which tracks global installation state.
* Automatically runs migration on first call if needed.
*
* Always uses V2 format and initializes the in-memory session state
* (which triggers V1→V2 migration if needed).
*
* @returns Array of installed plugin IDs
*/
export async function getInstalledPlugins(): Promise<string[]> {
  // Kick off the settings.json → installed_plugins.json sync without
  // blocking startup; failures are logged and otherwise ignored.
  void migrateFromEnabledPlugins().catch(error => {
    logError(error)
  })
  // Reading V2 state initializes the in-memory session copy, which performs
  // the V1→V2 migration when needed.
  const { plugins } = getInMemoryInstalledPlugins()
  const pluginIds = Object.keys(plugins)
  logForDebugging(`Found ${pluginIds.length} installed plugins`)
  return pluginIds
}
/**
* Finds plugins that are enabled but not installed
* @param enabledPlugins Array of enabled plugin IDs
* @returns Array of missing plugin IDs
*/
export async function findMissingPlugins(
enabledPlugins: string[],
): Promise<string[]> {
try {
const installedPlugins = await getInstalledPlugins()
// Filter to not-installed synchronously, then look up all in parallel.
// Results are collected in original enabledPlugins order.
const notInstalled = enabledPlugins.filter(
id => !installedPlugins.includes(id),
)
const lookups = await Promise.all(
notInstalled.map(async pluginId => {
try {
const plugin = await getPluginById(pluginId)
return { pluginId, found: plugin !== null && plugin !== undefined }
} catch (error) {
logForDebugging(
`Failed to check plugin ${pluginId} in marketplace: ${error}`,
)
// Plugin doesn't exist in any marketplace, will be handled as an error
return { pluginId, found: false }
}
}),
)
const missing = lookups
.filter(({ found }) => found)
.map(({ pluginId }) => pluginId)
return missing
} catch (error) {
logError(error)
return []
}
}
/**
* Result of plugin installation attempt
*/
export type PluginInstallResult = {
  /** Plugin IDs that were registered and enabled in settings. */
  installed: string[]
  /** Plugins that could not be installed, with a human-readable reason. */
  failed: Array<{ name: string; error: string }>
}
/**
 * Installation scope type for install functions (excludes 'managed' which is read-only)
 */
type InstallableScope = Exclude<PluginScope, 'managed'>
/**
* Installs the selected plugins
* @param pluginsToInstall Array of plugin IDs to install
* @param onProgress Optional callback for installation progress
* @param scope Installation scope: user, project, or local (defaults to 'user')
* @returns Installation results with succeeded and failed plugins
*/
export async function installSelectedPlugins(
pluginsToInstall: string[],
onProgress?: (name: string, index: number, total: number) => void,
scope: InstallableScope = 'user',
): Promise<PluginInstallResult> {
// Get projectPath for non-user scopes
const projectPath = scope !== 'user' ? getCwd() : undefined
// Get the correct settings source for this scope
const settingSource = scopeToSettingSource(scope)
const settings = getSettingsForSource(settingSource)
const updatedEnabledPlugins = { ...settings?.enabledPlugins }
const installed: string[] = []
const failed: Array<{ name: string; error: string }> = []
for (let i = 0; i < pluginsToInstall.length; i++) {
const pluginId = pluginsToInstall[i]
if (!pluginId) continue
if (onProgress) {
onProgress(pluginId, i + 1, pluginsToInstall.length)
}
try {
const pluginInfo = await getPluginById(pluginId)
if (!pluginInfo) {
failed.push({
name: pluginId,
error: 'Plugin not found in any marketplace',
})
continue
}
// Cache the plugin if it's from an external source
const { entry, marketplaceInstallLocation } = pluginInfo
if (!isLocalPluginSource(entry.source)) {
// External plugin - cache and register it with scope
await cacheAndRegisterPlugin(pluginId, entry, scope, projectPath)
} else {
// Local plugin - just register it with the install path and scope
registerPluginInstallation(
{
pluginId,
installPath: join(marketplaceInstallLocation, entry.source),
version: entry.version,
},
scope,
projectPath,
)
}
// Mark as enabled in settings
updatedEnabledPlugins[pluginId] = true
installed.push(pluginId)
} catch (error) {
const errorMessage =
error instanceof Error ? error.message : String(error)
failed.push({ name: pluginId, error: errorMessage })
logError(error)
}
}
// Update settings with newly enabled plugins using the correct settings source
updateSettingsForSource(settingSource, {
...settings,
enabledPlugins: updatedEnabledPlugins,
})
return { installed, failed }
}

View File

@@ -0,0 +1,157 @@
/**
* Plugin Version Calculation Module
*
* Handles version calculation for plugins from various sources.
* Versions are used for versioned cache paths and update detection.
*
* Version sources (in order of preference):
* 1. Explicit version from plugin.json
* 2. Git commit SHA (for git/github sources)
* 3. Fallback timestamp for local sources
*/
import { createHash } from 'crypto'
import { logForDebugging } from '../debug.js'
import { getHeadForDir } from '../git/gitFilesystem.js'
import type { PluginManifest, PluginSource } from './schemas.js'
/**
* Calculate the version for a plugin based on its source.
*
* Version sources (in order of priority):
* 1. plugin.json version field (highest priority)
* 2. Provided version (typically from marketplace entry)
* 3. Git commit SHA from install path
* 4. 'unknown' as last resort
*
* @param pluginId - Plugin identifier (e.g., "plugin@marketplace")
* @param source - Plugin source configuration (used for git-subdir path hashing)
* @param manifest - Optional plugin manifest with version field
* @param installPath - Optional path to installed plugin (for git SHA extraction)
* @param providedVersion - Optional version from marketplace entry or caller
* @param gitCommitSha - Optional pre-resolved git SHA (for sources like
* git-subdir where the clone is discarded and the install path has no .git)
* @returns Version string (semver, short SHA, or 'unknown')
*/
export async function calculatePluginVersion(
  pluginId: string,
  source: PluginSource,
  manifest?: PluginManifest,
  installPath?: string,
  providedVersion?: string,
  gitCommitSha?: string,
): Promise<string> {
  // Priority 1: explicit version declared in plugin.json.
  if (manifest?.version) {
    logForDebugging(
      `Using manifest version for ${pluginId}: ${manifest.version}`,
    )
    return manifest.version
  }
  // Priority 2: version supplied by the caller (typically marketplace entry).
  if (providedVersion) {
    logForDebugging(
      `Using provided version for ${pluginId}: ${providedVersion}`,
    )
    return providedVersion
  }
  // Priority 3: a git SHA the caller resolved before discarding the clone
  // (e.g. git-subdir, where the install path has no .git).
  if (gitCommitSha) {
    const shortSha = gitCommitSha.substring(0, 12)
    const isGitSubdir =
      typeof source === 'object' && source.source === 'git-subdir'
    if (isGitSubdir) {
      // git-subdir: fold the subdir path into the version so cache keys
      // differ when marketplace.json's `path` changes but the monorepo SHA
      // doesn't. Without this, two plugins at different subdirs of the same
      // commit collide at cache/<m>/<p>/<sha>/ and serve each other's trees.
      //
      // Normalization MUST stay byte-for-byte identical to the squashfs
      // cron (_validate_subdir in plugins_official_squashfs/job.py):
      // 1. backslash → forward slash
      // 2. strip one leading `./`
      // 3. strip all trailing `/`
      // 4. UTF-8 sha256, first 8 hex chars
      const normPath = source.path
        .replace(/\\/g, '/')
        .replace(/^\.\//, '')
        .replace(/\/+$/, '')
      const pathHash = createHash('sha256')
        .update(normPath)
        .digest('hex')
        .substring(0, 8)
      const subdirVersion = `${shortSha}-${pathHash}`
      logForDebugging(
        `Using git-subdir SHA+path version for ${pluginId}: ${subdirVersion} (path=${normPath})`,
      )
      return subdirVersion
    }
    logForDebugging(`Using pre-resolved git SHA for ${pluginId}: ${shortSha}`)
    return shortSha
  }
  // Priority 4: read HEAD from the install path, if it is a git checkout.
  if (installPath) {
    const headSha = await getGitCommitSha(installPath)
    if (headSha) {
      const shortSha = headSha.substring(0, 12)
      logForDebugging(`Using git SHA for ${pluginId}: ${shortSha}`)
      return shortSha
    }
  }
  // Priority 5: nothing to go on.
  logForDebugging(`No version found for ${pluginId}, using 'unknown'`)
  return 'unknown'
}
/**
* Get the git commit SHA for a directory.
*
* @param dirPath - Path to directory (should be a git repository)
* @returns Full commit SHA or null if not a git repo
*/
export function getGitCommitSha(dirPath: string): Promise<string | null> {
  // Thin delegation to the git filesystem helper; per the contract above it
  // resolves to null when dirPath is not a git repository.
  const headPromise = getHeadForDir(dirPath)
  return headPromise
}
/**
* Extract version from a versioned cache path.
*
* Given a path like `~/.claude/plugins/cache/marketplace/plugin/1.0.0`,
* extracts and returns `1.0.0`.
*
* @param installPath - Full path to plugin installation
* @returns Version string from path, or null if not a versioned path
*/
export function getVersionFromPath(installPath: string): string | null {
  // Versioned paths have format: .../plugins/cache/marketplace/plugin/version/
  //
  // Split on BOTH separators: on win32, path.join produces backslashes, and
  // the previous '/'-only split made every Windows cache path unrecognizable
  // (this file already normalizes win32 backslashes elsewhere, e.g. the
  // git-subdir path hashing above).
  const parts = installPath.split(/[\\/]/).filter(Boolean)
  // Find the 'plugins/cache' anchor to determine depth.
  const cacheIndex = parts.findIndex(
    (part, i) => part === 'cache' && parts[i - 1] === 'plugins',
  )
  if (cacheIndex === -1) {
    return null
  }
  // A versioned path has 3 components after 'cache': marketplace/plugin/version.
  const componentsAfterCache = parts.slice(cacheIndex + 1)
  if (componentsAfterCache.length >= 3) {
    return componentsAfterCache[2] || null
  }
  return null
}
/**
* Check if a path is a versioned plugin path.
*
* @param path - Path to check
* @returns True if path follows versioned structure
*/
export function isVersionedPath(path: string): boolean {
  // A path is versioned exactly when a version component can be extracted.
  const version = getVersionFromPath(path)
  return version !== null
}

View File

@@ -0,0 +1,265 @@
/**
* Marketplace reconciler — makes known_marketplaces.json consistent with
* declared intent in settings.
*
* Two layers:
* - diffMarketplaces(): comparison (reads .git for worktree canonicalization, memoized)
* - reconcileMarketplaces(): bundled diff + install (I/O, idempotent, additive)
*/
import isEqual from 'lodash-es/isEqual.js'
import { isAbsolute, resolve } from 'path'
import { getOriginalCwd } from '../../bootstrap/state.js'
import { logForDebugging } from '../debug.js'
import { errorMessage } from '../errors.js'
import { pathExists } from '../file.js'
import { findCanonicalGitRoot } from '../git.js'
import { logError } from '../log.js'
import {
addMarketplaceSource,
type DeclaredMarketplace,
getDeclaredMarketplaces,
loadKnownMarketplacesConfig,
} from './marketplaceManager.js'
import {
isLocalMarketplaceSource,
type KnownMarketplacesFile,
type MarketplaceSource,
} from './schemas.js'
/**
 * Result of comparing declared marketplaces (settings) against materialized
 * state (known_marketplaces.json). Produced by diffMarketplaces().
 */
export type MarketplaceDiff = {
  /** Declared in settings, absent from known_marketplaces.json */
  missing: string[]
  /** Present in both, but settings source ≠ JSON source (settings wins) */
  sourceChanged: Array<{
    name: string
    declaredSource: MarketplaceSource
    materializedSource: MarketplaceSource
  }>
  /** Present in both, sources match */
  upToDate: string[]
}
/**
* Compare declared intent (settings) against materialized state (JSON).
*
* Resolves relative directory/file paths in `declared` before comparing,
* so project settings with `./path` match JSON's absolute path. Path
* resolution reads `.git` to canonicalize worktree paths (memoized).
*/
export function diffMarketplaces(
  declared: Record<string, DeclaredMarketplace>,
  materialized: KnownMarketplacesFile,
  opts?: { projectRoot?: string },
): MarketplaceDiff {
  const diff: MarketplaceDiff = {
    missing: [],
    sourceChanged: [],
    upToDate: [],
  }
  for (const [name, intent] of Object.entries(declared)) {
    const materializedEntry = materialized[name]
    // Resolve relative paths so project settings with `./path` compare equal
    // to the JSON's absolute path.
    const declaredSource = normalizeSource(intent.source, opts?.projectRoot)
    if (!materializedEntry) {
      diff.missing.push(name)
      continue
    }
    if (intent.sourceIsFallback) {
      // Fallback sources are only a default for installation: if seed,
      // prior install, or mirror materialized this marketplace under ANY
      // source, leave it alone. Comparing here would flag sourceChanged →
      // re-clone → stomp the materialized content.
      diff.upToDate.push(name)
      continue
    }
    if (isEqual(declaredSource, materializedEntry.source)) {
      diff.upToDate.push(name)
    } else {
      diff.sourceChanged.push({
        name,
        declaredSource,
        materializedSource: materializedEntry.source,
      })
    }
  }
  return diff
}
export type ReconcileOptions = {
  /** Skip a declared marketplace. Used by zip-cache mode for unsupported source types. */
  skip?: (name: string, source: MarketplaceSource) => boolean
  /** Observe per-marketplace progress while reconciling. */
  onProgress?: (event: ReconcileProgressEvent) => void
}
/** Progress event emitted while reconcileMarketplaces() works through its diff. */
export type ReconcileProgressEvent =
  | {
      type: 'installing'
      name: string
      action: 'install' | 'update'
      index: number
      total: number
    }
  | { type: 'installed'; name: string; alreadyMaterialized: boolean }
  | { type: 'failed'; name: string; error: string }
/** Outcome of reconcileMarketplaces(): marketplace names bucketed by result. */
export type ReconcileResult = {
  installed: string[]
  updated: string[]
  failed: Array<{ name: string; error: string }>
  upToDate: string[]
  skipped: string[]
}
/**
 * Make known_marketplaces.json consistent with declared intent.
 * Idempotent. Additive only (never deletes). Does not touch AppState.
 *
 * Flow: diff declared settings against the materialized JSON, build an
 * install/update work list, filter it via opts.skip and the dead-local-path
 * guard, then run addMarketplaceSource() sequentially with per-item progress
 * events. A read failure of known_marketplaces.json is treated as "empty"
 * rather than aborting — reconcile is additive, so the worst case is
 * redundant installs.
 *
 * @param opts Optional skip predicate and progress callback.
 * @returns Names bucketed by outcome; `failed` carries per-name error text.
 */
export async function reconcileMarketplaces(
  opts?: ReconcileOptions,
): Promise<ReconcileResult> {
  const declared = getDeclaredMarketplaces()
  if (Object.keys(declared).length === 0) {
    return { installed: [], updated: [], failed: [], upToDate: [], skipped: [] }
  }
  let materialized: KnownMarketplacesFile
  try {
    materialized = await loadKnownMarketplacesConfig()
  } catch (e) {
    // Unreadable JSON → treat as empty; everything declared re-installs.
    logError(e)
    materialized = {}
  }
  const diff = diffMarketplaces(declared, materialized, {
    projectRoot: getOriginalCwd(),
  })
  type WorkItem = {
    name: string
    source: MarketplaceSource
    action: 'install' | 'update'
  }
  // Work list preserves diff order: fresh installs first, then source updates.
  const work: WorkItem[] = [
    ...diff.missing.map(
      (name): WorkItem => ({
        name,
        source: normalizeSource(declared[name]!.source),
        action: 'install',
      }),
    ),
    ...diff.sourceChanged.map(
      ({ name, declaredSource }): WorkItem => ({
        name,
        source: declaredSource,
        action: 'update',
      }),
    ),
  ]
  const skipped: string[] = []
  const toProcess: WorkItem[] = []
  for (const item of work) {
    if (opts?.skip?.(item.name, item.source)) {
      skipped.push(item.name)
      continue
    }
    // For sourceChanged local-path entries, skip if the declared path doesn't
    // exist. Guards multi-checkout scenarios where normalizeSource can't
    // canonicalize and produces a dead path — the materialized entry may still
    // be valid; addMarketplaceSource would fail anyway, so skipping avoids a
    // noisy "failed" event and preserves the working entry. Missing entries
    // are NOT skipped (nothing to preserve; the user should see the error).
    if (
      item.action === 'update' &&
      isLocalMarketplaceSource(item.source) &&
      !(await pathExists(item.source.path))
    ) {
      logForDebugging(
        `[reconcile] '${item.name}' declared path does not exist; keeping materialized entry`,
      )
      skipped.push(item.name)
      continue
    }
    toProcess.push(item)
  }
  if (toProcess.length === 0) {
    return {
      installed: [],
      updated: [],
      failed: [],
      upToDate: diff.upToDate,
      skipped,
    }
  }
  logForDebugging(
    `[reconcile] ${toProcess.length} marketplace(s): ${toProcess.map(w => `${w.name}(${w.action})`).join(', ')}`,
  )
  const installed: string[] = []
  const updated: string[] = []
  const failed: ReconcileResult['failed'] = []
  // Sequential on purpose: progress events must arrive in order, and each
  // install may touch the shared known_marketplaces.json.
  for (let i = 0; i < toProcess.length; i++) {
    const { name, source, action } = toProcess[i]!
    opts?.onProgress?.({
      type: 'installing',
      name,
      action,
      index: i + 1,
      total: toProcess.length,
    })
    try {
      // addMarketplaceSource is source-idempotent — same source returns
      // alreadyMaterialized:true without cloning. For 'update' (source
      // changed), the new source won't match existing → proceeds with clone
      // and overwrites the old JSON entry.
      const result = await addMarketplaceSource(source)
      if (action === 'install') installed.push(name)
      else updated.push(name)
      opts?.onProgress?.({
        type: 'installed',
        name,
        alreadyMaterialized: result.alreadyMaterialized,
      })
    } catch (e) {
      // One failure doesn't abort the run; remaining items still process.
      const error = errorMessage(e)
      failed.push({ name, error })
      opts?.onProgress?.({ type: 'failed', name, error })
      logError(e)
    }
  }
  return { installed, updated, failed, upToDate: diff.upToDate, skipped }
}
/**
 * Resolve relative directory/file paths so sources compare stably.
 * Project-scope settings may declare project-relative paths, while
 * known_marketplaces.json stores absolute paths.
 *
 * For git worktrees, resolution is anchored at the main checkout (canonical
 * root) rather than the worktree cwd. Project settings are checked into git,
 * so `./foo` means "relative to this repo" — but known_marketplaces.json is
 * user-global with one entry per marketplace name. Anchoring at the worktree
 * cwd would let each worktree session overwrite the shared entry with its
 * own absolute path, and deleting that worktree would leave a dead
 * installLocation. The canonical root stays stable across all worktrees.
 *
 * @param source      Declared marketplace source; only directory/file kinds
 *                    with relative paths are rewritten.
 * @param projectRoot Base for resolution; defaults to the original cwd.
 */
function normalizeSource(
  source: MarketplaceSource,
  projectRoot?: string,
): MarketplaceSource {
  if (source.source !== 'directory' && source.source !== 'file') {
    return source
  }
  if (isAbsolute(source.path)) {
    return source
  }
  const fallbackBase = projectRoot ?? getOriginalCwd()
  const anchor = findCanonicalGitRoot(fallbackBase) ?? fallbackBase
  return { ...source, path: resolve(anchor, source.path) }
}

View File

@@ -0,0 +1,215 @@
/**
* Layer-3 refresh primitive: swap active plugin components in the running session.
*
* Three-layer model (see reconciler.ts for Layer-2):
* - Layer 1: intent (settings)
* - Layer 2: materialization (~/.claude/plugins/) — reconcileMarketplaces()
* - Layer 3: active components (AppState) — this file
*
* Called from:
* - /reload-plugins command (interactive, user-initiated)
* - print.ts refreshPluginState() (headless, auto before first query with SYNC_PLUGIN_INSTALL)
* - performBackgroundPluginInstallations() (background, auto after new marketplace install)
*
* NOT called from:
* - useManagePlugins needsRefresh effect — interactive mode shows a notification;
* user explicitly runs /reload-plugins (PR 5c)
* - /plugin menu — sets needsRefresh, user runs /reload-plugins (PR 5b)
*/
import { getOriginalCwd } from '../../bootstrap/state.js'
import type { Command } from '../../commands.js'
import { reinitializeLspServerManager } from '../../services/lsp/manager.js'
import type { AppState } from '../../state/AppState.js'
import type { AgentDefinitionsResult } from '../../tools/AgentTool/loadAgentsDir.js'
import { getAgentDefinitionsWithOverrides } from '../../tools/AgentTool/loadAgentsDir.js'
import type { PluginError } from '../../types/plugin.js'
import { logForDebugging } from '../debug.js'
import { errorMessage } from '../errors.js'
import { logError } from '../log.js'
import { clearAllCaches } from './cacheUtils.js'
import { getPluginCommands } from './loadPluginCommands.js'
import { loadPluginHooks } from './loadPluginHooks.js'
import { loadPluginLspServers } from './lspPluginIntegration.js'
import { loadPluginMcpServers } from './mcpPluginIntegration.js'
import { clearPluginCacheExclusions } from './orphanedPluginFilter.js'
import { loadAllPlugins } from './pluginLoader.js'
/** Functional state setter for AppState (React setState-style updater). */
type SetAppState = (updater: (prev: AppState) => AppState) => void
/**
 * Counts and refreshed data returned by refreshActivePlugins() — counts for
 * user-facing display, plus the fresh data for callers that mirror it
 * outside AppState.
 */
export type RefreshActivePluginsResult = {
  enabled_count: number
  disabled_count: number
  command_count: number
  agent_count: number
  hook_count: number
  mcp_count: number
  /** LSP servers provided by enabled plugins. reinitializeLspServerManager()
   * is called unconditionally so the manager picks these up (no-op if
   * manager was never initialized). */
  lsp_count: number
  /** Plugin-load errors plus 1 if loadPluginHooks threw. */
  error_count: number
  /** The refreshed agent definitions, for callers (e.g. print.ts) that also
   * maintain a local mutable reference outside AppState. */
  agentDefinitions: AgentDefinitionsResult
  /** The refreshed plugin commands, same rationale as agentDefinitions. */
  pluginCommands: Command[]
}
/**
 * Refresh all active plugin components: commands, agents, hooks, MCP-reconnect
 * trigger, AppState plugin arrays. Clears ALL plugin caches (unlike the old
 * needsRefresh path which only cleared loadAllPlugins and returned stale data
 * from downstream memoized loaders).
 *
 * Consumes plugins.needsRefresh (sets to false).
 * Increments mcp.pluginReconnectKey so useManageMCPConnections effects re-run
 * and pick up new plugin MCP servers.
 *
 * LSP: if plugins now contribute LSP servers, reinitializeLspServerManager()
 * re-reads config. Servers are lazy-started so this is just config parsing.
 *
 * @param setAppState Functional updater; applied exactly once with all
 *   refreshed plugin data (enabled/disabled lists, commands, merged errors,
 *   bumped reconnect key).
 * @returns Counts for display plus the refreshed agentDefinitions and
 *   pluginCommands for callers that mirror them outside AppState.
 */
export async function refreshActivePlugins(
  setAppState: SetAppState,
): Promise<RefreshActivePluginsResult> {
  logForDebugging('refreshActivePlugins: clearing all plugin caches')
  clearAllCaches()
  // Orphan exclusions are session-frozen by default, but /reload-plugins is
  // an explicit "disk changed, re-read it" signal — recompute them too.
  clearPluginCacheExclusions()
  // Sequence the full load before cache-only consumers. Before #23693 all
  // three shared loadAllPlugins()'s memoize promise so Promise.all was a
  // no-op race. After #23693 getPluginCommands/getAgentDefinitions call
  // loadAllPluginsCacheOnly (separate memoize) — racing them means they
  // read installed_plugins.json before loadAllPlugins() has cloned+cached
  // the plugin, returning plugin-cache-miss. loadAllPlugins warms the
  // cache-only memoize on completion, so the awaits below are ~free.
  const pluginResult = await loadAllPlugins()
  const [pluginCommands, agentDefinitions] = await Promise.all([
    getPluginCommands(),
    getAgentDefinitionsWithOverrides(getOriginalCwd()),
  ])
  const { enabled, disabled, errors } = pluginResult
  // Populate mcpServers/lspServers on each enabled plugin. These are lazy
  // cache slots NOT filled by loadAllPlugins() — they're written later by
  // extractMcpServersFromPlugins/getPluginLspServers, which races with this.
  // Loading here gives accurate metrics AND warms the cache slots so the MCP
  // connection manager (triggered by pluginReconnectKey bump) sees the servers
  // without re-parsing manifests. Errors are pushed to the shared errors array.
  const [mcpCounts, lspCounts] = await Promise.all([
    Promise.all(
      enabled.map(async p => {
        if (p.mcpServers) return Object.keys(p.mcpServers).length
        const servers = await loadPluginMcpServers(p, errors)
        if (servers) p.mcpServers = servers
        return servers ? Object.keys(servers).length : 0
      }),
    ),
    Promise.all(
      enabled.map(async p => {
        if (p.lspServers) return Object.keys(p.lspServers).length
        const servers = await loadPluginLspServers(p, errors)
        if (servers) p.lspServers = servers
        return servers ? Object.keys(servers).length : 0
      }),
    ),
  ])
  const mcp_count = mcpCounts.reduce((sum, n) => sum + n, 0)
  const lsp_count = lspCounts.reduce((sum, n) => sum + n, 0)
  setAppState(prev => ({
    ...prev,
    plugins: {
      ...prev.plugins,
      enabled,
      disabled,
      commands: pluginCommands,
      errors: mergePluginErrors(prev.plugins.errors, errors),
      needsRefresh: false,
    },
    agentDefinitions,
    mcp: {
      ...prev.mcp,
      pluginReconnectKey: prev.mcp.pluginReconnectKey + 1,
    },
  }))
  // Re-initialize LSP manager so newly-loaded plugin LSP servers are picked
  // up. No-op if LSP was never initialized (headless subcommand path).
  // Unconditional so removing the last LSP plugin also clears stale config.
  // Fixes issue #15521: LSP manager previously read a stale memoized
  // loadAllPlugins() result from before marketplaces were reconciled.
  reinitializeLspServerManager()
  // clearAllCaches() prunes removed-plugin hooks; this does the FULL swap
  // (adds hooks from newly-enabled plugins too). Catching here so
  // hook_load_failed can feed error_count; a failure doesn't lose the
  // plugin/command/agent data above (hooks go to STATE.registeredHooks, not
  // AppState).
  let hook_load_failed = false
  try {
    await loadPluginHooks()
  } catch (e) {
    hook_load_failed = true
    logError(e)
    logForDebugging(
      `refreshActivePlugins: loadPluginHooks failed: ${errorMessage(e)}`,
    )
  }
  // Count declared hooks across all matchers of all events of all plugins.
  const hook_count = enabled.reduce((sum, p) => {
    if (!p.hooksConfig) return sum
    return (
      sum +
      Object.values(p.hooksConfig).reduce(
        (s, matchers) =>
          s + (matchers?.reduce((h, m) => h + m.hooks.length, 0) ?? 0),
        0,
      )
    )
  }, 0)
  logForDebugging(
    `refreshActivePlugins: ${enabled.length} enabled, ${pluginCommands.length} commands, ${agentDefinitions.allAgents.length} agents, ${hook_count} hooks, ${mcp_count} MCP, ${lsp_count} LSP`,
  )
  return {
    enabled_count: enabled.length,
    disabled_count: disabled.length,
    command_count: pluginCommands.length,
    agent_count: agentDefinitions.allAgents.length,
    hook_count,
    mcp_count,
    lsp_count,
    error_count: errors.length + (hook_load_failed ? 1 : 0),
    agentDefinitions,
    pluginCommands,
  }
}
/**
 * Merge fresh plugin-load errors with existing errors, keeping LSP and
 * plugin-component errors recorded by other systems and deduplicating
 * against the fresh set. Same logic as refreshPlugins()/updatePluginState(),
 * extracted so refresh.ts doesn't leave those errors stranded.
 */
function mergePluginErrors(
  existing: PluginError[],
  fresh: PluginError[],
): PluginError[] {
  const freshKeys = new Set<string>()
  for (const err of fresh) {
    freshKeys.add(errorKey(err))
  }
  // Keep only errors owned by other systems, and only when the fresh load
  // didn't re-report the same error.
  const carriedOver: PluginError[] = []
  for (const err of existing) {
    const ownedElsewhere =
      err.source === 'lsp-manager' || err.source.startsWith('plugin:')
    if (ownedElsewhere && !freshKeys.has(errorKey(err))) {
      carriedOver.push(err)
    }
  }
  return [...carriedOver, ...fresh]
}
/** Stable dedup key: generic errors include their message, typed ones don't. */
function errorKey(e: PluginError): string {
  if (e.type === 'generic-error') {
    return `generic-error:${e.source}:${e.error}`
  }
  return `${e.type}:${e.source}`
}

1681
src/utils/plugins/schemas.ts Normal file

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,903 @@
import type { Dirent, Stats } from 'fs'
import { readdir, readFile, stat } from 'fs/promises'
import * as path from 'path'
import { z } from 'zod/v4'
import { errorMessage, getErrnoCode, isENOENT } from '../errors.js'
import { FRONTMATTER_REGEX } from '../frontmatterParser.js'
import { jsonParse } from '../slowOperations.js'
import { parseYaml } from '../yaml.js'
import {
PluginHooksSchema,
PluginManifestSchema,
PluginMarketplaceEntrySchema,
PluginMarketplaceSchema,
} from './schemas.js'
/**
 * Fields that belong in marketplace.json entries (PluginMarketplaceEntrySchema)
 * but not plugin.json (PluginManifestSchema). Plugin authors reasonably copy
 * one into the other. Surfaced as warnings by `claude plugin validate` since
 * they're a known confusion point — the load path silently strips all unknown
 * keys via zod's default behavior, so they're harmless at runtime but worth
 * flagging to authors.
 */
const MARKETPLACE_ONLY_MANIFEST_FIELDS = new Set([
  'category',
  'source',
  'tags',
  'strict',
  'id',
])
/**
 * Outcome of validating one file. `success` reflects errors only — warnings
 * never fail a validation.
 */
export type ValidationResult = {
  success: boolean
  errors: ValidationError[]
  warnings: ValidationWarning[]
  /** Path of the validated file, as reported back to the user. */
  filePath: string
  fileType: 'plugin' | 'marketplace' | 'skill' | 'agent' | 'command' | 'hooks'
}
/** A hard failure — the file is malformed or would misbehave at load time. */
export type ValidationError = {
  /** Location within the file (e.g. `plugins[0].source`), or 'file'/'json'. */
  path: string
  message: string
  /** Errno code (read failures) or zod issue code (schema failures), when known. */
  code?: string
}
/** Advisory only — surfaced to authors, never fails validation. */
export type ValidationWarning = {
  path: string
  message: string
}
/**
 * Classify a manifest file as plugin.json or marketplace.json.
 * Exact filenames win; otherwise anything inside a .claude-plugin/
 * directory is assumed to be a plugin manifest.
 */
function detectManifestType(
  filePath: string,
): 'plugin' | 'marketplace' | 'unknown' {
  const base = path.basename(filePath)
  if (base === 'plugin.json') return 'plugin'
  if (base === 'marketplace.json') return 'marketplace'
  const parentDir = path.basename(path.dirname(filePath))
  // Inside .claude-plugin/ the file is most likely a plugin manifest.
  return parentDir === '.claude-plugin' ? 'plugin' : 'unknown'
}
/**
 * Flatten a ZodError into ValidationError records: dotted issue paths
 * (or 'root' for a top-level issue) plus the zod message and code.
 */
function formatZodErrors(zodError: z.ZodError): ValidationError[] {
  const formatted: ValidationError[] = []
  for (const issue of zodError.issues) {
    const dotted = issue.path.join('.')
    formatted.push({
      path: dotted || 'root',
      message: issue.message,
      code: issue.code,
    })
  }
  return formatted
}
/**
 * Check for parent-directory segments ('..') in a path string.
 *
 * For plugin.json component paths this is a security concern (escaping the plugin dir).
 * For marketplace.json source paths it's almost always a resolution-base misunderstanding:
 * paths resolve from the marketplace repo root, not from marketplace.json itself, so the
 * '..' a user added to "climb out of .claude-plugin/" is unnecessary. Callers pass `hint`
 * to attach the right explanation.
 *
 * Only whole `..` path segments are flagged (the path is split on both POSIX
 * and Windows separators). A raw substring test would false-positive on legal
 * filenames like "notes..md" or "foo..bar".
 *
 * @param p      Path string taken from a manifest field (untrusted author input).
 * @param field  Field location used in the reported error, e.g. `commands[0]`.
 * @param errors Accumulator the error (if any) is pushed onto.
 * @param hint   Optional caller-supplied explanation appended to the message.
 */
function checkPathTraversal(
  p: string,
  field: string,
  errors: ValidationError[],
  hint?: string,
): void {
  // Split on '/' and '\' so a Windows-style 'a\..\b' is caught too; only a
  // standalone '..' segment counts as traversal.
  const hasParentSegment = p.split(/[\\/]+/).includes('..')
  if (hasParentSegment) {
    errors.push({
      path: field,
      message: hint
        ? `Path contains "..": ${p}. ${hint}`
        : `Path contains ".." which could be a path traversal attempt: ${p}`,
    })
  }
}
// Shown when a marketplace plugin source contains '..'. Most users hit this because
// they expect paths to resolve relative to marketplace.json (inside .claude-plugin/),
// but resolution actually starts at the marketplace repo root — see gh-29485.
// Builds a tailored "use X instead of Y" suggestion from the user's own path
// rather than a hardcoded example (review feedback on #20895).
function marketplaceSourceHint(p: string): string {
  // Leading ../ segments exist only to "climb out of .claude-plugin/" —
  // unnecessary, since resolution already starts at the repo root. Drop them.
  // A mid-path '..' (rare) gets the generic example instead.
  const withoutLeadingParents = p.replace(/^(\.\.\/)+/, '')
  const corrected =
    withoutLeadingParents !== p
      ? `./${withoutLeadingParents}`
      : './plugins/my-plugin'
  const explanation =
    'Plugin source paths are resolved relative to the marketplace root ' +
    '(the directory containing .claude-plugin/), not relative to marketplace.json. '
  return `${explanation}Use "${corrected}" instead of "${p}".`
}
/**
 * Validate a plugin manifest file (plugin.json)
 *
 * Phases, in order (error ordering in the result follows this):
 *   1. read + JSON parse — hard failures return immediately
 *   2. pre-schema path-traversal checks on commands/agents/skills entries
 *   3. warn-and-strip marketplace-only fields
 *   4. strict schema validation
 *   5. best-practice warnings (kebab-case name, version/description/author)
 *
 * @param filePath Path to plugin.json; resolved to absolute for reporting.
 * @returns success=false iff any error was recorded; warnings never fail it.
 */
export async function validatePluginManifest(
  filePath: string,
): Promise<ValidationResult> {
  const errors: ValidationError[] = []
  const warnings: ValidationWarning[] = []
  const absolutePath = path.resolve(filePath)
  // Read file content — handle ENOENT / EISDIR / permission errors directly
  let content: string
  try {
    content = await readFile(absolutePath, { encoding: 'utf-8' })
  } catch (error: unknown) {
    const code = getErrnoCode(error)
    let message: string
    if (code === 'ENOENT') {
      message = `File not found: ${absolutePath}`
    } else if (code === 'EISDIR') {
      message = `Path is not a file: ${absolutePath}`
    } else {
      message = `Failed to read file: ${errorMessage(error)}`
    }
    return {
      success: false,
      errors: [{ path: 'file', message, code }],
      warnings: [],
      filePath: absolutePath,
      fileType: 'plugin',
    }
  }
  let parsed: unknown
  try {
    parsed = jsonParse(content)
  } catch (error) {
    return {
      success: false,
      errors: [
        {
          path: 'json',
          message: `Invalid JSON syntax: ${errorMessage(error)}`,
        },
      ],
      warnings: [],
      filePath: absolutePath,
      fileType: 'plugin',
    }
  }
  // Check for path traversal in the parsed JSON before schema validation
  // This ensures we catch security issues even if schema validation fails
  if (parsed && typeof parsed === 'object') {
    const obj = parsed as Record<string, unknown>
    // Check commands (manifest allows a single string or an array of strings)
    if (obj.commands) {
      const commands = Array.isArray(obj.commands)
        ? obj.commands
        : [obj.commands]
      commands.forEach((cmd, i) => {
        if (typeof cmd === 'string') {
          checkPathTraversal(cmd, `commands[${i}]`, errors)
        }
      })
    }
    // Check agents
    if (obj.agents) {
      const agents = Array.isArray(obj.agents) ? obj.agents : [obj.agents]
      agents.forEach((agent, i) => {
        if (typeof agent === 'string') {
          checkPathTraversal(agent, `agents[${i}]`, errors)
        }
      })
    }
    // Check skills
    if (obj.skills) {
      const skills = Array.isArray(obj.skills) ? obj.skills : [obj.skills]
      skills.forEach((skill, i) => {
        if (typeof skill === 'string') {
          checkPathTraversal(skill, `skills[${i}]`, errors)
        }
      })
    }
  }
  // Surface marketplace-only fields as a warning BEFORE validation flags
  // them. `claude plugin validate` is a developer tool — authors running it
  // want to know these fields don't belong here. But it's a warning, not an
  // error: the plugin loads fine at runtime (the base schema strips unknown
  // keys). We strip them here so the .strict() call below doesn't double-
  // report them as unrecognized-key errors on top of the targeted warnings.
  let toValidate = parsed
  if (typeof parsed === 'object' && parsed !== null) {
    const obj = parsed as Record<string, unknown>
    const strayKeys = Object.keys(obj).filter(k =>
      MARKETPLACE_ONLY_MANIFEST_FIELDS.has(k),
    )
    if (strayKeys.length > 0) {
      const stripped = { ...obj }
      for (const key of strayKeys) {
        delete stripped[key]
        warnings.push({
          path: key,
          message:
            `Field '${key}' belongs in the marketplace entry (marketplace.json), ` +
            `not plugin.json. It's harmless here but unused — Claude Code ` +
            `ignores it at load time.`,
        })
      }
      toValidate = stripped
    }
  }
  // Validate against schema (post-strip, so marketplace fields don't fail it).
  // We call .strict() locally here even though the base schema is lenient —
  // the runtime load path silently strips unknown keys for resilience, but
  // this is a developer tool and authors running it want typo feedback.
  const result = PluginManifestSchema().strict().safeParse(toValidate)
  if (!result.success) {
    errors.push(...formatZodErrors(result.error))
  }
  // Check for common issues and add warnings
  if (result.success) {
    const manifest = result.data
    // Warn if name isn't strict kebab-case. CC's schema only rejects spaces,
    // but the Claude.ai marketplace sync rejects non-kebab names. Surfacing
    // this here lets authors catch it in CI before the sync fails on them.
    if (!/^[a-z0-9]+(-[a-z0-9]+)*$/.test(manifest.name)) {
      warnings.push({
        path: 'name',
        message:
          `Plugin name "${manifest.name}" is not kebab-case. Claude Code accepts ` +
          `it, but the Claude.ai marketplace sync requires kebab-case ` +
          `(lowercase letters, digits, and hyphens only, e.g., "my-plugin").`,
      })
    }
    // Warn if no version specified
    if (!manifest.version) {
      warnings.push({
        path: 'version',
        message:
          'No version specified. Consider adding a version following semver (e.g., "1.0.0")',
      })
    }
    // Warn if no description
    if (!manifest.description) {
      warnings.push({
        path: 'description',
        message:
          'No description provided. Adding a description helps users understand what your plugin does',
      })
    }
    // Warn if no author
    if (!manifest.author) {
      warnings.push({
        path: 'author',
        message:
          'No author information provided. Consider adding author details for plugin attribution',
      })
    }
  }
  return {
    success: errors.length === 0,
    errors,
    warnings,
    filePath: absolutePath,
    fileType: 'plugin',
  }
}
/**
 * Validate a marketplace manifest file (marketplace.json)
 *
 * Mirrors validatePluginManifest's phases: read → JSON parse → pre-schema
 * path-traversal checks on plugin sources → strict schema validation →
 * advisory warnings. Additionally cross-checks local-source entry versions
 * against each plugin's own plugin.json, since the entry version is ignored
 * at install time and a mismatch is otherwise invisible.
 *
 * @param filePath Path to marketplace.json; resolved to absolute for reporting.
 * @returns success=false iff any error was recorded; warnings never fail it.
 */
export async function validateMarketplaceManifest(
  filePath: string,
): Promise<ValidationResult> {
  const errors: ValidationError[] = []
  const warnings: ValidationWarning[] = []
  const absolutePath = path.resolve(filePath)
  // Read file content — handle ENOENT / EISDIR / permission errors directly
  let content: string
  try {
    content = await readFile(absolutePath, { encoding: 'utf-8' })
  } catch (error: unknown) {
    const code = getErrnoCode(error)
    let message: string
    if (code === 'ENOENT') {
      message = `File not found: ${absolutePath}`
    } else if (code === 'EISDIR') {
      message = `Path is not a file: ${absolutePath}`
    } else {
      message = `Failed to read file: ${errorMessage(error)}`
    }
    return {
      success: false,
      errors: [{ path: 'file', message, code }],
      warnings: [],
      filePath: absolutePath,
      fileType: 'marketplace',
    }
  }
  let parsed: unknown
  try {
    parsed = jsonParse(content)
  } catch (error) {
    return {
      success: false,
      errors: [
        {
          path: 'json',
          message: `Invalid JSON syntax: ${errorMessage(error)}`,
        },
      ],
      warnings: [],
      filePath: absolutePath,
      fileType: 'marketplace',
    }
  }
  // Check for path traversal in plugin sources before schema validation
  // This ensures we catch security issues even if schema validation fails
  if (parsed && typeof parsed === 'object') {
    const obj = parsed as Record<string, unknown>
    if (Array.isArray(obj.plugins)) {
      obj.plugins.forEach((plugin: unknown, i: number) => {
        if (plugin && typeof plugin === 'object' && 'source' in plugin) {
          const source = (plugin as { source: unknown }).source
          // Check string sources (relative paths)
          if (typeof source === 'string') {
            checkPathTraversal(
              source,
              `plugins[${i}].source`,
              errors,
              marketplaceSourceHint(source),
            )
          }
          // Check object-source .path (git-subdir: subdirectory within the
          // remote repo, sparse-cloned). '..' here is a genuine traversal attempt
          // within the remote repo tree, not a marketplace-root misunderstanding —
          // keep the security framing (no marketplaceSourceHint). See #20895 review.
          if (
            source &&
            typeof source === 'object' &&
            'path' in source &&
            typeof (source as { path: unknown }).path === 'string'
          ) {
            checkPathTraversal(
              (source as { path: string }).path,
              `plugins[${i}].source.path`,
              errors,
            )
          }
        }
      })
    }
  }
  // Validate against schema.
  // The base schemas are lenient (strip unknown keys) for runtime resilience,
  // but this is a developer tool — authors want typo feedback. We rebuild the
  // schema with .strict() here. Note .strict() on the outer object does NOT
  // propagate into z.array() elements, so we also override the plugins array
  // with strict entries to catch typos inside individual plugin entries too.
  const strictMarketplaceSchema = PluginMarketplaceSchema()
    .extend({
      plugins: z.array(PluginMarketplaceEntrySchema().strict()),
    })
    .strict()
  const result = strictMarketplaceSchema.safeParse(parsed)
  if (!result.success) {
    errors.push(...formatZodErrors(result.error))
  }
  // Check for common issues and add warnings
  if (result.success) {
    const marketplace = result.data
    // Warn if no plugins
    if (!marketplace.plugins || marketplace.plugins.length === 0) {
      warnings.push({
        path: 'plugins',
        message: 'Marketplace has no plugins defined',
      })
    }
    // Check each plugin entry
    if (marketplace.plugins) {
      marketplace.plugins.forEach((plugin, i) => {
        // Check for duplicate plugin names — each duplicate occurrence gets
        // its own error so every offending entry is flagged at its index.
        const duplicates = marketplace.plugins.filter(
          p => p.name === plugin.name,
        )
        if (duplicates.length > 1) {
          errors.push({
            path: `plugins[${i}].name`,
            message: `Duplicate plugin name "${plugin.name}" found in marketplace`,
          })
        }
      })
      // Version-mismatch check: for local-source entries that declare a
      // version, compare against the plugin's own plugin.json. At install
      // time, calculatePluginVersion (pluginVersioning.ts) prefers the
      // manifest version and silently ignores the entry version — so a
      // stale entry.version is invisible user confusion (marketplace UI
      // shows one version, /status shows another after install).
      // Only local sources: remote sources would need cloning to check.
      const manifestDir = path.dirname(absolutePath)
      const marketplaceRoot =
        path.basename(manifestDir) === '.claude-plugin'
          ? path.dirname(manifestDir)
          : manifestDir
      for (const [i, entry] of marketplace.plugins.entries()) {
        if (
          !entry.version ||
          typeof entry.source !== 'string' ||
          !entry.source.startsWith('./')
        ) {
          continue
        }
        const pluginJsonPath = path.join(
          marketplaceRoot,
          entry.source,
          '.claude-plugin',
          'plugin.json',
        )
        let manifestVersion: string | undefined
        try {
          const raw = await readFile(pluginJsonPath, { encoding: 'utf-8' })
          const parsed = jsonParse(raw) as { version?: unknown }
          if (typeof parsed.version === 'string') {
            manifestVersion = parsed.version
          }
        } catch {
          // Missing/unreadable plugin.json is someone else's error to report
          continue
        }
        if (manifestVersion && manifestVersion !== entry.version) {
          warnings.push({
            path: `plugins[${i}].version`,
            message:
              `Entry declares version "${entry.version}" but ${entry.source}/.claude-plugin/plugin.json says "${manifestVersion}". ` +
              `At install time, plugin.json wins (calculatePluginVersion precedence) — the entry version is silently ignored. ` +
              `Update this entry to "${manifestVersion}" to match.`,
          })
        }
      }
    }
    // Warn if no description in metadata
    if (!marketplace.metadata?.description) {
      warnings.push({
        path: 'metadata.description',
        message:
          'No marketplace description provided. Adding a description helps users understand what this marketplace offers',
      })
    }
  }
  return {
    success: errors.length === 0,
    errors,
    warnings,
    filePath: absolutePath,
    fileType: 'marketplace',
  }
}
/**
 * Validate the YAML frontmatter in a plugin component markdown file.
 *
 * The runtime loader (parseFrontmatter) silently drops unparseable YAML to a
 * debug log and returns an empty object. That's the right resilience choice
 * for the load path, but authors running `claude plugin validate` want a hard
 * signal. This re-parses the frontmatter block and surfaces what the loader
 * would silently swallow.
 *
 * @param filePath Reported back in the result; the file is not read here.
 * @param content  Full markdown file content, frontmatter included.
 * @param fileType Component flavor — used in message wording and the result's
 *   fileType tag.
 * @returns Missing frontmatter is only a warning (success stays true);
 *   unparseable YAML or wrongly-typed fields are errors.
 */
function validateComponentFile(
  filePath: string,
  content: string,
  fileType: 'skill' | 'agent' | 'command',
): ValidationResult {
  const errors: ValidationError[] = []
  const warnings: ValidationWarning[] = []
  const match = content.match(FRONTMATTER_REGEX)
  if (!match) {
    warnings.push({
      path: 'frontmatter',
      message:
        'No frontmatter block found. Add YAML frontmatter between --- delimiters ' +
        'at the top of the file to set description and other metadata.',
    })
    return { success: true, errors, warnings, filePath, fileType }
  }
  const frontmatterText = match[1] || ''
  let parsed: unknown
  try {
    parsed = parseYaml(frontmatterText)
  } catch (e) {
    errors.push({
      path: 'frontmatter',
      message:
        `YAML frontmatter failed to parse: ${errorMessage(e)}. ` +
        `At runtime this ${fileType} loads with empty metadata (all frontmatter ` +
        `fields silently dropped).`,
    })
    return { success: false, errors, warnings, filePath, fileType }
  }
  // Frontmatter must be a mapping — scalar/array/null YAML is a hard error.
  if (parsed === null || typeof parsed !== 'object' || Array.isArray(parsed)) {
    errors.push({
      path: 'frontmatter',
      message:
        'Frontmatter must be a YAML mapping (key: value pairs), got ' +
        `${Array.isArray(parsed) ? 'an array' : parsed === null ? 'null' : typeof parsed}.`,
    })
    return { success: false, errors, warnings, filePath, fileType }
  }
  const fm = parsed as Record<string, unknown>
  // description: must be scalar. coerceDescriptionToString logs+drops arrays/objects at runtime.
  if (fm.description !== undefined) {
    const d = fm.description
    if (
      typeof d !== 'string' &&
      typeof d !== 'number' &&
      typeof d !== 'boolean' &&
      d !== null
    ) {
      errors.push({
        path: 'description',
        message:
          `description must be a string, got ${Array.isArray(d) ? 'array' : typeof d}. ` +
          `At runtime this value is dropped.`,
      })
    }
  } else {
    warnings.push({
      path: 'description',
      message:
        `No description in frontmatter. A description helps users and Claude ` +
        `understand when to use this ${fileType}.`,
    })
  }
  // name: if present, must be a string (skills/commands use it as displayName;
  // plugin agents use it as the agentType stem — non-strings would stringify to garbage)
  if (
    fm.name !== undefined &&
    fm.name !== null &&
    typeof fm.name !== 'string'
  ) {
    errors.push({
      path: 'name',
      message: `name must be a string, got ${typeof fm.name}.`,
    })
  }
  // allowed-tools: string or array of strings
  const at = fm['allowed-tools']
  if (at !== undefined && at !== null) {
    if (typeof at !== 'string' && !Array.isArray(at)) {
      errors.push({
        path: 'allowed-tools',
        message: `allowed-tools must be a string or array of strings, got ${typeof at}.`,
      })
    } else if (Array.isArray(at) && at.some(t => typeof t !== 'string')) {
      errors.push({
        path: 'allowed-tools',
        message: 'allowed-tools array must contain only strings.',
      })
    }
  }
  // shell: 'bash' | 'powershell' (controls !`cmd` block routing)
  const sh = fm.shell
  if (sh !== undefined && sh !== null) {
    if (typeof sh !== 'string') {
      errors.push({
        path: 'shell',
        message: `shell must be a string, got ${typeof sh}.`,
      })
    } else {
      // Normalize to match parseShellFrontmatter() runtime behavior —
      // `shell: PowerShell` should not fail validation but work at runtime.
      const normalized = sh.trim().toLowerCase()
      if (normalized !== 'bash' && normalized !== 'powershell') {
        errors.push({
          path: 'shell',
          message: `shell must be 'bash' or 'powershell', got '${sh}'.`,
        })
      }
    }
  }
  return { success: errors.length === 0, errors, warnings, filePath, fileType }
}
/**
* Validate a plugin's hooks.json file. Unlike frontmatter, this one HARD-ERRORS
* at runtime (pluginLoader uses .parse() not .safeParse()) — a bad hooks.json
* breaks the whole plugin. Surfacing it here is essential.
*/
async function validateHooksJson(filePath: string): Promise<ValidationResult> {
let content: string
try {
content = await readFile(filePath, { encoding: 'utf-8' })
} catch (e: unknown) {
const code = getErrnoCode(e)
// ENOENT is fine — hooks are optional
if (code === 'ENOENT') {
return {
success: true,
errors: [],
warnings: [],
filePath,
fileType: 'hooks',
}
}
return {
success: false,
errors: [
{ path: 'file', message: `Failed to read file: ${errorMessage(e)}` },
],
warnings: [],
filePath,
fileType: 'hooks',
}
}
let parsed: unknown
try {
parsed = jsonParse(content)
} catch (e) {
return {
success: false,
errors: [
{
path: 'json',
message:
`Invalid JSON syntax: ${errorMessage(e)}. ` +
`At runtime this breaks the entire plugin load.`,
},
],
warnings: [],
filePath,
fileType: 'hooks',
}
}
const result = PluginHooksSchema().safeParse(parsed)
if (!result.success) {
return {
success: false,
errors: formatZodErrors(result.error),
warnings: [],
filePath,
fileType: 'hooks',
}
}
return {
success: true,
errors: [],
warnings: [],
filePath,
fileType: 'hooks',
}
}
/**
 * Recursively collect .md files under a directory. Uses withFileTypes to
 * avoid a stat per entry. Returns absolute paths so error messages stay
 * readable.
 */
async function collectMarkdown(
  dir: string,
  isSkillsDir: boolean,
): Promise<string[]> {
  let entries: Dirent[]
  try {
    entries = await readdir(dir, { withFileTypes: true })
  } catch (e: unknown) {
    // A missing/non-directory path simply contributes nothing; anything
    // else (e.g. permissions) propagates to the caller.
    const code = getErrnoCode(e)
    if (code === 'ENOENT' || code === 'ENOTDIR') return []
    throw e
  }
  // Skills use <name>/SKILL.md — only descend one level, only collect SKILL.md.
  // Matches the runtime loader: single .md files in skills/ are NOT loaded,
  // and subdirectories of a skill dir aren't scanned. Paths are speculative
  // (the subdir may lack SKILL.md); the caller handles ENOENT.
  if (isSkillsDir) {
    const skillManifests: string[] = []
    for (const entry of entries) {
      if (entry.isDirectory()) {
        skillManifests.push(path.join(dir, entry.name, 'SKILL.md'))
      }
    }
    return skillManifests
  }
  // Commands/agents: recurse and collect all .md files.
  const collected: string[] = []
  for (const entry of entries) {
    const fullPath = path.join(dir, entry.name)
    if (entry.isDirectory()) {
      const nested = await collectMarkdown(fullPath, false)
      collected.push(...nested)
    } else if (entry.isFile() && entry.name.toLowerCase().endsWith('.md')) {
      collected.push(fullPath)
    }
  }
  return collected
}
/**
 * Lint the content files inside a plugin directory: skills, agents, commands,
 * and hooks.json. Only the default component layout is scanned — a manifest
 * can declare custom paths, but the default layout covers the vast majority
 * of plugins and this is a linter, not a loader.
 *
 * Only files with errors or warnings are reported; a clean plugin yields
 * an empty array.
 */
export async function validatePluginContents(
  pluginDir: string,
): Promise<ValidationResult[]> {
  const findings: ValidationResult[] = []

  const componentDirs: Array<['skill' | 'agent' | 'command', string]> = [
    ['skill', path.join(pluginDir, 'skills')],
    ['agent', path.join(pluginDir, 'agents')],
    ['command', path.join(pluginDir, 'commands')],
  ]

  for (const [fileType, dir] of componentDirs) {
    for (const filePath of await collectMarkdown(dir, fileType === 'skill')) {
      let content: string
      try {
        content = await readFile(filePath, { encoding: 'utf-8' })
      } catch (e: unknown) {
        // Speculative skill paths (subdirs without SKILL.md) land here — skip.
        if (isENOENT(e)) {
          continue
        }
        findings.push({
          success: false,
          errors: [
            { path: 'file', message: `Failed to read: ${errorMessage(e)}` },
          ],
          warnings: [],
          filePath,
          fileType,
        })
        continue
      }
      const checked = validateComponentFile(filePath, content, fileType)
      if (checked.errors.length > 0 || checked.warnings.length > 0) {
        findings.push(checked)
      }
    }
  }

  const hooks = await validateHooksJson(
    path.join(pluginDir, 'hooks', 'hooks.json'),
  )
  if (hooks.errors.length > 0 || hooks.warnings.length > 0) {
    findings.push(hooks)
  }
  return findings
}
/**
 * Validate a manifest file or directory (auto-detects type).
 *
 * Accepts either:
 *  - a directory: looks for .claude-plugin/marketplace.json first, then
 *    .claude-plugin/plugin.json, and reports a directory-level error if
 *    neither manifest exists;
 *  - a file path: routes on detectManifestType, falling back to a content
 *    heuristic (a top-level "plugins" array suggests a marketplace) and
 *    finally to plugin-manifest validation.
 *
 * @param filePath - Manifest file or plugin/marketplace directory.
 * @returns A single ValidationResult describing the outcome.
 */
export async function validateManifest(
  filePath: string,
): Promise<ValidationResult> {
  const absolutePath = path.resolve(filePath)
  // Stat path to check if it's a directory — handle ENOENT inline
  // (a missing path falls through to the file-based branch below).
  let stats: Stats | null = null
  try {
    stats = await stat(absolutePath)
  } catch (e: unknown) {
    if (!isENOENT(e)) {
      throw e
    }
  }
  if (stats?.isDirectory()) {
    // Look for manifest files in .claude-plugin directory
    // Prefer marketplace.json over plugin.json
    const marketplacePath = path.join(
      absolutePath,
      '.claude-plugin',
      'marketplace.json',
    )
    const marketplaceResult = await validateMarketplaceManifest(marketplacePath)
    // Only fall through if the marketplace file was not found (ENOENT)
    // NOTE(review): assumes the validator surfaces the errno on
    // errors[0].code for a missing file — confirm against its implementation.
    if (marketplaceResult.errors[0]?.code !== 'ENOENT') {
      return marketplaceResult
    }
    const pluginPath = path.join(absolutePath, '.claude-plugin', 'plugin.json')
    const pluginResult = await validatePluginManifest(pluginPath)
    // Same ENOENT fall-through rule as the marketplace manifest above.
    if (pluginResult.errors[0]?.code !== 'ENOENT') {
      return pluginResult
    }
    // Neither manifest exists in the directory.
    return {
      success: false,
      errors: [
        {
          path: 'directory',
          message: `No manifest found in directory. Expected .claude-plugin/marketplace.json or .claude-plugin/plugin.json`,
        },
      ],
      warnings: [],
      filePath: absolutePath,
      fileType: 'plugin',
    }
  }
  const manifestType = detectManifestType(filePath)
  switch (manifestType) {
    case 'plugin':
      return validatePluginManifest(filePath)
    case 'marketplace':
      return validateMarketplaceManifest(filePath)
    case 'unknown': {
      // Try to parse and guess based on content
      try {
        const content = await readFile(absolutePath, { encoding: 'utf-8' })
        const parsed = jsonParse(content) as Record<string, unknown>
        // Heuristic: if it has a "plugins" array, it's probably a marketplace
        if (Array.isArray(parsed.plugins)) {
          return validateMarketplaceManifest(filePath)
        }
      } catch (e: unknown) {
        const code = getErrnoCode(e)
        if (code === 'ENOENT') {
          return {
            success: false,
            errors: [
              {
                path: 'file',
                message: `File not found: ${absolutePath}`,
              },
            ],
            warnings: [],
            filePath: absolutePath,
            fileType: 'plugin', // Default to plugin for error reporting
          }
        }
        // Fall through to default validation for other errors (e.g., JSON parse)
      }
      // Default: validate as plugin manifest
      return validatePluginManifest(filePath)
    }
  }
}

View File

@@ -0,0 +1,69 @@
import { join } from 'path'
import { logForDebugging } from '../debug.js'
import { getFsImplementation } from '../fsOperations.js'
const SKILL_MD_RE = /^skill\.md$/i

/**
 * Depth-first walk of a plugin directory that invokes onFile for every .md
 * file found.
 *
 * namespace carries the subdirectory segments relative to rootDir (for
 * root/foo/bar/file.md it is ['foo', 'bar']); callers that don't need
 * namespacing can ignore it.
 *
 * With opts.stopAtSkillDir, a directory containing SKILL.md is treated as a
 * leaf container: its .md files are reported but its subdirectories are not
 * entered.
 *
 * Any error raised while processing a directory (including a rejection from
 * an onFile call inside it) is logged for debugging and swallowed, so one
 * bad directory cannot abort the whole plugin load.
 */
export async function walkPluginMarkdown(
  rootDir: string,
  onFile: (fullPath: string, namespace: string[]) => Promise<void>,
  opts: { stopAtSkillDir?: boolean; logLabel?: string } = {},
): Promise<void> {
  const fs = getFsImplementation()
  const label = opts.logLabel ?? 'plugin'
  const isMarkdown = (name: string) => name.toLowerCase().endsWith('.md')

  async function scan(dirPath: string, namespace: string[]): Promise<void> {
    try {
      const entries = await fs.readdir(dirPath)
      const isSkillLeaf =
        Boolean(opts.stopAtSkillDir) &&
        entries.some(e => e.isFile() && SKILL_MD_RE.test(e.name))

      const pending: Promise<void>[] = []
      for (const entry of entries) {
        const fullPath = join(dirPath, entry.name)
        if (entry.isDirectory()) {
          // Skill directories are leaves — don't descend into them.
          if (!isSkillLeaf) {
            pending.push(scan(fullPath, [...namespace, entry.name]))
          }
        } else if (entry.isFile() && isMarkdown(entry.name)) {
          pending.push(onFile(fullPath, namespace))
        }
      }
      await Promise.all(pending)
    } catch (error) {
      logForDebugging(
        `Failed to scan ${label} directory ${dirPath}: ${error}`,
        { level: 'error' },
      )
    }
  }

  await scan(rootDir, [])
}

View File

@@ -0,0 +1,406 @@
/**
* Plugin Zip Cache Module
*
* Manages plugins as ZIP archives in a mounted directory (e.g., Filestore).
* When CLAUDE_CODE_PLUGIN_USE_ZIP_CACHE is enabled and CLAUDE_CODE_PLUGIN_CACHE_DIR
* is set, plugins are stored as ZIPs in that directory and extracted to a
* session-local temp directory at startup.
*
* Limitations:
* - Only headless mode is supported
* - All settings sources are used (same as normal plugin flow)
* - Only github, git, and url marketplace sources are supported
* - Only strict:true marketplace entries are supported
* - Auto-update is non-blocking (background, does not affect current session)
*
* Directory structure of the zip cache:
* /mnt/plugins-cache/
* ├── known_marketplaces.json
* ├── installed_plugins.json
* ├── marketplaces/
* │ ├── official-marketplace.json
* │ └── company-marketplace.json
* └── plugins/
* ├── official-marketplace/
* │ └── plugin-a/
* │ └── 1.0.0.zip
* └── company-marketplace/
* └── plugin-b/
* └── 2.1.3.zip
*/
import { randomBytes } from 'crypto'
import {
chmod,
lstat,
readdir,
readFile,
rename,
rm,
stat,
writeFile,
} from 'fs/promises'
import { tmpdir } from 'os'
import { basename, dirname, join } from 'path'
import { logForDebugging } from '../debug.js'
import { parseZipModes, unzipFile } from '../dxt/zip.js'
import { isEnvTruthy } from '../envUtils.js'
import { getFsImplementation } from '../fsOperations.js'
import { expandTilde } from '../permissions/pathValidation.js'
import type { MarketplaceSource } from './schemas.js'
/**
 * Whether plugin zip-cache mode is turned on via the
 * CLAUDE_CODE_PLUGIN_USE_ZIP_CACHE environment variable.
 */
export function isPluginZipCacheEnabled(): boolean {
  const flag = process.env.CLAUDE_CODE_PLUGIN_USE_ZIP_CACHE
  return isEnvTruthy(flag)
}
/**
 * Resolve the zip cache directory from CLAUDE_CODE_PLUGIN_CACHE_DIR,
 * expanding a leading ~. Returns undefined when zip-cache mode is disabled
 * or the variable is unset/empty.
 */
export function getPluginZipCachePath(): string | undefined {
  if (!isPluginZipCacheEnabled()) {
    return undefined
  }
  const configured = process.env.CLAUDE_CODE_PLUGIN_CACHE_DIR
  if (!configured) {
    return undefined
  }
  return expandTilde(configured)
}
/**
 * Resolve the zip cache root, throwing when zip-cache mode is not enabled.
 * Shared guard for the path helpers below — previously each helper repeated
 * this check and error message, which is easy to let drift.
 */
function requireZipCachePath(): string {
  const cachePath = getPluginZipCachePath()
  if (!cachePath) {
    throw new Error('Plugin zip cache is not enabled')
  }
  return cachePath
}

/**
 * Get the path to known_marketplaces.json in the zip cache.
 * @throws if the zip cache is not enabled.
 */
export function getZipCacheKnownMarketplacesPath(): string {
  return join(requireZipCachePath(), 'known_marketplaces.json')
}

/**
 * Get the path to installed_plugins.json in the zip cache.
 * @throws if the zip cache is not enabled.
 */
export function getZipCacheInstalledPluginsPath(): string {
  return join(requireZipCachePath(), 'installed_plugins.json')
}

/**
 * Get the marketplaces directory within the zip cache.
 * @throws if the zip cache is not enabled.
 */
export function getZipCacheMarketplacesDir(): string {
  return join(requireZipCachePath(), 'marketplaces')
}

/**
 * Get the plugins directory within the zip cache.
 * @throws if the zip cache is not enabled.
 */
export function getZipCachePluginsDir(): string {
  return join(requireZipCachePath(), 'plugins')
}
// Session plugin cache: a temp directory on local disk (NOT in the mounted zip cache)
// that holds extracted plugins for the duration of the session.
let sessionPluginCachePath: string | null = null
let sessionPluginCachePromise: Promise<string> | null = null

/**
 * Get or create the session plugin cache directory — a temp directory on
 * local disk where plugins are extracted for the duration of the session.
 *
 * Concurrent callers share one in-flight creation promise. If creation
 * fails, the cached promise is cleared so a later call can retry instead of
 * receiving the same rejection forever.
 */
export async function getSessionPluginCachePath(): Promise<string> {
  if (sessionPluginCachePath) {
    return sessionPluginCachePath
  }
  if (!sessionPluginCachePromise) {
    sessionPluginCachePromise = (async () => {
      const suffix = randomBytes(8).toString('hex')
      const dir = join(tmpdir(), `claude-plugin-session-${suffix}`)
      await getFsImplementation().mkdir(dir)
      sessionPluginCachePath = dir
      logForDebugging(`Created session plugin cache at ${dir}`)
      return dir
    })().catch((error: unknown) => {
      // Don't memoize failure: a transient mkdir error would otherwise
      // permanently poison every subsequent call in this process.
      sessionPluginCachePromise = null
      throw error
    })
  }
  return sessionPluginCachePromise
}
/**
 * Remove the session plugin cache directory and reset the memoized state.
 * Intended to run when the session ends; removal failures are logged and
 * otherwise ignored.
 */
export async function cleanupSessionPluginCache(): Promise<void> {
  const cacheDir = sessionPluginCachePath
  if (!cacheDir) {
    return
  }
  try {
    await rm(cacheDir, { recursive: true, force: true })
    logForDebugging(`Cleaned up session plugin cache at ${cacheDir}`)
  } catch (error) {
    logForDebugging(`Failed to clean up session plugin cache: ${error}`)
  } finally {
    sessionPluginCachePath = null
    sessionPluginCachePromise = null
  }
}
/**
 * Reset the memoized session plugin cache state. Test helper only — does
 * not delete anything on disk.
 */
export function resetSessionPluginCache(): void {
  sessionPluginCachePromise = null
  sessionPluginCachePath = null
}
/**
 * Atomically write a file into the zip cache: write to a randomly named
 * temp file in the target's own directory, then rename over the target.
 * On failure the temp file is removed best-effort and the error rethrown.
 */
export async function atomicWriteToZipCache(
  targetPath: string,
  data: string | Uint8Array,
): Promise<void> {
  const parentDir = dirname(targetPath)
  await getFsImplementation().mkdir(parentDir)

  const token = randomBytes(4).toString('hex')
  const tmpPath = join(parentDir, `.${basename(targetPath)}.tmp.${token}`)
  try {
    await (typeof data === 'string'
      ? writeFile(tmpPath, data, { encoding: 'utf-8' })
      : writeFile(tmpPath, data))
    await rename(tmpPath, targetPath)
  } catch (error) {
    // Best-effort removal of the orphaned temp file; ignore cleanup errors.
    await rm(tmpPath, { force: true }).catch(() => {})
    throw error
  }
}
// fflate's ZippableFile tuple form: [data, opts]. The tuple variant lets us
// carry {os, attrs} so parseZipModes can recover exec bits on extraction.
type ZipEntry = [Uint8Array, { os: number; attrs: number }]

/**
 * Build a ZIP archive from a directory tree.
 *
 * Symlinked files are stored as their resolved contents (collectFilesForZip
 * skips symlinked directories and broken links). Unix mode bits ride along
 * in external_attr so extractZipToDirectory can restore +x — otherwise the
 * git clone → zip → extract round trip would lose exec bits.
 *
 * @param sourceDir - Directory to archive.
 * @returns The ZIP file bytes.
 */
export async function createZipFromDirectory(
  sourceDir: string,
): Promise<Uint8Array> {
  const entries: Record<string, ZipEntry> = {}
  await collectFilesForZip(sourceDir, '', entries, new Set<string>())

  const { zipSync } = await import('fflate')
  const archive = zipSync(entries, { level: 6 })
  logForDebugging(
    `Created ZIP from ${sourceDir}: ${Object.keys(entries).length} files, ${archive.length} bytes`,
  )
  return archive
}
/**
 * Recursively collect files from a directory for zipping.
 *
 * @param baseDir - Root of the tree being zipped; zip entry names are
 *   relative to this directory.
 * @param relativePath - Current subpath below baseDir ('' at the root);
 *   segments are joined with '/' so entry names stay zip-portable.
 * @param files - Accumulator mapping entry name -> [bytes, {os, attrs}].
 * @param visited - `dev:ino` keys of directories already entered, used for
 *   symlink-cycle detection.
 *
 * Uses lstat per entry so symlinks are detected before being followed:
 * symlinked files are stored by content, symlinked directories and broken
 * symlinks are skipped. Unreadable directories return early; unreadable
 * files are skipped with a debug log.
 */
async function collectFilesForZip(
  baseDir: string,
  relativePath: string,
  files: Record<string, ZipEntry>,
  visited: Set<string>,
): Promise<void> {
  const currentDir = relativePath ? join(baseDir, relativePath) : baseDir
  let entries: string[]
  try {
    entries = await readdir(currentDir)
  } catch {
    // Unreadable directory — contribute nothing rather than abort the zip.
    return
  }
  // Track visited directories by dev+ino to detect symlink cycles.
  // bigint: true is required — on Windows NTFS, the file index packs a 16-bit
  // sequence number into the high bits. Once that sequence exceeds ~32 (very
  // common on a busy CI runner that churns through temp files), the value
  // exceeds Number.MAX_SAFE_INTEGER and two adjacent directories round to the
  // same JS number, causing subdirs to be silently skipped as "cycles". This
  // broke the round-trip test on Windows CI when sharding shuffled which tests
  // ran first and pushed MFT sequence numbers over the precision cliff.
  // See also: markdownConfigLoader.ts getFileIdentity, anthropics/claude-code#13893
  try {
    const dirStat = await stat(currentDir, { bigint: true })
    // ReFS (Dev Drive), NFS, some FUSE mounts report dev=0 and ino=0 for
    // everything. Fail open: skip cycle detection rather than skip the
    // directory. We already skip symlinked directories unconditionally below,
    // so the only cycle left here is a bind mount, which we accept.
    if (dirStat.dev !== 0n || dirStat.ino !== 0n) {
      const key = `${dirStat.dev}:${dirStat.ino}`
      if (visited.has(key)) {
        logForDebugging(`Skipping symlink cycle at ${currentDir}`)
        return
      }
      visited.add(key)
    }
  } catch {
    // Directory vanished or stat failed — skip it.
    return
  }
  for (const entry of entries) {
    // Skip hidden files that are git-related
    if (entry === '.git') {
      continue
    }
    const fullPath = join(currentDir, entry)
    const relPath = relativePath ? `${relativePath}/${entry}` : entry
    let fileStat
    try {
      fileStat = await lstat(fullPath)
    } catch {
      continue
    }
    // Skip symlinked directories (follow symlinked files)
    if (fileStat.isSymbolicLink()) {
      try {
        const targetStat = await stat(fullPath)
        if (targetStat.isDirectory()) {
          continue
        }
        // Symlinked file — read its contents below
        fileStat = targetStat
      } catch {
        continue // broken symlink
      }
    }
    if (fileStat.isDirectory()) {
      await collectFilesForZip(baseDir, relPath, files, visited)
    } else if (fileStat.isFile()) {
      try {
        const content = await readFile(fullPath)
        // os=3 (Unix) + st_mode in high 16 bits of external_attr — this is
        // what parseZipModes reads back on extraction. fileStat is already
        // in hand from the lstat/stat above, so no extra syscall.
        files[relPath] = [
          new Uint8Array(content),
          { os: 3, attrs: (fileStat.mode & 0xffff) << 16 },
        ]
      } catch (error) {
        logForDebugging(`Failed to read file for zip: ${relPath}: ${error}`)
      }
    }
  }
}
/**
 * Extract a ZIP file to a target directory.
 *
 * Restores Unix exec bits recorded by createZipFromDirectory. Entries whose
 * names are absolute or contain a '..' segment are skipped with a debug log:
 * the zip cache lives on a shared mounted volume that other writers can
 * touch, so a crafted entry name must not be allowed to escape targetDir
 * (zip-slip).
 *
 * @param zipPath - Path to the ZIP file
 * @param targetDir - Directory to extract into
 */
export async function extractZipToDirectory(
  zipPath: string,
  targetDir: string,
): Promise<void> {
  const fsImpl = getFsImplementation()
  const zipBuf = await fsImpl.readFileBytes(zipPath)
  const files = await unzipFile(zipBuf)
  // fflate doesn't surface external_attr — parse the central directory so
  // exec bits survive extraction (hooks/scripts need +x to run via `sh -c`).
  const modes = parseZipModes(zipBuf)
  await fsImpl.mkdir(targetDir)
  for (const [relPath, data] of Object.entries(files)) {
    // Zip-slip guard: never write outside targetDir. Reject absolute entry
    // names, Windows drive prefixes, and any '..' path segment.
    const segments = relPath.split(/[\\/]/)
    if (
      relPath.startsWith('/') ||
      /^[a-zA-Z]:/.test(relPath) ||
      segments.includes('..')
    ) {
      logForDebugging(`Skipping unsafe zip entry: ${relPath}`, {
        level: 'error',
      })
      continue
    }
    // Skip directory entries (trailing slash)
    if (relPath.endsWith('/')) {
      await fsImpl.mkdir(join(targetDir, relPath))
      continue
    }
    const fullPath = join(targetDir, relPath)
    await fsImpl.mkdir(dirname(fullPath))
    await writeFile(fullPath, data)
    const mode = modes[relPath]
    if (mode && mode & 0o111) {
      // Swallow EPERM/ENOTSUP (NFS root_squash, some FUSE mounts) — losing +x
      // is the pre-PR behavior and better than aborting mid-extraction.
      await chmod(fullPath, mode & 0o777).catch(() => {})
    }
  }
  logForDebugging(
    `Extracted ZIP to ${targetDir}: ${Object.keys(files).length} entries`,
  )
}
/**
 * Replace a plugin directory with a ZIP of its contents: archive the
 * directory, atomically write the ZIP, then delete the directory.
 * Centralized because both call sites need exactly this sequence — a
 * non-atomic write or a forgotten rm corrupts the cache.
 */
export async function convertDirectoryToZipInPlace(
  dirPath: string,
  zipPath: string,
): Promise<void> {
  const archive = await createZipFromDirectory(dirPath)
  await atomicWriteToZipCache(zipPath, archive)
  await rm(dirPath, { recursive: true, force: true })
}
/**
 * Relative location of a marketplace's JSON file inside the zip cache:
 * marketplaces/{marketplace-name}.json, with any character outside
 * [a-zA-Z0-9_-] replaced by '-' to keep the filename safe.
 */
export function getMarketplaceJsonRelativePath(
  marketplaceName: string,
): string {
  const safeName = marketplaceName.replace(/[^a-zA-Z0-9\-_]/g, '-')
  return join('marketplaces', `${safeName}.json`)
}
/**
 * Whether a marketplace source type participates in zip cache mode.
 *
 * Supported sources all end up writing under `join(cacheDir, name)` — the
 * location syncMarketplacesToZipCache reads marketplace.json from,
 * regardless of source type:
 * - github/git/url: clone to temp, rename into cacheDir
 * - settings: synthetic marketplace.json written straight into cacheDir (no fetch)
 *
 * Excluded: file/directory (installLocation is the user's own path OUTSIDE
 * cacheDir — nonsensical in ephemeral containers) and npm (node_modules
 * bloat on a Filestore mount).
 */
export function isMarketplaceSourceSupportedByZipCache(
  source: MarketplaceSource,
): boolean {
  switch (source.source) {
    case 'github':
    case 'git':
    case 'url':
    case 'settings':
      return true
    default:
      return false
  }
}

View File

@@ -0,0 +1,164 @@
/**
* Zip Cache Adapters
*
* I/O helpers for the plugin zip cache. These functions handle reading/writing
* zip-cache-local metadata files, extracting ZIPs to session directories,
* and creating ZIPs for newly installed plugins.
*
* The zip cache stores data on a mounted volume (e.g., Filestore) that persists
* across ephemeral container lifetimes. The session cache is a local temp dir
* for extracted plugins used during a single session.
*/
import { readFile } from 'fs/promises'
import { join } from 'path'
import { logForDebugging } from '../debug.js'
import { jsonParse, jsonStringify } from '../slowOperations.js'
import { loadKnownMarketplacesConfigSafe } from './marketplaceManager.js'
import {
type KnownMarketplacesFile,
KnownMarketplacesFileSchema,
type PluginMarketplace,
PluginMarketplaceSchema,
} from './schemas.js'
import {
atomicWriteToZipCache,
getMarketplaceJsonRelativePath,
getPluginZipCachePath,
getZipCacheKnownMarketplacesPath,
} from './zipCache.js'
// ── Metadata I/O ──

/**
 * Load known_marketplaces.json from the zip cache.
 *
 * The file lives on a shared mounted volume that other containers may write,
 * so anything unreadable, unparsable, or schema-invalid degrades to {}.
 */
export async function readZipCacheKnownMarketplaces(): Promise<KnownMarketplacesFile> {
  try {
    const raw = await readFile(getZipCacheKnownMarketplacesPath(), 'utf-8')
    const checked = KnownMarketplacesFileSchema().safeParse(jsonParse(raw))
    if (checked.success) {
      return checked.data
    }
    logForDebugging(
      `Invalid known_marketplaces.json in zip cache: ${checked.error.message}`,
      { level: 'error' },
    )
  } catch {
    // Missing file, read failure, or JSON syntax error — fall through to {}.
  }
  return {}
}
/**
 * Persist known_marketplaces.json to the zip cache using an atomic write.
 */
export async function writeZipCacheKnownMarketplaces(
  data: KnownMarketplacesFile,
): Promise<void> {
  const serialized = jsonStringify(data, null, 2)
  await atomicWriteToZipCache(getZipCacheKnownMarketplacesPath(), serialized)
}
// ── Marketplace JSON ──

/**
 * Load and validate a marketplace JSON file from the zip cache.
 * Returns null when the zip cache is disabled, the file is missing or
 * unreadable, the JSON is malformed, or the contents fail schema validation.
 */
export async function readMarketplaceJson(
  marketplaceName: string,
): Promise<PluginMarketplace | null> {
  const cacheRoot = getPluginZipCachePath()
  if (!cacheRoot) {
    return null
  }
  const jsonPath = join(
    cacheRoot,
    getMarketplaceJsonRelativePath(marketplaceName),
  )
  let raw: string
  try {
    raw = await readFile(jsonPath, 'utf-8')
  } catch {
    return null
  }
  try {
    const checked = PluginMarketplaceSchema().safeParse(jsonParse(raw))
    if (checked.success) {
      return checked.data
    }
    logForDebugging(
      `Invalid marketplace JSON for ${marketplaceName}: ${checked.error}`,
    )
  } catch {
    // JSON syntax error — treated the same as a missing file.
  }
  return null
}
/**
 * Copy a marketplace's JSON from its install location into the zip cache.
 * No-op when the zip cache is disabled or no marketplace.json can be read.
 */
export async function saveMarketplaceJsonToZipCache(
  marketplaceName: string,
  installLocation: string,
): Promise<void> {
  const cacheRoot = getPluginZipCachePath()
  if (!cacheRoot) {
    return
  }
  const content = await readMarketplaceJsonContent(installLocation)
  if (content === null) {
    return
  }
  const destination = join(
    cacheRoot,
    getMarketplaceJsonRelativePath(marketplaceName),
  )
  await atomicWriteToZipCache(destination, content)
}
/**
 * Read marketplace.json content given a marketplace install location.
 *
 * Directory sources keep the file at .claude-plugin/marketplace.json or
 * marketplace.json; for URL sources the install location IS the JSON file
 * itself, so the location is tried last. Returns null if nothing reads.
 */
async function readMarketplaceJsonContent(dir: string): Promise<string | null> {
  const candidates = [
    join(dir, '.claude-plugin', 'marketplace.json'),
    join(dir, 'marketplace.json'),
    dir,
  ]
  for (const candidate of candidates) {
    try {
      const content = await readFile(candidate, 'utf-8')
      return content
    } catch {
      // ENOENT (doesn't exist) or EISDIR (directory) — try the next candidate.
    }
  }
  return null
}
/**
* Sync marketplace data to zip cache for offline access.
* Saves marketplace JSONs and merges with previously cached data
* so ephemeral containers can access marketplaces without re-cloning.
*/
export async function syncMarketplacesToZipCache(): Promise<void> {
// Read-only iteration — Safe variant so a corrupted config doesn't throw.
// This runs during startup paths; a throw here cascades to the same
// try-block that catches loadAllPlugins failures.
const knownMarketplaces = await loadKnownMarketplacesConfigSafe()
// Save marketplace JSONs to zip cache
for (const [name, entry] of Object.entries(knownMarketplaces)) {
if (!entry.installLocation) continue
try {
await saveMarketplaceJsonToZipCache(name, entry.installLocation)
} catch (error) {
logForDebugging(`Failed to save marketplace JSON for ${name}: ${error}`)
}
}
// Merge with previously cached data (ephemeral containers lose global config)
const zipCacheKnownMarketplaces = await readZipCacheKnownMarketplaces()
const mergedKnownMarketplaces: KnownMarketplacesFile = {
...zipCacheKnownMarketplaces,
...knownMarketplaces,
}
await writeZipCacheKnownMarketplaces(mergedKnownMarketplaces)
}