improving the front matter parsing by using a real library
parent 553ef42856
commit d1a8663c56
@@ -1,5 +1,5 @@
---
description: 'Your perfect AI chat mode for high-level architectural documentation and review. Perfect for targeted updates after a story or researching that legacy system when nobody remembers what it's supposed to be doing.'
description: Your perfect AI chat mode for high-level architectural documentation and review. Perfect for targeted updates after a story or researching that legacy system when nobody remembers what it's supposed to be doing.
model: 'claude-sonnet-4'
tools:
- 'codebase'

@@ -1,10 +1,6 @@
---
description: '4.1 voidBeast_GPT41Enhanced 1.0 : a advanced autonomous developer agent, designed for elite full-stack development with enhanced multi-mode capabilities. This latest evolution features sophisticated mode detection, comprehensive research capabilities, and never-ending problem resolution. Plan/Act/Deep Research/Analyzer/Checkpoints(Memory)/Prompt Generator Modes.
'
description: '4.1 voidBeast_GPT41Enhanced 1.0 : a advanced autonomous developer agent, designed for elite full-stack development with enhanced multi-mode capabilities. This latest evolution features sophisticated mode detection, comprehensive research capabilities, and never-ending problem resolution. Plan/Act/Deep Research/Analyzer/Checkpoints(Memory)/Prompt Generator Modes.'
tools: ['changes', 'codebase', 'edit/editFiles', 'extensions', 'fetch', 'findTestFiles', 'githubRepo', 'new', 'openSimpleBrowser', 'problems', 'readCellOutput', 'runCommands', 'runNotebooks', 'runTasks', 'runTests', 'search', 'searchResults', 'terminalLastCommand', 'terminalSelection', 'testFailure', 'updateUserPreferences', 'usages', 'vscodeAPI']

---

---

# voidBeast_GPT41Enhanced 1.0 - Elite Developer AI Assistant
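The quoting in these chatmode descriptions (wrapping quotes, the '' escape for apostrophes, multi-line scalars) is exactly what the old hand-rolled parser had to special-case, and what a real YAML parser resolves on its own. A minimal sketch, assuming js-yaml, which this repo already depends on; the description string is illustrative, not taken from a repo file:

```js
// Sketch only: a real YAML parser resolves single-quote escaping ('' -> ')
// without the regex special-casing removed later in this commit.
const yaml = require("js-yaml");

const frontmatterYaml =
  "description: 'Researching that legacy system when nobody remembers what it''s supposed to be doing.'";

const data = yaml.load(frontmatterYaml);
console.log(data.description);
// -> Researching that legacy system when nobody remembers what it's supposed to be doing.
```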
@@ -1,3 +1,5 @@
const path = require("path");

// Template sections for the README
const TEMPLATES = {
instructionsSection: `## 📋 Custom Instructions
@@ -111,3 +113,17 @@ const AKA_INSTALL_URLS = {
agent: "https://aka.ms/awesome-copilot/install/agent",
};
exports.AKA_INSTALL_URLS = AKA_INSTALL_URLS;
const ROOT_FOLDER = path.join(__dirname, "..");
exports.ROOT_FOLDER = ROOT_FOLDER;
const INSTRUCTIOSN_DIR = path.join(ROOT_FOLDER, "instructions");
exports.INSTRUCTIOSN_DIR = INSTRUCTIOSN_DIR;
const PROMPTS_DIR = path.join(ROOT_FOLDER, "prompts");
exports.PROMPTS_DIR = PROMPTS_DIR;
const CHATMODES_DIR = path.join(ROOT_FOLDER, "chatmodes");
exports.CHATMODES_DIR = CHATMODES_DIR;
const AGENTS_DIR = path.join(ROOT_FOLDER, "agents");
exports.AGENTS_DIR = AGENTS_DIR;
const COLLECTIONS_DIR = path.join(ROOT_FOLDER, "collections");
exports.COLLECTIONS_DIR = COLLECTIONS_DIR;
// Maximum number of items allowed in a collection
const MAX_COLLECTION_ITEMS = 50;
exports.MAX_COLLECTION_ITEMS = MAX_COLLECTION_ITEMS;
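These new exports centralize the repo's directory paths; the scripts further down switch from local rootFolder/collectionsDir variables to requiring them. A minimal sketch of that consumption pattern, under the assumption that the consumer sits next to the constants module; the manifest file name is hypothetical:

```js
// Sketch: resolve paths through the shared constants instead of re-deriving
// __dirname-based paths in every script.
const path = require("path");
const { ROOT_FOLDER, COLLECTIONS_DIR, MAX_COLLECTION_ITEMS } = require("./constants");

const manifestPath = path.join(COLLECTIONS_DIR, "example.collection.yml"); // hypothetical file
console.log(manifestPath.startsWith(ROOT_FOLDER), MAX_COLLECTION_ITEMS); // true 50
```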
@@ -5,7 +5,7 @@ const path = require("path");
const {
parseCollectionYaml,
extractMcpServers,
parseAgentFrontmatter,
parseFrontmatter,
} = require("./yaml-parser");
const {
TEMPLATES,
@@ -13,6 +13,12 @@ const {
repoBaseUrl,
vscodeInstallImage,
vscodeInsidersInstallImage,
ROOT_FOLDER,
INSTRUCTIOSN_DIR,
PROMPTS_DIR,
CHATMODES_DIR,
AGENTS_DIR,
COLLECTIONS_DIR,
} = require("./constants");

// Add error handling utility
@@ -34,47 +40,21 @@ function extractTitle(filePath) {
const content = fs.readFileSync(filePath, "utf8");
const lines = content.split("\n");

// Step 1: Look for title in frontmatter for all file types
let inFrontmatter = false;
let frontmatterEnded = false;
let hasFrontmatter = false;
// Step 1: Try to get title from frontmatter using vfile-matter
const frontmatter = parseFrontmatter(filePath);

for (const line of lines) {
if (line.trim() === "---") {
if (!inFrontmatter) {
inFrontmatter = true;
hasFrontmatter = true;
} else if (!frontmatterEnded) {
frontmatterEnded = true;
break;
}
continue;
if (frontmatter) {
// Check for title field
if (frontmatter.title && typeof frontmatter.title === "string") {
return frontmatter.title;
}

if (inFrontmatter && !frontmatterEnded) {
// Look for title field in frontmatter
if (line.includes("title:")) {
// Extract everything after 'title:'
const afterTitle = line
.substring(line.indexOf("title:") + 6)
.trim();
// Remove quotes if present
const cleanTitle = afterTitle.replace(/^['"]|['"]$/g, "");
return cleanTitle;
}

// Look for name field in frontmatter
if (line.includes("name:")) {
// Extract everything after 'name:'
const afterName = line.substring(line.indexOf("name:") + 5).trim();
// Remove quotes if present
const cleanName = afterName.replace(/^['"]|['"]$/g, "");
// Convert hyphenated lowercase to title case
return cleanName
.split("-")
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
.join(" ");
}
// Check for name field and convert to title case
if (frontmatter.name && typeof frontmatter.name === "string") {
return frontmatter.name
.split("-")
.map((word) => word.charAt(0).toUpperCase() + word.slice(1))
.join(" ");
}
}

@@ -84,41 +64,24 @@ function extractTitle(filePath) {
filePath.includes(".chatmode.md") ||
filePath.includes(".instructions.md")
) {
// If we had frontmatter, only look for headings after it ended
if (hasFrontmatter) {
let inFrontmatter2 = false;
let frontmatterEnded2 = false;
let inCodeBlock = false;
// Look for first heading after frontmatter
let inFrontmatter = false;
let frontmatterEnded = false;
let inCodeBlock = false;

for (const line of lines) {
if (line.trim() === "---") {
if (!inFrontmatter2) {
inFrontmatter2 = true;
} else if (inFrontmatter2 && !frontmatterEnded2) {
frontmatterEnded2 = true;
}
continue;
}

// Track code blocks to ignore headings inside them
if (frontmatterEnded2) {
if (
line.trim().startsWith("```") ||
line.trim().startsWith("````")
) {
inCodeBlock = !inCodeBlock;
continue;
}

if (!inCodeBlock && line.startsWith("# ")) {
return line.substring(2).trim();
}
for (const line of lines) {
if (line.trim() === "---") {
if (!inFrontmatter) {
inFrontmatter = true;
} else if (inFrontmatter && !frontmatterEnded) {
frontmatterEnded = true;
}
continue;
}
} else {
// No frontmatter, look for first heading (but not in code blocks)
let inCodeBlock = false;
for (const line of lines) {

// Only look for headings after frontmatter ends
if (frontmatterEnded || !inFrontmatter) {
// Track code blocks to ignore headings inside them
if (
line.trim().startsWith("```") ||
line.trim().startsWith("````")
@@ -147,7 +110,7 @@ function extractTitle(filePath) {
.replace(/\b\w/g, (l) => l.toUpperCase());
}

// Step 4: For instruction files, look for the first heading (but not in code blocks)
// Step 4: For other files, look for the first heading (but not in code blocks)
let inCodeBlock = false;
for (const line of lines) {
if (line.trim().startsWith("```") || line.trim().startsWith("````")) {
@@ -177,81 +140,11 @@ function extractTitle(filePath) {
function extractDescription(filePath) {
return safeFileOperation(
() => {
// Special handling for agent files
if (filePath.endsWith(".agent.md")) {
const agent = parseAgentFrontmatter(filePath);
if (agent && agent.description) {
return agent.description;
}
return null;
}
// Use vfile-matter to parse frontmatter for all file types
const frontmatter = parseFrontmatter(filePath);

const content = fs.readFileSync(filePath, "utf8");

// Parse frontmatter for description (for both prompts and instructions)
const lines = content.split("\n");
let inFrontmatter = false;

// For multi-line descriptions
let isMultilineDescription = false;
let multilineDescription = [];

for (let i = 0; i < lines.length; i++) {
const line = lines[i];

if (line.trim() === "---") {
if (!inFrontmatter) {
inFrontmatter = true;
continue;
}
break;
}

if (inFrontmatter) {
// Check for multi-line description with pipe syntax (|)
const multilineMatch = line.match(/^description:\s*\|(\s*)$/);
if (multilineMatch) {
isMultilineDescription = true;
// Continue to next line to start collecting the multi-line content
continue;
}

// If we're collecting a multi-line description
if (isMultilineDescription) {
// If the line has no indentation or has another frontmatter key, stop collecting
if (!line.startsWith(" ") || line.match(/^[a-zA-Z0-9_-]+:/)) {
// Join the collected lines and return
return multilineDescription.join(" ").trim();
}

// Add the line to our multi-line collection (removing the 2-space indentation)
multilineDescription.push(line.substring(2));
} else {
// Look for single-line description field in frontmatter
const descriptionMatch = line.match(
/^description:\s*['"]?(.+?)['"]?\s*$/
);
if (descriptionMatch) {
let description = descriptionMatch[1];

// Check if the description is wrapped in single quotes and handle escaped quotes
const singleQuoteMatch = line.match(
/^description:\s*'(.+?)'\s*$/
);
if (singleQuoteMatch) {
// Replace escaped single quotes ('') with single quotes (')
description = singleQuoteMatch[1].replace(/''/g, "'");
}

return description;
}
}
}
}

// If we've collected multi-line description but the frontmatter ended
if (multilineDescription.length > 0) {
return multilineDescription.join(" ").trim();
if (frontmatter && frontmatter.description) {
return frontmatter.description;
}

return null;
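The removed block above collected multi-line "description: |" values by hand, including the two-space de-indentation and the '' unescaping. With vfile-matter that is just ordinary YAML parsing. A minimal sketch using the same VFile and matter imports this commit adds to yaml-parser below; the markdown content and path are made up for illustration, and the require style mirrors the commit's own:

```js
// Sketch only: vfile-matter parses a block-scalar description that the old
// code stitched together line by line.
const { VFile } = require("vfile");
const { matter } = require("vfile-matter");

const markdown = [
  "---",
  "description: |",
  "  A multi-line description",
  "  spanning two lines.",
  "tools: ['codebase']",
  "---",
  "# Example Chat Mode",
].join("\n");

const file = new VFile({ path: "example.chatmode.md", value: markdown }); // hypothetical path
matter(file); // populates file.data.matter with the parsed YAML frontmatter
console.log(file.data.matter.description); // "A multi-line description\nspanning two lines.\n"
```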
@@ -723,14 +616,14 @@ function generateCollectionReadme(collection, collectionId) {
const items = [...collection.items];
if (collection.display?.ordering === "alpha") {
items.sort((a, b) => {
const titleA = extractTitle(path.join(rootFolder, a.path));
const titleB = extractTitle(path.join(rootFolder, b.path));
const titleA = extractTitle(path.join(ROOT_FOLDER, a.path));
const titleB = extractTitle(path.join(ROOT_FOLDER, b.path));
return titleA.localeCompare(titleB);
});
}

for (const item of items) {
const filePath = path.join(rootFolder, item.path);
const filePath = path.join(ROOT_FOLDER, item.path);
const title = extractTitle(filePath);
const description = extractDescription(filePath) || "No description";

@@ -849,13 +742,6 @@ function buildCategoryReadme(sectionBuilder, dirPath, headerLine, usageLine) {
return `${headerLine}\n\n${usageLine}\n\n_No entries found yet._`;
}

const rootFolder = path.join(__dirname, "..");
const instructionsDir = path.join(rootFolder, "instructions");
const promptsDir = path.join(rootFolder, "prompts");
const chatmodesDir = path.join(rootFolder, "chatmodes");
const agentsDir = path.join(rootFolder, "agents");
const collectionsDir = path.join(rootFolder, "collections");

// Main execution
try {
console.log("Generating category README files...");
@@ -875,19 +761,19 @@ try {

const instructionsReadme = buildCategoryReadme(
generateInstructionsSection,
instructionsDir,
INSTRUCTIOSN_DIR,
instructionsHeader,
TEMPLATES.instructionsUsage
);
const promptsReadme = buildCategoryReadme(
generatePromptsSection,
promptsDir,
PROMPTS_DIR,
promptsHeader,
TEMPLATES.promptsUsage
);
const chatmodesReadme = buildCategoryReadme(
generateChatModesSection,
chatmodesDir,
CHATMODES_DIR,
chatmodesHeader,
TEMPLATES.chatmodesUsage
);
@@ -895,7 +781,7 @@ try {
// Generate agents README
const agentsReadme = buildCategoryReadme(
generateAgentsSection,
agentsDir,
AGENTS_DIR,
agentsHeader,
TEMPLATES.agentsUsage
);
@@ -903,37 +789,40 @@ try {
// Generate collections README
const collectionsReadme = buildCategoryReadme(
generateCollectionsSection,
collectionsDir,
COLLECTIONS_DIR,
collectionsHeader,
TEMPLATES.collectionsUsage
);

// Write category outputs
writeFileIfChanged(
path.join(rootFolder, "README.instructions.md"),
path.join(ROOT_FOLDER, "README.instructions.md"),
instructionsReadme
);
writeFileIfChanged(path.join(rootFolder, "README.prompts.md"), promptsReadme);
writeFileIfChanged(
path.join(rootFolder, "README.chatmodes.md"),
path.join(ROOT_FOLDER, "README.prompts.md"),
promptsReadme
);
writeFileIfChanged(
path.join(ROOT_FOLDER, "README.chatmodes.md"),
chatmodesReadme
);
writeFileIfChanged(path.join(rootFolder, "README.agents.md"), agentsReadme);
writeFileIfChanged(path.join(ROOT_FOLDER, "README.agents.md"), agentsReadme);
writeFileIfChanged(
path.join(rootFolder, "README.collections.md"),
path.join(ROOT_FOLDER, "README.collections.md"),
collectionsReadme
);

// Generate individual collection README files
if (fs.existsSync(collectionsDir)) {
if (fs.existsSync(COLLECTIONS_DIR)) {
console.log("Generating individual collection README files...");

const collectionFiles = fs
.readdirSync(collectionsDir)
.readdirSync(COLLECTIONS_DIR)
.filter((file) => file.endsWith(".collection.yml"));

for (const file of collectionFiles) {
const filePath = path.join(collectionsDir, file);
const filePath = path.join(COLLECTIONS_DIR, file);
const collection = parseCollectionYaml(filePath);

if (collection) {
@@ -943,7 +832,7 @@ try {
collection,
collectionId
);
const readmeFile = path.join(collectionsDir, `${collectionId}.md`);
const readmeFile = path.join(COLLECTIONS_DIR, `${collectionId}.md`);
writeFileIfChanged(readmeFile, readmeContent);
}
}
@@ -951,10 +840,10 @@ try {

// Generate featured collections section and update main README.md
console.log("Updating main README.md with featured collections...");
const featuredSection = generateFeaturedCollectionsSection(collectionsDir);
const featuredSection = generateFeaturedCollectionsSection(COLLECTIONS_DIR);

if (featuredSection) {
const mainReadmePath = path.join(rootFolder, "README.md");
const mainReadmePath = path.join(ROOT_FOLDER, "README.md");

if (fs.existsSync(mainReadmePath)) {
let readmeContent = fs.readFileSync(mainReadmePath, "utf8");
@@ -2,13 +2,12 @@

const fs = require("fs");
const path = require("path");
const { parseCollectionYaml, parseFrontmatter } = require("./yaml-parser");
const {
parseCollectionYaml,
parseAgentFrontmatter,
} = require("./yaml-parser");

// Maximum number of items allowed in a collection
const MAX_COLLECTION_ITEMS = 50;
ROOT_FOLDER,
COLLECTIONS_DIR,
MAX_COLLECTION_ITEMS,
} = require("./constants");

// Validation functions
function validateCollectionId(id) {
@@ -67,9 +66,9 @@ function validateCollectionTags(tags) {
return null;
}

function validateAgentFile(filePath, itemNumber) {
function validateAgentFile(filePath) {
try {
const agent = parseAgentFrontmatter(filePath);
const agent = parseFrontmatter(filePath);

if (!agent) {
return `Item ${filePath} agent file could not be parsed`;
@@ -188,7 +187,7 @@ function validateCollectionItems(items) {
}

// Validate file path exists
const filePath = path.join(__dirname, item.path);
const filePath = path.join(ROOT_FOLDER, item.path);
if (!fs.existsSync(filePath)) {
return `Item ${i + 1} file does not exist: ${item.path}`;
}
@@ -301,15 +300,13 @@ function validateCollectionManifest(collection, filePath) {

// Main validation function
function validateCollections() {
const collectionsDir = path.join(__dirname, "collections");

if (!fs.existsSync(collectionsDir)) {
if (!fs.existsSync(COLLECTIONS_DIR)) {
console.log("No collections directory found - validation skipped");
return true;
}

const collectionFiles = fs
.readdirSync(collectionsDir)
.readdirSync(COLLECTIONS_DIR)
.filter((file) => file.endsWith(".collection.yml"));

if (collectionFiles.length === 0) {
@@ -323,7 +320,7 @@ function validateCollections() {
const usedIds = new Set();

for (const file of collectionFiles) {
const filePath = path.join(collectionsDir, file);
const filePath = path.join(COLLECTIONS_DIR, file);
console.log(`\nValidating ${file}...`);

const collection = parseCollectionYaml(filePath);
@@ -1,6 +1,8 @@
// YAML parser for collection files and agent frontmatter
// YAML parser for collection files and frontmatter parsing using vfile-matter
const fs = require("fs");
const yaml = require("js-yaml");
const { VFile } = require("vfile");
const { matter } = require("vfile-matter");

function safeFileOperation(operation, filePath, defaultValue = null) {
try {
@@ -31,54 +33,31 @@ function parseCollectionYaml(filePath) {
}

/**
* Parse agent frontmatter from an agent markdown file (.agent.md)
* Agent files use standard markdown frontmatter with --- delimiters
* @param {string} filePath - Path to the agent file
* @returns {object|null} Parsed agent frontmatter or null on error
* Parse frontmatter from a markdown file using vfile-matter
* Works with any markdown file that has YAML frontmatter (agents, prompts, chatmodes, instructions)
* @param {string} filePath - Path to the markdown file
* @returns {object|null} Parsed frontmatter object or null on error
*/
function parseAgentFrontmatter(filePath) {
function parseFrontmatter(filePath) {
return safeFileOperation(
() => {
const content = fs.readFileSync(filePath, "utf8");
const lines = content.split("\n");
const file = new VFile({ path: filePath, value: content });

// Agent files use standard markdown frontmatter format
// Find the YAML frontmatter between --- delimiters
let yamlStart = -1;
let yamlEnd = -1;
let delimiterCount = 0;
// Parse the frontmatter using vfile-matter
matter(file);

for (let i = 0; i < lines.length; i++) {
const trimmed = lines[i].trim();

if (trimmed === "---") {
delimiterCount++;
if (delimiterCount === 1) {
yamlStart = i + 1;
} else if (delimiterCount === 2) {
yamlEnd = i;
break;
}
}
}

if (yamlStart === -1 || yamlEnd === -1) {
throw new Error(
"Could not find YAML frontmatter delimiters (---) in agent file"
);
}

// Extract YAML content between delimiters
const yamlContent = lines.slice(yamlStart, yamlEnd).join("\n");

// Parse YAML directly with js-yaml
const frontmatter = yaml.load(yamlContent, { schema: yaml.JSON_SCHEMA });
// The frontmatter is now available in file.data.matter
const frontmatter = file.data.matter;

// Normalize string fields that can accumulate trailing newlines/spaces
if (frontmatter) {
if (typeof frontmatter.name === "string") {
frontmatter.name = frontmatter.name.replace(/[\r\n]+$/g, "").trim();
}
if (typeof frontmatter.title === "string") {
frontmatter.title = frontmatter.title.replace(/[\r\n]+$/g, "").trim();
}
if (typeof frontmatter.description === "string") {
// Remove only trailing whitespace/newlines; preserve internal formatting
frontmatter.description = frontmatter.description.replace(
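A condensed sketch of the new parse flow shown in this hunk: wrap the file in a VFile, let vfile-matter populate file.data.matter, then normalize trailing whitespace. The safeFileOperation wrapper is omitted here, and the final .replace() regex is an assumption since the hunk is cut off mid-call:

```js
// Condensed sketch of the parseFrontmatter flow above (error wrapper omitted).
const fs = require("fs");
const { VFile } = require("vfile");
const { matter } = require("vfile-matter");

function parseFrontmatterSketch(filePath) {
  const content = fs.readFileSync(filePath, "utf8");
  const file = new VFile({ path: filePath, value: content });

  matter(file); // vfile-matter parses the leading YAML block into file.data.matter
  const frontmatter = file.data.matter;

  if (frontmatter && typeof frontmatter.description === "string") {
    // Assumed equivalent of the truncated normalization above: strip only
    // trailing whitespace/newlines, preserve internal formatting.
    frontmatter.description = frontmatter.description.replace(/\s+$/, "");
  }
  return frontmatter || null;
}
```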
@@ -101,7 +80,7 @@ function parseAgentFrontmatter(filePath) {
* @returns {object|null} Agent metadata object with name, description, tools, and mcp-servers
*/
function extractAgentMetadata(filePath) {
const frontmatter = parseAgentFrontmatter(filePath);
const frontmatter = parseFrontmatter(filePath);

if (!frontmatter) {
return null;
@@ -135,7 +114,7 @@ function extractMcpServers(filePath) {

module.exports = {
parseCollectionYaml,
parseAgentFrontmatter,
parseFrontmatter,
extractAgentMetadata,
extractMcpServers,
safeFileOperation,
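With parseAgentFrontmatter renamed to parseFrontmatter and exported, the README generation and collection validation scripts in this commit consume it the same way. A small usage sketch; the file paths are hypothetical and the require path assumes the caller sits next to yaml-parser.js, as those scripts do:

```js
// Sketch of consumer-side usage of the renamed export.
const { parseFrontmatter, extractAgentMetadata } = require("./yaml-parser");

const fm = parseFrontmatter("chatmodes/example.chatmode.md"); // hypothetical path
if (fm && fm.description) {
  console.log(fm.description); // same value extractDescription() now returns
}

const agent = extractAgentMetadata("agents/example.agent.md"); // hypothetical path
console.log(agent ? agent.name : "no agent frontmatter");
```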
package-lock.json (generated): 77 changed lines
@@ -9,7 +9,9 @@
"version": "1.0.0",
"license": "MIT",
"dependencies": {
"js-yaml": "^4.1.0"
"js-yaml": "^4.1.0",
"vfile": "^6.0.3",
"vfile-matter": "^5.0.1"
},
"devDependencies": {
"all-contributors-cli": "^6.26.1"
@@ -25,6 +27,12 @@
"node": ">=6.9.0"
}
},
"node_modules/@types/unist": {
"version": "3.0.3",
"resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz",
"integrity": "sha512-ko/gIFJRv177XgZsZcBwnqJN5x/Gien8qNOn0D5bQU/zAzVf9Zt3BlcUiLqhV9y4ARk0GbT3tnUiPNgnTXzc/Q==",
"license": "MIT"
},
"node_modules/all-contributors-cli": {
"version": "6.26.1",
"resolved": "https://registry.npmjs.org/all-contributors-cli/-/all-contributors-cli-6.26.1.tgz",
@@ -710,6 +718,61 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/unist-util-stringify-position": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-4.0.0.tgz",
"integrity": "sha512-0ASV06AAoKCDkS2+xw5RXJywruurpbC4JZSm7nr7MOt1ojAzvyyaO+UxZf18j8FCF6kmzCZKcAgN/yu2gm2XgQ==",
"license": "MIT",
"dependencies": {
"@types/unist": "^3.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/unified"
}
},
"node_modules/vfile": {
"version": "6.0.3",
"resolved": "https://registry.npmjs.org/vfile/-/vfile-6.0.3.tgz",
"integrity": "sha512-KzIbH/9tXat2u30jf+smMwFCsno4wHVdNmzFyL+T/L3UGqqk6JKfVqOFOZEpZSHADH1k40ab6NUIXZq422ov3Q==",
"license": "MIT",
"dependencies": {
"@types/unist": "^3.0.0",
"vfile-message": "^4.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/unified"
}
},
"node_modules/vfile-matter": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/vfile-matter/-/vfile-matter-5.0.1.tgz",
"integrity": "sha512-o6roP82AiX0XfkyTHyRCMXgHfltUNlXSEqCIS80f+mbAyiQBE2fxtDVMtseyytGx75sihiJFo/zR6r/4LTs2Cw==",
"license": "MIT",
"dependencies": {
"vfile": "^6.0.0",
"yaml": "^2.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/unified"
}
},
"node_modules/vfile-message": {
"version": "4.0.3",
"resolved": "https://registry.npmjs.org/vfile-message/-/vfile-message-4.0.3.tgz",
"integrity": "sha512-QTHzsGd1EhbZs4AsQ20JX1rC3cOlt/IWJruk893DfLRr57lcnOeMaWG4K0JrRta4mIJZKth2Au3mM3u03/JWKw==",
"license": "MIT",
"dependencies": {
"@types/unist": "^3.0.0",
"unist-util-stringify-position": "^4.0.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/unified"
}
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
@@ -757,6 +820,18 @@
"dev": true,
"license": "ISC"
},
"node_modules/yaml": {
"version": "2.8.1",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz",
"integrity": "sha512-lcYcMxX2PO9XMGvAJkJ3OsNMw+/7FKes7/hgerGUYWIoWu5j/+YQqcZr5JnPZWzOsEBgMbSbiSTn/dv/69Mkpw==",
"license": "ISC",
"bin": {
"yaml": "bin.mjs"
},
"engines": {
"node": ">= 14.6"
}
},
"node_modules/yargs": {
"version": "15.4.1",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz",
@@ -28,6 +28,8 @@
"all-contributors-cli": "^6.26.1"
},
"dependencies": {
"js-yaml": "^4.1.0"
"js-yaml": "^4.1.0",
"vfile": "^6.0.3",
"vfile-matter": "^5.0.1"
}
}