Nicholai dc9673005b fix: embed Morgan system prompt for Cloudflare deployment
Morgan's system prompt is now generated at build time and embedded directly
in the code, making it available in Cloudflare Worker environments where
file system access isn't available.

Changes:
- Add scripts/generate-morgan-prompt.js to generate a TypeScript constant from markdown (see the sketch below)
- Generate src/lib/agents/morgan-system-prompt.ts with the full Fortura Agent Bundle
- Update agent definitions to import and use the embedded constant (consumer-side sketch below)
- Update package.json build scripts to generate the prompt before building
- Remove runtime file system access (readFileSync) that failed on Cloudflare

This ensures the Morgan agent has its full system prompt available in all deployment environments.
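The generation script itself isn't shown on this page. A minimal sketch of the build-time approach, assuming the prompt markdown lives at docs/morgan-system-prompt.md and the emitted constant is named MORGAN_SYSTEM_PROMPT (both hypothetical, not confirmed by this commit):

```ts
// scripts/generate-morgan-prompt.js -- sketch only; the source path and the
// constant name below are assumptions, not the repository's actual values.
import { readFileSync, writeFileSync } from 'node:fs'
import path from 'node:path'

// Hypothetical markdown source containing the Fortura Agent Bundle.
const source = path.resolve('docs/morgan-system-prompt.md')
// Generated module that ships with the bundle, so no fs access is needed at runtime.
const target = path.resolve('src/lib/agents/morgan-system-prompt.ts')

const markdown = readFileSync(source, 'utf8')

// JSON.stringify produces a safely escaped string literal for the emitted module.
const banner = '// AUTO-GENERATED by scripts/generate-morgan-prompt.js -- do not edit.\n'
const body = `export const MORGAN_SYSTEM_PROMPT = ${JSON.stringify(markdown)}\n`
writeFileSync(target, banner + body, 'utf8')

console.log(`Wrote ${target} (${markdown.length} characters)`)
```

The build wiring would then run this ahead of the bundler, e.g. as a prebuild step in package.json; the exact script names aren't visible in this commit view.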

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude <noreply@anthropic.com>
2025-11-16 14:32:51 -07:00
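On the consumer side, the agent definition can then import the constant like any other module instead of reading the markdown at request time. A sketch, assuming a definitions module and field names that are not shown on this page (the agent-2 id matches the Morgan test in the file below):

```ts
// src/lib/agents/definitions.ts -- sketch; module path and field names are assumptions.
import { MORGAN_SYSTEM_PROMPT } from '@/lib/agents/morgan-system-prompt'

// Previously the prompt was loaded with readFileSync at request time, which
// throws in Cloudflare Workers; an imported constant is resolved at build time.
export const morganAgent = {
  id: 'agent-2', // matches the Morgan agent id used in the tests below
  name: 'Morgan',
  systemPrompt: MORGAN_SYSTEM_PROMPT,
}
```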

105 lines · 2.9 KiB · TypeScript

import { describe, it, expect, beforeEach, vi } from 'vitest'
import { POST } from '@/app/api/chat/route'
import { NextRequest } from 'next/server'
import { resetFlagsCache } from '@/lib/flags'

describe('/api/chat', () => {
  beforeEach(() => {
    resetFlagsCache()
    vi.clearAllMocks()
    process.env.IMAGE_UPLOADS_ENABLED = 'true'
    process.env.OPENROUTER_API_KEY = 'test-key'
    process.env.OPENROUTER_MODEL = 'openai/gpt-oss-120b'
  })

  it('requires message field', async () => {
    const request = new NextRequest('http://localhost:3000/api/chat', {
      method: 'POST',
      body: JSON.stringify({
        agentId: 'agent-1',
        sessionId: 'test-session',
        timestamp: new Date().toISOString(),
      }),
    })
    const response = await POST(request)
    const data = await response.json()
    expect(response.status).toBe(400)
    expect(data.error).toBe('Message is required')
  })

  it('requires agentId field', async () => {
    const request = new NextRequest('http://localhost:3000/api/chat', {
      method: 'POST',
      body: JSON.stringify({
        message: 'Hello',
        sessionId: 'test-session',
        timestamp: new Date().toISOString(),
      }),
    })
    const response = await POST(request)
    const data = await response.json()
    expect(response.status).toBe(400)
    expect(data.error).toBe('Agent ID is required')
  })

  it('rejects images when IMAGE_UPLOADS_ENABLED is false', async () => {
    process.env.IMAGE_UPLOADS_ENABLED = 'false'
    resetFlagsCache()
    const request = new NextRequest('http://localhost:3000/api/chat', {
      method: 'POST',
      body: JSON.stringify({
        message: 'Hello',
        agentId: 'agent-1',
        sessionId: 'test-session',
        timestamp: new Date().toISOString(),
        images: ['data:image/png;base64,abc123'],
      }),
    })
    const response = await POST(request)
    const data = await response.json()
    expect(response.status).toBe(403)
    expect(data.error).toBe('Image uploads are not enabled')
  })

  it('accepts valid chat request for standard agent', async () => {
    const request = new NextRequest('http://localhost:3000/api/chat', {
      method: 'POST',
      body: JSON.stringify({
        message: 'Hello agent',
        agentId: 'agent-1',
        sessionId: 'test-session',
        timestamp: new Date().toISOString(),
      }),
    })
    const response = await POST(request)
    // Should return 200
    expect(response.status).toBe(200)
  })

  it('accepts valid chat request for Morgan agent', async () => {
    const request = new NextRequest('http://localhost:3000/api/chat', {
      method: 'POST',
      body: JSON.stringify({
        message: 'Create an agent',
        agentId: 'agent-2',
        sessionId: 'test-session',
        timestamp: new Date().toISOString(),
      }),
    })
    const response = await POST(request)
    // Should return 200
    expect(response.status).toBe(200)
  })
})