// Blog content utilities: MDX frontmatter parsing, local filesystem and
// optional GitHub-backed post providers, and post formatting helpers.
import fs from 'fs'
|
|
import path from 'path'
|
|
|
|
/**
 * Frontmatter metadata parsed from the top of an MDX post.
 */
export type Metadata = {
  /** Post title ('Untitled' is synthesized when frontmatter is absent). */
  title: string
  /** Publish date string; date-only ('YYYY-MM-DD') or a full ISO timestamp. */
  publishedAt: string
  /** Short summary; used as the excerpt when non-empty. */
  summary: string
  /** Optional cover image reference. */
  image?: string
  /** Optional list of tag names. */
  tags?: string[]
}
|
|
|
|
/**
 * A fully loaded blog post: parsed frontmatter plus the raw body content.
 */
export type Post = {
  /** Parsed frontmatter for the post. */
  metadata: Metadata
  /** URL slug derived from the file name (extension removed). */
  slug: string
  /** Post body with the frontmatter block stripped. */
  content: string
  /** Which provider the post was loaded from: local filesystem or GitHub. */
  source: 'fs' | 'github'
}
|
|
|
|
function parseFrontmatter(fileContent: string) {
|
|
const frontmatterRegex = /---\s*([\s\S]*?)\s*---/
|
|
const match = frontmatterRegex.exec(fileContent)
|
|
if (!match) {
|
|
// No frontmatter, treat entire content as body with minimal metadata
|
|
return {
|
|
metadata: {
|
|
title: 'Untitled',
|
|
publishedAt: new Date().toISOString(),
|
|
summary: '',
|
|
} as Metadata,
|
|
content: fileContent.trim(),
|
|
}
|
|
}
|
|
|
|
const frontMatterBlock = match[1]
|
|
const content = fileContent.replace(frontmatterRegex, '').trim()
|
|
const frontMatterLines = frontMatterBlock.trim().split('\n')
|
|
const metadata: Partial<Metadata> = {}
|
|
|
|
frontMatterLines.forEach((line) => {
|
|
const [rawKey, ...valueArr] = line.split(': ')
|
|
const key = rawKey?.trim()
|
|
if (!key) return
|
|
let value = valueArr.join(': ').trim()
|
|
|
|
// Remove surrounding quotes
|
|
value = value.replace(/^['"](.*)['"]$/, '$1')
|
|
|
|
// Support simple array syntax for tags: [tag1, tag2]
|
|
if (key === 'tags') {
|
|
const arr =
|
|
value.startsWith('[') && value.endsWith(']')
|
|
? value
|
|
.slice(1, -1)
|
|
.split(',')
|
|
.map((v) => v.trim().replace(/^['"](.*)['"]$/, '$1'))
|
|
.filter(Boolean)
|
|
: value
|
|
.split(',')
|
|
.map((v) => v.trim())
|
|
.filter(Boolean)
|
|
;(metadata as Record<string, unknown>)[key] = arr
|
|
} else {
|
|
;(metadata as Record<string, unknown>)[key] = value
|
|
}
|
|
})
|
|
|
|
return { metadata: metadata as Metadata, content }
|
|
}
|
|
|
|
// ============ Local FS provider ============
|
|
function getMDXFiles(dir: string) {
|
|
return fs.existsSync(dir)
|
|
? fs.readdirSync(dir).filter((file) => path.extname(file) === '.mdx')
|
|
: []
|
|
}
|
|
|
|
function readMDXFile(filePath: string) {
|
|
const rawContent = fs.readFileSync(filePath, 'utf-8')
|
|
return parseFrontmatter(rawContent)
|
|
}
|
|
|
|
function getMDXData(dir: string): Post[] {
|
|
const mdxFiles = getMDXFiles(dir)
|
|
return mdxFiles.map((file) => {
|
|
const { metadata, content } = readMDXFile(path.join(dir, file))
|
|
const slug = path.basename(file, path.extname(file))
|
|
return {
|
|
metadata,
|
|
slug,
|
|
content,
|
|
source: 'fs',
|
|
}
|
|
})
|
|
}
|
|
|
|
function getFSPosts(): Post[] {
|
|
return getMDXData(path.join(process.cwd(), 'app', 'blog', 'posts'))
|
|
}
|
|
|
|
// ============ GitHub provider (optional) ============
|
|
const BLOG_REPO = process.env.BLOG_REPO // 'owner/repo'
|
|
const BLOG_PATH = process.env.BLOG_PATH || ''
|
|
const BLOG_BRANCH = process.env.BLOG_BRANCH || 'main'
|
|
const GITHUB_TOKEN = process.env.GITHUB_TOKEN
|
|
const BLOG_REVALIDATE_SECONDS = Number(process.env.BLOG_REVALIDATE_SECONDS || 300)
|
|
const BLOG_CACHE_TAG = process.env.BLOG_CACHE_TAG || 'blog-content'
|
|
|
|
/**
 * Subset of a GitHub contents-API directory-listing entry that this module
 * reads.
 */
type GithubContentItem = {
  /** File or directory name, e.g. 'my-post.mdx'. */
  name: string
  /** Repo-relative path used to fetch the file's content. */
  path: string
  /** Entry kind; only 'file' entries are fetched. */
  type: 'file' | 'dir'
}
|
|
|
|
/**
 * Subset of a GitHub contents-API file response.
 * `content` is the base64-encoded file body (decoded by the caller).
 */
type GithubFileResponse = {
  content?: string
}
|
|
|
|
async function githubApi<T>(url: string): Promise<T> {
|
|
const headers: Record<string, string> = {
|
|
Accept: 'application/vnd.github+json',
|
|
}
|
|
if (GITHUB_TOKEN) headers.Authorization = `Bearer ${GITHUB_TOKEN}`
|
|
|
|
const res = await fetch(url, {
|
|
headers,
|
|
next: { revalidate: BLOG_REVALIDATE_SECONDS, tags: [BLOG_CACHE_TAG] },
|
|
})
|
|
if (!res.ok) {
|
|
throw new Error(`GitHub API error ${res.status} on ${url}`)
|
|
}
|
|
return (await res.json()) as T
|
|
}
|
|
|
|
async function getGithubPosts(): Promise<Post[]> {
|
|
if (!BLOG_REPO) return []
|
|
const [owner, repo] = BLOG_REPO.split('/')
|
|
const base = `https://api.github.com/repos/${owner}/${repo}/contents`
|
|
const dir = BLOG_PATH ? `/${BLOG_PATH}` : ''
|
|
const listUrl = `${base}${dir}?ref=${encodeURIComponent(BLOG_BRANCH)}`
|
|
|
|
let items: GithubContentItem[]
|
|
try {
|
|
items = await githubApi<GithubContentItem[]>(listUrl)
|
|
} catch {
|
|
return []
|
|
}
|
|
|
|
const mdxItems = items.filter((it) => it.type === 'file' && it.name.endsWith('.mdx'))
|
|
|
|
const posts: Post[] = []
|
|
for (const item of mdxItems) {
|
|
// Fetch file content (base64) via contents API
|
|
const fileUrl = `${base}/${encodeURIComponent(item.path)}?ref=${encodeURIComponent(BLOG_BRANCH)}`
|
|
try {
|
|
const fileJson = await githubApi<GithubFileResponse>(fileUrl)
|
|
const contentBase64: string | undefined = fileJson.content
|
|
if (!contentBase64) continue
|
|
const raw = Buffer.from(contentBase64, 'base64').toString('utf8')
|
|
const { metadata, content } = parseFrontmatter(raw)
|
|
const slug = path.basename(item.name, '.mdx')
|
|
posts.push({
|
|
metadata,
|
|
slug,
|
|
content,
|
|
source: 'github',
|
|
})
|
|
} catch {
|
|
// skip on individual file failure
|
|
continue
|
|
}
|
|
}
|
|
return posts
|
|
}
|
|
|
|
// ============ Public API ============
|
|
|
|
// Backward-compatible local-only function (used by older imports)
|
|
export function getBlogPosts(): Post[] {
|
|
return getFSPosts()
|
|
}
|
|
|
|
// New unified provider that merges FS with optional GitHub content
|
|
export async function getAllPosts(): Promise<Post[]> {
|
|
const fsPosts = getFSPosts()
|
|
const ghPosts = await getGithubPosts().catch(() => [])
|
|
// Merge by slug, with GitHub taking precedence on duplicates
|
|
const map = new Map<string, Post>()
|
|
for (const p of fsPosts) map.set(p.slug, p)
|
|
for (const p of ghPosts) map.set(p.slug, p)
|
|
return Array.from(map.values())
|
|
.filter((p) => !!p.metadata?.publishedAt && !!p.metadata?.title)
|
|
.sort((a, b) => {
|
|
const da = new Date(a.metadata.publishedAt).getTime()
|
|
const db = new Date(b.metadata.publishedAt).getTime()
|
|
return db - da
|
|
})
|
|
}
|
|
|
|
export function formatDate(date: string, includeRelative = false) {
|
|
const currentDate = new Date()
|
|
if (!date.includes('T')) {
|
|
date = `${date}T00:00:00`
|
|
}
|
|
const targetDate = new Date(date)
|
|
|
|
const yearsAgo = currentDate.getFullYear() - targetDate.getFullYear()
|
|
const monthsAgo =
|
|
currentDate.getMonth() -
|
|
targetDate.getMonth() +
|
|
yearsAgo * 12
|
|
const daysAgo = Math.floor(
|
|
(currentDate.getTime() - targetDate.getTime()) / (1000 * 60 * 60 * 24)
|
|
)
|
|
|
|
let formattedDate = ''
|
|
|
|
if (yearsAgo > 0) {
|
|
formattedDate = `${yearsAgo}y ago`
|
|
} else if (monthsAgo > 0) {
|
|
formattedDate = `${monthsAgo}mo ago`
|
|
} else if (daysAgo > 0) {
|
|
formattedDate = `${daysAgo}d ago`
|
|
} else {
|
|
formattedDate = 'Today'
|
|
}
|
|
|
|
const fullDate = targetDate.toLocaleString('en-us', {
|
|
month: 'long',
|
|
day: 'numeric',
|
|
year: 'numeric',
|
|
})
|
|
|
|
if (!includeRelative) {
|
|
return fullDate
|
|
}
|
|
|
|
return `${fullDate} (${formattedDate})`
|
|
}
|
|
|
|
export function getReadingTime(content: string) {
|
|
const words = (content || '')
|
|
.trim()
|
|
.split(/\s+/)
|
|
.filter(Boolean).length
|
|
const minutes = Math.max(1, Math.ceil(words / 200))
|
|
return { minutes, text: `${minutes} min read`, words }
|
|
}
|
|
|
|
export function getExcerpt(summary?: string, content?: string, maxChars = 220) {
|
|
if (summary && summary.trim().length > 0) return summary.trim()
|
|
if (!content) return ''
|
|
// Get first non-empty paragraph
|
|
const firstPara =
|
|
content
|
|
.split(/\n{2,}/)
|
|
.map((s) => s.trim())
|
|
.find((p) => p.length > 0) || ''
|
|
const clean = firstPara
|
|
.replace(/```[\s\S]*?```/g, '') // remove code fences
|
|
.replace(/`[^`]*`/g, '') // remove inline code
|
|
.replace(/[#>*_~\[\]()\-]/g, '') // remove some md tokens
|
|
.replace(/\s+/g, ' ')
|
|
.trim()
|
|
if (clean.length <= maxChars) return clean
|
|
return clean.slice(0, maxChars).replace(/\s+\S*$/, '') + '…'
|
|
}
|
|
|
|
export function findAdjacentPosts(posts: Post[], slug: string) {
|
|
const idx = posts.findIndex((p) => p.slug === slug)
|
|
if (idx === -1) return { prev: undefined, next: undefined }
|
|
// posts expected sorted desc (newest to oldest)
|
|
const prev = idx > 0 ? posts[idx - 1] : undefined // newer
|
|
const next = idx < posts.length - 1 ? posts[idx + 1] : undefined // older
|
|
return { prev, next }
|
|
}
|