🖼️ added images support
- TODO: investigate how image resizing can be specified from the .md markup
This commit is contained in:
85
lib/image-utils.ts
Normal file
85
lib/image-utils.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { promises as fs } from 'fs'
|
||||
import path from 'path'
|
||||
|
||||
export async function imageExists(imagePath: string): Promise<boolean> {
|
||||
try {
|
||||
const fullPath = path.join(process.cwd(), 'public', imagePath)
|
||||
await fs.access(fullPath)
|
||||
return true
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
export async function getImageDimensions(
|
||||
imagePath: string
|
||||
): Promise<{ width: number; height: number } | null> {
|
||||
try {
|
||||
const fullPath = path.join(process.cwd(), 'public', imagePath)
|
||||
const buffer = await fs.readFile(fullPath)
|
||||
|
||||
if (imagePath.endsWith('.png')) {
|
||||
const width = buffer.readUInt32BE(16)
|
||||
const height = buffer.readUInt32BE(20)
|
||||
return { width, height }
|
||||
}
|
||||
|
||||
if (imagePath.endsWith('.jpg') || imagePath.endsWith('.jpeg')) {
|
||||
let offset = 2
|
||||
while (offset < buffer.length) {
|
||||
if (buffer[offset] !== 0xff) break
|
||||
|
||||
const marker = buffer[offset + 1]
|
||||
if (marker === 0xc0 || marker === 0xc2) {
|
||||
const height = buffer.readUInt16BE(offset + 5)
|
||||
const width = buffer.readUInt16BE(offset + 7)
|
||||
return { width, height }
|
||||
}
|
||||
|
||||
offset += 2 + buffer.readUInt16BE(offset + 2)
|
||||
}
|
||||
}
|
||||
|
||||
return null
|
||||
} catch {
|
||||
return null
|
||||
}
|
||||
}
|
||||
|
||||
export function getOptimizedImageUrl(
|
||||
src: string,
|
||||
width?: number,
|
||||
height?: number,
|
||||
quality: number = 75
|
||||
): string {
|
||||
const params = new URLSearchParams()
|
||||
|
||||
if (width) params.set('w', width.toString())
|
||||
if (height) params.set('h', height.toString())
|
||||
params.set('q', quality.toString())
|
||||
|
||||
const queryString = params.toString()
|
||||
return queryString ? `${src}?${queryString}` : src
|
||||
}
|
||||
|
||||
export async function getImageWithPlaceholder(
|
||||
imagePath: string
|
||||
): Promise<{ src: string; width: number; height: number; placeholder?: string }> {
|
||||
const dimensions = await getImageDimensions(imagePath)
|
||||
|
||||
if (!dimensions) {
|
||||
return {
|
||||
src: imagePath,
|
||||
width: 800,
|
||||
height: 600,
|
||||
}
|
||||
}
|
||||
|
||||
const placeholder = `data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='${dimensions.width}' height='${dimensions.height}'%3E%3Crect width='${dimensions.width}' height='${dimensions.height}' fill='%2318181b'/%3E%3C/svg%3E`
|
||||
|
||||
return {
|
||||
src: imagePath,
|
||||
...dimensions,
|
||||
placeholder,
|
||||
}
|
||||
}
|
||||
@@ -1,8 +1,11 @@
|
||||
import fs from 'fs'
|
||||
import path from 'path'
|
||||
import matter from 'gray-matter'
|
||||
import { remark } from 'remark'
|
||||
import remarkGfm from 'remark-gfm'
|
||||
import { FrontMatter, Post } from './types/frontmatter'
|
||||
import { generateExcerpt } from './utils'
|
||||
import { remarkCopyImages } from './remark-copy-images'
|
||||
|
||||
const POSTS_PATH = path.join(process.cwd(), 'content', 'blog')
|
||||
|
||||
@@ -52,7 +55,7 @@ export function validateFrontmatter(data: any): FrontMatter {
|
||||
}
|
||||
}
|
||||
|
||||
export function getPostBySlug(slug: string | string[]): Post | null {
|
||||
export async function getPostBySlug(slug: string | string[]): Promise<Post | null> {
|
||||
const slugArray = Array.isArray(slug) ? slug : slug.split('/')
|
||||
const sanitized = slugArray.map(s => sanitizePath(s))
|
||||
const fullPath = path.join(POSTS_PATH, ...sanitized) + '.md'
|
||||
@@ -65,19 +68,30 @@ export function getPostBySlug(slug: string | string[]): Post | null {
|
||||
const { data, content } = matter(fileContents)
|
||||
const frontmatter = validateFrontmatter(data)
|
||||
|
||||
const processed = await remark()
|
||||
.use(remarkGfm)
|
||||
.use(remarkCopyImages, {
|
||||
contentDir: 'content/blog',
|
||||
publicDir: 'public/blog',
|
||||
currentSlug: sanitized.join('/'),
|
||||
})
|
||||
.process(content)
|
||||
|
||||
const processedContent = processed.toString()
|
||||
|
||||
return {
|
||||
slug: sanitized.join('/'),
|
||||
frontmatter,
|
||||
content,
|
||||
readingTime: calculateReadingTime(content),
|
||||
excerpt: generateExcerpt(content),
|
||||
content: processedContent,
|
||||
readingTime: calculateReadingTime(processedContent),
|
||||
excerpt: generateExcerpt(processedContent),
|
||||
}
|
||||
}
|
||||
|
||||
export function getAllPosts(includeContent = false): Post[] {
|
||||
export async function getAllPosts(includeContent = false): Promise<Post[]> {
|
||||
const posts: Post[] = []
|
||||
|
||||
function walkDir(dir: string, prefix = ''): void {
|
||||
async function walkDir(dir: string, prefix = ''): Promise<void> {
|
||||
const files = fs.readdirSync(dir)
|
||||
|
||||
for (const file of files) {
|
||||
@@ -85,11 +99,11 @@ export function getAllPosts(includeContent = false): Post[] {
|
||||
const stat = fs.statSync(filePath)
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
walkDir(filePath, prefix ? `${prefix}/${file}` : file)
|
||||
await walkDir(filePath, prefix ? `${prefix}/${file}` : file)
|
||||
} else if (file.endsWith('.md')) {
|
||||
const slug = prefix ? `${prefix}/${file.replace(/\.md$/, '')}` : file.replace(/\.md$/, '')
|
||||
try {
|
||||
const post = getPostBySlug(slug.split('/'))
|
||||
const post = await getPostBySlug(slug.split('/'))
|
||||
if (post && !post.frontmatter.draft) {
|
||||
posts.push(includeContent ? post : { ...post, content: '' })
|
||||
}
|
||||
@@ -101,7 +115,7 @@ export function getAllPosts(includeContent = false): Post[] {
|
||||
}
|
||||
|
||||
if (fs.existsSync(POSTS_PATH)) {
|
||||
walkDir(POSTS_PATH)
|
||||
await walkDir(POSTS_PATH)
|
||||
}
|
||||
|
||||
return posts.sort(
|
||||
@@ -110,10 +124,10 @@ export function getAllPosts(includeContent = false): Post[] {
|
||||
}
|
||||
|
||||
export async function getRelatedPosts(currentSlug: string, limit = 3): Promise<Post[]> {
|
||||
const currentPost = getPostBySlug(currentSlug)
|
||||
const currentPost = await getPostBySlug(currentSlug)
|
||||
if (!currentPost) return []
|
||||
|
||||
const allPosts = getAllPosts(false)
|
||||
const allPosts = await getAllPosts(false)
|
||||
const { category, tags } = currentPost.frontmatter
|
||||
|
||||
const scored = allPosts
|
||||
|
||||
146
lib/remark-copy-images.ts
Normal file
146
lib/remark-copy-images.ts
Normal file
@@ -0,0 +1,146 @@
|
||||
import { visit } from 'unist-util-visit'
|
||||
import fs from 'fs/promises'
|
||||
import path from 'path'
|
||||
import { Node } from 'unist'
|
||||
|
||||
interface ImageNode extends Node {
|
||||
type: 'image'
|
||||
url: string
|
||||
alt?: string
|
||||
title?: string
|
||||
}
|
||||
|
||||
interface Options {
|
||||
contentDir: string
|
||||
publicDir: string
|
||||
currentSlug: string
|
||||
}
|
||||
|
||||
function isRelativePath(url: string): boolean {
|
||||
// Matches: ./, ../, or bare filenames without protocol/absolute path
|
||||
return (
|
||||
url.startsWith('./') || url.startsWith('../') || (!url.startsWith('/') && !url.includes('://'))
|
||||
)
|
||||
}
|
||||
|
||||
function stripQueryParams(url: string): string {
|
||||
return url.split('?')[0]
|
||||
}
|
||||
|
||||
// In-memory cache to prevent duplicate copies across parallel compilations
|
||||
const copiedFiles = new Set<string>()
|
||||
|
||||
async function copyAndRewritePath(node: ImageNode, options: Options): Promise<void> {
|
||||
const { contentDir, publicDir, currentSlug } = options
|
||||
|
||||
const urlWithoutParams = stripQueryParams(node.url)
|
||||
const slugParts = currentSlug.split('/')
|
||||
const contentPostDir = path.join(process.cwd(), contentDir, ...slugParts.slice(0, -1))
|
||||
|
||||
const sourcePath = path.resolve(contentPostDir, urlWithoutParams)
|
||||
|
||||
if (sourcePath.includes('..') && !sourcePath.startsWith(path.join(process.cwd(), contentDir))) {
|
||||
throw new Error(`Invalid image path: ${node.url} (path traversal detected)`)
|
||||
}
|
||||
|
||||
const relativeToContent = path.relative(path.join(process.cwd(), contentDir), sourcePath)
|
||||
const destPath = path.join(process.cwd(), publicDir, relativeToContent)
|
||||
|
||||
try {
|
||||
await fs.access(sourcePath)
|
||||
} catch {
|
||||
throw new Error(
|
||||
`Image not found: ${sourcePath}\nReferenced in: ${currentSlug}\nURL: ${node.url}`
|
||||
)
|
||||
}
|
||||
|
||||
const destDir = path.dirname(destPath)
|
||||
await fs.mkdir(destDir, { recursive: true })
|
||||
|
||||
// Deduplication: check cache first
|
||||
const cacheKey = `${sourcePath}:${destPath}`
|
||||
if (copiedFiles.has(cacheKey)) {
|
||||
// Already copied, just rewrite URL
|
||||
const publicUrl =
|
||||
'/' + path.relative(path.join(process.cwd(), 'public'), destPath).replace(/\\/g, '/')
|
||||
const queryParams = node.url.includes('?') ? '?' + node.url.split('?')[1] : ''
|
||||
node.url = publicUrl + queryParams
|
||||
return
|
||||
}
|
||||
|
||||
// Check if destination exists with matching size
|
||||
try {
|
||||
const [sourceStat, destStat] = await Promise.all([
|
||||
fs.stat(sourcePath),
|
||||
fs.stat(destPath).catch(() => null),
|
||||
])
|
||||
|
||||
if (destStat && sourceStat.size === destStat.size) {
|
||||
// File already exists and matches, skip copy
|
||||
copiedFiles.add(cacheKey)
|
||||
const publicUrl =
|
||||
'/' + path.relative(path.join(process.cwd(), 'public'), destPath).replace(/\\/g, '/')
|
||||
const queryParams = node.url.includes('?') ? '?' + node.url.split('?')[1] : ''
|
||||
node.url = publicUrl + queryParams
|
||||
return
|
||||
}
|
||||
} catch (error) {
|
||||
// Stat failed, proceed with copy
|
||||
}
|
||||
|
||||
// Attempt copy with EBUSY retry logic
|
||||
try {
|
||||
await fs.copyFile(sourcePath, destPath)
|
||||
copiedFiles.add(cacheKey)
|
||||
} catch (error: unknown) {
|
||||
const err = error as NodeJS.ErrnoException
|
||||
if (err.code === 'EBUSY') {
|
||||
// Race condition: another process is copying this file
|
||||
// Wait briefly and check if file now exists
|
||||
await new Promise(resolve => setTimeout(resolve, 100))
|
||||
|
||||
try {
|
||||
await fs.access(destPath)
|
||||
// File exists now, verify integrity
|
||||
const [sourceStat, destStat] = await Promise.all([fs.stat(sourcePath), fs.stat(destPath)])
|
||||
|
||||
if (sourceStat.size === destStat.size) {
|
||||
// Successfully copied by another process
|
||||
copiedFiles.add(cacheKey)
|
||||
} else {
|
||||
// File corrupted, retry once
|
||||
await fs.copyFile(sourcePath, destPath)
|
||||
copiedFiles.add(cacheKey)
|
||||
}
|
||||
} catch {
|
||||
// File still doesn't exist, retry copy
|
||||
await fs.copyFile(sourcePath, destPath)
|
||||
copiedFiles.add(cacheKey)
|
||||
}
|
||||
} else {
|
||||
// Unknown error, rethrow
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
const publicUrl =
|
||||
'/' + path.relative(path.join(process.cwd(), 'public'), destPath).replace(/\\/g, '/')
|
||||
|
||||
const queryParams = node.url.includes('?') ? '?' + node.url.split('?')[1] : ''
|
||||
node.url = publicUrl + queryParams
|
||||
}
|
||||
|
||||
export function remarkCopyImages(options: Options) {
|
||||
return async (tree: Node) => {
|
||||
const promises: Promise<void>[] = []
|
||||
|
||||
visit(tree, 'image', (node: Node) => {
|
||||
const imageNode = node as ImageNode
|
||||
if (isRelativePath(imageNode.url)) {
|
||||
promises.push(copyAndRewritePath(imageNode, options))
|
||||
}
|
||||
})
|
||||
|
||||
await Promise.all(promises)
|
||||
}
|
||||
}
|
||||
110
lib/tags.ts
110
lib/tags.ts
@@ -1,15 +1,15 @@
|
||||
import { getAllPosts } from './markdown';
|
||||
import type { Post } from './types/frontmatter';
|
||||
import { getAllPosts } from './markdown'
|
||||
import type { Post } from './types/frontmatter'
|
||||
|
||||
export interface TagInfo {
|
||||
name: string;
|
||||
slug: string;
|
||||
count: number;
|
||||
name: string
|
||||
slug: string
|
||||
count: number
|
||||
}
|
||||
|
||||
export interface TagWithPosts {
|
||||
tag: TagInfo;
|
||||
posts: Post[];
|
||||
tag: TagInfo
|
||||
posts: Post[]
|
||||
}
|
||||
|
||||
export function slugifyTag(tag: string): string {
|
||||
@@ -22,110 +22,108 @@ export function slugifyTag(tag: string): string {
|
||||
.replace(/\s+/g, '-')
|
||||
.replace(/[^a-z0-9-]/g, '')
|
||||
.replace(/-+/g, '-')
|
||||
.replace(/^-|-$/g, '');
|
||||
.replace(/^-|-$/g, '')
|
||||
}
|
||||
|
||||
export async function getAllTags(): Promise<TagInfo[]> {
|
||||
const posts = getAllPosts();
|
||||
const tagMap = new Map<string, number>();
|
||||
const posts = await getAllPosts()
|
||||
const tagMap = new Map<string, number>()
|
||||
|
||||
posts.forEach(post => {
|
||||
const tags = post.frontmatter.tags?.filter(Boolean) || [];
|
||||
const tags = post.frontmatter.tags?.filter(Boolean) || []
|
||||
tags.forEach(tag => {
|
||||
const count = tagMap.get(tag) || 0;
|
||||
tagMap.set(tag, count + 1);
|
||||
});
|
||||
});
|
||||
const count = tagMap.get(tag) || 0
|
||||
tagMap.set(tag, count + 1)
|
||||
})
|
||||
})
|
||||
|
||||
return Array.from(tagMap.entries())
|
||||
.map(([name, count]) => ({
|
||||
name,
|
||||
slug: slugifyTag(name),
|
||||
count
|
||||
count,
|
||||
}))
|
||||
.sort((a, b) => b.count - a.count);
|
||||
.sort((a, b) => b.count - a.count)
|
||||
}
|
||||
|
||||
export async function getPostsByTag(tagSlug: string): Promise<Post[]> {
|
||||
const posts = getAllPosts();
|
||||
const posts = await getAllPosts()
|
||||
|
||||
return posts.filter(post => {
|
||||
const tags = post.frontmatter.tags?.filter(Boolean) || [];
|
||||
return tags.some(tag => slugifyTag(tag) === tagSlug);
|
||||
});
|
||||
const tags = post.frontmatter.tags?.filter(Boolean) || []
|
||||
return tags.some(tag => slugifyTag(tag) === tagSlug)
|
||||
})
|
||||
}
|
||||
|
||||
export async function getTagInfo(tagSlug: string): Promise<TagInfo | null> {
|
||||
const allTags = await getAllTags();
|
||||
return allTags.find(tag => tag.slug === tagSlug) || null;
|
||||
const allTags = await getAllTags()
|
||||
return allTags.find(tag => tag.slug === tagSlug) || null
|
||||
}
|
||||
|
||||
export async function getPopularTags(limit = 10): Promise<TagInfo[]> {
|
||||
const allTags = await getAllTags();
|
||||
return allTags.slice(0, limit);
|
||||
const allTags = await getAllTags()
|
||||
return allTags.slice(0, limit)
|
||||
}
|
||||
|
||||
export async function getRelatedTags(tagSlug: string, limit = 5): Promise<TagInfo[]> {
|
||||
const posts = await getPostsByTag(tagSlug);
|
||||
const relatedTagMap = new Map<string, number>();
|
||||
const posts = await getPostsByTag(tagSlug)
|
||||
const relatedTagMap = new Map<string, number>()
|
||||
|
||||
posts.forEach(post => {
|
||||
const tags = post.frontmatter.tags?.filter(Boolean) || [];
|
||||
const tags = post.frontmatter.tags?.filter(Boolean) || []
|
||||
tags.forEach(tag => {
|
||||
const slug = slugifyTag(tag);
|
||||
const slug = slugifyTag(tag)
|
||||
if (slug !== tagSlug) {
|
||||
const count = relatedTagMap.get(tag) || 0;
|
||||
relatedTagMap.set(tag, count + 1);
|
||||
const count = relatedTagMap.get(tag) || 0
|
||||
relatedTagMap.set(tag, count + 1)
|
||||
}
|
||||
});
|
||||
});
|
||||
})
|
||||
})
|
||||
|
||||
return Array.from(relatedTagMap.entries())
|
||||
.map(([name, count]) => ({
|
||||
name,
|
||||
slug: slugifyTag(name),
|
||||
count
|
||||
count,
|
||||
}))
|
||||
.sort((a, b) => b.count - a.count)
|
||||
.slice(0, limit);
|
||||
.slice(0, limit)
|
||||
}
|
||||
|
||||
export function validateTags(tags: any): string[] {
|
||||
if (!tags) return [];
|
||||
if (!tags) return []
|
||||
|
||||
if (!Array.isArray(tags)) {
|
||||
console.warn('Tags should be an array');
|
||||
return [];
|
||||
console.warn('Tags should be an array')
|
||||
return []
|
||||
}
|
||||
|
||||
const validTags = tags
|
||||
.filter(tag => tag && typeof tag === 'string')
|
||||
.slice(0, 3);
|
||||
const validTags = tags.filter(tag => tag && typeof tag === 'string').slice(0, 3)
|
||||
|
||||
if (tags.length > 3) {
|
||||
console.warn(`Too many tags provided (${tags.length}). Limited to first 3.`);
|
||||
console.warn(`Too many tags provided (${tags.length}). Limited to first 3.`)
|
||||
}
|
||||
|
||||
return validTags;
|
||||
return validTags
|
||||
}
|
||||
|
||||
export async function getTagCloud(): Promise<Array<TagInfo & { size: 'sm' | 'md' | 'lg' | 'xl' }>> {
|
||||
const tags = await getAllTags();
|
||||
if (tags.length === 0) return [];
|
||||
const tags = await getAllTags()
|
||||
if (tags.length === 0) return []
|
||||
|
||||
const maxCount = Math.max(...tags.map(t => t.count));
|
||||
const minCount = Math.min(...tags.map(t => t.count));
|
||||
const range = maxCount - minCount || 1;
|
||||
const maxCount = Math.max(...tags.map(t => t.count))
|
||||
const minCount = Math.min(...tags.map(t => t.count))
|
||||
const range = maxCount - minCount || 1
|
||||
|
||||
return tags.map(tag => {
|
||||
const normalized = (tag.count - minCount) / range;
|
||||
let size: 'sm' | 'md' | 'lg' | 'xl';
|
||||
const normalized = (tag.count - minCount) / range
|
||||
let size: 'sm' | 'md' | 'lg' | 'xl'
|
||||
|
||||
if (normalized < 0.25) size = 'sm';
|
||||
else if (normalized < 0.5) size = 'md';
|
||||
else if (normalized < 0.75) size = 'lg';
|
||||
else size = 'xl';
|
||||
if (normalized < 0.25) size = 'sm'
|
||||
else if (normalized < 0.5) size = 'md'
|
||||
else if (normalized < 0.75) size = 'lg'
|
||||
else size = 'xl'
|
||||
|
||||
return { ...tag, size };
|
||||
});
|
||||
return { ...tag, size }
|
||||
})
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user