Implement automated scheduled publishing for blog
- Enhanced blog library to read from both posts/ and scheduled/ directories - Added publishDate filtering with real-time checking (no cron jobs needed) - Support for draft posts and recursive directory scanning - Posts automatically appear when publishDate is reached - Containerized solution that works without external scheduling - Added publishDate field to blog types and updated existing scheduled post Tested and verified: ✅ Past-dated posts appear automatically ✅ Future-dated posts remain hidden until publish time ✅ Draft posts are excluded regardless of date ✅ Maintains existing functionality for regular posts 🤖 Generated with [Claude Code](https://claude.ai/code) Co-Authored-By: Claude <noreply@anthropic.com>
This commit is contained in:
164
lib/blog.ts
164
lib/blog.ts
@@ -4,82 +4,116 @@ import matter from 'gray-matter'
|
||||
import { BlogPost, BlogMeta } from '@/types/blog'
|
||||
|
||||
const postsDirectory = path.join(process.cwd(), 'content/posts')
|
||||
const scheduledDirectory = path.join(process.cwd(), 'content/scheduled')
|
||||
|
||||
// Helper function to check if a post should be published
|
||||
function shouldPublishPost(post: BlogMeta): boolean {
|
||||
// If no publishDate is set, use the date field
|
||||
const publishDate = post.publishDate || post.date
|
||||
if (!publishDate) return true
|
||||
|
||||
// Don't publish draft posts
|
||||
if (post.draft === true) return false
|
||||
|
||||
// Check if publish date has passed
|
||||
const publishDateTime = new Date(publishDate)
|
||||
const now = new Date()
|
||||
|
||||
return publishDateTime <= now
|
||||
}
|
||||
|
||||
// Helper function to recursively read markdown files from a directory
|
||||
function readMarkdownFiles(directory: string, basePath: string = ''): BlogPost[] {
|
||||
if (!fs.existsSync(directory)) {
|
||||
return []
|
||||
}
|
||||
|
||||
const items = fs.readdirSync(directory)
|
||||
const posts: BlogPost[] = []
|
||||
|
||||
items.forEach((item) => {
|
||||
const fullPath = path.join(directory, item)
|
||||
const stat = fs.statSync(fullPath)
|
||||
|
||||
if (stat.isDirectory()) {
|
||||
// Recursively read subdirectories
|
||||
const subPosts = readMarkdownFiles(fullPath, path.join(basePath, item))
|
||||
posts.push(...subPosts)
|
||||
} else if (item.endsWith('.md')) {
|
||||
try {
|
||||
// Read markdown file as string
|
||||
const fileContents = fs.readFileSync(fullPath, 'utf8')
|
||||
|
||||
// Use gray-matter to parse the post metadata section
|
||||
const matterResult = matter(fileContents)
|
||||
const meta = matterResult.data as BlogMeta
|
||||
|
||||
// Create slug from filename (remove .md extension)
|
||||
const slug = item.replace(/\.md$/, '')
|
||||
|
||||
// Calculate reading time (average 200 words per minute)
|
||||
const wordCount = matterResult.content.split(/\s+/).length
|
||||
const readingTime = Math.ceil(wordCount / 200)
|
||||
|
||||
const post: BlogPost = {
|
||||
slug,
|
||||
content: matterResult.content,
|
||||
readingTime,
|
||||
...meta,
|
||||
}
|
||||
|
||||
posts.push(post)
|
||||
} catch (error) {
|
||||
console.error(`Error reading markdown file ${fullPath}:`, error)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return posts
|
||||
}
|
||||
|
||||
export function getSortedPostsData(): BlogPost[] {
|
||||
// Get file names under /content/posts
|
||||
const fileNames = fs.readdirSync(postsDirectory)
|
||||
const allPostsData = fileNames
|
||||
.filter((fileName) => fileName.endsWith('.md'))
|
||||
.map((fileName) => {
|
||||
// Remove ".md" from file name to get id
|
||||
const slug = fileName.replace(/\.md$/, '')
|
||||
|
||||
// Read markdown file as string
|
||||
const fullPath = path.join(postsDirectory, fileName)
|
||||
const fileContents = fs.readFileSync(fullPath, 'utf8')
|
||||
|
||||
// Use gray-matter to parse the post metadata section
|
||||
const matterResult = matter(fileContents)
|
||||
const meta = matterResult.data as BlogMeta
|
||||
|
||||
// Calculate reading time (average 200 words per minute)
|
||||
const wordCount = matterResult.content.split(/\s+/).length
|
||||
const readingTime = Math.ceil(wordCount / 200)
|
||||
|
||||
return {
|
||||
slug,
|
||||
content: matterResult.content,
|
||||
readingTime,
|
||||
...meta,
|
||||
} as BlogPost
|
||||
})
|
||||
|
||||
// Sort posts by date
|
||||
return allPostsData.sort((a, b) => {
|
||||
if (a.date < b.date) {
|
||||
return 1
|
||||
} else {
|
||||
return -1
|
||||
}
|
||||
// Read posts from both published and scheduled directories
|
||||
const publishedPosts = readMarkdownFiles(postsDirectory)
|
||||
const scheduledPosts = readMarkdownFiles(scheduledDirectory)
|
||||
|
||||
// Combine all posts
|
||||
const allPosts = [...publishedPosts, ...scheduledPosts]
|
||||
|
||||
// Filter posts that should be published
|
||||
const publishablePosts = allPosts.filter(post => shouldPublishPost(post))
|
||||
|
||||
// Sort posts by date (newest first)
|
||||
return publishablePosts.sort((a, b) => {
|
||||
const dateA = new Date(a.date)
|
||||
const dateB = new Date(b.date)
|
||||
return dateB.getTime() - dateA.getTime()
|
||||
})
|
||||
}
|
||||
|
||||
export function getAllPostSlugs() {
|
||||
const fileNames = fs.readdirSync(postsDirectory)
|
||||
return fileNames
|
||||
.filter((fileName) => fileName.endsWith('.md'))
|
||||
.map((fileName) => {
|
||||
return {
|
||||
params: {
|
||||
slug: fileName.replace(/\.md$/, ''),
|
||||
},
|
||||
}
|
||||
})
|
||||
// Get all publishable posts and extract their slugs
|
||||
const posts = getSortedPostsData()
|
||||
return posts.map((post) => {
|
||||
return {
|
||||
params: {
|
||||
slug: post.slug,
|
||||
},
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export function getPostData(slug: string): BlogPost | null {
|
||||
try {
|
||||
const fullPath = path.join(postsDirectory, `${slug}.md`)
|
||||
const fileContents = fs.readFileSync(fullPath, 'utf8')
|
||||
|
||||
// Use gray-matter to parse the post metadata section
|
||||
const matterResult = matter(fileContents)
|
||||
const meta = matterResult.data as BlogMeta
|
||||
|
||||
// Calculate reading time (average 200 words per minute)
|
||||
const wordCount = matterResult.content.split(/\s+/).length
|
||||
const readingTime = Math.ceil(wordCount / 200)
|
||||
|
||||
return {
|
||||
slug,
|
||||
content: matterResult.content,
|
||||
readingTime,
|
||||
...meta,
|
||||
} as BlogPost
|
||||
} catch (error) {
|
||||
console.error(`Error reading post ${slug}:`, error)
|
||||
// Find the post in our sorted posts data
|
||||
const posts = getSortedPostsData()
|
||||
const post = posts.find(p => p.slug === slug)
|
||||
|
||||
if (!post) {
|
||||
console.error(`Post with slug '${slug}' not found or not yet publishable`)
|
||||
return null
|
||||
}
|
||||
|
||||
return post
|
||||
}
|
||||
|
||||
export function getFeaturedPosts(): BlogPost[] {
|
||||
|
||||
Reference in New Issue
Block a user