package main

import (
	"bufio"
	"embed"
	"fmt"
	"path"
	"path/filepath"
	"sort"
	"strings"
	"time"

	"git.valxntine.dev/valxntine/blog/models"
	chromahtml "github.com/alecthomas/chroma/v2/formatters/html"
	"github.com/yuin/goldmark"
	highlighting "github.com/yuin/goldmark-highlighting/v2"
	"github.com/yuin/goldmark/extension"
	"github.com/yuin/goldmark/parser"
	"github.com/yuin/goldmark/renderer/html"
)

//go:embed posts/*.md
var postsFS embed.FS

// In-memory post cache; load rebuilds it once it is older than ten minutes.
var (
	postCache     []models.Post
	postCacheTime time.Time
	cacheValid    = false
)

// PostMeta holds the fields parsed from a post's frontmatter block.
type PostMeta struct {
	Title     string
	Date      string
	Tags      []string
	Excerpt   string
	WordCount int
	Draft     bool
}

// md is the shared goldmark converter: GFM with Chroma syntax highlighting,
// auto heading IDs, hard wraps, XHTML output, and raw HTML passthrough.
var md = goldmark.New(
	goldmark.WithExtensions(
		extension.GFM,
		extension.Table,
		extension.Strikethrough,
		extension.Linkify,
		extension.TaskList,
		highlighting.NewHighlighting(
			highlighting.WithStyle("monokai"),
			highlighting.WithFormatOptions(
				chromahtml.WithLineNumbers(true),
				// chromahtml.WithClasses(true),
			),
		),
	),
	goldmark.WithParserOptions(
		parser.WithAutoHeadingID(),
	),
	goldmark.WithRendererOptions(
		html.WithHardWraps(),
		html.WithXHTML(),
		html.WithUnsafe(),
	),
)

// parseFrontmatter splits a post into its frontmatter metadata and markdown
// body. The frontmatter must start on the first line and be delimited by
// "---" lines; values may optionally be wrapped in double quotes.
func parseFrontmatter(content string) (PostMeta, string, error) {
	lines := strings.Split(content, "\n")
	if len(lines) < 2 || lines[0] != "---" {
		return PostMeta{}, content, fmt.Errorf("no frontmatter found")
	}

	meta := PostMeta{}
	var endID int
	for i := 1; i < len(lines); i++ {
		if lines[i] == "---" {
			endID = i
			break
		}
	}
	if endID == 0 {
		return meta, content, fmt.Errorf("frontmatter not closed")
	}

	for i := 1; i < endID; i++ {
		line := strings.TrimSpace(lines[i])
		if line == "" {
			continue
		}
		parts := strings.SplitN(line, ":", 2)
		if len(parts) != 2 {
			continue
		}
		k := strings.TrimSpace(parts[0])
		v := strings.TrimSpace(parts[1])
		// Strip surrounding double quotes, if any.
		if len(v) >= 2 && v[0] == '"' && v[len(v)-1] == '"' {
			v = v[1 : len(v)-1]
		}
		switch strings.ToLower(k) {
		case "title":
			meta.Title = v
		case "date":
			meta.Date = v
		case "excerpt":
			meta.Excerpt = v
		case "tags":
			if v != "" {
				tags := strings.Split(v, ",")
				for _, t := range tags {
					meta.Tags = append(meta.Tags, strings.TrimSpace(t))
				}
			}
		case "draft":
			meta.Draft = strings.ToLower(v) == "true"
		}
	}

	body := strings.Join(lines[endID+1:], "\n")
	return meta, body, nil
}

// generateSlug derives a URL slug from a filename, dropping the extension
// and any leading "YYYY-MM-DD-" date prefix.
func generateSlug(fn string) string {
	name := filepath.Base(fn)
	name = strings.TrimSuffix(name, filepath.Ext(name))
	if len(name) > 11 && name[4] == '-' && name[7] == '-' && name[10] == '-' {
		name = name[11:]
	}
	return name
}

// countWords counts whitespace-separated words in the raw markdown body.
func countWords(c string) int {
	sc := bufio.NewScanner(strings.NewReader(c))
	sc.Split(bufio.ScanWords)
	count := 0
	for sc.Scan() {
		count++
	}
	return count
}

// load parses every embedded markdown post, renders it to HTML and returns
// the published posts sorted newest-first. Results are cached for ten minutes.
func load() ([]models.Post, error) {
	if cacheValid && time.Since(postCacheTime) < 10*time.Minute {
		return postCache, nil
	}

	var posts []models.Post
	entries, err := postsFS.ReadDir("posts")
	if err != nil {
		return nil, fmt.Errorf("failed to read posts dir: %w", err)
	}

	for _, entry := range entries {
		if entry.IsDir() || !strings.HasSuffix(entry.Name(), ".md") {
			continue
		}
		// embed.FS paths always use forward slashes, so join with path.Join
		// rather than filepath.Join (which would break on Windows).
		c, err := postsFS.ReadFile(path.Join("posts", entry.Name()))
		if err != nil {
			continue
		}

		meta, body, err := parseFrontmatter(string(c))
		if err != nil {
			// No usable frontmatter: fall back to the filename as the title
			// and today's date, and treat the whole file as the body.
			meta = PostMeta{
				Title: strings.TrimSuffix(entry.Name(), ".md"),
				Date:  time.Now().Format("2006-01-02"),
			}
			body = string(c)
		}
		if meta.Draft {
			continue
		}

		published, err := time.Parse("2006-01-02", meta.Date)
		if err != nil {
			published = time.Now()
		}

		var htmlC strings.Builder
		if err := md.Convert([]byte(body), &htmlC); err != nil {
			continue
		}

		// Use the frontmatter excerpt if present, otherwise the first 30
		// words of the rendered post with the HTML tags stripped.
		excerpt := meta.Excerpt
		if excerpt == "" {
			plain := stripHTML(htmlC.String())
			words := strings.Fields(plain)
			if len(words) > 30 {
				excerpt = strings.Join(words[:30], " ") + "..."
			} else {
				excerpt = plain
			}
		}

		count := meta.WordCount
		if count == 0 {
			count = countWords(body)
		}

		slug := generateSlug(entry.Name())
		post := models.Post{
			ID:          slug,
			Title:       meta.Title,
			Slug:        slug,
			Excerpt:     excerpt,
			Content:     htmlC.String(),
			Tags:        meta.Tags,
			PublishedAt: published,
			WordCount:   count,
		}
		posts = append(posts, post)
	}

	sort.Slice(posts, func(i, j int) bool {
		return posts[i].PublishedAt.After(posts[j].PublishedAt)
	})

	postCache = posts
	postCacheTime = time.Now()
	cacheValid = true
	return posts, nil
}

// stripHTML drops anything between '<' and '>' to approximate the plain text
// of rendered HTML; it is good enough for excerpt generation.
func stripHTML(s string) string {
	in := false
	var sb strings.Builder
	for _, r := range s {
		if r == '<' {
			in = true
			continue
		}
		if r == '>' {
			in = false
			continue
		}
		if !in {
			sb.WriteRune(r)
		}
	}
	return sb.String()
}

// GetPaginatedPosts returns the given 1-indexed page of posts.
func GetPaginatedPosts(page, perPage int) []models.Post {
	posts, err := load()
	if err != nil {
		return []models.Post{}
	}
	start := (page - 1) * perPage
	end := start + perPage
	if start >= len(posts) {
		return []models.Post{}
	}
	if end > len(posts) {
		end = len(posts)
	}
	return posts[start:end]
}

// GetTotalPages returns the number of pages needed to list all posts.
func GetTotalPages(per int) int {
	posts, err := load()
	if err != nil {
		return 1
	}
	return (len(posts) + per - 1) / per
}

// GetPostBySlug returns the post with the given slug, or nil if none exists.
func GetPostBySlug(slug string) *models.Post {
	posts, err := load()
	if err != nil {
		return nil
	}
	for _, post := range posts {
		if post.Slug == slug {
			return &post
		}
	}
	return nil
}

// GetPostsByTag returns the given 1-indexed page of posts carrying the tag.
func GetPostsByTag(tag string, page, per int) []models.Post {
	posts, err := load()
	if err != nil {
		return []models.Post{}
	}
	var filtered []models.Post
	for _, post := range posts {
		for _, t := range post.Tags {
			if t == tag {
				filtered = append(filtered, post)
				break
			}
		}
	}
	start := (page - 1) * per
	end := start + per
	if start >= len(filtered) {
		return []models.Post{}
	}
	if end > len(filtered) {
		end = len(filtered)
	}
	return filtered[start:end]
}

// GetTotalPagesByTag returns the number of pages for posts carrying the tag.
func GetTotalPagesByTag(tag string, per int) int {
	posts, err := load()
	if err != nil {
		return 1
	}
	count := 0
	for _, post := range posts {
		for _, t := range post.Tags {
			if t == tag {
				count++
				break
			}
		}
	}
	return (count + per - 1) / per
}

// GetAllPosts returns every published post, newest first.
func GetAllPosts() []models.Post {
	posts, err := load()
	if err != nil {
		return []models.Post{}
	}
	return posts
}

// GetAllTags returns the sorted set of tags used across all posts.
func GetAllTags() []string {
	posts := GetAllPosts()
	if len(posts) == 0 {
		return []string{}
	}
	set := make(map[string]bool)
	for _, post := range posts {
		for _, tag := range post.Tags {
			set[tag] = true
		}
	}
	var tags []string
	for tag := range set {
		tags = append(tags, tag)
	}
	sort.Strings(tags)
	return tags
}