package markdown

import (
	"bytes"
	"io/ioutil"
	"log"
	"os"
	"path/filepath"
	"sort"
	"strings"
	"sync"
	"time"

	"github.com/russross/blackfriday"
)

const (
	// Date format YYYY-MM-DD HH:MM:SS
	timeLayout = `2006-01-02 15:04:05`

	// Maximum length of page summary.
	summaryLen = 500
)
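
// timeLayout follows Go's reference-time convention, so a metadata date in the
// YYYY-MM-DD HH:MM:SS form can be parsed with time.Parse. A minimal sketch
// (the date string below is only an illustration):
//
//	t, err := time.Parse(timeLayout, "2015-07-28 12:21:09")
//	if err != nil {
//		// handle a malformed date in the page metadata
//	}
//	_ = t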

// PageLink represents a statically generated markdown page.
type PageLink struct {
	Title   string
	Summary string
	Date    time.Time
	URL     string
}

// byDate sorts PageLink by newest date to oldest.
type byDate []PageLink

func (p byDate) Len() int           { return len(p) }
func (p byDate) Swap(i, j int)      { p[i], p[j] = p[j], p[i] }
func (p byDate) Less(i, j int) bool { return p[i].Date.After(p[j].Date) }

// linkGen tracks the state of a link-generation pass: whether one is running,
// how many callers are waiting for it to finish, and the last error it
// produced. The embedded RWMutex guards this state; the embedded WaitGroup
// blocks the waiters.
type linkGen struct {
	generating bool
	waiters    int
	lastErr    error
	sync.RWMutex
	sync.WaitGroup
}

// addWaiter registers one more caller that will block in Wait until
// discardWaiters releases it.
func (l *linkGen) addWaiter() {
	l.WaitGroup.Add(1)
	l.waiters++
}

// discardWaiters releases every registered waiter by calling Done once
// per waiter.
func (l *linkGen) discardWaiters() {
	l.Lock()
	defer l.Unlock()
	for i := 0; i < l.waiters; i++ {
		l.Done()
	}
}

// started reports whether a generation pass is currently in progress.
func (l *linkGen) started() bool {
	l.RLock()
	defer l.RUnlock()
	return l.generating
}
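
// The methods above let callers block until a background generation pass
// completes. A minimal sketch of that pattern, assuming a caller-side helper
// that is not part of this file:
//
//	// waitIfGenerating is a hypothetical helper a request handler might use.
//	func waitIfGenerating(l *linkGen) {
//		if l.started() {
//			l.Lock()
//			l.addWaiter()
//			l.Unlock()
//			l.Wait() // released when discardWaiters calls Done for each waiter
//		}
//	}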

// generateLinks generates links to markdown files if there are file changes.
// It returns true when generation is done and false otherwise.
func (l *linkGen) generateLinks(md Markdown, cfg *Config) bool {
	l.Lock()
	l.generating = true
	l.Unlock()

	fp := filepath.Join(md.Root, cfg.PathScope) // path to scan for .md files

	// If the path to scan for markdown files (fp) does not exist,
	// there is nothing to generate.
	if _, err := os.Stat(fp); os.IsNotExist(err) {
		l.Lock()
		l.lastErr = err
		l.generating = false
		l.Unlock()
		return false
	}

	hash, err := computeDirHash(md, *cfg)

	// If the directory hash is unchanged, the links are up to date;
	// skip regeneration.
	if err == nil && hash == cfg.linksHash {
		l.Lock()
		l.generating = false
		l.Unlock()
		return false
	} else if err != nil {
		log.Println("Error:", err)
	}

	cfg.Lock()

	// Rebuild the page links from scratch while holding the config lock.
	cfg.Links = []PageLink{}
	l.lastErr = filepath.Walk(fp, func(path string, info os.FileInfo, err error) error {
		// Propagate errors from Walk itself; info may be nil when err is non-nil.
		if err != nil {
			return err
		}

		for _, ext := range cfg.Extensions {
			if !info.IsDir() && strings.HasSuffix(info.Name(), ext) {
				// Load the file
				body, err := ioutil.ReadFile(path)
				if err != nil {
					return err
				}

				// Get the relative path as if it were an HTTP request,
				// then prepend with "/" (like a real HTTP request)
				reqPath, err := filepath.Rel(md.Root, path)
				if err != nil {
					return err
				}
				reqPath = "/" + reqPath

				parser := findParser(body)
				if parser == nil {
					// no metadata, ignore this file
					continue
				}
				summary, err := parser.Parse(body)
				if err != nil {
					return err
				}

				// Truncate the summary to the maximum length.
				if len(summary) > summaryLen {
					summary = summary[:summaryLen]

					// Trim back to the nearest word boundary.
					lastSpace := bytes.LastIndex(summary, []byte(" "))
					if lastSpace != -1 {
						summary = summary[:lastSpace]
					}
				}

				metadata := parser.Metadata()

				// Render the truncated summary to a string and record the page link.
				cfg.Links = append(cfg.Links, PageLink{
					Title:   metadata.Title,
					URL:     reqPath,
					Date:    metadata.Date,
					Summary: string(blackfriday.Markdown(summary, SummaryRenderer{}, 0)),
				})

				break // don't try other file extensions
			}
		}

		return nil
	})

	// sort by newest date
	sort.Sort(byDate(cfg.Links))

	cfg.linksHash = hash
	cfg.Unlock()

	l.Lock()
	l.generating = false
	l.Unlock()

	return true
}
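
// A generation pass is meant to run off the request path. A minimal sketch of
// a driver loop, assuming a hypothetical run method and ticker interval; none
// of these names are defined in this file:
//
//	func (l *linkGen) run(md Markdown, cfg *Config, interval time.Duration) {
//		for range time.Tick(interval) {
//			l.generateLinks(md, cfg)
//			l.discardWaiters() // release any callers blocked in Wait
//		}
//	}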