        Fix CreatePages - hugo - [fork] hugo port for 9front
        git clone git@git.drkhsh.at/hugo.git
       ---
        commit 47c91a4ca2ac3db496b9d495757eda121b612bec
        parent 93addfcbeec301e4f37b0f83bc5735f1939f1895
        Author: Nate Finch <nate.finch@gmail.com>
       Date:   Fri, 29 Aug 2014 13:40:21 -0400
       
       Fix CreatePages
       
       This fixes #450.  There are two problems:
       
       1.) We're creating a new goroutine for every page.
       2.) We're calling s.Pages = append(s.Pages, page) inside each goroutine.
       
        1 is a problem in that if you have a ton of pages, that's a ton of goroutines.  It's not really useful to have more than a few goroutines running at a time, and too many can actually make your code much slower and, evidently, crash.
       
        2 is a problem in that append is not thread safe: when the original slice isn't large enough, it returns a new slice with a larger capacity.  If two goroutines do this at the same time, they race on the slice and can lose each other's writes.
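
        For illustration only (not part of this commit), here is a minimal, self-contained sketch of the race described above; running it under Go's race detector (go run -race) flags the unsynchronized appends:

        package main

        import "sync"

        func main() {
                var pages []int
                var wg sync.WaitGroup
                for i := 0; i < 100; i++ {
                        wg.Add(1)
                        go func(n int) {
                                defer wg.Done()
                                // unsynchronized append: this may reallocate the backing
                                // array while another goroutine does the same, so elements
                                // can be lost or the slice header left inconsistent
                                pages = append(pages, n)
                        }(i)
                }
                wg.Wait()
        }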
       
       The solution for 1 is to use a limited number of workers (I chose 2*GOMAXPROCS as a nice guess).
       The solution for 2 is to serialize access to s.Pages, which I did by doing it in a single goroutine.
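
        To make the shape of the fix concrete, here is a simplified, self-contained sketch of the same pattern (not the commit's code; squaring a number stands in for rendering a page, and the names are made up): a bounded pool of workers reads from an input channel, and exactly one collator goroutine appends to the shared slice.

        package main

        import (
                "fmt"
                "runtime"
                "sync"
        )

        func main() {
                items := []int{1, 2, 3, 4, 5}

                input := make(chan int)
                results := make(chan int)
                done := make(chan []int)

                // solution for 1: a bounded number of workers instead of one
                // goroutine per item
                var wg sync.WaitGroup
                for i := 0; i < runtime.GOMAXPROCS(0)*2; i++ {
                        wg.Add(1)
                        go func() {
                                defer wg.Done()
                                for n := range input {
                                        results <- n * n // stand-in for page rendering
                                }
                        }()
                }

                // solution for 2: exactly one goroutine appends to the shared
                // slice, so access to it is serialized without locks
                go func() {
                        var collected []int
                        for r := range results {
                                collected = append(collected, r)
                        }
                        done <- collected
                }()

                for _, n := range items {
                        input <- n
                }
                close(input)   // no more work; workers drain and exit
                wg.Wait()      // all workers have finished sending
                close(results) // lets the collator's range loop end
                fmt.Println(<-done)
        }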
       
       Diffstat:
         M hugolib/site.go                     |     131 ++++++++++++++++++++++---------
       
       1 file changed, 94 insertions(+), 37 deletions(-)
       ---
        diff --git a/hugolib/site.go b/hugolib/site.go
       @@ -19,6 +19,7 @@ import (
                "html/template"
                "io"
                "os"
       +        "strconv"
                "strings"
                "sync"
                "time"
       @@ -311,60 +312,107 @@ func (s *Site) checkDirectories() (err error) {
                return
        }
        
       -func (s *Site) CreatePages() (err error) {
       +type pageRenderResult struct {
       +        page *Page
       +        err  error
       +}
       +
       +func (s *Site) CreatePages() error {
                if s.Source == nil {
                        panic(fmt.Sprintf("s.Source not set %s", s.absContentDir()))
                }
                if len(s.Source.Files()) < 1 {
       -                return
       +                return nil
                }
        
       -        var wg sync.WaitGroup
       -        for _, fi := range s.Source.Files() {
       -                wg.Add(1)
       -                go func(file *source.File) (err error) {
       -                        defer wg.Done()
       +        files := s.Source.Files()
        
       -                        page, err := NewPage(file.LogicalName)
       -                        if err != nil {
       -                                return err
       -                        }
       -                        err = page.ReadFrom(file.Contents)
       -                        if err != nil {
       -                                return err
       -                        }
       -                        page.Site = &s.Info
       -                        page.Tmpl = s.Tmpl
       -                        page.Section = file.Section
       -                        page.Dir = file.Dir
       +        results := make(chan pageRenderResult)
       +        input := make(chan *source.File)
        
       -                        //Handling short codes prior to Conversion to HTML
       -                        page.ProcessShortcodes(s.Tmpl)
       +        procs := getGoMaxProcs()
        
       -                        err = page.Convert()
       -                        if err != nil {
       -                                return err
       -                        }
       +        wg := &sync.WaitGroup{}
        
       -                        if page.ShouldBuild() {
       -                                s.Pages = append(s.Pages, page)
       -                        }
       +        for i := 0; i < procs*2; i++ {
       +                wg.Add(1)
       +                go pageRenderer(s, input, results, wg)
       +        }
        
       -                        if page.IsDraft() {
       -                                s.draftCount += 1
       -                        }
       +        errs := make(chan error)
        
       -                        if page.IsFuture() {
       -                                s.futureCount += 1
       -                        }
       +        // we can only have exactly one result collator, since it makes changes that
       +        // must be synchronized.
       +        go resultCollator(s, results, errs)
        
       -                        return
       -                }(fi)
       +        for _, fi := range files {
       +                input <- fi
                }
        
       +        close(input)
       +
                wg.Wait()
       +
       +        close(results)
       +
       +        return <-errs
       +}
       +
       +func pageRenderer(s *Site, input <-chan *source.File, results chan<- pageRenderResult, wg *sync.WaitGroup) {
       +        for file := range input {
       +                page, err := NewPage(file.LogicalName)
       +                if err != nil {
       +                        results <- pageRenderResult{nil, err}
       +                        continue
       +                }
       +                err = page.ReadFrom(file.Contents)
       +                if err != nil {
       +                        results <- pageRenderResult{nil, err}
       +                        continue
       +                }
       +                page.Site = &s.Info
       +                page.Tmpl = s.Tmpl
       +                page.Section = file.Section
       +                page.Dir = file.Dir
       +
       +                //Handling short codes prior to Conversion to HTML
       +                page.ProcessShortcodes(s.Tmpl)
       +
       +                err = page.Convert()
       +                if err != nil {
       +                        results <- pageRenderResult{nil, err}
       +                        continue
       +                }
       +                results <- pageRenderResult{page, nil}
       +        }
       +        wg.Done()
       +}
       +
       +func resultCollator(s *Site, results <-chan pageRenderResult, errs chan<- error) {
       +        errMsgs := []string{}
       +        for r := range results {
       +                if r.err != nil {
       +                        errMsgs = append(errMsgs, r.err.Error())
       +                        continue
       +                }
       +
       +                if r.page.ShouldBuild() {
       +                        s.Pages = append(s.Pages, r.page)
       +                }
       +
       +                if r.page.IsDraft() {
       +                        s.draftCount += 1
       +                }
       +
       +                if r.page.IsFuture() {
       +                        s.futureCount += 1
       +                }
       +        }
                s.Pages.Sort()
       -        return
        +        if len(errMsgs) == 0 {
        +                errs <- nil
        +                return
        +        }
        +        errs <- fmt.Errorf("Errors rendering pages: %s", strings.Join(errMsgs, "\n"))
        }
        
        func (s *Site) BuildSiteMeta() (err error) {
       @@ -1010,3 +1058,12 @@ func (s *Site) futureStats() string {
        
                return "0 of " + msg
        }
       +
       +func getGoMaxProcs() int {
       +        if gmp := os.Getenv("GOMAXPROCS"); gmp != "" {
        +                if p, err := strconv.Atoi(gmp); err == nil {
       +                        return p
       +                }
       +        }
       +        return 1
       +}