diff --git a/help/en/feeds.myco b/help/en/feeds.myco
new file mode 100644
index 0000000..204b2cf
--- /dev/null
+++ b/help/en/feeds.myco
@@ -0,0 +1,39 @@
+# Help: Feeds
+Mycorrhiza Wiki has RSS, Atom, and JSON feeds to track the latest changes on the wiki.
+These feeds are linked on the [[/recent-changes | recent changes page]].
+
+## Options
+These feeds have options to combine related changes into groups:
+* {
+ **period** Can be set to lengths of time like `5m`, `24h`, etc.
+ Edits that happen within this time of each other can be grouped into one item in the feed.
+}
+* {
+ **same** Can be set to `author`, `message`, or `none`, and can be given more than once.
+ Edits will only be grouped together if they share the given attributes. By default, edits need to have the same author and message. If it is `none`, edits can be grouped regardless of author and message.
+}
+* {
+ **order** Can be set to `old-to-new` (default) or `new-to-old`.
+ This determines the order in which the edits of a group are shown in your feed.
+}
+
+If none of these options are set, changes will never be grouped.
+
+## Examples
+URLs for feeds using these options look like this:
+* {
+ `/recent-changes-rss?period=1h`
+ Changes within one hour of each other with the same author and message will be grouped together.
+}
+* {
+ `/recent-changes-atom?period=1h&order=new-to-old`
+ Same as the last one, but the groups will be shown in the opposite order.
+}
+* {
+ `/recent-changes-atom?period=1h&same=none`
+ Changes within one hour of each other will be grouped together, even with different authors and messages.
+}
+* {
+ `/recent-changes-atom?same=author&same=message`
+ Changes with the same author and message will be grouped together no matter how much time passes between them.
+}
diff --git a/history/feed.go b/history/feed.go
new file mode 100644
index 0000000..18b7001
--- /dev/null
+++ b/history/feed.go
@@ -0,0 +1,318 @@
+package history
+
+import (
+	"errors"
+	"fmt"
+	"net/url"
+	"strings"
+	"time"
+
+	"github.com/bouncepaw/mycorrhiza/cfg"
+
+	"github.com/gorilla/feeds"
+)
+
+const changeGroupMaxSize = 30
+
+func recentChangesFeed(opts FeedOptions) *feeds.Feed {
+	feed := &feeds.Feed{
+		Title:       cfg.WikiName + " (recent changes)",
+		Link:        &feeds.Link{Href: cfg.URL},
+		Description: fmt.Sprintf("List of %d recent changes on the wiki", changeGroupMaxSize),
+		Updated:     time.Now(),
+	}
+	revs := newRecentChangesStream()
+	groups := groupRevisions(revs, opts)
+	for _, grp := range groups {
+		item := grp.feedItem(opts)
+		feed.Add(&item)
+	}
+	return feed
+}
+
+// RecentChangesRSS creates recent changes feed in RSS format.
+func RecentChangesRSS(opts FeedOptions) (string, error) {
+	return recentChangesFeed(opts).ToRss()
+}
+
+// RecentChangesAtom creates recent changes feed in Atom format.
+func RecentChangesAtom(opts FeedOptions) (string, error) {
+	return recentChangesFeed(opts).ToAtom()
+}
+
+// RecentChangesJSON creates recent changes feed in JSON format.
+func RecentChangesJSON(opts FeedOptions) (string, error) {
+	return recentChangesFeed(opts).ToJSON()
+}
+
+// revisionGroup is a slice of revisions, ordered most recent first.
+type revisionGroup []Revision
+
+func newRevisionGroup(rev Revision) revisionGroup {
+	return revisionGroup([]Revision{rev})
+}
+
+func (grp *revisionGroup) addRevision(rev Revision) {
+	*grp = append(*grp, rev)
+}
+
+// orderedIndex returns the ith revision in the group following the given order.
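+// Since a revisionGroup is stored newest-first, orderedIndex(0, newToOld) is the most
+// recent revision of the group, while orderedIndex(0, oldToNew) is the earliest one.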
+func (grp *revisionGroup) orderedIndex(i int, order feedGroupOrder) *Revision { + switch order { + case newToOld: + return &(*grp)[i] + case oldToNew: + return &(*grp)[len(*grp)-1-i] + } + // unreachable + return nil +} + +func groupRevisionsByMonth(revs []Revision) (res []revisionGroup) { + var ( + currentYear int + currentMonth time.Month + ) + for _, rev := range revs { + if rev.Time.Month() != currentMonth || rev.Time.Year() != currentYear { + currentYear = rev.Time.Year() + currentMonth = rev.Time.Month() + res = append(res, newRevisionGroup(rev)) + } else { + res[len(res)-1].addRevision(rev) + } + } + return res +} + +// groupRevisions groups revisions for a feed. +// It returns the first changeGroupMaxSize (30) groups. +// The grouping parameter determines when two revisions will be grouped. +func groupRevisions(revs recentChangesStream, opts FeedOptions) (res []revisionGroup) { + nextRev := revs.iterator() + rev, empty := nextRev() + if empty { + return res + } + + currGroup := newRevisionGroup(rev) + for rev, done := nextRev(); !done; rev, done = nextRev() { + if opts.canGroup(currGroup, rev) { + currGroup.addRevision(rev) + } else { + res = append(res, currGroup) + if len(res) == changeGroupMaxSize { + return res + } + currGroup = newRevisionGroup(rev) + } + } + // no more revisions, haven't added the last group yet + return append(res, currGroup) +} + +func (grp revisionGroup) feedItem(opts FeedOptions) feeds.Item { + title, author := grp.titleAndAuthor(opts.order) + return feeds.Item{ + Title: title, + Author: author, + Id: grp[len(grp)-1].Hash, + Description: grp.descriptionForFeed(opts.order), + Created: grp[len(grp)-1].Time, // earliest revision + Updated: grp[0].Time, // latest revision + Link: &feeds.Link{Href: cfg.URL + grp[0].bestLink()}, + } +} + +// titleAndAuthor creates a title and author for a feed item. +// If all messages and authors are the same (or there's just one rev), "message by author" +// If all authors are the same, "num edits (first message, ...) by author" +// Else (even if all messages are the same), "num edits (first message, ...)" +func (grp revisionGroup) titleAndAuthor(order feedGroupOrder) (title string, author *feeds.Author) { + allMessagesSame := true + allAuthorsSame := true + for _, rev := range grp[1:] { + if rev.Message != grp[0].Message { + allMessagesSame = false + } + if rev.Username != grp[0].Username { + allAuthorsSame = false + } + if !allMessagesSame && !allAuthorsSame { + break + } + } + + if allMessagesSame && allAuthorsSame { + title = grp[0].Message + } else { + title = fmt.Sprintf("%d edits (%s, ...)", len(grp), grp.orderedIndex(0, order).Message) + } + + if allAuthorsSame { + title += fmt.Sprintf(" by %s", grp[0].Username) + author = &feeds.Author{Name: grp[0].Username} + } else { + author = nil + } + + return title, author +} + +func (grp revisionGroup) descriptionForFeed(order feedGroupOrder) string { + builder := strings.Builder{} + for i := 0; i < len(grp); i++ { + desc := grp.orderedIndex(i, order).descriptionForFeed() + builder.WriteString(desc) + } + return builder.String() +} + +type feedOptionParserState struct { + isAnythingSet bool + conds []groupingCondition + order feedGroupOrder +} + +// feedGrouping represents a set of conditions that must all be satisfied for revisions to be grouped. +// If there are no conditions, revisions will never be grouped. 
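+// For example, ?period=1h&same=author produces two conditions: consecutive edits are
+// grouped only when they are within an hour of each other and made by the same author.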
+type FeedOptions struct { + conds []groupingCondition + order feedGroupOrder +} + +func ParseFeedOptions(query url.Values) (FeedOptions, error) { + parser := feedOptionParserState{} + + err := parser.parseFeedGroupingPeriod(query) + if err != nil { + return FeedOptions{}, err + } + err = parser.parseFeedGroupingSame(query) + if err != nil { + return FeedOptions{}, err + } + err = parser.parseFeedGroupingOrder(query) + if err != nil { + return FeedOptions{}, err + } + + var conds []groupingCondition + if parser.isAnythingSet { + conds = parser.conds + } else { + // if no options are applied, do no grouping instead of using the default options + conds = nil + } + return FeedOptions{conds: conds, order: parser.order}, nil +} + +func (parser *feedOptionParserState) parseFeedGroupingPeriod(query url.Values) error { + if query["period"] != nil { + parser.isAnythingSet = true + period, err := time.ParseDuration(query.Get("period")) + if err != nil { + return err + } + parser.conds = append(parser.conds, periodGroupingCondition{period}) + } + return nil +} + +func (parser *feedOptionParserState) parseFeedGroupingSame(query url.Values) error { + if same := query["same"]; same != nil { + parser.isAnythingSet = true + if len(same) == 1 && same[0] == "none" { + // same=none adds no condition + parser.conds = append(parser.conds, sameGroupingCondition{}) + return nil + } else { + // handle same=author, same=author&same=message, etc. + cond := sameGroupingCondition{} + for _, sameCond := range same { + switch sameCond { + case "author": + if cond.author { + return errors.New("set same=author twice") + } + cond.author = true + case "message": + if cond.message { + return errors.New("set same=message twice") + } + cond.message = true + default: + return errors.New("unknown same option " + sameCond) + } + } + parser.conds = append(parser.conds, cond) + return nil + } + } else { + // same defaults to both author and message + // but this won't be applied if no grouping options are set + parser.conds = append(parser.conds, sameGroupingCondition{author: true, message: true}) + return nil + } +} + +type feedGroupOrder int + +const ( + newToOld feedGroupOrder = iota + oldToNew feedGroupOrder = iota +) + +func (parser *feedOptionParserState) parseFeedGroupingOrder(query url.Values) error { + if order := query["order"]; order != nil { + parser.isAnythingSet = true + switch query.Get("order") { + case "old-to-new": + parser.order = oldToNew + case "new-to-old": + parser.order = newToOld + default: + return errors.New("unknown order option " + query.Get("order")) + } + } else { + parser.order = oldToNew + } + return nil +} + +// canGroup determines whether a revision can be added to a group. +func (opts FeedOptions) canGroup(grp revisionGroup, rev Revision) bool { + if len(opts.conds) == 0 { + return false + } + + for _, cond := range opts.conds { + if !cond.canGroup(grp, rev) { + return false + } + } + return true +} + +type groupingCondition interface { + canGroup(grp revisionGroup, rev Revision) bool +} + +// periodGroupingCondition will group two revisions if they are within period of each other. 
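+// For example, with ?period=30m this condition is satisfied only when the incoming
+// revision happened less than 30 minutes before the earliest revision already in the group.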
+type periodGroupingCondition struct { + period time.Duration +} + +func (cond periodGroupingCondition) canGroup(grp revisionGroup, rev Revision) bool { + return grp[len(grp)-1].Time.Sub(rev.Time) < cond.period +} + +type sameGroupingCondition struct { + author bool + message bool +} + +func (c sameGroupingCondition) canGroup(grp revisionGroup, rev Revision) bool { + return (!c.author || grp[0].Username == rev.Username) && + (!c.message || grp[0].Message == rev.Message) +} diff --git a/history/history.go b/history/history.go index 37ce429..559d9d5 100644 --- a/history/history.go +++ b/history/history.go @@ -4,14 +4,10 @@ package history import ( "bytes" "fmt" - "html" "log" "os/exec" "path/filepath" "regexp" - "strconv" - "strings" - "time" "github.com/bouncepaw/mycorrhiza/files" "github.com/bouncepaw/mycorrhiza/util" @@ -54,131 +50,6 @@ func InitGitRepo() { } } -// Revision represents a revision, duh. Hash is usually short. Username is extracted from email. -type Revision struct { - Hash string - Username string - Time time.Time - Message string - filesAffectedBuf []string - hyphaeAffectedBuf []string -} - -// filesAffected tells what files have been affected by the revision. -func (rev *Revision) filesAffected() (filenames []string) { - if nil != rev.filesAffectedBuf { - return rev.filesAffectedBuf - } - // List of files affected by this revision, one per line. - out, err := silentGitsh("diff-tree", "--no-commit-id", "--name-only", "-r", rev.Hash) - // There's an error? Well, whatever, let's just assign an empty slice, who cares. - if err != nil { - rev.filesAffectedBuf = []string{} - } else { - rev.filesAffectedBuf = strings.Split(out.String(), "\n") - } - return rev.filesAffectedBuf -} - -// determine what hyphae were affected by this revision -func (rev *Revision) hyphaeAffected() (hyphae []string) { - if nil != rev.hyphaeAffectedBuf { - return rev.hyphaeAffectedBuf - } - hyphae = make([]string, 0) - var ( - // set is used to determine if a certain hypha has been already noted (hyphae are stored in 2 files at most currently). - set = make(map[string]bool) - isNewName = func(hyphaName string) bool { - if _, present := set[hyphaName]; present { - return false - } - set[hyphaName] = true - return true - } - filesAffected = rev.filesAffected() - ) - for _, filename := range filesAffected { - if strings.IndexRune(filename, '.') >= 0 { - dotPos := strings.LastIndexByte(filename, '.') - hyphaName := string([]byte(filename)[0:dotPos]) // is it safe? - if isNewName(hyphaName) { - hyphae = append(hyphae, hyphaName) - } - } - } - rev.hyphaeAffectedBuf = hyphae - return hyphae -} - -// TimeString returns a human readable time representation. -func (rev Revision) TimeString() string { - return rev.Time.Format(time.RFC822) -} - -// HyphaeLinksHTML returns a comma-separated list of hyphae that were affected by this revision as HTML string. -func (rev Revision) HyphaeLinksHTML() (html string) { - hyphae := rev.hyphaeAffected() - for i, hyphaName := range hyphae { - if i > 0 { - html += `` - } - html += fmt.Sprintf(`%[1]s`, hyphaName) - } - return html -} - -// descriptionForFeed generates a good enough HTML contents for a web feed. -func (rev *Revision) descriptionForFeed() (htmlDesc string) { - return fmt.Sprintf( - `
<p>%s</p>
-<p>Hyphae affected: %s</p>
-<pre>%s</pre>
`, rev.Message, rev.HyphaeLinksHTML(), html.EscapeString(rev.textDiff())) -} - -// textDiff generates a good enough diff to display in a web feed. It is not html-escaped. -func (rev *Revision) textDiff() (diff string) { - filenames, ok := rev.mycoFiles() - if !ok { - return "No text changes" - } - for _, filename := range filenames { - text, err := PrimitiveDiffAtRevision(filename, rev.Hash) - if err != nil { - diff += "\nAn error has occurred with " + filename + "\n" - } - diff += text + "\n" - } - return diff -} - -// mycoFiles returns filenames of .myco file. It is not ok if there are no myco files. -func (rev *Revision) mycoFiles() (filenames []string, ok bool) { - filenames = []string{} - for _, filename := range rev.filesAffected() { - if strings.HasSuffix(filename, ".myco") { - filenames = append(filenames, filename) - } - } - return filenames, len(filenames) > 0 -} - -// Try and guess what link is the most important by looking at the message. -func (rev *Revision) bestLink() string { - var ( - revs = rev.hyphaeAffected() - renameRes = renameMsgPattern.FindStringSubmatch(rev.Message) - ) - switch { - case renameRes != nil: - return "/hypha/" + renameRes[1] - case len(revs) == 0: - return "" - default: - return "/hypha/" + revs[0] - } -} - // I pronounce it as [gɪt͡ʃ]. // gitsh is async-safe, therefore all other git-related functions in this module are too. func gitsh(args ...string) (out bytes.Buffer, err error) { @@ -204,16 +75,6 @@ func silentGitsh(args ...string) (out bytes.Buffer, err error) { return *bytes.NewBuffer(b), err } -// Convert a UNIX timestamp as string into a time. If nil is returned, it means that the timestamp could not be converted. -func unixTimestampAsTime(ts string) *time.Time { - i, err := strconv.ParseInt(ts, 10, 64) - if err != nil { - return nil - } - tm := time.Unix(i, 0) - return &tm -} - // Rename renames from `from` to `to` using `git mv`. func Rename(from, to string) error { log.Println(util.ShorterPath(from), util.ShorterPath(to)) diff --git a/history/information.go b/history/information.go deleted file mode 100644 index 8237c1c..0000000 --- a/history/information.go +++ /dev/null @@ -1,199 +0,0 @@ -package history - -// information.go -// Things related to gathering existing information. -import ( - "fmt" - "log" - "regexp" - "strconv" - "strings" - "time" - - "github.com/bouncepaw/mycorrhiza/cfg" - "github.com/bouncepaw/mycorrhiza/files" - - "github.com/gorilla/feeds" -) - -func recentChangesFeed() *feeds.Feed { - feed := &feeds.Feed{ - Title: "Recent changes", - Link: &feeds.Link{Href: cfg.URL}, - Description: "List of 30 recent changes on the wiki", - Author: &feeds.Author{Name: "Wikimind", Email: "wikimind@mycorrhiza"}, - Updated: time.Now(), - } - var ( - out, err = silentGitsh( - "log", "--oneline", "--no-merges", - "--pretty=format:\"%h\t%ae\t%at\t%s\"", - "--max-count=30", - ) - revs []Revision - ) - if err == nil { - for _, line := range strings.Split(out.String(), "\n") { - revs = append(revs, parseRevisionLine(line)) - } - } - log.Printf("Found %d recent changes", len(revs)) - for _, rev := range revs { - feed.Add(&feeds.Item{ - Title: rev.Message, - Author: &feeds.Author{Name: rev.Username}, - Id: rev.Hash, - Description: rev.descriptionForFeed(), - Created: rev.Time, - Updated: rev.Time, - Link: &feeds.Link{Href: cfg.URL + rev.bestLink()}, - }) - } - return feed -} - -// RecentChangesRSS creates recent changes feed in RSS format. 
-func RecentChangesRSS() (string, error) { - return recentChangesFeed().ToRss() -} - -// RecentChangesAtom creates recent changes feed in Atom format. -func RecentChangesAtom() (string, error) { - return recentChangesFeed().ToAtom() -} - -// RecentChangesJSON creates recent changes feed in JSON format. -func RecentChangesJSON() (string, error) { - return recentChangesFeed().ToJSON() -} - -// RecentChanges gathers an arbitrary number of latest changes in form of revisions slice. -func RecentChanges(n int) []Revision { - var ( - out, err = silentGitsh( - "log", "--oneline", "--no-merges", - "--pretty=format:\"%h\t%ae\t%at\t%s\"", - "--max-count="+strconv.Itoa(n), - ) - revs []Revision - ) - if err == nil { - for _, line := range strings.Split(out.String(), "\n") { - revs = append(revs, parseRevisionLine(line)) - } - } - log.Printf("Found %d recent changes", len(revs)) - return revs -} - -// FileChanged tells you if the file has been changed. -func FileChanged(path string) bool { - _, err := gitsh("diff", "--exit-code", path) - return err != nil -} - -// Revisions returns slice of revisions for the given hypha name. -func Revisions(hyphaName string) ([]Revision, error) { - var ( - out, err = silentGitsh( - "log", "--oneline", "--no-merges", - // Hash, author email, author time, commit msg separated by tab - "--pretty=format:\"%h\t%ae\t%at\t%s\"", - "--", hyphaName+".*", - ) - revs []Revision - ) - if err == nil { - for _, line := range strings.Split(out.String(), "\n") { - if line != "" { - revs = append(revs, parseRevisionLine(line)) - } - } - } - log.Printf("Found %d revisions for ‘%s’\n", len(revs), hyphaName) - return revs, err -} - -// WithRevisions returns an html representation of `revs` that is meant to be inserted in a history page. -func WithRevisions(hyphaName string, revs []Revision) (html string) { - var ( - currentYear int - currentMonth time.Month - ) - for i, rev := range revs { - if rev.Time.Month() != currentMonth || rev.Time.Year() != currentYear { - currentYear = rev.Time.Year() - currentMonth = rev.Time.Month() - if i != 0 { - html += ` - -` - } - html += fmt.Sprintf(` -
- -

%[3]s

-
-
  • `) -//line views/stuff.qtpl:181 +//line views/stuff.qtpl:182 qw422016.E().S(lc.GetWithLocale(lang, "help.configuration")) -//line views/stuff.qtpl:181 +//line views/stuff.qtpl:182 qw422016.N().S(` @@ -749,91 +758,91 @@ func streamhelpTopicsHTML(qw422016 *qt422016.Writer, lang string, lc *l18n.Local `) -//line views/stuff.qtpl:191 +//line views/stuff.qtpl:192 } -//line views/stuff.qtpl:191 +//line views/stuff.qtpl:192 func writehelpTopicsHTML(qq422016 qtio422016.Writer, lang string, lc *l18n.Localizer) { -//line views/stuff.qtpl:191 +//line views/stuff.qtpl:192 qw422016 := qt422016.AcquireWriter(qq422016) -//line views/stuff.qtpl:191 +//line views/stuff.qtpl:192 streamhelpTopicsHTML(qw422016, lang, lc) -//line views/stuff.qtpl:191 +//line views/stuff.qtpl:192 qt422016.ReleaseWriter(qw422016) -//line views/stuff.qtpl:191 +//line views/stuff.qtpl:192 } -//line views/stuff.qtpl:191 +//line views/stuff.qtpl:192 func helpTopicsHTML(lang string, lc *l18n.Localizer) string { -//line views/stuff.qtpl:191 +//line views/stuff.qtpl:192 qb422016 := qt422016.AcquireByteBuffer() -//line views/stuff.qtpl:191 +//line views/stuff.qtpl:192 writehelpTopicsHTML(qb422016, lang, lc) -//line views/stuff.qtpl:191 +//line views/stuff.qtpl:192 qs422016 := string(qb422016.B) -//line views/stuff.qtpl:191 +//line views/stuff.qtpl:192 qt422016.ReleaseByteBuffer(qb422016) -//line views/stuff.qtpl:191 +//line views/stuff.qtpl:192 return qs422016 -//line views/stuff.qtpl:191 +//line views/stuff.qtpl:192 } -//line views/stuff.qtpl:193 +//line views/stuff.qtpl:194 func streamhelpTopicBadgeHTML(qw422016 *qt422016.Writer, lang, topic string) { -//line views/stuff.qtpl:193 +//line views/stuff.qtpl:194 qw422016.N().S(` ? `) -//line views/stuff.qtpl:195 +//line views/stuff.qtpl:196 } -//line views/stuff.qtpl:195 +//line views/stuff.qtpl:196 func writehelpTopicBadgeHTML(qq422016 qtio422016.Writer, lang, topic string) { -//line views/stuff.qtpl:195 +//line views/stuff.qtpl:196 qw422016 := qt422016.AcquireWriter(qq422016) -//line views/stuff.qtpl:195 +//line views/stuff.qtpl:196 streamhelpTopicBadgeHTML(qw422016, lang, topic) -//line views/stuff.qtpl:195 +//line views/stuff.qtpl:196 qt422016.ReleaseWriter(qw422016) -//line views/stuff.qtpl:195 +//line views/stuff.qtpl:196 } -//line views/stuff.qtpl:195 +//line views/stuff.qtpl:196 func helpTopicBadgeHTML(lang, topic string) string { -//line views/stuff.qtpl:195 +//line views/stuff.qtpl:196 qb422016 := qt422016.AcquireByteBuffer() -//line views/stuff.qtpl:195 +//line views/stuff.qtpl:196 writehelpTopicBadgeHTML(qb422016, lang, topic) -//line views/stuff.qtpl:195 +//line views/stuff.qtpl:196 qs422016 := string(qb422016.B) -//line views/stuff.qtpl:195 +//line views/stuff.qtpl:196 qt422016.ReleaseByteBuffer(qb422016) -//line views/stuff.qtpl:195 +//line views/stuff.qtpl:196 return qs422016 -//line views/stuff.qtpl:195 +//line views/stuff.qtpl:196 } -//line views/stuff.qtpl:197 +//line views/stuff.qtpl:198 func StreamUserListHTML(qw422016 *qt422016.Writer, lc *l18n.Localizer) { -//line views/stuff.qtpl:197 +//line views/stuff.qtpl:198 qw422016.N().S(`

    `) -//line views/stuff.qtpl:200 +//line views/stuff.qtpl:201 qw422016.E().S(lc.Get("ui.users_heading")) -//line views/stuff.qtpl:200 +//line views/stuff.qtpl:201 qw422016.N().S(`

    `) -//line views/stuff.qtpl:202 +//line views/stuff.qtpl:203 var ( admins = make([]string, 0) moderators = make([]string, 0) @@ -853,149 +862,149 @@ func StreamUserListHTML(qw422016 *qt422016.Writer, lc *l18n.Localizer) { sort.Strings(moderators) sort.Strings(editors) -//line views/stuff.qtpl:220 +//line views/stuff.qtpl:221 qw422016.N().S(`

    `) -//line views/stuff.qtpl:222 +//line views/stuff.qtpl:223 qw422016.E().S(lc.Get("ui.users_admins")) -//line views/stuff.qtpl:222 +//line views/stuff.qtpl:223 qw422016.N().S(`

      `) -//line views/stuff.qtpl:223 +//line views/stuff.qtpl:224 for _, name := range admins { -//line views/stuff.qtpl:223 +//line views/stuff.qtpl:224 qw422016.N().S(`
    1. `) -//line views/stuff.qtpl:224 +//line views/stuff.qtpl:225 qw422016.E().S(name) -//line views/stuff.qtpl:224 +//line views/stuff.qtpl:225 qw422016.N().S(`
    2. `) -//line views/stuff.qtpl:225 +//line views/stuff.qtpl:226 } -//line views/stuff.qtpl:225 +//line views/stuff.qtpl:226 qw422016.N().S(`

    `) -//line views/stuff.qtpl:228 +//line views/stuff.qtpl:229 qw422016.E().S(lc.Get("ui.users_moderators")) -//line views/stuff.qtpl:228 +//line views/stuff.qtpl:229 qw422016.N().S(`

      `) -//line views/stuff.qtpl:229 +//line views/stuff.qtpl:230 for _, name := range moderators { -//line views/stuff.qtpl:229 +//line views/stuff.qtpl:230 qw422016.N().S(`
    1. `) -//line views/stuff.qtpl:230 +//line views/stuff.qtpl:231 qw422016.E().S(name) -//line views/stuff.qtpl:230 +//line views/stuff.qtpl:231 qw422016.N().S(`
    2. `) -//line views/stuff.qtpl:231 +//line views/stuff.qtpl:232 } -//line views/stuff.qtpl:231 +//line views/stuff.qtpl:232 qw422016.N().S(`

    `) -//line views/stuff.qtpl:234 +//line views/stuff.qtpl:235 qw422016.E().S(lc.Get("ui.users_editors")) -//line views/stuff.qtpl:234 +//line views/stuff.qtpl:235 qw422016.N().S(`

      `) -//line views/stuff.qtpl:235 +//line views/stuff.qtpl:236 for _, name := range editors { -//line views/stuff.qtpl:235 +//line views/stuff.qtpl:236 qw422016.N().S(`
    1. `) -//line views/stuff.qtpl:236 +//line views/stuff.qtpl:237 qw422016.E().S(name) -//line views/stuff.qtpl:236 +//line views/stuff.qtpl:237 qw422016.N().S(`
    2. `) -//line views/stuff.qtpl:237 +//line views/stuff.qtpl:238 } -//line views/stuff.qtpl:237 +//line views/stuff.qtpl:238 qw422016.N().S(`
    `) -//line views/stuff.qtpl:241 +//line views/stuff.qtpl:242 } -//line views/stuff.qtpl:241 +//line views/stuff.qtpl:242 func WriteUserListHTML(qq422016 qtio422016.Writer, lc *l18n.Localizer) { -//line views/stuff.qtpl:241 +//line views/stuff.qtpl:242 qw422016 := qt422016.AcquireWriter(qq422016) -//line views/stuff.qtpl:241 +//line views/stuff.qtpl:242 StreamUserListHTML(qw422016, lc) -//line views/stuff.qtpl:241 +//line views/stuff.qtpl:242 qt422016.ReleaseWriter(qw422016) -//line views/stuff.qtpl:241 +//line views/stuff.qtpl:242 } -//line views/stuff.qtpl:241 +//line views/stuff.qtpl:242 func UserListHTML(lc *l18n.Localizer) string { -//line views/stuff.qtpl:241 +//line views/stuff.qtpl:242 qb422016 := qt422016.AcquireByteBuffer() -//line views/stuff.qtpl:241 +//line views/stuff.qtpl:242 WriteUserListHTML(qb422016, lc) -//line views/stuff.qtpl:241 +//line views/stuff.qtpl:242 qs422016 := string(qb422016.B) -//line views/stuff.qtpl:241 +//line views/stuff.qtpl:242 qt422016.ReleaseByteBuffer(qb422016) -//line views/stuff.qtpl:241 +//line views/stuff.qtpl:242 return qs422016 -//line views/stuff.qtpl:241 +//line views/stuff.qtpl:242 } -//line views/stuff.qtpl:243 +//line views/stuff.qtpl:244 func StreamHyphaListHTML(qw422016 *qt422016.Writer, lc *l18n.Localizer) { -//line views/stuff.qtpl:243 +//line views/stuff.qtpl:244 qw422016.N().S(`

    `) -//line views/stuff.qtpl:246 +//line views/stuff.qtpl:247 qw422016.E().S(lc.Get("ui.list_heading")) -//line views/stuff.qtpl:246 +//line views/stuff.qtpl:247 qw422016.N().S(`

    `) -//line views/stuff.qtpl:247 +//line views/stuff.qtpl:248 qw422016.E().S(lc.GetPlural("ui.list_desc", hyphae.Count())) -//line views/stuff.qtpl:247 +//line views/stuff.qtpl:248 qw422016.N().S(`

      `) -//line views/stuff.qtpl:250 +//line views/stuff.qtpl:251 hyphaNames := make(chan string) sortedHypha := hyphae.PathographicSort(hyphaNames) for hypha := range hyphae.YieldExistingHyphae() { @@ -1003,252 +1012,252 @@ func StreamHyphaListHTML(qw422016 *qt422016.Writer, lc *l18n.Localizer) { } close(hyphaNames) -//line views/stuff.qtpl:256 +//line views/stuff.qtpl:257 qw422016.N().S(` `) -//line views/stuff.qtpl:257 +//line views/stuff.qtpl:258 for hyphaName := range sortedHypha { -//line views/stuff.qtpl:257 +//line views/stuff.qtpl:258 qw422016.N().S(` `) -//line views/stuff.qtpl:258 +//line views/stuff.qtpl:259 hypha := hyphae.ByName(hyphaName) -//line views/stuff.qtpl:258 +//line views/stuff.qtpl:259 qw422016.N().S(`
    • `) -//line views/stuff.qtpl:260 +//line views/stuff.qtpl:261 qw422016.E().S(util.BeautifulName(hypha.Name)) -//line views/stuff.qtpl:260 +//line views/stuff.qtpl:261 qw422016.N().S(` `) -//line views/stuff.qtpl:261 +//line views/stuff.qtpl:262 if hypha.BinaryPath != "" { -//line views/stuff.qtpl:261 +//line views/stuff.qtpl:262 qw422016.N().S(` `) -//line views/stuff.qtpl:262 +//line views/stuff.qtpl:263 qw422016.E().S(filepath.Ext(hypha.BinaryPath)[1:]) -//line views/stuff.qtpl:262 +//line views/stuff.qtpl:263 qw422016.N().S(` `) -//line views/stuff.qtpl:263 +//line views/stuff.qtpl:264 } -//line views/stuff.qtpl:263 +//line views/stuff.qtpl:264 qw422016.N().S(`
    • `) -//line views/stuff.qtpl:265 +//line views/stuff.qtpl:266 } -//line views/stuff.qtpl:265 +//line views/stuff.qtpl:266 qw422016.N().S(`
    `) -//line views/stuff.qtpl:269 +//line views/stuff.qtpl:270 } -//line views/stuff.qtpl:269 +//line views/stuff.qtpl:270 func WriteHyphaListHTML(qq422016 qtio422016.Writer, lc *l18n.Localizer) { -//line views/stuff.qtpl:269 +//line views/stuff.qtpl:270 qw422016 := qt422016.AcquireWriter(qq422016) -//line views/stuff.qtpl:269 +//line views/stuff.qtpl:270 StreamHyphaListHTML(qw422016, lc) -//line views/stuff.qtpl:269 +//line views/stuff.qtpl:270 qt422016.ReleaseWriter(qw422016) -//line views/stuff.qtpl:269 +//line views/stuff.qtpl:270 } -//line views/stuff.qtpl:269 +//line views/stuff.qtpl:270 func HyphaListHTML(lc *l18n.Localizer) string { -//line views/stuff.qtpl:269 +//line views/stuff.qtpl:270 qb422016 := qt422016.AcquireByteBuffer() -//line views/stuff.qtpl:269 +//line views/stuff.qtpl:270 WriteHyphaListHTML(qb422016, lc) -//line views/stuff.qtpl:269 +//line views/stuff.qtpl:270 qs422016 := string(qb422016.B) -//line views/stuff.qtpl:269 +//line views/stuff.qtpl:270 qt422016.ReleaseByteBuffer(qb422016) -//line views/stuff.qtpl:269 +//line views/stuff.qtpl:270 return qs422016 -//line views/stuff.qtpl:269 +//line views/stuff.qtpl:270 } -//line views/stuff.qtpl:271 +//line views/stuff.qtpl:272 func StreamAboutHTML(qw422016 *qt422016.Writer, lc *l18n.Localizer) { -//line views/stuff.qtpl:271 +//line views/stuff.qtpl:272 qw422016.N().S(`

    `) -//line views/stuff.qtpl:275 +//line views/stuff.qtpl:276 qw422016.E().S(lc.Get("ui.about_title", &l18n.Replacements{"name": cfg.WikiName})) -//line views/stuff.qtpl:275 +//line views/stuff.qtpl:276 qw422016.N().S(`

    • `) -//line views/stuff.qtpl:277 +//line views/stuff.qtpl:278 qw422016.N().S(lc.Get("ui.about_version", &l18n.Replacements{"pre": "", "post": ""})) -//line views/stuff.qtpl:277 +//line views/stuff.qtpl:278 qw422016.N().S(` 1.5.0
    • `) -//line views/stuff.qtpl:278 +//line views/stuff.qtpl:279 if cfg.UseAuth { -//line views/stuff.qtpl:278 +//line views/stuff.qtpl:279 qw422016.N().S(`
    • `) -//line views/stuff.qtpl:279 +//line views/stuff.qtpl:280 qw422016.E().S(lc.Get("ui.about_usercount")) -//line views/stuff.qtpl:279 +//line views/stuff.qtpl:280 qw422016.N().S(` `) -//line views/stuff.qtpl:279 +//line views/stuff.qtpl:280 qw422016.N().DUL(user.Count()) -//line views/stuff.qtpl:279 +//line views/stuff.qtpl:280 qw422016.N().S(`
    • `) -//line views/stuff.qtpl:280 +//line views/stuff.qtpl:281 qw422016.E().S(lc.Get("ui.about_homepage")) -//line views/stuff.qtpl:280 +//line views/stuff.qtpl:281 qw422016.N().S(` `) -//line views/stuff.qtpl:280 +//line views/stuff.qtpl:281 qw422016.E().S(cfg.HomeHypha) -//line views/stuff.qtpl:280 +//line views/stuff.qtpl:281 qw422016.N().S(`
    • `) -//line views/stuff.qtpl:281 +//line views/stuff.qtpl:282 qw422016.E().S(lc.Get("ui.about_admins")) -//line views/stuff.qtpl:281 +//line views/stuff.qtpl:282 qw422016.N().S(``) -//line views/stuff.qtpl:281 +//line views/stuff.qtpl:282 for i, username := range user.ListUsersWithGroup("admin") { -//line views/stuff.qtpl:282 +//line views/stuff.qtpl:283 if i > 0 { -//line views/stuff.qtpl:282 +//line views/stuff.qtpl:283 qw422016.N().S(` `) -//line views/stuff.qtpl:283 +//line views/stuff.qtpl:284 } -//line views/stuff.qtpl:283 +//line views/stuff.qtpl:284 qw422016.N().S(` `) -//line views/stuff.qtpl:284 +//line views/stuff.qtpl:285 qw422016.E().S(username) -//line views/stuff.qtpl:284 +//line views/stuff.qtpl:285 qw422016.N().S(``) -//line views/stuff.qtpl:284 +//line views/stuff.qtpl:285 } -//line views/stuff.qtpl:284 +//line views/stuff.qtpl:285 qw422016.N().S(`
    • `) -//line views/stuff.qtpl:285 +//line views/stuff.qtpl:286 } else { -//line views/stuff.qtpl:285 +//line views/stuff.qtpl:286 qw422016.N().S(`
    • `) -//line views/stuff.qtpl:286 +//line views/stuff.qtpl:287 qw422016.E().S(lc.Get("ui.about_noauth")) -//line views/stuff.qtpl:286 +//line views/stuff.qtpl:287 qw422016.N().S(`
    • `) -//line views/stuff.qtpl:287 +//line views/stuff.qtpl:288 } -//line views/stuff.qtpl:287 +//line views/stuff.qtpl:288 qw422016.N().S(`

    `) -//line views/stuff.qtpl:289 +//line views/stuff.qtpl:290 qw422016.N().S(lc.Get("ui.about_hyphae", &l18n.Replacements{"link": "/list"})) -//line views/stuff.qtpl:289 +//line views/stuff.qtpl:290 qw422016.N().S(`

    `) -//line views/stuff.qtpl:293 +//line views/stuff.qtpl:294 } -//line views/stuff.qtpl:293 +//line views/stuff.qtpl:294 func WriteAboutHTML(qq422016 qtio422016.Writer, lc *l18n.Localizer) { -//line views/stuff.qtpl:293 +//line views/stuff.qtpl:294 qw422016 := qt422016.AcquireWriter(qq422016) -//line views/stuff.qtpl:293 +//line views/stuff.qtpl:294 StreamAboutHTML(qw422016, lc) -//line views/stuff.qtpl:293 +//line views/stuff.qtpl:294 qt422016.ReleaseWriter(qw422016) -//line views/stuff.qtpl:293 +//line views/stuff.qtpl:294 } -//line views/stuff.qtpl:293 +//line views/stuff.qtpl:294 func AboutHTML(lc *l18n.Localizer) string { -//line views/stuff.qtpl:293 +//line views/stuff.qtpl:294 qb422016 := qt422016.AcquireByteBuffer() -//line views/stuff.qtpl:293 +//line views/stuff.qtpl:294 WriteAboutHTML(qb422016, lc) -//line views/stuff.qtpl:293 +//line views/stuff.qtpl:294 qs422016 := string(qb422016.B) -//line views/stuff.qtpl:293 +//line views/stuff.qtpl:294 qt422016.ReleaseByteBuffer(qb422016) -//line views/stuff.qtpl:293 +//line views/stuff.qtpl:294 return qs422016 -//line views/stuff.qtpl:293 +//line views/stuff.qtpl:294 } -//line views/stuff.qtpl:295 +//line views/stuff.qtpl:296 func StreamCommonScripts(qw422016 *qt422016.Writer) { -//line views/stuff.qtpl:295 +//line views/stuff.qtpl:296 qw422016.N().S(` `) -//line views/stuff.qtpl:296 +//line views/stuff.qtpl:297 for _, scriptPath := range cfg.CommonScripts { -//line views/stuff.qtpl:296 +//line views/stuff.qtpl:297 qw422016.N().S(` `) -//line views/stuff.qtpl:298 +//line views/stuff.qtpl:299 } -//line views/stuff.qtpl:298 +//line views/stuff.qtpl:299 qw422016.N().S(` `) -//line views/stuff.qtpl:299 +//line views/stuff.qtpl:300 } -//line views/stuff.qtpl:299 +//line views/stuff.qtpl:300 func WriteCommonScripts(qq422016 qtio422016.Writer) { -//line views/stuff.qtpl:299 +//line views/stuff.qtpl:300 qw422016 := qt422016.AcquireWriter(qq422016) -//line views/stuff.qtpl:299 +//line views/stuff.qtpl:300 StreamCommonScripts(qw422016) -//line views/stuff.qtpl:299 +//line views/stuff.qtpl:300 qt422016.ReleaseWriter(qw422016) -//line views/stuff.qtpl:299 +//line views/stuff.qtpl:300 } -//line views/stuff.qtpl:299 +//line views/stuff.qtpl:300 func CommonScripts() string { -//line views/stuff.qtpl:299 +//line views/stuff.qtpl:300 qb422016 := qt422016.AcquireByteBuffer() -//line views/stuff.qtpl:299 +//line views/stuff.qtpl:300 WriteCommonScripts(qb422016) -//line views/stuff.qtpl:299 +//line views/stuff.qtpl:300 qs422016 := string(qb422016.B) -//line views/stuff.qtpl:299 +//line views/stuff.qtpl:300 qt422016.ReleaseByteBuffer(qb422016) -//line views/stuff.qtpl:299 +//line views/stuff.qtpl:300 return qs422016 -//line views/stuff.qtpl:299 +//line views/stuff.qtpl:300 } diff --git a/web/history.go b/web/history.go index 595d19f..9c84b28 100644 --- a/web/history.go +++ b/web/history.go @@ -61,8 +61,14 @@ func handlerRecentChanges(w http.ResponseWriter, rq *http.Request) { } // genericHandlerOfFeeds is a helper function for the web feed handlers. 
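+// It parses history.FeedOptions from the request query string; errors from option
+// parsing and from feed generation are both reported as a plain-text internal server error.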
-func genericHandlerOfFeeds(w http.ResponseWriter, rq *http.Request, f func() (string, error), name string, contentType string) { - if content, err := f(); err != nil { +func genericHandlerOfFeeds(w http.ResponseWriter, rq *http.Request, f func(history.FeedOptions) (string, error), name string, contentType string) { + opts, err := history.ParseFeedOptions(rq.URL.Query()) + var content string + if err == nil { + content, err = f(opts) + } + + if err != nil { w.Header().Set("Content-Type", "text/plain;charset=utf-8") w.WriteHeader(http.StatusInternalServerError) fmt.Fprint(w, "An error while generating "+name+": "+err.Error())