}
commits = &HashesIter{hashes}
} else {
- for i := 0; i < offset; i++ {
+ for range offset {
if _, err = repoLog.Next(); err != nil {
break
}
logEnded := false
entries := make([]TableMenuEntry, 0, PageEntries)
- for i := 0; i < PageEntries; i++ {
+ for range PageEntries {
var commit *object.Commit
commit, err = commits.Next()
if err != nil {
)),
})
}
- offsetPrev := offset - PageEntries
- if offsetPrev < 0 {
- offsetPrev = 0
- }
+ offsetPrev := max(offset-PageEntries, 0)
err = TmplGemMenu.Execute(os.Stdout, struct {
T *spreak.Localizer
Cfg *Cfg
}
var commits CommitIterNext
if topic == "" {
- for i := 0; i < offset; i++ {
+ for range offset {
if _, err = repoLog.Next(); err != nil {
break
}
logEnded := false
entries := make([]TableMenuEntry, 0, PageEntries)
- for i := 0; i < PageEntries; i++ {
+ for range PageEntries {
var commit *object.Commit
commit, err = commits.Next()
if err != nil {
)),
})
}
- offsetPrev := offset - PageEntries
- if offsetPrev < 0 {
- offsetPrev = 0
- }
+ offsetPrev := max(offset-PageEntries, 0)
err = TmplGopherMenu.Execute(os.Stdout, struct {
T *spreak.Localizer
Cfg *Cfg
return strings.Join(cols, " ")
}
-func lineURLizeInTemplate(urlPrefix, line interface{}) string {
+func lineURLizeInTemplate(urlPrefix, line any) string {
return lineURLize(urlPrefix.(string), line.(string))
}
out = &outBuf
var zstdWriter *zstd.Encoder
acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
- for _, encoding := range strings.Split(acceptEncoding, ", ") {
+ for encoding := range strings.SplitSeq(acceptEncoding, ", ") {
if encoding == "zstd" {
zstdWriter, err = zstd.NewWriter(&outBuf, zstd.WithEncoderLevel(zstd.SpeedDefault))
if err != nil {
}
commits = &HashesIter{hashes}
} else {
- for i := 0; i < offset; i++ {
+ for range offset {
if _, err = repoLog.Next(); err != nil {
break
}
}
etagHash.Write([]byte("INDEX"))
etagHash.Write([]byte(topic))
- for i := 0; i < PageEntries; i++ {
+	for i := range PageEntries {
var commit *object.Commit
commit, err = commits.Next()
if err != nil {
entry.Topics = sgblog.ParseTopics(entry.TopicsRaw)
entries[i] = entry
}
- offsetPrev := offset - PageEntries
- if offsetPrev < 0 {
- offsetPrev = 0
- }
+ offsetPrev := max(offset-PageEntries, 0)
os.Stdout.Write([]byte(startHeader(etagHash, zstdWriter != nil)))
err = TmplHTMLIndex.Execute(out, struct {
T *spreak.Localizer
if err != nil {
makeErr(err, http.StatusInternalServerError)
}
- for i := 0; i < PageEntries; i++ {
+ for range PageEntries {
commit, err = repoLog.Next()
if err != nil {
break
commits = &HashesIter{hashes}
}
- for i := 0; i < PageEntries; i++ {
+ for range PageEntries {
commit, err = commits.Next()
if err != nil {
break
if err != nil {
makeErr(err, http.StatusInternalServerError)
}
- for i := 0; i < PageEntries; i++ {
+ for range PageEntries {
commit, err = repoLog.Next()
if err != nil {
break