implement homepage caching with periodic refresh
parent 7f478c9e3c
commit 65e369deea
api.go (86 changed lines)
@@ -14,10 +14,48 @@ import (
 )
 
 type Repo struct {
     Name        string    `json:"name"`
     Description string    `json:"description"`
-    CreatedAt   time.Time `json:"created_at"`
-    UpdatedAt   time.Time `json:"updated_at"`
+    GiteaCreated time.Time `json:"created_at"`
+    GiteaUpdated time.Time `json:"updated_at"`
+
+    oldestCommit time.Time
+    newestCommit time.Time
+    topics       []string
+}
+
+func (this *Application) populateCommitInfo(ctx context.Context, rr *Repo) {
+
+    // The most recent commit will be the head of one of the branches (easy to find)
+
+    brs, err := this.branches(ctx, rr.Name)
+    if err != nil {
+        log.Printf("loading branches for '%s': %s", rr.Name, err)
+        rr.newestCommit = rr.GiteaUpdated // best guess
+
+    } else {
+
+        newestCommit := time.Unix(0, 0) // sentinel
+        for _, br := range brs {
+            if br.Commit.Timestamp.After(newestCommit) {
+                newestCommit = br.Commit.Timestamp
+            }
+        }
+        if !newestCommit.Equal(time.Unix(0, 0)) {
+            rr.newestCommit = newestCommit // replace it
+        }
+    }
+
+    // The oldest commit needs us to page through the commit history to find it
+
+    oldestCommit, err := this.oldestCommit(ctx, rr.Name, "")
+    if err != nil {
+        log.Printf("finding oldest commit for '%s': %s", rr.Name, err)
+        rr.oldestCommit = rr.GiteaCreated // best guess
+
+    } else {
+        rr.oldestCommit = oldestCommit.Commit.Author.Date
+    }
 }
 
 type ContentsResponse struct {
@@ -197,46 +235,6 @@ func (this *Application) repos(ctx context.Context) ([]Repo, error) {
         nextPage += 1
     }
 
-    // The Created/Modified times aren't very good
-    // Replace them with the earliest/latest commit dates we can find
-
-    for i, rr := range ret {
-
-        // The most recent commit will be the head of one of the branches (easy to find)
-
-        brs, err := this.branches(ctx, rr.Name)
-        if err != nil {
-            log.Printf("loading branches for '%s': %s", rr.Name, err)
-            continue
-        }
-
-        newestCommit := time.Unix(0, 0) // sentinel
-        for _, br := range brs {
-            if br.Commit.Timestamp.After(newestCommit) {
-                newestCommit = br.Commit.Timestamp
-            }
-        }
-        if !newestCommit.Equal(time.Unix(0, 0)) {
-            ret[i].UpdatedAt = newestCommit // replace it
-        }
-    }
-
-    // Separate loop for oldest-commits, in case we needed to continue/break out
-    // of the earliest-commit loop
-
-    for i, rr := range ret {
-
-        // The oldest commit needs us to page through the commit history to find it
-
-        oldestCommit, err := this.oldestCommit(ctx, rr.Name, "")
-        if err != nil {
-            log.Printf("finding oldest commit for '%s': %s", rr.Name, err)
-            continue
-        }
-
-        ret[i].CreatedAt = oldestCommit.Commit.Author.Date
-    }
-
     return ret, nil
 }
 
main.go (9 changed lines)
@@ -1,11 +1,13 @@
 package main
 
 import (
+    "context"
     "flag"
     "log"
     "net/http"
     "regexp"
     "strings"
+    "sync"
 
     "github.com/BurntSushi/toml"
     "golang.org/x/sync/semaphore"
@@ -30,6 +32,10 @@ type Application struct {
 
     rxRepoPage, rxRepoImage *regexp.Regexp
     apiSem *semaphore.Weighted
+
+    reposMut               sync.RWMutex
+    reposCache             []Repo // Sorted by recently-created-first
+    reposAlphabeticalOrder map[string]int
 }
 
 func main() {
@@ -58,6 +64,9 @@ func main() {
         app.apiSem = semaphore.NewWeighted(app.cfg.Gitea.MaxConnections)
     }
 
+    // Sync worker
+    go app.syncWorker(context.Background())
+
     log.Printf("Starting web server on [%s]...", app.cfg.BindTo)
     log.Fatal(http.ListenAndServe(app.cfg.BindTo, &app))
 }
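Editor's note on the launch above: the worker is started with context.Background(), so it runs for the life of the process. The sketch below is an illustration only, not part of this commit; "worker" is a hypothetical stand-in for Application.syncWorker. It shows the same launch pattern with a cancellable context, so the worker's <-ctx.Done() branch (see sync.go further down) can actually fire on shutdown.

// Editor's illustration only; not part of this commit. "worker" is a
// hypothetical stand-in for Application.syncWorker.
package main

import (
    "context"
    "log"
    "os"
    "os/signal"
    "time"
)

func worker(ctx context.Context) {
    t := time.NewTicker(30 * time.Minute)
    defer t.Stop()
    for {
        select {
        case <-t.C:
            log.Printf("periodic refresh would run here")
        case <-ctx.Done():
            return // worker exits when the context is cancelled
        }
    }
}

func main() {
    // A cancellable context (here tied to Ctrl-C) lets the worker exit cleanly,
    // unlike context.Background(), which never cancels.
    ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt)
    defer stop()

    go worker(ctx)

    <-ctx.Done() // block until interrupted
}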
pages.go (60 changed lines)
@@ -7,7 +7,6 @@ import (
     "log"
     "net/http"
     "net/url"
-    "sort"
     "strings"
 )
 
@@ -52,36 +51,27 @@ func (this *Application) internalError(w http.ResponseWriter, r *http.Request, e
     http.Error(w, "An internal error occurred.", 500)
 }
 
-func (this *Application) Homepage(w http.ResponseWriter, r *http.Request) {
-    ctx := r.Context()
-
-    repos, err := this.repos(ctx)
-    if err != nil {
-        this.internalError(w, r, fmt.Errorf("listing repos: %w", err))
+func (this *Application) Delay(w http.ResponseWriter, r *http.Request) {
+    this.Templatepage(w, r, "Loading...", "", func() {
+        fmt.Fprintf(w, `
+<h2>Loading, please wait...</h2>
+
+<meta http-equiv="refresh" content="5">
+`)
+    })
+}
+
+func (this *Application) Homepage(w http.ResponseWriter, r *http.Request) {
+
+    this.reposMut.RLock()
+    defer this.reposMut.RUnlock()
+
+    if len(this.reposCache) == 0 {
+        // We haven't loaded the repositories from Gitea yet
+        this.Delay(w, r)
         return
     }
 
-    topics := make(map[string][]string)
-    for _, repo := range repos {
-        if t, err := this.topicsForRepo(ctx, repo.Name); err == nil {
-            topics[repo.Name] = t
-        }
-    }
-
-    // Sort repos once alphabetically, to get alphabetical indexes...
-    sort.Slice(repos, func(i, j int) bool {
-        return repos[i].Name < repos[j].Name
-    })
-    alphabeticalOrderIndexes := make(map[string]int, len(repos))
-    for idx, repo := range repos {
-        alphabeticalOrderIndexes[repo.Name] = idx
-    }
-
-    // But then make sure the final sort is by most-recently-created
-    sort.Slice(repos, func(i, j int) bool {
-        return repos[i].CreatedAt.After(repos[j].CreatedAt)
-    })
-
     // Ready for template
 
     this.Templatepage(w, r, "", "", func() {
@@ -94,10 +84,10 @@ func (this *Application) Homepage(w http.ResponseWriter, r *http.Request) {
             <option value="data-sort-mt">Recent updates</option>
         </select>
 
-    <h2>Projects <small>(`+fmt.Sprintf("%d", len(repos))+`)</small></h2>
+    <h2>Projects <small>(`+fmt.Sprintf("%d", len(this.reposCache))+`)</small></h2>
     <table id="projtable-main" class="projtable">
     `)
-    for _, repo := range repos {
+    for _, repo := range this.reposCache {
         pageHref := html.EscapeString(`/` + url.PathEscape(repo.Name))
 
         normalisedDesc := repo.Description
@@ -116,17 +106,17 @@ func (this *Application) Homepage(w http.ResponseWriter, r *http.Request) {
         }
 
         rowClass := ""
-        for _, topic := range topics[repo.Name] {
+        for _, topic := range repo.topics {
             rowClass += `taggedWith-` + topic + ` `
         }
 
         fmt.Fprint(w, `
         <tr
             class="`+html.EscapeString(rowClass)+`"
-            data-sort-al="`+fmt.Sprintf("-%d", alphabeticalOrderIndexes[repo.Name])+`"
-            data-sort-ls="`+fmt.Sprintf("%.0f", repo.UpdatedAt.Sub(repo.CreatedAt).Seconds())+`"
-            data-sort-ct="`+fmt.Sprintf("%d", repo.CreatedAt.Unix())+`"
-            data-sort-mt="`+fmt.Sprintf("%d", repo.UpdatedAt.Unix())+`"
+            data-sort-al="`+fmt.Sprintf("-%d", this.reposAlphabeticalOrder[repo.Name])+`"
+            data-sort-ls="`+fmt.Sprintf("%.0f", repo.newestCommit.Sub(repo.oldestCommit).Seconds())+`"
+            data-sort-ct="`+fmt.Sprintf("%d", repo.oldestCommit.Unix())+`"
+            data-sort-mt="`+fmt.Sprintf("%d", repo.newestCommit.Unix())+`"
         >
             <td>
                 <a href="`+pageHref+`"><img class="homeimage" loading="lazy" src="`+html.EscapeString(`/:banner/`+url.PathEscape(repo.Name))+`"></div></a>
@@ -137,7 +127,7 @@ func (this *Application) Homepage(w http.ResponseWriter, r *http.Request) {
                 <br>
                 <small>
     `)
-        for _, topic := range topics[repo.Name] {
+        for _, topic := range repo.topics {
             fmt.Fprint(w, `<a class="tag tag-link" data-tag="`+html.EscapeString(topic)+`">`+html.EscapeString(topic)+`</a> `)
         }
         fmt.Fprint(w, `
sync.go (new file, 110 lines)
@@ -0,0 +1,110 @@
+package main
+
+import (
+    "context"
+    "log"
+    "sort"
+    "time"
+)
+
+func (this *Application) sync(ctx context.Context) (bool, error) {
+
+    // List repositories on Gitea
+    repos, err := this.repos(ctx)
+    if err != nil {
+        return false, err
+    }
+
+    // Compare this list of repositories to our existing one.
+    // If the repository is new, or if its update-time has changed since we last
+    // saw it, then refresh its real git commit timestamps.
+    // Otherwise copy them from the previous version.
+
+    this.reposMut.RLock() // readonly
+
+    anyChanges := false
+    if len(repos) != len(this.reposCache) {
+        anyChanges = true
+    }
+
+    for i, rr := range repos {
+        if idx, ok := this.reposAlphabeticalOrder[rr.Name]; ok && this.reposCache[idx].GiteaUpdated == rr.GiteaUpdated {
+            // Already exists in cache with same Gitea update time
+            // Copy timestamps
+            repos[i] = this.reposCache[idx]
+
+        } else {
+
+            // New repo, or Gitea has updated timestamp
+            anyChanges = true
+
+            // Refresh timestamps
+            this.populateCommitInfo(ctx, &rr)
+
+            // Refresh topics
+            if t, err := this.topicsForRepo(ctx, rr.Name); err == nil {
+                rr.topics = t
+            }
+
+            // Save
+            repos[i] = rr
+        }
+    }
+
+    this.reposMut.RUnlock()
+
+    //
+    if !anyChanges {
+        return false, nil // nothing to do
+    }
+
+    // We have a final updated repos array
+
+    // Sort repos once alphabetically, to get alphabetical indexes...
+    sort.Slice(repos, func(i, j int) bool {
+        return repos[i].Name < repos[j].Name
+    })
+    alphabeticalOrderIndexes := make(map[string]int, len(repos))
+    for idx, repo := range repos {
+        alphabeticalOrderIndexes[repo.Name] = idx
+    }
+
+    // But then make sure the final sort is by most-recently-created
+    sort.Slice(repos, func(i, j int) bool {
+        return repos[i].oldestCommit.After(repos[j].oldestCommit)
+    })
+
+    // Commit our changes for the other threads to look at
+    this.reposMut.Lock()
+    this.reposCache = repos
+    this.reposAlphabeticalOrder = alphabeticalOrderIndexes
+    this.reposMut.Unlock()
+
+    // Done
+    return true, nil
+}
+
+func (this *Application) syncWorker(ctx context.Context) {
+
+    t := time.NewTicker(30 * time.Minute)
+    defer t.Stop()
+
+    for {
+        anyChanges, err := this.sync(ctx)
+        if err != nil {
+            // log and continue
+            log.Printf("Refreshing repositories: %s", err.Error())
+        }
+        if anyChanges {
+            log.Printf("Repositories updated")
+        }
+
+        select {
+        case <-t.C:
+            continue
+
+        case <-ctx.Done():
+            return
+        }
+    }
+}
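Editor's note on the file above: the core of sync.go is a read-mostly cache guarded by a sync.RWMutex. Request handlers read the cached slice under RLock (see pages.go), while the worker builds a replacement slice off to the side and publishes it under a brief exclusive Lock. Below is a distilled, self-contained sketch of that pattern; it is an illustration only, and the names repoCache, Item, Read and Replace are invented for the sketch rather than taken from the repository.

// Editor's illustration only; not part of this commit. Distills the
// RWMutex-guarded cache swap that sync.go and pages.go implement.
package main

import (
    "fmt"
    "sync"
)

type Item struct{ Name string }

type repoCache struct {
    mu    sync.RWMutex
    items []Item
}

// Read returns the current snapshot. Because Replace swaps the whole slice
// rather than mutating it in place, callers may keep using the returned
// snapshot after the lock is released (but must not modify it).
func (c *repoCache) Read() []Item {
    c.mu.RLock()
    defer c.mu.RUnlock()
    return c.items
}

// Replace is what the periodic sync does: the new list is built without
// holding the write lock, then published in a short exclusive section.
func (c *repoCache) Replace(fresh []Item) {
    c.mu.Lock()
    c.items = fresh
    c.mu.Unlock()
}

func main() {
    c := &repoCache{}
    c.Replace([]Item{{Name: "repo-a"}, {Name: "repo-b"}})
    fmt.Println(len(c.Read()), "repos cached") // prints: 2 repos cached
}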