Add test against #30 #32

Merged
2 commits merged on Jul 13, 2024
79 changes: 79 additions & 0 deletions httprc_test.go
@@ -2,17 +2,30 @@ package httprc_test

import (
"context"
"flag"
"fmt"
"log"
"net/http"
"net/http/httptest"
"os"
"sync"
"testing"
"time"

"github.com/lestrrat-go/httprc"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

func TestMain(m *testing.M) {
flag.Parse() // need to be called for us to use testing.Verbose()
if testing.Verbose() {
log.Printf("github.com/lestrrat-go/httprc: Because these tests deal with caching and timing,")
log.Printf(" they may take a while to run. Please be patient.")
}
os.Exit(m.Run())
}

type dummyErrSink struct {
mu sync.RWMutex
errors []error
@@ -110,3 +123,69 @@ func TestCache(t *testing.T) {
return
}
}

// This test is taken from https://gist.github.com/TheJokr/d5b836cca484d4a00967504c553987cf
// It reproduces a panic that could occur when a refresh is scheduled while a fetch worker is busy.
func TestGH30(t *testing.T) {
t.Parallel()

// Simulate slow endpoint with 2s response latency
slowServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
time.Sleep(2 * time.Second)
w.WriteHeader(http.StatusOK)
}))
defer slowServer.Close()

// Even slower endpoint to make fetch worker busy
blockServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
time.Sleep(5 * time.Second)
w.WriteHeader(http.StatusOK)
}))
defer blockServer.Close()

ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()

slowURL := slowServer.URL
blockURL := blockServer.URL

// Refresh quickly to make sure the scheduled refresh and the
// foreground refresh are queued while blockURL is fetching
refreshWindow := 1 * time.Second
refreshInterval := 2 * time.Second
// Limit to 1 worker to make queueing requests easier
workerCount := 1

cache := httprc.NewCache(ctx,
httprc.WithRefreshWindow(refreshWindow),
httprc.WithFetcherWorkerCount(workerCount))

require.NoError(t, cache.Register(blockURL, httprc.WithRefreshInterval(time.Hour)), `register should succeed`)
require.NoError(t, cache.Register(slowURL, httprc.WithRefreshInterval(refreshInterval)), `register should succeed`)

// Step 1: Fetch slowURL once to schedule its refresh
_, err := cache.Get(ctx, slowURL)
require.NoError(t, err, `get should succeed`)

// Step 2: Fetch blockURL in a separate goroutine
// to make sure our single fetch worker is busy
running := make(chan struct{})
go func() {
close(running)
_, err := cache.Get(ctx, blockURL)
require.NoError(t, err, `get (block url) should succeed`)
}()

// Step 3: Wait for blockURL to start fetching
<-running

// Step 4: Queue foreground refresh
// By the time the blockURL fetch finishes, both this Refresh(...)
// and the scheduled refresh for slowURL will be queued. The second
// of those will cause the panic.
_, err = cache.Refresh(ctx, slowURL)
require.NoError(t, err, `refresh (slow url) should succeed`)

// Step 5: Wait for the context to expire; if the bug is present, the panic fires during this wait
<-ctx.Done()
}
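
For orientation, here is a minimal, self-contained sketch of the cache usage pattern this test exercises, assembled only from calls that appear in the diff above (NewCache, WithRefreshWindow, WithFetcherWorkerCount, Register, WithRefreshInterval, Get, Refresh). The URL, durations, and worker count are placeholder values, not part of this PR.

package main

import (
	"context"
	"fmt"
	"log"
	"time"

	"github.com/lestrrat-go/httprc"
)

func main() {
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
	defer cancel()

	// Cache with a refresh window and a small worker pool (placeholder values).
	cache := httprc.NewCache(ctx,
		httprc.WithRefreshWindow(15*time.Minute),
		httprc.WithFetcherWorkerCount(3),
	)

	// Placeholder URL; any HTTP resource works.
	const u = "https://example.com/resource.json"
	if err := cache.Register(u, httprc.WithRefreshInterval(time.Hour)); err != nil {
		log.Fatal(err)
	}

	// Get blocks until the first fetch completes, then serves the cached payload.
	payload, err := cache.Get(ctx, u)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("fetched payload of type %T\n", payload)

	// Refresh forces a foreground re-fetch ahead of the scheduled refresh;
	// this is the call the test above uses to queue a second refresh.
	if _, err := cache.Refresh(ctx, u); err != nil {
		log.Fatal(err)
	}
}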