Merge pull request #688

v3.6.0
This commit is contained in:
5rahim
2026-04-15 15:52:58 +02:00
committed by GitHub
301 changed files with 21866 additions and 48766 deletions

22
.gitignore vendored
View File

@@ -1,13 +1,11 @@
.idea
.junie
logs/
*.db
.run/
testdata/
.vscode
.cursor
.DS_Store
*/.DS_Store
.todo/
Dockerfile
Dockerfile.dev
@@ -91,3 +89,21 @@ codegen/nonext/
local_testing
dist-server
scripts/build.go
# editors, tools
.junie
.vscode
.cursor
.agents
.claude
.zed
.fleet
.windsurf
.aider*
.supermaven
.skills
skills*.json
.github/agents/
.github/prompts/
.github/skills/
*.agent.md

View File

@@ -188,24 +188,45 @@ go mod tidy
#### Writing Tests
Tests use the `test_utils` package which provides:
- `InitTestProvider` method to initialize the test configuration
- Flags to enable/disable specific test categories
Tests use the `internal/testutil` package which provides:
- `InitTestProvider` to load test configuration and apply feature-flag skips
- `NewTestEnv` to create an isolated temp root, app data dir, cache dir, and database for tests
- `FixtureRelPath` and fixture helpers
- `RequireSampleVideoPath` for media-player tests that need a real sample file
Example:
```go
func TestSomething(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
// Test code here
env := testutil.NewTestEnv(t, testutil.Anilist())
database := env.MustNewDatabase(util.NewLogger())
_ = database
}
```
AniList mock fixtures are read-only during normal test runs. Set `SEANIME_TEST_RECORD_ANILIST_FIXTURES=true` when you intentionally want missing or refreshed fixtures written back to the repository.
To avoid remembering the environment variable and basic auth checks, use the refresh wrapper:
```bash
go run ./scripts/record_anilist_fixtures
```
Notes:
- It validates that `test/config.toml` exists, `flags.enable_anilist_tests=true`, and `provider.anilist_jwt` is set.
- It defaults to refreshing `./internal/api/anilist` and sets `SEANIME_TEST_RECORD_ANILIST_FIXTURES=true` for the test process.
- Pass packages to widen the refresh scope, for example `go run ./scripts/record_anilist_fixtures ./internal/api/anilist ./internal/library/scanner`.
- Pass `-run` to target specific live refresh tests, for example `go run ./scripts/record_anilist_fixtures -run 'TestGetAnimeByIdLive|TestBaseAnime_FetchMediaTree_BaseAnimeLive'`.
#### Testing with Third-Party Apps
Some tests interact with applications like Transmission and qBittorrent:
- Ensure these applications are installed and running
- Configure `test/config.toml` with appropriate connection details
Media-player tests that open a file also require `path.sampleVideoPath` in `test/config.toml`, or `TEST_SAMPLE_VIDEO_PATH` in the environment.
## Notes and Warnings
- hls.js versions 1.6.0 and above may cause appendBuffer fatal errors

View File

@@ -86,7 +86,6 @@ This is a one-person project and may not meet every use case. If it doesn't fu
### Not planned
- Android, iOS, AndroidTV, tvOS, ... apps
- Built-in support for other trackers such as MyAnimeList, Trakt, SIMKL, etc.
- Built-in support for other media players
- Built-in localization (translations)

File diff suppressed because it is too large Load Diff

View File

@@ -100,7 +100,7 @@ func GenerateTypescriptEndpointsFile(handlersJsonPath string, structsJsonPath st
if len(route.Api.Descriptions) > 0 {
writeLine(f, " /**")
f.WriteString(fmt.Sprintf(" * @description\n"))
f.WriteString(" * @description\n")
f.WriteString(fmt.Sprintf(" * Route %s\n", route.Api.Summary))
for _, cmt := range route.Api.Descriptions {
writeLine(f, fmt.Sprintf(" * %s", strings.TrimSpace(cmt)))
@@ -229,7 +229,7 @@ func GenerateTypescriptEndpointsFile(handlersJsonPath string, structsJsonPath st
typeF.WriteString(fmt.Sprintf(" * - Filename: %s\n", route.Filename))
typeF.WriteString(fmt.Sprintf(" * - Endpoint: %s\n", route.Api.Endpoint))
if len(route.Api.Summary) > 0 {
typeF.WriteString(fmt.Sprintf(" * @description\n"))
typeF.WriteString(" * @description\n")
typeF.WriteString(fmt.Sprintf(" * Route %s\n", strings.TrimSpace(route.Api.Summary)))
}
typeF.WriteString(" */\n")
@@ -451,16 +451,12 @@ func getEndpointKey(s string, groupName string) string {
result += string(v)
}
result = strings.ToLower(result)
if strings.Contains(result, "t-v-d-b") {
result = strings.Replace(result, "t-v-d-b", "tvdb", 1)
}
if strings.Contains(result, "m-a-l") {
result = strings.Replace(result, "m-a-l", "mal", 1)
}
result = strings.Replace(result, "t-v-d-b", "tvdb", 1)
result = strings.Replace(result, "m-a-l", "mal", 1)
return strings.ReplaceAll(groupName, "_", "-") + "-" + result
}
func writeLine(file *os.File, template string) {
template = strings.ReplaceAll(template, "\t", space)
file.WriteString(fmt.Sprintf(template + "\n"))
file.WriteString(template + "\n")
}

View File

@@ -11,6 +11,7 @@ import (
"seanime/internal/events"
"seanime/internal/util"
"strconv"
"sync"
"time"
"github.com/goccy/go-json"
@@ -259,7 +260,231 @@ func (ac *AnilistClientImpl) AnimeAiringScheduleRaw(ctx context.Context, ids []*
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
var sentRateLimitWarningTime = time.Now().Add(-10 * time.Second)
// requestRateBlocker coordinates a shared rate-limit pause across all
// AniList requests in the process.
type requestRateBlocker interface {
	// Wait blocks until any active rate-limit window has expired.
	Wait(ctx context.Context, sleep requestSleepFunc) error
	// BlockUntil installs a block lasting until the given time and
	// reports whether it actually extended the current block.
	BlockUntil(until time.Time) bool
}

// requestSleepFunc sleeps for delay or returns early with the
// context's error when ctx is done.
type requestSleepFunc func(ctx context.Context, delay time.Duration) error

// aniListRateBlocker is the default requestRateBlocker implementation.
type aniListRateBlocker struct {
	mu           sync.Mutex
	blockedUntil time.Time        // zero value means "not blocked"
	now          func() time.Time // injectable clock for tests; nil falls back to time.Now
}
// newAniListRateBlocker builds an unblocked blocker that uses the wall
// clock.
func newAniListRateBlocker() *aniListRateBlocker {
	blocker := &aniListRateBlocker{}
	blocker.now = time.Now
	return blocker
}
// Wait blocks the caller until any shared rate-limit block has expired.
// The deadline is re-read after every sleep because another goroutine
// may extend the block while we wait. sleep defaults to
// sleepWithContext; its error (typically a context error) aborts the
// wait.
func (b *aniListRateBlocker) Wait(ctx context.Context, sleep requestSleepFunc) error {
	if sleep == nil {
		sleep = sleepWithContext
	}
	for {
		// Snapshot the deadline under the lock; never hold the lock
		// while sleeping.
		b.mu.Lock()
		blockedUntil := b.blockedUntil
		now := b.currentTime()
		b.mu.Unlock()
		if blockedUntil.IsZero() || !now.Before(blockedUntil) {
			return nil
		}
		if err := sleep(ctx, blockedUntil.Sub(now)); err != nil {
			return err
		}
	}
}
// BlockUntil records a shared rate-limit block lasting until the given
// time. It reports true only when the deadline actually extends the
// current block; zero, past, or equal/shorter deadlines are ignored so
// concurrent 429s with the same reset are announced only once.
func (b *aniListRateBlocker) BlockUntil(until time.Time) bool {
	if until.IsZero() {
		return false
	}
	b.mu.Lock()
	defer b.mu.Unlock()
	current := b.currentTime()
	if until.After(current) && until.After(b.blockedUntil) {
		b.blockedUntil = until
		return true
	}
	return false
}
// currentTime reads the blocker's clock, falling back to the wall
// clock when no test clock was injected.
func (b *aniListRateBlocker) currentTime() time.Time {
	if b.now == nil {
		return time.Now()
	}
	return b.now()
}
func parseResponseDate(headers http.Header) (time.Time, bool) {
raw := headers.Get("Date")
if raw == "" {
return time.Time{}, false
}
parsed, err := http.ParseTime(raw)
if err != nil {
return time.Time{}, false
}
return parsed, true
}
// parseAniListRateLimitResetTime derives the absolute time at which the
// AniList rate limit resets. Retry-After takes precedence; otherwise
// X-RateLimit-Reset is read first as a unix-seconds timestamp and then
// as an HTTP date. It reports false when neither header yields a time.
func parseAniListRateLimitResetTime(headers http.Header, now time.Time) (time.Time, bool) {
	if retryAt, ok := parseRetryAfterTime(headers, now); ok {
		return retryAt, true
	}
	value := headers.Get("X-RateLimit-Reset")
	if value == "" {
		return time.Time{}, false
	}
	// The reset header is normally a unix timestamp in seconds.
	if seconds, convErr := strconv.ParseInt(value, 10, 64); convErr == nil && seconds > 0 {
		return time.Unix(seconds, 0), true
	}
	// Fall back to an HTTP-date value.
	if when, parseErr := http.ParseTime(value); parseErr == nil {
		return when, true
	}
	return time.Time{}, false
}
func parseRetryAfterTime(headers http.Header, now time.Time) (time.Time, bool) {
raw := headers.Get("Retry-After")
if raw == "" {
return time.Time{}, false
}
if retryAfterSeconds, err := strconv.Atoi(raw); err == nil {
return now.Truncate(time.Second).Add(time.Duration(retryAfterSeconds+1) * time.Second), true
}
parsed, err := http.ParseTime(raw)
if err != nil {
return time.Time{}, false
}
return parsed, true
}
var (
	// sentRateLimitWarningTime throttles the user-facing rate-limit
	// toast to at most one every 10 seconds.
	// NOTE(review): read and written without synchronization from
	// notifyAniListRateLimit — racy under concurrent requests; confirm
	// this is acceptable or guard it.
	sentRateLimitWarningTime = time.Now().Add(-10 * time.Second)
	// sharedAniListRateBlocker is the process-wide rate-limit block
	// shared by every AniList request path.
	sharedAniListRateBlocker requestRateBlocker = newAniListRateBlocker()
)
// doAniListRequestWithRetries executes req, honoring AniList rate-limit
// headers. When a response carries a reset time (Retry-After or
// X-RateLimit-Reset), the shared rateBlocker is armed, onRateLimited is
// invoked with the wait in whole seconds (at least 1), and the request
// is retried after waiting. At most retryCount attempts are made.
//
// client defaults to http.DefaultClient and sleep to sleepWithContext.
// rlRemainingStr echoes the last X-Ratelimit-Remaining header seen.
// The caller owns resp.Body on every non-error return — including the
// case where all attempts were rate limited, in which case the final
// rate-limited response is returned with its body intact.
func doAniListRequestWithRetries(
	client *http.Client,
	req *http.Request,
	rateBlocker requestRateBlocker,
	sleep requestSleepFunc,
	onRateLimited func(waitSeconds int),
) (resp *http.Response, rlRemainingStr string, err error) {
	if client == nil {
		client = http.DefaultClient
	}
	if sleep == nil {
		sleep = sleepWithContext
	}

	const retryCount = 2
	for i := 0; i < retryCount; i++ {
		if err := req.Context().Err(); err != nil {
			return nil, rlRemainingStr, err
		}

		// Wait out any block installed by a previous response, possibly
		// from another goroutine sharing the blocker.
		if rateBlocker != nil {
			if err := rateBlocker.Wait(req.Context(), sleep); err != nil {
				return nil, rlRemainingStr, err
			}
		}

		// The first attempt consumed the body; replay it on retries.
		if i > 0 && req.Body != nil {
			if req.GetBody == nil {
				return nil, rlRemainingStr, errors.New("failed to retry request: request body is not replayable")
			}
			newBody, err := req.GetBody()
			if err != nil {
				return nil, rlRemainingStr, fmt.Errorf("failed to get request body: %w", err)
			}
			req.Body = newBody
		}

		resp, err = client.Do(req)
		if err != nil {
			return nil, rlRemainingStr, fmt.Errorf("request failed: %w", err)
		}

		rlRemainingStr = resp.Header.Get("X-Ratelimit-Remaining")

		// Prefer the server's clock when computing the reset delay.
		responseTime := time.Now()
		if responseDate, ok := parseResponseDate(resp.Header); ok {
			responseTime = responseDate
		}

		resetAt, rateLimited := parseAniListRateLimitResetTime(resp.Header, responseTime)
		if !rateLimited {
			return resp, rlRemainingStr, nil
		}

		// Arm the shared blocker; BlockUntil reports false for
		// duplicate/shorter deadlines so the warning fires only once
		// per block.
		if rateBlocker == nil || rateBlocker.BlockUntil(resetAt) {
			if onRateLimited != nil {
				waitSeconds := int(resetAt.Sub(responseTime).Round(time.Second) / time.Second)
				if waitSeconds < 1 {
					waitSeconds = 1
				}
				onRateLimited(waitSeconds)
			}
		}

		// BUG FIX: the previous version also closed the body on the
		// final attempt (closeAniListResponseBody nils resp.Body) and
		// then returned that response, so callers that
		// `defer resp.Body.Close()` panicked on a nil Body. Return the
		// last rate-limited response intact instead.
		if i == retryCount-1 {
			return resp, rlRemainingStr, nil
		}
		closeAniListResponseBody(resp)
	}
	return resp, rlRemainingStr, nil
}
func closeAniListResponseBody(resp *http.Response) {
if resp == nil || resp.Body == nil {
return
}
_ = resp.Body.Close()
resp.Body = nil
}
func sleepWithContext(ctx context.Context, delay time.Duration) error {
timer := time.NewTimer(delay)
defer timer.Stop()
select {
case <-ctx.Done():
return ctx.Err()
case <-timer.C:
return nil
}
}
// notifyAniListRateLimit logs every rate-limit hit and additionally
// pushes a warning toast to the UI, throttled to at most one toast per
// 10 seconds via the package-level sentRateLimitWarningTime.
// NOTE(review): sentRateLimitWarningTime is read and written here
// without a lock — concurrent callers race on it; confirm acceptable.
func notifyAniListRateLimit(logger *zerolog.Logger, waitSeconds int) {
	if logger != nil {
		logger.Warn().Msgf("anilist: Rate limited, retrying in %d seconds", waitSeconds)
	}
	// Throttle only the toast, never the log line.
	if time.Since(sentRateLimitWarningTime) <= 10*time.Second {
		return
	}
	if events.GlobalWSEventManager != nil {
		events.GlobalWSEventManager.SendEvent(events.WarningToast, "anilist: Rate limited, retrying in "+strconv.Itoa(waitSeconds)+" seconds")
	}
	sentRateLimitWarningTime = time.Now()
}
// customDoFunc is a custom request interceptor function that handles rate limiting and retries.
func (ac *AnilistClientImpl) customDoFunc(ctx context.Context, req *http.Request, gqlInfo *clientv2.GQLRequestInfo, res interface{}) (err error) {
@@ -280,60 +505,18 @@ func (ac *AnilistClientImpl) customDoFunc(ctx context.Context, req *http.Request
}
}()
client := http.DefaultClient
var resp *http.Response
retryCount := 2
for i := 0; i < retryCount; i++ {
// Reset response body for retry
if resp != nil && resp.Body != nil {
resp.Body.Close()
}
// Recreate the request body if it was read in a previous attempt
if req.GetBody != nil {
newBody, err := req.GetBody()
if err != nil {
return fmt.Errorf("failed to get request body: %w", err)
}
req.Body = newBody
}
resp, err = client.Do(req)
if err != nil {
return fmt.Errorf("request failed: %w", err)
}
rlRemainingStr = resp.Header.Get("X-Ratelimit-Remaining")
rlRetryAfterStr := resp.Header.Get("Retry-After")
//println("Remaining:", rlRemainingStr, " | RetryAfter:", rlRetryAfterStr)
// If we have a rate limit, sleep for the time
rlRetryAfter, err := strconv.Atoi(rlRetryAfterStr)
if err == nil {
ac.logger.Warn().Msgf("anilist: Rate limited, retrying in %d seconds", rlRetryAfter+1)
if time.Since(sentRateLimitWarningTime) > 10*time.Second {
if events.GlobalWSEventManager != nil {
events.GlobalWSEventManager.SendEvent(events.WarningToast, "anilist: Rate limited, retrying in "+strconv.Itoa(rlRetryAfter+1)+" seconds")
}
sentRateLimitWarningTime = time.Now()
}
select {
case <-time.After(time.Duration(rlRetryAfter+1) * time.Second):
continue
}
}
if rlRemainingStr == "" {
select {
case <-time.After(5 * time.Second):
continue
}
}
break
resp, rlRemainingStr, err = doAniListRequestWithRetries(
http.DefaultClient,
req,
sharedAniListRateBlocker,
sleepWithContext,
func(waitSeconds int) {
notifyAniListRateLimit(ac.logger, waitSeconds)
},
)
if err != nil {
return err
}
defer resp.Body.Close()

File diff suppressed because it is too large Load Diff

View File

@@ -3,71 +3,93 @@ package anilist
import (
"context"
"os"
"seanime/internal/test_utils"
"path/filepath"
"seanime/internal/testutil"
"strconv"
"strings"
"testing"
"github.com/goccy/go-json"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// USE CASE: Generate a boilerplate Anilist AnimeCollection for testing purposes and save it to 'test/data/BoilerplateAnimeCollection'.
// The generated AnimeCollection will have all entries in the 'Planning' status.
// The generated AnimeCollection will be used to test various Anilist API methods.
// You can use TestModifyAnimeCollectionEntry to modify the generated AnimeCollection before using it in a test.
// - DO NOT RUN IF YOU DON'T PLAN TO GENERATE A NEW 'test/data/BoilerplateAnimeCollection'
func TestGenerateBoilerplateAnimeCollection(t *testing.T) {
t.Skip("This test is not meant to be run")
test_utils.InitTestProvider(t, test_utils.Anilist())
const recordCompleteAnimeIDsEnvName = "SEANIME_TEST_RECORD_COMPLETE_ANIME_IDS"
anilistClient := TestGetMockAnilistClient()
func TestMaybeWriteJSONFixtureCreatesDirectories(t *testing.T) {
t.Setenv(testutil.RecordAnilistFixturesEnvName, "true")
ac, err := anilistClient.AnimeCollection(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
target := filepath.Join(t.TempDir(), "fixtures", "nested", "fixture.json")
err := maybeWriteJSONFixture(target, map[string]string{"status": "ok"}, nil)
assert.NoError(t, err)
if assert.NoError(t, err) {
_, err = os.Stat(target)
assert.NoError(t, err)
}
lists := ac.GetMediaListCollection().GetLists()
func TestCustomQueryFixturePathIsStable(t *testing.T) {
body := []byte(`{"query":"query Example { Page { pageInfo { total } } }","variables":{"page":1}}`)
entriesToAddToPlanning := make([]*AnimeListEntry, 0)
path1 := customQueryFixturePath(body)
path2 := customQueryFixturePath(body)
if assert.NoError(t, err) {
assert.Equal(t, path1, path2)
assert.Contains(t, path1, filepath.Join("test", "testdata", "anilist-custom-query"))
}
for _, list := range lists {
if list.Status != nil {
if list.GetStatus().String() != string(MediaListStatusPlanning) {
entries := list.GetEntries()
for _, entry := range entries {
entry.Progress = new(0)
entry.Score = new(0.0)
entry.Status = new(MediaListStatusPlanning)
entriesToAddToPlanning = append(entriesToAddToPlanning, entry)
}
list.Entries = make([]*AnimeListEntry, 0)
}
}
}
func TestFixtureMangaCollectionUsesCommittedFixture(t *testing.T) {
client := NewFixtureAnilistClient()
newLists := make([]*AnimeCollection_MediaListCollection_Lists, 0)
for _, list := range lists {
if list.Status == nil {
continue
}
if *list.GetStatus() == MediaListStatusPlanning {
list.Entries = append(list.Entries, entriesToAddToPlanning...)
newLists = append(newLists, list)
} else {
newLists = append(newLists, list)
}
}
collection, err := client.MangaCollection(context.Background(), nil)
require.NoError(t, err)
require.NotNil(t, collection)
ac.MediaListCollection.Lists = newLists
data, err := json.Marshal(ac)
if assert.NoError(t, err) {
err = os.WriteFile(test_utils.GetDataPath("BoilerplateAnimeCollection"), data, 0644)
assert.NoError(t, err)
}
}
entry, found := collection.GetListEntryFromMangaId(101517)
require.True(t, found)
require.Equal(t, MediaListStatusCurrent, *entry.GetStatus())
require.Equal(t, 260, *entry.GetProgress())
}
// TestRecordCompleteAnimeByIDFixtures is an opt-in recorder, not a
// regular test: it runs only when fixture recording is enabled, media
// ids are supplied via the environment, and an AniList JWT is
// configured. Each requested id is fetched live so the fixture client
// can write its response back to the repository.
func TestRecordCompleteAnimeByIDFixtures(t *testing.T) {
	if !testutil.ShouldRecordAnilistFixtures() {
		t.Skip("AniList fixture recording disabled")
	}
	rawIDs := strings.TrimSpace(os.Getenv(recordCompleteAnimeIDsEnvName))
	if rawIDs == "" {
		t.Skip("no CompleteAnimeByID fixture ids requested")
	}
	cfg := testutil.LoadConfig(t)
	if cfg.Provider.AnilistJwt == "" {
		t.Skip("AniList fixture recording requires provider.anilist_jwt")
	}
	client := NewFixtureAnilistClientWithToken(cfg.Provider.AnilistJwt)
	for _, mediaID := range parseFixtureMediaIDs(t, rawIDs) {
		// The fetch itself records the fixture as a side effect.
		_, err := client.CompleteAnimeByID(context.Background(), &mediaID)
		require.NoErrorf(t, err, "failed to record CompleteAnimeByID fixture for media %d", mediaID)
	}
}
// parseFixtureMediaIDs splits raw on commas and whitespace and converts
// each token to an int, failing the test on empty input or on a
// non-numeric token.
func parseFixtureMediaIDs(t *testing.T, raw string) []int {
	t.Helper()
	isSeparator := func(r rune) bool {
		return r == ',' || r == ' ' || r == '\n' || r == '\t'
	}
	parts := strings.FieldsFunc(raw, isSeparator)
	require.NotEmpty(t, parts, "expected at least one media id")
	ids := make([]int, 0, len(parts))
	for _, token := range parts {
		mediaID, err := strconv.Atoi(token)
		require.NoErrorf(t, err, "invalid media id %q", token)
		ids = append(ids, mediaID)
	}
	return ids
}

View File

@@ -1,153 +1,60 @@
package anilist
import (
"bytes"
"context"
"seanime/internal/test_utils"
"io"
"net/http"
"seanime/internal/util"
"testing"
"time"
"github.com/davecgh/go-spew/spew"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
//func TestHiddenFromStatus(t *testing.T) {
// test_utils.InitTestProvider(t, test_utils.Anilist())
//
// token := test_utils.ConfigData.Provider.AnilistJwt
// logger := util.NewLogger()
// //anilistClient := NewAnilistClient(test_utils.ConfigData.Provider.AnilistJwt)
//
// variables := map[string]interface{}{}
//
// variables["userName"] = test_utils.ConfigData.Provider.AnilistUsername
// variables["type"] = "ANIME"
//
// requestBody, err := json.Marshal(map[string]interface{}{
// "query": testQuery,
// "variables": variables,
// })
// require.NoError(t, err)
//
// data, err := customQuery(requestBody, logger, token)
// require.NoError(t, err)
//
// var mediaLists []*MediaList
//
// type retData struct {
// Page Page
// PageInfo PageInfo
// }
//
// var ret retData
// m, err := json.Marshal(data)
// require.NoError(t, err)
// if err := json.Unmarshal(m, &ret); err != nil {
// t.Fatalf("Failed to unmarshal data: %v", err)
// }
//
// mediaLists = append(mediaLists, ret.Page.MediaList...)
//
// util.Spew(ret.Page.PageInfo)
//
// var currentPage = 1
// var hasNextPage = false
// if ret.Page.PageInfo != nil && ret.Page.PageInfo.HasNextPage != nil {
// hasNextPage = *ret.Page.PageInfo.HasNextPage
// }
// for hasNextPage {
// currentPage++
// variables["page"] = currentPage
// requestBody, err = json.Marshal(map[string]interface{}{
// "query": testQuery,
// "variables": variables,
// })
// require.NoError(t, err)
// data, err = customQuery(requestBody, logger, token)
// require.NoError(t, err)
// m, err = json.Marshal(data)
// require.NoError(t, err)
// if err := json.Unmarshal(m, &ret); err != nil {
// t.Fatalf("Failed to unmarshal data: %v", err)
// }
// util.Spew(ret.Page.PageInfo)
// if ret.Page.PageInfo != nil && ret.Page.PageInfo.HasNextPage != nil {
// hasNextPage = *ret.Page.PageInfo.HasNextPage
// }
// mediaLists = append(mediaLists, ret.Page.MediaList...)
// }
//
// //res, err := anilistClient.AnimeCollection(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
// //assert.NoError(t, err)
//
// for _, mediaList := range mediaLists {
// util.Spew(mediaList.Media.ID)
// if mediaList.Media.ID == 151514 {
// util.Spew(mediaList)
// }
// }
//
//}
//
//const testQuery = `query ($page: Int, $userName: String, $type: MediaType) {
// Page (page: $page, perPage: 100) {
// pageInfo {
// hasNextPage
// total
// perPage
// currentPage
// lastPage
// }
// mediaList (type: $type, userName: $userName) {
// status
// startedAt {
// year
// month
// day
// }
// completedAt {
// year
// month
// day
// }
// repeat
// score(format: POINT_100)
// progress
// progressVolumes
// notes
// media {
// siteUrl
// id
// idMal
// episodes
// chapters
// volumes
// status
// averageScore
// coverImage{
// large
// extraLarge
// }
// bannerImage
// title {
// userPreferred
// }
// }
// }
// }
// }`
// roundTripFunc adapts a plain function into an http.RoundTripper so
// tests can stub transport behavior inline.
type roundTripFunc func(req *http.Request) (*http.Response, error)

// RoundTrip implements http.RoundTripper by delegating to f.
func (f roundTripFunc) RoundTrip(req *http.Request) (*http.Response, error) {
	return f(req)
}
// testClock is a manually advanced clock for deterministic time-based
// tests; its Now method is injected where a time source is accepted.
type testClock struct {
	now time.Time
}

// Now reports the clock's current (frozen) time.
func (c *testClock) Now() time.Time {
	return c.now
}

// Advance moves the clock forward by delay.
func (c *testClock) Advance(delay time.Duration) {
	c.now = c.now.Add(delay)
}
func newAniListTestResponse(statusCode int, body string, headers map[string]string) *http.Response {
respHeaders := make(http.Header)
for key, value := range headers {
respHeaders.Set(key, value)
}
return &http.Response{
StatusCode: statusCode,
Header: respHeaders,
Body: io.NopCloser(bytes.NewBufferString(body)),
}
}
func TestGetAnimeById(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := TestGetMockAnilistClient()
anilistClient := NewTestAnilistClient()
tests := []struct {
name string
mediaId int
}{
{
name: "Cowboy Bebop",
mediaId: 1,
name: "Re:Zero",
mediaId: 21355,
},
}
@@ -160,9 +67,16 @@ func TestGetAnimeById(t *testing.T) {
}
}
func TestListAnime(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
func TestGetAnimeByIdLive(t *testing.T) {
anilistClient := newLiveAnilistClient(t)
mediaID := 1
res, err := anilistClient.BaseAnimeByID(context.Background(), &mediaID)
assert.NoError(t, err)
assert.NotNil(t, res)
}
func TestListAnime(t *testing.T) {
tests := []struct {
name string
Page *int
@@ -195,7 +109,7 @@ func TestListAnime(t *testing.T) {
},
}
anilistClient := TestGetMockAnilistClient()
anilistClient := NewTestAnilistClient()
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
@@ -242,3 +156,122 @@ func TestListAnime(t *testing.T) {
})
}
}
// TestDoAniListRequestWithRetriesWaitsBetweenRateLimitedAttempts checks
// the full retry path: a 429 with Retry-After arms the blocker, the
// injected sleep advances the fake clock instead of blocking, the
// request body is replayed on the second attempt, and the rate-limit
// warning fires exactly once.
func TestDoAniListRequestWithRetriesWaitsBetweenRateLimitedAttempts(t *testing.T) {
	clock := &testClock{now: time.Date(2026, time.April, 7, 12, 0, 0, 0, time.UTC)}
	rateBlocker := newAniListRateBlocker()
	rateBlocker.now = clock.Now

	requestBody := `{"query":"test"}`
	requestBodies := make([]string, 0, 2)
	sleepDurations := make([]time.Duration, 0, 1)
	rateLimitWarnings := make([]int, 0, 1)
	attempt := 0
	client := &http.Client{Transport: roundTripFunc(func(req *http.Request) (*http.Response, error) {
		attempt++
		body, err := io.ReadAll(req.Body)
		require.NoError(t, err)
		requestBodies = append(requestBodies, string(body))
		// First attempt: rate limited with an immediate ("0" seconds)
		// Retry-After, which the parser pads to a one-second wait.
		if attempt == 1 {
			return newAniListTestResponse(http.StatusTooManyRequests, `{"errors":[{"message":"rate limited"}]}`, map[string]string{
				"Date":        clock.Now().Format(http.TimeFormat),
				"Retry-After": "0",
			}), nil
		}
		return newAniListTestResponse(http.StatusOK, `{"data":{"ok":true}}`, map[string]string{
			"X-Ratelimit-Remaining": "9",
		}), nil
	})}

	req, err := http.NewRequest(http.MethodPost, "https://anilist.test/graphql", bytes.NewBufferString(requestBody))
	require.NoError(t, err)

	resp, rlRemainingStr, err := doAniListRequestWithRetries(
		client,
		req,
		rateBlocker,
		// Record sleeps and advance the fake clock instead of blocking.
		func(ctx context.Context, delay time.Duration) error {
			sleepDurations = append(sleepDurations, delay)
			clock.Advance(delay)
			return nil
		},
		func(waitSeconds int) {
			rateLimitWarnings = append(rateLimitWarnings, waitSeconds)
		},
	)
	require.NoError(t, err)
	require.NotNil(t, resp)
	defer resp.Body.Close()

	assert.Equal(t, 2, attempt)
	assert.Equal(t, []time.Duration{time.Second}, sleepDurations)
	assert.Equal(t, []int{1}, rateLimitWarnings)
	// The body must be replayed (via GetBody) on the retried attempt.
	assert.Equal(t, []string{requestBody, requestBody}, requestBodies)
	assert.Equal(t, "9", rlRemainingStr)
}
// TestDoAniListRequestWithRetriesDoesNotRetryWhenRateLimitHeadersAreMissing
// verifies that a response carrying no rate-limit headers is returned
// from the first attempt, with no sleeping and an empty remaining
// counter.
func TestDoAniListRequestWithRetriesDoesNotRetryWhenRateLimitHeadersAreMissing(t *testing.T) {
	// without explicit rate-limit headers, the response should be returned as-is.
	sleepDurations := make([]time.Duration, 0, 1)
	attempt := 0
	client := &http.Client{Transport: roundTripFunc(func(req *http.Request) (*http.Response, error) {
		attempt++
		return newAniListTestResponse(http.StatusOK, `{"data":{"ok":true}}`, nil), nil
	})}

	req, err := http.NewRequest(http.MethodPost, "https://anilist.test/graphql", bytes.NewBufferString(`{"query":"test"}`))
	require.NoError(t, err)

	// nil blocker and a recording sleep: neither should be exercised.
	resp, rlRemainingStr, err := doAniListRequestWithRetries(
		client,
		req,
		nil,
		func(ctx context.Context, delay time.Duration) error {
			sleepDurations = append(sleepDurations, delay)
			return nil
		},
		nil,
	)
	require.NoError(t, err)
	require.NotNil(t, resp)
	defer resp.Body.Close()

	assert.Equal(t, 1, attempt)
	assert.Empty(t, sleepDurations)
	assert.Equal(t, "", rlRemainingStr)
}
// TestAniListRateBlockerWaitsUntilBlockExpires checks that Wait sleeps
// for exactly the remaining block duration and returns once the fake
// clock reaches the deadline.
func TestAniListRateBlockerWaitsUntilBlockExpires(t *testing.T) {
	// once blocked, later requests should wait until the shared block expires.
	clock := &testClock{now: time.Date(2026, time.April, 7, 12, 0, 10, 0, time.UTC)}
	rateBlocker := newAniListRateBlocker()
	rateBlocker.now = clock.Now

	require.True(t, rateBlocker.BlockUntil(clock.Now().Add(18*time.Second)))

	sleepDurations := make([]time.Duration, 0, 1)
	err := rateBlocker.Wait(context.Background(), func(ctx context.Context, delay time.Duration) error {
		sleepDurations = append(sleepDurations, delay)
		clock.Advance(delay)
		return nil
	})
	require.NoError(t, err)
	assert.Equal(t, []time.Duration{18 * time.Second}, sleepDurations)
}
// TestAniListRateBlockerIgnoresDuplicateOrShorterBlocks checks that
// BlockUntil reports true only when the deadline extends the current
// block, so equal or shorter deadlines are silently absorbed.
func TestAniListRateBlockerIgnoresDuplicateOrShorterBlocks(t *testing.T) {
	// concurrent 429s with the same reset should not re-announce the same block repeatedly.
	clock := &testClock{now: time.Date(2026, time.April, 7, 12, 0, 20, 0, time.UTC)}
	rateBlocker := newAniListRateBlocker()
	rateBlocker.now = clock.Now

	blockedUntil := clock.Now().Add(18 * time.Second)
	assert.True(t, rateBlocker.BlockUntil(blockedUntil))
	assert.False(t, rateBlocker.BlockUntil(blockedUntil))
	assert.False(t, rateBlocker.BlockUntil(clock.Now().Add(5*time.Second)))
	assert.True(t, rateBlocker.BlockUntil(clock.Now().Add(25*time.Second)))
}

View File

@@ -2,16 +2,17 @@ package anilist
import (
"fmt"
"seanime/internal/testutil"
"seanime/internal/util"
"testing"
"github.com/davecgh/go-spew/spew"
"github.com/goccy/go-json"
"github.com/stretchr/testify/require"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
)
func TestCompoundQuery(t *testing.T) {
test_utils.InitTestProvider(t)
func TestCompoundQueryLive(t *testing.T) {
testutil.InitTestProvider(t, testutil.Anilist(), testutil.Live())
var ids = []int{171457, 21}

View File

@@ -7,9 +7,7 @@ import (
"fmt"
"net/http"
"seanime/internal/constants"
"seanime/internal/events"
"seanime/internal/util"
"strconv"
"time"
"github.com/goccy/go-json"
@@ -47,8 +45,6 @@ func customQuery(body []byte, logger *zerolog.Logger, token ...string) (data int
err = errors.New("panic in customQuery")
})
client := http.DefaultClient
var req *http.Request
req, err = http.NewRequest("POST", constants.AnilistApiUrl, bytes.NewBuffer(body))
if err != nil {
@@ -61,54 +57,18 @@ func customQuery(body []byte, logger *zerolog.Logger, token ...string) (data int
req.Header.Set("Authorization", fmt.Sprintf("Bearer %s", token[0]))
}
// Send request
retryCount := 2
var resp *http.Response
for i := 0; i < retryCount; i++ {
// Reset response body for retry
if resp != nil && resp.Body != nil {
resp.Body.Close()
}
// Recreate the request body if it was read in a previous attempt
if req.GetBody != nil {
newBody, err := req.GetBody()
if err != nil {
return nil, fmt.Errorf("failed to get request body: %w", err)
}
req.Body = newBody
}
resp, err = client.Do(req)
if err != nil {
return nil, fmt.Errorf("request failed: %w", err)
}
rlRemainingStr = resp.Header.Get("X-Ratelimit-Remaining")
rlRetryAfterStr := resp.Header.Get("Retry-After")
rlRetryAfter, err := strconv.Atoi(rlRetryAfterStr)
if err == nil {
logger.Warn().Msgf("anilist: Rate limited, retrying in %d seconds", rlRetryAfter+1)
if time.Since(sentRateLimitWarningTime) > 10*time.Second {
events.GlobalWSEventManager.SendEvent(events.WarningToast, "anilist: Rate limited, retrying in "+strconv.Itoa(rlRetryAfter+1)+" seconds")
sentRateLimitWarningTime = time.Now()
}
select {
case <-time.After(time.Duration(rlRetryAfter+1) * time.Second):
continue
}
}
if rlRemainingStr == "" {
select {
case <-time.After(5 * time.Second):
continue
}
}
break
resp, rlRemainingStr, err = doAniListRequestWithRetries(
http.DefaultClient,
req,
sharedAniListRateBlocker,
sleepWithContext,
func(waitSeconds int) {
notifyAniListRateLimit(logger, waitSeconds)
},
)
if err != nil {
return nil, err
}
defer resp.Body.Close()

View File

@@ -2,17 +2,16 @@ package anilist
import (
"context"
"github.com/davecgh/go-spew/spew"
"github.com/stretchr/testify/assert"
"seanime/internal/test_utils"
"seanime/internal/util"
"seanime/internal/util/limiter"
"testing"
"github.com/davecgh/go-spew/spew"
"github.com/stretchr/testify/assert"
)
func TestBaseAnime_FetchMediaTree_BaseAnime(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := TestGetMockAnilistClient()
anilistClient := NewTestAnilistClient()
lim := limiter.NewAnilistLimiter()
completeAnimeCache := NewCompleteAnimeCache()
@@ -32,16 +31,6 @@ func TestBaseAnime_FetchMediaTree_BaseAnime(t *testing.T) {
163263, // BSD5
},
},
{
name: "Re:Zero",
mediaId: 21355,
edgeIds: []int{
21355, // Re:Zero 1
108632, // Re:Zero 2
119661, // Re:Zero 2 Part 2
163134, // Re:Zero 3
},
},
}
for _, tt := range tests {
@@ -69,7 +58,7 @@ func TestBaseAnime_FetchMediaTree_BaseAnime(t *testing.T) {
for _, treeId := range tt.edgeIds {
a, found := tree.Get(treeId)
assert.Truef(t, found, "expected tree to contain %d", treeId)
spew.Dump(a.GetTitleSafe())
util.Spew(a.GetTitleSafe())
}
}
@@ -80,3 +69,36 @@ func TestBaseAnime_FetchMediaTree_BaseAnime(t *testing.T) {
}
}
// TestBaseAnime_FetchMediaTree_BaseAnimeLive exercises FetchMediaTree
// against the live AniList API (gated by newLiveAnilistClient, which
// skips when live tests or a JWT are unavailable) and checks that the
// Re:Zero relation tree contains all four known season ids.
func TestBaseAnime_FetchMediaTree_BaseAnimeLive(t *testing.T) {
	anilistClient := newLiveAnilistClient(t)
	lim := limiter.NewAnilistLimiter()
	completeAnimeCache := NewCompleteAnimeCache()

	mediaID := 21355
	edgeIDs := []int{21355, 108632, 119661, 163134}

	mediaF, err := anilistClient.CompleteAnimeByID(context.Background(), &mediaID)
	if !assert.NoError(t, err) {
		return
	}
	media := mediaF.GetMedia()

	tree := NewCompleteAnimeRelationTree()
	err = media.FetchMediaTree(
		FetchMediaTreeAll,
		anilistClient,
		lim,
		tree,
		completeAnimeCache,
	)
	if !assert.NoError(t, err) {
		return
	}

	for _, treeID := range edgeIDs {
		a, found := tree.Get(treeID)
		assert.Truef(t, found, "expected tree to contain %d", treeID)
		spew.Dump(a.GetTitleSafe())
	}
}

View File

@@ -0,0 +1,17 @@
package anilist
import (
"seanime/internal/testutil"
"testing"
)
// newLiveAnilistClient returns an authenticated client for live AniList
// tests. InitTestProvider applies the Anilist/Live feature-flag skips;
// the test is additionally skipped when no JWT is configured.
func newLiveAnilistClient(t testing.TB) AnilistClient {
	t.Helper()
	cfg := testutil.InitTestProvider(t, testutil.Anilist(), testutil.Live())
	if cfg.Provider.AnilistJwt == "" {
		t.Skip("anilist live tests require anilist_jwt")
	}
	return NewAnilistClient(cfg.Provider.AnilistJwt, "")
}

View File

@@ -0,0 +1,286 @@
package anilist
import (
"context"
"fmt"
)
// AnimeCollectionEntryPatch describes the fields to overwrite on a
// collection entry; nil fields are left untouched by the patch
// helpers.
type AnimeCollectionEntryPatch struct {
	Status            *MediaListStatus
	Progress          *int
	Score             *float64
	Repeat            *int
	AiredEpisodes     *int
	NextAiringEpisode *BaseAnime_NextAiringEpisode
}
// PatchAnimeCollectionEntry applies patch to the entry for mediaID
// inside collection and returns the same collection for chaining. When
// the patch changes the status, the entry is first moved to the list
// matching the new status. Panics when collection is nil or the media
// is not present (use EnsureAnimeCollectionEntry for missing media).
func PatchAnimeCollectionEntry(collection *AnimeCollection, mediaID int, patch AnimeCollectionEntryPatch) *AnimeCollection {
	if collection == nil {
		panic("anilist: anime collection is nil")
	}

	entry, currentList := findAnimeCollectionEntry(collection, mediaID)
	if entry == nil {
		panic(fmt.Sprintf("anilist: anime %d not found in collection; use EnsureAnimeCollectionEntry for missing media", mediaID))
	}

	if patch.Status != nil {
		// Move the entry into the destination status list; the returned
		// list is not needed here. (Previously the result was assigned
		// and immediately discarded via `_ = currentList` — dead code.)
		moveAnimeEntryToStatus(collection, currentList, entry, *patch.Status)
	}

	applyAnimeCollectionEntryPatch(entry, patch)
	return collection
}
// EnsureAnimeCollectionEntry guarantees an entry for mediaID exists in
// collection. If the media is already present the collection is
// returned unchanged — note the patch is NOT applied in that case.
// Otherwise the media is fetched via client, appended to the list for
// patch.Status, and patched. Panics on nil collection, nil client,
// missing patch.Status, or a fetch error — this helper is intended for
// test fixture construction, not production paths.
func EnsureAnimeCollectionEntry(collection *AnimeCollection, mediaID int, patch AnimeCollectionEntryPatch, client AnilistClient) *AnimeCollection {
	if collection == nil {
		panic("anilist: anime collection is nil")
	}

	// Already present: return as-is without applying the patch.
	if _, currentList := findAnimeCollectionEntry(collection, mediaID); currentList != nil {
		return collection
	}

	if client == nil {
		panic(fmt.Sprintf("anilist: cannot add anime %d without a client", mediaID))
	}
	if patch.Status == nil {
		panic(fmt.Sprintf("anilist: cannot add anime %d without a status", mediaID))
	}

	baseAnime, err := client.BaseAnimeByID(context.Background(), &mediaID)
	if err != nil {
		panic(err)
	}

	entry := &AnimeCollection_MediaListCollection_Lists_Entries{
		Media: baseAnime.GetMedia(),
	}
	list := ensureAnimeStatusList(collection, *patch.Status)
	list.Entries = append(list.Entries, entry)

	// Delegate the field updates (and any status move) to the patcher.
	return PatchAnimeCollectionEntry(collection, mediaID, patch)
}
// PatchAnimeCollectionWithRelationsEntry applies the non-nil fields of patch
// to the entry for mediaID inside the relations collection and returns the
// same collection pointer.
//
// If patch.Status is set, the entry is first moved to the list matching the
// new status. Panics when the collection is nil or when mediaID is not
// present; use EnsureAnimeCollectionWithRelationsEntry to add missing media.
func PatchAnimeCollectionWithRelationsEntry(collection *AnimeCollectionWithRelations, mediaID int, patch AnimeCollectionEntryPatch) *AnimeCollectionWithRelations {
	if collection == nil {
		panic("anilist: anime collection with relations is nil")
	}
	entry, currentList := findAnimeCollectionWithRelationsEntry(collection, mediaID)
	if entry == nil {
		panic(fmt.Sprintf("anilist: anime %d not found in relation collection; use EnsureAnimeCollectionWithRelationsEntry for missing media", mediaID))
	}
	if patch.Status != nil {
		// Relocate the entry before patching fields; the returned target list
		// is not needed here (fixes the previous ineffectual assignment).
		moveAnimeRelationsEntryToStatus(collection, currentList, entry, *patch.Status)
	}
	applyAnimeCollectionWithRelationsEntryPatch(entry, patch)
	return collection
}
// EnsureAnimeCollectionWithRelationsEntry guarantees that mediaID exists in
// the relations collection. When the entry is already present the collection
// is returned unchanged; otherwise the complete anime is fetched through
// client, appended to the list matching patch.Status, and the patch is
// applied. Panics on nil collection, missing client/status, or fetch failure.
func EnsureAnimeCollectionWithRelationsEntry(collection *AnimeCollectionWithRelations, mediaID int, patch AnimeCollectionEntryPatch, client AnilistClient) *AnimeCollectionWithRelations {
	if collection == nil {
		panic("anilist: anime collection with relations is nil")
	}
	_, existingList := findAnimeCollectionWithRelationsEntry(collection, mediaID)
	if existingList != nil {
		// Already tracked; nothing to add.
		return collection
	}
	switch {
	case client == nil:
		panic(fmt.Sprintf("anilist: cannot add anime %d without a client", mediaID))
	case patch.Status == nil:
		panic(fmt.Sprintf("anilist: cannot add anime %d without a status", mediaID))
	}
	res, err := client.CompleteAnimeByID(context.Background(), &mediaID)
	if err != nil {
		panic(err)
	}
	newEntry := &AnimeCollectionWithRelations_MediaListCollection_Lists_Entries{
		Media: res.GetMedia(),
	}
	targetList := ensureAnimeRelationsStatusList(collection, *patch.Status)
	targetList.Entries = append(targetList.Entries, newEntry)
	return PatchAnimeCollectionWithRelationsEntry(collection, mediaID, patch)
}
// applyAnimeCollectionEntryPatch copies every non-nil patch field onto the
// entry. AiredEpisodes and NextAiringEpisode write through to the embedded
// media object rather than the list entry itself.
func applyAnimeCollectionEntryPatch(entry *AnimeCollection_MediaListCollection_Lists_Entries, patch AnimeCollectionEntryPatch) {
	if s := patch.Status; s != nil {
		entry.Status = s
	}
	if p := patch.Progress; p != nil {
		entry.Progress = p
	}
	if sc := patch.Score; sc != nil {
		entry.Score = sc
	}
	if r := patch.Repeat; r != nil {
		entry.Repeat = r
	}
	if ep := patch.AiredEpisodes; ep != nil {
		entry.Media.Episodes = ep
	}
	if next := patch.NextAiringEpisode; next != nil {
		entry.Media.NextAiringEpisode = next
	}
}
// applyAnimeCollectionWithRelationsEntryPatch copies every non-nil patch field
// onto the relations entry. Unlike the base variant, NextAiringEpisode is not
// applied here — presumably because the relations media uses a different
// generated type for that field (TODO confirm).
func applyAnimeCollectionWithRelationsEntryPatch(entry *AnimeCollectionWithRelations_MediaListCollection_Lists_Entries, patch AnimeCollectionEntryPatch) {
	if s := patch.Status; s != nil {
		entry.Status = s
	}
	if p := patch.Progress; p != nil {
		entry.Progress = p
	}
	if sc := patch.Score; sc != nil {
		entry.Score = sc
	}
	if r := patch.Repeat; r != nil {
		entry.Repeat = r
	}
	if ep := patch.AiredEpisodes; ep != nil {
		entry.Media.Episodes = ep
	}
}
// findAnimeCollectionEntry scans every list in the collection for the entry
// matching mediaID and returns the entry together with its containing list,
// or (nil, nil) when the media is absent or the collection is empty.
func findAnimeCollectionEntry(collection *AnimeCollection, mediaID int) (*AnimeCollection_MediaListCollection_Lists_Entries, *AnimeCollection_MediaListCollection_Lists) {
	if collection == nil || collection.MediaListCollection == nil {
		return nil, nil
	}
	for _, l := range collection.MediaListCollection.Lists {
		if l == nil {
			continue
		}
		// Ranging over a nil Entries slice is a no-op, so no nil check needed.
		for _, e := range l.Entries {
			if e == nil {
				continue
			}
			if e.GetMedia().GetID() == mediaID {
				return e, l
			}
		}
	}
	return nil, nil
}
// findAnimeCollectionWithRelationsEntry scans every list in the relations
// collection for the entry matching mediaID and returns the entry together
// with its containing list, or (nil, nil) when absent.
func findAnimeCollectionWithRelationsEntry(collection *AnimeCollectionWithRelations, mediaID int) (*AnimeCollectionWithRelations_MediaListCollection_Lists_Entries, *AnimeCollectionWithRelations_MediaListCollection_Lists) {
	if collection == nil || collection.MediaListCollection == nil {
		return nil, nil
	}
	for _, l := range collection.MediaListCollection.Lists {
		if l == nil {
			continue
		}
		// Ranging over a nil Entries slice is a no-op, so no nil check needed.
		for _, e := range l.Entries {
			if e == nil {
				continue
			}
			if e.GetMedia().GetID() == mediaID {
				return e, l
			}
		}
	}
	return nil, nil
}
// moveAnimeEntryToStatus relocates entry into the list matching status,
// creating that list if necessary, and returns the destination list. When the
// entry already sits in a list with the requested status it is left in place.
func moveAnimeEntryToStatus(collection *AnimeCollection, currentList *AnimeCollection_MediaListCollection_Lists, entry *AnimeCollection_MediaListCollection_Lists_Entries, status MediaListStatus) *AnimeCollection_MediaListCollection_Lists {
	alreadyPlaced := currentList != nil && currentList.Status != nil && *currentList.Status == status
	if alreadyPlaced {
		return currentList
	}
	if currentList != nil {
		removeAnimeEntry(currentList, entry.GetMedia().GetID())
	}
	dest := ensureAnimeStatusList(collection, status)
	dest.Entries = append(dest.Entries, entry)
	return dest
}
// moveAnimeRelationsEntryToStatus relocates entry into the relations list
// matching status, creating that list if necessary, and returns the
// destination list. An entry already in the right list is left in place.
func moveAnimeRelationsEntryToStatus(collection *AnimeCollectionWithRelations, currentList *AnimeCollectionWithRelations_MediaListCollection_Lists, entry *AnimeCollectionWithRelations_MediaListCollection_Lists_Entries, status MediaListStatus) *AnimeCollectionWithRelations_MediaListCollection_Lists {
	alreadyPlaced := currentList != nil && currentList.Status != nil && *currentList.Status == status
	if alreadyPlaced {
		return currentList
	}
	if currentList != nil {
		removeAnimeRelationsEntry(currentList, entry.GetMedia().GetID())
	}
	dest := ensureAnimeRelationsStatusList(collection, status)
	dest.Entries = append(dest.Entries, entry)
	return dest
}
// ensureAnimeStatusList returns the list whose status matches status, creating
// the MediaListCollection and/or the list itself when missing. The returned
// list always has a non-nil Entries slice.
func ensureAnimeStatusList(collection *AnimeCollection, status MediaListStatus) *AnimeCollection_MediaListCollection_Lists {
	if collection.MediaListCollection == nil {
		collection.MediaListCollection = &AnimeCollection_MediaListCollection{}
	}
	mlc := collection.MediaListCollection
	for _, existing := range mlc.Lists {
		if existing == nil || existing.Status == nil || *existing.Status != status {
			continue
		}
		if existing.Entries == nil {
			existing.Entries = []*AnimeCollection_MediaListCollection_Lists_Entries{}
		}
		return existing
	}
	// No list for this status yet — synthesize a non-custom one named after it.
	listName := string(status)
	custom := false
	created := &AnimeCollection_MediaListCollection_Lists{
		Status:       testPointer(status),
		Name:         &listName,
		IsCustomList: &custom,
		Entries:      []*AnimeCollection_MediaListCollection_Lists_Entries{},
	}
	mlc.Lists = append(mlc.Lists, created)
	return created
}
// ensureAnimeRelationsStatusList returns the relations list whose status
// matches status, creating the MediaListCollection and/or the list itself
// when missing. The returned list always has a non-nil Entries slice.
func ensureAnimeRelationsStatusList(collection *AnimeCollectionWithRelations, status MediaListStatus) *AnimeCollectionWithRelations_MediaListCollection_Lists {
	if collection.MediaListCollection == nil {
		collection.MediaListCollection = &AnimeCollectionWithRelations_MediaListCollection{}
	}
	mlc := collection.MediaListCollection
	for _, existing := range mlc.Lists {
		if existing == nil || existing.Status == nil || *existing.Status != status {
			continue
		}
		if existing.Entries == nil {
			existing.Entries = []*AnimeCollectionWithRelations_MediaListCollection_Lists_Entries{}
		}
		return existing
	}
	// No list for this status yet — synthesize a non-custom one named after it.
	listName := string(status)
	custom := false
	created := &AnimeCollectionWithRelations_MediaListCollection_Lists{
		Status:       testPointer(status),
		Name:         &listName,
		IsCustomList: &custom,
		Entries:      []*AnimeCollectionWithRelations_MediaListCollection_Lists_Entries{},
	}
	mlc.Lists = append(mlc.Lists, created)
	return created
}
// removeAnimeEntry deletes the first entry matching mediaID from list, if any.
func removeAnimeEntry(list *AnimeCollection_MediaListCollection_Lists, mediaID int) {
	entries := list.GetEntries()
	for i := range entries {
		e := entries[i]
		if e == nil || e.GetMedia().GetID() != mediaID {
			continue
		}
		list.Entries = append(list.Entries[:i], list.Entries[i+1:]...)
		return
	}
}
// removeAnimeRelationsEntry deletes the first entry matching mediaID from the
// relations list, if any.
func removeAnimeRelationsEntry(list *AnimeCollectionWithRelations_MediaListCollection_Lists, mediaID int) {
	entries := list.GetEntries()
	for i := range entries {
		e := entries[i]
		if e == nil || e.GetMedia().GetID() != mediaID {
			continue
		}
		list.Entries = append(list.Entries[:i], list.Entries[i+1:]...)
		return
	}
}
// testPointer returns a pointer to a copy of value; handy for building struct
// literals whose fields are pointers.
func testPointer[T any](value T) *T {
	v := value
	return &v
}

View File

@@ -1,11 +1,14 @@
package anizip
import (
"github.com/stretchr/testify/assert"
"seanime/internal/testutil"
"testing"
"github.com/stretchr/testify/assert"
)
func TestFetchAniZipMedia(t *testing.T) {
testutil.InitTestProvider(t, testutil.Live())
tests := []struct {
name string

View File

@@ -1,17 +1,18 @@
package filler
package filler_test
import (
"seanime/internal/api/filler"
"seanime/internal/testutil"
"seanime/internal/util"
"testing"
"github.com/davecgh/go-spew/spew"
)
func TestAnimeFillerList_Search(t *testing.T) {
testutil.InitTestProvider(t, testutil.Live())
af := NewAnimeFillerList(util.NewLogger())
af := filler.NewAnimeFillerList(util.NewLogger())
opts := SearchOptions{
opts := filler.SearchOptions{
Titles: []string{"Hunter x Hunter (2011)"},
}
@@ -20,5 +21,5 @@ func TestAnimeFillerList_Search(t *testing.T) {
t.Error(err)
}
spew.Dump(ret)
util.Spew(ret)
}

View File

@@ -1,16 +1,17 @@
package mal
import (
"github.com/davecgh/go-spew/spew"
"seanime/internal/test_utils"
"seanime/internal/testutil"
"seanime/internal/util"
"testing"
"github.com/davecgh/go-spew/spew"
)
func TestGetAnimeDetails(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
func TestGetAnimeDetailsLive(t *testing.T) {
cfg := testutil.InitTestProvider(t, testutil.MyAnimeList(), testutil.Live())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
malWrapper := NewWrapper(cfg.Provider.MalJwt, util.NewLogger())
res, err := malWrapper.GetAnimeDetails(51179)
@@ -23,10 +24,10 @@ func TestGetAnimeDetails(t *testing.T) {
t.Log(res.Title)
}
func TestGetAnimeCollection(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
func TestGetAnimeCollectionLive(t *testing.T) {
cfg := testutil.InitTestProvider(t, testutil.MyAnimeList(), testutil.Live())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
malWrapper := NewWrapper(cfg.Provider.MalJwt, util.NewLogger())
res, err := malWrapper.GetAnimeCollection()
@@ -42,10 +43,10 @@ func TestGetAnimeCollection(t *testing.T) {
}
}
func TestUpdateAnimeListStatus(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList(), test_utils.MyAnimeListMutation())
func TestUpdateAnimeListStatusLive(t *testing.T) {
cfg := testutil.InitTestProvider(t, testutil.MyAnimeList(), testutil.MyAnimeListMutation(), testutil.Live())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
malWrapper := NewWrapper(cfg.Provider.MalJwt, util.NewLogger())
mId := 51179
progress := 2

View File

@@ -1,16 +1,17 @@
package mal
import (
"github.com/davecgh/go-spew/spew"
"seanime/internal/test_utils"
"seanime/internal/testutil"
"seanime/internal/util"
"testing"
"github.com/davecgh/go-spew/spew"
)
func TestGetMangaDetails(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
func TestGetMangaDetailsLive(t *testing.T) {
cfg := testutil.InitTestProvider(t, testutil.MyAnimeList(), testutil.Live())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
malWrapper := NewWrapper(cfg.Provider.MalJwt, util.NewLogger())
res, err := malWrapper.GetMangaDetails(13)
@@ -23,10 +24,10 @@ func TestGetMangaDetails(t *testing.T) {
t.Log(res.Title)
}
func TestGetMangaCollection(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
func TestGetMangaCollectionLive(t *testing.T) {
cfg := testutil.InitTestProvider(t, testutil.MyAnimeList(), testutil.Live())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
malWrapper := NewWrapper(cfg.Provider.MalJwt, util.NewLogger())
res, err := malWrapper.GetMangaCollection()
@@ -42,10 +43,10 @@ func TestGetMangaCollection(t *testing.T) {
}
}
func TestUpdateMangaListStatus(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList(), test_utils.MyAnimeListMutation())
func TestUpdateMangaListStatusLive(t *testing.T) {
cfg := testutil.InitTestProvider(t, testutil.MyAnimeList(), testutil.MyAnimeListMutation(), testutil.Live())
malWrapper := NewWrapper(test_utils.ConfigData.Provider.MalJwt, util.NewLogger())
malWrapper := NewWrapper(cfg.Provider.MalJwt, util.NewLogger())
mId := 13
progress := 1000

View File

@@ -1,12 +1,12 @@
package mal
import (
"seanime/internal/test_utils"
"seanime/internal/testutil"
"testing"
)
func TestSearchWithMAL(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
func TestSearchWithMALLive(t *testing.T) {
testutil.InitTestProvider(t, testutil.MyAnimeList(), testutil.Live())
res, err := SearchWithMAL("bungo stray dogs", 4)
@@ -20,8 +20,8 @@ func TestSearchWithMAL(t *testing.T) {
}
func TestAdvancedSearchWithMal(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.MyAnimeList())
func TestAdvancedSearchWithMalLive(t *testing.T) {
testutil.InitTestProvider(t, testutil.MyAnimeList(), testutil.Live())
res, err := AdvancedSearchWithMAL("sousou no frieren")

View File

@@ -1,22 +0,0 @@
package metadata_provider
import (
"seanime/internal/database/db"
"seanime/internal/extension"
"seanime/internal/util"
"seanime/internal/util/filecache"
"testing"
"github.com/stretchr/testify/require"
)
func GetFakeProvider(t *testing.T, db *db.Database) Provider {
filecacher, err := filecache.NewCacher(t.TempDir())
require.NoError(t, err)
return NewProvider(&NewProviderImplOptions{
Logger: util.NewLogger(),
FileCacher: filecacher,
Database: db,
ExtensionBankRef: util.NewRef(extension.NewUnifiedBank()),
})
}

View File

@@ -10,7 +10,7 @@ import (
func TestProvider(t *testing.T) {
metadataProvider := GetFakeProvider(t, nil)
metadataProvider := NewTestProvider(t, nil)
tests := []struct {
platform metadata.Platform

View File

@@ -0,0 +1,24 @@
package metadata_provider
import (
"seanime/internal/database/db"
"seanime/internal/extension"
"seanime/internal/testutil"
"seanime/internal/util"
"testing"
)
// NewTestProvider builds a metadata Provider for tests using a fresh isolated
// test environment (temp dirs, logger, cache) and the given database, which
// may be nil when the test does not touch persisted metadata.
func NewTestProvider(t *testing.T, db *db.Database) Provider {
	t.Helper()
	return NewTestProviderWithEnv(testutil.NewTestEnv(t), db)
}
// NewTestProviderWithEnv builds a metadata Provider wired to the given test
// environment's logger and a dedicated "metadata-provider" file cache, with an
// empty extension bank. Use this overload when several components in one test
// must share the same TestEnv.
func NewTestProviderWithEnv(env *testutil.TestEnv, db *db.Database) Provider {
	return NewProvider(&NewProviderImplOptions{
		Logger:           env.Logger(),
		FileCacher:       env.NewCacher("metadata-provider"),
		Database:         db,
		ExtensionBankRef: util.NewRef(extension.NewUnifiedBank()),
	})
}

View File

@@ -6,8 +6,8 @@ import (
)
const (
Version = "3.5.2"
VersionName = "Hakumei"
Version = "3.6.0"
VersionName = "Kagero"
GcTime = time.Minute * 30
ConfigFileName = "config.toml"
MalClientId = "51cb4294feb400f3ddc66a30f9b9a00f"

View File

@@ -1,79 +0,0 @@
package continuity
import (
"github.com/stretchr/testify/require"
"path/filepath"
"seanime/internal/database/db"
"seanime/internal/test_utils"
"seanime/internal/util"
"seanime/internal/util/filecache"
"testing"
)
func TestHistoryItems(t *testing.T) {
test_utils.SetTwoLevelDeep()
test_utils.InitTestProvider(t)
logger := util.NewLogger()
tempDir := t.TempDir()
t.Log(tempDir)
database, err := db.NewDatabase(test_utils.ConfigData.Path.DataDir, test_utils.ConfigData.Database.Name, logger)
require.NoError(t, err)
cacher, err := filecache.NewCacher(filepath.Join(tempDir, "cache"))
require.NoError(t, err)
manager := NewManager(&NewManagerOptions{
FileCacher: cacher,
Logger: logger,
Database: database,
})
require.NotNil(t, manager)
var mediaIds = make([]int, MaxWatchHistoryItems+1)
for i := 0; i < MaxWatchHistoryItems+1; i++ {
mediaIds[i] = i + 1
}
// Add items to the history
for _, mediaId := range mediaIds {
err = manager.UpdateWatchHistoryItem(&UpdateWatchHistoryItemOptions{
MediaId: mediaId,
EpisodeNumber: 1,
CurrentTime: 10,
Duration: 100,
})
require.NoError(t, err)
}
// Check if the oldest item was removed
items, err := filecache.GetAll[WatchHistoryItem](cacher, *manager.watchHistoryFileCacheBucket)
require.NoError(t, err)
require.Len(t, items, MaxWatchHistoryItems)
// Update an item
err = manager.UpdateWatchHistoryItem(&UpdateWatchHistoryItemOptions{
MediaId: mediaIds[0], // 1
EpisodeNumber: 2,
CurrentTime: 30,
Duration: 100,
})
require.NoError(t, err)
// Check if the item was updated
items, err = filecache.GetAll[WatchHistoryItem](cacher, *manager.watchHistoryFileCacheBucket)
require.NoError(t, err)
require.Len(t, items, MaxWatchHistoryItems)
item, found := items["1"]
require.True(t, found)
require.Equal(t, 2, item.EpisodeNumber)
require.Equal(t, 30., item.CurrentTime)
require.Equal(t, 100., item.Duration)
}

View File

@@ -0,0 +1,309 @@
package continuity
import (
"seanime/internal/database/db_bridge"
"seanime/internal/library/anime"
"seanime/internal/testutil"
"seanime/internal/util/filecache"
"strconv"
"testing"
"time"
"github.com/samber/mo"
"github.com/stretchr/testify/require"
)
// TestTrimWatchHistoryItemsRemovesOldestItem seeds one more item than the cap
// allows and checks that trimming evicts exactly the item with the oldest
// timestamps (media ID 1) while keeping the newest.
func TestTrimWatchHistoryItemsRemovesOldestItem(t *testing.T) {
	manager, cacher := newHistoryTestManager(t)
	baseTime := time.Now().Add(-time.Hour)
	// Seed MaxWatchHistoryItems+1 items; higher media IDs get newer timestamps.
	for mediaID := 1; mediaID <= MaxWatchHistoryItems+1; mediaID++ {
		err := cacher.Set(*manager.watchHistoryFileCacheBucket, strconv.Itoa(mediaID), &WatchHistoryItem{
			MediaId:       mediaID,
			EpisodeNumber: 1,
			CurrentTime:   10,
			Duration:      100,
			TimeAdded:     baseTime.Add(time.Duration(mediaID) * time.Minute),
			TimeUpdated:   baseTime.Add(time.Duration(mediaID) * time.Minute),
		})
		require.NoError(t, err)
	}
	require.NoError(t, manager.trimWatchHistoryItems())
	items := getAllHistoryItems(t, cacher, manager)
	require.Len(t, items, MaxWatchHistoryItems)
	// The oldest entry is gone; the most recent one survives.
	require.NotContains(t, items, "1")
	require.Contains(t, items, strconv.Itoa(MaxWatchHistoryItems+1))
}
// TestUpdateWatchHistoryItemCreatesAndUpdatesExistingItem verifies that
// updating an existing item overwrites Kind, episode, time and duration while
// preserving the original TimeAdded and Filepath, and bumps TimeUpdated.
func TestUpdateWatchHistoryItemCreatesAndUpdatesExistingItem(t *testing.T) {
	manager, cacher := newHistoryTestManager(t)
	originalTime := time.Now().Add(-2 * time.Hour)
	// Seed an existing mediastream item for media 42.
	err := cacher.Set(*manager.watchHistoryFileCacheBucket, "42", &WatchHistoryItem{
		Kind:          MediastreamKind,
		Filepath:      "/tmp/original.mkv",
		MediaId:       42,
		EpisodeNumber: 1,
		CurrentTime:   20,
		Duration:      100,
		TimeAdded:     originalTime,
		TimeUpdated:   originalTime,
	})
	require.NoError(t, err)
	err = manager.UpdateWatchHistoryItem(&UpdateWatchHistoryItemOptions{
		Kind:          OnlinestreamKind,
		MediaId:       42,
		EpisodeNumber: 2,
		CurrentTime:   30,
		Duration:      120,
		Filepath:      "/tmp/updated.mkv",
	})
	require.NoError(t, err)
	response := manager.GetWatchHistoryItem(42)
	require.True(t, response.Found)
	require.NotNil(t, response.Item)
	require.Equal(t, OnlinestreamKind, response.Item.Kind)
	require.Equal(t, 2, response.Item.EpisodeNumber)
	require.Equal(t, 30.0, response.Item.CurrentTime)
	require.Equal(t, 120.0, response.Item.Duration)
	// Creation time is kept; only the update time moves forward.
	require.True(t, response.Item.TimeAdded.Equal(originalTime))
	require.True(t, response.Item.TimeUpdated.After(originalTime))
	// The original filepath is intentionally retained on update.
	require.Equal(t, "/tmp/original.mkv", response.Item.Filepath)
}
// TestGetWatchHistoryItemAppliesCompletionThresholds checks how lookups treat
// playback position relative to the episode duration: mid-episode items are
// resumable, nearly-finished items are hidden and deleted, and barely-started
// items are hidden but kept in the cache.
func TestGetWatchHistoryItemAppliesCompletionThresholds(t *testing.T) {
	t.Run("returns item within resumable range", func(t *testing.T) {
		manager, cacher := newHistoryTestManager(t)
		// 50/100 is squarely inside the resumable window.
		seedWatchHistoryItem(t, cacher, manager, &WatchHistoryItem{
			MediaId:       10,
			EpisodeNumber: 1,
			CurrentTime:   50,
			Duration:      100,
		})
		response := manager.GetWatchHistoryItem(10)
		require.True(t, response.Found)
		require.NotNil(t, response.Item)
	})
	t.Run("hides nearly finished item and deletes it", func(t *testing.T) {
		manager, cacher := newHistoryTestManager(t)
		// 90/100 counts as finished: not returned, and removed asynchronously.
		seedWatchHistoryItem(t, cacher, manager, &WatchHistoryItem{
			MediaId:       11,
			EpisodeNumber: 1,
			CurrentTime:   90,
			Duration:      100,
		})
		response := manager.GetWatchHistoryItem(11)
		require.False(t, response.Found)
		require.Nil(t, response.Item)
		// Deletion happens in the background, so poll briefly.
		require.Eventually(t, func() bool {
			items := getAllHistoryItems(t, cacher, manager)
			_, found := items["11"]
			return !found
		}, time.Second, 10*time.Millisecond)
	})
	t.Run("hides barely started item without deleting it", func(t *testing.T) {
		manager, cacher := newHistoryTestManager(t)
		// 4/100 is too early to resume: hidden, but the record is preserved.
		seedWatchHistoryItem(t, cacher, manager, &WatchHistoryItem{
			MediaId:       12,
			EpisodeNumber: 1,
			CurrentTime:   4,
			Duration:      100,
		})
		response := manager.GetWatchHistoryItem(12)
		require.False(t, response.Found)
		require.Nil(t, response.Item)
		items := getAllHistoryItems(t, cacher, manager)
		item, found := items["12"]
		require.True(t, found)
		require.Equal(t, 4.0, item.CurrentTime)
	})
}
// TestDeleteWatchHistoryItemRemovesStoredEntry verifies that deleting an item
// makes it unreachable via lookup and removes it from the cache bucket.
func TestDeleteWatchHistoryItemRemovesStoredEntry(t *testing.T) {
	manager, cacher := newHistoryTestManager(t)
	seedWatchHistoryItem(t, cacher, manager, &WatchHistoryItem{
		MediaId:       20,
		EpisodeNumber: 1,
		CurrentTime:   20,
		Duration:      100,
	})
	require.NoError(t, manager.DeleteWatchHistoryItem(20))
	response := manager.GetWatchHistoryItem(20)
	require.False(t, response.Found)
	require.Nil(t, response.Item)
	require.NotContains(t, getAllHistoryItems(t, cacher, manager), "20")
}
// TestUpdateExternalPlayerEpisodeWatchHistoryItem covers the external-player
// progress path: updates are dropped while continuity is disabled, and create
// then update an item (keyed off the pre-set episode details) once enabled.
func TestUpdateExternalPlayerEpisodeWatchHistoryItem(t *testing.T) {
	t.Run("does nothing when continuity is disabled", func(t *testing.T) {
		manager, cacher := newHistoryTestManager(t)
		manager.SetExternalPlayerEpisodeDetails(&ExternalPlayerEpisodeDetails{
			MediaId:       30,
			EpisodeNumber: 5,
			Filepath:      "/tmp/external-disabled.mkv",
		})
		// Continuity defaults to off, so this update must be a no-op.
		manager.UpdateExternalPlayerEpisodeWatchHistoryItem(40, 100)
		require.Empty(t, getAllHistoryItems(t, cacher, manager))
	})
	t.Run("creates and updates item when enabled", func(t *testing.T) {
		manager, _ := newHistoryTestManager(t)
		manager.SetSettings(&Settings{WatchContinuityEnabled: true})
		manager.SetExternalPlayerEpisodeDetails(&ExternalPlayerEpisodeDetails{
			MediaId:       31,
			EpisodeNumber: 5,
			Filepath:      "/tmp/external.mkv",
		})
		manager.UpdateExternalPlayerEpisodeWatchHistoryItem(40, 100)
		response := manager.GetWatchHistoryItem(31)
		require.True(t, response.Found)
		require.NotNil(t, response.Item)
		require.Equal(t, ExternalPlayerKind, response.Item.Kind)
		require.Equal(t, "/tmp/external.mkv", response.Item.Filepath)
		require.Equal(t, 5, response.Item.EpisodeNumber)
		require.Equal(t, 40.0, response.Item.CurrentTime)
		// A second update with new episode details overwrites the same item.
		manager.SetExternalPlayerEpisodeDetails(&ExternalPlayerEpisodeDetails{
			MediaId:       31,
			EpisodeNumber: 6,
			Filepath:      "/tmp/external.mkv",
		})
		manager.UpdateExternalPlayerEpisodeWatchHistoryItem(55, 120)
		updated := manager.GetWatchHistoryItem(31)
		require.True(t, updated.Found)
		require.Equal(t, 6, updated.Item.EpisodeNumber)
		require.Equal(t, 55.0, updated.Item.CurrentTime)
		require.Equal(t, 120.0, updated.Item.Duration)
	})
}
// TestGetExternalPlayerEpisodeWatchHistoryItemStream verifies stream-mode
// lookups (isStream=true): they require continuity to be enabled, and match
// only when both the episode number and media ID line up.
func TestGetExternalPlayerEpisodeWatchHistoryItemStream(t *testing.T) {
	manager, cacher := newHistoryTestManager(t)
	seedWatchHistoryItem(t, cacher, manager, &WatchHistoryItem{
		MediaId:       40,
		EpisodeNumber: 7,
		CurrentTime:   45,
		Duration:      100,
	})
	// Continuity disabled: nothing is returned even though the item exists.
	response := manager.GetExternalPlayerEpisodeWatchHistoryItem("ignored", true, 7, 40)
	require.False(t, response.Found)
	manager.SetSettings(&Settings{WatchContinuityEnabled: true})
	response = manager.GetExternalPlayerEpisodeWatchHistoryItem("ignored", true, 7, 40)
	require.True(t, response.Found)
	require.NotNil(t, response.Item)
	require.Equal(t, 40, response.Item.MediaId)
	// Wrong episode number: no match.
	mismatch := manager.GetExternalPlayerEpisodeWatchHistoryItem("ignored", true, 8, 40)
	require.False(t, mismatch.Found)
	require.Nil(t, mismatch.Item)
	// Zero episode number: treated as missing identifiers.
	missingIDs := manager.GetExternalPlayerEpisodeWatchHistoryItem("ignored", true, 0, 40)
	require.False(t, missingIDs.Found)
	require.Nil(t, missingIDs.Item)
}
// TestGetExternalPlayerEpisodeWatchHistoryItemLocalFile verifies non-stream
// lookups resolve the media through local files stored in the database,
// matching either by full path or by bare filename.
func TestGetExternalPlayerEpisodeWatchHistoryItemLocalFile(t *testing.T) {
	manager, cacher := newHistoryTestManager(t)
	manager.SetSettings(&Settings{WatchContinuityEnabled: true})
	// Clear the package-level local-files cache so this test's DB rows are read.
	resetLocalFilesCache(t)
	localFiles := anime.NewTestLocalFiles(anime.TestLocalFileGroup{
		LibraryPath:      "/library",
		FilePathTemplate: "/library/show/episode-%ep.mkv",
		MediaID:          50,
		Episodes: []anime.TestLocalFileEpisode{{
			Episode:      3,
			AniDBEpisode: "3",
			Type:         anime.LocalFileTypeMain,
		}},
	})
	_, err := db_bridge.InsertLocalFiles(manager.db, localFiles)
	require.NoError(t, err)
	seedWatchHistoryItem(t, cacher, manager, &WatchHistoryItem{
		MediaId:       50,
		EpisodeNumber: 3,
		CurrentTime:   60,
		Duration:      120,
	})
	// Lookup by absolute path.
	byPath := manager.GetExternalPlayerEpisodeWatchHistoryItem(localFiles[0].Path, false, 0, 0)
	require.True(t, byPath.Found)
	require.NotNil(t, byPath.Item)
	require.Equal(t, 50, byPath.Item.MediaId)
	// Lookup by filename only.
	byFilename := manager.GetExternalPlayerEpisodeWatchHistoryItem(localFiles[0].Name, false, 0, 0)
	require.True(t, byFilename.Found)
	require.NotNil(t, byFilename.Item)
}
// newHistoryTestManager builds a continuity Manager backed by an isolated test
// environment and returns it together with its file cacher.
func newHistoryTestManager(t *testing.T) (*Manager, *filecache.Cacher) {
	t.Helper()
	env := testutil.NewTestEnv(t)
	opts := &NewManagerOptions{
		FileCacher: env.NewCacher("continuity"),
		Logger:     env.Logger(),
		Database:   env.NewDatabase(""),
	}
	mgr := NewManager(opts)
	require.NotNil(t, mgr)
	return mgr, mgr.fileCacher
}
// seedWatchHistoryItem stores item in the watch-history bucket, defaulting
// zero timestamps to the recent past so the entry looks organically created.
func seedWatchHistoryItem(t *testing.T, cacher *filecache.Cacher, manager *Manager, item *WatchHistoryItem) {
	t.Helper()
	if item.TimeAdded.IsZero() {
		item.TimeAdded = time.Now().Add(-time.Minute)
	}
	if item.TimeUpdated.IsZero() {
		item.TimeUpdated = item.TimeAdded
	}
	key := strconv.Itoa(item.MediaId)
	require.NoError(t, cacher.Set(*manager.watchHistoryFileCacheBucket, key, item))
}
// getAllHistoryItems reads every stored watch-history item, keyed by media ID.
func getAllHistoryItems(t *testing.T, cacher *filecache.Cacher, manager *Manager) map[string]*WatchHistoryItem {
	t.Helper()
	bucket := *manager.watchHistoryFileCacheBucket
	items, err := filecache.GetAll[*WatchHistoryItem](cacher, bucket)
	require.NoError(t, err)
	return items
}
// resetLocalFilesCache clears the package-level local-files cache for the
// duration of the test and restores the previous values on cleanup.
func resetLocalFilesCache(t *testing.T) {
	t.Helper()
	prevFiles, prevID := db_bridge.CurrLocalFiles, db_bridge.CurrLocalFilesDbId
	t.Cleanup(func() {
		db_bridge.CurrLocalFiles = prevFiles
		db_bridge.CurrLocalFilesDbId = prevID
	})
	db_bridge.CurrLocalFiles = mo.None[[]*anime.LocalFile]()
	db_bridge.CurrLocalFilesDbId = 0
}

View File

@@ -1,15 +1,16 @@
package continuity
import (
"github.com/stretchr/testify/require"
"path/filepath"
"seanime/internal/database/db"
"seanime/internal/util"
"seanime/internal/util/filecache"
"testing"
"github.com/stretchr/testify/require"
)
func GetMockManager(t *testing.T, db *db.Database) *Manager {
func NewTestManager(t *testing.T, db *db.Database) *Manager {
logger := util.NewLogger()
cacher, err := filecache.NewCacher(filepath.Join(t.TempDir(), "cache"))
require.NoError(t, err)

View File

@@ -140,20 +140,6 @@ func (a *App) initModulesOnce() {
a.MangaDownloader.Start()
// +---------------------+
// | Media Stream |
// +---------------------+
a.MediastreamRepository = mediastream.NewRepository(&mediastream.NewRepositoryOptions{
Logger: a.Logger,
WSEventManager: a.WSEventManager,
FileCacher: a.FileCacher,
})
a.AddCleanupFunction(func() {
a.MediastreamRepository.OnCleanup()
})
// +---------------------+
// | Video Core |
// +---------------------+
@@ -171,6 +157,21 @@ func (a *App) initModulesOnce() {
IsOfflineRef: a.IsOfflineRef(),
})
// +---------------------+
// | Media Stream |
// +---------------------+
a.MediastreamRepository = mediastream.NewRepository(&mediastream.NewRepositoryOptions{
Logger: a.Logger,
WSEventManager: a.WSEventManager,
FileCacher: a.FileCacher,
VideoCore: a.VideoCore,
})
a.AddCleanupFunction(func() {
a.MediastreamRepository.OnCleanup()
})
// +---------------------+
// | Native Player |
// +---------------------+

View File

@@ -17,6 +17,7 @@ func RunJobs(app *core.App) {
}
refreshAnilistTicker := time.NewTicker(10 * time.Minute)
refreshAnilistSimulatedTicker := time.NewTicker(30 * time.Minute)
refreshLocalDataTicker := time.NewTicker(30 * time.Minute)
refetchReleaseTicker := time.NewTicker(1 * time.Hour)
refetchAnnouncementsTicker := time.NewTicker(10 * time.Minute)
@@ -25,7 +26,7 @@ func RunJobs(app *core.App) {
for {
select {
case <-refreshAnilistTicker.C:
if app.IsOffline() {
if app.IsOffline() || app.GetUser().IsSimulated {
continue
}
RefreshAnilistDataJob(ctx)
@@ -34,6 +35,18 @@ func RunJobs(app *core.App) {
}
}()
go func() {
for {
select {
case <-refreshAnilistSimulatedTicker.C:
if app.IsOffline() || !app.GetUser().IsSimulated {
continue
}
RefreshAnilistDataJob(ctx)
}
}
}()
go func() {
for {
select {

View File

@@ -0,0 +1,442 @@
package customsource
import (
"context"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/extension"
hibikecustomsource "seanime/internal/extension/hibike/customsource"
"seanime/internal/testutil"
"seanime/internal/util"
"testing"
"github.com/stretchr/testify/require"
)
// TestMediaIDHelpers verifies that custom source IDs round-trip through the
// pack/unpack helpers and that plain AniList IDs are not mistaken for custom
// source IDs.
func TestMediaIDHelpers(t *testing.T) {
	// Custom source IDs encode the extension identifier and the provider-local media ID into one runtime ID.
	mediaID := GenerateMediaId(321, 987654)
	require.True(t, IsExtensionId(mediaID))
	extensionIdentifier, localID := ExtractExtensionData(mediaID)
	require.Equal(t, 321, extensionIdentifier)
	require.Equal(t, 987654, localID)
	// A small plain ID must not be classified as a custom source ID, and
	// extracting from it yields zero values.
	require.False(t, IsExtensionId(12345))
	plainExt, plainLocal := ExtractExtensionData(12345)
	require.Zero(t, plainExt)
	require.Zero(t, plainLocal)
}
// TestSiteURLHelpers verifies how custom source URLs are tagged and recovered.
// This protects the split between extension-owned URLs and normal AniList URLs.
func TestSiteURLHelpers(t *testing.T) {
	t.Run("formats nil and custom urls", func(t *testing.T) {
		// This covers the two custom-source formats we care about:
		// a synthetic source-only URL when there is no site URL,
		// and a tagged URL ("<prefix>|END|<url>") when the provider returns its own link.
		formattedNil := formatSiteUrl("demo", nil)
		require.NotNil(t, formattedNil)
		require.Equal(t, "ext_custom_source_demo", *formattedNil)
		customURL := "https://example.com/item"
		formattedCustom := formatSiteUrl("demo", &customURL)
		require.NotNil(t, formattedCustom)
		require.Equal(t, "ext_custom_source_demo|END|https://example.com/item", *formattedCustom)
		// The extension ID must be recoverable from the tagged URL.
		extID, ok := GetCustomSourceExtensionIdFromSiteUrl(formattedCustom)
		require.True(t, ok)
		require.Equal(t, "demo", extID)
	})
	t.Run("keeps AniList urls untouched", func(t *testing.T) {
		// AniList URLs are intentionally left alone so downstream code can still treat them as native AniList media.
		aniListURL := "https://anilist.co/anime/1"
		formatted := formatSiteUrl("demo", &aniListURL)
		// Same pointer back: no rewriting at all for AniList links.
		require.Same(t, &aniListURL, formatted)
		extID, ok := GetCustomSourceExtensionIdFromSiteUrl(formatted)
		require.False(t, ok)
		require.Empty(t, extID)
	})
}
// TestNormalizeMedia verifies that normalization rewrites provider media into
// the runtime shape the rest of the app expects, including packed IDs, tagged
// site URLs, and title fallbacks.
func TestNormalizeMedia(t *testing.T) {
	t.Run("normalizes anime ids urls and title fallback", func(t *testing.T) {
		// Normalization rewrites both the ID and the site URL so the rest of the app can tell this apart from AniList media.
		// NOTE(review): `new(...)` here appears to be a package-local generic
		// pointer helper shadowing the builtin — confirm its definition.
		anime := &anilist.BaseAnime{
			ID:      25,
			SiteURL: new("https://example.com/anime/25"),
			Title: &anilist.BaseAnime_Title{
				English: new("Fresh Anime"),
			},
		}
		NormalizeMedia(17, "demo", anime)
		require.Equal(t, GenerateMediaId(17, 25), anime.ID)
		require.NotNil(t, anime.SiteURL)
		require.Equal(t, "ext_custom_source_demo|END|https://example.com/anime/25", *anime.SiteURL)
		require.NotNil(t, anime.Title)
		// UserPreferred falls back to the English title when missing.
		require.Equal(t, "Fresh Anime", *anime.Title.UserPreferred)
		_, ok := GetCustomSourceExtensionIdFromSiteUrl(anime.SiteURL)
		require.True(t, ok)
	})
	t.Run("fills missing manga title", func(t *testing.T) {
		// Providers are allowed to omit titles, but the app expects something printable.
		manga := &anilist.BaseManga{ID: 30}
		NormalizeMedia(21, "reader", manga)
		require.Equal(t, GenerateMediaId(21, 30), manga.ID)
		require.NotNil(t, manga.SiteURL)
		require.Equal(t, "ext_custom_source_reader", *manga.SiteURL)
		require.NotNil(t, manga.Title)
		// "???" is the placeholder used when no title is available.
		require.Equal(t, "???", *manga.Title.UserPreferred)
		require.Equal(t, "???", *manga.Title.English)
	})
}
// TestManagerProviderResolution verifies that the manager can resolve a
// provider from all supported entry points: direct media ID lookup, BaseAnime
// lookup, and the missing-provider path.
func TestManagerProviderResolution(t *testing.T) {
	provider := &fakeCustomSourceProvider{extensionIdentifier: 7}
	manager := newCustomSourceTestManager(t, customSourceTestExtension{
		id:         "demo-ext",
		identifier: 7,
		provider:   provider,
	})
	customID := GenerateMediaId(7, 55)
	// Known extension identifier: resolves to the registered extension.
	ext, localID, isCustom, exists := manager.GetProviderFromId(customID)
	require.True(t, isCustom)
	require.True(t, exists)
	require.NotNil(t, ext)
	require.Equal(t, 55, localID)
	require.Equal(t, "demo-ext", ext.GetID())
	// Custom-shaped ID for an unregistered extension: flagged custom but absent.
	missingExt, missingLocalID, missingIsCustom, missingExists := manager.GetProviderFromId(GenerateMediaId(9, 11))
	require.True(t, missingIsCustom)
	require.False(t, missingExists)
	require.Nil(t, missingExt)
	require.Zero(t, missingLocalID)
	// BaseAnime wrapper path resolves the same as the raw ID path.
	baseAnimeExt, animeLocalID, animeIsCustom, animeExists := manager.GetProviderFromBaseAnime(&anilist.BaseAnime{ID: customID})
	require.True(t, animeIsCustom)
	require.True(t, animeExists)
	require.NotNil(t, baseAnimeExt)
	require.Equal(t, 55, animeLocalID)
	// Plain AniList ID: not custom, no extension.
	baseMangaExt, mangaLocalID, mangaIsCustom, mangaExists := manager.GetProviderFromBaseManga(&anilist.BaseManga{ID: 123})
	require.False(t, mangaIsCustom)
	require.False(t, mangaExists)
	require.Nil(t, baseMangaExt)
	require.Zero(t, mangaLocalID)
}
// TestGetCustomSourceAnimeEntriesRefreshesMedia verifies that stored anime
// entries are refreshed against the live provider on read and that entries
// for extensions that are no longer loaded are filtered out.
func TestGetCustomSourceAnimeEntriesRefreshesMedia(t *testing.T) {
	provider := &fakeCustomSourceProvider{
		extensionIdentifier: 13,
		animeByID: map[int]*anilist.BaseAnime{
			101: newBaseAnime(101, "Fresh Title", "https://example.com/fresh"),
		},
	}
	manager := newCustomSourceTestManager(t, customSourceTestExtension{
		id:         "anime-ext",
		identifier: 13,
		provider:   provider,
	})
	// Persist a stale copy of media 101 under the loaded extension.
	require.NoError(t, manager.SaveCustomSourceAnimeEntries("anime-ext", map[int]*anilist.AnimeListEntry{
		101: {
			ID:     101,
			Status: new(anilist.MediaListStatusCurrent),
			Media:  newBaseAnime(101, "Stale Title", "https://example.com/stale"),
		},
	}))
	// Persist an entry under an extension ID that is not loaded at all.
	require.NoError(t, manager.SaveCustomSourceAnimeEntries("missing-ext", map[int]*anilist.AnimeListEntry{
		9: {ID: 9, Media: newBaseAnime(9, "Ghost", "https://example.com/ghost")},
	}))
	// Stored entries are refreshed from the extension on read so stale media data does not leak back into the collection.
	entries, ok := manager.GetCustomSourceAnimeEntries()
	require.True(t, ok)
	require.Contains(t, entries, "anime-ext")
	require.NotContains(t, entries, "missing-ext")
	require.Equal(t, "Fresh Title", *entries["anime-ext"][101].Media.Title.English)
	require.Equal(t, "https://example.com/fresh", *entries["anime-ext"][101].Media.SiteURL)
}
// Verifies the main anime mutation lifecycle end to end:
// create an entry, update progress, auto-complete at the total count, update repeat, then delete it.
func TestUpdateEntryAnimeLifecycle(t *testing.T) {
	provider := &fakeCustomSourceProvider{
		extensionIdentifier: 3,
		animeByID: map[int]*anilist.BaseAnime{
			77: newBaseAnime(77, "Tracked Anime", "https://example.com/anime/77"),
		},
	}
	manager := newCustomSourceTestManager(t, customSourceTestExtension{
		id:         "tracker",
		identifier: 3,
		provider:   provider,
	})

	mediaID := GenerateMediaId(3, 77)
	status := anilist.MediaListStatusPlanning
	score := 84
	progress := 6
	startedAt := &anilist.FuzzyDateInput{Year: new(2024), Month: new(2), Day: new(10)}
	completedAt := &anilist.FuzzyDateInput{Year: new(2024), Month: new(3), Day: new(1)}

	// This walks the main mutation flow: create the entry, advance progress, bump repeat count, then remove it.
	require.NoError(t, manager.UpdateEntry(context.Background(), mediaID, &status, &score, &progress, startedAt, completedAt))
	entries, ok := manager.GetCustomSourceAnimeEntries()
	require.True(t, ok)
	entry, found := entries["tracker"][77]
	require.True(t, found)
	require.Equal(t, status, *entry.Status)
	// The int score input is stored as a float score on the entry.
	require.Equal(t, 84.0, *entry.Score)
	require.Equal(t, 6, *entry.Progress)
	require.Equal(t, 2024, *entry.StartedAt.Year)
	require.Equal(t, 1, *entry.CompletedAt.Day)

	// Advancing progress to 12 with a total of 12 (fourth argument) flips the
	// status to Completed automatically.
	require.NoError(t, manager.UpdateEntryProgress(context.Background(), mediaID, 12, new(12)))
	entries, ok = manager.GetCustomSourceAnimeEntries()
	require.True(t, ok)
	entry = entries["tracker"][77]
	require.Equal(t, anilist.MediaListStatusCompleted, *entry.Status)
	require.Equal(t, 12, *entry.Progress)

	require.NoError(t, manager.UpdateEntryRepeat(context.Background(), mediaID, 2))
	entries, ok = manager.GetCustomSourceAnimeEntries()
	require.True(t, ok)
	entry = entries["tracker"][77]
	require.Equal(t, 2, *entry.Repeat)

	// Deleting the only entry removes the whole "tracker" bucket from the result map.
	require.NoError(t, manager.DeleteEntry(context.Background(), mediaID, 0))
	entries, ok = manager.GetCustomSourceAnimeEntries()
	require.True(t, ok)
	_, found = entries["tracker"]
	require.False(t, found)
}
// Verifies the type-detection fallback inside UpdateEntry.
// If anime lookup does not find anything, the manager should create a manga entry instead of failing.
func TestUpdateEntryCreatesMangaEntryWhenAnimeLookupMisses(t *testing.T) {
	// The provider only knows manga 88; its anime map is left empty on purpose
	// so the anime lookup misses.
	provider := &fakeCustomSourceProvider{
		extensionIdentifier: 5,
		mangaByID: map[int]*anilist.BaseManga{
			88: newBaseManga(88, "Tracked Manga", "https://example.com/manga/88"),
		},
	}
	manager := newCustomSourceTestManager(t, customSourceTestExtension{
		id:         "reader",
		identifier: 5,
		provider:   provider,
	})

	mediaID := GenerateMediaId(5, 88)
	status := anilist.MediaListStatusCurrent
	progress := 14

	// UpdateEntry tries anime first, then falls back to manga when the anime lookup does not return anything.
	require.NoError(t, manager.UpdateEntry(context.Background(), mediaID, &status, nil, &progress, nil, nil))

	// The entry must land in the manga collection, populated from the provider's media.
	entries, ok := manager.GetCustomSourceMangaCollection()
	require.True(t, ok)
	entry, found := entries["reader"][88]
	require.True(t, found)
	require.Equal(t, status, *entry.Status)
	require.Equal(t, 14, *entry.Progress)
	require.Equal(t, "Tracked Manga", *entry.Media.Title.English)
}
// Verifies that custom source anime entries are merged into an existing AniList collection
// under the right status list and with normalized runtime media data.
func TestMergeAnimeEntries(t *testing.T) {
	// Live provider media for anime 41; the stored entry below carries stale media on purpose.
	provider := &fakeCustomSourceProvider{
		extensionIdentifier: 11,
		animeByID: map[int]*anilist.BaseAnime{
			41: newBaseAnime(41, "Merged Anime", "https://example.com/merged"),
		},
	}
	manager := newCustomSourceTestManager(t, customSourceTestExtension{
		id:         "merge-ext",
		identifier: 11,
		provider:   provider,
	})

	require.NoError(t, manager.SaveCustomSourceAnimeEntries("merge-ext", map[int]*anilist.AnimeListEntry{
		41: {
			ID:       41,
			Status:   new(anilist.MediaListStatusCurrent),
			Progress: new(4),
			Media:    newBaseAnime(41, "Stored Anime", "https://example.com/stored"),
		},
	}))

	// Start from a collection that only holds an empty Planning list.
	collection := &anilist.AnimeCollection{
		MediaListCollection: &anilist.AnimeCollection_MediaListCollection{
			Lists: []*anilist.AnimeCollection_MediaListCollection_Lists{{
				Status:  new(anilist.MediaListStatusPlanning),
				Entries: []*anilist.AnimeCollection_MediaListCollection_Lists_Entries{},
			}},
		},
	}

	// Merge keeps the existing AniList lists and appends a generated list for the custom source status bucket.
	manager.MergeAnimeEntries(collection)
	require.Len(t, collection.MediaListCollection.Lists, 2)

	currentList := findAnimeListByStatus(t, collection, anilist.MediaListStatusCurrent)
	require.Len(t, currentList.Entries, 1)
	// Entry and media IDs are rewritten to the namespaced custom-source ID.
	require.Equal(t, GenerateMediaId(11, 41), currentList.Entries[0].ID)
	require.Equal(t, GenerateMediaId(11, 41), currentList.Entries[0].Media.ID)
	// The SiteURL is prefixed with the extension marker, then the provider's fresh URL.
	require.Equal(t, "ext_custom_source_merge-ext|END|https://example.com/merged", *currentList.Entries[0].Media.SiteURL)
	require.Equal(t, "Merged Anime", *currentList.Entries[0].Media.Title.UserPreferred)
}
// customSourceTestExtension describes one extension to register with the
// test manager built by newCustomSourceTestManager.
type customSourceTestExtension struct {
	id         string                    // extension ID exposed through the unified bank
	identifier int                       // numeric extension identifier used when generating media IDs
	provider   *fakeCustomSourceProvider // fake provider backing the extension
}
// newCustomSourceTestManager builds a Manager wired like production: a unified
// extension bank holding one custom-source extension per spec, a temp database
// from testutil, and the test environment's logger. The manager is closed
// automatically when the test finishes.
func newCustomSourceTestManager(t *testing.T, exts ...customSourceTestExtension) *Manager {
	t.Helper()
	env := testutil.NewTestEnv(t)
	bank := extension.NewUnifiedBank()
	// The real manager listens to the unified extension bank, so the test harness builds the same wiring with a temp DB.
	for _, spec := range exts {
		ext := extension.NewCustomSourceExtension(&extension.Extension{
			ID:          spec.id,
			Name:        spec.id,
			Version:     "1.0.0",
			ManifestURI: "builtin",
			Language:    extension.LanguageGo,
			Type:        extension.TypeCustomSource,
		}, spec.provider)
		// The numeric identifier is what GenerateMediaId-based lookups resolve against.
		ext.SetExtensionIdentifier(spec.identifier)
		bank.Set(spec.id, ext)
	}
	manager := NewManager(util.NewRef(bank), env.NewDatabase(""), env.Logger())
	t.Cleanup(manager.Close)
	return manager
}
// findAnimeListByStatus returns the first list in the collection whose status
// matches the given status, failing the test when no such list exists.
func findAnimeListByStatus(t *testing.T, collection *anilist.AnimeCollection, status anilist.MediaListStatus) *anilist.AnimeCollection_MediaListCollection_Lists {
	t.Helper()
	for _, candidate := range collection.MediaListCollection.Lists {
		if candidate.Status == nil {
			continue
		}
		if *candidate.Status == status {
			return candidate
		}
	}
	t.Fatalf("anime list with status %s not found", status)
	return nil
}
// newBaseAnime builds a minimal BaseAnime fixture carrying an English title
// and a site URL (both taken as pointers to copies of the arguments).
func newBaseAnime(id int, title string, siteURL string) *anilist.BaseAnime {
	media := &anilist.BaseAnime{ID: id}
	media.SiteURL = &siteURL
	media.Title = &anilist.BaseAnime_Title{English: &title}
	return media
}
// newBaseManga builds a minimal BaseManga fixture carrying an English title
// and a site URL (both taken as pointers to copies of the arguments).
func newBaseManga(id int, title string, siteURL string) *anilist.BaseManga {
	media := &anilist.BaseManga{ID: id}
	media.SiteURL = &siteURL
	media.Title = &anilist.BaseManga_Title{English: &title}
	return media
}
// fakeCustomSourceProvider is a minimal in-memory custom source provider.
// Lookups are served from the two maps; setting animeErr/mangaErr forces the
// corresponding lookup to fail.
type fakeCustomSourceProvider struct {
	extensionIdentifier int                        // identifier reported by GetExtensionIdentifier
	animeByID           map[int]*anilist.BaseAnime // anime returned by GetAnime, keyed by local ID
	mangaByID           map[int]*anilist.BaseManga // manga returned by GetManga, keyed by local ID
	animeErr            error                      // when non-nil, GetAnime returns this error
	mangaErr            error                      // when non-nil, GetManga returns this error
}
// The fake provider only implements the read paths these tests need; everything else can stay nil.

// GetExtensionIdentifier returns the configured numeric extension identifier.
func (f *fakeCustomSourceProvider) GetExtensionIdentifier() int {
	return f.extensionIdentifier
}
// GetSettings reports that the fake provider supports both anime and manga.
func (f *fakeCustomSourceProvider) GetSettings() hibikecustomsource.Settings {
	settings := hibikecustomsource.Settings{}
	settings.SupportsAnime = true
	settings.SupportsManga = true
	return settings
}
// GetAnime returns the known anime among the requested IDs, preserving request
// order and silently skipping IDs with no fixture. A configured animeErr
// short-circuits the lookup.
func (f *fakeCustomSourceProvider) GetAnime(_ context.Context, ids []int) ([]*anilist.BaseAnime, error) {
	if f.animeErr != nil {
		return nil, f.animeErr
	}
	found := make([]*anilist.BaseAnime, 0, len(ids))
	for _, id := range ids {
		media, ok := f.animeByID[id]
		if !ok {
			continue
		}
		found = append(found, media)
	}
	return found, nil
}
// ListAnime is unused by these tests and returns no results.
func (f *fakeCustomSourceProvider) ListAnime(_ context.Context, _ string, _ int, _ int) (*hibikecustomsource.ListAnimeResponse, error) {
	return nil, nil
}

// GetAnimeWithRelations is unused by these tests and returns no results.
func (f *fakeCustomSourceProvider) GetAnimeWithRelations(_ context.Context, _ int) (*anilist.CompleteAnime, error) {
	return nil, nil
}

// GetAnimeMetadata is unused by these tests and returns no results.
func (f *fakeCustomSourceProvider) GetAnimeMetadata(_ context.Context, _ int) (*metadata.AnimeMetadata, error) {
	return nil, nil
}

// GetAnimeDetails is unused by these tests and returns no results.
func (f *fakeCustomSourceProvider) GetAnimeDetails(_ context.Context, _ int) (*anilist.AnimeDetailsById_Media, error) {
	return nil, nil
}
// GetManga returns the known manga among the requested IDs, preserving request
// order and silently skipping IDs with no fixture. A configured mangaErr
// short-circuits the lookup.
func (f *fakeCustomSourceProvider) GetManga(_ context.Context, ids []int) ([]*anilist.BaseManga, error) {
	if f.mangaErr != nil {
		return nil, f.mangaErr
	}
	found := make([]*anilist.BaseManga, 0, len(ids))
	for _, id := range ids {
		media, ok := f.mangaByID[id]
		if !ok {
			continue
		}
		found = append(found, media)
	}
	return found, nil
}
// ListManga is unused by these tests and returns no results.
func (f *fakeCustomSourceProvider) ListManga(_ context.Context, _ string, _ int, _ int) (*hibikecustomsource.ListMangaResponse, error) {
	return nil, nil
}

// GetMangaDetails is unused by these tests and returns no results.
func (f *fakeCustomSourceProvider) GetMangaDetails(_ context.Context, _ int) (*anilist.MangaDetailsById_Media, error) {
	return nil, nil
}

View File

@@ -179,7 +179,7 @@ func (a *AllDebrid) doQuery(method, endpoint string, body io.Reader, contentType
if err != nil {
return nil, err
}
a.logger.Debug().Str("method", method).Str("url", u.String()).Msg("alldebrid: doQuery")
req.Header.Set("Authorization", "Bearer "+apiKey)
@@ -234,22 +234,34 @@ func (a *AllDebrid) doQueryCtx(ctx context.Context, method, endpoint string, bod
func (a *AllDebrid) AddTorrent(opts debrid.AddTorrentOptions) (string, error) {
a.logger.Debug().Msgf("alldebrid: AddTorrent called with: %s", opts.MagnetLink)
if opts.InfoHash != "" {
torrents, err := a.GetTorrents()
if err == nil {
for _, torrent := range torrents {
if strings.EqualFold(torrent.Hash, opts.InfoHash) {
a.logger.Debug().Str("torrentId", torrent.ID).Msg("alldebrid: Torrent already added")
return torrent.ID, nil
}
}
}
}
if strings.HasPrefix(opts.MagnetLink, "http") {
a.logger.Debug().Msg("alldebrid: detected http link, using addTorrentFile")
return a.addTorrentFile(opts.MagnetLink)
}
// Endpoint: /magnet/upload
var body bytes.Buffer
writer := multipart.NewWriter(&body)
err := writer.WriteField("magnets[]", opts.MagnetLink)
if err != nil {
return "", err
}
writer.Close()
resp, err := a.doQuery("POST", "/magnet/upload", &body, writer.FormDataContentType())
if err != nil {
a.logger.Error().Err(err).Msgf("alldebrid: AddTorrent failed. URL: %s/magnet/upload", a.baseUrl)
@@ -261,7 +273,7 @@ func (a *AllDebrid) AddTorrent(opts debrid.AddTorrentOptions) (string, error) {
if err := json.Unmarshal(b, &data); err != nil {
return "", err
}
if len(data.Magnets) == 0 {
return "", fmt.Errorf("no magnet added")
}
@@ -269,7 +281,7 @@ func (a *AllDebrid) AddTorrent(opts debrid.AddTorrentOptions) (string, error) {
if data.Magnets[0].Error != nil {
return "", fmt.Errorf("api error: %s", data.Magnets[0].Error.Message)
}
return strconv.FormatInt(data.Magnets[0].ID, 10), nil
}
@@ -319,7 +331,7 @@ func (a *AllDebrid) addTorrentFile(urlStr string) (string, error) {
// Prepare upload
var body bytes.Buffer
writer := multipart.NewWriter(&body)
part, err := writer.CreateFormFile("files[]", "torrent.torrent")
if err != nil {
return "", err
@@ -353,12 +365,12 @@ func (a *AllDebrid) addTorrentFile(urlStr string) (string, error) {
}
func (a *AllDebrid) GetTorrentStreamUrl(ctx context.Context, opts debrid.StreamTorrentOptions, itemCh chan debrid.TorrentItem) (streamUrl string, err error) {
doneCh := make(chan struct{})
go func(ctx context.Context) {
defer close(doneCh)
for {
select {
case <-ctx.Done():
@@ -372,9 +384,9 @@ func (a *AllDebrid) GetTorrentStreamUrl(ctx context.Context, opts debrid.StreamT
a.logger.Error().Err(sErr).Msg("alldebrid: Failed to get torrent status")
continue // Retry
}
itemCh <- *tInfo
if tInfo.IsReady {
// Get download link
// We need to find the link that matches the file selected
@@ -382,24 +394,24 @@ func (a *AllDebrid) GetTorrentStreamUrl(ctx context.Context, opts debrid.StreamT
// AllDebrid links are usually just a list.
// We need 'GetTorrentInfo' which returns files list and match?
// Or 'GetTorrent' logic.
// Let's call GetTorrentDownloadUrl
url, dErr := a.GetTorrentDownloadUrl(debrid.DownloadTorrentOptions{
ID: opts.ID,
ID: opts.ID,
FileId: opts.FileId,
})
if dErr != nil {
a.logger.Error().Err(dErr).Msg("alldebrid: failed to get download url")
return
}
streamUrl = url
return
}
}
}
}(ctx)
<-doneCh
return
}
@@ -455,11 +467,11 @@ func (a *AllDebrid) GetTorrentDownloadUrl(opts debrid.DownloadTorrentOptions) (s
if err != nil {
return "", err
}
if len(filesResp.Magnets) == 0 {
return "", fmt.Errorf("magnet not found")
}
info := filesResp.Magnets[0]
if info.Error != nil {
return "", fmt.Errorf("api error: %s", info.Error.Message)
@@ -467,7 +479,7 @@ func (a *AllDebrid) GetTorrentDownloadUrl(opts debrid.DownloadTorrentOptions) (s
// Flatten the hierarchical file tree
flatFiles := flattenFileTree(info.Files, "")
if len(flatFiles) == 0 {
return "", fmt.Errorf("no files found in torrent")
}
@@ -478,11 +490,11 @@ func (a *AllDebrid) GetTorrentDownloadUrl(opts debrid.DownloadTorrentOptions) (s
if err != nil {
return "", fmt.Errorf("invalid file id: %s", opts.FileId)
}
if idx < 0 || idx >= len(flatFiles) {
return "", fmt.Errorf("file index out of range")
}
// Unlock/Unrestrict the link
return a.unlockLink(flatFiles[idx].Link)
}
@@ -495,7 +507,7 @@ func (a *AllDebrid) GetTorrentDownloadUrl(opts debrid.DownloadTorrentOptions) (s
a.logger.Error().Err(err).Str("fileName", file.Name).Msg("alldebrid: Failed to unlock link for file")
continue
}
downloadUrls = append(downloadUrls, unlockedUrl)
}
@@ -506,8 +518,6 @@ func (a *AllDebrid) GetTorrentDownloadUrl(opts debrid.DownloadTorrentOptions) (s
return strings.Join(downloadUrls, ","), nil
}
func (a *AllDebrid) GetInstantAvailability(hashes []string) map[string]debrid.TorrentItemInstantAvailability {
// AllDebrid does not have a dedicated instant availability endpoint that checks for cached torrents without adding them.
// We return an empty map to indicate no instant availability check is performed.
@@ -524,35 +534,35 @@ func (a *AllDebrid) GetTorrent(id string) (*debrid.TorrentItem, error) {
func (a *AllDebrid) GetTorrentInfo(opts debrid.GetTorrentInfoOptions) (*debrid.TorrentInfo, error) {
// Similar to RealDebrid approach: Add -> Get Info -> Delete
if opts.MagnetLink == "" {
return nil, fmt.Errorf("magnet link required")
}
id, err := a.AddTorrent(debrid.AddTorrentOptions{MagnetLink: opts.MagnetLink})
if err != nil {
return nil, fmt.Errorf("failed to add torrent for info: %w", err)
}
// Fetch info
status, err := a.getTorrent(id)
if err != nil {
a.DeleteTorrent(id)
return nil, err
}
// Get files to list them
filesResp, err := a.getTorrentFiles(id)
if err != nil {
a.DeleteTorrent(id)
return nil, err
}
if len(filesResp.Magnets) == 0 {
a.DeleteTorrent(id)
return nil, fmt.Errorf("magnet files not found")
}
filesInfo := filesResp.Magnets[0]
// Create info
@@ -562,14 +572,14 @@ func (a *AllDebrid) GetTorrentInfo(opts debrid.GetTorrentInfoOptions) (*debrid.T
Hash: status.Hash,
Size: status.Size,
}
if filesInfo.Files != nil {
for i, l := range filesInfo.Files {
ret.Files = append(ret.Files, &debrid.TorrentItemFile{
ID: strconv.Itoa(i),
Index: i,
Name: l.Name,
Path: l.Name,
Path: l.Name,
Size: l.Size,
})
}
@@ -577,19 +587,19 @@ func (a *AllDebrid) GetTorrentInfo(opts debrid.GetTorrentInfoOptions) (*debrid.T
// Delete
a.DeleteTorrent(id)
return ret, nil
}
func (a *AllDebrid) GetTorrents() ([]*debrid.TorrentItem, error) {
endpoint := "/magnet/status"
// v4.1 API requires POST, not GET
resp, err := a.doQuery("POST", endpoint, nil, "")
if err != nil {
return nil, err
}
var data GetTorrentsResponse
b, _ := json.Marshal(resp.Data)
json.Unmarshal(b, &data)
@@ -598,7 +608,7 @@ func (a *AllDebrid) GetTorrents() ([]*debrid.TorrentItem, error) {
for _, m := range data.Magnets {
ret = append(ret, toDebridTorrent(&m))
}
// Sort by ID desc
slices.SortFunc(ret, func(i, j *debrid.TorrentItem) int {
return strings.Compare(j.ID, i.ID)
@@ -629,48 +639,48 @@ func (a *AllDebrid) DeleteTorrent(id string) error {
func (a *AllDebrid) getTorrent(id string) (*Torrent, error) {
endpoint := "/magnet/status"
var body io.Reader
var contentType string
if id != "" {
// v4.1 API requires POST with form data, not GET with query params
var formBody bytes.Buffer
writer := multipart.NewWriter(&formBody)
writer.WriteField("id", id)
writer.Close()
body = &formBody
contentType = writer.FormDataContentType()
}
resp, err := a.doQuery("POST", endpoint, body, contentType)
if err != nil {
return nil, err
}
if id != "" {
var data GetTorrentResponse
b, _ := json.Marshal(resp.Data)
json.Unmarshal(b, &data)
if data.Magnets.ID == 0 {
a.logger.Error().Any("data", data).Msg("alldebrid: getTorrent - magnet not found in response")
return nil, fmt.Errorf("magnet not found")
}
return &data.Magnets, nil
}
// This branch should mostly not be used by this helper as it's typically called with ID
// But if it is...
var data GetTorrentsResponse
b, _ := json.Marshal(resp.Data)
json.Unmarshal(b, &data)
if len(data.Magnets) == 0 {
return nil, fmt.Errorf("magnet not found")
}
return &data.Magnets[0], nil
}
@@ -687,13 +697,13 @@ func (a *AllDebrid) getTorrentFiles(id string) (*GetTorrentFilesResponse, error)
if err != nil {
return nil, err
}
var data GetTorrentFilesResponse
b, _ := json.Marshal(resp.Data)
if err := json.Unmarshal(b, &data); err != nil {
return nil, err
}
return &data, nil
}
@@ -710,23 +720,23 @@ func (a *AllDebrid) unlockLink(link string) (string, error) {
if err != nil {
return "", err
}
var data UnrestrictLinkResponse
b, _ := json.Marshal(resp.Data)
json.Unmarshal(b, &data)
return data.Link, nil
}
func toDebridTorrent(t *Torrent) *debrid.TorrentItem {
status := toDebridTorrentStatus(t)
// Convert Unix timestamp to RFC3339 format
addedAt := ""
if t.UploadDate > 0 {
addedAt = time.Unix(t.UploadDate, 0).Format(time.RFC3339)
}
// Calculate completion percentage
completionPercentage := 0
if t.Size > 0 && t.Downloaded > 0 {
@@ -742,7 +752,7 @@ func toDebridTorrent(t *Torrent) *debrid.TorrentItem {
Size: t.Size,
FormattedSize: util.Bytes(uint64(t.Size)),
CompletionPercentage: completionPercentage,
ETA: "",
ETA: "",
Status: status,
AddedAt: addedAt,
Speed: util.ToHumanReadableSpeed(int(t.DownloadSpeed)),
@@ -767,4 +777,4 @@ func toDebridTorrentStatus(t *Torrent) debrid.TorrentItemStatus {
}
return debrid.TorrentItemStatusOther
}
}
}

View File

@@ -1,105 +0,0 @@
package alldebrid
import (
"encoding/json"
"net/http"
"net/http/httptest"
"os"
"testing"
"github.com/rs/zerolog"
"github.com/samber/mo"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestAllDebrid_Authenticate(t *testing.T) {
// Mock server
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, "/user", r.URL.Path)
assert.Equal(t, "Bearer test-api-key", r.Header.Get("Authorization"))
w.WriteHeader(http.StatusOK)
json.NewEncoder(w).Encode(Response{
Status: "success",
Data: map[string]interface{}{
"user": map[string]string{"username": "testuser"},
},
})
}))
defer server.Close()
logger := zerolog.Nop()
ad := NewAllDebrid(&logger).(*AllDebrid)
ad.baseUrl = server.URL // Override base URL for testing
err := ad.Authenticate("test-api-key")
assert.NoError(t, err)
}
func TestAllDebrid_GetTorrents(t *testing.T) {
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
assert.Equal(t, "/../v4.1/magnet/status", r.URL.Path)
assert.Equal(t, "Bearer key", r.Header.Get("Authorization"))
w.WriteHeader(http.StatusOK)
json.NewEncoder(w).Encode(Response{
Status: "success",
Data: map[string]interface{}{
"magnets": []map[string]interface{}{
{
"id": 123,
"filename": "test.mkv",
"size": 1000,
"hash": "hash123",
"status": "Ready",
"statusCode": 4,
"downloaded": 1000,
"uploaded": 0,
"seeders": 10,
"downloadSpeed": 0,
"uploadSpeed": 0,
"links": []interface{}{},
},
},
},
})
}))
defer server.Close()
logger := zerolog.Nop()
ad := NewAllDebrid(&logger).(*AllDebrid)
ad.baseUrl = server.URL
ad.apiKey = mo.Some("key")
torrents, err := ad.GetTorrents()
assert.NoError(t, err)
assert.Len(t, torrents, 1)
assert.Equal(t, "123", torrents[0].ID)
assert.Equal(t, "test.mkv", torrents[0].Name)
assert.True(t, torrents[0].IsReady)
}
func TestAllDebrid_Integration(t *testing.T) {
apiKey := os.Getenv("ALLDEBRID_API_KEY")
if apiKey == "" {
t.Skip("ALLDEBRID_API_KEY not set")
}
logger := zerolog.Nop()
ad := NewAllDebrid(&logger).(*AllDebrid)
// Test Authenticate
err := ad.Authenticate(apiKey)
require.NoError(t, err)
// Test GetTorrents
torrents, err := ad.GetTorrents()
require.NoError(t, err)
t.Logf("Found %d torrents", len(torrents))
// Optional: Add a magnet and check info
// magnet := "magnet:?xt=urn:btih:..."
// id, err := ad.AddTorrent(debrid.AddTorrentOptions{MagnetLink: magnet})
// ...
}

View File

@@ -1,11 +0,0 @@
package debrid_client
import (
"seanime/internal/test_utils"
"testing"
)
func TestDownload(t *testing.T) {
test_utils.InitTestProvider(t)
}

View File

@@ -110,13 +110,6 @@ func (r *Repository) findBestTorrentFromManualSelection(provider debrid.Provider
r.logger.Debug().Msgf("debridstream: Analyzing torrent from %s for %s", t.Link, media.GetTitleSafe())
// Get the torrent's provider extension
providerExtension, ok := r.torrentRepository.GetAnimeProviderExtension(t.Provider)
if !ok {
r.logger.Error().Str("provider", t.Provider).Msg("debridstream: provider extension not found")
return nil, fmt.Errorf("provider extension not found")
}
// Check if the torrent is cached
if t.InfoHash != "" {
instantAvail := provider.GetInstantAvailability([]string{t.InfoHash})
@@ -127,7 +120,7 @@ func (r *Repository) findBestTorrentFromManualSelection(provider debrid.Provider
}
// Get the magnet link
magnet, err := providerExtension.GetProvider().GetTorrentMagnetLink(t)
magnet, err := r.torrentRepository.ResolveMagnetLink(t)
if err != nil {
r.logger.Error().Err(err).Msgf("debridstream: Error scraping magnet link for %s", t.Link)
return nil, fmt.Errorf("could not get magnet link from %s", t.Link)

View File

@@ -1,50 +0,0 @@
package debrid_client
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata_provider"
"seanime/internal/continuity"
"seanime/internal/database/db"
"seanime/internal/events"
"seanime/internal/extension"
"seanime/internal/library/playbackmanager"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/util"
"testing"
)
func GetMockRepository(t *testing.T, db *db.Database) *Repository {
logger := util.NewLogger()
wsEventManager := events.NewWSEventManager(logger)
anilistClient := anilist.TestGetMockAnilistClient()
anilistClientRef := util.NewRef(anilistClient)
extensionBankRef := util.NewRef(extension.NewUnifiedBank())
platform := anilist_platform.NewAnilistPlatform(anilistClientRef, extensionBankRef, logger, db)
metadataProvider := metadata_provider.GetFakeProvider(t, db)
platformRef := util.NewRef(platform)
metadataProviderRef := util.NewRef(metadataProvider)
playbackManager := playbackmanager.New(&playbackmanager.NewPlaybackManagerOptions{
WSEventManager: wsEventManager,
Logger: logger,
PlatformRef: platformRef,
MetadataProviderRef: metadataProviderRef,
Database: db,
RefreshAnimeCollectionFunc: func() {
// Do nothing
},
DiscordPresence: nil,
IsOfflineRef: util.NewRef(false),
ContinuityManager: continuity.GetMockManager(t, db),
})
r := NewRepository(&NewRepositoryOptions{
Logger: logger,
WSEventManager: wsEventManager,
Database: db,
MetadataProviderRef: metadataProviderRef,
PlatformRef: platformRef,
PlaybackManager: playbackManager,
})
return r
}

View File

@@ -117,7 +117,9 @@ func (s *StreamManager) startStream(ctx context.Context, opts *StartStreamOption
//}()
if opts.PlaybackType == PlaybackTypeNativePlayer {
s.repository.directStreamManager.PrepareNewStream(opts.ClientId, "Selecting torrent...")
s.repository.directStreamManager.BeginOpen(opts.ClientId, "Selecting torrent...", func() {
s.repository.CancelStream(&CancelStreamOptions{RemoveTorrent: true})
})
}
//
@@ -128,6 +130,10 @@ func (s *StreamManager) startStream(ctx context.Context, opts *StartStreamOption
s.repository.wsEventManager.SendEvent(events.HideIndefiniteLoader, "debridstream")
return err
}
if opts.PlaybackType == PlaybackTypeNativePlayer && !s.repository.directStreamManager.IsOpenActive(opts.ClientId) {
s.repository.wsEventManager.SendEvent(events.HideIndefiniteLoader, "debridstream")
return nil
}
episodeNumber := opts.EpisodeNumber
aniDbEpisode := strconv.Itoa(episodeNumber)
@@ -205,6 +211,10 @@ func (s *StreamManager) startStream(ctx context.Context, opts *StartStreamOption
s.repository.wsEventManager.SendEvent(events.HideIndefiniteLoader, "debridstream")
return fmt.Errorf("debridstream: Failed to start stream, no torrent provided")
}
if opts.PlaybackType == PlaybackTypeNativePlayer && !s.repository.directStreamManager.IsOpenActive(opts.ClientId) {
s.repository.wsEventManager.SendEvent(events.HideIndefiniteLoader, "debridstream")
return nil
}
s.repository.wsEventManager.SendEvent(events.DebridStreamState, StreamState{
Status: StreamStatusDownloading,
@@ -271,7 +281,9 @@ func (s *StreamManager) startStream(ctx context.Context, opts *StartStreamOption
go func() {
for item := range itemCh {
if opts.PlaybackType == PlaybackTypeNativePlayer {
s.repository.directStreamManager.PrepareNewStream(opts.ClientId, fmt.Sprintf("Awaiting stream: %d%%", item.CompletionPercentage))
if !s.repository.directStreamManager.UpdateOpenStep(opts.ClientId, fmt.Sprintf("Awaiting stream: %d%%", item.CompletionPercentage)) {
return
}
}
s.repository.wsEventManager.SendEvent(events.DebridStreamState, StreamState{
@@ -298,6 +310,10 @@ func (s *StreamManager) startStream(ctx context.Context, opts *StartStreamOption
ready()
return
}
if opts.PlaybackType == PlaybackTypeNativePlayer && !s.repository.directStreamManager.IsOpenActive(opts.ClientId) {
ready()
return
}
if err != nil {
s.repository.logger.Err(err).Msg("debridstream: Failed to get stream URL")
@@ -510,6 +526,9 @@ func (s *StreamManager) startStream(ctx context.Context, opts *StartStreamOption
Message: "External player link sent",
})
case PlaybackTypeNativePlayer:
if !s.repository.directStreamManager.IsOpenActive(opts.ClientId) {
return
}
err := s.repository.directStreamManager.PlayDebridStream(ctx, filepath, directstream.PlayDebridStreamOptions{
StreamUrl: streamUrl,
MediaId: media.ID,
@@ -555,12 +574,16 @@ func (s *StreamManager) startStream(ctx context.Context, opts *StartStreamOption
}
func (s *StreamManager) cancelStream(opts *CancelStreamOptions) {
if s.repository.directStreamManager != nil {
s.repository.directStreamManager.CloseOpen("")
}
if s.downloadCtxCancelFunc != nil {
s.downloadCtxCancelFunc()
s.downloadCtxCancelFunc = nil
}
s.repository.wsEventManager.SendEvent(events.ShowIndefiniteLoader, "debridstream")
s.repository.wsEventManager.SendEvent(events.HideIndefiniteLoader, "debridstream")
s.currentStreamUrl = ""

View File

@@ -4,6 +4,7 @@ import (
"fmt"
"os"
"path/filepath"
"seanime/internal/testutil"
"testing"
"github.com/stretchr/testify/require"
@@ -30,6 +31,20 @@ func PrintPathStructure(path string, indent string) error {
return nil
}
func writeFixtureFile(t testing.TB, root string, fixturePath string) string {
t.Helper()
target := filepath.Join(root, testutil.FixtureRelPath(fixturePath))
if err := os.MkdirAll(filepath.Dir(target), 0755); err != nil {
t.Fatalf("failed to create directory: %v", err)
}
if err := os.WriteFile(target, []byte("dummy content"), 0644); err != nil {
t.Fatalf("failed to create file %s: %v", target, err)
}
return target
}
func TestCreateTempDir(t *testing.T) {
files := []string{
@@ -39,13 +54,7 @@ func TestCreateTempDir(t *testing.T) {
root := t.TempDir()
for _, file := range files {
path := filepath.Join(root, file)
if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil {
t.Fatalf("failed to create directory: %v", err)
}
if err := os.WriteFile(path, []byte("dummy content"), 0644); err != nil {
t.Fatalf("failed to create file %s: %v", path, err)
}
writeFixtureFile(t, root, file)
}
defer os.RemoveAll(root)
@@ -172,13 +181,7 @@ func TestMoveContentsTo(t *testing.T) {
// Create the source directory structure
root := t.TempDir()
for _, file := range tt.files {
path := filepath.Join(root, file)
if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil {
t.Fatalf("failed to create directory: %v", err)
}
if err := os.WriteFile(path, []byte("dummy content"), 0644); err != nil {
t.Fatalf("failed to create file %s: %v", path, err)
}
writeFixtureFile(t, root, file)
}
PrintPathStructure(root, "")

View File

@@ -289,7 +289,7 @@ func (t *RealDebrid) AddTorrent(opts debrid.AddTorrentOptions) (string, error) {
torrents, err := t.getTorrents(false)
if err == nil {
for _, torrent := range torrents {
if torrent.Hash == opts.InfoHash {
if strings.EqualFold(torrent.Hash, opts.InfoHash) {
t.logger.Debug().Str("torrentId", torrent.ID).Msg("realdebrid: Torrent already added")
torrentId = torrent.ID
break

View File

@@ -1,151 +0,0 @@
package realdebrid
import (
"fmt"
"seanime/internal/debrid/debrid"
"seanime/internal/test_utils"
"seanime/internal/util"
"strings"
"testing"
"github.com/stretchr/testify/require"
)
func TestTorBox_GetTorrents(t *testing.T) {
test_utils.InitTestProvider(t)
logger := util.NewLogger()
rd := NewRealDebrid(logger)
err := rd.Authenticate(test_utils.ConfigData.Provider.RealDebridApiKey)
require.NoError(t, err)
fmt.Println("=== All torrents ===")
torrents, err := rd.GetTorrents()
require.NoError(t, err)
util.Spew(torrents)
}
func TestTorBox_AddTorrent(t *testing.T) {
t.Skip("Skipping test that adds a torrent to RealDebrid")
test_utils.InitTestProvider(t)
// Already added
magnet := ""
logger := util.NewLogger()
rd := NewRealDebrid(logger)
err := rd.Authenticate(test_utils.ConfigData.Provider.RealDebridApiKey)
require.NoError(t, err)
torrentId, err := rd.AddTorrent(debrid.AddTorrentOptions{
MagnetLink: magnet,
InfoHash: "TODO",
})
require.NoError(t, err)
torrentId2, err := rd.AddTorrent(debrid.AddTorrentOptions{
MagnetLink: magnet,
InfoHash: "TODO",
})
require.NoError(t, err)
require.Equal(t, torrentId, torrentId2)
fmt.Println(torrentId)
}
func TestTorBox_getTorrentInfo(t *testing.T) {
test_utils.InitTestProvider(t)
logger := util.NewLogger()
rd := NewRealDebridT(logger)
err := rd.Authenticate(test_utils.ConfigData.Provider.RealDebridApiKey)
require.NoError(t, err)
ti, err := rd.getTorrentInfo("W3IWF5TX3AE6G")
require.NoError(t, err)
util.Spew(ti)
}
func TestTorBox_GetDownloadUrl(t *testing.T) {
test_utils.InitTestProvider(t)
logger := util.NewLogger()
rd := NewRealDebridT(logger)
err := rd.Authenticate(test_utils.ConfigData.Provider.RealDebridApiKey)
require.NoError(t, err)
urls, err := rd.GetTorrentDownloadUrl(debrid.DownloadTorrentOptions{
ID: "W3IWF5TX3AE6G",
FileId: "11",
})
require.NoError(t, err)
util.Spew(strings.Split(urls, ","))
}
func TestTorBox_InstantAvailability(t *testing.T) {
test_utils.InitTestProvider(t)
logger := util.NewLogger()
rd := NewRealDebridT(logger)
err := rd.Authenticate(test_utils.ConfigData.Provider.RealDebridApiKey)
require.NoError(t, err)
avail := rd.GetInstantAvailability([]string{"9f4961a9c71eeb53abce2ef2afc587b452dee5eb"})
require.NoError(t, err)
util.Spew(avail)
}
func TestTorBox_ChooseFileAndDownload(t *testing.T) {
//t.Skip("Skipping test that adds a torrent to RealDebrid")
test_utils.InitTestProvider(t)
magnet := ""
logger := util.NewLogger()
rd := NewRealDebrid(logger)
err := rd.Authenticate(test_utils.ConfigData.Provider.RealDebridApiKey)
require.NoError(t, err)
// Should add the torrent and get the torrent info
torrentInfo, err := rd.GetTorrentInfo(debrid.GetTorrentInfoOptions{
MagnetLink: magnet,
InfoHash: "TODO",
})
require.NoError(t, err)
// The torrent should have one file
require.Len(t, torrentInfo.Files, 1)
file := torrentInfo.Files[0]
// Download the file
resp, err := rd.AddTorrent(debrid.AddTorrentOptions{
MagnetLink: magnet,
InfoHash: "TODO",
SelectFileId: file.ID,
})
require.NoError(t, err)
util.Spew(resp)
}

View File

@@ -238,7 +238,7 @@ func (t *TorBox) AddTorrent(opts debrid.AddTorrentOptions) (string, error) {
torrents, err := t.getTorrents()
if err == nil {
for _, torrent := range torrents {
if torrent.Hash == opts.InfoHash {
if strings.EqualFold(torrent.Hash, opts.InfoHash) {
return strconv.Itoa(torrent.ID), nil
}
}

View File

@@ -1,68 +0,0 @@
package torbox
import (
"fmt"
"seanime/internal/debrid/debrid"
"seanime/internal/test_utils"
"seanime/internal/util"
"strconv"
"testing"
"github.com/stretchr/testify/require"
)
// TestTorBox_GetTorrents exercises the read-only TorBox endpoints in one
// pass: list all torrents, fetch a single hard-coded torrent by ID, then
// resolve its download URL. Output is dumped for manual inspection.
func TestTorBox_GetTorrents(t *testing.T) {
	test_utils.InitTestProvider(t)
	logger := util.NewLogger()
	tb := NewTorBox(logger)
	err := tb.Authenticate(test_utils.ConfigData.Provider.TorBoxApiKey)
	require.NoError(t, err)
	fmt.Println("=== All torrents ===")
	torrents, err := tb.GetTorrents()
	require.NoError(t, err)
	util.Spew(torrents)
	fmt.Println("=== Selecting torrent ===")
	// 98926 presumably references a torrent already present on the test
	// account — TODO confirm before running.
	torrent, err := tb.GetTorrent(strconv.Itoa(98926))
	require.NoError(t, err)
	util.Spew(torrent)
	fmt.Println("=== Download link ===")
	downloadUrl, err := tb.GetTorrentDownloadUrl(debrid.DownloadTorrentOptions{
		ID: strconv.Itoa(98926),
	})
	require.NoError(t, err)
	fmt.Println(downloadUrl)
}
// TestTorBox_AddTorrent adds a magnet to the TorBox account and prints the
// resulting torrent ID. Skipped by default because it mutates remote state;
// fill in the magnet link and remove the skip to run it manually.
func TestTorBox_AddTorrent(t *testing.T) {
	t.Skip("Skipping test that adds a torrent to TorBox")
	test_utils.InitTestProvider(t)

	// Fill in a magnet link before running.
	magnetLink := ""

	client := NewTorBox(util.NewLogger())
	require.NoError(t, client.Authenticate(test_utils.ConfigData.Provider.TorBoxApiKey))

	id, err := client.AddTorrent(debrid.AddTorrentOptions{
		MagnetLink: magnetLink,
	})
	require.NoError(t, err)
	fmt.Println(id)
}

View File

@@ -179,15 +179,30 @@ var videoProxyClient = &http.Client{
MaxIdleConns: 100,
MaxIdleConnsPerHost: 10,
IdleConnTimeout: 90 * time.Second,
TLSHandshakeTimeout: 10 * time.Second,
ForceAttemptHTTP2: false, // Fixes issues on Linux
},
Timeout: 60 * time.Second,
}
// Headers that should not be forwarded to the CDN
var proxyHopHeaders = map[string]bool{
"Host": true,
"Accept": true,
"Accept-Encoding": true,
"Range": true,
"Connection": true,
"Proxy-Connection": true,
"Keep-Alive": true,
"Proxy-Authenticate": true,
"Proxy-Authorization": true,
"Te": true,
"Trailer": true,
"Transfer-Encoding": true,
"Upgrade": true,
}
func (s *DebridStream) GetStreamHandler() http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
//s.logger.Trace().Str("range", r.Header.Get("Range")).Str("method", r.Method).Msg("directstream(debrid): Stream endpoint hit")
if s.streamUrl == "" {
s.logger.Error().Msg("directstream(debrid): No URL to stream")
http.Error(w, "No URL to stream", http.StatusNotFound)
@@ -217,7 +232,9 @@ func (s *DebridStream) GetStreamHandler() http.Handler {
if err != nil {
s.logger.Error().Err(err).Msg("directstream(debrid): Failed to create reader for stream url")
http.Error(w, "Failed to create reader for stream url", http.StatusInternalServerError)
return
}
defer reader.Close()
if isThumbnailRequest(r) {
ra, ok := handleRange(w, r, reader, s.filename, s.contentLength)
@@ -241,11 +258,15 @@ func (s *DebridStream) GetStreamHandler() http.Handler {
return
}
if ra.Start < s.contentLength-1024*1024 {
// subReader is closed inside the subtitle goroutine
go s.StartSubtitleStreamP(s, s.manager.playbackCtx, subReader, ra.Start, 0)
} else {
_ = subReader.Close()
}
}
req, err := http.NewRequest(http.MethodGet, s.streamUrl, nil)
// Use the client's request context so the CDN request is cancelled when the client disconnects
req, err := http.NewRequestWithContext(r.Context(), http.MethodGet, s.streamUrl, nil)
if err != nil {
http.Error(w, "Failed to create request", http.StatusInternalServerError)
return
@@ -254,8 +275,11 @@ func (s *DebridStream) GetStreamHandler() http.Handler {
req.Header.Set("Accept", "*/*")
req.Header.Set("Range", rangeHeader)
// Copy original request headers to the proxied request
// Only forward safe headers to avoid conflicts with the CDN
for key, values := range r.Header {
if proxyHopHeaders[http.CanonicalHeaderKey(key)] {
continue
}
for _, value := range values {
req.Header.Add(key, value)
}
@@ -263,11 +287,19 @@ func (s *DebridStream) GetStreamHandler() http.Handler {
resp, err := videoProxyClient.Do(req)
if err != nil {
s.logger.Error().Err(err).Str("range", rangeHeader).Msg("directstream(debrid): CDN proxy request failed")
http.Error(w, "Failed to proxy request", http.StatusInternalServerError)
return
}
defer resp.Body.Close()
// Reject non-2xx CDN responses to avoid corrupting the file cache
if resp.StatusCode >= 300 {
s.logger.Error().Int("status", resp.StatusCode).Str("range", rangeHeader).Msg("directstream(debrid): CDN returned non-2xx status")
http.Error(w, fmt.Sprintf("CDN error: %d", resp.StatusCode), resp.StatusCode)
return
}
// Copy response headers
for key, values := range resp.Header {
for _, value := range values {
@@ -278,7 +310,9 @@ func (s *DebridStream) GetStreamHandler() http.Handler {
w.Header().Set("Content-Type", s.LoadContentType()) // overwrite the type
w.WriteHeader(resp.StatusCode)
_ = s.httpStream.WriteAndFlush(resp.Body, w, ra.Start)
if err := s.httpStream.WriteAndFlush(resp.Body, w, ra.Start); err != nil {
s.logger.Warn().Err(err).Str("range", rangeHeader).Msg("directstream(debrid): WriteAndFlush error")
}
})
}
@@ -333,6 +367,8 @@ func (m *Manager) PlayDebridStream(ctx context.Context, filepath string, opts Pl
}
go func() {
m.playbackMu.Lock()
defer m.playbackMu.Unlock()
m.loadStream(stream)
}()

View File

@@ -242,6 +242,8 @@ type PlayLocalFileOptions struct {
// PlayLocalFile is used by a module to load a new torrent stream.
func (m *Manager) PlayLocalFile(ctx context.Context, opts PlayLocalFileOptions) error {
m.ResetOpenState(opts.ClientId)
m.playbackMu.Lock()
defer m.playbackMu.Unlock()

View File

@@ -50,7 +50,10 @@ type (
// ---------- Playback State ---------- //
currentStream mo.Option[Stream] // The current stream being played
currentStream mo.Option[Stream] // The current stream being played
preparingClientID string
preparationCanceled bool
preparationCancelFunc func()
// \/ Stream playback
// This is set by [SetStreamEpisodeCollection]

View File

@@ -284,6 +284,8 @@ type PlayNakamaStreamOptions struct {
// PlayNakamaStream is used by a module to load a new nakama stream.
func (m *Manager) PlayNakamaStream(ctx context.Context, opts PlayNakamaStreamOptions) error {
m.ResetOpenState(opts.ClientId)
m.playbackMu.Lock()
defer m.playbackMu.Unlock()
@@ -320,6 +322,8 @@ func (m *Manager) PlayNakamaStream(ctx context.Context, opts PlayNakamaStreamOpt
}
go func() {
m.playbackMu.Lock()
defer m.playbackMu.Unlock()
m.loadStream(stream)
}()

View File

@@ -61,17 +61,128 @@ type Stream interface {
// getStreamHandler returns the HTTP handler that serves the currently
// loaded stream. It snapshots the current stream under playbackMu, rejects
// the request when no stream is loaded or its playback info is unavailable,
// and requires the request's "id" query parameter to match the stream's
// playback ID before delegating to the stream's own handler — this rejects
// requests targeting a stale playback session.
func (m *Manager) getStreamHandler() http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Snapshot the stream under the lock; LoadPlaybackInfo is called
		// after unlocking since implementations may block.
		m.playbackMu.Lock()
		stream, ok := m.currentStream.Get()
		m.playbackMu.Unlock()
		if !ok {
			http.Error(w, "no stream", http.StatusInternalServerError)
			return
		}
		playbackInfo, err := stream.LoadPlaybackInfo()
		if err != nil || playbackInfo == nil {
			http.Error(w, "stream is not ready", http.StatusInternalServerError)
			return
		}
		// Only serve requests addressed to the active playback session.
		requestStreamID := r.URL.Query().Get("id")
		if requestStreamID == "" || requestStreamID != playbackInfo.ID {
			http.Error(w, "stream not found", http.StatusNotFound)
			return
		}
		stream.GetStreamHandler().ServeHTTP(w, r)
	})
}
// BeginOpen starts a new stream-opening sequence for clientId: it tears
// down any previous playback via prepareNewStream, records the preparing
// client and its cancel callback, and signals the native player with the
// first step. Returns false when the step was skipped (preparation already
// cancelled for this client — see updateOpenStepLocked).
func (m *Manager) BeginOpen(clientId string, step string, onCancel func()) bool {
	m.playbackMu.Lock()
	defer m.playbackMu.Unlock()
	m.prepareNewStream()
	m.preparingClientID = clientId
	m.preparationCanceled = false
	m.preparationCancelFunc = onCancel
	return m.updateOpenStepLocked(clientId, step)
}
// UpdateOpenStep reports a new loading step for an in-progress open
// sequence, under the playback lock. Returns false when the step was
// skipped because this client's preparation was cancelled.
func (m *Manager) UpdateOpenStep(clientId string, step string) bool {
	m.playbackMu.Lock()
	defer m.playbackMu.Unlock()
	return m.updateOpenStepLocked(clientId, step)
}
// IsOpenActive reports whether an open sequence may proceed for clientId.
// It returns true when no preparation is tracked, or when the tracked
// preparation belongs to a different client; otherwise it reflects whether
// the tracked preparation has not been cancelled.
func (m *Manager) IsOpenActive(clientId string) bool {
	m.playbackMu.Lock()
	defer m.playbackMu.Unlock()
	switch {
	case m.preparingClientID == "":
		// Nothing is being prepared; nothing blocks this client.
		return true
	case clientId != "" && m.preparingClientID != clientId:
		// A different client owns the preparation; this one is unaffected.
		return true
	default:
		return !m.preparationCanceled
	}
}
// CancelOpen marks the current open sequence for clientId as cancelled and
// invokes its cancel callback outside the lock. Returns false when the
// preparation belongs to another client.
func (m *Manager) CancelOpen(clientId string) bool {
	m.playbackMu.Lock()
	fn, matched := m.cancelPreparationLocked(clientId, true)
	m.playbackMu.Unlock()

	if !matched {
		return false
	}
	// Invoke the callback without holding playbackMu.
	if fn != nil {
		fn()
	}
	return true
}
// CloseOpen cancels the open sequence for clientId (or for the currently
// preparing client when clientId is empty) and tells the native player to
// abort it. Returns false when nothing is being prepared or the preparation
// belongs to a different client. The native player's AbortOpen is invoked
// after releasing the lock.
func (m *Manager) CloseOpen(clientId string) bool {
	m.playbackMu.Lock()
	if m.preparingClientID == "" {
		m.playbackMu.Unlock()
		return false
	}
	if clientId != "" && m.preparingClientID != clientId {
		m.playbackMu.Unlock()
		return false
	}
	// Fall back to the tracked client when the caller did not specify one.
	targetClientID := m.preparingClientID
	if clientId != "" {
		targetClientID = clientId
	}
	_, _ = m.cancelPreparationLocked(targetClientID, true)
	m.playbackMu.Unlock()
	m.nativePlayer.AbortOpen(targetClientID, "")
	return true
}
// ResetOpenState clears the tracked preparation state, unless a different
// client currently owns the preparation, in which case it is left intact.
func (m *Manager) ResetOpenState(clientId string) {
	m.playbackMu.Lock()
	defer m.playbackMu.Unlock()

	ownedByOther := clientId != "" && m.preparingClientID != "" && m.preparingClientID != clientId
	if ownedByOther {
		return
	}
	m.clearPreparationLocked()
}
// GetCurrentPlaybackIdentity returns the playback ID and client ID of the
// currently loaded stream. ok is false when no stream is loaded or its
// playback info cannot be retrieved. LoadPlaybackInfo is called after
// releasing the lock since implementations may block.
func (m *Manager) GetCurrentPlaybackIdentity() (playbackID string, clientID string, ok bool) {
	m.playbackMu.Lock()
	current, hasStream := m.currentStream.Get()
	m.playbackMu.Unlock()

	if !hasStream {
		return "", "", false
	}

	info, err := current.LoadPlaybackInfo()
	if err != nil || info == nil {
		return "", "", false
	}
	return info.ID, current.ClientId(), true
}
func (m *Manager) PrepareNewStream(clientId string, step string) {
m.prepareNewStream(clientId, step)
m.BeginOpen(clientId, step, nil)
}
func (m *Manager) StreamError(err error) {
@@ -83,10 +194,12 @@ func (m *Manager) StreamError(err error) {
}
// AbortOpen aborts the current stream preparation for clientId with the
// given error, under the playback lock.
func (m *Manager) AbortOpen(clientId string, err error) {
	m.playbackMu.Lock()
	defer m.playbackMu.Unlock()
	m.abortPreparation(clientId, err)
}
func (m *Manager) prepareNewStream(clientId string, step string) {
func (m *Manager) prepareNewStream() {
// Cancel the previous playback
if m.playbackCtxCancelFunc != nil {
m.Logger.Trace().Msgf("directstream: Cancelling previous playback")
@@ -101,35 +214,75 @@ func (m *Manager) prepareNewStream(clientId string, step string) {
m.currentStream = mo.None[Stream]()
}
m.clearPreparationLocked()
}
// updateOpenStepLocked signals the native player that a new open step has
// been reached. Caller must hold playbackMu. Returns false (and skips the
// signal) when this client's preparation was already cancelled. If no
// preparation is tracked yet, clientId becomes the preparing client.
func (m *Manager) updateOpenStepLocked(clientId string, step string) bool {
	if m.preparingClientID == clientId && m.preparationCanceled {
		m.Logger.Debug().Str("clientId", clientId).Msg("directstream: Skipping open step for cancelled preparation")
		return false
	}
	if m.preparingClientID == "" {
		m.preparingClientID = clientId
	}
	m.Logger.Debug().Msgf("directstream: Signaling native player that a new stream is starting")
	// Signal the native player that a new stream is starting
	m.nativePlayer.OpenAndAwait(clientId, step)
	return true
}
func (m *Manager) abortPreparation(clientId string, err error) {
// Cancel the previous playback
if m.playbackCtxCancelFunc != nil {
m.Logger.Trace().Msgf("directstream: Cancelling previous playback")
m.playbackCtxCancelFunc()
m.playbackCtxCancelFunc = nil
}
// Clear the current stream if it exists
if stream, ok := m.currentStream.Get(); ok {
m.Logger.Debug().Msgf("directstream: Terminating previous stream before preparing new stream")
stream.Terminate()
m.currentStream = mo.None[Stream]()
}
m.prepareNewStream()
m.Logger.Debug().Msgf("directstream: Signaling native player to abort stream preparation, reason: %s", err.Error())
// Signal the native player that a new stream is starting
m.clearPreparationLocked()
m.nativePlayer.AbortOpen(clientId, err.Error())
}
// clearPreparationLocked resets all preparation-tracking fields.
// Caller must hold playbackMu.
func (m *Manager) clearPreparationLocked() {
	m.preparingClientID = ""
	m.preparationCanceled = false
	m.preparationCancelFunc = nil
}
// cancelPreparationLocked marks the tracked preparation as cancelled and
// returns its cancel callback. Caller must hold playbackMu.
//
// When clientId is non-empty and a different client owns the preparation,
// nothing happens and (nil, false) is returned. Otherwise the preparation
// is attributed to clientId (when provided), flagged as cancelled, and the
// stored cancel func is returned — cleared when clearCancelFunc is true —
// so the caller can invoke it outside the lock.
func (m *Manager) cancelPreparationLocked(clientId string, clearCancelFunc bool) (func(), bool) {
	if clientId != "" && m.preparingClientID != "" && m.preparingClientID != clientId {
		return nil, false
	}
	// Attribute the preparation to the caller. (The previous extra
	// `preparingClientID == ""` branch was dead code: assigning an empty
	// clientId was a no-op, and non-empty IDs are covered here.)
	if clientId != "" {
		m.preparingClientID = clientId
	}
	m.preparationCanceled = true
	cancelFunc := m.preparationCancelFunc
	if clearCancelFunc {
		m.preparationCancelFunc = nil
	}
	return cancelFunc, true
}
// shouldStopOpeningLocked reports whether the open sequence owned by
// clientId has been cancelled. Caller must hold playbackMu.
func (m *Manager) shouldStopOpeningLocked(clientId string) bool {
	return m.preparingClientID == clientId && m.preparationCanceled
}
// discardCurrentStreamLocked clears currentStream only when it still points
// at the given stream, leaving any newer stream loaded meanwhile untouched.
// Caller must hold playbackMu.
func (m *Manager) discardCurrentStreamLocked(stream Stream) {
	if currentStream, ok := m.currentStream.Get(); ok && currentStream == stream {
		m.currentStream = mo.None[Stream]()
	}
}
// loadStream loads a new stream and cancels the previous one.
// Caller should use mutex to lock the manager.
func (m *Manager) loadStream(stream Stream) {
m.prepareNewStream(stream.ClientId(), "Loading stream...")
if !m.updateOpenStepLocked(stream.ClientId(), "Loading stream...") {
return
}
m.Logger.Debug().Msgf("directstream: Loading stream")
m.currentStream = mo.Some(stream)
@@ -140,7 +293,10 @@ func (m *Manager) loadStream(stream Stream) {
m.playbackCtxCancelFunc = cancel
m.Logger.Debug().Msgf("directstream: Loading content type")
m.nativePlayer.OpenAndAwait(stream.ClientId(), "Loading metadata...")
if !m.updateOpenStepLocked(stream.ClientId(), "Loading metadata...") {
m.discardCurrentStreamLocked(stream)
return
}
// Load the content type
contentType := stream.LoadContentType()
if contentType == "" {
@@ -148,6 +304,10 @@ func (m *Manager) loadStream(stream Stream) {
m.preStreamError(stream, fmt.Errorf("failed to load content type"))
return
}
if ctx.Err() != nil || m.shouldStopOpeningLocked(stream.ClientId()) {
m.discardCurrentStreamLocked(stream)
return
}
m.Logger.Debug().Msgf("directstream: Signaling native player that metadata is being loaded")
@@ -159,6 +319,10 @@ func (m *Manager) loadStream(stream Stream) {
m.preStreamError(stream, fmt.Errorf("failed to load playback info: %w", err))
return
}
if ctx.Err() != nil || m.shouldStopOpeningLocked(stream.ClientId()) {
m.discardCurrentStreamLocked(stream)
return
}
// Shut the mkv parser logger
//parser, ok := playbackInfo.MkvMetadataParser.Get()
@@ -167,6 +331,7 @@ func (m *Manager) loadStream(stream Stream) {
//}
m.Logger.Debug().Msgf("directstream: Signaling native player that stream is ready")
m.clearPreparationLocked()
m.nativePlayer.Watch(stream.ClientId(), playbackInfo)
}
@@ -179,17 +344,49 @@ func (m *Manager) listenToPlayerEvents() {
for {
select {
case event := <-m.videoCoreSubscriber.Events():
cs, ok := m.currentStream.Get()
if !ok {
continue
}
if !event.IsNativePlayer() {
continue
}
m.playbackMu.Lock()
cs, ok := m.currentStream.Get()
if !ok {
var cancelFunc func()
shouldCancel := false
if _, isTerminated := event.(*videocore.VideoTerminatedEvent); isTerminated {
cancelFunc, shouldCancel = m.cancelPreparationLocked(event.GetClientId(), true)
}
m.playbackMu.Unlock()
if shouldCancel {
if cancelFunc != nil {
cancelFunc()
}
}
continue
}
m.playbackMu.Unlock()
if event.GetClientId() != cs.ClientId() {
continue
}
if _, isTerminated := event.(*videocore.VideoTerminatedEvent); isTerminated {
m.Logger.Debug().Msgf("directstream: Video terminated")
m.playbackMu.Lock()
_, _ = m.cancelPreparationLocked(cs.ClientId(), true)
m.playbackMu.Unlock()
cs.Terminate()
continue
}
playbackInfo, err := cs.LoadPlaybackInfo()
if err != nil || playbackInfo == nil {
continue
}
if playbackInfo.ID != "" && event.GetPlaybackId() != "" && event.GetPlaybackId() != playbackInfo.ID {
continue
}
switch event := event.(type) {
case *videocore.VideoLoadedMetadataEvent:
m.Logger.Debug().Msgf("directstream: Video loaded metadata")
@@ -210,13 +407,10 @@ func (m *Manager) listenToPlayerEvents() {
}
case *videocore.VideoErrorEvent:
m.Logger.Debug().Msgf("directstream: Video error, Error: %s", event.Error)
cs.StreamError(fmt.Errorf(event.Error))
cs.StreamError(fmt.Errorf("%s", event.Error))
case *videocore.SubtitleFileUploadedEvent:
m.Logger.Debug().Msgf("directstream: Subtitle file uploaded, Filename: %s", event.Filename)
cs.OnSubtitleFileUploaded(event.Filename, event.Content)
case *videocore.VideoTerminatedEvent:
m.Logger.Debug().Msgf("directstream: Video terminated")
cs.Terminate()
case *videocore.VideoCompletedEvent:
m.Logger.Debug().Msgf("directstream: Video completed")
@@ -333,7 +527,7 @@ func (s *BaseStream) Terminate() {
// Cancel all active subtitle streams
s.activeSubtitleStreams.Range(func(_ string, s *SubtitleStream) bool {
s.cleanupFunc()
s.Stop(s.completed)
return true
})
s.activeSubtitleStreams.Clear()
@@ -395,6 +589,7 @@ func loadContentType(path string, reader ...io.ReadSeekCloser) string {
func (m *Manager) preStreamError(stream Stream, err error) {
stream.Terminate()
m.clearPreparationLocked()
m.nativePlayer.Error(stream.ClientId(), err)
m.unloadStream()
}

View File

@@ -0,0 +1,210 @@
package directstream
import (
"context"
"io"
"net/http"
"net/http/httptest"
"seanime/internal/api/anilist"
"seanime/internal/library/anime"
"testing"
"time"
"seanime/internal/events"
"seanime/internal/mkvparser"
"seanime/internal/nativeplayer"
"seanime/internal/util"
"seanime/internal/util/result"
"seanime/internal/videocore"
"github.com/samber/mo"
"github.com/stretchr/testify/require"
)
// testStream is a minimal Stream fake for handler tests: it embeds
// BaseStream for canned state and lets the test inject the HTTP handler.
type testStream struct {
	BaseStream
	handler http.Handler
}

// Type reports a fixed torrent stream type for the fake.
func (s *testStream) Type() nativeplayer.StreamType {
	return nativeplayer.StreamTypeTorrent
}

// GetStreamHandler returns the handler injected by the test.
func (s *testStream) GetStreamHandler() http.Handler {
	return s.handler
}

// LoadPlaybackInfo returns the canned playback info/error from BaseStream.
func (s *testStream) LoadPlaybackInfo() (*nativeplayer.PlaybackInfo, error) {
	return s.playbackInfo, s.playbackInfoErr
}
// trackingReadSeekCloser is an io.ReadSeekCloser stub that records whether
// Close was called (its methods are defined further below).
type trackingReadSeekCloser struct {
	// set to true by Close
	closed bool
}
// blockingStream is a Stream fake whose LoadPlaybackInfo blocks until
// loadPlaybackCh is closed, and which signals termination via terminatedCh.
// Used to prove that event handling does not wait on playback-info loading.
type blockingStream struct {
	clientID       string
	loadPlaybackCh chan struct{} // closed by the test to unblock LoadPlaybackInfo
	terminatedCh   chan struct{} // closed by the first Terminate call
	terminated     bool          // guards terminatedCh against double close
}

func (s *blockingStream) Type() nativeplayer.StreamType { return nativeplayer.StreamTypeTorrent }
func (s *blockingStream) LoadContentType() string       { return "video/webm" }
func (s *blockingStream) ClientId() string              { return s.clientID }
func (s *blockingStream) Media() *anilist.BaseAnime     { return nil }
func (s *blockingStream) Episode() *anime.Episode       { return nil }
func (s *blockingStream) ListEntryData() *anime.EntryListData { return nil }
func (s *blockingStream) EpisodeCollection() *anime.EpisodeCollection { return nil }

// LoadPlaybackInfo blocks until the test releases loadPlaybackCh.
func (s *blockingStream) LoadPlaybackInfo() (*nativeplayer.PlaybackInfo, error) {
	<-s.loadPlaybackCh
	return &nativeplayer.PlaybackInfo{ID: "blocked"}, nil
}

func (s *blockingStream) GetAttachmentByName(string) (*mkvparser.AttachmentInfo, bool) {
	return nil, false
}

func (s *blockingStream) GetStreamHandler() http.Handler { return http.NewServeMux() }
func (s *blockingStream) StreamError(error)              {}

// Terminate is idempotent; the first call closes terminatedCh.
func (s *blockingStream) Terminate() {
	if s.terminated {
		return
	}
	s.terminated = true
	close(s.terminatedCh)
}

func (s *blockingStream) GetSubtitleEventCache() *result.Map[string, *mkvparser.SubtitleEvent] {
	return result.NewMap[string, *mkvparser.SubtitleEvent]()
}

func (s *blockingStream) OnSubtitleFileUploaded(string, string) {}
// Read always reports end-of-file without producing data.
func (r *trackingReadSeekCloser) Read(_ []byte) (int, error) {
	return 0, io.EOF
}

// Seek is a no-op that reports position 0.
func (r *trackingReadSeekCloser) Seek(_ int64, _ int) (int64, error) {
	return 0, nil
}

// Close records that the reader was closed.
func (r *trackingReadSeekCloser) Close() error {
	r.closed = true
	return nil
}
// TestGetStreamHandlerRejectsMismatchedPlaybackID asserts that a request
// whose "id" query does not match the active playback ID is answered with
// 404 and never reaches the stream's own handler.
func TestGetStreamHandlerRejectsMismatchedPlaybackID(t *testing.T) {
	handlerHit := false
	fake := &testStream{
		BaseStream: BaseStream{
			clientId: "client-1",
			playbackInfo: &nativeplayer.PlaybackInfo{
				ID: "expected-playback-id",
			},
		},
		handler: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
			handlerHit = true
			w.WriteHeader(http.StatusNoContent)
		}),
	}

	m := &Manager{
		currentStream: mo.Some[Stream](fake),
	}

	rec := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodGet, "/api/v1/directstream/stream?id=stale-playback-id", nil)
	m.getStreamHandler().ServeHTTP(rec, req)

	require.Equal(t, http.StatusNotFound, rec.Code)
	require.False(t, handlerHit)
}
// TestGetStreamHandlerForwardsMatchingPlaybackID asserts that a request
// whose "id" query matches the active playback ID is forwarded to the
// stream's own handler.
func TestGetStreamHandlerForwardsMatchingPlaybackID(t *testing.T) {
	handlerHit := false
	fake := &testStream{
		BaseStream: BaseStream{
			clientId: "client-1",
			playbackInfo: &nativeplayer.PlaybackInfo{
				ID: "playback-id",
			},
		},
		handler: http.HandlerFunc(func(w http.ResponseWriter, _ *http.Request) {
			handlerHit = true
			w.WriteHeader(http.StatusNoContent)
		}),
	}

	m := &Manager{
		currentStream: mo.Some[Stream](fake),
	}

	rec := httptest.NewRecorder()
	req := httptest.NewRequest(http.MethodGet, "/api/v1/directstream/stream?id=playback-id", nil)
	m.getStreamHandler().ServeHTTP(rec, req)

	require.Equal(t, http.StatusNoContent, rec.Code)
	require.True(t, handlerHit)
}
// TestStartSubtitleStreamPClosesReaderWhenParserMissing verifies that
// StartSubtitleStreamP closes the reader it was handed when the playback
// info has no MKV metadata parser — the early-return path must not leak
// the reader.
func TestStartSubtitleStreamPClosesReaderWhenParserMissing(t *testing.T) {
	reader := &trackingReadSeekCloser{}
	stream := &BaseStream{
		logger: util.NewLogger(),
		playbackInfo: &nativeplayer.PlaybackInfo{
			MkvMetadataParser: mo.None[*mkvparser.MetadataParser](),
		},
		activeSubtitleStreams: result.NewMap[string, *SubtitleStream](),
	}
	stream.StartSubtitleStreamP(stream, context.Background(), reader, 0, 1024)
	require.True(t, reader.closed)
}
// TestListenToPlayerEventsTerminatesWithoutWaitingForPlaybackInfo wires a
// real videocore/nativeplayer pair to a Manager whose current stream blocks
// inside LoadPlaybackInfo, then emits a video-terminated client event. The
// event loop must call Terminate on the stream without first waiting for
// playback info; the 250ms timeout fails the test if it blocks.
func TestListenToPlayerEventsTerminatesWithoutWaitingForPlaybackInfo(t *testing.T) {
	logger := util.NewLogger()
	ws := events.NewMockWSEventManager(logger)
	vc := videocore.New(videocore.NewVideoCoreOptions{
		WsEventManager: ws,
		Logger:         logger,
	})
	np := nativeplayer.New(nativeplayer.NewNativePlayerOptions{
		WsEventManager: ws,
		Logger:         logger,
		VideoCore:      vc,
	})
	manager := NewManager(NewManagerOptions{
		Logger:         logger,
		WSEventManager: ws,
		NativePlayer:   np,
		VideoCore:      vc,
	})
	stream := &blockingStream{
		clientID:       "player-client",
		loadPlaybackCh: make(chan struct{}),
		terminatedCh:   make(chan struct{}),
	}
	manager.currentStream = mo.Some[Stream](stream)
	// Unblock the fake's LoadPlaybackInfo and shut videocore down even if
	// the test fails early.
	t.Cleanup(func() {
		close(stream.loadPlaybackCh)
		vc.Shutdown()
	})
	// Simulate the native player reporting that the video was terminated.
	ws.MockSendClientEvent(&events.WebsocketClientEvent{
		ClientID: "socket-client",
		Type:     events.VideoCoreEventType,
		Payload: videocore.ClientEvent{
			ClientId: "player-client",
			Type:     videocore.PlayerEventVideoTerminated,
		},
	})
	select {
	case <-stream.terminatedCh:
	case <-time.After(250 * time.Millisecond):
		t.Fatal("expected terminate to bypass playback info loading")
	}
}

View File

@@ -27,6 +27,7 @@ type SubtitleStream struct {
completed bool // ran until the EOF
cleanupFunc func()
onStop func()
stopOnce sync.Once
}
@@ -34,7 +35,12 @@ func (s *SubtitleStream) Stop(completed bool) {
s.stopOnce.Do(func() {
s.logger.Debug().Int64("offset", s.offset).Msg("directstream: Stopping subtitle stream")
s.completed = completed
s.cleanupFunc()
if s.onStop != nil {
s.onStop()
}
if s.cleanupFunc != nil {
s.cleanupFunc()
}
})
}
@@ -42,6 +48,7 @@ func (s *SubtitleStream) Stop(completed bool) {
func (s *BaseStream) StartSubtitleStreamP(stream Stream, playbackCtx context.Context, newReader io.ReadSeekCloser, offset int64, backoffBytes int64) {
mkvMetadataParser, ok := s.playbackInfo.MkvMetadataParser.Get()
if !ok {
_ = newReader.Close()
return
}
@@ -70,6 +77,7 @@ func (s *BaseStream) StartSubtitleStreamP(stream Stream, playbackCtx context.Con
if !shouldContinue {
s.logger.Debug().Int64("offset", offset).Msg("directstream: Skipping subtitle stream, range already fulfilled")
_ = newReader.Close()
return
}
@@ -77,6 +85,9 @@ func (s *BaseStream) StartSubtitleStreamP(stream Stream, playbackCtx context.Con
subtitleStream.cleanupFunc = subtitleCtxCancel
subtitleStreamId := uuid.New().String()
subtitleStream.onStop = func() {
s.activeSubtitleStreams.Delete(subtitleStreamId)
}
s.activeSubtitleStreams.Set(subtitleStreamId, subtitleStream)
subtitleCh, errCh, _ := subtitleStream.parser.ExtractSubtitles(ctx, newReader, offset, backoffBytes)
@@ -138,7 +149,7 @@ func (s *BaseStream) StartSubtitleStreamP(stream Stream, playbackCtx context.Con
}(newReader)
defer func() {
onFirstEventSent()
subtitleStream.cleanupFunc()
subtitleStream.Stop(subtitleStream.completed)
}()
// Keep track if channels are active to manage loop termination

View File

@@ -81,6 +81,7 @@ func (s *TorrentStream) LoadPlaybackInfo() (ret *nativeplayer.PlaybackInfo, err
// If the content type is an EBML content type, we can create a metadata parser
if isEbmlContent(s.LoadContentType()) {
reader := torrentutil.NewReadSeeker(s.torrent, s.file, s.logger)
defer reader.Close()
parser := mkvparser.NewMetadataParser(reader, s.logger)
metadata := parser.GetMetadata(context.Background())
if metadata.Error != nil {
@@ -150,21 +151,34 @@ func (s *TorrentStream) GetStreamHandler() http.Handler {
_ = tr.Close()
}()
playbackCtx := s.manager.playbackCtx
if playbackCtx == nil {
playbackCtx = r.Context()
}
serveCtx, cancelServe := context.WithCancel(playbackCtx)
stopRequestCancel := context.AfterFunc(r.Context(), cancelServe)
defer func() {
stopRequestCancel()
cancelServe()
}()
ra, ok := handleRange(w, r, tr, name, size)
if !ok {
return
}
go func() {
if _, ok := s.playbackInfo.MkvMetadataParser.Get(); ok {
// Start a subtitle stream from the current position
subReader := s.file.NewReader()
subReader.SetResponsive()
s.StartSubtitleStream(s, s.manager.playbackCtx, subReader, ra.Start)
}
}()
if ra.Start > 0 {
go func(offset int64, subtitleCtx context.Context) {
if _, ok := s.playbackInfo.MkvMetadataParser.Get(); ok {
// Start a subtitle stream from the current position
subReader := s.file.NewReader()
subReader.SetResponsive()
s.StartSubtitleStream(s, subtitleCtx, subReader, offset)
}
}(ra.Start, serveCtx)
}
serveContentRange(w, r, s.manager.playbackCtx, tr, name, size, s.LoadContentType(), ra)
serveContentRange(w, r, serveCtx, tr, name, size, s.LoadContentType(), ra)
})
}
@@ -226,6 +240,8 @@ func (m *Manager) PlayTorrentStream(ctx context.Context, opts PlayTorrentStreamO
go func() {
<-stream.streamReadyCh
m.playbackMu.Lock()
defer m.playbackMu.Unlock()
m.loadStream(stream)
}()

View File

@@ -1,49 +0,0 @@
package discordrpc_client
import (
"seanime/internal/constants"
"testing"
"time"
)
// TestClient connects to the local Discord IPC socket and sets a manga
// rich-presence activity, updating it once after 10 seconds. It requires a
// running Discord client and is verified visually — there are no assertions
// beyond the initial connect.
// NOTE(review): `mangaActivity2 := mangaActivity` copies the struct, but
// Timestamps is a pointer, so assigning through mangaActivity2.Timestamps
// also mutates mangaActivity's timestamp — confirm this aliasing is
// intended.
func TestClient(t *testing.T) {
	drpc, err := New(constants.DiscordApplicationId)
	if err != nil {
		t.Fatalf("failed to connect to discord ipc: %v", err)
	}
	defer drpc.Close()
	mangaActivity := Activity{
		Details: "Boku no Kokoro no Yabai Yatsu",
		State:   "Reading Chapter 30",
		Assets: &Assets{
			LargeImage: "https://s4.anilist.co/file/anilistcdn/media/manga/cover/medium/bx101557-bEJu54cmVYxx.jpg",
			LargeText:  "Boku no Kokoro no Yabai Yatsu",
			SmallImage: "logo",
			SmallText:  "Seanime",
		},
		Timestamps: &Timestamps{
			Start: &Epoch{
				Time: time.Now(),
			},
		},
		Instance: true,
		Type:     3,
	}
	// Set the activity, then update it in the background while the test
	// sleeps below.
	go func() {
		_ = drpc.SetActivity(mangaActivity)
		time.Sleep(10 * time.Second)
		mangaActivity2 := mangaActivity
		mangaActivity2.Timestamps.Start.Time = time.Now()
		mangaActivity2.State = "Reading Chapter 31"
		_ = drpc.SetActivity(mangaActivity2)
		return
	}()
	//if err != nil {
	//	t.Fatalf("failed to set activity: %v", err)
	//}
	// Keep the process alive long enough to observe both presence updates.
	time.Sleep(30 * time.Second)
}

View File

@@ -1,60 +0,0 @@
package discordrpc_presence
import (
"seanime/internal/database/models"
"seanime/internal/util"
"testing"
"time"
)
// TestPresence exercises the Discord presence wrapper manually: it sets a
// manga activity, toggles EnableMangaRichPresence off and on via
// SetSettings, and sets the activity again after each change. It requires a
// running Discord client and is verified visually — there are no
// assertions; the sleeps leave time to observe the presence updating.
func TestPresence(t *testing.T) {
	settings := &models.DiscordSettings{
		EnableRichPresence:      true,
		EnableAnimeRichPresence: true,
		EnableMangaRichPresence: true,
	}
	presence := New(nil, util.NewLogger())
	presence.SetSettings(settings)
	presence.SetUsername("test")
	defer presence.Close()
	presence.SetMangaActivity(&MangaActivity{
		Title:   "Boku no Kokoro no Yabai Yatsu",
		Image:   "https://s4.anilist.co/file/anilistcdn/media/manga/cover/medium/bx101557-bEJu54cmVYxx.jpg",
		Chapter: "30",
	})
	time.Sleep(10 * time.Second)
	// Simulate settings being updated
	// With manga presence disabled, the next SetMangaActivity is expected
	// not to surface — verify visually in the Discord client.
	settings.EnableMangaRichPresence = false
	presence.SetSettings(settings)
	presence.SetUsername("test")
	time.Sleep(5 * time.Second)
	presence.SetMangaActivity(&MangaActivity{
		Title:   "Boku no Kokoro no Yabai Yatsu",
		Image:   "https://s4.anilist.co/file/anilistcdn/media/manga/cover/medium/bx101557-bEJu54cmVYxx.jpg",
		Chapter: "31",
	})
	// Simulate settings being updated
	// Re-enable manga presence and set the activity again.
	settings.EnableMangaRichPresence = true
	presence.SetSettings(settings)
	presence.SetUsername("test")
	time.Sleep(5 * time.Second)
	presence.SetMangaActivity(&MangaActivity{
		Title:   "Boku no Kokoro no Yabai Yatsu",
		Image:   "https://s4.anilist.co/file/anilistcdn/media/manga/cover/medium/bx101557-bEJu54cmVYxx.jpg",
		Chapter: "31",
	})
	time.Sleep(10 * time.Second)
}

View File

@@ -1,83 +1,646 @@
package extension_playground
import (
"os"
"seanime/internal/api/anilist"
"encoding/json"
"fmt"
metadataapi "seanime/internal/api/metadata"
"seanime/internal/api/metadata_provider"
"seanime/internal/database/db"
"seanime/internal/extension"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
hibikemanga "seanime/internal/extension/hibike/manga"
hibikeonlinestream "seanime/internal/extension/hibike/onlinestream"
hibiketorrent "seanime/internal/extension/hibike/torrent"
"seanime/internal/platforms/platform"
"seanime/internal/testmocks"
"seanime/internal/util"
"testing"
"github.com/stretchr/testify/require"
)
func TestGojaAnimeTorrentProvider(t *testing.T) {
test_utils.SetTwoLevelDeep()
test_utils.InitTestProvider(t, test_utils.Anilist())
const (
testAnimeID = 101
testMangaID = 202
)
logger := util.NewLogger()
database, _ := db.NewDatabase(test_utils.ConfigData.Path.DataDir, test_utils.ConfigData.Database.Name, logger)
const torrentProviderScript = `
class Provider {
getSettings() {
console.log("getSettings")
return {
type: "main",
canSmartSearch: true,
supportsAdult: true,
smartSearchFilters: ["query", "episodeNumber", "resolution", "batch", "bestReleases"],
}
}
anilistClient := anilist.TestGetMockAnilistClient()
anilistClientRef := util.NewRef(anilistClient)
extensionBankRef := util.NewRef(extension.NewUnifiedBank())
platform := anilist_platform.NewAnilistPlatform(anilistClientRef, extensionBankRef, logger, database)
platformRef := util.NewRef(platform)
async search(options) {
console.log("search:" + options.query)
return [this.makeTorrent(options.query + ":" + options.media.romajiTitle, options.media.absoluteSeasonOffset, options.media.synonyms.length)]
}
metadataProvider := metadata_provider.GetFakeProvider(t, database)
metadataProviderRef := util.NewRef(metadataProvider)
async smartSearch(options) {
console.log("smartSearch:" + options.query)
return [this.makeTorrent("smart:" + [
options.media.absoluteSeasonOffset,
options.anidbAID,
options.anidbEID,
options.episodeNumber,
options.resolution,
options.bestReleases,
options.batch,
].join(":"), options.episodeNumber, options.anidbEID)]
}
repo := NewPlaygroundRepository(logger, platformRef, metadataProviderRef)
async getTorrentInfoHash(torrent) {
return torrent.infoHash || "calculated-hash"
}
// Get the script
filepath := "../extension_repo/goja_torrent_test/my-torrent-provider.ts"
fileB, err := os.ReadFile(filepath)
if err != nil {
t.Fatal(err)
}
async getTorrentMagnetLink(torrent) {
return torrent.magnetLink || ("magnet:?xt=urn:btih:" + (torrent.infoHash || "calculated-hash"))
}
params := RunPlaygroundCodeParams{
Type: extension.TypeAnimeTorrentProvider,
Language: extension.LanguageTypescript,
Code: string(fileB),
Inputs: nil,
Function: "",
}
tests := []struct {
name string
inputs map[string]interface{}
function string
}{
{
name: "Search",
function: "search",
inputs: map[string]interface{}{
"query": "One Piece",
"mediaId": 21,
},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
params.Function = tt.function
params.Inputs = tt.inputs
resp, err := repo.RunPlaygroundCode(&params)
require.NoError(t, err)
t.Log("Logs:")
t.Log(resp.Logs)
t.Log("\n\nValue:")
t.Log(resp.Value)
})
}
async getLatest() {
return [this.makeTorrent("latest", 0, 0)]
}
makeTorrent(name, episodeNumber, seeders) {
return {
name: name,
date: "2024-01-02T03:04:05Z",
size: 1234,
formattedSize: "1.2 KB",
seeders: seeders,
leechers: 1,
downloadCount: 2,
link: "https://example.com/torrent",
downloadUrl: "https://example.com/torrent/download",
magnetLink: "magnet:?xt=urn:btih:abcdef1234567890",
infoHash: "abcdef1234567890",
resolution: "1080p",
isBatch: episodeNumber === 0,
episodeNumber: episodeNumber,
releaseGroup: "subsplease",
isBestRelease: true,
confirmed: true,
}
}
}
`
// mangaProviderScript is a minimal JavaScript manga provider used by the
// playground tests. search returns two results — one whose id/title mirrors
// the query (so best-match selection can pick it) and one deliberate
// mismatch — while findChapters and findChapterPages return single fixed
// entries keyed off the requested id.
const mangaProviderScript = `
class Provider {
	getSettings() {
		return {
			supportsMultiLanguage: true,
			supportsMultiScanlator: false,
		}
	}
	async search(options) {
		console.log("manga-search:" + options.query)
		return [
			{
				id: options.query === "Blue Lock" ? "exact" : "fallback",
				title: options.query,
				synonyms: [options.query + " Alt"],
				year: options.year,
				image: "https://example.com/manga.jpg",
			},
			{
				id: "mismatch",
				title: "Completely Different",
				synonyms: ["No Match"],
				year: 1999,
				image: "https://example.com/other.jpg",
			},
		]
	}
	async findChapters(id) {
		return [
			{
				id: id + ":1",
				url: "https://example.com/chapters/1",
				title: "Chapter 1 - Start",
				chapter: "1",
				index: 0,
				language: "en",
			},
		]
	}
	async findChapterPages(id) {
		return [
			{
				url: "https://example.com/pages/1.jpg",
				index: 0,
				headers: {
					Referer: "https://example.com/chapters/" + id,
				},
			},
		]
	}
}
`
// onlinestreamProviderScript is a minimal JavaScript online-stream provider
// used by the playground tests. It echoes the search query back as a single
// result (dub/sub chosen from options.dub), returns one episode per series,
// and serves one m3u8 video source with an English subtitle track.
const onlinestreamProviderScript = `
class Provider {
	getSettings() {
		return {
			episodeServers: ["default", "mirror"],
			supportsDub: true,
		}
	}
	async search(options) {
		console.log("stream-search:" + options.query + ":" + options.dub)
		return [
			{
				id: "stream-" + options.query.toLowerCase().replace(/\s+/g, "-"),
				title: options.query,
				url: "https://example.com/anime/" + options.query.toLowerCase().replace(/\s+/g, "-"),
				subOrDub: options.dub ? "dub" : "sub",
			},
		]
	}
	async findEpisodes(id) {
		return [
			{
				id: id + "-1",
				number: 1,
				url: "https://example.com/anime/" + id + "/1",
				title: "Episode 1",
			},
		]
	}
	async findEpisodeServer(episode, server) {
		return {
			server: server,
			headers: {
				Referer: episode.url,
			},
			videoSources: [
				{
					url: "https://cdn.example.com/video.m3u8",
					type: "m3u8",
					quality: "1080p",
					subtitles: [
						{
							id: "en",
							url: "https://cdn.example.com/subtitles/en.vtt",
							language: "en",
							isDefault: true,
						},
					],
				},
			],
		}
	}
}
`
// noResultsOnlinestreamProviderScript is an online-stream provider that
// always returns empty results. The playground tests use it to verify that
// "no results" is surfaced as an error in the response value.
const noResultsOnlinestreamProviderScript = `
class Provider {
	getSettings() {
		return {
			episodeServers: ["default"],
			supportsDub: false,
		}
	}
	async search(_options) {
		return []
	}
	async findEpisodes(_id) {
		return []
	}
	async findEpisodeServer(_episode, _server) {
		return {
			server: "default",
			headers: {},
			videoSources: [],
		}
	}
}
`
// TestPlaygroundResponseFormatting verifies how playground responses render
// plain values, Go errors, and values that cannot be marshalled to JSON.
func TestPlaygroundResponseFormatting(t *testing.T) {
	repo, _, _ := newTestPlaygroundRepository()

	t.Run("string value", func(t *testing.T) {
		dbgLogger := repo.newPlaygroundDebugLogger()
		dbgLogger.logger.Info().Msg("plain-value")
		res := newPlaygroundResponse(dbgLogger, "ok")
		require.Equal(t, "ok", res.Value)
		require.Contains(t, res.Logs, "plain-value")
	})

	t.Run("error value", func(t *testing.T) {
		dbgLogger := repo.newPlaygroundDebugLogger()
		res := newPlaygroundResponse(dbgLogger, fmt.Errorf("boom"))
		require.Contains(t, res.Value, "ERROR: boom")
	})

	t.Run("marshal failure", func(t *testing.T) {
		// A channel cannot be marshalled to JSON, forcing the error path.
		dbgLogger := repo.newPlaygroundDebugLogger()
		res := newPlaygroundResponse(dbgLogger, make(chan int))
		require.Contains(t, res.Value, "ERROR: Failed to marshal value to JSON")
	})
}
// TestPlaygroundRepositoryCachesFetchedMedia checks that repeated lookups of
// the same anime/manga hit the platform only once (the media is cached),
// while anime metadata is re-fetched on every call.
func TestPlaygroundRepositoryCachesFetchedMedia(t *testing.T) {
	repo, fakePlatform, fakeMetadataProvider := newTestPlaygroundRepository()

	// Fetch the same anime twice; only the first call should reach the platform.
	for i := 0; i < 2; i++ {
		anime, metadata, err := repo.getAnime(testAnimeID)
		require.NoError(t, err)
		require.NotNil(t, anime)
		require.NotNil(t, metadata)
	}
	require.Equal(t, 1, fakePlatform.AnimeCalls(testAnimeID))
	require.Equal(t, 2, fakeMetadataProvider.MetadataCalls(testAnimeID))

	// Same for manga: the second lookup is served from the cache.
	for i := 0; i < 2; i++ {
		manga, err := repo.getManga(testMangaID)
		require.NoError(t, err)
		require.NotNil(t, manga)
	}
	require.Equal(t, 1, fakePlatform.MangaCalls(testMangaID))
}
// TestRunPlaygroundCodeValidation exercises up-front parameter validation:
// nil params and unknown extension types are rejected before execution.
func TestRunPlaygroundCodeValidation(t *testing.T) {
	repo, _, _ := newTestPlaygroundRepository()

	resp, err := repo.RunPlaygroundCode(nil)
	require.Nil(t, resp)
	require.EqualError(t, err, "no parameters provided")

	params := &RunPlaygroundCodeParams{
		Type:     extension.Type("not-a-provider"),
		Language: extension.LanguageJavascript,
		Code:     "class Provider {}",
		Inputs:   map[string]interface{}{},
	}
	resp, err = repo.RunPlaygroundCode(params)
	require.Nil(t, resp)
	require.EqualError(t, err, "invalid extension type")
}
// TestRunPlaygroundCodeAnimeTorrentProvider runs the playground against the
// anime torrent provider script and covers every supported call: search,
// smartSearch, the info-hash/magnet helpers, getLatest, getSettings, and the
// unknown-call error path.
func TestRunPlaygroundCodeAnimeTorrentProvider(t *testing.T) {
	repo, _, _ := newTestPlaygroundRepository()

	// newParams builds playground parameters for the torrent provider script.
	newParams := func(lang extension.Language, fn string, inputs map[string]interface{}) *RunPlaygroundCodeParams {
		return &RunPlaygroundCodeParams{
			Type:     extension.TypeAnimeTorrentProvider,
			Language: lang,
			Code:     torrentProviderScript,
			Inputs:   inputs,
			Function: fn,
		}
	}

	t.Run("invalid media id", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams(extension.LanguageJavascript, "search", map[string]interface{}{"mediaId": 0.0}))
		require.Nil(t, resp)
		require.EqualError(t, err, "invalid mediaId")
	})

	t.Run("search typescript payload", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams(extension.LanguageTypescript, "search", map[string]interface{}{
			"mediaId": float64(testAnimeID),
			"query":   "One Piece",
		}))
		require.NoError(t, err)
		require.Contains(t, resp.Logs, "search:One Piece")

		var torrents []hibiketorrent.AnimeTorrent
		decodeJSON(t, resp.Value, &torrents)
		require.Len(t, torrents, 1)
		require.Equal(t, "One Piece:Sample Anime", torrents[0].Name)
		require.Equal(t, "playground-extension", torrents[0].Provider)
		require.Equal(t, 1, torrents[0].Seeders)
	})

	t.Run("smart search includes metadata derived identifiers", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams(extension.LanguageJavascript, "smartSearch", map[string]interface{}{
			"mediaId": float64(testAnimeID),
			"options": map[string]interface{}{
				"query":         "custom-query",
				"batch":         true,
				"episodeNumber": 1,
				"resolution":    "720",
				"bestReleases":  true,
			},
		}))
		require.NoError(t, err)

		var torrents []hibiketorrent.AnimeTorrent
		decodeJSON(t, resp.Value, &torrents)
		require.Len(t, torrents, 1)
		// The name encodes absoluteSeasonOffset, anidbAID, anidbEID and the
		// smart-search options, proving they reached the script.
		require.Equal(t, "smart:12:9001:77:1:720:true:true", torrents[0].Name)
		require.Equal(t, 77, torrents[0].Seeders)
	})

	t.Run("direct info helpers and settings", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams(extension.LanguageJavascript, "getTorrentInfoHash", map[string]interface{}{
			"mediaId": float64(testAnimeID),
			"torrent": `{"infoHash":"hash-123","magnetLink":"magnet:?xt=urn:btih:hash-123"}`,
		}))
		require.NoError(t, err)
		require.Equal(t, "hash-123", resp.Value)

		resp, err = repo.RunPlaygroundCode(newParams(extension.LanguageJavascript, "getTorrentMagnetLink", map[string]interface{}{
			"mediaId": float64(testAnimeID),
			"torrent": `{"infoHash":"hash-123"}`,
		}))
		require.NoError(t, err)
		require.Equal(t, "magnet:?xt=urn:btih:hash-123", resp.Value)

		resp, err = repo.RunPlaygroundCode(newParams(extension.LanguageJavascript, "getLatest", map[string]interface{}{"mediaId": float64(testAnimeID)}))
		require.NoError(t, err)
		var latest []hibiketorrent.AnimeTorrent
		decodeJSON(t, resp.Value, &latest)
		require.Len(t, latest, 1)
		require.Equal(t, "latest", latest[0].Name)

		resp, err = repo.RunPlaygroundCode(newParams(extension.LanguageJavascript, "getSettings", map[string]interface{}{"mediaId": float64(testAnimeID)}))
		require.NoError(t, err)
		var settings hibiketorrent.AnimeProviderSettings
		decodeJSON(t, resp.Value, &settings)
		require.True(t, settings.CanSmartSearch)
		require.Equal(t, hibiketorrent.AnimeProviderTypeMain, settings.Type)
	})

	t.Run("unknown call", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams(extension.LanguageJavascript, "missing", map[string]interface{}{"mediaId": float64(testAnimeID)}))
		require.Nil(t, resp)
		require.EqualError(t, err, "unknown call")
	})
}
// TestRunPlaygroundCodeMangaProvider runs the playground against the manga
// provider script: media-id validation, best-match search, chapter listing,
// chapter pages, and the unknown-call error path.
func TestRunPlaygroundCodeMangaProvider(t *testing.T) {
	repo, _, _ := newTestPlaygroundRepository()

	// newParams builds playground parameters for the manga provider script.
	newParams := func(fn string, inputs map[string]interface{}) *RunPlaygroundCodeParams {
		return &RunPlaygroundCodeParams{
			Type:     extension.TypeMangaProvider,
			Language: extension.LanguageJavascript,
			Code:     mangaProviderScript,
			Inputs:   inputs,
			Function: fn,
		}
	}

	t.Run("invalid media id", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams("search", map[string]interface{}{"mediaId": -1.0}))
		require.Nil(t, resp)
		require.EqualError(t, err, "invalid mediaId")
	})

	t.Run("search selects the best result", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams("search", map[string]interface{}{"mediaId": float64(testMangaID)}))
		require.NoError(t, err)
		require.Contains(t, resp.Logs, "manga-search:Blue Lock")

		var result hibikemanga.SearchResult
		decodeJSON(t, resp.Value, &result)
		// The "exact" result matches the media title and must win over the
		// deliberate "mismatch" result the script also returns.
		require.Equal(t, "exact", result.ID)
		require.Equal(t, "playground-extension", result.Provider)
		require.Equal(t, "Blue Lock", result.Title)
	})

	t.Run("chapters and chapter pages", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams("findChapters", map[string]interface{}{
			"mediaId": float64(testMangaID),
			"id":      "series-1",
		}))
		require.NoError(t, err)
		var chapters []hibikemanga.ChapterDetails
		decodeJSON(t, resp.Value, &chapters)
		require.Len(t, chapters, 1)
		require.Equal(t, "playground-extension", chapters[0].Provider)
		require.Equal(t, "series-1:1", chapters[0].ID)

		resp, err = repo.RunPlaygroundCode(newParams("findChapterPages", map[string]interface{}{
			"mediaId": float64(testMangaID),
			"id":      "chapter-1",
		}))
		require.NoError(t, err)
		var pages []hibikemanga.ChapterPage
		decodeJSON(t, resp.Value, &pages)
		require.Len(t, pages, 1)
		require.Equal(t, "playground-extension", pages[0].Provider)
		require.Equal(t, "https://example.com/pages/1.jpg", pages[0].URL)
	})

	t.Run("unknown call", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams("missing", map[string]interface{}{"mediaId": float64(testMangaID)}))
		require.Nil(t, resp)
		require.EqualError(t, err, "unknown call")
	})
}
// TestRunPlaygroundCodeOnlinestreamProvider runs the playground against the
// online-stream provider scripts: media-id validation, best-match search,
// the empty-results error surface, episode/server lookup, and unknown calls.
func TestRunPlaygroundCodeOnlinestreamProvider(t *testing.T) {
	repo, _, _ := newTestPlaygroundRepository()

	// newParams builds playground parameters for an online-stream script;
	// code is a parameter because one subtest uses the no-results script.
	newParams := func(code, fn string, inputs map[string]interface{}) *RunPlaygroundCodeParams {
		return &RunPlaygroundCodeParams{
			Type:     extension.TypeOnlinestreamProvider,
			Language: extension.LanguageJavascript,
			Code:     code,
			Inputs:   inputs,
			Function: fn,
		}
	}

	t.Run("invalid media id", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams(onlinestreamProviderScript, "search", map[string]interface{}{"mediaId": 0.0}))
		require.Nil(t, resp)
		require.EqualError(t, err, "invalid mediaId")
	})

	t.Run("search returns the best match", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams(onlinestreamProviderScript, "search", map[string]interface{}{
			"mediaId": float64(testAnimeID),
			"dub":     true,
		}))
		require.NoError(t, err)
		require.Contains(t, resp.Logs, "stream-search:Sample Anime:true")

		var result hibikeonlinestream.SearchResult
		decodeJSON(t, resp.Value, &result)
		require.Equal(t, "Sample Anime", result.Title)
		require.Equal(t, hibikeonlinestream.Dub, result.SubOrDub)
	})

	t.Run("no results are surfaced in the response", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams(noResultsOnlinestreamProviderScript, "search", map[string]interface{}{
			"mediaId": float64(testAnimeID),
			"dub":     false,
		}))
		require.NoError(t, err)
		// An empty result set is reported in the value, not as a Go error.
		require.Contains(t, resp.Value, "ERROR:")
	})

	t.Run("episodes and episode server", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams(onlinestreamProviderScript, "findEpisodes", map[string]interface{}{
			"mediaId": float64(testAnimeID),
			"id":      "sample-anime",
		}))
		require.NoError(t, err)
		var episodes []hibikeonlinestream.EpisodeDetails
		decodeJSON(t, resp.Value, &episodes)
		require.Len(t, episodes, 1)
		require.Equal(t, "playground-extension", episodes[0].Provider)

		// Feed the first episode back into findEpisodeServer.
		episodeJSON, err := json.Marshal(episodes[0])
		require.NoError(t, err)

		resp, err = repo.RunPlaygroundCode(newParams(onlinestreamProviderScript, "findEpisodeServer", map[string]interface{}{
			"mediaId": float64(testAnimeID),
			"episode": string(episodeJSON),
			"server":  "mirror",
		}))
		require.NoError(t, err)
		var server hibikeonlinestream.EpisodeServer
		decodeJSON(t, resp.Value, &server)
		require.Equal(t, "playground-extension", server.Provider)
		require.Equal(t, "mirror", server.Server)
		require.Len(t, server.VideoSources, 1)
	})

	t.Run("unknown call", func(t *testing.T) {
		resp, err := repo.RunPlaygroundCode(newParams(onlinestreamProviderScript, "missing", map[string]interface{}{"mediaId": float64(testAnimeID)}))
		require.Nil(t, resp)
		require.EqualError(t, err, "unknown call")
	})
}
// newTestPlaygroundRepository builds a PlaygroundRepository wired to a fake
// platform (one anime, one manga) and a fake metadata provider, and returns
// both fakes so tests can assert on their call counts.
func newTestPlaygroundRepository() (*PlaygroundRepository, *testmocks.FakePlatform, *testmocks.FakeMetadataProvider) {
	logger := util.NewLogger()

	platformFake := testmocks.NewFakePlatformBuilder().
		WithAnime(testmocks.NewBaseAnime(testAnimeID, "Sample Anime")).
		WithManga(testmocks.NewBaseManga(testMangaID, "Blue Lock")).
		Build()

	// Metadata for the sample anime: one episode with an AniDB episode id
	// and an AniDB mapping, which smartSearch tests assert on.
	animeMetadata := &metadataapi.AnimeMetadata{
		Titles: map[string]string{
			"en": "Sample Anime",
		},
		Episodes: map[string]*metadataapi.EpisodeMetadata{
			"1": {
				Episode:               "1",
				EpisodeNumber:         1,
				AbsoluteEpisodeNumber: 13,
				AnidbEid:              77,
			},
		},
		Mappings: &metadataapi.AnimeMappings{AnidbId: 9001},
	}
	metadataFake := testmocks.NewFakeMetadataProviderBuilder().
		WithAnimeMetadata(testAnimeID, animeMetadata).
		Build()

	repo := NewPlaygroundRepository(
		logger,
		util.NewRef[platform.Platform](platformFake),
		util.NewRef[metadata_provider.Provider](metadataFake),
	)
	return repo, platformFake, metadataFake
}
// decodeJSON unmarshals a playground response value into target, failing the
// test immediately on any JSON error.
func decodeJSON(t *testing.T, raw string, target interface{}) {
	t.Helper()
	err := json.Unmarshal([]byte(raw), target)
	require.NoError(t, err)
}

View File

@@ -75,7 +75,7 @@ func initializeProviderBase(
providerBase.store.Bind(vm, providerBase.scheduler)
// Bind the shared bindings
ShareBinds(vm, logger, ext, wsEventManager)
goja_bindings.BindFetch(vm)
goja_bindings.BindFetch(ext.ID, vm)
gojautil.BindMutable(vm)
BindUserConfig(vm, ext, logger)
return vm

View File

@@ -6,7 +6,7 @@ import (
"reflect"
"seanime/internal/events"
"seanime/internal/extension"
goja_bindings "seanime/internal/goja/goja_bindings"
"seanime/internal/goja/goja_bindings"
"seanime/internal/goja/goja_runtime"
"seanime/internal/hook"
"seanime/internal/plugin"
@@ -164,7 +164,7 @@ func NewGojaPlugin(
p.pool, err = runtimeManager.GetOrCreatePrivatePool(ext.ID, func() *goja.Runtime {
runtime := goja.New()
ShareBinds(runtime, logger, ext, wsEventManager)
goja_bindings.BindFetch(runtime, ext.Plugin.Permissions.GetNetworkAccessAllowedDomains())
goja_bindings.BindFetch(ext.ID, runtime, ext.Plugin.Permissions.GetNetworkAccessAllowedDomains())
BindUserConfig(runtime, ext, logger)
p.BindPluginAPIs(runtime, logger)
return runtime
@@ -181,7 +181,7 @@ func NewGojaPlugin(
uiVM.SetParserOptions(parser.WithDisableSourceMaps)
// Bind shared APIs
ShareBinds(uiVM, logger, ext, wsEventManager)
goja_bindings.BindFetch(uiVM, ext.Plugin.Permissions.GetNetworkAccessAllowedDomains())
goja_bindings.BindFetch(ext.ID, uiVM, ext.Plugin.Permissions.GetNetworkAccessAllowedDomains())
BindUserConfig(uiVM, ext, logger)
// Bind the store to the UI VM
p.BindPluginAPIs(uiVM, logger)

View File

@@ -4,10 +4,10 @@ import (
"fmt"
"net/http"
"net/http/httptest"
"os"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata_provider"
"seanime/internal/continuity"
"seanime/internal/database/db"
"seanime/internal/events"
"seanime/internal/extension"
"seanime/internal/goja/goja_runtime"
@@ -19,7 +19,7 @@ import (
"seanime/internal/platforms/anilist_platform"
"seanime/internal/platforms/platform"
"seanime/internal/plugin"
"seanime/internal/test_utils"
"seanime/internal/testutil"
"seanime/internal/util"
"seanime/internal/util/filecache"
"testing"
@@ -30,13 +30,24 @@ import (
"github.com/stretchr/testify/require"
)
var (
testDocumentsDir = "/Users/rahim/Documents"
testDocumentCollectionDir = "/Users/rahim/Documents/collection"
testVideoPath = "/Users/rahim/Documents/collection/Bocchi the Rock/[ASW] Bocchi the Rock! - 01 [1080p HEVC][EDC91675].mkv"
type pluginTestPaths struct {
DocumentsDir string
CollectionDir string
}
tempTestDir = "$TEMP/test"
)
func newPluginTestPaths(t testing.TB) pluginTestPaths {
t.Helper()
env := testutil.NewTestEnv(t)
documentsDir := env.MustMkdir("Documents")
collectionDir := env.MustMkdir("Documents", "collection")
env.MustWriteFixtureFile("/Documents/collection/fixture.txt", []byte("fixture"))
return pluginTestPaths{
DocumentsDir: documentsDir,
CollectionDir: collectionDir,
}
}
// TestPluginOptions contains options for initializing a test plugin
type TestPluginOptions struct {
@@ -62,11 +73,9 @@ func DefaultTestPluginOptions() TestPluginOptions {
// InitTestPlugin initializes a test plugin with the given options
func InitTestPlugin(t testing.TB, opts TestPluginOptions) (*GojaPlugin, *zerolog.Logger, *goja_runtime.Manager, *anilist_platform.AnilistPlatform, events.WSEventManagerInterface, error) {
env := testutil.NewTestEnv(t)
if opts.SetupHooks {
test_utils.SetTwoLevelDeep()
if tPtr, ok := t.(*testing.T); ok {
test_utils.InitTestProvider(tPtr, test_utils.Anilist())
}
env = testutil.NewTestEnv(t, testutil.Anilist())
}
ext := &extension.Extension{
@@ -85,10 +94,9 @@ func InitTestPlugin(t testing.TB, opts TestPluginOptions) (*GojaPlugin, *zerolog
ext.Plugin.Permissions.Allow = opts.Permissions.Allow
logger := util.NewLogger()
database, err := db.NewDatabase(test_utils.ConfigData.Path.DataDir, test_utils.ConfigData.Database.Name, logger)
require.NoError(t, err)
database := env.MustNewDatabase(logger)
wsEventManager := events.NewMockWSEventManager(logger)
anilistClientRef := util.NewRef[anilist.AnilistClient](anilist.NewMockAnilistClient())
anilistClientRef := util.NewRef[anilist.AnilistClient](anilist.NewFixtureAnilistClient())
extensionBankRef := util.NewRef(extension.NewUnifiedBank())
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClientRef, extensionBankRef, logger, database).(*anilist_platform.AnilistPlatform)
anilistPlatformRef := util.NewRef[platform.Platform](anilistPlatform)
@@ -142,7 +150,6 @@ func TestGojaPluginAnime(t *testing.T) {
})
}
`
opts := DefaultTestPluginOptions()
opts.Payload = payload
opts.Permissions = extension.PluginPermissions{
@@ -151,9 +158,9 @@ func TestGojaPluginAnime(t *testing.T) {
extension.PluginPermissionDatabase,
},
}
env := testutil.NewTestEnv(t, testutil.Anilist())
logger := util.NewLogger()
database, err := db.NewDatabase(test_utils.ConfigData.Path.DataDir, test_utils.ConfigData.Database.Name, logger)
require.NoError(t, err)
database := env.MustNewDatabase(logger)
metadataProvider := metadata_provider.NewProvider(&metadata_provider.NewProviderImplOptions{
Logger: logger,
@@ -185,6 +192,9 @@ func TestGojaPluginAnime(t *testing.T) {
/////////////////////////////////////////////////////////////////////////////////////////////
func TestGojaPluginMpv(t *testing.T) {
testutil.InitTestProvider(t, testutil.MediaPlayer(), testutil.Live())
sampleVideoPath := testutil.RequireSampleVideoPath(t)
payload := fmt.Sprintf(`
function init() {
@@ -213,7 +223,7 @@ function init() {
});
}
`, testVideoPath)
`, sampleVideoPath)
playbackManager, _, err := getPlaybackManager(t)
require.NoError(t, err)
@@ -243,18 +253,18 @@ function init() {
// Test that the plugin cannot access paths that are not allowed
// $os.readDir should throw an error
func TestGojaPluginPathNotAllowed(t *testing.T) {
paths := newPluginTestPaths(t)
homeDir, err := os.UserHomeDir()
require.NoError(t, err)
payload := fmt.Sprintf(`
function init() {
$ui.register((ctx) => {
const tempDir = $os.tempDir();
console.log("Temp dir", tempDir);
const dirPath = "%s";
const entries = $os.readDir(dirPath);
const dirPath = %q;
$os.readDir(dirPath);
});
}
`, testDocumentCollectionDir)
`, homeDir)
opts := DefaultTestPluginOptions()
opts.Payload = payload
@@ -263,7 +273,7 @@ function init() {
extension.PluginPermissionSystem,
},
Allow: extension.PluginAllowlist{
ReadPaths: []string{"$TEMP/*", testDocumentsDir},
ReadPaths: []string{paths.DocumentsDir},
WritePaths: []string{"$TEMP/*"},
},
}
@@ -279,6 +289,9 @@ function init() {
// Test that the plugin can play a video and listen to events
func TestGojaPluginPlaybackEvents(t *testing.T) {
testutil.InitTestProvider(t, testutil.MediaPlayer(), testutil.Live())
sampleVideoPath := testutil.RequireSampleVideoPath(t)
payload := fmt.Sprintf(`
function init() {
@@ -298,7 +311,7 @@ function init() {
});
}
`, testVideoPath)
`, sampleVideoPath)
playbackManager, _, err := getPlaybackManager(t)
require.NoError(t, err)
@@ -801,24 +814,21 @@ func TestGojaPluginStorage2(t *testing.T) {
/////////////////////////////////////////////////////////////////////////////////////////////s
func getPlaybackManager(t *testing.T) (*playbackmanager.PlaybackManager, *anilist.AnimeCollection, error) {
env := testutil.NewTestEnv(t)
logger := util.NewLogger()
wsEventManager := events.NewMockWSEventManager(logger)
database, err := db.NewDatabase(test_utils.ConfigData.Path.DataDir, test_utils.ConfigData.Database.Name, logger)
if err != nil {
t.Fatalf("error while creating database, %v", err)
}
database := env.MustNewDatabase(logger)
filecacher, err := filecache.NewCacher(t.TempDir())
require.NoError(t, err)
anilistClient := anilist.TestGetMockAnilistClient()
anilistClient := anilist.NewTestAnilistClient()
anilistClientRef := util.NewRef(anilistClient)
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClientRef, util.NewRef(extension.NewUnifiedBank()), logger, database)
animeCollection, err := anilistPlatform.GetAnimeCollection(t.Context(), true)
metadataProvider := metadata_provider.GetFakeProvider(t, database)
metadataProvider := metadata_provider.NewTestProvider(t, database)
require.NoError(t, err)
continuityManager := continuity.NewManager(&continuity.NewManagerOptions{
FileCacher: filecacher,

View File

@@ -1508,6 +1508,10 @@ declare namespace $ui {
onResize(cb: (size: { width: number, height: number }) => void): () => void
}
clipboard: {
write(text: string): void
}
}
interface Notification {

View File

@@ -1,75 +0,0 @@
package extension_repo_test
//
//import (
//)
//
//// Tests the external manga provider extension loaded from the extension directory.
//// This will load the extensions from ./testdir
//func TestExternalGoMangaExtension(t *testing.T) {
//
// repo := getRepo(t)
//
// // Load all extensions
// // This should load all the extensions in the directory
// repo.ReloadExternalExtensions()
//
// ext, found := repo.GetMangaProviderExtensionByID("mangapill-external")
// require.True(t, found)
//
// t.Logf("\nExtension:\n\tID: %s \n\tName: %s", ext.GetID(), ext.GetName())
//
// // Test the extension
// so := hibikemanga.SearchOptions{
// Query: "Dandadan",
// }
//
// searchResults, err := ext.GetProvider().Search(so)
// require.NoError(t, err)
// require.GreaterOrEqual(t, len(searchResults), 1)
//
// chapters, err := ext.GetProvider().FindChapters(searchResults[0].ID)
// require.NoError(t, err)
// require.GreaterOrEqual(t, len(chapters), 1)
//
// spew.Dump(chapters[0])
//
//}
//
//// Tests the built-in manga provider extension
//func TestBuiltinMangaExtension(t *testing.T) {
//
// logger := util.NewLogger()
// repo := getRepo(t)
//
// // Load all extensions
// // This should load all the extensions in the directory
// repo.ReloadBuiltInExtension(extension.Extension{
// ID: "seanime-builtin-mangapill",
// Type: "manga-provider",
// Name: "Mangapill",
// Version: "0.0.0",
// Language: "go",
// ManifestURI: "",
// Description: "",
// Author: "",
// Payload: "",
// }, manga_providers.NewMangapill(logger))
//
// ext, found := repo.GetMangaProviderExtensionByID("seanime-builtin-mangapill")
// require.True(t, found)
//
// t.Logf("\nExtension:\n\tID: %s \n\tName: %s", ext.GetID(), ext.GetName())
//
// // Test the extension
// so := hibikemanga.SearchOptions{
// Query: "Dandadan",
// }
//
// searchResults, err := ext.GetProvider().Search(so)
// require.NoError(t, err)
//
// spew.Dump(searchResults)
//
//}

View File

@@ -1,85 +0,0 @@
package mediaplayer_testdir
//import (
// "fmt"
// "strings"
//
// hibikemediaplayer "seanime/internal/extension/hibike/mediaplayer"
//)
//
//type (
// // MobilePlayer is an extension that sends media links the mobile device's media player.
// MobilePlayer struct {
// config mobilePlayerConfig
// }
//
// mobilePlayerConfig struct {
// iosPlayer string
// androidPlayer string
// }
//)
//
//func NewMediaPlayer() hibikemediaplayer.MediaPlayer {
// return &MobilePlayer{}
//}
//
//func (m *MobilePlayer) InitConfig(config map[string]interface{}) {
// iosPlayer, _ := config["iosPlayer"].(string)
// androidPlayer, _ := config["androidPlayer"].(string)
//
// m.config = mobilePlayerConfig{
// iosPlayer: iosPlayer,
// androidPlayer: androidPlayer,
// }
//}
//
//func (m *MobilePlayer) GetSettings() hibikemediaplayer.Settings {
// return hibikemediaplayer.Settings{
// CanTrackProgress: false,
// }
//}
//
//func (m *MobilePlayer) Play(req hibikemediaplayer.PlayRequest) (*hibikemediaplayer.PlayResponse, error) {
// return m.getPlayResponse(req)
//}
//
//func (m *MobilePlayer) Stream(req hibikemediaplayer.PlayRequest) (*hibikemediaplayer.PlayResponse, error) {
// return m.getPlayResponse(req)
//}
//
//func (m *MobilePlayer) getPlayResponse(req hibikemediaplayer.PlayRequest) (*hibikemediaplayer.PlayResponse, error) {
// var url string
// if req.ClientInfo.Platform == "ios" {
// // Play on iOS
// switch m.config.iosPlayer {
// case "outplayer":
// url = getOutplayerUrl(req.Path)
// }
// }
//
// if url == "" {
// return nil, fmt.Errorf("no player found for platform %s", req.ClientInfo.Platform)
// }
//
// return &hibikemediaplayer.PlayResponse{
// OpenURL: url,
// }, nil
//}
//
//func getOutplayerUrl(url string) (ret string) {
// ret = strings.Replace(url, "http://", "outplayer://", 1)
// ret = strings.Replace(ret, "https://", "outplayer://", 1)
// return
//}
//
//func (m *MobilePlayer) GetPlaybackStatus() (*hibikemediaplayer.PlaybackStatus, error) {
// return nil, fmt.Errorf("not implemented")
//}
//
//func (m *MobilePlayer) Start() error {
// return nil
//}
//
//func (m *MobilePlayer) Stop() error {
// return nil
//}

View File

@@ -1,46 +0,0 @@
{
"id": "mobileplayer",
"name": "MobilePlayer",
"description": "",
"version": "0.0.1",
"type": "mediaplayer",
"manifestURI": "",
"language": "go",
"author": "Seanime",
"config": {
"requiresConfig": true,
"fields": [
{
"type": "select",
"label": "iOS Player",
"name": "iosPlayer",
"options": [
{
"label": "Outplayer",
"value": "outplayer"
},
{
"label": "VLC",
"value": "vlc"
}
]
},
{
"type": "select",
"label": "Android Player",
"name": "androidPlayer",
"options": [
{
"label": "VLC",
"value": "vlc"
},
{
"label": "MX Player",
"value": "mxplayer"
}
]
}
]
},
"payload": ""
}

View File

@@ -1,21 +0,0 @@
package extension_repo
import (
"seanime/internal/events"
"seanime/internal/util"
"seanime/internal/util/filecache"
"testing"
)
func GetMockExtensionRepository(t *testing.T) *Repository {
logger := util.NewLogger()
filecacher, _ := filecache.NewCacher(t.TempDir())
extensionRepository := NewRepository(&NewRepositoryOptions{
Logger: logger,
ExtensionDir: t.TempDir(),
WSEventManager: events.NewMockWSEventManager(logger),
FileCacher: filecacher,
})
return extensionRepository
}

View File

@@ -1,19 +0,0 @@
package extension_repo_test
import (
"seanime/internal/events"
"seanime/internal/extension_repo"
"seanime/internal/util"
"testing"
)
func getRepo(t *testing.T) *extension_repo.Repository {
logger := util.NewLogger()
wsEventManager := events.NewMockWSEventManager(logger)
return extension_repo.NewRepository(&extension_repo.NewRepositoryOptions{
Logger: logger,
ExtensionDir: "testdir",
WSEventManager: wsEventManager,
})
}

View File

@@ -1,113 +1,246 @@
package goja_bindings
import (
"seanime/internal/util"
"fmt"
"net/http"
"net/http/httptest"
gojautil "seanime/internal/util/goja"
"sync"
"sync/atomic"
"testing"
"time"
"github.com/dop251/goja"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestAbortContext(t *testing.T) {
vm := goja.New()
BindAbortContext(vm, gojautil.NewScheduler())
func TestAbortContextStateAndReason(t *testing.T) {
t.Run("default abort state and reason", func(t *testing.T) {
vm, _ := newAbortTestRuntime(t)
t.Run("AbortContext basic functionality", func(t *testing.T) {
script := `
const controller = new AbortContext();
const signal = controller.signal;
let aborted = signal.aborted;
controller.abort();
({
initialAborted: aborted,
finalAborted: signal.aborted
})
`
val, err := vm.RunString(`
(() => {
const controller = new AbortContext();
const signal = controller.signal;
const initialAborted = signal.aborted;
val, err := vm.RunString(script)
assert.NoError(t, err)
controller.abort();
const defaultReason = String(signal.reason);
controller.abort("ignored");
return {
initialAborted,
finalAborted: signal.aborted,
defaultReason,
finalReason: String(signal.reason),
};
})()
`)
require.NoError(t, err)
obj := val.ToObject(vm)
util.Spew(obj.Export())
initialAborted := obj.Get("initialAborted").ToBoolean()
finalAborted := obj.Get("finalAborted").ToBoolean()
assert.False(t, initialAborted, "Signal should not be aborted initially")
assert.True(t, finalAborted, "Signal should be aborted after controller.abort()")
require.False(t, obj.Get("initialAborted").ToBoolean())
require.True(t, obj.Get("finalAborted").ToBoolean())
require.Contains(t, obj.Get("defaultReason").String(), "context canceled")
require.Equal(t, obj.Get("defaultReason").String(), obj.Get("finalReason").String())
})
//t.Run("AbortSignal event listener", func(t *testing.T) {
// script := `
// const controller = new AbortContext();
// const signal = controller.signal;
//
// let eventFired = false;
// signal.addEventListener('abort', () => {
// eventFired = true;
// });
//
// controller.abort();
//
// eventFired
// `
//
// val, err := vm.RunString(script)
// require.NoError(t, err)
// assert.True(t, val.ToBoolean(), "Abort event should fire")
//})
t.Run("custom abort reason", func(t *testing.T) {
vm, _ := newAbortTestRuntime(t)
t.Run("AbortSignal with reason", func(t *testing.T) {
script := `
const controller = new AbortContext();
const signal = controller.signal;
controller.abort('Custom reason');
signal.reason
`
val, err := vm.RunString(`
(() => {
const controller = new AbortContext();
controller.abort("Custom reason");
return {
aborted: controller.signal.aborted,
reason: controller.signal.reason,
};
})()
`)
require.NoError(t, err)
val, err := vm.RunString(script)
assert.NoError(t, err)
assert.Equal(t, "Custom reason", val.String())
obj := val.ToObject(vm)
require.True(t, obj.Get("aborted").ToBoolean())
require.Equal(t, "Custom reason", obj.Get("reason").String())
})
}
// TestAbortContextAbortListeners covers the listener semantics of the
// AbortContext binding: a listener registered before abort fires exactly once
// with the first abort reason, and a listener registered after an abort still
// fires asynchronously with the stored reason.
func TestAbortContextAbortListeners(t *testing.T) {
	t.Run("listener fires once when registered before abort", func(t *testing.T) {
		vm, _ := newAbortTestRuntime(t)
		// count tracks how many times the JS listener invoked the Go callback;
		// reasons receives the abort reason the listener observed.
		var count atomic.Int32
		reasons := make(chan string, 1)
		vm.Set("recordAbort", func(reason string) {
			count.Add(1)
			reasons <- reason
		})
		// Abort twice: only the first abort should notify the listener.
		_, err := vm.RunString(`
			(() => {
				const controller = new AbortContext();
				controller.signal.addEventListener("abort", () => {
					recordAbort(String(controller.signal.reason));
				});
				controller.abort("first");
				controller.abort("second");
			})();
		`)
		require.NoError(t, err)
		// The listener must observe the reason of the first abort only.
		select {
		case reason := <-reasons:
			require.Equal(t, "first", reason)
		case <-time.After(time.Second):
			t.Fatal("abort listener was not called")
		}
		require.Eventually(t, func() bool {
			return count.Load() == 1
		}, time.Second, 10*time.Millisecond)
		// Give the runtime a moment to (incorrectly) fire again, then confirm
		// the listener really ran exactly once.
		time.Sleep(50 * time.Millisecond)
		require.Equal(t, int32(1), count.Load())
	})
	t.Run("listener added after abort fires asynchronously", func(t *testing.T) {
		vm, _ := newAbortTestRuntime(t)
		reasons := make(chan string, 1)
		vm.Set("recordAbort", func(reason string) {
			reasons <- reason
		})
		// Register the listener only after the controller has aborted; it must
		// still be invoked with the recorded reason.
		_, err := vm.RunString(`
			(() => {
				const controller = new AbortContext();
				controller.abort("late-reason");
				controller.signal.addEventListener("abort", () => {
					recordAbort(String(controller.signal.reason));
				});
			})();
		`)
		require.NoError(t, err)
		select {
		case reason := <-reasons:
			require.Equal(t, "late-reason", reason)
		case <-time.After(time.Second):
			t.Fatal("late abort listener was not called")
		}
	})
}
func TestAbortContextWithFetch(t *testing.T) {
vm := goja.New()
BindAbortContext(vm, gojautil.NewScheduler())
fetch := BindFetch(vm)
defer fetch.Close()
t.Run("already aborted signal rejects before request starts", func(t *testing.T) {
vm, _ := newAbortTestRuntime(t)
// Start the response channel handler
go func() {
for fn := range fetch.ResponseChannel() {
fn()
var requestCount atomic.Int32
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
requestCount.Add(1)
w.WriteHeader(http.StatusOK)
}))
defer server.Close()
fetch := BindFetch(vm, []string{"*"})
defer fetch.Close()
val, err := vm.RunString(fmt.Sprintf(`
(() => {
const controller = new AbortContext();
controller.abort("request aborted");
return fetch(%q, {
signal: controller.signal,
});
})()
`, server.URL))
require.NoError(t, err)
promise := requirePromise(t, val)
waitForPromiseState(t, promise, goja.PromiseStateRejected)
require.Equal(t, "request aborted", promise.Result().Export())
require.Equal(t, int32(0), requestCount.Load())
})
t.Run("in-flight request is canceled through signal context", func(t *testing.T) {
vm, _ := newAbortTestRuntime(t)
started := make(chan struct{})
canceled := make(chan struct{})
var startedOnce sync.Once
var canceledOnce sync.Once
server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
startedOnce.Do(func() { close(started) })
select {
case <-r.Context().Done():
canceledOnce.Do(func() { close(canceled) })
case <-time.After(2 * time.Second):
w.WriteHeader(http.StatusGatewayTimeout)
}
}))
defer server.Close()
fetch := BindFetch(vm, []string{"*"})
defer fetch.Close()
val, err := vm.RunString(fmt.Sprintf(`
(() => {
globalThis.controller = new AbortContext();
return fetch(%q, {
signal: controller.signal,
});
})()
`, server.URL))
require.NoError(t, err)
promise := requirePromise(t, val)
select {
case <-started:
case <-time.After(time.Second):
t.Fatal("request never reached test server")
}
}()
t.Run("Abort fetch immediately", func(t *testing.T) {
script := `
const controller = new AbortContext();
controller.abort();
fetch('https://api.github.com/users/github', {
signal: controller.signal
})
`
_, err = vm.RunString(`controller.abort("stop-now")`)
require.NoError(t, err)
val, err := vm.RunString(script)
assert.NoError(t, err)
select {
case <-canceled:
case <-time.After(2 * time.Second):
t.Fatal("request context was not canceled")
}
promise, ok := val.Export().(*goja.Promise)
assert.True(t, ok, "fetch should return a promise")
time.Sleep(100 * time.Millisecond)
// Promise should be rejected
assert.Equal(t, goja.PromiseStateRejected, promise.State())
waitForPromiseState(t, promise, goja.PromiseStateRejected)
require.Contains(t, promise.Result().String(), "canceled")
})
}
// newAbortTestRuntime builds a fresh goja runtime with the AbortContext
// binding attached to its own scheduler. The scheduler is stopped
// automatically when the test (or subtest) finishes.
func newAbortTestRuntime(t *testing.T) (*goja.Runtime, *gojautil.Scheduler) {
	t.Helper()

	runtime := goja.New()
	sched := gojautil.NewScheduler()
	t.Cleanup(sched.Stop)
	BindAbortContext(runtime, sched)

	return runtime, sched
}
// requirePromise asserts that the given JS value exports to a *goja.Promise
// and returns it, failing the test immediately otherwise.
func requirePromise(t *testing.T, value goja.Value) *goja.Promise {
	t.Helper()

	exported := value.Export()
	promise, isPromise := exported.(*goja.Promise)
	require.True(t, isPromise, "value should export to a promise")
	return promise
}
// waitForPromiseState polls the promise until it reaches the expected state,
// failing the test if that does not happen within two seconds.
func waitForPromiseState(t *testing.T, promise *goja.Promise, expected goja.PromiseState) {
	t.Helper()

	reached := func() bool { return promise.State() == expected }
	require.Eventually(t, reached, 2*time.Second, 10*time.Millisecond)
}

View File

@@ -4,6 +4,7 @@ import (
"fmt"
"net/http"
"net/http/httptest"
"seanime/internal/testutil"
"seanime/internal/util"
"testing"
"time"
@@ -14,6 +15,9 @@ import (
)
func setupChromeVMWithServer(t *testing.T) (*goja.Runtime, *ChromeDP, *httptest.Server) {
t.Helper()
testutil.InitTestProvider(t, testutil.Live())
vm := goja.New()
chrome := BindChromeDP(vm)

View File

@@ -24,7 +24,7 @@ func setupTestVM(t *testing.T) *goja.Runtime {
logger := util.NewLogger()
ext := &extension.Extension{}
extension_repo.ShareBinds(vm, util.NewLogger(), ext, events.NewMockWSEventManager(logger))
goja_bindings.BindFetch(vm)
goja_bindings.BindFetch("test", vm)
fm := extension_repo.FieldMapper{}
vm.SetFieldNameMapper(fm)
return vm
@@ -100,7 +100,7 @@ func TestUserConfig(t *testing.T) {
},
}
extension_repo.ShareBinds(vm, util.NewLogger(), ext, events.NewMockWSEventManager(logger))
goja_bindings.BindFetch(vm)
goja_bindings.BindFetch("test", vm)
extension_repo.BindUserConfig(vm, ext, util.NewLogger())
vm.RunString(`
@@ -125,7 +125,7 @@ func TestByteSliceToUint8Array(t *testing.T) {
ext := &extension.Extension{}
extension_repo.ShareBinds(vm, util.NewLogger(), ext, events.NewMockWSEventManager(logger))
goja_bindings.BindFetch(vm)
goja_bindings.BindFetch("test", vm)
// JavaScript code to verify the type and contents of 'data'
jsCode := `
@@ -158,9 +158,6 @@ func TestByteSliceToUint8Array(t *testing.T) {
}
func TestGojaDocument(t *testing.T) {
vm := setupTestVM(t)
defer vm.ClearInterrupt()
tests := []struct {
entry string
}{
@@ -170,6 +167,9 @@ func TestGojaDocument(t *testing.T) {
for _, tt := range tests {
t.Run(tt.entry, func(t *testing.T) {
vm := setupTestVM(t)
defer vm.ClearInterrupt()
fileB, err := os.ReadFile(tt.entry)
require.NoError(t, err)

View File

@@ -1,9 +1,11 @@
package goja_bindings
import (
"seanime/internal/util"
"crypto/aes"
"crypto/cipher"
"crypto/sha256"
"encoding/base64"
"testing"
"time"
"github.com/dop251/goja"
gojabuffer "github.com/dop251/goja_nodejs/buffer"
@@ -11,184 +13,275 @@ import (
"github.com/stretchr/testify/require"
)
func TestGojaCrypto(t *testing.T) {
// TestGojaCryptoEncoders verifies the text-encoding surface bound by
// BindCrypto: Node-style Buffer base64 conversion plus round-trips through
// every CryptoJS.enc codec (Base64, Latin1, Hex, Utf8, Utf16, Utf16LE).
// The Utf8 case uses a non-BMP character to exercise surrogate handling.
func TestGojaCryptoEncoders(t *testing.T) {
	vm := newCryptoTestVM(t)
	// Evaluate every round-trip in one script; each field of the returned
	// object is asserted individually below.
	val, err := vm.RunString(`
		(() => ({
			bufferBase64: Buffer.from("Hello, this is a string to encode!").toString("base64"),
			bufferDecoded: Buffer.from("SGVsbG8sIHRoaXMgaXMgYSBzdHJpbmcgdG8gZW5jb2RlIQ==", "base64").toString("utf-8"),
			base64RoundTrip: CryptoJS.enc.Utf8.stringify(
				CryptoJS.enc.Base64.parse(
					CryptoJS.enc.Base64.stringify(CryptoJS.enc.Utf8.parse("Hello, World!"))
				)
			),
			latin1RoundTrip: CryptoJS.enc.Latin1.stringify(CryptoJS.enc.Latin1.parse("Hello, World!")),
			hexRoundTrip: CryptoJS.enc.Hex.stringify(CryptoJS.enc.Hex.parse("48656c6c6f2c20576f726c6421")),
			utf8RoundTrip: CryptoJS.enc.Utf8.stringify(CryptoJS.enc.Utf8.parse("𔭢")),
			utf16RoundTrip: CryptoJS.enc.Utf16.stringify(CryptoJS.enc.Utf16.parse("Hello, World!")),
			utf16LERoundTrip: CryptoJS.enc.Utf16LE.stringify(CryptoJS.enc.Utf16LE.parse("Hello, World!")),
		}))()
	`)
	require.NoError(t, err)
	obj := val.ToObject(vm)
	require.Equal(t, "SGVsbG8sIHRoaXMgaXMgYSBzdHJpbmcgdG8gZW5jb2RlIQ==", obj.Get("bufferBase64").String())
	require.Equal(t, "Hello, this is a string to encode!", obj.Get("bufferDecoded").String())
	require.Equal(t, "Hello, World!", obj.Get("base64RoundTrip").String())
	require.Equal(t, "Hello, World!", obj.Get("latin1RoundTrip").String())
	require.Equal(t, "48656c6c6f2c20576f726c6421", obj.Get("hexRoundTrip").String())
	require.Equal(t, "𔭢", obj.Get("utf8RoundTrip").String())
	require.Equal(t, "Hello, World!", obj.Get("utf16RoundTrip").String())
	require.Equal(t, "Hello, World!", obj.Get("utf16LERoundTrip").String())
}
// TestGojaCryptoAES exercises the CryptoJS.AES encrypt/decrypt binding:
// round-trips with a random IV, deterministic ciphertext when a fixed IV is
// supplied (cross-checked against a pure-Go AES-CBC reference), and the
// error raised for an IV whose length differs from the AES block size.
func TestGojaCryptoAES(t *testing.T) {
	t.Run("random iv round trip", func(t *testing.T) {
		vm := newCryptoTestVM(t)
		val, err := vm.RunString(`
			(() => {
				const message = "seanime";
				const key = CryptoJS.enc.Utf8.parse("secret key");
				const encrypted = CryptoJS.AES.encrypt(message, key);
				return {
					ciphertext: encrypted.toString(),
					decrypted: CryptoJS.AES.decrypt(encrypted, key).toString(CryptoJS.enc.Utf8),
				};
			})()
		`)
		require.NoError(t, err)
		obj := val.ToObject(vm)
		ciphertext := obj.Get("ciphertext").String()
		// The ciphertext must be valid base64; 32 raw bytes = two AES blocks
		// for the 7-byte message after PKCS#7 padding plus the IV handling.
		decoded, err := base64.StdEncoding.DecodeString(ciphertext)
		require.NoError(t, err)
		require.Len(t, decoded, 32)
		require.Equal(t, "seanime", obj.Get("decrypted").String())
	})
	t.Run("fixed iv ciphertext is deterministic", func(t *testing.T) {
		vm := newCryptoTestVM(t)
		// Same message/key/IV as the script below, used to compute the
		// expected ciphertext with the Go reference implementation.
		message := "seanime"
		key := []byte("secret key")
		iv := []byte("3134003223491201")
		val, err := vm.RunString(`
			(() => {
				const message = "seanime";
				const key = CryptoJS.enc.Utf8.parse("secret key");
				const iv = CryptoJS.enc.Utf8.parse("3134003223491201");
				const encrypted = CryptoJS.AES.encrypt(message, key, { iv });
				return {
					ciphertext: encrypted.toString(),
					ciphertextBase64: encrypted.toString(CryptoJS.enc.Base64),
					decryptedWithIV: CryptoJS.AES.decrypt(encrypted, key, { iv }).toString(CryptoJS.enc.Utf8),
					decryptedWithoutIV: CryptoJS.AES.decrypt(encrypted, key).toString(CryptoJS.enc.Utf8),
				};
			})()
		`)
		require.NoError(t, err)
		obj := val.ToObject(vm)
		expectedCiphertext := expectedAESCiphertext(message, key, iv)
		require.Equal(t, expectedCiphertext, obj.Get("ciphertext").String())
		require.Equal(t, expectedCiphertext, obj.Get("ciphertextBase64").String())
		require.Equal(t, message, obj.Get("decryptedWithIV").String())
		// Decrypting without the IV must not recover the plaintext.
		require.Empty(t, obj.Get("decryptedWithoutIV").String())
	})
	t.Run("invalid iv length returns an error string", func(t *testing.T) {
		vm := newCryptoTestVM(t)
		val, err := vm.RunString(`
			(() => {
				try {
					CryptoJS.AES.encrypt("seanime", CryptoJS.enc.Utf8.parse("secret key"), {
						iv: CryptoJS.enc.Utf8.parse("short"),
					});
					return "unexpected success";
				} catch (e) {
					return String(e);
				}
			})()
		`)
		require.NoError(t, err)
		require.Contains(t, val.String(), "IV length must be equal to block size")
	})
}
// TestGojaCryptoOpenSSLDecrypt checks that CryptoJS.AES.decrypt handles an
// OpenSSL-style "Salted__" base64 payload with a passphrase (key derivation
// from the string key), decrypting a real-world sample to a known JSON blob.
func TestGojaCryptoOpenSSLDecrypt(t *testing.T) {
	vm := newCryptoTestVM(t)
	val, err := vm.RunString(`
		(() => {
			const payload = "U2FsdGVkX19ZanX9W5jQGgNGOIOBGxhY6gxa1EHnRi3yHL8Ml4cMmQeryf9p04N12VuOjiBas21AcU0Ypc4dB4AWOdc9Cn1wdA2DuQhryUonKYHwV/XXJ53DBn1OIqAvrIAxrN8S2j9Rk5z/F/peu1Kk/d3m82jiKvhTWQcxDeDW8UzCMZbbFnm4qJC3k19+PD5Pal5sBcVTGRXNCpvSSpYb56FcP9Xs+3DyBWhNUqJuO+Wwm3G1J5HhklxCWZ7tcn7TE5Y8d5ORND7t51Padrw4LgEOootqHtfHuBVX6EqlvJslXt0kFgcXJUIO+hw0q5SJ+tiS7o/2OShJ7BCk4XzfQmhFJdBJYGjQ8WPMHYzLuMzDkf6zk2+m7YQtUTXx8SVoLXFOt8gNZeD942snGrWA5+CdYveOfJ8Yv7owoOueMzzYqr5rzG7GVapVI0HzrA24LR4AjRDICqTsJEy6Yg==";
			const key = "6315b93606d60f48c964b67b14701f3848ef25af01296cf7e6a98c9460e1d2ac";
			return CryptoJS.AES.decrypt(payload, key).toString(CryptoJS.enc.Utf8);
		})()
	`)
	require.NoError(t, err)
	require.Equal(t, `[{"file":"https://cloudburst82.xyz/_v7/b39c8e03ac287e819418f1ad0644d7c0f506c2def541ec36e8253cd39f36c15ab46274b0ce5189dc51b2b970efa7b3abd9c70f52b02839d47a75863596d321a0b9c8b0370f96fa253d059244713458951d6c965d17a36ce87d4e2844d4665b7b658acd2318d5f8730643d893d2e1577307c767157b45abf64588a76b0cd8c1d2/master.m3u8","type":"hls"}]`, val.String())
}
// TestGojaCryptoErrorPaths verifies that invalid use of the AES binding from
// JS raises errors with the expected messages: too few arguments to
// encrypt/decrypt, and passing a non-encoder value to WordArray.toString.
// A single VM is reused for all cases since each script is self-contained.
func TestGojaCryptoErrorPaths(t *testing.T) {
	vm := newCryptoTestVM(t)
	tests := []struct {
		name   string // subtest name
		script string // JS snippet that captures the thrown error as a string
		want   string // substring expected in the captured error
	}{
		{
			name: "encrypt requires two arguments",
			script: `
				(() => {
					try {
						CryptoJS.AES.encrypt("only-message");
						return "unexpected success";
					} catch (e) {
						return String(e);
					}
				})()
			`,
			want: "AES.encrypt requires at least 2 arguments",
		},
		{
			name: "decrypt requires two arguments",
			script: `
				(() => {
					try {
						CryptoJS.AES.decrypt("ciphertext-only");
						return "unexpected success";
					} catch (e) {
						return String(e);
					}
				})()
			`,
			want: "AES.decrypt requires at least 2 arguments",
		},
		{
			name: "word array rejects invalid encoder",
			script: `
				(() => {
					try {
						CryptoJS.AES.encrypt("seanime", CryptoJS.enc.Utf8.parse("secret key")).toString("bad");
						return "unexpected success";
					} catch (e) {
						return String(e);
					}
				})()
			`,
			want: "encoder parameter must be a CryptoJS.enc object",
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			val, err := vm.RunString(tt.script)
			require.NoError(t, err)
			require.Contains(t, val.String(), tt.want)
		})
	}
}
// TestGojaCryptoHelperCoverage exercises the unexported Go helpers behind the
// crypto binding directly: key-length normalization, lenient parser fallbacks
// for malformed input, and the encoder wrappers' handling of undefined or
// wrongly-typed arguments.
func TestGojaCryptoHelperCoverage(t *testing.T) {
	vm := goja.New()
	defer vm.ClearInterrupt()
	t.Run("adjust key length preserves valid sizes and hashes invalid sizes", func(t *testing.T) {
		// 16/24/32-byte keys are valid AES key sizes and must pass through
		// unchanged; any other length is hashed to a 32-byte key.
		key16 := []byte("1234567890abcdef")
		key24 := []byte("1234567890abcdefghijklmn")
		key32 := []byte("1234567890abcdefghijklmnopqrstuv")
		shortKey := []byte("short")
		require.Equal(t, key16, adjustKeyLength(key16))
		require.Equal(t, key24, adjustKeyLength(key24))
		require.Equal(t, key32, adjustKeyLength(key32))
		require.Len(t, adjustKeyLength(shortKey), 32)
		require.NotEqual(t, shortKey, adjustKeyLength(shortKey))
	})
	t.Run("low-level parser fallbacks", func(t *testing.T) {
		// Malformed input must not panic: parsers return nil, stringifiers
		// return an empty string.
		require.Nil(t, base64Parse("%%%"))
		require.Nil(t, hexParse("xyz"))
		require.Empty(t, utf16Stringify([]byte{0x00}))
		require.Empty(t, utf16LEStringify([]byte{0x00}))
	})
	t.Run("encoder wrappers handle undefined and wrong types", func(t *testing.T) {
		// Every enc.*.parse wrapper must return "" when given undefined.
		parseFns := []func(goja.FunctionCall) goja.Value{
			cryptoEncUtf8ParseFunc(vm),
			cryptoEncBase64ParseFunc(vm),
			cryptoEncHexParseFunc(vm),
			cryptoEncLatin1ParseFunc(vm),
			cryptoEncUtf16ParseFunc(vm),
			cryptoEncUtf16LEParseFunc(vm),
		}
		for _, parseFn := range parseFns {
			ret := parseFn(goja.FunctionCall{Arguments: []goja.Value{goja.Undefined()}})
			require.Equal(t, "", ret.String())
		}
		// Every enc.*.stringify wrapper must return "" for a non-byte value.
		stringifyFns := []func(goja.FunctionCall) goja.Value{
			cryptoEncUtf8StringifyFunc(vm),
			cryptoEncBase64StringifyFunc(vm),
			cryptoEncHexStringifyFunc(vm),
			cryptoEncLatin1StringifyFunc(vm),
			cryptoEncUtf16StringifyFunc(vm),
			cryptoEncUtf16LEStringifyFunc(vm),
		}
		for _, stringifyFn := range stringifyFns {
			ret := stringifyFn(goja.FunctionCall{Arguments: []goja.Value{vm.ToValue("not-bytes")}})
			require.Equal(t, "", ret.String())
		}
	})
}
func newCryptoTestVM(t *testing.T) *goja.Runtime {
t.Helper()
vm := goja.New()
t.Cleanup(vm.ClearInterrupt)
registry := new(gojarequire.Registry)
registry.Enable(vm)
gojabuffer.Enable(vm)
BindCrypto(vm)
BindConsole(vm, util.NewLogger())
require.NoError(t, BindCrypto(vm))
_, err := vm.RunString(`
async function run() {
try {
console.log("\nTesting Buffer encoding/decoding")
const originalString = "Hello, this is a string to encode!"
const base64String = Buffer.from(originalString).toString("base64")
console.log("Original String:", originalString)
console.log("Base64 Encoded:", base64String)
const decodedString = Buffer.from(base64String, "base64").toString("utf-8")
console.log("Base64 Decoded:", decodedString)
}
catch (e) {
console.error(e)
}
try {
console.log("\nTesting AES")
let message = "seanime"
let key = CryptoJS.enc.Utf8.parse("secret key")
console.log("Message:", message)
let encrypted = CryptoJS.AES.encrypt(message, key)
console.log("Encrypted without IV:", encrypted) // map[iv toString]
console.log("Encrypted.toString():", encrypted.toString()) // AoHrnhJfbRht2idLHM82WdkIEpRbXufnA6+ozty9fbk=
console.log("Encrypted.toString(CryptoJS.enc.Base64):", encrypted.toString(CryptoJS.enc.Base64)) // AoHrnhJfbRht2idLHM82WdkIEpRbXufnA6+ozty9fbk=
let decrypted = CryptoJS.AES.decrypt(encrypted, key)
console.log("Decrypted:", decrypted.toString(CryptoJS.enc.Utf8))
let iv = CryptoJS.enc.Utf8.parse("3134003223491201")
encrypted = CryptoJS.AES.encrypt(message, key, { iv: iv })
console.log("Encrypted with IV:", encrypted) // map[iv toString]
decrypted = CryptoJS.AES.decrypt(encrypted, key)
console.log("Decrypted without IV:", decrypted.toString(CryptoJS.enc.Utf8))
decrypted = CryptoJS.AES.decrypt(encrypted, key, { iv: iv })
console.log("Decrypted with IV:", decrypted.toString(CryptoJS.enc.Utf8)) // seanime
}
catch (e) {
console.error(e)
}
try {
console.log("\nTesting encoders")
console.log("")
let a = CryptoJS.enc.Utf8.parse("Hello, World!")
console.log("Base64 Parsed:", a)
let b = CryptoJS.enc.Base64.stringify(a)
console.log("Base64 Stringified:", b)
let c = CryptoJS.enc.Base64.parse(b)
console.log("Base64 Parsed:", c)
let d = CryptoJS.enc.Utf8.stringify(c)
console.log("Base64 Stringified:", d)
console.log("")
let words = CryptoJS.enc.Latin1.parse("Hello, World!")
console.log("Latin1 Parsed:", words)
let latin1 = CryptoJS.enc.Latin1.stringify(words)
console.log("Latin1 Stringified", latin1)
words = CryptoJS.enc.Hex.parse("48656c6c6f2c20576f726c6421")
console.log("Hex Parsed:", words)
let hex = CryptoJS.enc.Hex.stringify(words)
console.log("Hex Stringified", hex)
words = CryptoJS.enc.Utf8.parse("𔭢")
console.log("Utf8 Parsed:", words)
let utf8 = CryptoJS.enc.Utf8.stringify(words)
console.log("Utf8 Stringified", utf8)
words = CryptoJS.enc.Utf16.parse("Hello, World!")
console.log("Utf16 Parsed:", words)
let utf16 = CryptoJS.enc.Utf16.stringify(words)
console.log("Utf16 Stringified", utf16)
words = CryptoJS.enc.Utf16LE.parse("Hello, World!")
console.log("Utf16LE Parsed:", words)
utf16 = CryptoJS.enc.Utf16LE.stringify(words)
console.log("Utf16LE Stringified", utf16)
}
catch (e) {
console.error("Error:", e)
}
}
`)
require.NoError(t, err)
runFunc, ok := goja.AssertFunction(vm.Get("run"))
require.True(t, ok)
ret, err := runFunc(goja.Undefined())
require.NoError(t, err)
promise := ret.Export().(*goja.Promise)
for promise.State() == goja.PromiseStatePending {
time.Sleep(10 * time.Millisecond)
}
if promise.State() == goja.PromiseStateRejected {
err := promise.Result()
t.Fatal(err)
}
return vm
}
func TestGojaCryptoOpenSSL(t *testing.T) {
vm := goja.New()
defer vm.ClearInterrupt()
func expectedAESCiphertext(message string, key []byte, iv []byte) string {
hash := sha256.Sum256(key)
padded := pkcs7(message, aes.BlockSize)
ciphertext := make([]byte, len(padded))
registry := new(gojarequire.Registry)
registry.Enable(vm)
gojabuffer.Enable(vm)
BindCrypto(vm)
BindConsole(vm, util.NewLogger())
_, err := vm.RunString(`
async function run() {
try {
console.log("\nTesting Buffer encoding/decoding")
const payload = "U2FsdGVkX19ZanX9W5jQGgNGOIOBGxhY6gxa1EHnRi3yHL8Ml4cMmQeryf9p04N12VuOjiBas21AcU0Ypc4dB4AWOdc9Cn1wdA2DuQhryUonKYHwV/XXJ53DBn1OIqAvrIAxrN8S2j9Rk5z/F/peu1Kk/d3m82jiKvhTWQcxDeDW8UzCMZbbFnm4qJC3k19+PD5Pal5sBcVTGRXNCpvSSpYb56FcP9Xs+3DyBWhNUqJuO+Wwm3G1J5HhklxCWZ7tcn7TE5Y8d5ORND7t51Padrw4LgEOootqHtfHuBVX6EqlvJslXt0kFgcXJUIO+hw0q5SJ+tiS7o/2OShJ7BCk4XzfQmhFJdBJYGjQ8WPMHYzLuMzDkf6zk2+m7YQtUTXx8SVoLXFOt8gNZeD942snGrWA5+CdYveOfJ8Yv7owoOueMzzYqr5rzG7GVapVI0HzrA24LR4AjRDICqTsJEy6Yg=="
const key = "6315b93606d60f48c964b67b14701f3848ef25af01296cf7e6a98c9460e1d2ac"
console.log("Original String:", payload)
const decrypted = CryptoJS.AES.decrypt(payload, key)
console.log("Decrypted:", decrypted.toString(CryptoJS.enc.Utf8))
}
catch (e) {
console.error(e)
}
}
`)
require.NoError(t, err)
runFunc, ok := goja.AssertFunction(vm.Get("run"))
require.True(t, ok)
ret, err := runFunc(goja.Undefined())
require.NoError(t, err)
promise := ret.Export().(*goja.Promise)
for promise.State() == goja.PromiseStatePending {
time.Sleep(10 * time.Millisecond)
block, err := aes.NewCipher(hash[:])
if err != nil {
panic(err)
}
if promise.State() == goja.PromiseStateRejected {
err := promise.Result()
t.Fatal(err)
}
cipher.NewCBCEncrypter(block, iv).CryptBlocks(ciphertext, padded)
return base64.StdEncoding.EncodeToString(ciphertext)
}
// pkcs7 returns message padded to a multiple of blockSize using PKCS#7:
// n padding bytes are appended, each holding the value n. When the message
// length is already a multiple of blockSize, a full block of padding is added.
func pkcs7(message string, blockSize int) []byte {
	data := []byte(message)
	pad := blockSize - len(data)%blockSize

	padded := make([]byte, len(data)+pad)
	copy(padded, data)
	for i := len(data); i < len(padded); i++ {
		padded[i] = byte(pad)
	}
	return padded
}

View File

@@ -45,6 +45,7 @@ type Fetch struct {
allowedDomains []string // empty = allow all domains
rules []accessRule
anilistToken string
extensionId string
}
func (f *Fetch) SetAnilistToken(token string) {
@@ -80,12 +81,13 @@ var whitelistedDomains = []string{
"*.googleapis.com",
}
func NewFetch(vm *goja.Runtime, allowedDomains []string) *Fetch {
func NewFetch(extensionId string, vm *goja.Runtime, allowedDomains []string) *Fetch {
f := &Fetch{
vm: vm,
fetchSem: make(chan struct{}, maxConcurrentRequests),
vmResponseCh: make(chan func(), maxConcurrentRequests),
allowedDomains: allowedDomains,
extensionId: extensionId,
}
f.allowedDomains = lo.Uniq(append(f.allowedDomains, whitelistedDomains...))
f.compileRules()
@@ -172,7 +174,7 @@ type fetchResult struct {
}
// BindFetch binds the fetch function to the VM
func BindFetch(vm *goja.Runtime, allowedDomains ...[]string) *Fetch {
func BindFetch(extensionId string, vm *goja.Runtime, allowedDomains ...[]string) *Fetch {
ad := []string{"*"}
if len(allowedDomains) > 0 {
@@ -180,7 +182,7 @@ func BindFetch(vm *goja.Runtime, allowedDomains ...[]string) *Fetch {
}
// Create a new Fetch instance
f := NewFetch(vm, ad)
f := NewFetch(extensionId, vm, ad)
_ = vm.Set("fetch", f.Fetch)
go func() {
@@ -273,7 +275,7 @@ func (f *Fetch) isURLAllowed(urlStr string) bool {
func (f *Fetch) Fetch(call goja.FunctionCall) goja.Value {
defer func() {
if r := recover(); r != nil {
log.Warn().Msgf("extension: fetch panic: %v", r)
log.Warn().Str("id", f.extensionId).Msgf("extension: fetch panic: %v", r)
}
}()
@@ -349,26 +351,28 @@ func (f *Fetch) Fetch(call goja.FunctionCall) goja.Value {
}
if options.Body != nil && !goja.IsUndefined(options.Body) {
switch v := options.Body.Export().(type) {
case string:
reqBody = v
case io.Reader:
reqBody = v
case []byte:
reqBody = v
case *goja.ArrayBuffer:
reqBody = v.Bytes()
case goja.ArrayBuffer:
reqBody = v.Bytes()
case *formData:
body, mp := v.GetBuffer()
if fd, ok := getFormDataFromValue(f.vm, options.Body); ok {
body, mp := fd.GetBuffer()
reqBody = body
reqContentType = mp.FormDataContentType()
case map[string]interface{}:
reqBody = v
reqContentType = "application/json"
default:
reqBody = options.Body.String()
} else {
switch v := options.Body.Export().(type) {
case string:
reqBody = v
case io.Reader:
reqBody = v
case []byte:
reqBody = v
case *goja.ArrayBuffer:
reqBody = v.Bytes()
case goja.ArrayBuffer:
reqBody = v.Bytes()
case map[string]interface{}:
reqBody = v
reqContentType = "application/json"
default:
reqBody = options.Body.String()
}
}
}
@@ -397,7 +401,7 @@ func (f *Fetch) Fetch(call goja.FunctionCall) goja.Value {
}
}
log.Trace().Str("url", url).Str("method", options.Method).Msgf("extension: Network request")
log.Trace().Str("id", f.extensionId).Str("url", url).Str("method", options.Method).Msgf("extension: Network request")
var client *req.Client
if options.NoCloudFlareBypass {

View File

@@ -14,14 +14,16 @@ import (
// formData
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
const formDataInternalProperty = "__seanimeFormData"
func BindFormData(vm *goja.Runtime) error {
err := vm.Set("FormData", func(call goja.ConstructorCall) *goja.Object {
fd := newFormData(vm)
instance := call.This
_ = instance.DefineDataProperty(formDataInternalProperty, vm.ToValue(fd), goja.FLAG_FALSE, goja.FLAG_FALSE, goja.FLAG_FALSE)
setFormDataObjectProperties(instance, fd)
instanceValue := vm.ToValue(fd).(*goja.Object)
instanceValue.SetPrototype(call.This.Prototype())
return instanceValue
return instance
})
if err != nil {
return err
@@ -29,6 +31,43 @@ func BindFormData(vm *goja.Runtime) error {
return nil
}
// getFormDataFromValue extracts the backing *formData from a JS value.
// It accepts either a value that exports directly to *formData, or a
// FormData constructor instance carrying the hidden internal property
// installed by BindFormData. The boolean reports whether extraction succeeded.
func getFormDataFromValue(vm *goja.Runtime, value goja.Value) (*formData, bool) {
	if value == nil || goja.IsUndefined(value) || goja.IsNull(value) {
		return nil, false
	}

	// Fast path: the value itself wraps the Go struct.
	if direct, ok := value.Export().(*formData); ok {
		return direct, true
	}

	// Slow path: look up the hidden property set on constructor instances.
	asObject := value.ToObject(vm)
	if asObject == nil {
		return nil, false
	}
	hidden := asObject.Get(formDataInternalProperty)
	if hidden == nil || goja.IsUndefined(hidden) || goja.IsNull(hidden) {
		return nil, false
	}

	data, ok := hidden.Export().(*formData)
	return data, ok
}
// setFormDataObjectProperties attaches the FormData API methods of fd onto
// obj so JS code can call them directly on the instance. Set errors are
// intentionally ignored, matching the original binding behavior.
func setFormDataObjectProperties(obj *goja.Object, fd *formData) {
	methods := map[string]any{
		"append":         fd.Append,
		"delete":         fd.Delete,
		"entries":        fd.Entries,
		"get":            fd.Get,
		"getAll":         fd.GetAll,
		"has":            fd.Has,
		"keys":           fd.Keys,
		"set":            fd.Set,
		"values":         fd.Values,
		"getContentType": fd.GetContentType,
		"getBuffer":      fd.GetBuffer,
	}
	for name, fn := range methods {
		_ = obj.Set(name, fn)
	}
}
type formData struct {
runtime *goja.Runtime
buf *bytes.Buffer

View File

@@ -20,7 +20,7 @@ func TestGojaTorrentUtils(t *testing.T) {
gojabuffer.Enable(vm)
BindTorrentUtils(vm)
BindConsole(vm, util.NewLogger())
BindFetch(vm)
BindFetch("test", vm)
_, err := vm.RunString(`
async function run() {

View File

@@ -81,13 +81,7 @@ func (h *Handler) HandleDebridAddTorrents(c echo.Context) error {
}
for _, torrent := range b.Torrents {
// Get the torrent's provider extension
animeTorrentProviderExtension, ok := h.App.TorrentRepository.GetAnimeProviderExtension(torrent.Provider)
if !ok {
return h.RespondWithError(c, errors.New("provider extension not found for torrent"))
}
magnet, err := animeTorrentProviderExtension.GetProvider().GetTorrentMagnetLink(&torrent)
magnet, err := h.App.TorrentRepository.ResolveMagnetLink(&torrent)
if err != nil {
if len(b.Torrents) == 1 {
return h.RespondWithError(c, err)
@@ -103,6 +97,7 @@ func (h *Handler) HandleDebridAddTorrents(c echo.Context) error {
// Add the torrent to the debrid service
_, err = h.App.DebridClientRepository.AddAndQueueTorrent(debrid.AddTorrentOptions{
MagnetLink: magnet,
InfoHash: torrent.InfoHash,
SelectFileId: "all",
}, b.Destination, b.Media.ID)
if err != nil {
@@ -250,12 +245,7 @@ func (h *Handler) HandleDebridGetTorrentInfo(c echo.Context) error {
return h.RespondWithError(c, err)
}
animeTorrentProviderExtension, ok := h.App.TorrentRepository.GetAnimeProviderExtension(b.Torrent.Provider)
if !ok {
return h.RespondWithError(c, errors.New("provider extension not found for torrent"))
}
magnet, err := animeTorrentProviderExtension.GetProvider().GetTorrentMagnetLink(&b.Torrent)
magnet, err := h.App.TorrentRepository.ResolveMagnetLink(&b.Torrent)
if err != nil {
return h.RespondWithError(c, err)
}
@@ -290,12 +280,7 @@ func (h *Handler) HandleDebridGetTorrentFilePreviews(c echo.Context) error {
return h.RespondWithError(c, err)
}
animeTorrentProviderExtension, ok := h.App.TorrentRepository.GetAnimeProviderExtension(b.Torrent.Provider)
if !ok {
return h.RespondWithError(c, errors.New("provider extension not found for torrent"))
}
magnet, err := animeTorrentProviderExtension.GetProvider().GetTorrentMagnetLink(b.Torrent)
magnet, err := h.App.TorrentRepository.ResolveMagnetLink(b.Torrent)
if err != nil {
return h.RespondWithError(c, err)
}
@@ -351,12 +336,7 @@ func (h *Handler) HandleDebridStartStream(c echo.Context) error {
userAgent := c.Request().Header.Get("User-Agent")
if b.Torrent != nil {
animeTorrentProviderExtension, ok := h.App.TorrentRepository.GetAnimeProviderExtension(b.Torrent.Provider)
if !ok {
return h.RespondWithError(c, errors.New("provider extension not found for torrent"))
}
magnet, err := animeTorrentProviderExtension.GetProvider().GetTorrentMagnetLink(b.Torrent)
magnet, err := h.App.TorrentRepository.ResolveMagnetLink(b.Torrent)
if err != nil {
return h.RespondWithError(c, err)
}

View File

@@ -18,8 +18,7 @@ func (h *Handler) OptionalAuthMiddleware(next echo.HandlerFunc) echo.HandlerFunc
// Allow the following paths to be accessed by anyone
if path == "/api/v1/status" || // public but restricted
path == "/events" || // for server events (auth handled by websocket handler)
strings.HasPrefix(path, "/api/v1/mediastream/transcode/") { // HLS segments (TODO: secure later)
path == "/events" { // for server events (auth handled by websocket handler)
if path == "/api/v1/status" {
// allow status requests by all clients but mark as unauthenticated

View File

@@ -69,6 +69,7 @@ func (h *Handler) FeaturesMiddleware(next echo.HandlerFunc) echo.HandlerFunc {
{"/api/v1/torrent-client", h.App.FeatureManager.IsDisabled(core.ManageAutoDownloader), UpdateMethods, Empty},
{"/api/v1/download-torrent-file", h.App.FeatureManager.IsDisabled(core.ManageAutoDownloader), UpdateMethods, Empty},
{"/api/v1/auto-downloader", h.App.FeatureManager.IsDisabled(core.ManageAutoDownloader), UpdateMethods, Empty},
{"/api/v1/auto-select/profile", h.App.FeatureManager.IsDisabled(core.ManageAutoDownloader), UpdateMethods, Empty},
// onlinestream
{"/api/v1/onlinestream", h.App.FeatureManager.IsDisabled(core.OnlineStreaming), UpdateMethods, []string{"/api/v1/onlinestream/search", "/api/v1/onlinestream/manual-mapping", "/api/v1/onlinestream/get-mapping", "/api/v1/onlinestream/remove-mapping"}},
{"/api/v1/onlinestream/search", h.App.FeatureManager.IsDisabled(core.ManageMangaSource), UpdateMethods, Empty},
@@ -76,7 +77,7 @@ func (h *Handler) FeaturesMiddleware(next echo.HandlerFunc) echo.HandlerFunc {
{"/api/v1/onlinestream/get-mapping", h.App.FeatureManager.IsDisabled(core.ManageMangaSource), UpdateMethods, Empty},
{"/api/v1/onlinestream/remove-mapping", h.App.FeatureManager.IsDisabled(core.ManageMangaSource), UpdateMethods, Empty},
// custom source
//{"/api/v1/custom-source", h.App.FeatureManager.IsDisabled(core.ManageMangaSource), UpdateMethods, Empty},
{"/api/v1/custom-source", h.App.FeatureManager.IsDisabled(core.ManageExtensions), UpdateMethods, Empty},
// nakama
{"/api/v1/nakama", h.App.FeatureManager.IsDisabled(core.ManageNakama), UpdateMethods, Empty},
// open in explorer
@@ -90,6 +91,8 @@ func (h *Handler) FeaturesMiddleware(next echo.HandlerFunc) echo.HandlerFunc {
// extensions
{"/api/v1/extensions", h.App.FeatureManager.IsDisabled(core.ManageExtensions), UpdateMethods, []string{"/api/v1/extensions/all"}},
{"/api/v1/extensions/updates", h.App.FeatureManager.IsDisabled(core.ManageExtensions), Empty, Empty},
{"/api/v1/extensions/plugin-settings", h.App.FeatureManager.IsDisabled(core.PluginTray), UpdateMethods, Empty},
{"/api/v1/extensions/plugin-permissions", h.App.FeatureManager.IsDisabled(core.PluginTray), UpdateMethods, Empty},
// proxy
{"/api/v1/proxy", h.App.FeatureManager.IsDisabled(core.Proxy), Empty, Empty},
{"/api/v1/image-proxy", h.App.FeatureManager.IsDisabled(core.Proxy), Empty, Empty},
@@ -104,6 +107,8 @@ func (h *Handler) FeaturesMiddleware(next echo.HandlerFunc) echo.HandlerFunc {
{"/api/v1/directstream", h.App.FeatureManager.IsDisabled(core.WatchingLocalAnime), UpdateMethods, Empty},
{"/api/v1/mediastream/file", h.App.FeatureManager.IsDisabled(core.WatchingLocalAnime), Empty, Empty},
{"/api/v1/mediastream", h.App.FeatureManager.IsDisabled(core.WatchingLocalAnime), Empty, Empty},
// continuity
{"/api/v1/continuity", h.App.FeatureManager.IsDisabled(core.WatchingLocalAnime), UpdateMethods, Empty},
// manga
{"/api/v1/manga", h.App.FeatureManager.IsDisabled(core.ManageMangaSource), UpdateMethods, []string{"/api/v1/manga/pages", "/api/v1/manga/chapters"}},
{"/api/v1/manga", h.App.FeatureManager.IsDisabled(core.Reading), UpdateMethods, Empty},
@@ -111,6 +116,7 @@ func (h *Handler) FeaturesMiddleware(next echo.HandlerFunc) echo.HandlerFunc {
{"/api/v1/manga/download", h.App.FeatureManager.IsDisabled(core.ManageMangaDownloads), UpdateMethods, Empty},
// local anime library
{"/api/v1/metadata-provider", h.App.FeatureManager.IsDisabled(core.ManageLocalAnimeLibrary), UpdateMethods, Empty},
{"/api/v1/metadata/parent", h.App.FeatureManager.IsDisabled(core.ManageLocalAnimeLibrary), UpdateMethods, Empty},
{"/api/v1/library", h.App.FeatureManager.IsDisabled(core.ManageLocalAnimeLibrary), UpdateMethods, []string{"/api/v1/library/anime-entry/update-progress", "/api/v1/library/anime-entry/update-repeat"}},
{"/api/v1/library/explorer", h.App.FeatureManager.IsDisabled(core.ManageLocalAnimeLibrary), UpdateMethods, Empty},
}

View File

@@ -2,14 +2,19 @@ package handlers
import (
"errors"
"fmt"
"os"
"path/filepath"
"seanime/internal/api/anilist"
"seanime/internal/database/db_bridge"
"seanime/internal/database/models"
hibiketorrent "seanime/internal/extension/hibike/torrent"
"seanime/internal/library/autodownloader"
"seanime/internal/torrent_clients/torrent_client"
torrentrepo "seanime/internal/torrents/torrent"
"seanime/internal/util"
"github.com/goccy/go-json"
"github.com/labstack/echo/v4"
)
@@ -127,13 +132,8 @@ func (h *Handler) HandleTorrentClientGetFiles(c echo.Context) error {
}
defer os.RemoveAll(tempDir)
// Get the torrent's provider extension
providerExtension, ok := h.App.TorrentRepository.GetAnimeProviderExtension(b.Provider)
if !ok {
return h.RespondWithError(c, errors.New("provider extension not found for torrent"))
}
// Get the magnet
magnet, err := providerExtension.GetProvider().GetTorrentMagnetLink(b.Torrent)
magnet, err := h.App.TorrentRepository.ResolveMagnetLink(b.Torrent)
if err != nil {
return h.RespondWithError(c, err)
}
@@ -256,13 +256,8 @@ func (h *Handler) HandleTorrentClientDownload(c echo.Context) error {
// Get magnets
magnets := make([]string, 0)
for _, t := range b.Torrents {
// Get the torrent's provider extension
providerExtension, ok := h.App.TorrentRepository.GetAnimeProviderExtension(t.Provider)
if !ok {
return h.RespondWithError(c, errors.New("provider extension not found for torrent"))
}
// Get the torrent magnet link
magnet, err := providerExtension.GetProvider().GetTorrentMagnetLink(&t)
magnet, err := h.App.TorrentRepository.ResolveMagnetLink(&t)
if err != nil {
return h.RespondWithError(c, err)
}
@@ -324,10 +319,30 @@ func (h *Handler) HandleTorrentClientAddMagnetFromRule(c echo.Context) error {
return h.RespondWithError(c, err)
}
if b.MagnetUrl == "" || b.RuleId == 0 {
if b.RuleId == 0 || (b.MagnetUrl == "" && b.QueuedItemId == 0) {
return h.RespondWithError(c, errors.New("missing parameters"))
}
magnetURL := b.MagnetUrl
if magnetURL == "" {
item, err := h.App.Database.GetAutoDownloaderItem(b.QueuedItemId)
if err != nil {
return h.RespondWithError(c, err)
}
magnetURL, err = resolveAutoDownloaderItemMagnet(item, h.App.TorrentRepository)
if err != nil {
return h.RespondWithError(c, err)
}
if item.Magnet != magnetURL {
item.Magnet = magnetURL
if err := h.App.Database.UpdateAutoDownloaderItem(item.ID, item); err != nil {
h.App.Logger.Warn().Err(err).Uint("queuedItemId", item.ID).Msg("torrent client: Failed to cache resolved queued magnet")
}
}
}
// Get rule from database
rule, err := db_bridge.GetAutoDownloaderRule(h.App.Database, b.RuleId)
if err != nil {
@@ -341,7 +356,7 @@ func (h *Handler) HandleTorrentClientAddMagnetFromRule(c echo.Context) error {
}
// try to add torrents to client, on error return error
err = h.App.TorrentClientRepository.AddMagnets([]string{b.MagnetUrl}, rule.Destination)
err = h.App.TorrentClientRepository.AddMagnets([]string{magnetURL}, rule.Destination)
if err != nil {
return h.RespondWithError(c, err)
}
@@ -354,3 +369,49 @@ func (h *Handler) HandleTorrentClientAddMagnetFromRule(c echo.Context) error {
return h.RespondWithData(c, true)
}
// resolveAutoDownloaderItemMagnet produces a magnet link for a queued
// auto-downloader item.
//
// Resolution order:
//  1. the magnet already cached on the item,
//  2. the provider referenced by the stored normalized torrent, resolved
//     through the torrent repository,
//  3. a magnet synthesized from the item's (or stored torrent's) info hash.
//
// A decoding or provider error is surfaced only when no hash fallback exists.
func resolveAutoDownloaderItemMagnet(item *models.AutoDownloaderItem, torrentRepository *torrentrepo.Repository) (string, error) {
	if item == nil {
		return "", errors.New("queued item not found")
	}

	// A magnet cached on the item wins outright.
	if item.Magnet != "" {
		return item.Magnet, nil
	}

	hash := item.Hash
	var lastErr error

	if len(item.TorrentData) > 0 {
		var stored autodownloader.NormalizedTorrent
		if err := json.Unmarshal(item.TorrentData, &stored); err != nil {
			lastErr = err
		} else if stored.AnimeTorrent != nil {
			// Prefer the hash embedded in the stored torrent only when the
			// item itself carries none.
			if hash == "" {
				hash = stored.AnimeTorrent.InfoHash
			}
			// Records may omit the provider field; fall back to the
			// extension ID captured at queue time.
			if stored.AnimeTorrent.Provider == "" && stored.ExtensionID != "" {
				stored.AnimeTorrent.Provider = stored.ExtensionID
			}
			if torrentRepository != nil {
				magnet, err := torrentRepository.ResolveMagnetLink(stored.AnimeTorrent)
				if err == nil && magnet != "" {
					return magnet, nil
				}
				lastErr = err
			}
		}
	}

	if hash != "" {
		return fmt.Sprintf("magnet:?xt=urn:btih:%s", hash), nil
	}
	if lastErr != nil {
		return "", lastErr
	}
	return "", errors.New("magnet link not found")
}

View File

@@ -0,0 +1,129 @@
package handlers
import (
"errors"
"testing"
"github.com/goccy/go-json"
"github.com/rs/zerolog"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"seanime/internal/api/metadata_provider"
"seanime/internal/database/models"
"seanime/internal/extension"
hibiketorrent "seanime/internal/extension/hibike/torrent"
"seanime/internal/library/autodownloader"
torrentrepo "seanime/internal/torrents/torrent"
"seanime/internal/util"
)
// ruleMagnetTestProvider is a minimal torrent-provider stub for the magnet
// resolution tests. It counts GetTorrentMagnetLink invocations and returns
// either a canned magnet or a canned error.
type ruleMagnetTestProvider struct {
	magnet string // magnet returned on success
	err    error  // when set, GetTorrentMagnetLink fails with this error
	calls  int    // number of times GetTorrentMagnetLink was invoked
}

// Search is a no-op stub.
func (m *ruleMagnetTestProvider) Search(hibiketorrent.AnimeSearchOptions) ([]*hibiketorrent.AnimeTorrent, error) {
	return nil, nil
}

// SmartSearch is a no-op stub.
func (m *ruleMagnetTestProvider) SmartSearch(hibiketorrent.AnimeSmartSearchOptions) ([]*hibiketorrent.AnimeTorrent, error) {
	return nil, nil
}

// GetTorrentInfoHash echoes the torrent's own info hash, or "" for nil input.
func (m *ruleMagnetTestProvider) GetTorrentInfoHash(torrent *hibiketorrent.AnimeTorrent) (string, error) {
	if torrent != nil {
		return torrent.InfoHash, nil
	}
	return "", nil
}

// GetTorrentMagnetLink records the call, then returns the configured magnet
// or error.
func (m *ruleMagnetTestProvider) GetTorrentMagnetLink(*hibiketorrent.AnimeTorrent) (string, error) {
	m.calls++
	if m.err == nil {
		return m.magnet, nil
	}
	return "", m.err
}

// GetLatest is a no-op stub.
func (m *ruleMagnetTestProvider) GetLatest() ([]*hibiketorrent.AnimeTorrent, error) {
	return nil, nil
}

// GetSettings declares the stub as a main provider.
func (m *ruleMagnetTestProvider) GetSettings() hibiketorrent.AnimeProviderSettings {
	return hibiketorrent.AnimeProviderSettings{Type: hibiketorrent.AnimeProviderTypeMain}
}
// TestResolveAutoDownloaderItemMagnetUsesStoredTorrentExtension verifies
// that a queued item without a cached magnet is resolved through the
// provider named by the stored normalized torrent's extension ID.
func TestResolveAutoDownloaderItemMagnetUsesStoredTorrentExtension(t *testing.T) {
	fakeProvider := &ruleMagnetTestProvider{magnet: "magnet:?xt=urn:btih:resolved-from-provider"}
	repo := newTorrentRepositoryForRuleMagnetTests(map[string]*ruleMagnetTestProvider{"fake": fakeProvider})

	// The stored torrent omits Provider on purpose; the resolver must copy
	// the extension ID into it before asking the repository.
	payload, err := json.Marshal(&autodownloader.NormalizedTorrent{
		AnimeTorrent: &hibiketorrent.AnimeTorrent{
			Name:     "Example torrent",
			InfoHash: "hash-from-torrent",
		},
		ExtensionID: "fake",
	})
	require.NoError(t, err)

	queued := &models.AutoDownloaderItem{
		Hash:        "hash-from-item",
		TorrentData: payload,
	}

	magnet, err := resolveAutoDownloaderItemMagnet(queued, repo)
	require.NoError(t, err)
	assert.Equal(t, "magnet:?xt=urn:btih:resolved-from-provider", magnet)
	assert.Equal(t, 1, fakeProvider.calls)
}
// TestResolveAutoDownloaderItemMagnetFallsBackToHash verifies that a
// provider failure falls back to a magnet synthesized from the item's own
// info hash (which takes precedence over the stored torrent's hash).
func TestResolveAutoDownloaderItemMagnetFallsBackToHash(t *testing.T) {
	failingProvider := &ruleMagnetTestProvider{err: errors.New("provider failed")}
	repo := newTorrentRepositoryForRuleMagnetTests(map[string]*ruleMagnetTestProvider{"fake": failingProvider})

	payload, err := json.Marshal(&autodownloader.NormalizedTorrent{
		AnimeTorrent: &hibiketorrent.AnimeTorrent{
			Name:     "Example torrent",
			InfoHash: "hash-from-torrent",
		},
		ExtensionID: "fake",
	})
	require.NoError(t, err)

	queued := &models.AutoDownloaderItem{
		Hash:        "hash-from-item",
		TorrentData: payload,
	}

	magnet, err := resolveAutoDownloaderItemMagnet(queued, repo)
	require.NoError(t, err)
	assert.Equal(t, "magnet:?xt=urn:btih:hash-from-item", magnet)
	assert.Equal(t, 1, failingProvider.calls)
}
// newTorrentRepositoryForRuleMagnetTests builds a torrent repository backed
// only by the given stub providers, each registered as a Go anime torrent
// provider extension, with "fake" configured as the default provider.
func newTorrentRepositoryForRuleMagnetTests(providers map[string]*ruleMagnetTestProvider) *torrentrepo.Repository {
	nopLogger := zerolog.Nop()

	bank := extension.NewUnifiedBank()
	for id, stub := range providers {
		ext := &extension.Extension{
			ID:          id,
			Name:        id,
			Version:     "1.0.0",
			ManifestURI: "builtin",
			Language:    extension.LanguageGo,
			Type:        extension.TypeAnimeTorrentProvider,
		}
		bank.Set(id, extension.NewAnimeTorrentProviderExtension(ext, stub))
	}

	// A nil metadata provider reference is sufficient for these tests.
	var metadata metadata_provider.Provider
	repo := torrentrepo.NewRepository(&torrentrepo.NewRepositoryOptions{
		Logger:              &nopLogger,
		MetadataProviderRef: util.NewRef[metadata_provider.Provider](metadata),
		ExtensionBankRef:    util.NewRef(bank),
	})
	repo.SetSettings(&torrentrepo.RepositorySettings{DefaultAnimeProvider: "fake"})
	return repo
}

View File

@@ -88,12 +88,7 @@ func (h *Handler) HandleGetTorrentstreamTorrentFilePreviews(c echo.Context) erro
return h.RespondWithError(c, err)
}
providerExtension, ok := h.App.ExtensionRepository.GetAnimeTorrentProviderExtensionByID(b.Torrent.Provider)
if !ok {
return h.RespondWithError(c, errors.New("torrentstream: Torrent provider extension not found"))
}
magnet, err := providerExtension.GetProvider().GetTorrentMagnetLink(b.Torrent)
magnet, err := h.App.TorrentRepository.ResolveMagnetLink(b.Torrent)
if err != nil {
return h.RespondWithError(c, err)
}

View File

@@ -1,79 +0,0 @@
package hook
//type mockTagsEvent struct {
// Event
// tags []string
//}
//
//func (m mockTagsEvent) Tags() []string {
// return m.tags
//}
//
//func TestTaggedHook(t *testing.T) {
// calls := ""
//
// base := &Hook[*mockTagsEvent]{}
// base.BindFunc(func(e *mockTagsEvent) error { calls += "f0"; return e.Next() })
//
// hA := NewTaggedHook(base)
// hA.BindFunc(func(e *mockTagsEvent) error { calls += "a1"; return e.Next() })
// hA.Bind(&Handler[*mockTagsEvent]{
// Func: func(e *mockTagsEvent) error { calls += "a2"; return e.Next() },
// Priority: -1,
// })
//
// hB := NewTaggedHook(base, "b1", "b2")
// hB.BindFunc(func(e *mockTagsEvent) error { calls += "b1"; return e.Next() })
// hB.Bind(&Handler[*mockTagsEvent]{
// Func: func(e *mockTagsEvent) error { calls += "b2"; return e.Next() },
// Priority: -2,
// })
//
// hC := NewTaggedHook(base, "c1", "c2")
// hC.BindFunc(func(e *mockTagsEvent) error { calls += "c1"; return e.Next() })
// hC.Bind(&Handler[*mockTagsEvent]{
// Func: func(e *mockTagsEvent) error { calls += "c2"; return e.Next() },
// Priority: -3,
// })
//
// scenarios := []struct {
// event *mockTagsEvent
// expectedCalls string
// }{
// {
// &mockTagsEvent{},
// "a2f0a1",
// },
// {
// &mockTagsEvent{tags: []string{"missing"}},
// "a2f0a1",
// },
// {
// &mockTagsEvent{tags: []string{"b2"}},
// "b2a2f0a1b1",
// },
// {
// &mockTagsEvent{tags: []string{"c1"}},
// "c2a2f0a1c1",
// },
// {
// &mockTagsEvent{tags: []string{"b1", "c2"}},
// "c2b2a2f0a1b1c1",
// },
// }
//
// for _, s := range scenarios {
// t.Run(strings.Join(s.event.tags, "_"), func(t *testing.T) {
// calls = "" // reset
//
// err := base.Trigger(s.event)
// if err != nil {
// t.Fatalf("Unexpected trigger error: %v", err)
// }
//
// if calls != s.expectedCalls {
// t.Fatalf("Expected calls sequence %q, got %q", s.expectedCalls, calls)
// }
// })
// }
//}

View File

@@ -2,100 +2,217 @@ package anime_test
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata_provider"
"seanime/internal/database/db"
"seanime/internal/extension"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestNewLibraryCollection(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
logger := util.NewLogger()
func TestNewLibraryCollectionContinueWatchingList(t *testing.T) {
h := newAnimeTestHarness(t)
database, err := db.NewDatabase(t.TempDir(), "test", logger)
assert.NoError(t, err)
localFiles := make([]*anime.LocalFile, 0)
localFiles = append(localFiles, anime.NewTestLocalFiles(
anime.TestLocalFileGroup{
LibraryPath: "/Anime",
FilePathTemplate: "/Anime/Sousou no Frieren/[SubsPlease] Sousou no Frieren - %ep.mkv",
MediaID: 154587,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
{Episode: 3, AniDBEpisode: "3", Type: anime.LocalFileTypeMain},
{Episode: 4, AniDBEpisode: "4", Type: anime.LocalFileTypeMain},
{Episode: 5, AniDBEpisode: "5", Type: anime.LocalFileTypeMain},
{Episode: 6, AniDBEpisode: "6", Type: anime.LocalFileTypeMain},
{Episode: 7, AniDBEpisode: "7", Type: anime.LocalFileTypeMain},
},
},
anime.TestLocalFileGroup{
LibraryPath: "/Anime",
FilePathTemplate: "/Anime/Mushoku Tensei/[SubsPlease] Mushoku Tensei S2 - %ep.mkv",
MediaID: 146065,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 0, AniDBEpisode: "S1", Type: anime.LocalFileTypeMain},
{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
{Episode: 3, AniDBEpisode: "3", Type: anime.LocalFileTypeMain},
{Episode: 4, AniDBEpisode: "4", Type: anime.LocalFileTypeMain},
{Episode: 5, AniDBEpisode: "5", Type: anime.LocalFileTypeMain},
},
},
)...)
metadataProvider := metadata_provider.GetFakeProvider(t, database)
//wsEventManager := events.NewMockWSEventManager(logger)
patchAnimeCollectionEntry(t, h.animeCollection, 154587, anilist.AnimeCollectionEntryPatch{
Status: new(anilist.MediaListStatusCurrent),
Progress: new(4),
})
patchCollectionEntryEpisodeCount(t, h.animeCollection, 154587, 7)
h.setEpisodeMetadata(t, 154587, []int{1, 2, 3, 4, 5, 6, 7}, nil)
anilistClient := anilist.TestGetMockAnilistClient()
anilistPlatform := anilist_platform.NewAnilistPlatform(util.NewRef(anilistClient), util.NewRef(extension.NewUnifiedBank()), logger, database)
patchAnimeCollectionEntry(t, h.animeCollection, 146065, anilist.AnimeCollectionEntryPatch{
Status: new(anilist.MediaListStatusCurrent),
Progress: new(1),
})
patchCollectionEntryEpisodeCount(t, h.animeCollection, 146065, 6)
h.setEpisodeMetadata(t, 146065, []int{1, 2, 3, 4, 5}, map[string]int{"S1": 1})
animeCollection, err := anilistPlatform.GetAnimeCollection(t.Context(), false)
libraryCollection := h.newLibraryCollection(t, localFiles)
if assert.NoError(t, err) {
require.Len(t, libraryCollection.ContinueWatchingList, 2)
require.Equal(t, 154587, libraryCollection.ContinueWatchingList[0].BaseAnime.ID)
require.Equal(t, 5, libraryCollection.ContinueWatchingList[0].EpisodeNumber)
require.Equal(t, 146065, libraryCollection.ContinueWatchingList[1].BaseAnime.ID)
require.Equal(t, 1, libraryCollection.ContinueWatchingList[1].EpisodeNumber)
require.Empty(t, libraryCollection.UnmatchedLocalFiles)
require.Empty(t, libraryCollection.UnknownGroups)
}
// Mock Anilist collection and local files
// User is currently watching Sousou no Frieren and One Piece
lfs := make([]*anime.LocalFile, 0)
func TestNewLibraryCollectionMergesRepeatingAndHydratesStats(t *testing.T) {
h := newAnimeTestHarness(t)
// Sousou no Frieren
// 7 episodes downloaded, 4 watched
mediaId := 154587
lfs = append(lfs, anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("E:/Anime", "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - %ep (1080p) [F02B9CEE].mkv", mediaId, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "3", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 4, MetadataAniDbEpisode: "4", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 5, MetadataAniDbEpisode: "5", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 6, MetadataAniDbEpisode: "6", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 7, MetadataAniDbEpisode: "7", MetadataType: anime.LocalFileTypeMain},
}),
)...)
anilist.TestModifyAnimeCollectionEntry(animeCollection, mediaId, anilist.TestModifyAnimeCollectionEntryInput{
Status: new(anilist.MediaListStatusCurrent),
Progress: new(4), // Mock progress
})
localFiles := anime.NewTestLocalFiles(
anime.TestLocalFileGroup{
LibraryPath: "/Anime",
FilePathTemplate: "/Anime/Sousou no Frieren/%ep.mkv",
MediaID: 154587,
Episodes: []anime.TestLocalFileEpisode{{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain}},
},
anime.TestLocalFileGroup{
LibraryPath: "/Anime",
FilePathTemplate: "/Anime/One Piece/%ep.mkv",
MediaID: 21,
Episodes: []anime.TestLocalFileEpisode{{Episode: 1070, AniDBEpisode: "1070", Type: anime.LocalFileTypeMain}},
},
anime.TestLocalFileGroup{
LibraryPath: "/Anime",
FilePathTemplate: "/Anime/Mushoku/%ep.mkv",
MediaID: 146065,
Episodes: []anime.TestLocalFileEpisode{{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain}},
},
)
// One Piece
// Downloaded 1070-1075 but only watched up until 1060
mediaId = 21
lfs = append(lfs, anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("E:/Anime", "E:\\Anime\\One Piece\\[SubsPlease] One Piece - %ep (1080p) [F02B9CEE].mkv", mediaId, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1070, MetadataAniDbEpisode: "1070", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1071, MetadataAniDbEpisode: "1071", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1072, MetadataAniDbEpisode: "1072", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1073, MetadataAniDbEpisode: "1073", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1074, MetadataAniDbEpisode: "1074", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1075, MetadataAniDbEpisode: "1075", MetadataType: anime.LocalFileTypeMain},
}),
)...)
anilist.TestModifyAnimeCollectionEntry(animeCollection, mediaId, anilist.TestModifyAnimeCollectionEntryInput{
Status: new(anilist.MediaListStatusCurrent),
Progress: new(1060), // Mock progress
})
patchAnimeCollectionEntry(t, h.animeCollection, 154587, anilist.AnimeCollectionEntryPatch{
Status: new(anilist.MediaListStatusCurrent),
Progress: new(0),
})
onePieceEntry := patchAnimeCollectionEntry(t, h.animeCollection, 21, anilist.AnimeCollectionEntryPatch{
Status: new(anilist.MediaListStatusRepeating),
Progress: new(1060),
})
mushokuEntry := patchAnimeCollectionEntry(t, h.animeCollection, 146065, anilist.AnimeCollectionEntryPatch{
Status: new(anilist.MediaListStatusCompleted),
Progress: new(12),
})
// Add unmatched local files
mediaId = 0
lfs = append(lfs, anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("E:/Anime", "E:\\Anime\\Unmatched\\[SubsPlease] Unmatched - %ep (1080p) [F02B9CEE].mkv", mediaId, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "3", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 4, MetadataAniDbEpisode: "4", MetadataType: anime.LocalFileTypeMain},
}),
)...)
movieFormat := anilist.MediaFormatMovie
showFormat := anilist.MediaFormatTv
ovaFormat := anilist.MediaFormatOva
patchCollectionEntryFormat(t, h.animeCollection, 154587, showFormat)
onePieceEntry.Media.Format = &movieFormat
mushokuEntry.Media.Format = &ovaFormat
libraryCollection, err := anime.NewLibraryCollection(t.Context(), &anime.NewLibraryCollectionOptions{
AnimeCollection: animeCollection,
LocalFiles: lfs,
PlatformRef: util.NewRef(anilistPlatform),
MetadataProviderRef: util.NewRef(metadataProvider),
})
libraryCollection := h.newLibraryCollection(t, localFiles)
if assert.NoError(t, err) {
assert.Equal(t, 1, len(libraryCollection.ContinueWatchingList)) // Only Sousou no Frieren is in the continue watching list
assert.Equal(t, 4, len(libraryCollection.UnmatchedLocalFiles)) // 4 unmatched local files
currentList := findCollectionListByStatus(t, libraryCollection, anilist.MediaListStatusCurrent)
require.Len(t, currentList.Entries, 2)
require.ElementsMatch(t, []int{154587, 21}, []int{currentList.Entries[0].MediaId, currentList.Entries[1].MediaId})
require.Nil(t, findOptionalCollectionListByStatus(libraryCollection, anilist.MediaListStatusRepeating))
var repeatingEntry *anime.LibraryCollectionEntry
for _, entry := range currentList.Entries {
if entry.MediaId == 21 {
repeatingEntry = entry
break
}
}
require.NotNil(t, repeatingEntry)
require.NotNil(t, repeatingEntry.EntryListData.Status)
require.Equal(t, anilist.MediaListStatusRepeating, *repeatingEntry.EntryListData.Status)
require.NotNil(t, libraryCollection.Stats)
require.Equal(t, 3, libraryCollection.Stats.TotalEntries)
require.Equal(t, len(localFiles), libraryCollection.Stats.TotalFiles)
require.Equal(t, 1, libraryCollection.Stats.TotalShows)
require.Equal(t, 1, libraryCollection.Stats.TotalMovies)
require.Equal(t, 1, libraryCollection.Stats.TotalSpecials)
}
// TestNewLibraryCollectionGroupsUnknownIgnoredAndUnmatchedFiles checks how
// local files that cannot be attached to a collection entry are sorted:
// files matched to a media ID absent from the collection land in
// UnknownGroups, unmatched files are grouped per directory, and ignored
// files are listed separately in path order.
func TestNewLibraryCollectionGroupsUnknownIgnoredAndUnmatchedFiles(t *testing.T) {
	h := newAnimeTestHarness(t)

	groups := []anime.TestLocalFileGroup{
		{
			// Matched to an ID the collection does not contain.
			LibraryPath:      "/Anime",
			FilePathTemplate: "/Anime/Unknown Show/%ep.mkv",
			MediaID:          999999,
			Episodes: []anime.TestLocalFileEpisode{
				{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
				{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
			},
		},
		{
			LibraryPath:      "/Anime",
			FilePathTemplate: "/Anime/Resolve/A/%ep.mkv",
			MediaID:          0,
			Episodes: []anime.TestLocalFileEpisode{
				{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
				{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
			},
		},
		{
			LibraryPath:      "/Anime",
			FilePathTemplate: "/Anime/Resolve/B/%ep.mkv",
			MediaID:          0,
			Episodes:         []anime.TestLocalFileEpisode{{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain}},
		},
		{
			LibraryPath:      "/Anime",
			FilePathTemplate: "/Anime/Ignored/Z/%ep.mkv",
			MediaID:          0,
			Episodes:         []anime.TestLocalFileEpisode{{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain}},
		},
		{
			LibraryPath:      "/Anime",
			FilePathTemplate: "/Anime/Ignored/A/%ep.mkv",
			MediaID:          0,
			Episodes:         []anime.TestLocalFileEpisode{{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain}},
		},
	}
	localFiles := anime.NewTestLocalFiles(groups...)

	// Indices 5 and 6 are the two "/Anime/Ignored" files (2+2+1 files precede them).
	localFiles[5].Ignored = true
	localFiles[6].Ignored = true

	libraryCollection := h.newLibraryCollection(t, localFiles)

	require.Empty(t, libraryCollection.ContinueWatchingList)

	require.Len(t, libraryCollection.UnknownGroups, 1)
	require.Equal(t, 999999, libraryCollection.UnknownGroups[0].MediaId)
	require.Len(t, libraryCollection.UnknownGroups[0].LocalFiles, 2)

	require.Len(t, libraryCollection.UnmatchedLocalFiles, 3)
	require.Len(t, libraryCollection.UnmatchedGroups, 2)
	require.Equal(t, "/Anime/Resolve/A", libraryCollection.UnmatchedGroups[0].Dir)
	require.Len(t, libraryCollection.UnmatchedGroups[0].LocalFiles, 2)
	require.Equal(t, "/Anime/Resolve/B", libraryCollection.UnmatchedGroups[1].Dir)
	require.Len(t, libraryCollection.UnmatchedGroups[1].LocalFiles, 1)

	require.Len(t, libraryCollection.IgnoredLocalFiles, 2)
	require.Equal(t, "/Anime/Ignored/A/1.mkv", libraryCollection.IgnoredLocalFiles[0].GetPath())
	require.Equal(t, "/Anime/Ignored/Z/1.mkv", libraryCollection.IgnoredLocalFiles[1].GetPath())
}
// findCollectionListByStatus returns the collection list with the given
// status, failing the test when no such list exists.
func findCollectionListByStatus(t *testing.T, libraryCollection *anime.LibraryCollection, status anilist.MediaListStatus) *anime.LibraryCollectionList {
	t.Helper()
	found := findOptionalCollectionListByStatus(libraryCollection, status)
	require.NotNil(t, found)
	return found
}

// findOptionalCollectionListByStatus returns the collection list with the
// given status, or nil when the collection has none.
func findOptionalCollectionListByStatus(libraryCollection *anime.LibraryCollection, status anilist.MediaListStatus) *anime.LibraryCollectionList {
	for _, candidate := range libraryCollection.Lists {
		if candidate.Status == status {
			return candidate
		}
	}
	return nil
}

View File

@@ -0,0 +1,27 @@
package anime
import (
"testing"
"github.com/stretchr/testify/require"
)
// TestEpisodeSliceHelpers directly exercises the small episode-slice helper
// methods at the bottom of entry_download_info.go; the higher-level tests
// already cover the real behavior, this one just keeps the utilities
// exercised.
func TestEpisodeSliceHelpers(t *testing.T) {
	episodes := newEpisodeSlice(3)
	require.Len(t, episodes.getSlice(), 3)
	require.Equal(t, 1, episodes.get(0).episodeNumber)
	require.Equal(t, "2", episodes.getEpisodeNumber(2).aniDBEpisode)
	require.Nil(t, episodes.getEpisodeNumber(99))

	// copy must return a distinct holder with the same contents.
	duplicate := episodes.copy()
	require.NotSame(t, episodes, duplicate)
	require.Len(t, duplicate.getSlice(), 3)

	// trimStart drops leading episodes from the original only.
	episodes.trimStart(1)
	require.Len(t, episodes.getSlice(), 2)
	require.Equal(t, 2, episodes.get(0).episodeNumber)

	// print only needs to run without panicking.
	duplicate.print()
}

View File

@@ -1,194 +1,229 @@
package anime_test
import (
"context"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/api/metadata_provider"
"seanime/internal/database/db"
"seanime/internal/library/anime"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestNewEntryDownloadInfo(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
func TestNewEntryDownloadInfoEpisodeZeroDiscrepancy(t *testing.T) {
// anilist counts episode 0 here, but the metadata maps it as S1.
// the expected list should still expose that extra slot as episode 0.
h := newAnimeTestHarness(t)
mediaID := 146065
logger := util.NewLogger()
database, err := db.NewDatabase(test_utils.ConfigData.Path.DataDir, test_utils.ConfigData.Database.Name, logger)
require.NoError(t, err)
metadataProvider := metadata_provider.GetFakeProvider(t, database)
anilistClient := anilist.TestGetMockAnilistClient()
animeCollection, err := anilistClient.AnimeCollection(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
if err != nil {
t.Fatal(err)
}
patchEntryMediaStatus(t, h.animeCollection, mediaID, anilist.MediaStatusReleasing)
patchAnimeCollectionEntry(t, h.animeCollection, mediaID, anilist.AnimeCollectionEntryPatch{
AiredEpisodes: new(6),
NextAiringEpisode: &anilist.BaseAnime_NextAiringEpisode{Episode: 7},
})
h.setEpisodeMetadata(t, mediaID, []int{1, 2, 3, 4, 5}, map[string]int{"S1": 1})
tests := []struct {
name string
localFiles []*anime.LocalFile
mediaId int
currentProgress int
status anilist.MediaListStatus
expectedEpisodeNumbersToDownload []struct {
episodeNumber int
aniDbEpisode string
}
name string
progress int
expectedEpisodes []downloadEpisodeExpectation
}{
{
// AniList includes episode 0 as a main episode but AniDB lists it as a special S1
// So we should expect to see episode 0 (S1) in the list of episodes to download
name: "Mushoku Tensei: Jobless Reincarnation Season 2",
localFiles: nil,
mediaId: 146065,
currentProgress: 0,
status: anilist.MediaListStatusCurrent,
expectedEpisodeNumbersToDownload: []struct {
episodeNumber int
aniDbEpisode string
}{
{episodeNumber: 0, aniDbEpisode: "S1"},
{episodeNumber: 1, aniDbEpisode: "1"},
{episodeNumber: 2, aniDbEpisode: "2"},
{episodeNumber: 3, aniDbEpisode: "3"},
{episodeNumber: 4, aniDbEpisode: "4"},
{episodeNumber: 5, aniDbEpisode: "5"},
{episodeNumber: 6, aniDbEpisode: "6"},
{episodeNumber: 7, aniDbEpisode: "7"},
{episodeNumber: 8, aniDbEpisode: "8"},
{episodeNumber: 9, aniDbEpisode: "9"},
{episodeNumber: 10, aniDbEpisode: "10"},
{episodeNumber: 11, aniDbEpisode: "11"},
{episodeNumber: 12, aniDbEpisode: "12"},
},
name: "progress zero keeps episode zero",
progress: 0,
expectedEpisodes: []downloadEpisodeExpectation{{0, "S1"}, {1, "1"}, {2, "2"}, {3, "3"}, {4, "4"}, {5, "5"}},
},
{
// Same as above but progress of 1 should just eliminate episode 0 from the list and not episode 1
name: "Mushoku Tensei: Jobless Reincarnation Season 2 - 2",
localFiles: nil,
mediaId: 146065,
currentProgress: 1,
status: anilist.MediaListStatusCurrent,
expectedEpisodeNumbersToDownload: []struct {
episodeNumber int
aniDbEpisode string
}{
{episodeNumber: 1, aniDbEpisode: "1"},
{episodeNumber: 2, aniDbEpisode: "2"},
{episodeNumber: 3, aniDbEpisode: "3"},
{episodeNumber: 4, aniDbEpisode: "4"},
{episodeNumber: 5, aniDbEpisode: "5"},
{episodeNumber: 6, aniDbEpisode: "6"},
{episodeNumber: 7, aniDbEpisode: "7"},
{episodeNumber: 8, aniDbEpisode: "8"},
{episodeNumber: 9, aniDbEpisode: "9"},
{episodeNumber: 10, aniDbEpisode: "10"},
{episodeNumber: 11, aniDbEpisode: "11"},
{episodeNumber: 12, aniDbEpisode: "12"},
},
},
{
name: "Watashi ga Koibito ni Nareru Wake Naijan, Murimuri! Season 2",
localFiles: nil,
mediaId: 199112,
currentProgress: 0,
status: anilist.MediaListStatusCurrent,
expectedEpisodeNumbersToDownload: []struct {
episodeNumber int
aniDbEpisode string
}{
{episodeNumber: 1, aniDbEpisode: "1"},
{episodeNumber: 2, aniDbEpisode: "2"},
{episodeNumber: 3, aniDbEpisode: "3"},
{episodeNumber: 4, aniDbEpisode: "4"},
{episodeNumber: 5, aniDbEpisode: "5"},
},
name: "progress one only removes episode zero",
progress: 1,
expectedEpisodes: []downloadEpisodeExpectation{{1, "1"}, {2, "2"}, {3, "3"}, {4, "4"}, {5, "5"}},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// we only care about the logical episode list here, not local download state.
info := h.newEntryDownloadInfo(t, mediaID, nil, tt.progress, anilist.MediaListStatusCurrent)
anilistEntry, _ := animeCollection.GetListEntryFromAnimeId(tt.mediaId)
require.NotNil(t, anilistEntry)
animeMetadata, err := metadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, tt.mediaId)
require.NoError(t, err)
info, err := anime.NewEntryDownloadInfo(&anime.NewEntryDownloadInfoOptions{
LocalFiles: tt.localFiles,
Progress: &tt.currentProgress,
Status: &tt.status,
Media: anilistEntry.Media,
MetadataProviderRef: util.NewRef(metadataProvider),
AnimeMetadata: animeMetadata,
})
if assert.NoError(t, err) && assert.NotNil(t, info) {
foundEpToDownload := make([]struct {
episodeNumber int
aniDbEpisode string
}, 0)
for _, ep := range info.EpisodesToDownload {
foundEpToDownload = append(foundEpToDownload, struct {
episodeNumber int
aniDbEpisode string
}{
episodeNumber: ep.EpisodeNumber,
aniDbEpisode: ep.AniDBEpisode,
})
}
assert.ElementsMatch(t, tt.expectedEpisodeNumbersToDownload, foundEpToDownload)
require.ElementsMatch(t, tt.expectedEpisodes, collectDownloadEpisodes(info))
require.False(t, info.HasInaccurateSchedule)
// generated download entries use placeholder local files internally, then clear them back out.
for _, episode := range info.EpisodesToDownload {
require.Nil(t, episode.Episode.LocalFile)
require.Equal(t, episode.AniDBEpisode, episode.Episode.AniDBEpisode)
}
})
}
}
// TestNewEntryDownloadInfoSpecialsDiscrepancyAndBatchFlags covers the case
// where AniList's aired count (6) exceeds the mapped main episodes (4): the
// surplus slots are remapped onto the specials S1/S2, finished media enables
// batch mode, and the absolute offset is derived from the override's
// absolute episode number (13 -> offset 12).
func TestNewEntryDownloadInfoSpecialsDiscrepancyAndBatchFlags(t *testing.T) {
	h := newAnimeTestHarness(t)
	const mediaID = 154587

	patchCollectionEntryEpisodeCount(t, h.animeCollection, mediaID, 6)
	patchEntryMediaStatus(t, h.animeCollection, mediaID, anilist.MediaStatusFinished)

	override := h.setEpisodeMetadata(t, mediaID, []int{1, 2, 3, 4}, map[string]int{"S1": 1, "S2": 2})
	override.Episodes["1"].AbsoluteEpisodeNumber = 13

	info := h.newEntryDownloadInfo(t, mediaID, nil, 0, anilist.MediaListStatusCurrent)

	expected := []downloadEpisodeExpectation{{1, "1"}, {2, "2"}, {3, "3"}, {4, "4"}, {6, "S1"}, {5, "S2"}}
	require.ElementsMatch(t, expected, collectDownloadEpisodes(info))
	require.True(t, info.CanBatch)
	require.True(t, info.BatchAll)
	require.False(t, info.Rewatch)
	require.Equal(t, 12, info.AbsoluteOffset)
}
// TestNewEntryDownloadInfoCompletedRewatchFiltersDownloadedEpisodes checks
// that a completed entry is planned as a rewatch: progress is treated as 0
// for download planning, and the resulting list contains every episode not
// already present on disk.
func TestNewEntryDownloadInfoCompletedRewatchFiltersDownloadedEpisodes(t *testing.T) {
	h := newAnimeTestHarness(t)
	const mediaID = 154587

	patchCollectionEntryEpisodeCount(t, h.animeCollection, mediaID, 5)
	patchEntryMediaStatus(t, h.animeCollection, mediaID, anilist.MediaStatusFinished)
	h.setEpisodeMetadata(t, mediaID, []int{1, 2, 3, 4, 5}, nil)

	// Episodes 1 and 3 already exist locally and must be excluded.
	localFiles := anime.NewTestLocalFiles(anime.TestLocalFileGroup{
		LibraryPath:      "/Anime",
		FilePathTemplate: "/Anime/Frieren/%ep.mkv",
		MediaID:          mediaID,
		Episodes: []anime.TestLocalFileEpisode{
			{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
			{Episode: 3, AniDBEpisode: "3", Type: anime.LocalFileTypeMain},
		},
	})

	info := h.newEntryDownloadInfo(t, mediaID, localFiles, 4, anilist.MediaListStatusCompleted)

	require.ElementsMatch(t, []downloadEpisodeExpectation{{2, "2"}, {4, "4"}, {5, "5"}}, collectDownloadEpisodes(info))
	require.True(t, info.CanBatch)
	require.False(t, info.BatchAll)
	require.True(t, info.Rewatch)
}
// TestNewEntryDownloadInfoScheduleFlags covers how airing-schedule data affects
// the plan: missing next-airing info marks the schedule inaccurate, while known
// next-airing info trims episodes that have not aired yet.
func TestNewEntryDownloadInfoScheduleFlags(t *testing.T) {
	const mediaID = 154587

	t.Run("releasing without next airing is inaccurate", func(t *testing.T) {
		// releasing shows without next airing data keep the full aired list,
		// but they should be marked as having an inaccurate schedule.
		h := newAnimeTestHarness(t)
		patchEntryMediaStatus(t, h.animeCollection, mediaID, anilist.MediaStatusReleasing)
		patchCollectionEntryEpisodeCount(t, h.animeCollection, mediaID, 5)
		h.clearNextAiringEpisode(t, mediaID)
		h.setEpisodeMetadata(t, mediaID, []int{1, 2, 3, 4, 5}, nil)

		info := h.newEntryDownloadInfo(t, mediaID, nil, 0, anilist.MediaListStatusCurrent)

		want := []downloadEpisodeExpectation{{1, "1"}, {2, "2"}, {3, "3"}, {4, "4"}, {5, "5"}}
		require.ElementsMatch(t, want, collectDownloadEpisodes(info))
		require.True(t, info.HasInaccurateSchedule)
	})

	t.Run("next airing trims future episodes", func(t *testing.T) {
		// once next airing is known, anything at or after that future episode should be filtered out.
		h := newAnimeTestHarness(t)
		patchEntryMediaStatus(t, h.animeCollection, mediaID, anilist.MediaStatusReleasing)
		patchCollectionEntryEpisodeCount(t, h.animeCollection, mediaID, 12)
		patchAnimeCollectionEntry(t, h.animeCollection, mediaID, anilist.AnimeCollectionEntryPatch{
			NextAiringEpisode: &anilist.BaseAnime_NextAiringEpisode{Episode: 4},
		})
		h.setEpisodeMetadata(t, mediaID, []int{1, 2, 3, 4, 5, 6}, nil)

		info := h.newEntryDownloadInfo(t, mediaID, nil, 0, anilist.MediaListStatusCurrent)

		require.ElementsMatch(t, []downloadEpisodeExpectation{{1, "1"}, {2, "2"}, {3, "3"}}, collectDownloadEpisodes(info))
		require.False(t, info.HasInaccurateSchedule)
	})
}
// TestNewEntryDownloadInfoFallsBackToMetadataCurrentEpisodeCount checks the
// fallback used when media.Episodes is missing: the aired-episode dates in the
// metadata drive the count, and only past-dated episodes survive it.
func TestNewEntryDownloadInfoFallsBackToMetadataCurrentEpisodeCount(t *testing.T) {
	const mediaID = 154587
	h := newAnimeTestHarness(t)
	patchEntryMediaStatus(t, h.animeCollection, mediaID, anilist.MediaStatusFinished)
	h.clearEpisodeCount(t, mediaID)
	h.clearNextAiringEpisode(t, mediaID)

	// episode 3 is dated in the future, so it must be dropped by the fallback count.
	airDates := map[int]string{
		1: "2000-01-01",
		2: "2000-01-02",
		3: "2099-01-01",
	}
	h.setCustomMetadata(mediaID, h.newMetadataWithAirDates(t, mediaID, airDates))

	info := h.newEntryDownloadInfo(t, mediaID, nil, 0, anilist.MediaListStatusCurrent)

	require.ElementsMatch(t, []downloadEpisodeExpectation{{1, "1"}, {2, "2"}}, collectDownloadEpisodes(info))
}
func TestNewEntryDownloadInfoEarlyReturnsAndErrors(t *testing.T) {
t.Run("not yet released returns empty result", func(t *testing.T) {
// unreleased media short-circuits before any download planning starts.
h := newAnimeTestHarness(t)
mediaID := 154587
patchEntryMediaStatus(t, h.animeCollection, mediaID, anilist.MediaStatusNotYetReleased)
h.setEpisodeMetadata(t, mediaID, []int{1, 2, 3}, nil)
info := h.newEntryDownloadInfo(t, mediaID, nil, 0, anilist.MediaListStatusCurrent)
require.Empty(t, info.EpisodesToDownload)
require.False(t, info.CanBatch)
require.False(t, info.Rewatch)
})
t.Run("missing metadata returns an error", func(t *testing.T) {
// metadata is required for the planner, so nil should fail fast.
h := newAnimeTestHarness(t)
mediaID := 154587
entry := h.findEntry(t, mediaID)
_, err := anime.NewEntryDownloadInfo(&anime.NewEntryDownloadInfoOptions{
LocalFiles: nil,
Progress: new(0),
Status: new(anilist.MediaListStatusCurrent),
Media: entry.Media,
MetadataProviderRef: h.metadataProviderRef,
AnimeMetadata: nil,
})
}
}
func TestNewEntryDownloadInfo2(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
mediaId := 21
logger := util.NewLogger()
database, err := db.NewDatabase(test_utils.ConfigData.Path.DataDir, test_utils.ConfigData.Database.Name, logger)
require.NoError(t, err)
metadataProvider := metadata_provider.GetFakeProvider(t, database)
anilistClient := anilist.TestGetMockAnilistClient()
animeCollection, err := anilistClient.AnimeCollection(context.Background(), nil)
if err != nil {
t.Fatal(err)
}
anilistEntry, _ := animeCollection.GetListEntryFromAnimeId(mediaId)
animeMetadata, err := metadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, mediaId)
require.NoError(t, err)
info, err := anime.NewEntryDownloadInfo(&anime.NewEntryDownloadInfoOptions{
LocalFiles: nil,
Progress: new(0),
Status: new(anilist.MediaListStatusCurrent),
Media: anilistEntry.Media,
MetadataProviderRef: util.NewRef(metadataProvider),
AnimeMetadata: animeMetadata,
require.EqualError(t, err, "could not get anime metadata")
})
require.NoError(t, err)
require.NotNil(t, info)
t.Run("missing current episode count returns empty result", func(t *testing.T) {
// when both media and metadata resolve to zero aired episodes, we just get an empty plan.
mediaID := 154587
h := newAnimeTestHarness(t)
t.Log(len(info.EpisodesToDownload))
assert.GreaterOrEqual(t, len(info.EpisodesToDownload), 1096)
patchEntryMediaStatus(t, h.animeCollection, mediaID, anilist.MediaStatusFinished)
h.clearEpisodeCount(t, mediaID)
h.clearNextAiringEpisode(t, mediaID)
h.setCustomMetadata(mediaID, h.setEpisodeMetadata(t, mediaID, []int{1, 2, 3}, nil))
h.clearMetadataAirDates(mediaID)
info := h.newEntryDownloadInfo(t, mediaID, nil, 0, anilist.MediaListStatusCurrent)
require.Empty(t, info.EpisodesToDownload)
})
}
// downloadEpisodeExpectation is a comparable (episode number, AniDB episode)
// pair used to assert the planner's EpisodesToDownload output with ElementsMatch.
type downloadEpisodeExpectation struct {
	episodeNumber int
	aniDBEpisode  string
}
// collectDownloadEpisodes flattens the planner result into comparable
// (episode number, AniDB episode) pairs for ElementsMatch assertions.
func collectDownloadEpisodes(info *anime.EntryDownloadInfo) []downloadEpisodeExpectation {
	out := make([]downloadEpisodeExpectation, len(info.EpisodesToDownload))
	for i, ep := range info.EpisodesToDownload {
		out[i] = downloadEpisodeExpectation{
			episodeNumber: ep.EpisodeNumber,
			aniDBEpisode:  ep.AniDBEpisode,
		}
	}
	return out
}

View File

@@ -7,7 +7,6 @@ import (
"seanime/internal/extension"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
@@ -17,13 +16,12 @@ import (
// TestNewAnimeEntry tests /library/entry endpoint.
// /!\ MAKE SURE TO HAVE THE MEDIA ADDED TO YOUR LIST TEST ACCOUNT LISTS
func TestNewAnimeEntry(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
logger := util.NewLogger()
database, err := db.NewDatabase(t.TempDir(), "test", logger)
assert.NoError(t, err)
metadataProvider := metadata_provider.GetFakeProvider(t, database)
metadataProvider := metadata_provider.NewTestProvider(t, database)
tests := []struct {
name string
@@ -36,14 +34,19 @@ func TestNewAnimeEntry(t *testing.T) {
{
name: "Sousou no Frieren",
mediaId: 154587,
localFiles: anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("E:/Anime", "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - %ep (1080p) [F02B9CEE].mkv", 154587, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "3", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 4, MetadataAniDbEpisode: "4", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 5, MetadataAniDbEpisode: "5", MetadataType: anime.LocalFileTypeMain},
}),
localFiles: anime.NewTestLocalFiles(
anime.TestLocalFileGroup{
LibraryPath: "E:/Anime",
FilePathTemplate: "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - %ep (1080p) [F02B9CEE].mkv",
MediaID: 154587,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
{Episode: 3, AniDBEpisode: "3", Type: anime.LocalFileTypeMain},
{Episode: 4, AniDBEpisode: "4", Type: anime.LocalFileTypeMain},
{Episode: 5, AniDBEpisode: "5", Type: anime.LocalFileTypeMain},
},
},
),
currentProgress: 4,
expectedNextEpisodeNumber: 5,
@@ -52,22 +55,27 @@ func TestNewAnimeEntry(t *testing.T) {
{
name: "Mushoku Tensei II Isekai Ittara Honki Dasu",
mediaId: 146065,
localFiles: anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("E:/Anime", "E:/Anime/Mushoku Tensei II Isekai Ittara Honki Dasu/[SubsPlease] Mushoku Tensei S2 - 00 (1080p) [9C362DC3].mkv", 146065, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 0, MetadataAniDbEpisode: "S1", MetadataType: anime.LocalFileTypeMain}, // Special episode
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "3", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 4, MetadataAniDbEpisode: "4", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 5, MetadataAniDbEpisode: "5", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 6, MetadataAniDbEpisode: "6", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 7, MetadataAniDbEpisode: "7", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 8, MetadataAniDbEpisode: "8", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 9, MetadataAniDbEpisode: "9", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 10, MetadataAniDbEpisode: "10", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 11, MetadataAniDbEpisode: "11", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 12, MetadataAniDbEpisode: "12", MetadataType: anime.LocalFileTypeMain},
}),
localFiles: anime.NewTestLocalFiles(
anime.TestLocalFileGroup{
LibraryPath: "E:/Anime",
FilePathTemplate: "E:/Anime/Mushoku Tensei II Isekai Ittara Honki Dasu/[SubsPlease] Mushoku Tensei S2 - 00 (1080p) [9C362DC3].mkv",
MediaID: 146065,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 0, AniDBEpisode: "S1", Type: anime.LocalFileTypeMain},
{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
{Episode: 3, AniDBEpisode: "3", Type: anime.LocalFileTypeMain},
{Episode: 4, AniDBEpisode: "4", Type: anime.LocalFileTypeMain},
{Episode: 5, AniDBEpisode: "5", Type: anime.LocalFileTypeMain},
{Episode: 6, AniDBEpisode: "6", Type: anime.LocalFileTypeMain},
{Episode: 7, AniDBEpisode: "7", Type: anime.LocalFileTypeMain},
{Episode: 8, AniDBEpisode: "8", Type: anime.LocalFileTypeMain},
{Episode: 9, AniDBEpisode: "9", Type: anime.LocalFileTypeMain},
{Episode: 10, AniDBEpisode: "10", Type: anime.LocalFileTypeMain},
{Episode: 11, AniDBEpisode: "11", Type: anime.LocalFileTypeMain},
{Episode: 12, AniDBEpisode: "12", Type: anime.LocalFileTypeMain},
},
},
),
currentProgress: 0,
expectedNextEpisodeNumber: 0,
@@ -75,7 +83,7 @@ func TestNewAnimeEntry(t *testing.T) {
},
}
anilistClient := anilist.TestGetMockAnilistClient()
anilistClient := anilist.NewTestAnilistClient()
anilistPlatform := anilist_platform.NewAnilistPlatform(util.NewRef(anilistClient), util.NewRef(extension.NewUnifiedBank()), logger, database)
animeCollection, err := anilistPlatform.GetAnimeCollection(t.Context(), false)
if err != nil {
@@ -86,7 +94,7 @@ func TestNewAnimeEntry(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
anilist.TestModifyAnimeCollectionEntry(animeCollection, tt.mediaId, anilist.TestModifyAnimeCollectionEntryInput{
anilist.PatchAnimeCollectionEntry(animeCollection, tt.mediaId, anilist.AnimeCollectionEntryPatch{
Progress: new(tt.currentProgress), // Mock progress
})

View File

@@ -13,6 +13,9 @@ import (
)
func TestLocalFile_GetNormalizedPath(t *testing.T) {
if runtime.GOOS != "windows" {
t.Skip("Skipping windows-only test")
}
tests := []struct {
filePath string

View File

@@ -2,32 +2,48 @@ package anime_test
import (
"cmp"
"github.com/stretchr/testify/assert"
"seanime/internal/library/anime"
"slices"
"testing"
"github.com/stretchr/testify/assert"
)
func TestLocalFileWrapperEntry(t *testing.T) {
lfs := anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("/mnt/anime/", "/mnt/anime/One Piece/One Piece - %ep.mkv", 21, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1070, MetadataAniDbEpisode: "1070", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1071, MetadataAniDbEpisode: "1071", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1072, MetadataAniDbEpisode: "1072", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1073, MetadataAniDbEpisode: "1073", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1074, MetadataAniDbEpisode: "1074", MetadataType: anime.LocalFileTypeMain},
}),
anime.MockGenerateHydratedLocalFileGroupOptions("/mnt/anime/", "/mnt/anime/Blue Lock/Blue Lock - %ep.mkv", 22222, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "3", MetadataType: anime.LocalFileTypeMain},
}),
anime.MockGenerateHydratedLocalFileGroupOptions("/mnt/anime/", "/mnt/anime/Kimi ni Todoke/Kimi ni Todoke - %ep.mkv", 9656, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 0, MetadataAniDbEpisode: "S1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
}),
lfs := anime.NewTestLocalFiles(
anime.TestLocalFileGroup{
LibraryPath: "/mnt/anime/",
FilePathTemplate: "/mnt/anime/One Piece/One Piece - %ep.mkv",
MediaID: 21,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 1070, AniDBEpisode: "1070", Type: anime.LocalFileTypeMain},
{Episode: 1071, AniDBEpisode: "1071", Type: anime.LocalFileTypeMain},
{Episode: 1072, AniDBEpisode: "1072", Type: anime.LocalFileTypeMain},
{Episode: 1073, AniDBEpisode: "1073", Type: anime.LocalFileTypeMain},
{Episode: 1074, AniDBEpisode: "1074", Type: anime.LocalFileTypeMain},
},
},
anime.TestLocalFileGroup{
LibraryPath: "/mnt/anime/",
FilePathTemplate: "/mnt/anime/Blue Lock/Blue Lock - %ep.mkv",
MediaID: 22222,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
{Episode: 3, AniDBEpisode: "3", Type: anime.LocalFileTypeMain},
},
},
anime.TestLocalFileGroup{
LibraryPath: "/mnt/anime/",
FilePathTemplate: "/mnt/anime/Kimi ni Todoke/Kimi ni Todoke - %ep.mkv",
MediaID: 9656,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 0, AniDBEpisode: "S1", Type: anime.LocalFileTypeMain},
{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
},
},
)
tests := []struct {
@@ -100,17 +116,27 @@ func TestLocalFileWrapperEntry(t *testing.T) {
func TestLocalFileWrapperEntryProgressNumber(t *testing.T) {
lfs := anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("/mnt/anime/", "/mnt/anime/Kimi ni Todoke/Kimi ni Todoke - %ep.mkv", 9656, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 0, MetadataAniDbEpisode: "S1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
}),
anime.MockGenerateHydratedLocalFileGroupOptions("/mnt/anime/", "/mnt/anime/Kimi ni Todoke/Kimi ni Todoke - %ep.mkv", 9656_2, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1, MetadataAniDbEpisode: "S1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
}),
lfs := anime.NewTestLocalFiles(
anime.TestLocalFileGroup{
LibraryPath: "/mnt/anime/",
FilePathTemplate: "/mnt/anime/Kimi ni Todoke/Kimi ni Todoke - %ep.mkv",
MediaID: 9656,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 0, AniDBEpisode: "S1", Type: anime.LocalFileTypeMain},
{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
},
},
anime.TestLocalFileGroup{
LibraryPath: "/mnt/anime/",
FilePathTemplate: "/mnt/anime/Kimi ni Todoke/Kimi ni Todoke - %ep.mkv",
MediaID: 9656_2,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 1, AniDBEpisode: "S1", Type: anime.LocalFileTypeMain},
{Episode: 2, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
{Episode: 3, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
},
},
)
tests := []struct {

View File

@@ -1,591 +0,0 @@
{
"154587": {
"localFiles": [
{
"path": "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - 01 (1080p) [F02B9CEE].mkv",
"name": "[SubsPlease] Sousou no Frieren - 01 (1080p) [F02B9CEE].mkv",
"parsedInfo": {
"original": "[SubsPlease] Sousou no Frieren - 01 (1080p) [F02B9CEE].mkv",
"title": "Sousou no Frieren",
"releaseGroup": "SubsPlease",
"episode": "01"
},
"parsedFolderInfo": [
{
"original": "Sousou no Frieren",
"title": "Sousou no Frieren"
}
],
"metadata": {
"episode": 1,
"aniDBEpisode": "1",
"type": "main"
},
"locked": false,
"ignored": false,
"mediaId": 154587
},
{
"path": "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - 02 (1080p) [E5A85899].mkv",
"name": "[SubsPlease] Sousou no Frieren - 02 (1080p) [E5A85899].mkv",
"parsedInfo": {
"original": "[SubsPlease] Sousou no Frieren - 02 (1080p) [E5A85899].mkv",
"title": "Sousou no Frieren",
"releaseGroup": "SubsPlease",
"episode": "02"
},
"parsedFolderInfo": [
{
"original": "Sousou no Frieren",
"title": "Sousou no Frieren"
}
],
"metadata": {
"episode": 2,
"aniDBEpisode": "2",
"type": "main"
},
"locked": false,
"ignored": false,
"mediaId": 154587
},
{
"path": "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - 03 (1080p) [7EF3F175].mkv",
"name": "[SubsPlease] Sousou no Frieren - 03 (1080p) [7EF3F175].mkv",
"parsedInfo": {
"original": "[SubsPlease] Sousou no Frieren - 03 (1080p) [7EF3F175].mkv",
"title": "Sousou no Frieren",
"releaseGroup": "SubsPlease",
"episode": "03"
},
"parsedFolderInfo": [
{
"original": "Sousou no Frieren",
"title": "Sousou no Frieren"
}
],
"metadata": {
"episode": 3,
"aniDBEpisode": "3",
"type": "main"
},
"locked": false,
"ignored": false,
"mediaId": 154587
},
{
"path": "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - 04 (1080p) [5ED46803].mkv",
"name": "[SubsPlease] Sousou no Frieren - 04 (1080p) [5ED46803].mkv",
"parsedInfo": {
"original": "[SubsPlease] Sousou no Frieren - 04 (1080p) [5ED46803].mkv",
"title": "Sousou no Frieren",
"releaseGroup": "SubsPlease",
"episode": "04"
},
"parsedFolderInfo": [
{
"original": "Sousou no Frieren",
"title": "Sousou no Frieren"
}
],
"metadata": {
"episode": 4,
"aniDBEpisode": "4",
"type": "main"
},
"locked": false,
"ignored": false,
"mediaId": 154587
},
{
"path": "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - 05 (1080p) [8E3F8FA5].mkv",
"name": "[SubsPlease] Sousou no Frieren - 05 (1080p) [8E3F8FA5].mkv",
"parsedInfo": {
"original": "[SubsPlease] Sousou no Frieren - 05 (1080p) [8E3F8FA5].mkv",
"title": "Sousou no Frieren",
"releaseGroup": "SubsPlease",
"episode": "05"
},
"parsedFolderInfo": [
{
"original": "Sousou no Frieren",
"title": "Sousou no Frieren"
}
],
"metadata": {
"episode": 5,
"aniDBEpisode": "5",
"type": "main"
},
"locked": false,
"ignored": false,
"mediaId": 154587
}
],
"animeCollection": {
"MediaListCollection": {
"lists": [
{
"status": "CURRENT",
"entries": [
{
"id": 366875178,
"score": 9,
"progress": 4,
"status": "CURRENT",
"repeat": 0,
"private": false,
"startedAt": {
"year": 2023,
"month": 10
},
"completedAt": {},
"media": {
"id": 154587,
"idMal": 52991,
"siteUrl": "https://anilist.co/anime/154587",
"status": "RELEASING",
"season": "FALL",
"type": "ANIME",
"format": "TV",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/anime/banner/154587-ivXNJ23SM1xB.jpg",
"episodes": 28,
"synonyms": [
"Frieren at the Funeral",
"장송의 프리렌",
"Frieren: Oltre la Fine del Viaggio",
"คำอธิษฐานในวันที่จากลา Frieren",
"Frieren e a Jornada para o Além",
"Frieren Nach dem Ende der Reise",
"葬送的芙莉蓮",
"Frieren: Más allá del final del viaje",
"Frieren en el funeral",
"Sōsō no Furīren",
"Frieren. U kresu drogi",
"Frieren - Pháp sư tiễn táng",
"Фрирен, провожающая в последний путь"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Sousou no Frieren",
"romaji": "Sousou no Frieren",
"english": "Frieren: Beyond Journeys End",
"native": "葬送のフリーレン"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx154587-n1fmjRv4JQUd.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx154587-n1fmjRv4JQUd.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx154587-n1fmjRv4JQUd.jpg",
"color": "#d6f1c9"
},
"startDate": {
"year": 2023,
"month": 9,
"day": 29
},
"endDate": {},
"nextAiringEpisode": {
"airingAt": 1700229600,
"timeUntilAiring": 223940,
"episode": 11
},
"relations": {
"edges": [
{
"relationType": "SOURCE",
"node": {
"id": 118586,
"idMal": 126287,
"siteUrl": "https://anilist.co/manga/118586",
"status": "RELEASING",
"type": "MANGA",
"format": "MANGA",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/manga/banner/118586-1JLJiwaIlnBp.jpg",
"synonyms": [
"Frieren at the Funeral",
"장송의 프리렌",
"Frieren: Oltre la Fine del Viaggio",
"คำอธิษฐานในวันที่จากลา Frieren",
"Frieren e a Jornada para o Além",
"Frieren Nach dem Ende der Reise",
"葬送的芙莉蓮",
"Frieren After \"The End\"",
"Frieren: Remnants of the Departed",
"Frieren. U kresu drogi",
"Frieren",
"FRIEREN: Más allá del fin del viaje"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Sousou no Frieren",
"romaji": "Sousou no Frieren",
"english": "Frieren: Beyond Journeys End",
"native": "葬送のフリーレン"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/large/bx118586-F0Lp86XQV7du.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/medium/bx118586-F0Lp86XQV7du.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/small/bx118586-F0Lp86XQV7du.jpg",
"color": "#e4ae5d"
},
"startDate": {
"year": 2020,
"month": 4,
"day": 28
},
"endDate": {}
}
},
{
"relationType": "CHARACTER",
"node": {
"id": 169811,
"idMal": 56805,
"siteUrl": "https://anilist.co/anime/169811",
"status": "FINISHED",
"type": "ANIME",
"format": "MUSIC",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/anime/banner/169811-jgMVZlIdH19a.jpg",
"episodes": 1,
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Yuusha",
"romaji": "Yuusha",
"native": "勇者"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx169811-H0RW7WHkRlbH.png",
"large": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx169811-H0RW7WHkRlbH.png",
"medium": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx169811-H0RW7WHkRlbH.png"
},
"startDate": {
"year": 2023,
"month": 9,
"day": 29
},
"endDate": {
"year": 2023,
"month": 9,
"day": 29
}
}
},
{
"relationType": "SIDE_STORY",
"node": {
"id": 170068,
"idMal": 56885,
"siteUrl": "https://anilist.co/anime/170068",
"status": "RELEASING",
"season": "FALL",
"type": "ANIME",
"format": "ONA",
"synonyms": [
"Sousou no Frieren Mini Anime",
"Frieren: Beyond Journeys End Mini Anime",
"葬送のフリーレン ミニアニメ"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Sousou no Frieren: ●● no Mahou",
"romaji": "Sousou no Frieren: ●● no Mahou",
"native": "葬送のフリーレン ~●●の魔法~"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx170068-ijY3tCP8KoWP.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx170068-ijY3tCP8KoWP.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx170068-ijY3tCP8KoWP.jpg",
"color": "#bbd678"
},
"startDate": {
"year": 2023,
"month": 10,
"day": 11
},
"endDate": {}
}
}
]
}
}
}
]
}
]
}
}
},
"146065": {
"localFiles": [],
"animeCollection": {
"MediaListCollection": {
"lists": [
{
"status": "CURRENT",
"entries": [
{
"id": 366466419,
"score": 0,
"progress": 0,
"status": "CURRENT",
"repeat": 0,
"private": false,
"startedAt": {
"year": 2023,
"month": 10,
"day": 4
},
"completedAt": {
"year": 2023,
"month": 10,
"day": 9
},
"media": {
"id": 146065,
"idMal": 51179,
"siteUrl": "https://anilist.co/anime/146065",
"status": "FINISHED",
"season": "SUMMER",
"type": "ANIME",
"format": "TV",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/anime/banner/146065-33RDijfuxLLk.jpg",
"episodes": 13,
"synonyms": [
"ชาตินี้พี่ต้องเทพ ภาค 2",
"Mushoku Tensei: Isekai Ittara Honki Dasu 2nd Season",
"Mushoku Tensei II: Jobless Reincarnation",
"Mushoku Tensei II: Reencarnación desde cero",
"无职转生到了异世界就拿出真本事第2季"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Mushoku Tensei II: Isekai Ittara Honki Dasu",
"romaji": "Mushoku Tensei II: Isekai Ittara Honki Dasu",
"english": "Mushoku Tensei: Jobless Reincarnation Season 2",
"native": "無職転生 Ⅱ ~異世界行ったら本気だす~"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx146065-IjirxRK26O03.png",
"large": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx146065-IjirxRK26O03.png",
"medium": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx146065-IjirxRK26O03.png",
"color": "#35aee4"
},
"startDate": {
"year": 2023,
"month": 7,
"day": 3
},
"endDate": {
"year": 2023,
"month": 9,
"day": 25
},
"relations": {
"edges": [
{
"relationType": "SOURCE",
"node": {
"id": 85470,
"idMal": 70261,
"siteUrl": "https://anilist.co/manga/85470",
"status": "FINISHED",
"type": "MANGA",
"format": "NOVEL",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/manga/banner/85470-akkFSKH9aacB.jpg",
"synonyms": [
"เกิดชาตินี้พี่ต้องเทพ"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Mushoku Tensei: Isekai Ittara Honki Dasu",
"romaji": "Mushoku Tensei: Isekai Ittara Honki Dasu",
"english": "Mushoku Tensei: Jobless Reincarnation",
"native": "無職転生 ~異世界行ったら本気だす~"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/large/nx85470-jt6BF9tDWB2X.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/medium/nx85470-jt6BF9tDWB2X.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/small/nx85470-jt6BF9tDWB2X.jpg",
"color": "#f1bb1a"
},
"startDate": {
"year": 2014,
"month": 1,
"day": 23
},
"endDate": {
"year": 2022,
"month": 11,
"day": 25
}
}
},
{
"relationType": "ALTERNATIVE",
"node": {
"id": 85564,
"idMal": 70259,
"siteUrl": "https://anilist.co/manga/85564",
"status": "RELEASING",
"type": "MANGA",
"format": "MANGA",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/manga/banner/85564-Wy8IQU3Km61c.jpg",
"synonyms": [
"Mushoku Tensei: Uma segunda chance"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Mushoku Tensei: Isekai Ittara Honki Dasu",
"romaji": "Mushoku Tensei: Isekai Ittara Honki Dasu",
"english": "Mushoku Tensei: Jobless Reincarnation",
"native": "無職転生 ~異世界行ったら本気だす~"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/large/bx85564-egXRASF0x9B9.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/medium/bx85564-egXRASF0x9B9.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/small/bx85564-egXRASF0x9B9.jpg",
"color": "#e4ae0d"
},
"startDate": {
"year": 2014,
"month": 5,
"day": 2
},
"endDate": {}
}
},
{
"relationType": "PREQUEL",
"node": {
"id": 127720,
"idMal": 45576,
"siteUrl": "https://anilist.co/anime/127720",
"status": "FINISHED",
"season": "FALL",
"type": "ANIME",
"format": "TV",
"bannerImage": "https://s4.anilist.co/file/anilistcdn/media/anime/banner/127720-oBpHiMWQhFVN.jpg",
"episodes": 12,
"synonyms": [
"Mushoku Tensei: Jobless Reincarnation Part 2",
"ชาตินี้พี่ต้องเทพ พาร์ท 2"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Mushoku Tensei: Isekai Ittara Honki Dasu Part 2",
"romaji": "Mushoku Tensei: Isekai Ittara Honki Dasu Part 2",
"english": "Mushoku Tensei: Jobless Reincarnation Cour 2",
"native": "無職転生 ~異世界行ったら本気だす~ 第2クール"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx127720-ADJgIrUVMdU9.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx127720-ADJgIrUVMdU9.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx127720-ADJgIrUVMdU9.jpg",
"color": "#d6bb1a"
},
"startDate": {
"year": 2021,
"month": 10,
"day": 4
},
"endDate": {
"year": 2021,
"month": 12,
"day": 20
}
}
},
{
"relationType": "ALTERNATIVE",
"node": {
"id": 142989,
"idMal": 142765,
"siteUrl": "https://anilist.co/manga/142989",
"status": "RELEASING",
"type": "MANGA",
"format": "MANGA",
"synonyms": [
"Mushoku Tensei - Depressed Magician"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Mushoku Tensei: Isekai Ittara Honki Dasu - Shitsui no Majutsushi-hen",
"romaji": "Mushoku Tensei: Isekai Ittara Honki Dasu - Shitsui no Majutsushi-hen",
"native": "無職転生 ~異世界行ったら本気だす~ 失意の魔術師編"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/large/bx142989-jYDNHLwdER70.png",
"large": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/medium/bx142989-jYDNHLwdER70.png",
"medium": "https://s4.anilist.co/file/anilistcdn/media/manga/cover/small/bx142989-jYDNHLwdER70.png",
"color": "#e4bb28"
},
"startDate": {
"year": 2021,
"month": 12,
"day": 20
},
"endDate": {}
}
},
{
"relationType": "SEQUEL",
"node": {
"id": 166873,
"idMal": 55888,
"siteUrl": "https://anilist.co/anime/166873",
"status": "NOT_YET_RELEASED",
"season": "SPRING",
"type": "ANIME",
"format": "TV",
"episodes": 12,
"synonyms": [
"Mushoku Tensei: Jobless Reincarnation Season 2 Part 2",
"ชาตินี้พี่ต้องเทพ ภาค 2",
"Mushoku Tensei: Isekai Ittara Honki Dasu 2nd Season Part 2",
"Mushoku Tensei II: Jobless Reincarnation Part 2",
"Mushoku Tensei II: Reencarnación desde cero",
"无职转生到了异世界就拿出真本事第2季"
],
"isAdult": false,
"countryOfOrigin": "JP",
"title": {
"userPreferred": "Mushoku Tensei II: Isekai Ittara Honki Dasu Part 2",
"romaji": "Mushoku Tensei II: Isekai Ittara Honki Dasu Part 2",
"native": "無職転生 Ⅱ ~異世界行ったら本気だす~ 第2クール"
},
"coverImage": {
"extraLarge": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/large/bx166873-cqMLPB00KcEI.jpg",
"large": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/medium/bx166873-cqMLPB00KcEI.jpg",
"medium": "https://s4.anilist.co/file/anilistcdn/media/anime/cover/small/bx166873-cqMLPB00KcEI.jpg",
"color": "#6b501a"
},
"startDate": {
"year": 2024,
"month": 4
},
"endDate": {
"year": 2024,
"month": 6
}
}
}
]
}
}
}
]
}
]
}
}
}
}

View File

@@ -1,89 +1,86 @@
package anime_test
import (
"context"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata_provider"
"seanime/internal/database/db"
"seanime/internal/library/anime"
"seanime/internal/test_utils"
"seanime/internal/util"
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// Test to retrieve accurate missing episodes
// DEPRECATED
func TestNewMissingEpisodes(t *testing.T) {
t.Skip("Outdated test")
test_utils.InitTestProvider(t, test_utils.Anilist())
logger := util.NewLogger()
database, _ := db.NewDatabase(t.TempDir(), "test", logger)
// missing episodes now collapse each show down to the next thing you need,
// and anything silenced should be split into its own list.
h := newAnimeTestHarness(t)
metadataProvider := metadata_provider.GetFakeProvider(t, database)
anilistClient := anilist.TestGetMockAnilistClient()
animeCollection, err := anilistClient.AnimeCollection(context.Background(), nil)
if err != nil {
t.Fatal(err)
}
tests := []struct {
name string
mediaId int
localFiles []*anime.LocalFile
mediaAiredEpisodes int
currentProgress int
expectedMissingEpisodes int
}{
{
// Sousou no Frieren - 10 currently aired episodes
// User has 5 local files from ep 1 to 5, but only watched 4 episodes
// So we should expect to see 5 missing episodes
name: "Sousou no Frieren, missing 5 episodes",
mediaId: 154587,
localFiles: anime.MockHydratedLocalFiles(
anime.MockGenerateHydratedLocalFileGroupOptions("E:/Anime", "E:\\Anime\\Sousou no Frieren\\[SubsPlease] Sousou no Frieren - %ep (1080p) [F02B9CEE].mkv", 154587, []anime.MockHydratedLocalFileWrapperOptionsMetadata{
{MetadataEpisode: 1, MetadataAniDbEpisode: "1", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 2, MetadataAniDbEpisode: "2", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 3, MetadataAniDbEpisode: "3", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 4, MetadataAniDbEpisode: "4", MetadataType: anime.LocalFileTypeMain},
{MetadataEpisode: 5, MetadataAniDbEpisode: "5", MetadataType: anime.LocalFileTypeMain},
}),
),
mediaAiredEpisodes: 10,
currentProgress: 4,
//expectedMissingEpisodes: 5,
expectedMissingEpisodes: 1, // DEVNOTE: Now the value is 1 at most because everything else is merged
localFiles := anime.NewTestLocalFiles(
anime.TestLocalFileGroup{
LibraryPath: "/Anime",
FilePathTemplate: "/Anime/Frieren/%ep.mkv",
MediaID: 154587,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
{Episode: 3, AniDBEpisode: "3", Type: anime.LocalFileTypeMain},
{Episode: 4, AniDBEpisode: "4", Type: anime.LocalFileTypeMain},
{Episode: 5, AniDBEpisode: "5", Type: anime.LocalFileTypeMain},
},
},
}
anime.TestLocalFileGroup{
LibraryPath: "/Anime",
FilePathTemplate: "/Anime/Mushoku/%ep.mkv",
MediaID: 146065,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 0, AniDBEpisode: "S1", Type: anime.LocalFileTypeMain},
{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
},
},
anime.TestLocalFileGroup{
LibraryPath: "/Anime",
FilePathTemplate: "/Anime/OnePiece/%ep.mkv",
MediaID: 21,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 1069, AniDBEpisode: "1069", Type: anime.LocalFileTypeMain},
},
},
)
for _, tt := range tests {
// frieren should surface as a normal missing-episodes card.
patchAnimeCollectionEntry(t, h.animeCollection, 154587, anilist.AnimeCollectionEntryPatch{
Status: new(anilist.MediaListStatusCurrent),
Progress: new(4),
AiredEpisodes: new(10),
NextAiringEpisode: &anilist.BaseAnime_NextAiringEpisode{Episode: 11},
})
h.setEpisodeMetadata(t, 154587, []int{1, 2, 3, 4, 5, 6, 7, 8, 9, 10}, nil)
t.Run(tt.name, func(t *testing.T) {
// mushoku follows the episode-zero discrepancy path, but this one is silenced.
patchAnimeCollectionEntry(t, h.animeCollection, 146065, anilist.AnimeCollectionEntryPatch{
Status: new(anilist.MediaListStatusCurrent),
Progress: new(1),
AiredEpisodes: new(6),
NextAiringEpisode: &anilist.BaseAnime_NextAiringEpisode{Episode: 7},
})
h.setEpisodeMetadata(t, 146065, []int{1, 2, 3, 4, 5}, map[string]int{"S1": 1})
// Mock Anilist collection
anilist.TestModifyAnimeCollectionEntry(animeCollection, tt.mediaId, anilist.TestModifyAnimeCollectionEntryInput{
Progress: new(tt.currentProgress), // Mock progress
AiredEpisodes: new(tt.mediaAiredEpisodes),
NextAiringEpisode: &anilist.BaseAnime_NextAiringEpisode{
Episode: tt.mediaAiredEpisodes + 1,
},
})
// dropped entries should never show up here.
patchAnimeCollectionEntry(t, h.animeCollection, 21, anilist.AnimeCollectionEntryPatch{
Status: new(anilist.MediaListStatusDropped),
Progress: new(1060),
AiredEpisodes: new(1100),
NextAiringEpisode: &anilist.BaseAnime_NextAiringEpisode{Episode: 1101},
})
})
missing := h.newMissingEpisodes(t, localFiles, []int{146065})
if assert.NoError(t, err) {
missingData := anime.NewMissingEpisodes(&anime.NewMissingEpisodesOptions{
AnimeCollection: animeCollection,
LocalFiles: tt.localFiles,
MetadataProviderRef: util.NewRef(metadataProvider),
})
assert.Equal(t, tt.expectedMissingEpisodes, len(missingData.Episodes))
}
}
require.Len(t, missing.Episodes, 1)
require.Equal(t, 154587, missing.Episodes[0].BaseAnime.ID)
require.Equal(t, 6, missing.Episodes[0].EpisodeNumber)
require.Equal(t, "Episode 6 & 4 more", missing.Episodes[0].DisplayTitle)
require.Len(t, missing.SilencedEpisodes, 1)
require.Equal(t, 146065, missing.SilencedEpisodes[0].BaseAnime.ID)
require.Equal(t, 3, missing.SilencedEpisodes[0].EpisodeNumber)
require.Equal(t, "Episode 3 & 2 more", missing.SilencedEpisodes[0].DisplayTitle)
}

View File

@@ -13,7 +13,8 @@ import (
type ScheduleItem struct {
MediaId int `json:"mediaId"`
Title string `json:"title"`
// Time is in 15:04 format
// Time is in 15:04 format, UTC.
// The frontend should derive local time from DateTime instead.
Time string `json:"time"`
// DateTime is in UTC
DateTime time.Time `json:"dateTime"`
@@ -24,6 +25,10 @@ type ScheduleItem struct {
}
func GetScheduleItems(animeSchedule *anilist.AnimeAiringSchedule, animeCollection *anilist.AnimeCollection) []*ScheduleItem {
if animeSchedule == nil || animeCollection == nil || animeCollection.MediaListCollection == nil {
return []*ScheduleItem{}
}
animeEntryMap := make(map[int]*anilist.AnimeListEntry)
for _, list := range animeCollection.MediaListCollection.GetLists() {
for _, entry := range list.GetEntries() {
@@ -48,7 +53,7 @@ func GetScheduleItems(animeSchedule *anilist.AnimeAiringSchedule, animeCollectio
t := time.Unix(int64(node.GetAiringAt()), 0)
item := &ScheduleItem{
MediaId: entry.GetMedia().GetID(),
Title: *entry.GetMedia().GetTitle().GetUserPreferred(),
Title: entry.GetMedia().GetPreferredTitle(),
Time: t.UTC().Format("15:04"),
DateTime: t.UTC(),
Image: entry.GetMedia().GetCoverImageSafe(),

View File

@@ -0,0 +1,126 @@
package anime_test
import (
"seanime/internal/api/anilist"
"seanime/internal/customsource"
"seanime/internal/library/anime"
"testing"
"time"
"github.com/stretchr/testify/require"
)
// TestGetScheduleItemsFormatsDeduplicates verifies that schedule items are
// merged from all schedule buckets (Ongoing, OngoingNext, Upcoming) and
// deduplicated by media/episode/time, that movie entries are flagged and fall
// back to the English title when UserPreferred is nil, and that
// extension-backed media IDs are excluded from the result.
func TestGetScheduleItemsFormatsDeduplicates(t *testing.T) {
	// schedule items are merged from all schedule buckets,
	// deduped by media/episode/time
	h := newAnimeTestHarness(t)

	// Frieren (154587): a currently-watched entry with 12 aired episodes.
	patchAnimeCollectionEntry(t, h.animeCollection, 154587, anilist.AnimeCollectionEntryPatch{
		Status:        new(anilist.MediaListStatusCurrent),
		AiredEpisodes: new(12),
	})
	patchCollectionEntryEpisodeCount(t, h.animeCollection, 154587, 12)

	// Mushoku (146065): reshaped into a single-episode movie to exercise the movie flags.
	patchAnimeCollectionEntry(t, h.animeCollection, 146065, anilist.AnimeCollectionEntryPatch{
		Status:        new(anilist.MediaListStatusCurrent),
		AiredEpisodes: new(1),
	})
	patchCollectionEntryEpisodeCount(t, h.animeCollection, 146065, 1)
	movieFormat := anilist.MediaFormatMovie
	patchCollectionEntryFormat(t, h.animeCollection, 146065, movieFormat)
	movieEntry := findCollectionEntryByMediaID(t, h.animeCollection, 146065)
	fallbackTitle := "movie fallback"
	// Drop the preferred title so the schedule item must fall back to the English title.
	movieEntry.Media.Title.UserPreferred = nil
	movieEntry.Media.Title.English = &fallbackTitle

	// extension-backed ids should not leak into the schedule list.
	extensionEntry := findCollectionEntryByMediaID(t, h.animeCollection, 21)
	extensionID := customsource.GenerateMediaId(1, 99)
	extensionEntry.Media.ID = extensionID
	extensionEntry.Status = new(anilist.MediaListStatusCurrent)

	animeSchedule := &anilist.AnimeAiringSchedule{
		Ongoing: &anilist.AnimeAiringSchedule_Ongoing{Media: []*anilist.AnimeSchedule{
			newAnimeSchedule(154587,
				[]*anilist.AnimeSchedule_Previous_Nodes{newPreviousScheduleNode(1_700_000_100, 11, -100)},
				[]*anilist.AnimeSchedule_Upcoming_Nodes{newUpcomingScheduleNode(1_700_000_200, 12, 200)},
			),
			newAnimeSchedule(extensionID, nil, []*anilist.AnimeSchedule_Upcoming_Nodes{newUpcomingScheduleNode(1_700_000_050, 1, 50)}),
		}},
		// OngoingNext repeats frieren episode 12 at the same airing time: must be deduped below.
		OngoingNext: &anilist.AnimeAiringSchedule_OngoingNext{Media: []*anilist.AnimeSchedule{
			newAnimeSchedule(154587, nil, []*anilist.AnimeSchedule_Upcoming_Nodes{newUpcomingScheduleNode(1_700_000_200, 12, 200)}),
		}},
		Upcoming: &anilist.AnimeAiringSchedule_Upcoming{Media: []*anilist.AnimeSchedule{
			newAnimeSchedule(146065, nil, []*anilist.AnimeSchedule_Upcoming_Nodes{newUpcomingScheduleNode(1_700_000_300, 1, 300)}),
		}},
	}

	items := anime.GetScheduleItems(animeSchedule, h.animeCollection)

	// 3 items: frieren ep 11, frieren ep 12 (deduped), movie ep 1. Extension id excluded.
	require.Len(t, items, 3)
	require.Len(t, findScheduleItems(items, 154587, 12), 1)
	require.Empty(t, findScheduleItems(items, extensionID, 1))

	previousItem := findScheduleItem(t, items, 154587, 11)
	require.Equal(t, time.Unix(1_700_000_100, 0).UTC(), previousItem.DateTime)
	// Time is derived from DateTime in UTC.
	require.Equal(t, previousItem.DateTime.Format("15:04"), previousItem.Time)
	require.False(t, previousItem.IsSeasonFinale)
	require.False(t, previousItem.IsMovie)

	// Episode 12 of a 12-episode entry is the season finale.
	finaleItem := findScheduleItem(t, items, 154587, 12)
	require.True(t, finaleItem.IsSeasonFinale)

	movieItem := findScheduleItem(t, items, 146065, 1)
	require.Equal(t, fallbackTitle, movieItem.Title)
	require.True(t, movieItem.IsMovie)
	require.True(t, movieItem.IsSeasonFinale)
}
// TestGetScheduleItemsHandlesNilInputs checks the nil-safety guard of GetScheduleItems.
func TestGetScheduleItemsHandlesNilInputs(t *testing.T) {
	// nil inputs should just give the caller an empty slice instead of exploding.
	require.Empty(t, anime.GetScheduleItems(nil, nil))
}
// newAnimeSchedule builds a minimal AnimeSchedule fixture for mediaID.
// The Previous/Upcoming buckets are only attached when the corresponding
// node slice is non-nil, so tests can model partially populated schedules.
func newAnimeSchedule(mediaID int, previous []*anilist.AnimeSchedule_Previous_Nodes, upcoming []*anilist.AnimeSchedule_Upcoming_Nodes) *anilist.AnimeSchedule {
	schedule := anilist.AnimeSchedule{ID: mediaID}
	if previous != nil {
		schedule.Previous = &anilist.AnimeSchedule_Previous{Nodes: previous}
	}
	if upcoming != nil {
		schedule.Upcoming = &anilist.AnimeSchedule_Upcoming{Nodes: upcoming}
	}
	return &schedule
}
// newPreviousScheduleNode builds a previously-aired schedule node fixture.
func newPreviousScheduleNode(airingAt int, episode int, timeUntilAiring int) *anilist.AnimeSchedule_Previous_Nodes {
	var node anilist.AnimeSchedule_Previous_Nodes
	node.AiringAt = airingAt
	node.Episode = episode
	node.TimeUntilAiring = timeUntilAiring
	return &node
}
// newUpcomingScheduleNode builds an upcoming schedule node fixture.
func newUpcomingScheduleNode(airingAt int, episode int, timeUntilAiring int) *anilist.AnimeSchedule_Upcoming_Nodes {
	var node anilist.AnimeSchedule_Upcoming_Nodes
	node.AiringAt = airingAt
	node.Episode = episode
	node.TimeUntilAiring = timeUntilAiring
	return &node
}
// findScheduleItem returns the single item matching mediaID/episodeNumber,
// failing the test if zero or more than one match exists.
func findScheduleItem(t *testing.T, items []*anime.ScheduleItem, mediaID int, episodeNumber int) *anime.ScheduleItem {
	t.Helper()
	found := findScheduleItems(items, mediaID, episodeNumber)
	require.Len(t, found, 1)
	return found[0]
}
// findScheduleItems returns every item matching mediaID/episodeNumber.
// The result is always non-nil, even when nothing matches.
func findScheduleItems(items []*anime.ScheduleItem, mediaID int, episodeNumber int) []*anime.ScheduleItem {
	matches := make([]*anime.ScheduleItem, 0)
	for _, candidate := range items {
		if candidate.MediaId != mediaID || candidate.EpisodeNumber != episodeNumber {
			continue
		}
		matches = append(matches, candidate)
	}
	return matches
}

View File

@@ -0,0 +1,227 @@
package anime_test
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata"
"seanime/internal/api/metadata_provider"
"seanime/internal/extension"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/platforms/platform"
"seanime/internal/testutil"
"seanime/internal/util"
"sort"
"strconv"
"testing"
"github.com/stretchr/testify/require"
)
// animeTestHarness bundles the shared fixtures used by anime package tests:
// the mock AniList collection, a metadata provider with per-test overrides,
// and the interface refs expected by the library constructors.
type animeTestHarness struct {
	animeCollection     *anilist.AnimeCollection
	metadataProvider    *animeTestMetadataProvider
	platformRef         *util.Ref[platform.Platform]
	metadataProviderRef *util.Ref[metadata_provider.Provider]
}

// animeTestMetadataProvider wraps a real metadata provider and lets tests
// override the metadata returned for specific media IDs.
type animeTestMetadataProvider struct {
	metadata_provider.Provider
	// overrides maps media ID -> metadata returned instead of delegating
	// to the embedded Provider.
	overrides map[int]*metadata.AnimeMetadata
}
// newAnimeTestHarness builds the shared fixture stack for anime tests: an
// isolated test env + database, a metadata provider whose results can be
// overridden per media ID, and an AniList platform seeded with the mock
// anime collection.
func newAnimeTestHarness(t *testing.T) *animeTestHarness {
	t.Helper()
	// keep the real fixture stack, but make metadata overrides cheap and explicit per test.
	env := testutil.NewTestEnv(t)
	logger := util.NewLogger()
	database := env.MustNewDatabase(logger)
	metadataProvider := &animeTestMetadataProvider{
		Provider:  metadata_provider.NewTestProviderWithEnv(env, database),
		overrides: make(map[int]*metadata.AnimeMetadata),
	}
	anilistClient := anilist.NewTestAnilistClient()
	anilistPlatform := anilist_platform.NewAnilistPlatform(util.NewRef(anilistClient), util.NewRef(extension.NewUnifiedBank()), logger, database)
	animeCollection, err := anilistPlatform.GetAnimeCollection(t.Context(), false)
	require.NoError(t, err)
	// Wrap the concrete values as interface refs, the shape the constructors expect.
	metadataProviderInterface := metadata_provider.Provider(metadataProvider)
	platformInterface := platform.Platform(anilistPlatform)
	return &animeTestHarness{
		animeCollection:     animeCollection,
		metadataProvider:    metadataProvider,
		platformRef:         util.NewRef(platformInterface),
		metadataProviderRef: util.NewRef(metadataProviderInterface),
	}
}
// GetAnimeMetadata returns the per-test override for mediaID when one was
// registered, otherwise delegates to the embedded provider.
func (p *animeTestMetadataProvider) GetAnimeMetadata(platform metadata.Platform, mediaID int) (*metadata.AnimeMetadata, error) {
	if animeMetadata, ok := p.overrides[mediaID]; ok {
		return animeMetadata, nil
	}
	return p.Provider.GetAnimeMetadata(platform, mediaID)
}
// findEntry returns the collection entry for mediaID, failing the test when absent.
func (h *animeTestHarness) findEntry(t *testing.T, mediaID int) *anilist.AnimeListEntry {
	t.Helper()
	return findCollectionEntryByMediaID(t, h.animeCollection, mediaID)
}
// setEpisodeMetadata registers a metadata override for mediaID with the given
// main episode numbers plus optional specials (AniDB episode key -> episode
// number). It returns the override so callers can tweak individual episodes.
func (h *animeTestHarness) setEpisodeMetadata(t *testing.T, mediaID int, mainEpisodes []int, specials map[string]int) *metadata.AnimeMetadata {
	t.Helper()
	// most anime tests only need stable episode numbering, not a full metadata payload.
	media := h.findEntry(t, mediaID).Media
	animeMetadata := anime.NewAnimeMetadataFromEpisodeCount(media, mainEpisodes)
	for aniDBEpisode, episodeNumber := range specials {
		// Specials reuse the media's own title/banner as placeholder content.
		animeMetadata.Episodes[aniDBEpisode] = &metadata.EpisodeMetadata{
			Title:                 media.GetTitleSafe(),
			Image:                 media.GetBannerImageSafe(),
			EpisodeNumber:         episodeNumber,
			Episode:               aniDBEpisode,
			AbsoluteEpisodeNumber: episodeNumber,
			HasImage:              true,
		}
		animeMetadata.SpecialCount++
	}
	h.metadataProvider.overrides[mediaID] = animeMetadata
	return animeMetadata
}
// setCustomMetadata registers a fully custom metadata override for mediaID.
func (h *animeTestHarness) setCustomMetadata(mediaID int, animeMetadata *metadata.AnimeMetadata) {
	h.metadataProvider.overrides[mediaID] = animeMetadata
}
// clearMetadataAirDates blanks the air date of every episode in the override
// registered for mediaID. A no-op when no override exists.
func (h *animeTestHarness) clearMetadataAirDates(mediaID int) {
	animeMetadata, ok := h.metadataProvider.overrides[mediaID]
	if !ok {
		return
	}
	for _, episode := range animeMetadata.Episodes {
		episode.AirDate = ""
	}
}
// newMetadataWithAirDates builds metadata for mediaID whose episodes carry
// the given air dates (episode number -> date string).
// NOTE(review): unlike setEpisodeMetadata, this does not register the result
// as an override — pair it with setCustomMetadata when that is needed.
func (h *animeTestHarness) newMetadataWithAirDates(t *testing.T, mediaID int, airDates map[int]string) *metadata.AnimeMetadata {
	t.Helper()
	// this is just for the fallback path where current episode count is inferred from aired dates.
	episodes := make([]int, 0, len(airDates))
	for episodeNumber := range airDates {
		episodes = append(episodes, episodeNumber)
	}
	// Map iteration order is random; sort for deterministic episode numbering.
	sort.Ints(episodes)
	animeMetadata := anime.NewAnimeMetadataFromEpisodeCount(h.findEntry(t, mediaID).Media, episodes)
	for episodeNumber, airDate := range airDates {
		animeMetadata.Episodes[strconv.Itoa(episodeNumber)].AirDate = airDate
	}
	return animeMetadata
}
// clearNextAiringEpisode removes the next-airing data from one entry's media.
func (h *animeTestHarness) clearNextAiringEpisode(t *testing.T, mediaID int) {
	t.Helper()
	h.findEntry(t, mediaID).Media.NextAiringEpisode = nil
}
// clearAllNextAiringEpisodes removes next-airing data from every entry in the
// collection, so tests can opt specific entries back in.
func (h *animeTestHarness) clearAllNextAiringEpisodes() {
	lists := h.animeCollection.GetMediaListCollection().GetLists()
	for _, list := range lists {
		entries := list.GetEntries()
		for _, entry := range entries {
			entry.Media.NextAiringEpisode = nil
		}
	}
}
// clearEpisodeCount removes the total episode count from one entry's media.
func (h *animeTestHarness) clearEpisodeCount(t *testing.T, mediaID int) {
	t.Helper()
	h.findEntry(t, mediaID).Media.Episodes = nil
}
// newLibraryCollection builds a LibraryCollection from the harness collection
// and the given local files, failing the test on error.
func (h *animeTestHarness) newLibraryCollection(t *testing.T, localFiles []*anime.LocalFile) *anime.LibraryCollection {
	t.Helper()
	libraryCollection, err := anime.NewLibraryCollection(t.Context(), &anime.NewLibraryCollectionOptions{
		AnimeCollection:     h.animeCollection,
		LocalFiles:          localFiles,
		PlatformRef:         h.platformRef,
		MetadataProviderRef: h.metadataProviderRef,
	})
	require.NoError(t, err)
	return libraryCollection
}
// newEntryDownloadInfo builds the download info for mediaID given the user's
// local files, progress, and list status, failing the test on error.
func (h *animeTestHarness) newEntryDownloadInfo(t *testing.T, mediaID int, localFiles []*anime.LocalFile, progress int, status anilist.MediaListStatus) *anime.EntryDownloadInfo {
	t.Helper()
	// Resolve metadata through the harness provider so per-test overrides apply.
	animeMetadata, err := h.metadataProvider.GetAnimeMetadata(metadata.AnilistPlatform, mediaID)
	require.NoError(t, err)
	info, err := anime.NewEntryDownloadInfo(&anime.NewEntryDownloadInfoOptions{
		LocalFiles:          localFiles,
		Progress:            new(progress),
		Status:              new(status),
		Media:               h.findEntry(t, mediaID).Media,
		MetadataProviderRef: h.metadataProviderRef,
		AnimeMetadata:       animeMetadata,
	})
	require.NoError(t, err)
	return info
}
// newMissingEpisodes computes missing episodes for the harness collection with
// the given local files; silencedMediaIDs go to the silenced list.
func (h *animeTestHarness) newMissingEpisodes(t *testing.T, localFiles []*anime.LocalFile, silencedMediaIDs []int) *anime.MissingEpisodes {
	t.Helper()
	missingEpisodes := anime.NewMissingEpisodes(&anime.NewMissingEpisodesOptions{
		AnimeCollection:     h.animeCollection,
		LocalFiles:          localFiles,
		SilencedMediaIds:    silencedMediaIDs,
		MetadataProviderRef: h.metadataProviderRef,
	})
	require.NotNil(t, missingEpisodes)
	return missingEpisodes
}
// newUpcomingEpisodes computes the upcoming episodes for the harness collection.
func (h *animeTestHarness) newUpcomingEpisodes(t *testing.T) *anime.UpcomingEpisodes {
	t.Helper()
	upcomingEpisodes := anime.NewUpcomingEpisodes(&anime.NewUpcomingEpisodesOptions{
		AnimeCollection:     h.animeCollection,
		MetadataProviderRef: h.metadataProviderRef,
	})
	require.NotNil(t, upcomingEpisodes)
	return upcomingEpisodes
}
// patchAnimeCollectionEntry applies the patch to the entry for mediaID and
// returns the patched entry, failing the test if the entry is absent.
func patchAnimeCollectionEntry(t *testing.T, collection *anilist.AnimeCollection, mediaID int, patch anilist.AnimeCollectionEntryPatch) *anilist.AnimeListEntry {
	t.Helper()
	anilist.PatchAnimeCollectionEntry(collection, mediaID, patch)
	return findCollectionEntryByMediaID(t, collection, mediaID)
}
// patchCollectionEntryFormat overrides the media format of one entry.
func patchCollectionEntryFormat(t *testing.T, collection *anilist.AnimeCollection, mediaID int, format anilist.MediaFormat) {
	t.Helper()
	entry := findCollectionEntryByMediaID(t, collection, mediaID)
	entry.Media.Format = &format
}
// patchCollectionEntryEpisodeCount pins the media's total episode count and
// clears next-airing data so the count is treated as final.
func patchCollectionEntryEpisodeCount(t *testing.T, collection *anilist.AnimeCollection, mediaID int, episodeCount int) {
	t.Helper()
	entry := findCollectionEntryByMediaID(t, collection, mediaID)
	entry.Media.Episodes = &episodeCount
	entry.Media.NextAiringEpisode = nil
}
// patchEntryMediaStatus overrides the media (airing) status of one entry.
func patchEntryMediaStatus(t *testing.T, collection *anilist.AnimeCollection, mediaID int, status anilist.MediaStatus) {
	t.Helper()
	findCollectionEntryByMediaID(t, collection, mediaID).Media.Status = new(status)
}
// findCollectionEntryByMediaID returns the list entry for mediaID,
// failing the test when the collection does not contain it.
func findCollectionEntryByMediaID(t *testing.T, collection *anilist.AnimeCollection, mediaID int) *anilist.AnimeListEntry {
	t.Helper()
	entry, found := collection.GetListEntryFromAnimeId(mediaID)
	require.True(t, found)
	return entry
}

View File

@@ -5,76 +5,38 @@ import (
"strings"
)
type MockHydratedLocalFileOptions struct {
FilePath string
LibraryPath string
MediaId int
MetadataEpisode int
MetadataAniDbEpisode string
MetadataType LocalFileType
type TestLocalFileEpisode struct {
Episode int
AniDBEpisode string
Type LocalFileType
}
func MockHydratedLocalFile(opts MockHydratedLocalFileOptions) *LocalFile {
lf := NewLocalFile(opts.FilePath, opts.LibraryPath)
lf.MediaId = opts.MediaId
lf.Metadata = &LocalFileMetadata{
AniDBEpisode: opts.MetadataAniDbEpisode,
Episode: opts.MetadataEpisode,
Type: opts.MetadataType,
}
return lf
type TestLocalFileGroup struct {
LibraryPath string
FilePathTemplate string
MediaID int
Episodes []TestLocalFileEpisode
}
// MockHydratedLocalFiles creates a slice of LocalFiles based on the provided options
//
// Example:
//
// MockHydratedLocalFiles(
// MockHydratedLocalFileOptions{
// FilePath: "/mnt/anime/One Piece/One Piece - 1070.mkv",
// LibraryPath: "/mnt/anime/",
// MetadataEpisode: 1070,
// MetadataAniDbEpisode: "1070",
// MetadataType: LocalFileTypeMain,
// },
// MockHydratedLocalFileOptions{
// ...
// },
// )
func MockHydratedLocalFiles(opts ...[]MockHydratedLocalFileOptions) []*LocalFile {
lfs := make([]*LocalFile, 0, len(opts))
for _, opt := range opts {
for _, o := range opt {
lfs = append(lfs, MockHydratedLocalFile(o))
// NewTestLocalFiles expands one or more local-file groups into hydrated LocalFiles.
// FilePathTemplate replaces each %ep token with the episode number.
func NewTestLocalFiles(groups ...TestLocalFileGroup) []*LocalFile {
localFiles := make([]*LocalFile, 0)
for _, group := range groups {
for _, episode := range group.Episodes {
lf := NewLocalFile(strings.ReplaceAll(group.FilePathTemplate, "%ep", strconv.Itoa(episode.Episode)), group.LibraryPath)
if lf.ParsedData != nil && lf.ParsedData.Episode == "" {
lf.ParsedData.Episode = strconv.Itoa(episode.Episode)
}
lf.MediaId = group.MediaID
lf.Metadata = &LocalFileMetadata{
AniDBEpisode: episode.AniDBEpisode,
Episode: episode.Episode,
Type: episode.Type,
}
localFiles = append(localFiles, lf)
}
}
return lfs
}
type MockHydratedLocalFileWrapperOptionsMetadata struct {
MetadataEpisode int
MetadataAniDbEpisode string
MetadataType LocalFileType
}
// MockGenerateHydratedLocalFileGroupOptions generates a slice of MockHydratedLocalFileOptions based on a template string and metadata
//
// Example:
//
// MockGenerateHydratedLocalFileGroupOptions("/mnt/anime/", "One Piece/One Piece - %ep.mkv", 21, []MockHydratedLocalFileWrapperOptionsMetadata{
// {MetadataEpisode: 1070, MetadataAniDbEpisode: "1070", MetadataType: LocalFileTypeMain},
// })
func MockGenerateHydratedLocalFileGroupOptions(libraryPath string, template string, mId int, m []MockHydratedLocalFileWrapperOptionsMetadata) []MockHydratedLocalFileOptions {
opts := make([]MockHydratedLocalFileOptions, 0, len(m))
for _, metadata := range m {
opts = append(opts, MockHydratedLocalFileOptions{
FilePath: strings.ReplaceAll(template, "%ep", strconv.Itoa(metadata.MetadataEpisode)),
LibraryPath: libraryPath,
MediaId: mId,
MetadataEpisode: metadata.MetadataEpisode,
MetadataAniDbEpisode: metadata.MetadataAniDbEpisode,
MetadataType: metadata.MetadataType,
})
}
return opts
return localFiles
}

View File

@@ -0,0 +1,50 @@
package anime_test
import (
"seanime/internal/api/anilist"
"testing"
"github.com/stretchr/testify/require"
)
// TestNewUpcomingEpisodesSortsAndHydratesMetadata verifies that upcoming
// episodes are ordered by time until airing, that each item carries the
// metadata of the exact next episode, and that dropped entries are excluded.
func TestNewUpcomingEpisodesSortsAndHydratesMetadata(t *testing.T) {
	// upcoming episodes should be ordered by time until airing,
	// and each item should carry metadata for the exact next episode when we have it.
	h := newAnimeTestHarness(t)
	// Start from a clean slate: only the entries patched below have next-airing data.
	h.clearAllNextAiringEpisodes()

	// frieren: airs later (TimeUntilAiring 200), so it should sort second.
	patchAnimeCollectionEntry(t, h.animeCollection, 154587, anilist.AnimeCollectionEntryPatch{
		Status:            new(anilist.MediaListStatusCurrent),
		NextAiringEpisode: &anilist.BaseAnime_NextAiringEpisode{Episode: 8, AiringAt: 1_700_000_200, TimeUntilAiring: 200},
	})
	frierenMetadata := h.setEpisodeMetadata(t, 154587, []int{1, 2, 3, 4, 5, 6, 7, 8}, nil)
	frierenMetadata.Episodes["8"].Title = "frieren next"

	// mushoku: airs sooner (TimeUntilAiring 50), so it should sort first.
	patchAnimeCollectionEntry(t, h.animeCollection, 146065, anilist.AnimeCollectionEntryPatch{
		Status:            new(anilist.MediaListStatusCurrent),
		NextAiringEpisode: &anilist.BaseAnime_NextAiringEpisode{Episode: 3, AiringAt: 1_700_000_050, TimeUntilAiring: 50},
	})
	mushokuMetadata := h.setEpisodeMetadata(t, 146065, []int{1, 2, 3}, nil)
	mushokuMetadata.Episodes["3"].Title = "mushoku next"

	// dropped entries still have next-airing data in fixtures sometimes, but they should be filtered out.
	patchAnimeCollectionEntry(t, h.animeCollection, 21, anilist.AnimeCollectionEntryPatch{
		Status:            new(anilist.MediaListStatusDropped),
		NextAiringEpisode: &anilist.BaseAnime_NextAiringEpisode{Episode: 1100, AiringAt: 1_700_000_010, TimeUntilAiring: 10},
	})

	upcoming := h.newUpcomingEpisodes(t)

	require.Len(t, upcoming.Episodes, 2)
	// mushoku first: smallest TimeUntilAiring, hydrated with its episode-3 metadata.
	require.Equal(t, 146065, upcoming.Episodes[0].MediaId)
	require.Equal(t, 3, upcoming.Episodes[0].EpisodeNumber)
	require.Equal(t, 50, upcoming.Episodes[0].TimeUntilAiring)
	require.NotNil(t, upcoming.Episodes[0].EpisodeMetadata)
	require.Equal(t, "mushoku next", upcoming.Episodes[0].EpisodeMetadata.Title)
	// frieren second, hydrated with its episode-8 metadata.
	require.Equal(t, 154587, upcoming.Episodes[1].MediaId)
	require.Equal(t, 8, upcoming.Episodes[1].EpisodeNumber)
	require.Equal(t, 200, upcoming.Episodes[1].TimeUntilAiring)
	require.NotNil(t, upcoming.Episodes[1].EpisodeMetadata)
	require.Equal(t, "frieren next", upcoming.Episodes[1].EpisodeMetadata.Title)
}

View File

@@ -323,11 +323,38 @@ func (ad *AutoDownloader) checkForNewEpisodes(ctx context.Context, isSimulation
// runData holds all data needed for checking new episodes
type runData struct {
rules []*anime.AutoDownloaderRule
profiles []*anime.AutoDownloaderProfile
localFileWrapper *anime.LocalFileWrapper
torrents []*NormalizedTorrent
existingTorrents []*torrent_client.Torrent
rules []*anime.AutoDownloaderRule
profiles []*anime.AutoDownloaderProfile
localFileWrapper *anime.LocalFileWrapper
torrents []*NormalizedTorrent
existingTorrentHashes map[string]struct{}
}
func normalizeTorrentHash(hash string) string {
return strings.ToLower(strings.TrimSpace(hash))
}
func addTorrentHash(hashSet map[string]struct{}, hash string) {
normalizedHash := normalizeTorrentHash(hash)
if normalizedHash == "" {
return
}
hashSet[normalizedHash] = struct{}{}
}
func buildExistingTorrentHashes(existingTorrents []*torrent_client.Torrent, existingDebridTorrents []*debrid.TorrentItem) map[string]struct{} {
hashes := make(map[string]struct{}, len(existingTorrents)+len(existingDebridTorrents))
for _, item := range existingTorrents {
addTorrentHash(hashes, item.Hash)
}
for _, item := range existingDebridTorrents {
addTorrentHash(hashes, item.Hash)
}
return hashes
}
// fetchRunData fetches all data needed for checking new episodes
@@ -402,12 +429,23 @@ func (ad *AutoDownloader) fetchRunData(ctx context.Context, ruleIDs ...uint) (*r
existingTorrents, _ = ad.torrentClientRepository.GetList(&torrent_client.GetListOptions{})
}
var existingDebridTorrents []*debrid.TorrentItem
if ad.settings.UseDebrid && ad.debridClientRepository != nil && ad.debridClientRepository.HasProvider() {
provider, err := ad.debridClientRepository.GetProvider()
if err == nil {
existingDebridTorrents, err = provider.GetTorrents()
if err != nil {
ad.logger.Debug().Err(err).Msg("autodownloader: Failed to get debrid torrents for duplicate check")
}
}
}
return &runData{
rules: rules,
profiles: profiles,
localFileWrapper: lfWrapper,
torrents: torrents,
existingTorrents: existingTorrents,
rules: rules,
profiles: profiles,
localFileWrapper: lfWrapper,
torrents: torrents,
existingTorrentHashes: buildExistingTorrentHashes(existingTorrents, existingDebridTorrents),
}, nil
}
@@ -444,7 +482,7 @@ func (ad *AutoDownloader) groupTorrentCandidates(data *runData) map[uint]map[int
// Process each torrent
for _, t := range data.torrents {
// Skip if already exists
if ad.isTorrentAlreadyDownloaded(t, data.existingTorrents) {
if ad.isTorrentAlreadyDownloaded(t, data.existingTorrentHashes) {
continue
}
@@ -495,14 +533,10 @@ func (ad *AutoDownloader) getRuleProfiles(rule *anime.AutoDownloaderRule, profil
return ruleProfiles
}
// isTorrentAlreadyDownloaded checks if a torrent already exists in the client
func (ad *AutoDownloader) isTorrentAlreadyDownloaded(t *NormalizedTorrent, existingTorrents []*torrent_client.Torrent) bool {
for _, et := range existingTorrents {
if et.Hash == t.InfoHash {
return true
}
}
return false
// isTorrentAlreadyDownloaded checks if a torrent already exists in the client or debrid service.
func (ad *AutoDownloader) isTorrentAlreadyDownloaded(t *NormalizedTorrent, existingTorrentHashes map[string]struct{}) bool {
_, exists := existingTorrentHashes[normalizeTorrentHash(t.InfoHash)]
return exists
}
// isEpisodeAlreadyHandled checks if an episode is already in the library or queue but not delayed
@@ -631,6 +665,7 @@ func (ad *AutoDownloader) handleDelayedItem(
storedItem.Link = bestCandidate.Torrent.Link
storedItem.Hash = bestCandidate.Torrent.InfoHash
storedItem.Magnet = bestCandidate.Torrent.MagnetLink
storedItem.TorrentName = bestCandidate.Torrent.Name
storedItem.Score = bestCandidate.Score
// Do NOT reset DelayUntil, keep the original timer
@@ -865,6 +900,7 @@ func (ad *AutoDownloader) queueTorrentForDelay(isSimulation bool, rule *anime.Au
Episode: episode,
Link: candidate.Torrent.Link,
Hash: candidate.Torrent.InfoHash,
Magnet: candidate.Torrent.MagnetLink,
TorrentName: candidate.Torrent.Name,
Downloaded: false,
IsDelayed: true,
@@ -1097,6 +1133,7 @@ downloadScope:
// Add the torrent to the debrid provider and queue it
_, err := ad.debridClientRepository.AddAndQueueTorrent(debrid.AddTorrentOptions{
MagnetLink: magnet,
InfoHash: t.InfoHash,
SelectFileId: "all", // RD-only, select all files
}, rule.Destination, rule.MediaId)
if err != nil {
@@ -1115,6 +1152,7 @@ downloadScope:
// Add the torrent to the debrid provider
_, err = debridProvider.AddTorrent(debrid.AddTorrentOptions{
MagnetLink: magnet,
InfoHash: t.InfoHash,
SelectFileId: "all", // RD-only, select all files
})
if err != nil {

View File

@@ -5,9 +5,9 @@ import (
"seanime/internal/api/anilist"
"seanime/internal/database/db_bridge"
"seanime/internal/database/models"
"seanime/internal/debrid/debrid"
hibiketorrent "seanime/internal/extension/hibike/torrent"
"seanime/internal/library/anime"
"seanime/internal/test_utils"
"seanime/internal/torrent_clients/torrent_client"
"seanime/internal/util"
"testing"
@@ -233,14 +233,26 @@ func TestGetRuleProfiles(t *testing.T) {
}
}
func TestBuildExistingTorrentHashes(t *testing.T) {
hashes := buildExistingTorrentHashes(
[]*torrent_client.Torrent{{Hash: " hash1 "}, {Hash: "HASH2"}},
[]*debrid.TorrentItem{{Hash: "hash3"}, {Hash: "HaSh4"}, {Hash: ""}},
)
assert.Len(t, hashes, 4)
assert.Contains(t, hashes, "hash1")
assert.Contains(t, hashes, "hash2")
assert.Contains(t, hashes, "hash3")
assert.Contains(t, hashes, "hash4")
}
func TestIsTorrentAlreadyDownloaded(t *testing.T) {
ad := &AutoDownloader{}
existingTorrents := []*torrent_client.Torrent{
{Hash: "hash1"},
{Hash: "hash2"},
{Hash: "hash3"},
}
existingTorrentHashes := buildExistingTorrentHashes(
[]*torrent_client.Torrent{{Hash: "hash1"}, {Hash: "hash2"}},
[]*debrid.TorrentItem{{Hash: "hash3"}},
)
tests := []struct {
name string
@@ -250,7 +262,14 @@ func TestIsTorrentAlreadyDownloaded(t *testing.T) {
{
name: "torrent exists",
torrent: &NormalizedTorrent{
AnimeTorrent: &hibiketorrent.AnimeTorrent{InfoHash: "hash2"},
AnimeTorrent: &hibiketorrent.AnimeTorrent{InfoHash: " HASH2 "},
},
expected: true,
},
{
name: "torrent exists in debrid hashes",
torrent: &NormalizedTorrent{
AnimeTorrent: &hibiketorrent.AnimeTorrent{InfoHash: "hash3"},
},
expected: true,
},
@@ -265,7 +284,7 @@ func TestIsTorrentAlreadyDownloaded(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
result := ad.isTorrentAlreadyDownloaded(tt.torrent, existingTorrents)
result := ad.isTorrentAlreadyDownloaded(tt.torrent, existingTorrentHashes)
assert.Equal(t, tt.expected, result)
})
}
@@ -953,9 +972,7 @@ func TestIsProfileValidChecks(t *testing.T) {
}
func TestIntegration(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := anilist.TestGetMockAnilistClient()
anilistClient := anilist.NewTestAnilistClient()
animeCollection, err := anilistClient.AnimeCollection(context.Background(), nil)
require.NoError(t, err)
@@ -1139,15 +1156,15 @@ func TestIntegration(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Create a new fake
fake := &Fake{
harness := &TestHarness{
GetLatestResults: tt.torrents,
SearchResults: tt.torrents,
}
ad := fake.New(t)
ad := harness.New(t)
ad.SetAnimeCollection(animeCollection)
// Add local files to the database
_, err = fake.Database.InsertLocalFiles(&models.LocalFiles{Value: []byte("[]")})
_, err = harness.Database.InsertLocalFiles(&models.LocalFiles{Value: []byte("[]")})
require.NoError(t, err)
// Set user progress
@@ -1193,13 +1210,17 @@ func TestIntegration(t *testing.T) {
}
func TestDelayIntegration(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := anilist.TestGetMockAnilistClient()
anilistClient := anilist.NewTestAnilistClient()
animeCollection, err := anilistClient.AnimeCollection(context.Background(), nil)
require.NoError(t, err)
mediaId := 154587 // Sousou no Frieren
testAnimeCollection := animeCollection.Copy()
require.NotNil(t, testAnimeCollection)
entry, found := testAnimeCollection.GetListEntryFromAnimeId(mediaId)
require.True(t, found)
progress := 0
entry.Progress = &progress
tests := []struct {
name string
@@ -1229,7 +1250,7 @@ func TestDelayIntegration(t *testing.T) {
{
name: "Queue item for delay",
torrents: []*hibiketorrent.AnimeTorrent{
{Name: "[SubsPlease] Sousou no Frieren - 01 (1080p).mkv", InfoHash: "hash1", Seeders: 100},
{Name: "[SubsPlease] Sousou no Frieren - 01 (1080p).mkv", InfoHash: "hash1", MagnetLink: "magnet:?xt=urn:btih:hash1", Seeders: 100},
},
profile: &anime.AutoDownloaderProfile{
Conditions: []anime.AutoDownloaderCondition{{Term: "1080p", Action: anime.AutoDownloaderProfileRuleFormatActionScore, Score: 10}},
@@ -1242,6 +1263,7 @@ func TestDelayIntegration(t *testing.T) {
assert.True(t, items[0].IsDelayed) // MUST be true
assert.False(t, items[0].Downloaded) // will always be false
assert.Equal(t, "hash1", items[0].Hash)
assert.Equal(t, "magnet:?xt=urn:btih:hash1", items[0].Magnet)
},
},
{
@@ -1274,7 +1296,7 @@ func TestDelayIntegration(t *testing.T) {
Hash: "hash1",
IsDelayed: true,
DelayUntil: time.Now().Add(-1 * time.Minute), // Expired
TorrentData: mustMarshalTorrent(&NormalizedTorrent{AnimeTorrent: &hibiketorrent.AnimeTorrent{Name: "[SubsPlease] Sousou no Frieren - 01 (1080p).mkv", InfoHash: "hash1"}}),
TorrentData: mustMarshalTorrent(&NormalizedTorrent{AnimeTorrent: &hibiketorrent.AnimeTorrent{Name: "[SubsPlease] Sousou no Frieren - 01 (1080p).mkv", InfoHash: "hash1"}, ExtensionID: "fake"}),
},
},
profile: &anime.AutoDownloaderProfile{
@@ -1291,7 +1313,7 @@ func TestDelayIntegration(t *testing.T) {
{
name: "Upgrade delayed item",
torrents: []*hibiketorrent.AnimeTorrent{
{Name: "[BetterGroup] Sousou no Frieren - 01 (1080p).mkv", InfoHash: "hash_better", Seeders: 100}, // Score 20
{Name: "[BetterGroup] Sousou no Frieren - 01 (1080p).mkv", InfoHash: "hash_better", MagnetLink: "magnet:?xt=urn:btih:hash_better", Seeders: 100}, // Score 20
},
existingItems: []*models.AutoDownloaderItem{
{
@@ -1299,10 +1321,11 @@ func TestDelayIntegration(t *testing.T) {
MediaID: mediaId,
Episode: 1,
Hash: "hash_bad",
Magnet: "magnet:?xt=urn:btih:hash_bad",
Score: 10,
IsDelayed: true,
DelayUntil: time.Now().Add(5 * time.Minute), // Not expired
TorrentData: mustMarshalTorrent(&NormalizedTorrent{AnimeTorrent: &hibiketorrent.AnimeTorrent{Name: "Name", InfoHash: "hash_bad"}}),
TorrentData: mustMarshalTorrent(&NormalizedTorrent{AnimeTorrent: &hibiketorrent.AnimeTorrent{Name: "Name", InfoHash: "hash_bad"}, ExtensionID: "fake"}),
},
},
profile: &anime.AutoDownloaderProfile{
@@ -1318,6 +1341,7 @@ func TestDelayIntegration(t *testing.T) {
require.Len(t, items, 1)
assert.True(t, items[0].IsDelayed)
assert.Equal(t, "hash_better", items[0].Hash) // Updated hash
assert.Equal(t, "magnet:?xt=urn:btih:hash_better", items[0].Magnet)
assert.Equal(t, 20, items[0].Score)
},
},
@@ -1325,9 +1349,9 @@ func TestDelayIntegration(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
fake := &Fake{GetLatestResults: tt.torrents, SearchResults: tt.torrents}
fake := &TestHarness{GetLatestResults: tt.torrents, SearchResults: tt.torrents}
ad := fake.New(t)
ad.SetAnimeCollection(animeCollection)
ad.SetAnimeCollection(testAnimeCollection.Copy())
// Setup DB
_, _ = fake.Database.InsertLocalFiles(&models.LocalFiles{Value: []byte("[]")})

View File

@@ -6,6 +6,7 @@ import (
"seanime/internal/extension"
hibiketorrent "seanime/internal/extension/hibike/torrent"
"seanime/internal/library/anime"
"seanime/internal/util"
"seanime/internal/util/limiter"
"sync"
"time"
@@ -93,6 +94,11 @@ func (ad *AutoDownloader) fetchTorrentsFromProviders(
wg.Add(1)
go func(pExt extension.AnimeTorrentProviderExtension) {
defer wg.Done()
defer util.HandlePanicInModuleThen("autodownloader/fetchTorrentsFromProviders/provider", func() {
ad.logger.Error().
Str("providerId", pExt.GetID()).
Msg("autodownloader: Recovered from torrent provider panic")
})
// Set up a rate limiter for a single provider
rateLimiter := limiter.NewLimiter(time.Second, 2) // 2 reqs per sec
@@ -171,6 +177,12 @@ func (ad *AutoDownloader) fetchTorrentsFromProviders(
for releaseGroup, resolutions := range releaseGroupToResolutions {
go func(rg string, res []string) {
defer pWg.Done()
defer util.HandlePanicInModuleThen("autodownloader/fetchTorrentsFromProviders/search", func() {
ad.logger.Error().
Str("providerId", pExt.GetID()).
Str("releaseGroup", rg).
Msg("autodownloader: Recovered from torrent provider search panic")
})
foundForGroup := false
// For each release group, search with a specific resolution

View File

@@ -17,7 +17,7 @@ import (
func TestComparison(t *testing.T) {
database, _ := db.NewDatabase(t.TempDir(), "test", util.NewLogger())
ad := AutoDownloader{
metadataProviderRef: util.NewRef(metadata_provider.GetFakeProvider(t, database)),
metadataProviderRef: util.NewRef(metadata_provider.NewTestProvider(t, database)),
settings: &models.AutoDownloaderSettings{
EnableSeasonCheck: true,
},
@@ -142,7 +142,7 @@ func TestComparison(t *testing.T) {
func TestComparison2(t *testing.T) {
database, _ := db.NewDatabase(t.TempDir(), "test", util.NewLogger())
ad := AutoDownloader{
metadataProviderRef: util.NewRef(metadata_provider.GetFakeProvider(t, database)),
metadataProviderRef: util.NewRef(metadata_provider.NewTestProvider(t, database)),
settings: &models.AutoDownloaderSettings{
EnableSeasonCheck: true,
},
@@ -239,7 +239,7 @@ func TestComparison2(t *testing.T) {
func TestComparison3(t *testing.T) {
database, _ := db.NewDatabase(t.TempDir(), "test", util.NewLogger())
ad := AutoDownloader{
metadataProviderRef: util.NewRef(metadata_provider.GetFakeProvider(t, database)),
metadataProviderRef: util.NewRef(metadata_provider.NewTestProvider(t, database)),
settings: &models.AutoDownloaderSettings{
EnableSeasonCheck: true,
},

View File

@@ -8,46 +8,45 @@ import (
"seanime/internal/events"
"seanime/internal/extension"
hibiketorrent "seanime/internal/extension/hibike/torrent"
"seanime/internal/test_utils"
"seanime/internal/testutil"
"seanime/internal/torrents/torrent"
"seanime/internal/util"
"seanime/internal/util/filecache"
"testing"
"github.com/stretchr/testify/require"
)
type Fake struct {
type TestHarness struct {
SearchResults []*hibiketorrent.AnimeTorrent
GetLatestResults []*hibiketorrent.AnimeTorrent
Database *db.Database
Providers map[string]hibiketorrent.AnimeProvider
DefaultProvider string
}
type FakeTorrentProvider struct {
fake *Fake
type TestTorrentProvider struct {
harness *TestHarness
}
func (f FakeTorrentProvider) Search(opts hibiketorrent.AnimeSearchOptions) ([]*hibiketorrent.AnimeTorrent, error) {
return f.fake.SearchResults, nil
func (f TestTorrentProvider) Search(opts hibiketorrent.AnimeSearchOptions) ([]*hibiketorrent.AnimeTorrent, error) {
return f.harness.SearchResults, nil
}
func (f FakeTorrentProvider) SmartSearch(opts hibiketorrent.AnimeSmartSearchOptions) ([]*hibiketorrent.AnimeTorrent, error) {
return f.fake.SearchResults, nil
func (f TestTorrentProvider) SmartSearch(opts hibiketorrent.AnimeSmartSearchOptions) ([]*hibiketorrent.AnimeTorrent, error) {
return f.harness.SearchResults, nil
}
func (f FakeTorrentProvider) GetTorrentInfoHash(torrent *hibiketorrent.AnimeTorrent) (string, error) {
func (f TestTorrentProvider) GetTorrentInfoHash(torrent *hibiketorrent.AnimeTorrent) (string, error) {
return torrent.InfoHash, nil
}
func (f FakeTorrentProvider) GetTorrentMagnetLink(torrent *hibiketorrent.AnimeTorrent) (string, error) {
func (f TestTorrentProvider) GetTorrentMagnetLink(torrent *hibiketorrent.AnimeTorrent) (string, error) {
return torrent.MagnetLink, nil
}
func (f FakeTorrentProvider) GetLatest() ([]*hibiketorrent.AnimeTorrent, error) {
return f.fake.GetLatestResults, nil
func (f TestTorrentProvider) GetLatest() ([]*hibiketorrent.AnimeTorrent, error) {
return f.harness.GetLatestResults, nil
}
func (f FakeTorrentProvider) GetSettings() hibiketorrent.AnimeProviderSettings {
func (f TestTorrentProvider) GetSettings() hibiketorrent.AnimeProviderSettings {
return hibiketorrent.AnimeProviderSettings{
CanSmartSearch: false,
SmartSearchFilters: nil,
@@ -56,29 +55,37 @@ func (f FakeTorrentProvider) GetSettings() hibiketorrent.AnimeProviderSettings {
}
}
var _ hibiketorrent.AnimeProvider = (*FakeTorrentProvider)(nil)
var _ hibiketorrent.AnimeProvider = (*TestTorrentProvider)(nil)
func (f *Fake) New(t *testing.T) *AutoDownloader {
logger := util.NewLogger()
database, err := db.NewDatabase("", test_utils.ConfigData.Database.Name, logger)
require.NoError(t, err)
func (f *TestHarness) New(t *testing.T) *AutoDownloader {
t.Helper()
env := testutil.NewTestEnv(t)
logger := env.Logger()
database := env.NewDatabase("")
f.Database = database
filecacher, err := filecache.NewCacher(t.TempDir())
require.NoError(t, err)
filecacher := env.NewCacher("autodownloader")
extensionBankRef := util.NewRef(extension.NewUnifiedBank())
// Fake Extension
provider := FakeTorrentProvider{fake: f}
ext := extension.NewAnimeTorrentProviderExtension(&extension.Extension{
ID: "fake",
Type: extension.TypeAnimeTorrentProvider,
Name: "Fake Provider",
}, provider)
providers := f.Providers
if len(providers) == 0 {
providers = map[string]hibiketorrent.AnimeProvider{
"fake": TestTorrentProvider{harness: f},
}
}
extensionBankRef.Get().Set("fake", ext)
for id, provider := range providers {
ext := extension.NewAnimeTorrentProviderExtension(&extension.Extension{
ID: id,
Type: extension.TypeAnimeTorrentProvider,
Name: id,
}, provider)
extensionBankRef.Get().Set(id, ext)
}
metadataProvider := metadata_provider.NewProvider(&metadata_provider.NewProviderImplOptions{
Logger: logger,
@@ -94,6 +101,14 @@ func (f *Fake) New(t *testing.T) *AutoDownloader {
})
metadataProviderRef := util.NewRef[metadata_provider.Provider](metadataProvider)
defaultProvider := f.DefaultProvider
if defaultProvider == "" {
defaultProvider = "fake"
for id := range providers {
defaultProvider = id
break
}
}
//torrentClientRepository := torrent_client.NewRepository(&torrent_client.NewRepositoryOptions{
// Logger: logger,
// QbittorrentClient: &qbittorrent.Client{},
@@ -114,7 +129,7 @@ func (f *Fake) New(t *testing.T) *AutoDownloader {
})
ad.SetSettings(&models.AutoDownloaderSettings{
Provider: "fake",
Provider: defaultProvider,
Interval: 15,
Enabled: true,
DownloadAutomatically: false,

View File

@@ -209,6 +209,10 @@ func (as *AutoScanner) scan() {
as.logger.Error().Err(err).Msg("autoscanner: Failed to get settings")
return
}
if settings.Library == nil {
as.logger.Error().Msg("autoscanner: Library settings are not set")
return
}
if settings.Library.LibraryPath == "" {
as.logger.Error().Msg("autoscanner: Library path is not set")

View File

@@ -1,9 +1,228 @@
package autoscanner
import (
"seanime/internal/api/anilist"
"seanime/internal/database/db"
"seanime/internal/database/models"
"seanime/internal/events"
"seanime/internal/testutil"
"seanime/internal/util"
"sync"
"sync/atomic"
"testing"
"time"
"github.com/stretchr/testify/require"
)
func TestAutoScanner(t *testing.T) {
func TestNewAutoScannerAppliesDefaultsAndSetters(t *testing.T) {
// the constructor should apply defaults, and then the setters should override them.
h := newAutoScannerTestHarness(t, false, 0)
require.Equal(t, 15*time.Second, h.autoScanner.waitTime)
require.NotNil(t, h.autoScanner.fileActionCh)
require.NotNil(t, h.autoScanner.scannedCh)
require.False(t, h.autoScanner.enabled)
collection := &anilist.AnimeCollection{}
h.autoScanner.SetAnimeCollection(collection)
require.Same(t, collection, h.autoScanner.animeCollection)
settings := models.LibrarySettings{
AutoScan: true,
}
h.autoScanner.SetSettings(settings)
require.True(t, h.autoScanner.enabled)
require.Equal(t, settings, h.autoScanner.settings)
custom := newAutoScannerTestHarness(t, true, 25*time.Millisecond)
require.Equal(t, 25*time.Millisecond, custom.autoScanner.waitTime)
require.True(t, custom.autoScanner.enabled)
}
func TestAutoScannerNotifyQueuesSignalsAndMissedActions(t *testing.T) {
// Notify should send a signal on the channel when enabled, but if we're currently waiting it should mark that we missed an action instead.
var nilScanner *AutoScanner
nilScanner.Notify()
t.Run("enabled queue gets a signal", func(t *testing.T) {
h := newAutoScannerTestHarness(t, true, 10*time.Millisecond)
h.autoScanner.Notify()
require.Eventually(t, func() bool {
return len(h.autoScanner.fileActionCh) == 1
}, time.Second, 5*time.Millisecond)
})
t.Run("disabled scanner stays quiet", func(t *testing.T) {
h := newAutoScannerTestHarness(t, false, 10*time.Millisecond)
h.autoScanner.Notify()
time.Sleep(20 * time.Millisecond)
require.Zero(t, len(h.autoScanner.fileActionCh))
})
t.Run("waiting scanner marks the action as missed", func(t *testing.T) {
h := newAutoScannerTestHarness(t, true, 10*time.Millisecond)
h.autoScanner.waiting = true
h.autoScanner.Notify()
require.True(t, h.autoScanner.missedAction)
require.Zero(t, len(h.autoScanner.fileActionCh))
})
}
func TestAutoScannerWaitAndScanDebouncesMissedActions(t *testing.T) {
// when another file event lands during the wait window, we should restart the timer and still scan once.
h := newAutoScannerTestHarness(t, true, 25*time.Millisecond)
h.seedSettings(t, "")
startedAt := time.Now()
done := make(chan struct{})
go func() {
defer close(done)
h.autoScanner.waitAndScan()
}()
time.Sleep(10 * time.Millisecond)
h.autoScanner.Notify()
require.Eventually(t, func() bool {
return h.wsEventManager.count(events.AutoScanCompleted) == 1
}, time.Second, 5*time.Millisecond)
select {
case <-done:
case <-time.After(time.Second):
t.Fatal("waitAndScan did not finish")
}
require.Equal(t, 1, h.wsEventManager.count(events.AutoScanStarted))
require.Equal(t, 1, h.wsEventManager.count(events.AutoScanCompleted))
require.False(t, h.autoScanner.waiting)
require.False(t, h.autoScanner.missedAction)
require.GreaterOrEqual(t, time.Since(startedAt), 40*time.Millisecond)
}
func TestAutoScannerRunNowBypassesEnabledFlag(t *testing.T) {
// even if the scanner is disabled, RunNow should trigger a scan
h := newAutoScannerTestHarness(t, false, 10*time.Millisecond)
h.seedSettings(t, "")
h.autoScanner.RunNow()
require.Eventually(t, func() bool {
return h.wsEventManager.count(events.AutoScanCompleted) == 1
}, time.Second, 5*time.Millisecond)
require.Equal(t, []string{events.AutoScanStarted, events.AutoScanCompleted}, h.wsEventManager.types())
require.Zero(t, h.refreshCalls.Load())
require.False(t, h.autoScanner.scanning.Load())
}
func TestAutoScannerScanSkipsConcurrentRuns(t *testing.T) {
// the compare-and-swap guard should keep a second scan from even starting.
h := newAutoScannerTestHarness(t, true, 10*time.Millisecond)
h.autoScanner.scanning.Store(true)
t.Cleanup(func() {
h.autoScanner.scanning.Store(false)
})
h.autoScanner.scan()
require.Empty(t, h.wsEventManager.types())
require.True(t, h.autoScanner.scanning.Load())
}
type autoScannerTestHarness struct {
database *db.Database
wsEventManager *recordingWSEventManager
autoScanner *AutoScanner
refreshCalls atomic.Int32
}
func newAutoScannerTestHarness(t *testing.T, enabled bool, waitTime time.Duration) *autoScannerTestHarness {
t.Helper()
resetAutoscannerTestState(t)
env := testutil.NewTestEnv(t)
logger := util.NewLogger()
database := env.MustNewDatabase(logger)
wsEventManager := &recordingWSEventManager{MockWSEventManager: events.NewMockWSEventManager(logger)}
h := &autoScannerTestHarness{
database: database,
wsEventManager: wsEventManager,
}
h.autoScanner = New(&NewAutoScannerOptions{
Database: database,
Logger: logger,
Enabled: enabled,
WaitTime: waitTime,
WSEventManager: wsEventManager,
OnRefreshCollection: func() {
h.refreshCalls.Add(1)
},
})
return h
}
func (h *autoScannerTestHarness) seedSettings(t *testing.T, libraryPath string) {
t.Helper()
_, err := h.database.UpsertSettings(&models.Settings{
BaseModel: models.BaseModel{ID: 1},
Library: &models.LibrarySettings{
LibraryPath: libraryPath,
},
})
require.NoError(t, err)
}
type recordingWSEventManager struct {
*events.MockWSEventManager
mu sync.Mutex
typesSent []string
}
func (m *recordingWSEventManager) SendEvent(t string, _ interface{}) {
m.mu.Lock()
defer m.mu.Unlock()
m.typesSent = append(m.typesSent, t)
}
func (m *recordingWSEventManager) count(eventType string) int {
m.mu.Lock()
defer m.mu.Unlock()
count := 0
for _, t := range m.typesSent {
if t == eventType {
count++
}
}
return count
}
func (m *recordingWSEventManager) types() []string {
m.mu.Lock()
defer m.mu.Unlock()
ret := make([]string, len(m.typesSent))
copy(ret, m.typesSent)
return ret
}
func resetAutoscannerTestState(t *testing.T) {
t.Helper()
previousSettings := db.CurrSettings
db.CurrSettings = nil
t.Cleanup(func() {
db.CurrSettings = previousSettings
})
}

View File

@@ -1,16 +0,0 @@
package filesystem
import (
"seanime/internal/util"
"testing"
)
func TestDeleteEmptyDirectories(t *testing.T) {
path := "E:/ANIME_TEST"
RemoveEmptyDirectories(path, util.NewLogger())
t.Log("All empty directories removed successfully.")
}

View File

@@ -240,7 +240,7 @@ func (fm *FillerManager) HydrateOnlinestreamFillerData(mId int, episodes []*onli
if fm == nil {
return
}
if episodes == nil || len(episodes) == 0 {
if len(episodes) == 0 {
return
}

View File

@@ -1,58 +1,662 @@
package playbackmanager_test
package playbackmanager
import (
"errors"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata_provider"
"seanime/internal/continuity"
"seanime/internal/database/db"
"seanime/internal/database/models"
"seanime/internal/events"
"seanime/internal/extension"
"seanime/internal/library/playbackmanager"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/library/anime"
"seanime/internal/mediaplayers/mediaplayer"
"seanime/internal/platforms/platform"
"seanime/internal/testmocks"
"seanime/internal/testutil"
"seanime/internal/util"
"seanime/internal/util/filecache"
"sync"
"testing"
"time"
"github.com/samber/mo"
"github.com/stretchr/testify/require"
)
func getPlaybackManager(t *testing.T) (*playbackmanager.PlaybackManager, *anilist.AnimeCollection, error) {
func TestPlaybackManagerUnitNewDefaultsAndSetters(t *testing.T) {
// keep the constructor honest so the rest of the tests can rely on the default state.
h := newPlaybackManagerTestHarness(t)
logger := util.NewLogger()
require.NotNil(t, h.playbackManager.settings)
require.NotNil(t, h.playbackManager.historyMap)
require.Empty(t, h.playbackManager.historyMap)
require.True(t, h.playbackManager.nextEpisodeLocalFile.IsAbsent())
require.True(t, h.playbackManager.animeCollection.IsAbsent())
wsEventManager := events.NewMockWSEventManager(logger)
collection := &anilist.AnimeCollection{}
h.playbackManager.SetAnimeCollection(collection)
require.True(t, h.playbackManager.animeCollection.IsPresent())
require.Same(t, collection, h.playbackManager.animeCollection.MustGet())
database, err := db.NewDatabase(test_utils.ConfigData.Path.DataDir, test_utils.ConfigData.Database.Name, logger)
settings := &Settings{AutoPlayNextEpisode: true}
h.playbackManager.SetSettings(settings)
require.Same(t, settings, h.playbackManager.settings)
if err != nil {
t.Fatalf("error while creating database, %v", err)
h.playbackManager.SetPlaylistActive(true)
require.True(t, h.playbackManager.isPlaylistActive.Load())
h.playbackManager.SetPlaylistActive(false)
require.False(t, h.playbackManager.isPlaylistActive.Load())
}
func TestPlaybackManagerUnitCheckOrLoadAnimeCollectionCachesResult(t *testing.T) {
// the first call should hit the platform, and later calls should reuse the cached collection.
h := newPlaybackManagerTestHarness(t)
expectedCollection := &anilist.AnimeCollection{}
h.platform = testmocks.NewFakePlatformBuilder().WithAnimeCollection(expectedCollection).Build()
h.playbackManager.platformRef = util.NewRef[platform.Platform](h.platform)
require.NoError(t, h.playbackManager.checkOrLoadAnimeCollection())
require.Equal(t, 1, h.platform.AnimeCollectionCalls())
require.Same(t, expectedCollection, h.playbackManager.animeCollection.MustGet())
require.NoError(t, h.playbackManager.checkOrLoadAnimeCollection())
require.Equal(t, 1, h.platform.AnimeCollectionCalls())
h.playbackManager.animeCollection = mo.None[*anilist.AnimeCollection]()
h.platform = testmocks.NewFakePlatformBuilder().WithAnimeCollectionError(errors.New("collection failed")).Build()
h.playbackManager.platformRef = util.NewRef[platform.Platform](h.platform)
err := h.playbackManager.checkOrLoadAnimeCollection()
require.EqualError(t, err, "collection failed")
require.Equal(t, 1, h.platform.AnimeCollectionCalls())
}
func TestPlaybackManagerUnitGetNextEpisodeAndCurrentMediaID(t *testing.T) {
// these are tiny state readers, so keep them focused on the state machine rules.
h := newPlaybackManagerTestHarness(t)
localFiles := anime.NewTestLocalFiles(anime.TestLocalFileGroup{
LibraryPath: "/Anime",
FilePathTemplate: "/Anime/Frieren/%ep.mkv",
MediaID: 154587,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
},
})
_, err := h.playbackManager.GetCurrentMediaID()
require.EqualError(t, err, "no media is currently playing")
require.Nil(t, h.playbackManager.GetNextEpisode())
h.playbackManager.currentLocalFile = mo.Some(localFiles[0])
h.playbackManager.currentPlaybackType = StreamPlayback
mediaID, err := h.playbackManager.GetCurrentMediaID()
require.NoError(t, err)
require.Equal(t, 154587, mediaID)
require.Nil(t, h.playbackManager.GetNextEpisode())
h.playbackManager.currentPlaybackType = LocalFilePlayback
h.playbackManager.nextEpisodeLocalFile = mo.Some(localFiles[1])
require.Same(t, localFiles[1], h.playbackManager.GetNextEpisode())
}
func TestPlaybackManagerUnitPlaybackStatusSubscriptionLifecycle(t *testing.T) {
// subscription cleanup matters because the manager broadcasts on these channels from goroutines.
h := newPlaybackManagerTestHarness(t)
subscriber := h.playbackManager.SubscribeToPlaybackStatus("unit")
storedSubscriber, ok := h.playbackManager.playbackStatusSubscribers.Get("unit")
require.True(t, ok)
require.Same(t, subscriber, storedSubscriber)
require.False(t, subscriber.Canceled.Load())
h.playbackManager.UnsubscribeFromPlaybackStatus("unit")
require.True(t, subscriber.Canceled.Load())
_, ok = h.playbackManager.playbackStatusSubscribers.Get("unit")
require.False(t, ok)
_, channelOpen := <-subscriber.EventCh
require.False(t, channelOpen)
// a second unsubscribe should stay quiet instead of panicking.
h.playbackManager.UnsubscribeFromPlaybackStatus("unit")
}
func TestPlaybackManagerUnitRegisterMediaPlayerCallbackStopsAfterFalse(t *testing.T) {
// callbacks are just another subscriber under the hood, so we can drive one directly.
h := newPlaybackManagerTestHarness(t)
received := make(chan PlaybackEvent, 1)
h.playbackManager.RegisterMediaPlayerCallback(func(event PlaybackEvent) bool {
received <- event
return false
})
var subscriber *PlaybackStatusSubscriber
require.Eventually(t, func() bool {
h.playbackManager.playbackStatusSubscribers.Range(func(_ string, value *PlaybackStatusSubscriber) bool {
subscriber = value
return false
})
return subscriber != nil
}, time.Second, 10*time.Millisecond)
subscriber.EventCh <- PlaybackErrorEvent{Reason: "boom"}
select {
case event := <-received:
require.Equal(t, "playback_error", event.Type())
require.Equal(t, "boom", event.(PlaybackErrorEvent).Reason)
case <-time.After(time.Second):
t.Fatal("callback did not receive playback event")
}
filecacher, err := filecache.NewCacher(t.TempDir())
require.Eventually(t, func() bool {
return len(h.playbackManager.playbackStatusSubscribers.Keys()) == 0
}, time.Second, 10*time.Millisecond)
}
func TestPlaybackManagerUnitAutoPlayNextEpisodeBranches(t *testing.T) {
localFiles := anime.NewTestLocalFiles(anime.TestLocalFileGroup{
LibraryPath: "/Anime",
FilePathTemplate: "/Anime/Frieren/%ep.mkv",
MediaID: 154587,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
},
})
t.Run("disabled autoplay is a no-op", func(t *testing.T) {
// if the setting is off, the queue should stay untouched.
h := newPlaybackManagerTestHarness(t)
h.playbackManager.currentPlaybackType = LocalFilePlayback
h.playbackManager.nextEpisodeLocalFile = mo.Some(localFiles[1])
require.NoError(t, h.playbackManager.AutoPlayNextEpisode())
require.True(t, h.playbackManager.nextEpisodeLocalFile.IsPresent())
})
t.Run("missing next episode stays quiet", func(t *testing.T) {
// multiple clients can race this request, so no-next should just return nil.
h := newPlaybackManagerTestHarness(t)
h.playbackManager.settings.AutoPlayNextEpisode = true
h.playbackManager.currentPlaybackType = LocalFilePlayback
require.NoError(t, h.playbackManager.AutoPlayNextEpisode())
require.True(t, h.playbackManager.nextEpisodeLocalFile.IsAbsent())
})
t.Run("play errors get wrapped", func(t *testing.T) {
// once autoplay is enabled and a next file exists, play-next failures should bubble up with context.
h := newPlaybackManagerTestHarness(t)
h.playbackManager.settings.AutoPlayNextEpisode = true
h.playbackManager.currentPlaybackType = LocalFilePlayback
h.playbackManager.nextEpisodeLocalFile = mo.Some(localFiles[1])
err := h.playbackManager.AutoPlayNextEpisode()
require.EqualError(t, err, "failed to auto play next episode: could not play next episode")
require.True(t, h.playbackManager.nextEpisodeLocalFile.IsPresent())
})
}
func TestPlaybackManagerUnitStartPlayingAndStreamingValidation(t *testing.T) {
t.Run("local playback fails if collection refresh fails", func(t *testing.T) {
// this should stop before touching the media player when collection loading fails.
h := newPlaybackManagerTestHarness(t)
h.platform = testmocks.NewFakePlatformBuilder().WithAnimeCollectionError(errors.New("collection failed")).Build()
h.playbackManager.platformRef = util.NewRef[platform.Platform](h.platform)
err := h.playbackManager.StartPlayingUsingMediaPlayer(&StartPlayingOptions{Payload: "/Anime/Frieren/1.mkv"})
require.EqualError(t, err, "collection failed")
})
t.Run("stream playback blocks offline mode", func(t *testing.T) {
// offline mode is a hard stop even when the caller passed a media and episode.
h := newPlaybackManagerTestHarness(t)
h.playbackManager.isOfflineRef.Set(true)
err := h.playbackManager.StartStreamingUsingMediaPlayer("stream", &StartPlayingOptions{Payload: "https://example.com"}, testmocks.NewBaseAnime(154587, "Frieren"), "1")
require.EqualError(t, err, "cannot stream when offline")
require.True(t, h.playbackManager.currentStreamMedia.IsAbsent())
})
t.Run("stream playback rejects missing data", func(t *testing.T) {
// callers need to provide both the media and the anidb episode before we can track a stream.
media := testmocks.NewBaseAnime(154587, "Frieren")
h := newPlaybackManagerTestHarness(t)
err := h.playbackManager.StartStreamingUsingMediaPlayer("stream", &StartPlayingOptions{Payload: "https://example.com"}, nil, "1")
require.EqualError(t, err, "cannot start streaming, not enough data provided")
err = h.playbackManager.StartStreamingUsingMediaPlayer("stream", &StartPlayingOptions{Payload: "https://example.com"}, media, "")
require.EqualError(t, err, "cannot start streaming, not enough data provided")
})
}
func TestPlaybackManagerUnitLocalPlaybackStatusAndProgressTracking(t *testing.T) {
// this drives the local-file tracking handlers directly so state changes and progress syncing stay covered.
h := newPlaybackManagerTestHarness(t)
h.seedAutoUpdateProgress(t, true)
media := testmocks.NewBaseAnimeBuilder(154587, "Frieren").
WithUserPreferredTitle("Frieren").
WithEpisodes(12).
Build()
localFiles := anime.NewTestLocalFiles(anime.TestLocalFileGroup{
LibraryPath: "/Anime",
FilePathTemplate: "/Anime/Frieren/%ep.mkv",
MediaID: media.ID,
Episodes: []anime.TestLocalFileEpisode{
{Episode: 1, AniDBEpisode: "1", Type: anime.LocalFileTypeMain},
{Episode: 2, AniDBEpisode: "2", Type: anime.LocalFileTypeMain},
{Episode: 3, AniDBEpisode: "3", Type: anime.LocalFileTypeMain},
},
})
wrapper := anime.NewLocalFileWrapper(localFiles)
wrapperEntry, ok := wrapper.GetLocalEntryById(media.ID)
require.True(t, ok)
h.playbackManager.currentMediaListEntry = mo.Some(&anilist.AnimeListEntry{
Media: media,
Progress: new(1),
})
h.playbackManager.currentLocalFile = mo.Some(localFiles[1])
h.playbackManager.currentLocalFileWrapperEntry = mo.Some(wrapperEntry)
subscriber := h.playbackManager.SubscribeToPlaybackStatus("unit-local")
status := &mediaplayer.PlaybackStatus{
Filename: "2.mkv",
Filepath: localFiles[1].Path,
CompletionPercentage: 0.5,
CurrentTimeInSeconds: 600,
DurationInSeconds: 1200,
PlaybackType: mediaplayer.PlaybackTypeFile,
}
h.playbackManager.handlePlaybackStatus(status)
changedEvent := expectPlaybackEvent[PlaybackStatusChangedEvent](t, subscriber.EventCh)
require.Equal(t, 2, changedEvent.State.EpisodeNumber)
require.Equal(t, "2", changedEvent.State.AniDbEpisode)
require.Equal(t, media.ID, changedEvent.State.MediaId)
require.True(t, changedEvent.State.CanPlayNext)
require.False(t, changedEvent.State.ProgressUpdated)
require.Equal(t, events.PlaybackManagerProgressPlaybackState, h.wsEventManager.lastType())
completedStatus := &mediaplayer.PlaybackStatus{
Filename: "2.mkv",
Filepath: localFiles[1].Path,
CompletionPercentage: 1,
CurrentTimeInSeconds: 1200,
DurationInSeconds: 1200,
PlaybackType: mediaplayer.PlaybackTypeFile,
}
h.playbackManager.handleVideoCompleted(completedStatus)
completedChanged := expectPlaybackEvent[PlaybackStatusChangedEvent](t, subscriber.EventCh)
require.Equal(t, 2, completedChanged.State.EpisodeNumber)
completedEvent := expectPlaybackEvent[VideoCompletedEvent](t, subscriber.EventCh)
require.Equal(t, "2.mkv", completedEvent.Filename)
progressCalls := h.platform.UpdateEntryProgressCalls()
require.Len(t, progressCalls, 1)
require.Equal(t, media.ID, progressCalls[0].MediaID)
require.Equal(t, 2, progressCalls[0].Progress)
require.NotNil(t, progressCalls[0].TotalEpisodes)
require.Equal(t, 12, *progressCalls[0].TotalEpisodes)
require.True(t, h.playbackManager.historyMap["2.mkv"].ProgressUpdated)
require.Equal(t, events.PlaybackManagerProgressVideoCompleted, h.wsEventManager.lastType())
require.Equal(t, 1, h.wsEventManager.count(events.PlaybackManagerProgressUpdated))
h.playbackManager.handleTrackingStopped("closed")
stoppedEvent := expectPlaybackEvent[VideoStoppedEvent](t, subscriber.EventCh)
require.Equal(t, "closed", stoppedEvent.Reason)
require.True(t, h.playbackManager.nextEpisodeLocalFile.IsPresent())
require.Same(t, localFiles[2], h.playbackManager.nextEpisodeLocalFile.MustGet())
require.Equal(t, events.PlaybackManagerProgressTrackingStopped, h.wsEventManager.lastType())
}
// TestPlaybackManagerUnitStreamPlaybackStatusAndProgressTracking drives the
// streaming tracking handlers directly (started -> status -> completed ->
// stopped) and asserts the playback events delivered to a subscriber, the
// websocket events recorded by the harness, and the single progress update
// sent to the platform on completion.
func TestPlaybackManagerUnitStreamPlaybackStatusAndProgressTracking(t *testing.T) {
	// this covers the stream tracking handlers, including progress sync when a streamed episode completes.
	h := newPlaybackManagerTestHarness(t)
	// Enable AutoUpdateProgress so stream completion triggers a platform progress sync.
	h.seedAutoUpdateProgress(t, true)
	media := testmocks.NewBaseAnimeBuilder(201, "Dungeon Meshi").
		WithUserPreferredTitle("Dungeon Meshi").
		WithEpisodes(24).
		Build()
	// User progress starts at 1, so episode 2 is the next unwatched episode.
	entry := &anilist.AnimeListEntry{Media: media, Progress: new(1)}
	collection := newAnimeCollection(media, entry, anilist.MediaListStatusCurrent)
	h.playbackManager.SetAnimeCollection(collection)
	// Pre-seed the stream state the manager would normally receive from the streaming flow.
	h.playbackManager.currentStreamMedia = mo.Some(media)
	h.playbackManager.currentStreamEpisode = mo.Some(&anime.Episode{EpisodeNumber: 2, ProgressNumber: 2, AniDBEpisode: "2"})
	h.playbackManager.currentStreamAniDbEpisode = mo.Some("2")
	subscriber := h.playbackManager.SubscribeToPlaybackStatus("unit-stream")

	// --- tracking started ---
	startedStatus := &mediaplayer.PlaybackStatus{
		Filename:             "Stream",
		Filepath:             "https://example.com/stream/2",
		CompletionPercentage: 0.1,
		CurrentTimeInSeconds: 60,
		DurationInSeconds:    1500,
		PlaybackType:         mediaplayer.PlaybackTypeStream,
	}
	h.playbackManager.handleStreamingTrackingStarted(startedStatus)
	// A status-changed event is emitted first, then the stream-started event.
	startedChanged := expectPlaybackEvent[PlaybackStatusChangedEvent](t, subscriber.EventCh)
	require.Equal(t, 2, startedChanged.State.EpisodeNumber)
	require.Equal(t, media.ID, startedChanged.State.MediaId)
	startedEvent := expectPlaybackEvent[StreamStartedEvent](t, subscriber.EventCh)
	require.Equal(t, "Stream", startedEvent.Filename)
	require.True(t, h.playbackManager.currentMediaListEntry.IsPresent())
	require.Equal(t, events.PlaybackManagerProgressTrackingStarted, h.wsEventManager.lastType())

	// --- mid-playback status update ---
	status := &mediaplayer.PlaybackStatus{
		Filename:             "Stream",
		Filepath:             "https://example.com/stream/2",
		CompletionPercentage: 0.5,
		CurrentTimeInSeconds: 750,
		DurationInSeconds:    1500,
		Playing:              true,
		PlaybackType:         mediaplayer.PlaybackTypeStream,
	}
	h.playbackManager.handleStreamingPlaybackStatus(status)
	streamChanged := expectPlaybackEvent[PlaybackStatusChangedEvent](t, subscriber.EventCh)
	require.Equal(t, 2, streamChanged.State.EpisodeNumber)
	require.Equal(t, events.PlaybackManagerProgressPlaybackState, h.wsEventManager.lastType())

	// --- completion ---
	completedStatus := &mediaplayer.PlaybackStatus{
		Filename:             "Stream",
		Filepath:             "https://example.com/stream/2",
		CompletionPercentage: 1,
		CurrentTimeInSeconds: 1500,
		DurationInSeconds:    1500,
		PlaybackType:         mediaplayer.PlaybackTypeStream,
	}
	h.playbackManager.handleStreamingVideoCompleted(completedStatus)
	completedChanged := expectPlaybackEvent[PlaybackStatusChangedEvent](t, subscriber.EventCh)
	require.Equal(t, 2, completedChanged.State.EpisodeNumber)
	completedEvent := expectPlaybackEvent[StreamCompletedEvent](t, subscriber.EventCh)
	require.Equal(t, "Stream", completedEvent.Filename)
	// Completion must have synced progress to the platform exactly once, with episode 2 of 24.
	progressCalls := h.platform.UpdateEntryProgressCalls()
	require.Len(t, progressCalls, 1)
	require.Equal(t, media.ID, progressCalls[0].MediaID)
	require.Equal(t, 2, progressCalls[0].Progress)
	require.NotNil(t, progressCalls[0].TotalEpisodes)
	require.Equal(t, 24, *progressCalls[0].TotalEpisodes)
	require.True(t, h.playbackManager.historyMap["Stream"].ProgressUpdated)
	require.Equal(t, 1, h.wsEventManager.count(events.PlaybackManagerProgressUpdated))

	// --- tracking stopped ---
	h.playbackManager.handleStreamingTrackingStopped("finished")
	stoppedEvent := expectPlaybackEvent[StreamStoppedEvent](t, subscriber.EventCh)
	require.Equal(t, "finished", stoppedEvent.Reason)
	require.Equal(t, events.PlaybackManagerProgressTrackingStopped, h.wsEventManager.lastType())
}
// TestPlaybackManagerUnitManualProgressTrackingSyncsProgress verifies that
// manual progress tracking keeps the tracked episode in memory, syncs it to
// the platform on demand, and tears down cleanly when cancelled.
//
// Fix: removed four stale lines (leftover mock AniList client/platform setup
// referencing undefined `logger` and `database`) that did not belong to this
// harness-based version of the test and broke compilation.
func TestPlaybackManagerUnitManualProgressTrackingSyncsProgress(t *testing.T) {
	// manual tracking should hold the current episode in memory and sync it when the user asks for it.
	h := newPlaybackManagerTestHarness(t)
	media := testmocks.NewBaseAnimeBuilder(909, "Orb").
		WithUserPreferredTitle("Orb").
		WithEpisodes(25).
		Build()
	// User progress starts at 4; the test manually tracks episode 5.
	entry := &anilist.AnimeListEntry{Media: media, Progress: new(4)}
	// Swap in a platform whose fake collection already contains the media.
	h.platform = testmocks.NewFakePlatformBuilder().WithAnimeCollection(newAnimeCollection(media, entry, anilist.MediaListStatusCurrent)).Build()
	h.playbackManager.platformRef = util.NewRef[platform.Platform](h.platform)

	err := h.playbackManager.StartManualProgressTracking(&StartManualProgressTrackingOptions{
		ClientId:      "unit",
		MediaId:       media.ID,
		EpisodeNumber: 5,
	})
	require.NoError(t, err)
	// Manual tracking state should mirror the collection entry (progress 4 of 25).
	require.Equal(t, ManualTrackingPlayback, h.playbackManager.currentPlaybackType)
	require.True(t, h.playbackManager.currentManualTrackingState.IsPresent())
	require.Equal(t, 4, h.playbackManager.currentManualTrackingState.MustGet().CurrentProgress)
	require.Equal(t, 25, h.playbackManager.currentManualTrackingState.MustGet().TotalEpisodes)
	// The manual-tracking playback state is broadcast asynchronously.
	require.Eventually(t, func() bool {
		return h.wsEventManager.count(events.PlaybackManagerManualTrackingPlaybackState) > 0
	}, time.Second, 10*time.Millisecond)

	// Syncing pushes the tracked episode (5) to the platform once.
	err = h.playbackManager.SyncCurrentProgress()
	require.NoError(t, err)
	progressCalls := h.platform.UpdateEntryProgressCalls()
	require.Len(t, progressCalls, 1)
	require.Equal(t, media.ID, progressCalls[0].MediaID)
	require.Equal(t, 5, progressCalls[0].Progress)
	require.NotNil(t, progressCalls[0].TotalEpisodes)
	require.Equal(t, 25, *progressCalls[0].TotalEpisodes)
	// The collection is refreshed once on start and once after the sync.
	require.Equal(t, 2, h.refreshCalls)

	// Cancellation stops tracking and clears the in-memory state.
	h.playbackManager.CancelManualProgressTracking()
	require.Eventually(t, func() bool {
		return h.wsEventManager.count(events.PlaybackManagerManualTrackingStopped) == 1
	}, 4*time.Second, 25*time.Millisecond)
	require.True(t, h.playbackManager.currentManualTrackingState.IsAbsent())
}
// TestPlaybackManagerLiveRepositoryEventsReachCallbacks proves that an event
// pushed into the real media-player repository subscription channel reaches a
// callback registered via RegisterMediaPlayerCallback.
func TestPlaybackManagerLiveRepositoryEventsReachCallbacks(t *testing.T) {
	// this uses the real repository subscription wiring, but it stays in-memory and never launches a player.
	h := newPlaybackManagerTestHarness(t)
	repo := mediaplayer.NewRepository(&mediaplayer.NewRepositoryOptions{
		Logger:         util.NewLogger(),
		Default:        "",
		WSEventManager: events.NewMockWSEventManager(util.NewLogger()),
	})
	h.playbackManager.SetMediaPlayerRepository(repo)
	// Stop the subscription goroutine when the test finishes.
	t.Cleanup(func() {
		if h.playbackManager.cancel != nil {
			h.playbackManager.cancel()
		}
	})
	// The repository swap and resubscription happen asynchronously; wait for both.
	require.Eventually(t, func() bool {
		return h.playbackManager.MediaPlayerRepository == repo && h.playbackManager.mediaPlayerRepoSubscriber != nil
	}, time.Second, 10*time.Millisecond)
	// Buffered so the callback never blocks the event loop.
	received := make(chan PlaybackErrorEvent, 1)
	h.playbackManager.RegisterMediaPlayerCallback(func(event PlaybackEvent) bool {
		playbackError, ok := event.(PlaybackErrorEvent)
		if ok {
			received <- playbackError
		}
		// false: do not consume the event, let default handling continue.
		return false
	})
	// Inject a retry event directly into the subscriber channel; the manager
	// surfaces it to callbacks as a PlaybackErrorEvent.
	h.playbackManager.mediaPlayerRepoSubscriber.EventCh <- mediaplayer.TrackingRetryEvent{Reason: "player unreachable"}
	select {
	case event := <-received:
		require.Equal(t, "player unreachable", event.Reason)
	case <-time.After(time.Second):
		t.Fatal("callback did not receive repository event")
	}
}
// TestPlaybackManagerLiveRepositoryStreamCompletionSyncsProgress pushes
// stream started/completed events through the real repository subscription
// loop and asserts that completion results in a progress update on the
// platform.
func TestPlaybackManagerLiveRepositoryStreamCompletionSyncsProgress(t *testing.T) {
	// this exercises the real repository subscription loop and proves stream completion can drive a progress sync.
	h := newPlaybackManagerTestHarness(t)
	// Enable AutoUpdateProgress so the completed event triggers a sync.
	h.seedAutoUpdateProgress(t, true)
	media := testmocks.NewBaseAnimeBuilder(700, "Lazarus").
		WithUserPreferredTitle("Lazarus").
		WithEpisodes(13).
		Build()
	// Progress starts at 0; episode 1 is the one being streamed.
	h.playbackManager.SetAnimeCollection(newAnimeCollection(media, &anilist.AnimeListEntry{
		Media:    media,
		Progress: new(0),
	}, anilist.MediaListStatusCurrent))
	// Pre-seed the stream state the streaming flow would normally set.
	h.playbackManager.currentStreamMedia = mo.Some(media)
	h.playbackManager.currentStreamEpisode = mo.Some(&anime.Episode{EpisodeNumber: 1, ProgressNumber: 1, AniDBEpisode: "1"})
	h.playbackManager.currentStreamAniDbEpisode = mo.Some("1")
	repo := mediaplayer.NewRepository(&mediaplayer.NewRepositoryOptions{
		Logger:         util.NewLogger(),
		Default:        "",
		WSEventManager: events.NewMockWSEventManager(util.NewLogger()),
	})
	h.playbackManager.SetMediaPlayerRepository(repo)
	t.Cleanup(func() {
		if h.playbackManager.cancel != nil {
			h.playbackManager.cancel()
		}
	})
	// Repository swap and resubscription are asynchronous; wait for both.
	require.Eventually(t, func() bool {
		return h.playbackManager.MediaPlayerRepository == repo && h.playbackManager.mediaPlayerRepoSubscriber != nil
	}, time.Second, 10*time.Millisecond)
	// Feed the started and completed events straight into the subscriber channel.
	h.playbackManager.mediaPlayerRepoSubscriber.EventCh <- mediaplayer.StreamingTrackingStartedEvent{Status: &mediaplayer.PlaybackStatus{
		Filename:             "Stream",
		Filepath:             "https://example.com/stream/1",
		CompletionPercentage: 0.1,
		CurrentTimeInSeconds: 60,
		DurationInSeconds:    1500,
		PlaybackType:         mediaplayer.PlaybackTypeStream,
	}}
	h.playbackManager.mediaPlayerRepoSubscriber.EventCh <- mediaplayer.StreamingVideoCompletedEvent{Status: &mediaplayer.PlaybackStatus{
		Filename:             "Stream",
		Filepath:             "https://example.com/stream/1",
		CompletionPercentage: 1,
		CurrentTimeInSeconds: 1500,
		DurationInSeconds:    1500,
		PlaybackType:         mediaplayer.PlaybackTypeStream,
	}}
	// Completion is handled asynchronously; exactly one progress update (episode 1) is expected.
	require.Eventually(t, func() bool {
		calls := h.platform.UpdateEntryProgressCalls()
		return len(calls) == 1 && calls[0].MediaID == media.ID && calls[0].Progress == 1
	}, time.Second, 10*time.Millisecond)
	require.True(t, h.playbackManager.historyMap["Stream"].ProgressUpdated)
}
// playbackManagerTestHarness bundles the in-memory dependencies used by the
// playback manager unit tests: a temp database, a recording websocket event
// manager, a fake platform, and the manager under test.
type playbackManagerTestHarness struct {
	database *db.Database
	// wsEventManager records every websocket event so tests can assert on order and counts.
	wsEventManager *recordingWSEventManager
	// refreshCalls counts invocations of the manager's RefreshAnimeCollectionFunc.
	refreshCalls int
	// platform is the fake platform capturing progress-update calls.
	platform        *testmocks.FakePlatform
	playbackManager *PlaybackManager
}
func newPlaybackManagerTestHarness(t *testing.T) *playbackManagerTestHarness {
t.Helper()
env := testutil.NewTestEnv(t)
logger := util.NewLogger()
database := env.MustNewDatabase(logger)
wsEventManager := &recordingWSEventManager{MockWSEventManager: events.NewMockWSEventManager(logger)}
continuityManager := continuity.NewManager(&continuity.NewManagerOptions{
FileCacher: filecacher,
FileCacher: env.NewCacher("continuity"),
Logger: logger,
Database: database,
})
continuityManager.SetSettings(&continuity.Settings{WatchContinuityEnabled: true})
platformImpl := testmocks.NewFakePlatformBuilder().Build()
platformInterface := platform.Platform(platformImpl)
var provider metadata_provider.Provider
return playbackmanager.New(&playbackmanager.NewPlaybackManagerOptions{
WSEventManager: wsEventManager,
h := &playbackManagerTestHarness{
database: database,
wsEventManager: wsEventManager,
platform: platformImpl,
}
h.playbackManager = New(&NewPlaybackManagerOptions{
Logger: logger,
PlatformRef: util.NewRef(anilistPlatform),
MetadataProviderRef: util.NewRef(metadataProvider),
WSEventManager: wsEventManager,
PlatformRef: util.NewRef(platformInterface),
MetadataProviderRef: util.NewRef(provider),
Database: database,
RefreshAnimeCollectionFunc: func() {
// Do nothing
h.refreshCalls++
},
DiscordPresence: nil,
IsOfflineRef: util.NewRef(false),
ContinuityManager: continuityManager,
}), animeCollection, nil
IsOfflineRef: util.NewRef(false),
})
h.seedAutoUpdateProgress(t, false)
return h
}
// seedAutoUpdateProgress upserts the singleton settings row so that the
// library's AutoUpdateProgress flag matches the requested value.
func (h *playbackManagerTestHarness) seedAutoUpdateProgress(t *testing.T, enabled bool) {
	t.Helper()
	settings := &models.Settings{
		BaseModel: models.BaseModel{ID: 1},
		Library:   &models.LibrarySettings{AutoUpdateProgress: enabled},
	}
	_, err := h.database.UpsertSettings(settings)
	require.NoError(t, err)
}
// recordingWSEventManager embeds the mock websocket event manager and
// additionally records every event passed to SendEvent, so tests can query
// counts per event type and the most recent event type.
type recordingWSEventManager struct {
	*events.MockWSEventManager
	mu sync.Mutex // guards events
	events []events.MockWSEvent
}
// SendEvent records the event type and payload under the lock. It overrides
// the embedded mock's SendEvent so that every broadcast is captured.
func (m *recordingWSEventManager) SendEvent(t string, payload interface{}) {
	recorded := events.MockWSEvent{Type: t, Payload: payload}
	m.mu.Lock()
	m.events = append(m.events, recorded)
	m.mu.Unlock()
}
// count returns how many recorded events have the given type.
func (m *recordingWSEventManager) count(eventType string) int {
	m.mu.Lock()
	defer m.mu.Unlock()
	var n int
	for i := range m.events {
		if m.events[i].Type == eventType {
			n++
		}
	}
	return n
}
// lastType returns the type of the most recently recorded event, or the empty
// string when nothing has been recorded yet.
func (m *recordingWSEventManager) lastType() string {
	m.mu.Lock()
	defer m.mu.Unlock()
	if n := len(m.events); n > 0 {
		return m.events[n-1].Type
	}
	return ""
}
// newAnimeCollection wraps a single entry in a minimal AniList collection:
// one list carrying the given status, containing just that entry. The entry's
// Media and Status fields are overwritten with the supplied values.
func newAnimeCollection(media *anilist.BaseAnime, entry *anilist.AnimeListEntry, status anilist.MediaListStatus) *anilist.AnimeCollection {
	entry.Media = media
	entry.Status = new(status)
	list := &anilist.AnimeCollection_MediaListCollection_Lists{
		Status:  new(status),
		Entries: []*anilist.AnimeCollection_MediaListCollection_Lists_Entries{entry},
	}
	return &anilist.AnimeCollection{
		MediaListCollection: &anilist.AnimeCollection_MediaListCollection{
			Lists: []*anilist.AnimeCollection_MediaListCollection_Lists{list},
		},
	}
}
// expectPlaybackEvent receives the next event from ch and asserts it has the
// concrete type T, failing the test on a type mismatch or after a one-second
// timeout.
func expectPlaybackEvent[T PlaybackEvent](t *testing.T, ch <-chan PlaybackEvent) T {
	t.Helper()
	var zero T
	select {
	case <-time.After(time.Second):
		t.Fatal("timed out waiting for playback event")
		return zero
	case event := <-ch:
		if typed, ok := event.(T); ok {
			return typed
		}
		t.Fatalf("unexpected playback event type %T", event)
		return zero
	}
}

View File

@@ -671,7 +671,9 @@ func (fh *FileHydrator) precompileRules() {
})
if fh.Config == nil || len(fh.Config.Hydration.Rules) == 0 {
fh.ScanLogger.LogMatcher(zerolog.DebugLevel).Msg("Zero config")
if fh.ScanLogger != nil {
fh.ScanLogger.LogMatcher(zerolog.DebugLevel).Msg("Zero config")
}
return
}
@@ -684,7 +686,7 @@ func (fh *FileHydrator) precompileRules() {
r := &compiledHydrationRule{
regex: nil,
rule: rule,
fileRulesRgx: map[string]*compiledHydrationFileRule{},
fileRulesRgx: make(map[string]*compiledHydrationFileRule),
}
if rule.Pattern != "" {

View File

@@ -2,15 +2,10 @@ package scanner
import (
"seanime/internal/api/anilist"
"seanime/internal/api/metadata_provider"
"seanime/internal/database/db"
"seanime/internal/extension"
"seanime/internal/library/anime"
"seanime/internal/library/summary"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/platforms/platform"
"seanime/internal/util"
"seanime/internal/util/limiter"
"testing"
"github.com/stretchr/testify/assert"
@@ -18,26 +13,12 @@ import (
)
func TestFileHydrator_HydrateMetadata(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
completeAnimeCache := anilist.NewCompleteAnimeCache()
anilistRateLimiter := limiter.NewAnilistLimiter()
logger := util.NewLogger()
database, err := db.NewDatabase(test_utils.ConfigData.Path.DataDir, test_utils.ConfigData.Database.Name, logger)
require.NoError(t, err)
metadataProvider := metadata_provider.GetFakeProvider(t, database)
anilistClient := anilist.NewAnilistClient(test_utils.ConfigData.Provider.AnilistJwt, "")
anilistClientRef := util.NewRef[anilist.AnilistClient](anilistClient)
extensionBankRef := util.NewRef(extension.NewUnifiedBank())
//wsEventManager := events.NewMockWSEventManager(logger)
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClientRef, extensionBankRef, logger, database)
anilistPlatform.SetUsername(test_utils.ConfigData.Provider.AnilistUsername)
animeCollection, err := anilistPlatform.GetAnimeCollectionWithRelations(t.Context())
harness := newScannerFixtureHarness(t)
logger := harness.Logger
animeCollection, err := harness.Platform.GetAnimeCollectionWithRelations(t.Context())
require.NoError(t, err)
require.NotNil(t, animeCollection)
allMedia := animeCollection.GetAllAnime()
tests := []struct {
name string
paths []string
@@ -104,6 +85,14 @@ func TestFileHydrator_HydrateMetadata(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
currentStatus := anilist.MediaListStatusCurrent
anilist.EnsureAnimeCollectionWithRelationsEntry(
animeCollection,
tt.expectedMediaId,
anilist.AnimeCollectionEntryPatch{Status: &currentStatus},
harness.AnilistClient,
)
allMedia := animeCollection.GetAllAnime()
scanLogger, err := NewConsoleScanLogger()
if err != nil {
@@ -114,11 +103,7 @@ func TestFileHydrator_HydrateMetadata(t *testing.T) {
// | Local Files |
// +---------------------+
var lfs []*anime.LocalFile
for _, path := range tt.paths {
lf := anime.NewLocalFile(path, "E:/Anime")
lfs = append(lfs, lf)
}
lfs := harness.LocalFiles(tt.paths...)
// +---------------------+
// | MediaContainer |
@@ -158,10 +143,10 @@ func TestFileHydrator_HydrateMetadata(t *testing.T) {
fh := &FileHydrator{
LocalFiles: lfs,
AllMedia: mc.NormalizedMedia,
CompleteAnimeCache: completeAnimeCache,
PlatformRef: util.NewRef(anilistPlatform),
AnilistRateLimiter: anilistRateLimiter,
MetadataProviderRef: util.NewRef(metadataProvider),
CompleteAnimeCache: harness.CompleteAnimeCache,
PlatformRef: util.NewRef[platform.Platform](harness.Platform),
AnilistRateLimiter: harness.AnilistRateLimiter,
MetadataProviderRef: util.NewRef(harness.MetadataProvider),
Logger: logger,
ScanLogger: scanLogger,
Config: config,

View File

@@ -3,14 +3,9 @@ package scanner
import (
"context"
"seanime/internal/api/anilist"
"seanime/internal/api/metadata_provider"
"seanime/internal/database/db"
"seanime/internal/extension"
"seanime/internal/library/anime"
"seanime/internal/platforms/anilist_platform"
"seanime/internal/test_utils"
"seanime/internal/platforms/platform"
"seanime/internal/util"
"seanime/internal/util/limiter"
"testing"
"github.com/stretchr/testify/assert"
@@ -18,7 +13,7 @@ import (
func TestMatcher1(t *testing.T) {
anilistClient := anilist.TestGetMockAnilistClient()
anilistClient := anilist.NewTestAnilistClient()
animeCollection, err := anilistClient.AnimeCollectionWithRelations(context.Background(), nil)
if err != nil {
t.Fatal(err.Error())
@@ -103,15 +98,17 @@ func TestMatcher1(t *testing.T) {
}
func TestMatcher2(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := anilist.NewAnilistClient(test_utils.ConfigData.Provider.AnilistJwt, "")
animeCollection, err := anilistClient.AnimeCollectionWithRelations(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
harness := newScannerLiveHarness(t)
anilistClient := harness.AnilistClient
animeCollection, err := harness.Platform.GetAnimeCollectionWithRelations(t.Context())
if err != nil {
t.Fatal(err.Error())
}
if animeCollection == nil {
t.Fatal("expected anime collection, got nil")
}
dir := "E:/Anime"
dir := harness.LibraryDir
tests := []struct {
name string
@@ -156,34 +153,12 @@ func TestMatcher2(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Add media to collection if it doesn't exist
allMedia := animeCollection.GetAllAnime()
hasExpectedMediaId := false
for _, media := range allMedia {
if media.ID == tt.expectedMediaId {
hasExpectedMediaId = true
break
}
}
if !hasExpectedMediaId {
anilist.TestAddAnimeCollectionWithRelationsEntry(animeCollection, tt.expectedMediaId, anilist.TestModifyAnimeCollectionEntryInput{Status: new(anilist.MediaListStatusCurrent)}, anilistClient)
allMedia = animeCollection.GetAllAnime()
}
currentStatus := anilist.MediaListStatusCurrent
anilist.EnsureAnimeCollectionWithRelationsEntry(animeCollection, tt.expectedMediaId, anilist.AnimeCollectionEntryPatch{Status: &currentStatus}, anilistClient)
for _, otherMediaId := range tt.otherMediaIds {
hasOtherMediaId := false
for _, media := range allMedia {
if media.ID == otherMediaId {
hasOtherMediaId = true
break
}
}
if !hasOtherMediaId {
anilist.TestAddAnimeCollectionWithRelationsEntry(animeCollection, otherMediaId, anilist.TestModifyAnimeCollectionEntryInput{Status: new(anilist.MediaListStatusCurrent)}, anilistClient)
allMedia = animeCollection.GetAllAnime()
}
anilist.EnsureAnimeCollectionWithRelationsEntry(animeCollection, otherMediaId, anilist.AnimeCollectionEntryPatch{Status: &currentStatus}, anilistClient)
}
allMedia := animeCollection.GetAllAnime()
scanLogger, err := NewConsoleScanLogger()
if err != nil {
@@ -238,15 +213,17 @@ func TestMatcher2(t *testing.T) {
}
func TestMatcher3(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := anilist.NewAnilistClient(test_utils.ConfigData.Provider.AnilistJwt, "")
animeCollection, err := anilistClient.AnimeCollectionWithRelations(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
harness := newScannerLiveHarness(t)
anilistClient := harness.AnilistClient
animeCollection, err := harness.Platform.GetAnimeCollectionWithRelations(t.Context())
if err != nil {
t.Fatal(err.Error())
}
if animeCollection == nil {
t.Fatal("expected anime collection, got nil")
}
dir := "E:/Anime"
dir := harness.LibraryDir
tests := []struct {
name string
@@ -856,37 +833,12 @@ func TestMatcher3(t *testing.T) {
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
// Add media to collection if it doesn't exist
allMedia := animeCollection.GetAllAnime()
// Helper to ensure media exists in collection
hasMedia := false
for _, media := range allMedia {
if media.ID == tt.expectedMediaId {
hasMedia = true
break
}
}
if !hasMedia {
anilist.TestAddAnimeCollectionWithRelationsEntry(animeCollection, tt.expectedMediaId, anilist.TestModifyAnimeCollectionEntryInput{Status: new(anilist.MediaListStatusCurrent)}, anilistClient)
allMedia = animeCollection.GetAllAnime()
}
// Ensure other media exists
currentStatus := anilist.MediaListStatusCurrent
anilist.EnsureAnimeCollectionWithRelationsEntry(animeCollection, tt.expectedMediaId, anilist.AnimeCollectionEntryPatch{Status: &currentStatus}, anilistClient)
for _, id := range tt.otherMediaIds {
hasMedia := false
for _, media := range allMedia {
if media.ID == id {
hasMedia = true
break
}
}
if !hasMedia {
anilist.TestAddAnimeCollectionWithRelationsEntry(animeCollection, id, anilist.TestModifyAnimeCollectionEntryInput{Status: new(anilist.MediaListStatusCurrent)}, anilistClient)
allMedia = animeCollection.GetAllAnime()
}
anilist.EnsureAnimeCollectionWithRelationsEntry(animeCollection, id, anilist.AnimeCollectionEntryPatch{Status: &currentStatus}, anilistClient)
}
allMedia := animeCollection.GetAllAnime()
scanLogger, err := NewConsoleScanLogger()
if err != nil {
@@ -948,41 +900,27 @@ func TestMatcherWithOfflineDB(t *testing.T) {
t.Skip("Skipping integration test")
}
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := anilist.TestGetMockAnilistClient()
logger := util.NewLogger()
database, err := db.NewDatabase(test_utils.ConfigData.Path.DataDir, test_utils.ConfigData.Database.Name, logger)
if err != nil {
t.Fatal(err)
}
anilistClientRef := util.NewRef(anilistClient)
extensionBankRef := util.NewRef(extension.NewUnifiedBank())
anilistPlatform := anilist_platform.NewAnilistPlatform(anilistClientRef, extensionBankRef, logger, database)
anilistPlatform.SetUsername(test_utils.ConfigData.Provider.AnilistUsername)
metadataProvider := metadata_provider.GetFakeProvider(t, database)
completeAnimeCache := anilist.NewCompleteAnimeCache()
anilistRateLimiter := limiter.NewAnilistLimiter()
harness := newScannerFixtureHarness(t)
logger := harness.Logger
scanLogger, err := NewConsoleScanLogger()
if err != nil {
t.Fatal("expected result, got error:", err.Error())
}
dir := "E:/Anime"
dir := harness.LibraryDir
t.Log("Initializing MediaFetcher with anime-offline-database...")
mf, err := NewMediaFetcher(t.Context(), &MediaFetcherOptions{
Enhanced: true,
EnhanceWithOfflineDatabase: true, // Use offline database
PlatformRef: util.NewRef(anilistPlatform),
PlatformRef: util.NewRef[platform.Platform](harness.Platform),
LocalFiles: []*anime.LocalFile{}, // Empty, we don't need local files for fetching
CompleteAnimeCache: completeAnimeCache,
MetadataProviderRef: util.NewRef(metadataProvider),
CompleteAnimeCache: harness.CompleteAnimeCache,
MetadataProviderRef: util.NewRef(harness.MetadataProvider),
Logger: logger,
AnilistRateLimiter: anilistRateLimiter,
AnilistRateLimiter: harness.AnilistRateLimiter,
ScanLogger: scanLogger,
DisableAnimeCollection: true, // Only use offline database
})
@@ -1395,15 +1333,17 @@ func TestGetFileFormatType(t *testing.T) {
}
func TestMatcher_applyMatchingRule(t *testing.T) {
test_utils.InitTestProvider(t, test_utils.Anilist())
anilistClient := anilist.NewAnilistClient(test_utils.ConfigData.Provider.AnilistJwt, "")
animeCollection, err := anilistClient.AnimeCollectionWithRelations(context.Background(), &test_utils.ConfigData.Provider.AnilistUsername)
harness := newScannerLiveHarness(t)
anilistClient := harness.AnilistClient
animeCollection, err := harness.Platform.GetAnimeCollectionWithRelations(t.Context())
if err != nil {
t.Fatal(err.Error())
}
if animeCollection == nil {
t.Fatal("expected anime collection, got nil")
}
dir := "E:/Anime"
dir := harness.LibraryDir
tests := []struct {
name string
@@ -1530,34 +1470,17 @@ func TestMatcher_applyMatchingRule(t *testing.T) {
t.Run(tt.name, func(t *testing.T) {
// Add medias to collection if it doesn't exist
allMedia := animeCollection.GetAllAnime()
expectedIDs := make([]int, len(tt.expectedMediaIds))
copy(expectedIDs, tt.expectedMediaIds)
for _, media := range allMedia {
for i, expectedID := range expectedIDs {
if media.ID == expectedID {
last := len(expectedIDs) - 1
expectedIDs[i] = expectedIDs[last]
expectedIDs = expectedIDs[:last]
break
}
}
}
for _, missingID := range expectedIDs {
anilist.TestAddAnimeCollectionWithRelationsEntry(
currentStatus := anilist.MediaListStatusCurrent
for _, expectedID := range tt.expectedMediaIds {
anilist.EnsureAnimeCollectionWithRelationsEntry(
animeCollection,
missingID,
anilist.TestModifyAnimeCollectionEntryInput{
Status: new(anilist.MediaListStatusCurrent),
},
expectedID,
anilist.AnimeCollectionEntryPatch{Status: &currentStatus},
anilistClient,
)
}
allMedia = animeCollection.GetAllAnime()
allMedia := animeCollection.GetAllAnime()
scanLogger, err := NewConsoleScanLogger()
if err != nil {

Some files were not shown because too many files have changed in this diff Show More