This commit is contained in:
5rahim
2025-11-23 11:49:13 +01:00
parent 31ed6820e6
commit bca5e35571
94 changed files with 406 additions and 3263 deletions

Binary file not shown.

Before

Width:  |  Height:  |  Size: 10 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 389 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 252 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 416 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 486 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 382 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 411 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 450 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 430 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 958 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 366 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 641 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 258 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 544 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 241 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 223 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 345 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 16 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 16 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 5.1 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 10 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 3.0 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.6 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 2.4 MiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 1.1 MiB

View File

@@ -3,78 +3,3 @@
</p>
<h2 align="center"><b>Seanime Server</b></h2>
- `api`: Third-party APIs
- `anilist`: AniList structs and methods
- `anizip`: Metadata API
- `filler`: Filler API
- `listsync`
- `mal`: MyAnimeList API
- `mappings`: Mapping API
- `metadata`: **Metadata module** for anime
- `tvdb`: TheTVDB API
- `constants`: Version, keys
- `core`
- `app.go`: **Shared app struct**
- `config.go`: Configuration
- `extensions.go`: Load built-in extensions
- `fiber.go`: HTTP server
- `watcher.go`: Library watcher
- `cron`: Background tasks
- `database`
- `db`: **Database module**
- `db_bridge`: Helper methods to avoid circular dependencies
- `models`: Database models
- `debrid`: **Debrid module**
- `debrid`: Structs and interfaces
- `client`: **Debrid repository** for streaming, download
- `torbox`
- `realdebrid`
- `discordrpc`: Discord RPC
- `client`
- `ipc`
- `presence`: **Discord Rich Presence module**
- `events`: **Websocket Event Manager module** and constants
- `extensions`: Structs and interfaces
- `extension_playground`: **Extension Playground module**
- `extension_repo`: **Extension Repository module**
- `handlers`: API handlers
- `library`
- `anime`: Library structs and methods
- `autodownloader`: **Auto downloader module**
- `autoscanner`: **Auto scanner module**
- `filesystem`: File system methods
- `playbackmanager`: **Playback Manager module** for progress tracking
- `scanner`: **Scanner module**
- `summary`: Scan summary
- `manga`: Manga structs and **Manga Downloader module**
- `downloader`: Chapter downloader structs and methods
- `providers`: Online provider structs and methods
- `mediaplayers`
- `mediaplayer`: **Media Player Repository** module
- `mpchc`
- `mpv`
- `mpvipc`
- `vlc`
- `mediastream`: **Media Stream Repository** module
- `transcoder`: Transcoder
- `videofile`: Media metadata
- `notifier`
- `onlinestream`: **Onlinestream module**
- `providers`: Stream providers
- `sources`: Video server sources
- `platforms`
- `platform`: Platform structs and methods
- `anilist_platform`
- `local_platform`
- `test_utils`: Test methods
- `torrentstream`: **Torrent Stream Repository** module
- `sync`: **Sync/Offline module**
- `test_utils`: Test methods
- `torrent_clients`
- `torrent_client`: **Torrent Client Repository** module
- `qbittorrent`
- `transmission`
- `torrents`
- `analyzer`: Scan and identify torrent files
- `torrent`: Torrent structs and methods

View File

@@ -2,11 +2,12 @@ package anilist
import (
"context"
"github.com/samber/lo"
"seanime/internal/util"
"seanime/internal/util/limiter"
"seanime/internal/util/result"
"sync"
"github.com/samber/lo"
)
type (
@@ -26,7 +27,7 @@ const (
// NewCompleteAnimeRelationTree returns a new result.Map[int, *CompleteAnime].
// It is used to store the results of FetchMediaTree or FetchMediaTree calls.
func NewCompleteAnimeRelationTree() *CompleteAnimeRelationTree {
return &CompleteAnimeRelationTree{result.NewResultMap[int, *CompleteAnime]()}
return &CompleteAnimeRelationTree{result.NewMap[int, *CompleteAnime]()}
}
func (m *BaseAnime) FetchMediaTree(rel FetchMediaTreeRelation, anilistClient AnilistClient, rl *limiter.Limiter, tree *CompleteAnimeRelationTree, cache *CompleteAnimeCache) (err error) {

View File

@@ -58,77 +58,112 @@ import (
type (
App struct {
Config *Config
Database *db.Database
Logger *zerolog.Logger
TorrentClientRepository *torrent_client.Repository
TorrentRepository *torrent.Repository
DebridClientRepository *debrid_client.Repository
Watcher *scanner.Watcher
AnilistClientRef *util.Ref[anilist.AnilistClient]
AnilistPlatformRef *util.Ref[platform.Platform]
OfflinePlatformRef *util.Ref[platform.Platform]
MetadataProviderRef *util.Ref[metadata_provider.Provider]
LocalManager local.Manager
FillerManager *fillermanager.FillerManager
WSEventManager *events.WSEventManager
AutoDownloader *autodownloader.AutoDownloader
// Core
Config *Config
Database *db.Database
Logger *zerolog.Logger
// Torrent and debrid services
TorrentClientRepository *torrent_client.Repository
TorrentRepository *torrent.Repository
DebridClientRepository *debrid_client.Repository
// File system monitoring
Watcher *scanner.Watcher
// API clients and providers
AnilistClientRef *util.Ref[anilist.AnilistClient]
AnilistPlatformRef *util.Ref[platform.Platform]
OfflinePlatformRef *util.Ref[platform.Platform]
MetadataProviderRef *util.Ref[metadata_provider.Provider]
// Library
FillerManager *fillermanager.FillerManager
AutoDownloader *autodownloader.AutoDownloader
AutoScanner *autoscanner.AutoScanner
PlaybackManager *playbackmanager.PlaybackManager
// Real-time communication
WSEventManager *events.WSEventManager
// Extensions
ExtensionRepository *extension_repo.Repository
ExtensionBankRef *util.Ref[*extension.UnifiedBank]
ExtensionPlaygroundRepository *extension_playground.PlaygroundRepository
DirectStreamManager *directstream.Manager
NativePlayer *nativeplayer.NativePlayer
MediaPlayer struct {
// Streaming
DirectStreamManager *directstream.Manager
OnlinestreamRepository *onlinestream.Repository
MediastreamRepository *mediastream.Repository
TorrentstreamRepository *torrentstream.Repository
// Players
NativePlayer *nativeplayer.NativePlayer
MediaPlayer struct {
VLC *vlc.VLC
MpcHc *mpchc.MpcHc
Mpv *mpv.Mpv
Iina *iina.Iina
}
MediaPlayerRepository *mediaplayer.Repository
Version string
Updater *updater.Updater
AutoScanner *autoscanner.AutoScanner
PlaybackManager *playbackmanager.PlaybackManager
FileCacher *filecache.Cacher
OnlinestreamRepository *onlinestream.Repository
MangaRepository *manga.Repository
DiscordPresence *discordrpc_presence.Presence
MangaDownloader *manga.Downloader
ContinuityManager *continuity.Manager
MediaPlayerRepository *mediaplayer.Repository
// Manga services
MangaRepository *manga.Repository
MangaDownloader *manga.Downloader
// Offline and local account
LocalManager local.Manager
// Utilities
FileCacher *filecache.Cacher
Updater *updater.Updater
SelfUpdater *updater.SelfUpdater
ReportRepository *report.Repository
// Integrations
DiscordPresence *discordrpc_presence.Presence
// Continuity and sync
ContinuityManager *continuity.Manager
// Lifecycle management
Cleanups []func()
OnRefreshAnilistCollectionFuncs *result.Map[string, func()]
OnFlushLogs func()
MediastreamRepository *mediastream.Repository
TorrentstreamRepository *torrentstream.Repository
FeatureFlags FeatureFlags
Settings *models.Settings
SecondarySettings struct {
// Configuration and feature flags
FeatureFlags FeatureFlags
FeatureManager *FeatureManager
Settings *models.Settings
SecondarySettings struct {
Mediastream *models.MediastreamSettings
Torrentstream *models.TorrentstreamSettings
Debrid *models.DebridSettings
} // Struct for other settings sent to the client
SelfUpdater *updater.SelfUpdater
ReportRepository *report.Repository
TotalLibrarySize uint64 // Initialized in modules.go
LibraryDir string
AnilistCacheDir string
IsDesktopSidecar bool
animeCollection *anilist.AnimeCollection
rawAnimeCollection *anilist.AnimeCollection // (retains custom lists)
mangaCollection *anilist.MangaCollection
rawMangaCollection *anilist.MangaCollection // (retains custom lists)
}
// Metadata
Version string
TotalLibrarySize uint64
LibraryDir string
AnilistCacheDir string
IsDesktopSidecar bool
Flags SeanimeFlags
// Internal state
user *user.User
previousVersion string
moduleMu sync.Mutex
HookManager hook.Manager
ServerReady bool // Whether the Anilist data from the first request has been fetched
ServerReady bool
isOfflineRef *util.Ref[bool]
NakamaManager *nakama.Manager
ServerPasswordHash string // SHA-256 hash of the server password
PlaylistManager *playlist.Manager
LibraryExplorer *library_explorer.LibraryExplorer
Flags SeanimeFlags
FeatureManager *FeatureManager
ServerPasswordHash string
// Plugin system
HookManager hook.Manager
// Features
PlaylistManager *playlist.Manager
LibraryExplorer *library_explorer.LibraryExplorer
NakamaManager *nakama.Manager
}
)
@@ -390,7 +425,7 @@ func NewApp(configOpts *ConfigOptions, selfupdater *updater.SelfUpdater) *App {
}{Mediastream: nil, Torrentstream: nil},
SelfUpdater: selfupdater,
moduleMu: sync.Mutex{},
OnRefreshAnilistCollectionFuncs: result.NewResultMap[string, func()](),
OnRefreshAnilistCollectionFuncs: result.NewMap[string, func()](),
HookManager: hookManager,
isOfflineRef: isOfflineRef,
ServerPasswordHash: serverPasswordHash,

View File

@@ -1,12 +1,13 @@
package core
import (
"github.com/rs/zerolog"
"os"
"path/filepath"
"sort"
"strings"
"time"
"github.com/rs/zerolog"
)
func TrimLogEntries(dir string, logger *zerolog.Logger) {

View File

@@ -199,7 +199,7 @@ func (a *App) initModulesOnce() {
})
// +---------------------+
// | Debrid Client Repo |
// | Debrid Client Repo |
// +---------------------+
a.DebridClientRepository = debrid_client.NewRepository(&debrid_client.NewRepositoryOptions{
@@ -237,7 +237,7 @@ func (a *App) initModulesOnce() {
a.AutoDownloader.Start()
// +---------------------+
// | Auto Scanner |
// | Auto Scanner |
// +---------------------+
a.AutoScanner = autoscanner.New(&autoscanner.NewAutoScannerOptions{

View File

@@ -62,8 +62,8 @@ func NewManager(extensionBankRef *util.Ref[*extension.UnifiedBank], db *db.Datab
ret := &Manager{
extensionBankRef: extensionBankRef,
extensionBankSubscriber: extensionBankRef.Get().Subscribe(id),
customSources: result.NewResultMap[int, extension.CustomSourceExtension](),
customSourcesById: result.NewResultMap[string, extension.CustomSourceExtension](),
customSources: result.NewMap[int, extension.CustomSourceExtension](),
customSourcesById: result.NewMap[string, extension.CustomSourceExtension](),
closedCh: make(chan struct{}),
db: db,
logger: logger,
@@ -73,17 +73,17 @@ func NewManager(extensionBankRef *util.Ref[*extension.UnifiedBank], db *db.Datab
for {
select {
case <-ret.extensionBankSubscriber.OnCustomSourcesChanged():
logger.Debug().Msg("custom source: Custom sources changed")
logger.Debug().Str("id", id).Msg("custom source: Custom sources changed")
ret.customSources.Clear()
ret.customSourcesById.Clear()
extension.RangeExtensions(extensionBankRef.Get(), func(id string, ext extension.CustomSourceExtension) bool {
logger.Debug().Str("extension", id).Msg("custom source: (manager) Extension loaded")
extension.RangeExtensions(extensionBankRef.Get(), func(extId string, ext extension.CustomSourceExtension) bool {
logger.Trace().Str("extension", extId).Str("id", id).Msg("custom source: Updated extension on manager")
ret.customSources.Set(ext.GetExtensionIdentifier(), ext)
ret.customSourcesById.Set(ext.GetID(), ext)
return true
})
case <-ret.closedCh:
logger.Trace().Msg("custom source: Closed manager")
logger.Trace().Str("id", id).Msg("custom source: Closed manager")
ret.customSources.Clear()
ret.customSourcesById.Clear()
return
@@ -91,9 +91,9 @@ func NewManager(extensionBankRef *util.Ref[*extension.UnifiedBank], db *db.Datab
}
}()
logger.Debug().Str("extension", id).Msg("custom source: Manager created, loading extensions")
logger.Debug().Str("id", id).Msg("custom source: Manager created, loading extensions")
extension.RangeExtensions(extensionBankRef.Get(), func(id string, ext extension.CustomSourceExtension) bool {
logger.Debug().Str("extension", ext.GetID()).Msg("custom source: (manager) Extension loaded")
logger.Trace().Str("extension", ext.GetID()).Str("id", id).Msg("custom source: Extension loaded on manager")
ret.customSources.Set(ext.GetExtensionIdentifier(), ext)
ret.customSourcesById.Set(ext.GetID(), ext)
return true

View File

@@ -6,7 +6,7 @@ import (
"seanime/internal/util/result"
)
var mangaMappingCache = result.NewResultMap[string, *models.MangaMapping]()
var mangaMappingCache = result.NewMap[string, *models.MangaMapping]()
func formatMangaMappingCacheKey(provider string, mediaId int) string {
return fmt.Sprintf("%s$%d", provider, mediaId)
@@ -53,7 +53,7 @@ func (db *Database) DeleteMangaMapping(provider string, mediaId int) error {
//////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
var mangaChapterContainerCache = result.NewResultMap[string, *models.MangaChapterContainer]()
var mangaChapterContainerCache = result.NewMap[string, *models.MangaChapterContainer]()
func formatMangaChapterContainerCacheKey(provider string, mediaId int, chapterId string) string {
return fmt.Sprintf("%s$%d$%s", provider, mediaId, chapterId)

View File

@@ -6,7 +6,7 @@ import (
"seanime/internal/util/result"
)
var onlinestreamMappingCache = result.NewResultMap[string, *models.OnlinestreamMapping]()
var onlinestreamMappingCache = result.NewMap[string, *models.OnlinestreamMapping]()
func formatOnlinestreamMappingCacheKey(provider string, mediaId int) string {
return fmt.Sprintf("%s$%d", provider, mediaId)

View File

@@ -141,7 +141,7 @@ func (r *Repository) downloadTorrentItem(tId string, torrentName string, destina
wg := sync.WaitGroup{}
downloadUrls := strings.Split(downloadUrl, ",")
downloadMap := result.NewResultMap[string, downloadStatus]()
downloadMap := result.NewMap[string, downloadStatus]()
for _, url := range downloadUrls {
wg.Add(1)

View File

@@ -75,7 +75,7 @@ func NewRepository(opts *NewRepositoryOptions) (ret *Repository) {
playbackManager: opts.PlaybackManager,
metadataProviderRef: opts.MetadataProviderRef,
completeAnimeCache: anilist.NewCompleteAnimeCache(),
ctxMap: result.NewResultMap[string, context.CancelFunc](),
ctxMap: result.NewMap[string, context.CancelFunc](),
previousStreamOptions: mo.None[*StartStreamOptions](),
directStreamManager: opts.DirectStreamManager,
}

View File

@@ -323,8 +323,8 @@ func (m *Manager) PlayDebridStream(ctx context.Context, opts PlayDebridStreamOpt
filename: "",
episode: episode,
episodeCollection: episodeCollection,
subtitleEventCache: result.NewResultMap[string, *mkvparser.SubtitleEvent](),
activeSubtitleStreams: result.NewResultMap[string, *SubtitleStream](),
subtitleEventCache: result.NewMap[string, *mkvparser.SubtitleEvent](),
activeSubtitleStreams: result.NewMap[string, *SubtitleStream](),
},
streamReadyCh: make(chan struct{}),
}

View File

@@ -308,8 +308,8 @@ func (m *Manager) PlayLocalFile(ctx context.Context, opts PlayLocalFileOptions)
media: media,
episode: episode,
episodeCollection: episodeCollection,
subtitleEventCache: result.NewResultMap[string, *mkvparser.SubtitleEvent](),
activeSubtitleStreams: result.NewResultMap[string, *SubtitleStream](),
subtitleEventCache: result.NewMap[string, *mkvparser.SubtitleEvent](),
activeSubtitleStreams: result.NewMap[string, *SubtitleStream](),
},
}

View File

@@ -312,8 +312,8 @@ func (m *Manager) PlayNakamaStream(ctx context.Context, opts PlayNakamaStreamOpt
filename: "",
episode: episode,
episodeCollection: episodeCollection,
subtitleEventCache: result.NewResultMap[string, *mkvparser.SubtitleEvent](),
activeSubtitleStreams: result.NewResultMap[string, *SubtitleStream](),
subtitleEventCache: result.NewMap[string, *mkvparser.SubtitleEvent](),
activeSubtitleStreams: result.NewMap[string, *SubtitleStream](),
},
streamReadyCh: make(chan struct{}),
}

View File

@@ -218,8 +218,8 @@ func (m *Manager) PlayTorrentStream(ctx context.Context, opts PlayTorrentStreamO
filename: filepath.Base(opts.File.DisplayPath()),
episode: episode,
episodeCollection: episodeCollection,
subtitleEventCache: result.NewResultMap[string, *mkvparser.SubtitleEvent](),
activeSubtitleStreams: result.NewResultMap[string, *SubtitleStream](),
subtitleEventCache: result.NewMap[string, *mkvparser.SubtitleEvent](),
activeSubtitleStreams: result.NewMap[string, *SubtitleStream](),
isNakamaWatchParty: opts.IsNakamaWatchParty,
},
streamReadyCh: make(chan struct{}),

View File

@@ -79,10 +79,10 @@ func NewWSEventManager(logger *zerolog.Logger) *WSEventManager {
ret := &WSEventManager{
Logger: logger,
Conns: make([]*WSConn, 0),
clientEventSubscribers: result.NewResultMap[string, *ClientEventSubscriber](),
clientNativePlayerEventSubscribers: result.NewResultMap[string, *ClientEventSubscriber](),
nakamaEventSubscribers: result.NewResultMap[string, *ClientEventSubscriber](),
playlistEventSubscribers: result.NewResultMap[string, *ClientEventSubscriber](),
clientEventSubscribers: result.NewMap[string, *ClientEventSubscriber](),
clientNativePlayerEventSubscribers: result.NewMap[string, *ClientEventSubscriber](),
nakamaEventSubscribers: result.NewMap[string, *ClientEventSubscriber](),
playlistEventSubscribers: result.NewMap[string, *ClientEventSubscriber](),
}
GlobalWSEventManager = &GlobalWSEventManagerWrapper{
WSEventManager: ret,

View File

@@ -22,7 +22,7 @@ type (
func NewMockWSEventManager(logger *zerolog.Logger) *MockWSEventManager {
return &MockWSEventManager{
Logger: logger,
ClientEventSubscribers: result.NewResultMap[string, *ClientEventSubscriber](),
ClientEventSubscribers: result.NewMap[string, *ClientEventSubscriber](),
}
}

View File

@@ -24,8 +24,8 @@ type BankSubscriber struct {
func NewUnifiedBank() *UnifiedBank {
return &UnifiedBank{
extensions: result.NewResultMap[string, BaseExtension](),
subscribers: result.NewResultMap[string, *BankSubscriber](),
extensions: result.NewMap[string, BaseExtension](),
subscribers: result.NewMap[string, *BankSubscriber](),
mu: sync.RWMutex{},
}
}
@@ -41,7 +41,7 @@ func (b *UnifiedBank) Unlock() {
func (b *UnifiedBank) Reset() {
b.mu.Lock()
defer b.mu.Unlock()
b.extensions = result.NewResultMap[string, BaseExtension]()
b.extensions = result.NewMap[string, BaseExtension]()
}
func (b *UnifiedBank) Subscribe(id string) *BankSubscriber {

View File

@@ -14,6 +14,8 @@ type (
SupportsManga bool `json:"supportsManga"`
// SupportsBidirectionalSync indicates if Seanime can pull data from the tracker
SupportsBidirectionalSync bool `json:"supportsBidirectionalSync"`
MaxRequestsPerSecond int `json:"maxRequestsPerSecond,omitempty"`
CacheVersion int `json:"cacheVersion,omitempty"`
}
UserInfo struct {
@@ -22,12 +24,19 @@ type (
}
MediaEntry struct {
// Source is "anilist" for AniList entries, or the custom source name for custom sources
// Source is "anilist" for AniList entries, or the custom source ID for custom sources.
// e.g. For "One Piece" on AniList, this would be "anilist".
// For "Plur1bus" on the SIMKL custom source, this would be "simkl".
// When Pulling: The extension MUST populate this.
Source string `json:"source"`
// MediaId is the AniList or custom source media ID (internal ID)
// MediaId is the AniList or custom source media ID (Seanime ID).
// When Pulling: The extension could leave this empty.
MediaId int `json:"mediaId"`
// ExternalId is the tracker's own media ID (e.g., MAL ID: "12345", Kitsu ID: "54321")
// This should be populated by the extension's ID mapping logic
// MalId is the MyAnimeList ID (if available)
MalId *int `json:"malId,omitempty"`
// ExternalId
// When Pulling: The extension MUST populate this.
// When Pushing: Seanime will populate this (using ResolveExternalId).
ExternalId string `json:"externalId"`
// MediaType is either "ANIME" or "MANGA"
MediaType string `json:"mediaType"`
@@ -50,22 +59,54 @@ type (
Provider interface {
GetSettings() Settings
// PushEntry updates the given entry on the tracker.
PushEntry(ctx context.Context, entry *MediaEntry) error
PushEntries(ctx context.Context, entries []*MediaEntry) (map[int]error, error)
PullEntry(ctx context.Context, mediaId int) (*MediaEntry, error)
// PullEntries returns all entries from the tracker.
PullEntries(ctx context.Context) ([]*MediaEntry, error)
// DeleteEntry deletes the entry from the tracker.
DeleteEntry(ctx context.Context, mediaId int) error
IsLoggedIn(ctx context.Context) bool
GetUserInfo(ctx context.Context) (*UserInfo, error)
GetUserInfo(ctx context.Context) (*UserInfo, bool)
TestConnection(ctx context.Context) error
ResolveMediaId(ctx context.Context, mediaId int, mediaType string) (string, error)
// ResolveExternalId finds the tracker-specific ID for a given Seanime media entry.
// Seanime calls this BEFORE calling PushEntry to ensure MediaEntry.ExternalId is populated.
// The ID is cached for future calls; if it ever changes, invalidate the cache by changing the value of Settings.CacheVersion.
ResolveExternalId(ctx context.Context, entry *MediaEntry) (string, error)
// ResolveReverseMapping resolves a tracker external ID back to a *MediaEntry.
ResolveReverseMapping(ctx context.Context, externalId string) (*MediaEntry, error)
}
)
type DiffType string
const (
DiffTypeNone DiffType = "none"
DiffTypeLocalOnly DiffType = "local-only" // Exists in Seanime, missing in Tracker
DiffTypeRemoteOnly DiffType = "remote-only" // Exists in Tracker, missing in Seanime
DiffTypeDivergent DiffType = "divergent" // Exists in both, but values differ
DiffTypeMappingError DiffType = "mapping-error" // Cannot resolve ID
)
type Action string
const (
ActionPush Action = "push"
ActionPull Action = "pull"
ActionIgnore Action = "ignore"
)
type SyncDiff struct {
MediaID int // Seanime ID
ExternalID string // Tracker ID
Type DiffType
Local *MediaEntry
Remote *MediaEntry
ProposedAction Action
}

View File

@@ -125,14 +125,14 @@ func NewRepository(opts *NewRepositoryOptions) *Repository {
logger: opts.Logger,
extensionDir: opts.ExtensionDir,
wsEventManager: opts.WSEventManager,
gojaExtensions: result.NewResultMap[string, GojaExtension](),
gojaExtensions: result.NewMap[string, GojaExtension](),
gojaRuntimeManager: goja_runtime.NewManager(opts.Logger),
extensionBankRef: opts.ExtensionBankRef,
invalidExtensions: result.NewResultMap[string, *extension.InvalidExtension](),
invalidExtensions: result.NewMap[string, *extension.InvalidExtension](),
fileCacher: opts.FileCacher,
hookManager: opts.HookManager,
client: http.DefaultClient,
builtinExtensions: result.NewResultMap[string, *builtinExtension](),
builtinExtensions: result.NewMap[string, *builtinExtension](),
updateData: make([]UpdateData, 0),
}

View File

@@ -45,7 +45,7 @@ func NewManager(logger *zerolog.Logger) *Manager {
// GetOrCreatePrivatePool returns the pool for the given extension.
func (m *Manager) GetOrCreatePrivatePool(extID string, initFn func() *goja.Runtime) (*Pool, error) {
if m.pluginPools == nil {
m.pluginPools = result.NewResultMap[string, *Pool]()
m.pluginPools = result.NewMap[string, *Pool]()
}
pool, ok := m.pluginPools.Get(extID)

View File

@@ -151,7 +151,7 @@ func (h *Handler) HandleGetAnilistAnimeDetails(c echo.Context) error {
//----------------------------------------------------------------------------------------------------------------------------------------------------
var studioDetailsMap = result.NewResultMap[int, *anilist.StudioDetails]()
var studioDetailsMap = result.NewMap[int, *anilist.StudioDetails]()
// HandleGetAnilistStudioDetails
//

View File

@@ -22,7 +22,6 @@ func (h *Handler) OptionalAuthMiddleware(next echo.HandlerFunc) echo.HandlerFunc
path == "/api/v1/status" || // for interface
path == "/events" || // for server events
strings.HasPrefix(path, "/api/v1/directstream") || // ID & path based
// strings.HasPrefix(path, "/api/v1/mediastream") || // used by media players // NOTE: DO NOT
strings.HasPrefix(path, "/api/v1/mediastream/att/") || // used by media players
strings.HasPrefix(path, "/api/v1/mediastream/direct") || // used by media players
strings.HasPrefix(path, "/api/v1/mediastream/transcode/") || // used by media players

View File

@@ -48,7 +48,7 @@ type Status struct {
ServerHasPassword bool `json:"serverHasPassword"`
}
var clientInfoCache = result.NewResultMap[string, util.ClientInfo]()
var clientInfoCache = result.NewMap[string, util.ClientInfo]()
// NewStatus returns a new Status struct.
// It uses the RouteCtx to get the App instance containing the Database instance.

View File

@@ -1,117 +0,0 @@
package legacymkvparser
// TrackType represents the type of a Matroska track.
type TrackType string
// Known Matroska track types. TrackTypeUnknown is the fallback used when the
// track type element is missing or unrecognized.
const (
TrackTypeVideo TrackType = "video"
TrackTypeAudio TrackType = "audio"
TrackTypeSubtitle TrackType = "subtitle"
TrackTypeLogo TrackType = "logo"
TrackTypeButtons TrackType = "buttons"
TrackTypeComplex TrackType = "complex"
TrackTypeUnknown TrackType = "unknown"
)
// AttachmentType classifies an embedded Matroska attachment
// (e.g. fonts referenced by ASS/SSA subtitle tracks).
type AttachmentType string
const (
AttachmentTypeFont AttachmentType = "font"
AttachmentTypeSubtitle AttachmentType = "subtitle"
AttachmentTypeOther AttachmentType = "other"
)
// TrackInfo holds extracted information about a media track.
type TrackInfo struct {
Number int64 `json:"number"`
UID int64 `json:"uid"`
Type TrackType `json:"type"` // "video", "audio", "subtitle", etc.
CodecID string `json:"codecID"`
Name string `json:"name,omitempty"`
Language string `json:"language,omitempty"` // Best effort language code
LanguageIETF string `json:"languageIETF,omitempty"` // IETF language code
Default bool `json:"default"`
Forced bool `json:"forced"`
Enabled bool `json:"enabled"`
CodecPrivate string `json:"codecPrivate,omitempty"` // Raw CodecPrivate data, often used for subtitle headers (e.g., ASS/SSA styles)
// Video specific
Video *VideoTrack `json:"video,omitempty"`
// Audio specific
Audio *AudioTrack `json:"audio,omitempty"`
// Internal fields
contentEncodings *ContentEncodings `json:"-"`
defaultDuration uint64 `json:"-"` // in ns
}
// ChapterInfo holds extracted information about a chapter.
type ChapterInfo struct {
UID uint64 `json:"uid"`
Start float64 `json:"start"` // Start time in seconds
End float64 `json:"end,omitempty"` // End time in seconds
Text string `json:"text,omitempty"`
Languages []string `json:"languages,omitempty"` // Legacy 3-letter language codes
LanguagesIETF []string `json:"languagesIETF,omitempty"` // IETF language tags
EditionUID uint64 `json:"-"`
}
// AttachmentInfo holds extracted information about an attachment.
type AttachmentInfo struct {
UID uint64 `json:"uid"`
Filename string `json:"filename"`
Mimetype string `json:"mimetype"`
Size int `json:"size"`
Description string `json:"description,omitempty"`
Type AttachmentType `json:"type,omitempty"`
Data []byte `json:"-"` // Data loaded into memory
IsCompressed bool `json:"-"` // Whether the data is compressed
}
// Metadata holds all extracted metadata.
type Metadata struct {
Title string `json:"title,omitempty"`
Duration float64 `json:"duration"` // Duration in seconds
TimecodeScale float64 `json:"timecodeScale"` // Original timecode scale from Info
MuxingApp string `json:"muxingApp,omitempty"`
WritingApp string `json:"writingApp,omitempty"`
Tracks []*TrackInfo `json:"tracks"`
VideoTracks []*TrackInfo `json:"videoTracks"`
AudioTracks []*TrackInfo `json:"audioTracks"`
SubtitleTracks []*TrackInfo `json:"subtitleTracks"`
Chapters []*ChapterInfo `json:"chapters"`
Attachments []*AttachmentInfo `json:"attachments"`
MimeCodec string `json:"mimeCodec,omitempty"` // RFC 6381 codec string
// Error holds the parsing error, if any; not serialized.
Error error `json:"-"`
}
// GetTrackByNumber returns the track whose Number equals num,
// or nil when no track in m.Tracks matches.
func (m *Metadata) GetTrackByNumber(num int64) *TrackInfo {
	for i := range m.Tracks {
		if m.Tracks[i].Number == num {
			return m.Tracks[i]
		}
	}
	return nil
}
// GetAttachmentByName looks up an attachment by its filename.
// The boolean result reports whether a matching attachment was found.
func (m *Metadata) GetAttachmentByName(name string) (*AttachmentInfo, bool) {
	for i := range m.Attachments {
		if a := m.Attachments[i]; a.Filename == name {
			return a, true
		}
	}
	return nil, false
}
///////////////////////////////////////////////////////////////////////////////////////////
// IsAudioTrack reports whether the track is an audio track.
func (t *TrackInfo) IsAudioTrack() bool {
return t.Type == TrackTypeAudio
}
// IsVideoTrack reports whether the track is a video track.
func (t *TrackInfo) IsVideoTrack() bool {
return t.Type == TrackTypeVideo
}
// IsSubtitleTrack reports whether the track is a subtitle track.
func (t *TrackInfo) IsSubtitleTrack() bool {
return t.Type == TrackTypeSubtitle
}

File diff suppressed because it is too large Load Diff

View File

@@ -1,146 +0,0 @@
package legacymkvparser
import (
"bytes"
"fmt"
"strings"
"github.com/5rahim/go-astisub"
)
// Subtitle format identifiers returned by DetectSubtitleType and accepted by
// ConvertToASS. Values are assigned by iota starting at 0.
const (
	SubtitleTypeASS = iota
	SubtitleTypeSRT
	SubtitleTypeSTL
	SubtitleTypeTTML
	SubtitleTypeWEBVTT
	SubtitleTypeUnknown
)
// isProbablySrt heuristically decides whether content looks like an SRT file:
// real SRT content contains many "-->" timing separators (more than 5 here).
func isProbablySrt(content string) bool {
	return strings.Count(content, "-->") > 5
}
// DetectSubtitleType inspects raw subtitle text and guesses its format.
// Checks run from most to least specific; SubtitleTypeUnknown is returned
// when nothing matches.
func DetectSubtitleType(content string) int {
	trimmed := strings.TrimSpace(content)
	switch {
	case strings.HasPrefix(trimmed, "[Script Info]"):
		return SubtitleTypeASS
	case isProbablySrt(content):
		return SubtitleTypeSRT
	case strings.Contains(content, "<tt ") || strings.Contains(content, "<tt>"):
		return SubtitleTypeTTML
	case strings.HasPrefix(trimmed, "WEBVTT"):
		return SubtitleTypeWEBVTT
	case strings.Contains(content, "{\\") || strings.Contains(content, "\\N"):
		return SubtitleTypeSTL
	}
	return SubtitleTypeUnknown
}
// ConvertToASS converts subtitle content from the given format (one of the
// SubtitleType* constants) into an ASS/SSA document and returns it as a string.
// A single "Default" style is injected and applied to every dialogue line.
// If from is SubtitleTypeUnknown, the format is auto-detected first via
// DetectSubtitleType; an error is returned when detection fails, when the
// format is unsupported, or when parsing/writing fails.
// Note: SubtitleTypeASS is not handled here — callers are expected to pass
// ASS content through unchanged rather than re-convert it.
func ConvertToASS(content string, from int) (string, error) {
var o *astisub.Subtitles
var err error
reader := bytes.NewReader([]byte(content))
// Label allows the SubtitleTypeUnknown case to re-enter the switch once the
// real format has been detected.
read:
switch from {
case SubtitleTypeSRT:
o, err = astisub.ReadFromSRT(reader)
case SubtitleTypeSTL:
o, err = astisub.ReadFromSTL(reader, astisub.STLOptions{IgnoreTimecodeStartOfProgramme: true})
case SubtitleTypeTTML:
o, err = astisub.ReadFromTTML(reader)
case SubtitleTypeWEBVTT:
o, err = astisub.ReadFromWebVTT(reader)
case SubtitleTypeUnknown:
detectedType := DetectSubtitleType(content)
if detectedType == SubtitleTypeUnknown {
return "", fmt.Errorf("failed to detect subtitle format from content")
}
from = detectedType
goto read
default:
return "", fmt.Errorf("unsupported subtitle format: %d", from)
}
if err != nil {
return "", fmt.Errorf("failed to read subtitles: %w", err)
}
if o == nil {
return "", fmt.Errorf("failed to read subtitles: %w", err)
}
// Force a v4.00+ script with a fixed 640x360 canvas and scaled borders.
o.Metadata = &astisub.Metadata{
SSAScriptType: "v4.00+",
SSAWrapStyle: "0",
SSAPlayResX: &[]int{640}[0],
SSAPlayResY: &[]int{360}[0],
SSAScaledBorderAndShadow: true,
}
//Format: Name, Fontname, Fontsize, PrimaryColour, SecondaryColour, OutlineColour, BackColour, Bold, Italic, Underline, StrikeOut, ScaleX, ScaleY, Spacing, Angle, BorderStyle, Outline, Shadow, Alignment, MarginL, MarginR, MarginV, Encoding
//Style: Default, Roboto Medium,24,&H00FFFFFF,&H000000FF,&H00000000,&H00000000,0,0,0,0,100,100,0,0,1,1.3,0,2,20,20,23,0
// Single "Default" style mirroring the Format/Style lines quoted above.
// The &[]T{v}[0] pattern is a shorthand for taking the address of a literal.
o.Styles["Default"] = &astisub.Style{
ID: "Default",
InlineStyle: &astisub.StyleAttributes{
SSAFontName: "Roboto Medium",
SSAFontSize: &[]float64{24}[0],
SSAPrimaryColour: &astisub.Color{
Red: 255,
Green: 255,
Blue: 255,
Alpha: 0,
},
SSASecondaryColour: &astisub.Color{
Red: 255,
Green: 0,
Blue: 0,
Alpha: 0,
},
SSAOutlineColour: &astisub.Color{
Red: 0,
Green: 0,
Blue: 0,
Alpha: 0,
},
SSABackColour: &astisub.Color{
Red: 0,
Green: 0,
Blue: 0,
Alpha: 0,
},
SSABold: &[]bool{false}[0],
SSAItalic: &[]bool{false}[0],
SSAUnderline: &[]bool{false}[0],
SSAStrikeout: &[]bool{false}[0],
SSAScaleX: &[]float64{100}[0],
SSAScaleY: &[]float64{100}[0],
SSASpacing: &[]float64{0}[0],
SSAAngle: &[]float64{0}[0],
SSABorderStyle: &[]int{1}[0],
SSAOutline: &[]float64{1.3}[0],
SSAShadow: &[]float64{0}[0],
SSAAlignment: &[]int{2}[0],
SSAMarginLeft: &[]int{20}[0],
SSAMarginRight: &[]int{20}[0],
SSAMarginVertical: &[]int{23}[0],
SSAEncoding: &[]int{0}[0],
},
}
// Point every dialogue line at the injected Default style, discarding any
// style the source format carried.
for _, item := range o.Items {
item.Style = &astisub.Style{
ID: "Default",
}
}
w := &bytes.Buffer{}
err = o.WriteToSSA(w)
if err != nil {
return "", fmt.Errorf("failed to write subtitles: %w", err)
}
return w.String(), nil
}

View File

@@ -1,37 +0,0 @@
package legacymkvparser
import (
"testing"
"github.com/stretchr/testify/require"
)
// TestConvertSRTToASS converts a tiny two-cue SRT document and compares the
// result against the exact golden ASS output (script info, the single
// "Default" style line, and both dialogue events). The second cue contains
// "<-->" to check that arrow-like text inside a cue body survives conversion
// and is not mistaken for a timing separator.
func TestConvertSRTToASS(t *testing.T) {
	srt := `1
00:00:00,000 --> 00:00:03,000
Hello, world!
2
00:00:04,000 --> 00:00:06,000
This is a <--> test.
`
	out, err := ConvertToASS(srt, SubtitleTypeSRT)
	require.NoError(t, err)
	require.Equal(t, `[Script Info]
PlayResX: 640
PlayResY: 360
ScriptType: v4.00+
WrapStyle: 0
ScaledBorderAndShadow: yes
[V4+ Styles]
Format: Name, Alignment, Angle, BackColour, Bold, BorderStyle, Encoding, Fontname, Fontsize, Italic, MarginL, MarginR, MarginV, Outline, OutlineColour, PrimaryColour, ScaleX, ScaleY, SecondaryColour, Shadow, Spacing, Strikeout, Underline
Style: Default,2,0.000,&H00000000,0,1,0,Roboto Medium,24.000,0,20,20,23,1.300,&H00000000,&H00ffffff,100.000,100.000,&H000000ff,0.000,0.000,0,0
[Events]
Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
Dialogue: 0,00:00:00.00,00:00:03.00,Default,,0,0,0,,Hello, world!
Dialogue: 0,00:00:04.00,00:00:06.00,Default,,0,0,0,,This is a <--> test.
`, out)
}

View File

@@ -1,364 +0,0 @@
package legacymkvparser
import (
"context"
"errors"
"io"
"net/http"
"os"
"path/filepath"
"seanime/internal/util"
httputil "seanime/internal/util/http"
"seanime/internal/util/torrentutil"
"strings"
"testing"
"time"
"github.com/anacrolix/torrent"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
var (
testMagnet = ""
testHttpUrl = ""
testFile = ""
// Timeout for torrent operations
torrentInfoTimeout = 60 * time.Second
// Timeout for metadata parsing test
metadataTestTimeout = 90 * time.Second
// Number of initial pieces to prioritize for header metadata
initialPiecesToPrioritize = 20
)
// getTestTorrentClient builds a torrent client whose data directory lives in
// a subdirectory of the test's temp dir, failing the test on any setup error.
func getTestTorrentClient(t *testing.T, tempDir string) *torrent.Client {
	t.Helper()
	conf := torrent.NewDefaultClientConfig()
	// Keep torrent data isolated inside the per-test temp directory.
	conf.DataDir = filepath.Join(tempDir, "torrent_data")
	if err := os.MkdirAll(conf.DataDir, 0755); err != nil {
		t.Fatalf("failed to create torrent data directory: %v", err)
	}
	c, err := torrent.NewClient(conf)
	if err != nil {
		t.Fatalf("failed to create torrent client: %v", err)
	}
	return c
}
// hasExt reports whether name ends with ext, comparing case-insensitively.
// An ext longer than name can never match.
//
// strings.EqualFold on the suffix replaces the original double ToLower,
// avoiding two intermediate string allocations per call.
func hasExt(name, ext string) bool {
	return len(name) >= len(ext) && strings.EqualFold(name[len(name)-len(ext):], ext)
}
// hasVideoExt reports whether name carries one of the common video file
// extensions (case-insensitive, via hasExt).
func hasVideoExt(name string) bool {
	for _, ext := range []string{".mkv", ".mp4", ".avi", ".mov", ".webm"} {
		if hasExt(name, ext) {
			return true
		}
	}
	return false
}
// getTestTorrentFile adds the torrent, waits for metadata, returns the first video file.
//
// On any failure (add error, metadata timeout, no video file) the torrent is
// dropped and the client closed before the test is failed, so no background
// goroutines outlive the test. On success the CALLER owns the returned client
// and torrent and must drop/close them (see t.Cleanup in the caller).
func getTestTorrentFile(t *testing.T, magnet string, tempDir string) (*torrent.Client, *torrent.Torrent, *torrent.File) {
	t.Helper()
	client := getTestTorrentClient(t, tempDir)
	// Bound the wait for torrent metadata — the swarm may be slow or dead.
	tctx, cancel := context.WithTimeout(context.Background(), torrentInfoTimeout)
	defer cancel()
	tor, err := client.AddMagnet(magnet)
	if err != nil {
		client.Close() // Close client on error
		t.Fatalf("failed to add magnet: %v", err)
	}
	t.Log("Waiting for torrent info...")
	select {
	case <-tor.GotInfo():
		t.Log("Torrent info received.")
		// continue
	case <-tctx.Done():
		tor.Drop()     // Attempt to drop torrent
		client.Close() // Close client
		t.Fatalf("timeout waiting for torrent metadata (%v)", torrentInfoTimeout)
	}
	// Find the first video file (by extension of its display path)
	for _, f := range tor.Files() {
		path := f.DisplayPath()
		if hasVideoExt(path) {
			t.Logf("Found video file: %s (Size: %d bytes)", path, f.Length())
			return client, tor, f
		}
	}
	t.Logf("No video file found in torrent info: %s", tor.Info().Name)
	tor.Drop()     // Drop torrent if no suitable file found
	client.Close() // Close client
	t.Fatalf("no video file found in torrent")
	return nil, nil, nil // Should not be reached (Fatalf stops the test)
}
// assertTestResult validates a Metadata result shared by all parser tests:
// context timeouts/cancellations are tolerated with a warning while any other
// parse error fails the test; the duration and timecode scale must be
// positive; at least one video and one audio track must be present with sane
// dimensions / sample rates; chapters and attachments are logged for manual
// inspection.
//
// BUGFIX: the original had a second `else if result.Error != nil` branch that
// was unreachable (the same condition was already fully handled above); it
// has been removed.
func assertTestResult(t *testing.T, result *Metadata) {
	//util.Spew(result)
	if result.Error != nil {
		// If the error is context timeout/canceled, it's less severe but still worth noting
		if errors.Is(result.Error, context.DeadlineExceeded) || errors.Is(result.Error, context.Canceled) {
			t.Logf("Warning: GetMetadata context deadline exceeded or canceled: %v", result.Error)
		} else {
			t.Errorf("GetMetadata failed with unexpected error: %v", result.Error)
		}
	}
	// Check Duration (should be positive for this known file)
	assert.True(t, result.Duration > 0, "Expected Duration to be positive, got %.2f", result.Duration)
	t.Logf("Duration: %.2f seconds", result.Duration)
	// Check TimecodeScale
	assert.True(t, result.TimecodeScale > 0, "Expected TimecodeScale to be positive, got %f", result.TimecodeScale)
	t.Logf("TimecodeScale: %f", result.TimecodeScale)
	// Check Muxing/Writing App (often present)
	if result.MuxingApp != "" {
		t.Logf("MuxingApp: %s", result.MuxingApp)
	}
	if result.WritingApp != "" {
		t.Logf("WritingApp: %s", result.WritingApp)
	}
	// Check Tracks (expecting video, audio, subs for this file)
	assert.NotEmpty(t, result.Tracks, "Expected to find tracks")
	t.Logf("Found %d total tracks:", len(result.Tracks))
	foundVideo := false
	foundAudio := false
	for i, track := range result.Tracks {
		t.Logf(" Track %d:\n Type=%s, Codec=%s, Lang=%s, LangIETF=%s Name='%s', Default=%v, Forced=%v, Enabled=%v",
			i, track.Type, track.CodecID, track.Language, track.LanguageIETF, track.Name, track.Default, track.Forced, track.Enabled)
		if track.Video != nil {
			foundVideo = true
			assert.True(t, track.Video.PixelWidth > 0, "Video track should have PixelWidth > 0")
			assert.True(t, track.Video.PixelHeight > 0, "Video track should have PixelHeight > 0")
			t.Logf(" Video Details: %dx%d", track.Video.PixelWidth, track.Video.PixelHeight)
		}
		if track.Audio != nil {
			foundAudio = true
			assert.True(t, track.Audio.SamplingFrequency > 0, "Audio track should have SamplingFrequency > 0")
			assert.True(t, track.Audio.Channels > 0, "Audio track should have Channels > 0")
			t.Logf(" Audio Details: Freq=%.1f, Channels=%d, BitDepth=%d", track.Audio.SamplingFrequency, track.Audio.Channels, track.Audio.BitDepth)
		}
		t.Log()
	}
	assert.True(t, foundVideo, "Expected to find at least one video track")
	assert.True(t, foundAudio, "Expected to find at least one audio track")
	t.Logf("Found %d total chapters:", len(result.Chapters))
	for _, chapter := range result.Chapters {
		t.Logf(" Chapter %d: StartTime=%.2f, EndTime=%.2f, Name='%s'",
			chapter.UID, chapter.Start, chapter.End, chapter.Text)
	}
	t.Logf("Found %d total attachments:", len(result.Attachments))
	for _, att := range result.Attachments {
		t.Logf(" Attachment %d: Name='%s', MimeType='%s', Size=%d bytes",
			att.UID, att.Filename, att.Mimetype, att.Size)
	}
	// Print the JSON representation of the result
	//jsonResult, err := json.MarshalIndent(result, "", " ")
	//if err != nil {
	//	t.Fatalf("Failed to marshal result to JSON: %v", err)
	//}
	//t.Logf("JSON Result: %s", string(jsonResult))
}
// testStreamSubtitles exercises MetadataParser.ExtractSubtitles by streaming
// subtitle events from `reader` starting at byte `offset` (the 1024*1024
// argument's exact semantics — buffer or chunk size — should be confirmed in
// ExtractSubtitles) for at most 30 seconds, then logs the first few events.
//
// NOTE(review): streamedSubtitles is appended to in the collector goroutine
// and read below after the select; on the timeout path the goroutine may
// still be appending while the main goroutine reads — a likely data race
// under `go test -race`. Confirm, and guard with a mutex or read only after
// <-collectDone.
func testStreamSubtitles(t *testing.T, parser *MetadataParser, reader io.ReadSeekCloser, offset int64, ctx context.Context) {
	// Stream for 30 seconds
	streamCtx, cancel := context.WithTimeout(ctx, 30*time.Second)
	defer cancel()
	subtitleCh, errCh, _ := parser.ExtractSubtitles(streamCtx, reader, offset, 1024*1024)
	var streamedSubtitles []*SubtitleEvent
	// Collect subtitles with a timeout
	collectDone := make(chan struct{})
	go func() {
		defer func() {
			// Close the reader if it implements io.Closer
			if closer, ok := reader.(io.Closer); ok {
				_ = closer.Close()
			}
		}()
		defer close(collectDone)
		for {
			select {
			case subtitle, ok := <-subtitleCh:
				if !ok {
					return // Channel closed
				}
				streamedSubtitles = append(streamedSubtitles, subtitle)
			case <-streamCtx.Done():
				return // Timeout
			}
		}
	}()
	// Wait for all subtitles or timeout
	select {
	case <-collectDone:
		// All subtitles collected
	case <-streamCtx.Done():
		t.Log("StreamSubtitles collection timed out (this is expected for large files)")
	}
	// Check for errors (non-blocking: the stream may still be producing)
	select {
	case err := <-errCh:
		if err != nil {
			t.Logf("StreamSubtitles returned an error: %v", err)
		}
	default:
		// No errors yet
	}
	t.Logf("Found %d streamed subtitles:", len(streamedSubtitles))
	for i, sub := range streamedSubtitles {
		if i < 5 { // Log first 5 subtitles
			t.Logf(" Streamed Subtitle %d: TrackNumber=%d, StartTime=%.2f, Text='%s'",
				i, sub.TrackNumber, sub.StartTime, sub.Text)
		}
	}
}
// TestMetadataParser_Torrent performs an integration test.
// It downloads the header of a real torrent and parses its metadata.
//
// Requires network access and a live swarm for testMagnet (empty by default,
// so the run will fail inside getTestTorrentFile unless one is filled in).
func TestMetadataParser_Torrent(t *testing.T) {
	if testing.Short() {
		t.Skip("Skipping integration test in short mode")
	}
	tempDir := t.TempDir()
	client, tor, file := getTestTorrentFile(t, testMagnet, tempDir)
	// Ensure client and torrent are closed/dropped eventually
	t.Cleanup(func() {
		t.Log("Dropping torrent...")
		tor.Drop()
		t.Log("Closing torrent client...")
		client.Close()
		t.Log("Cleanup finished.")
	})
	logger := util.NewLogger()
	parser := NewMetadataParser(file.NewReader(), logger)
	// Create context with timeout for the metadata parsing operation itself
	ctx, cancel := context.WithTimeout(context.Background(), metadataTestTimeout)
	defer cancel()
	t.Log("Calling file.Download() to enable piece requests...")
	file.Download() // Start download requests
	// Prioritize initial pieces to ensure metadata is fetched quickly
	torInfo := tor.Info()
	if torInfo != nil && torInfo.NumPieces() > 0 {
		numPieces := torInfo.NumPieces()
		piecesToFetch := initialPiecesToPrioritize
		if numPieces < piecesToFetch {
			piecesToFetch = numPieces
		}
		t.Logf("Prioritizing first %d pieces (out of %d) for header parsing...", piecesToFetch, numPieces)
		for i := 0; i < piecesToFetch; i++ {
			p := tor.Piece(i)
			if p != nil {
				p.SetPriority(torrent.PiecePriorityNow)
			}
		}
		// Give a moment for prioritization to take effect and requests to start
		time.Sleep(500 * time.Millisecond)
	} else {
		t.Log("Torrent info or pieces not available for prioritization.")
	}
	t.Log("Calling GetMetadata...")
	startTime := time.Now()
	metadata := parser.GetMetadata(ctx)
	elapsed := time.Since(startTime)
	t.Logf("GetMetadata took %v", elapsed)
	assertTestResult(t, metadata)
	// NOTE(review): 78123456 is an arbitrary mid-file byte offset used to
	// exercise subtitle streaming — confirm it is below the file length for
	// the chosen torrent.
	testStreamSubtitles(t, parser, torrentutil.NewReadSeeker(tor, file, logger), 78123456, ctx)
}
// TestMetadataParser_HTTPStream tests parsing from an HTTP stream.
// Skipped unless testHttpUrl is set; requires network access.
func TestMetadataParser_HTTPStream(t *testing.T) {
	if testHttpUrl == "" {
		t.Skip("Skipping HTTP stream test")
	}
	logger := util.NewLogger()
	res, err := http.Get(testHttpUrl)
	if err != nil {
		t.Fatalf("HTTP GET request failed: %v", err)
	}
	defer res.Body.Close()
	rs := httputil.NewHttpReadSeeker(res)
	// NOTE(review): the status is checked only after wrapping the body;
	// harmless, but checking before NewHttpReadSeeker would read better.
	if res.StatusCode != http.StatusOK {
		t.Fatalf("HTTP GET request returned non-OK status: %s", res.Status)
	}
	parser := NewMetadataParser(rs, logger)
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) // 30-second timeout for parsing
	defer cancel()
	metadata := parser.GetMetadata(ctx)
	assertTestResult(t, metadata)
	// Rewind before streaming subtitles from an arbitrary mid-file offset.
	_, err = rs.Seek(0, io.SeekStart)
	require.NoError(t, err)
	testStreamSubtitles(t, parser, rs, 1230000000, ctx)
}
// TestMetadataParser_File parses metadata from a local file, then rewinds and
// streams subtitles from a mid-file offset. Skipped unless testFile is set.
//
// BUGFIX: the cancel func returned by context.WithTimeout was previously
// discarded (`ctx, _ :=`), leaking the context's timer until expiry — flagged
// by `go vet` (lostcancel). It is now deferred.
func TestMetadataParser_File(t *testing.T) {
	if testFile == "" {
		t.Skip("Skipping file test")
	}
	logger := util.NewLogger()
	file, err := os.Open(testFile)
	if err != nil {
		t.Fatalf("Could not open file: %v", err)
	}
	defer file.Close()
	parser := NewMetadataParser(file, logger)
	ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second) // 30-second timeout for parsing
	defer cancel()
	metadata := parser.GetMetadata(ctx)
	assertTestResult(t, metadata)
	// Rewind before streaming subtitles from an arbitrary offset.
	_, err = file.Seek(0, io.SeekStart)
	require.NoError(t, err)
	testStreamSubtitles(t, parser, file, 123000, ctx)
}

View File

@@ -1,183 +0,0 @@
package legacymkvparser
import (
"bytes"
"io"
"strings"
)
// ReadIsMkvOrWebm reads the first 1KB of the stream to determine if it is a
// Matroska or WebM file, returning the mime type and whether detection
// succeeded. The stream is rewound to its start both before sniffing and
// (via defer) after, so the next consumer sees the full stream.
func ReadIsMkvOrWebm(r io.ReadSeeker) (string, bool) {
	if _, err := r.Seek(0, io.SeekStart); err != nil {
		return "", false
	}
	// Rewind once detection is done, regardless of outcome.
	defer r.Seek(0, io.SeekStart)
	return isMkvOrWebm(r)
}
func isMkvOrWebm(r io.Reader) (string, bool) {
header := make([]byte, 1024) // Read the first 1KB to be safe
n, err := r.Read(header)
if err != nil {
return "", false
}
// Check for EBML magic bytes
if !bytes.HasPrefix(header, []byte{0x1A, 0x45, 0xDF, 0xA3}) {
return "", false
}
// Look for the DocType tag (0x42 82) and check the string
docTypeTag := []byte{0x42, 0x82}
idx := bytes.Index(header, docTypeTag)
if idx == -1 || idx+3 >= n {
return "", false
}
size := int(header[idx+2]) // Size of DocType field
if idx+3+size > n {
return "", false
}
docType := string(header[idx+3 : idx+3+size])
switch docType {
case "matroska":
return "video/x-matroska", true
case "webm":
return "video/webm", true
default:
return "", false
}
}
// UTF8ToASSText converts HTML-ish subtitle markup (as found in SRT/WebVTT
// cues) into ASS override tags: HTML entities are decoded, line breaks become
// \N, b/i/u/s tags become {\b1}/{\i1}/... pairs, and <font> attributes
// (color/size/face) are translated into {\c...}/{\fs...}/{\fn...} with a
// {\r} reset replacing the closing tag.
//
// note: needs testing
func UTF8ToASSText(text string) string {
	// Convert HTML entities to actual characters.
	// Replacement order matters: entity decoding runs before tag rewriting,
	// and &amp; is decoded after &lt;/&gt; so it cannot spawn new entities.
	type tags struct {
		values  []string
		replace string
	}
	t := []tags{
		{values: []string{"&lt;"}, replace: "<"},
		{values: []string{"&gt;"}, replace: ">"},
		{values: []string{"&amp;"}, replace: "&"},
		{values: []string{"&nbsp;"}, replace: "\\h"},
		{values: []string{"&quot;"}, replace: "\""},
		{values: []string{"&#39;"}, replace: "'"},
		{values: []string{"&apos;"}, replace: "'"},
		{values: []string{"&laquo;"}, replace: "«"},
		{values: []string{"&raquo;"}, replace: "»"},
		{values: []string{"&ndash;"}, replace: "-"},
		{values: []string{"&mdash;"}, replace: "—"},
		{values: []string{"&hellip;"}, replace: "…"},
		{values: []string{"&copy;"}, replace: "©"},
		{values: []string{"&reg;"}, replace: "®"},
		{values: []string{"&trade;"}, replace: "™"},
		{values: []string{"&euro;"}, replace: "€"},
		{values: []string{"&pound;"}, replace: "£"},
		{values: []string{"&yen;"}, replace: "¥"},
		{values: []string{"&dollar;"}, replace: "$"},
		{values: []string{"&cent;"}, replace: "¢"},
		// Structural tags -> ASS override codes.
		// NOTE(review): "\ruby" is not a standard ASS override tag — confirm
		// the target renderer actually supports it.
		{values: []string{"\r\n", "\n", "\r", "<br>", "<br/>", "<br />", "<BR>", "<BR/>", "<BR />"}, replace: "\\N"},
		{values: []string{"<b>", "<B>", "<strong>"}, replace: "{\\b1}"},
		{values: []string{"</b>", "</B>", "</strong>"}, replace: "{\\b0}"},
		{values: []string{"<i>", "<I>", "<em>"}, replace: "{\\i1}"},
		{values: []string{"</i>", "</I>", "</em>"}, replace: "{\\i0}"},
		{values: []string{"<u>", "<U>"}, replace: "{\\u1}"},
		{values: []string{"</u>", "</U>"}, replace: "{\\u0}"},
		{values: []string{"<s>", "<S>", "<strike>", "<del>"}, replace: "{\\s1}"},
		{values: []string{"</s>", "</S>", "</strike>", "</del>"}, replace: "{\\s0}"},
		{values: []string{"<center>", "<CENTER>"}, replace: "{\\an8}"},
		{values: []string{"</center>", "</CENTER>"}, replace: ""},
		{values: []string{"<ruby>", "<rt>"}, replace: "{\\ruby1}"},
		{values: []string{"</ruby>", "</rt>"}, replace: "{\\ruby0}"},
		{values: []string{"<p>", "<P>", "<div>", "<DIV>"}, replace: ""},
		{values: []string{"</p>", "</P>", "</div>", "</DIV>"}, replace: "\\N"},
	}
	for _, tag := range t {
		for _, value := range tag.values {
			text = strings.ReplaceAll(text, value, tag.replace)
		}
	}
	// Font tags with color and size: each loop iteration consumes the first
	// remaining <font ...> tag and its first matching closing tag.
	if strings.Contains(text, "<font") || strings.Contains(text, "<FONT") {
		// Process font tags with attributes
		for strings.Contains(text, "<font") || strings.Contains(text, "<FONT") {
			var tagStart int
			if idx := strings.Index(text, "<font"); idx != -1 {
				tagStart = idx
			} else {
				tagStart = strings.Index(text, "<FONT")
			}
			if tagStart == -1 {
				break
			}
			tagEnd := strings.Index(text[tagStart:], ">")
			if tagEnd == -1 {
				break
			}
			tagEnd += tagStart
			// Extract the font tag content
			fontTag := text[tagStart : tagEnd+1]
			replacement := ""
			// Handle color attribute
			if colorStart := strings.Index(fontTag, "color=\""); colorStart != -1 {
				colorStart += 7 // length of 'color="'
				if colorEnd := strings.Index(fontTag[colorStart:], "\""); colorEnd != -1 {
					color := fontTag[colorStart : colorStart+colorEnd]
					// Convert HTML color to ASS format
					if strings.HasPrefix(color, "#") {
						if len(color) == 7 { // #RRGGBB format
							color = "&H" + color[5:7] + color[3:5] + color[1:3] + "&" // Convert to ASS BGR format
						}
					}
					replacement += "{\\c" + color + "}"
				}
			}
			// Handle size attribute
			if sizeStart := strings.Index(fontTag, "size=\""); sizeStart != -1 {
				sizeStart += 6 // length of 'size="'
				if sizeEnd := strings.Index(fontTag[sizeStart:], "\""); sizeEnd != -1 {
					size := fontTag[sizeStart : sizeStart+sizeEnd]
					replacement += "{\\fs" + size + "}"
				}
			}
			// Handle face/family attribute
			if faceStart := strings.Index(fontTag, "face=\""); faceStart != -1 {
				faceStart += 6 // length of 'face="'
				if faceEnd := strings.Index(fontTag[faceStart:], "\""); faceEnd != -1 {
					face := fontTag[faceStart : faceStart+faceEnd]
					replacement += "{\\fn" + face + "}"
				}
			}
			// Replace the opening font tag
			text = text[:tagStart] + replacement + text[tagEnd+1:]
			// Find and remove the corresponding closing tag.
			// NOTE(review): this replaces the FIRST closing tag in the whole
			// string, which is only correct for non-nested <font> tags —
			// confirm nesting never occurs in practice.
			if closeStart := strings.Index(text, "</font>"); closeStart != -1 {
				text = text[:closeStart] + "{\\r}" + text[closeStart+7:]
			} else if closeStart = strings.Index(text, "</FONT>"); closeStart != -1 {
				text = text[:closeStart] + "{\\r}" + text[closeStart+7:]
			}
		}
	}
	return text
}

View File

@@ -1,115 +0,0 @@
package legacymkvparser
import (
"time"
)
// Info element and its children (Matroska Segment Information).
type Info struct {
	Title      string
	MuxingApp  string
	WritingApp string
	// TimecodeScale is the duration unit; presumably nanoseconds per tick
	// (Matroska's default is 1,000,000) — confirm how the parser applies it.
	TimecodeScale uint64
	// Duration is expressed in TimecodeScale ticks — TODO confirm in parser.
	Duration float64
	DateUTC  time.Time
}

// TrackEntry represents a track in the MKV file
type TrackEntry struct {
	TrackNumber uint64
	TrackUID    uint64
	// TrackType discriminates video/audio/subtitle tracks; presumably the
	// Matroska numeric codes (1=video, 2=audio, 17=subtitle) — confirm where
	// the parser maps these.
	TrackType uint64
	// Flag* fields are kept as raw uints (0/1), not bools.
	FlagEnabled     uint64
	FlagDefault     uint64
	FlagForced      uint64
	DefaultDuration uint64
	Name            string
	Language        string
	LanguageIETF    string
	CodecID         string
	// CodecPrivate carries codec-specific initialization data; presumably the
	// ASS style header for SSA/ASS subtitle tracks — confirm against usage.
	CodecPrivate     []byte
	Video            *VideoTrack // non-nil only for video tracks
	Audio            *AudioTrack // non-nil only for audio tracks
	ContentEncodings *ContentEncodings
}

// VideoTrack contains video-specific track data
type VideoTrack struct {
	PixelWidth  uint64
	PixelHeight uint64
}

// AudioTrack contains audio-specific track data
type AudioTrack struct {
	SamplingFrequency float64
	Channels          uint64
	BitDepth          uint64
}

// ContentEncodings contains information about how the track data is encoded
type ContentEncodings struct {
	ContentEncoding []ContentEncoding
}

// ContentEncoding describes a single encoding applied to the track data
type ContentEncoding struct {
	ContentEncodingOrder uint64
	ContentEncodingScope uint64
	ContentEncodingType  uint64
	ContentCompression   *ContentCompression
}

// ContentCompression describes how the track data is compressed
type ContentCompression struct {
	ContentCompAlgo     uint64
	ContentCompSettings []byte
}

// ChapterAtom represents a single chapter point
type ChapterAtom struct {
	ChapterUID       uint64
	ChapterTimeStart uint64
	ChapterTimeEnd   uint64
	ChapterDisplay   []ChapterDisplay
}

// ChapterDisplay contains displayable chapter information
type ChapterDisplay struct {
	ChapString       string
	ChapLanguage     []string
	ChapLanguageIETF []string
}

// AttachedFile represents a file attached to the MKV container
type AttachedFile struct {
	FileDescription string
	FileName        string
	FileMimeType    string
	FileData        []byte
	FileUID         uint64
}

// Block represents a data block in the MKV file
type Block struct {
	TrackNumber uint64
	// Timecode is signed; presumably relative to the enclosing cluster's
	// timecode as in the Matroska block layout — confirm in the parser.
	Timecode int16
	Data     [][]byte
}

// BlockGroup represents a group of blocks with additional information
type BlockGroup struct {
	Block         Block
	BlockDuration uint64
}

// Cluster represents a cluster of blocks in the MKV file
type Cluster struct {
	Timecode    uint64
	SimpleBlock []Block
	BlockGroup  []BlockGroup
}

// Tracks element and its children.
// NOTE(review): this is the only type here with an `ebml` struct tag —
// confirm whether the decoder matches the other types by field name or also
// needs tags.
type Tracks struct {
	TrackEntry []TrackEntry `ebml:"TrackEntry"`
}

View File

@@ -231,7 +231,7 @@ func New(opts *NewPlaybackManagerOptions) *PlaybackManager {
currentLocalFileWrapperEntry: mo.None[*anime.LocalFileWrapperEntry](),
currentMediaListEntry: mo.None[*anilist.AnimeListEntry](),
continuityManager: opts.ContinuityManager,
playbackStatusSubscribers: result.NewResultMap[string, *PlaybackStatusSubscriber](),
playbackStatusSubscribers: result.NewMap[string, *PlaybackStatusSubscriber](),
}
return pm

View File

@@ -321,7 +321,7 @@ func (r *Repository) RefreshChapterContainers(mangaCollection *anilist.MangaColl
const ChapterCountMapCacheKey = 1
var mangaLatestChapterNumberMap = result.NewResultMap[int, map[int][]MangaLatestChapterNumberItem]()
var mangaLatestChapterNumberMap = result.NewMap[int, map[int][]MangaLatestChapterNumberItem]()
type MangaLatestChapterNumberItem struct {
Provider string `json:"provider"`

View File

@@ -73,7 +73,7 @@ func New(logger *zerolog.Logger, socketName string, appPath string, optionalArgs
SocketName: sn,
AppPath: appPath,
Args: additionalArgs,
subscribers: result.NewResultMap[string, *Subscriber](),
subscribers: result.NewMap[string, *Subscriber](),
exitedCh: make(chan struct{}),
}
}

View File

@@ -152,7 +152,7 @@ func NewRepository(opts *NewRepositoryOptions) *Repository {
wsEventManager: opts.WSEventManager,
continuityManager: opts.ContinuityManager,
completionThreshold: 0.8,
subscribers: result.NewResultMap[string, *RepositorySubscriber](),
subscribers: result.NewMap[string, *RepositorySubscriber](),
currentPlaybackStatus: &PlaybackStatus{},
exitedCh: make(chan struct{}),
}

View File

@@ -76,7 +76,7 @@ func New(logger *zerolog.Logger, socketName string, appPath string, optionalArgs
SocketName: sn,
AppPath: appPath,
Args: additionalArgs,
subscribers: result.NewResultMap[string, *Subscriber](),
subscribers: result.NewMap[string, *Subscriber](),
exitedCh: make(chan struct{}),
}
}

View File

@@ -45,7 +45,7 @@ func NewPlaybackManager(repository *Repository) *PlaybackManager {
return &PlaybackManager{
logger: repository.logger,
repository: repository,
mediaContainers: result.NewResultMap[string, *MediaContainer](),
mediaContainers: result.NewMap[string, *MediaContainer](),
}
}

View File

@@ -41,8 +41,8 @@ func NewFileStream(
ret := &FileStream{
Path: path,
Out: filepath.Join(settings.StreamDir, sha),
videos: result.NewResultMap[Quality, *VideoStream](),
audios: result.NewResultMap[int32, *AudioStream](),
videos: result.NewMap[Quality, *VideoStream](),
audios: result.NewMap[int32, *AudioStream](),
logger: logger,
settings: settings,
Info: mediaInfo,

View File

@@ -64,7 +64,7 @@ func (kf *Keyframe) AddListener(callback func(keyframes []float64)) {
kf.info.listeners = append(kf.info.listeners, callback)
}
var keyframes = result.NewResultMap[string, *Keyframe]()
var keyframes = result.NewMap[string, *Keyframe]()
func GetKeyframes(
path string,

View File

@@ -56,7 +56,7 @@ func NewTranscoder(opts *NewTranscoderOptions) (*Transcoder, error) {
}
ret := &Transcoder{
streams: result.NewResultMap[string, *FileStream](),
streams: result.NewMap[string, *FileStream](),
clientChan: make(chan ClientInfo, 1000),
logger: opts.Logger,
settings: Settings{
@@ -95,7 +95,7 @@ func (t *Transcoder) Destroy() {
}
t.streams.Clear()
//close(t.clientChan)
t.streams = result.NewResultMap[string, *FileStream]()
t.streams = result.NewMap[string, *FileStream]()
t.clientChan = make(chan ClientInfo, 10)
t.logger.Debug().Msg("transcoder: Transcoder destroyed")
}

View File

@@ -334,3 +334,33 @@ func TestMetadataParser_Torrent(t *testing.T) {
assertTestResult(t, metadata)
}
// TestConvertSRTToASS converts a tiny two-cue SRT document and compares the
// result against the exact golden ASS output. The second cue contains "<-->"
// to check that arrow-like text inside a cue body survives conversion.
func TestConvertSRTToASS(t *testing.T) {
	srt := `1
00:00:00,000 --> 00:00:03,000
Hello, world!
2
00:00:04,000 --> 00:00:06,000
This is a <--> test.
`
	out, err := ConvertToASS(srt, SubtitleTypeSRT)
	require.NoError(t, err)
	require.Equal(t, `[Script Info]
PlayResX: 640
PlayResY: 360
ScriptType: v4.00+
WrapStyle: 0
ScaledBorderAndShadow: yes
[V4+ Styles]
Format: Name, Alignment, Angle, BackColour, Bold, BorderStyle, Encoding, Fontname, Fontsize, Italic, MarginL, MarginR, MarginV, Outline, OutlineColour, PrimaryColour, ScaleX, ScaleY, SecondaryColour, Shadow, Spacing, Strikeout, Underline
Style: Default,2,0.000,&H00000000,0,1,0,Roboto Medium,24.000,0,20,20,23,1.300,&H00000000,&H00ffffff,100.000,100.000,&H000000ff,0.000,0.000,0,0
[Events]
Format: Layer, Start, End, Style, Name, MarginL, MarginR, MarginV, Effect, Text
Dialogue: 0,00:00:00.00,00:00:03.00,Default,,0,0,0,,Hello, world!
Dialogue: 0,00:00:04.00,00:00:06.00,Default,,0,0,0,,This is a <--> test.
`, out)
}

View File

@@ -7,7 +7,6 @@ import (
"seanime/internal/util"
"seanime/internal/util/result"
"sync"
"sync/atomic"
)
@@ -31,7 +30,7 @@ func NewMediaController(manager *Manager) *MediaController {
return &MediaController{
manager: manager,
current: MediaControllerTypePlaybackManager,
nativePlayerSubscribers: result.NewResultMap[string, *NativePlayerToPlaybackManagerSubscriber](),
nativePlayerSubscribers: result.NewMap[string, *NativePlayerToPlaybackManagerSubscriber](),
}
}

View File

@@ -208,7 +208,7 @@ func NewManager(opts *NewManagerOptions) *Manager {
logger: opts.Logger,
wsEventManager: opts.WSEventManager,
playbackManager: opts.PlaybackManager,
peerConnections: result.NewResultMap[string, *PeerConnection](),
peerConnections: result.NewMap[string, *PeerConnection](),
platformRef: opts.PlatformRef,
ctx: ctx,
cancel: cancel,

View File

@@ -81,7 +81,7 @@ func New(options NewNativePlayerOptions) *NativePlayer {
playbackStatus: &PlaybackStatus{},
wsEventManager: options.WsEventManager,
clientPlayerEventSubscriber: options.WsEventManager.SubscribeToClientNativePlayerEvents("nativeplayer"),
subscribers: result.NewResultMap[string, *Subscriber](),
subscribers: result.NewMap[string, *Subscriber](),
logger: options.Logger,
}

View File

@@ -1,24 +0,0 @@
package onlinestream_sources
import (
"errors"
hibikeonlinestream "seanime/internal/extension/hibike/onlinestream"
)
var (
	// ErrNoVideoSourceFound is returned when an extractor finds no playable
	// source in the embed page / API response.
	ErrNoVideoSourceFound = errors.New("no episode source found")
	// ErrVideoSourceExtraction is the generic extraction failure, also used
	// by extractors' panic-recovery handlers (see GogoCDN.Extract).
	ErrVideoSourceExtraction = errors.New("error while extracting video sources")
)

// VideoExtractor resolves an embed/stream page URI into playable video sources.
type VideoExtractor interface {
	Extract(uri string) ([]*hibikeonlinestream.VideoSource, error)
}

// Common quality labels attached to extracted sources.
const (
	QualityDefault = "default"
	QualityAuto    = "auto"
	Quality360     = "360"
	Quality480     = "480"
	Quality720     = "720"
	Quality1080    = "1080"
)

View File

@@ -1,277 +0,0 @@
package onlinestream_sources
import (
"bytes"
"crypto/aes"
"crypto/cipher"
"encoding/base64"
"encoding/json"
"errors"
"fmt"
"github.com/gocolly/colly"
"io"
"net/http"
"net/url"
"regexp"
"seanime/internal/util"
"strings"
hibikeonlinestream "seanime/internal/extension/hibike/onlinestream"
)
// cdnKeys holds the static AES-CBC key material for the CDN's
// encrypt-ajax endpoint: key encrypts/decrypts the request parameters,
// secondKey decrypts the response payload, and iv is shared by both
// (see generateEncryptedAjaxParams and decryptAjaxData).
type cdnKeys struct {
	key       []byte
	secondKey []byte
	iv        []byte
}

// GogoCDN extracts video sources from gogoanime-style embed pages.
type GogoCDN struct {
	client     *http.Client
	serverName string
	keys       cdnKeys
	referrer   string // NOTE(review): never set or read in this file — confirm it is still needed
}
// NewGogoCDN returns an extractor preconfigured for the "goload" server.
// The AES key/secondKey/IV are hardcoded values shipped by the upstream
// player; if extraction suddenly starts failing, check whether upstream
// rotated them.
func NewGogoCDN() *GogoCDN {
	return &GogoCDN{
		client:     &http.Client{}, // NOTE(review): no timeout set — requests can hang indefinitely
		serverName: "goload",
		keys: cdnKeys{
			key:       []byte("37911490979715163134003223491201"),
			secondKey: []byte("54674138327930866480207815084989"),
			iv:        []byte("3134003223491201"),
		},
	}
}
// Extract fetches and extracts video sources from the provided URI.
//
// Flow: scrape the embed page for the encrypted "episode" script value, build
// the encrypted AJAX query from it and the ?id= parameter, call
// encrypt-ajax.php, decrypt the JSON payload, and expand each source URL into
// quality-tagged entries. Any panic is converted to ErrVideoSourceExtraction
// by the deferred handler (which is why err is a named return).
//
// BUGFIX: the error from decryptAjaxData was previously discarded, letting a
// nil map fall through to the type assertions below; it is now checked.
func (g *GogoCDN) Extract(uri string) (vs []*hibikeonlinestream.VideoSource, err error) {
	defer util.HandlePanicInModuleThen("onlinestream/sources/gogocdn/Extract", func() {
		err = ErrVideoSourceExtraction
	})

	// Instantiate a new collector
	c := colly.NewCollector(
		// Allow visiting the same page multiple times
		colly.AllowURLRevisit(),
	)

	ur, err := url.Parse(uri)
	if err != nil {
		return nil, err
	}

	// The episode id comes from the query string; the encrypted payload from
	// a <script data-name="episode"> tag on the page.
	var scriptValue string
	id := ur.Query().Get("id")
	c.OnHTML("script[data-name='episode']", func(e *colly.HTMLElement) {
		scriptValue = e.Attr("data-value")
	})

	// Start scraping
	if err = c.Visit(uri); err != nil {
		return nil, err
	}

	// Check if scriptValue and id are found
	if scriptValue == "" || id == "" {
		return nil, errors.New("script value or id not found")
	}

	// Ask the CDN for the (encrypted) source list.
	ajaxUrl := fmt.Sprintf("%s://%s/encrypt-ajax.php?%s", ur.Scheme, ur.Host, g.generateEncryptedAjaxParams(id, scriptValue))
	req, err := http.NewRequest("GET", ajaxUrl, nil)
	if err != nil {
		return nil, err
	}
	req.Header.Set("X-Requested-With", "XMLHttpRequest")
	req.Header.Set("User-Agent", "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36")
	req.Header.Set("Accept", "application/json, text/javascript, */*; q=0.01")

	encryptedData, err := g.client.Do(req)
	if err != nil {
		return nil, err
	}
	defer encryptedData.Body.Close()

	encryptedDataBytesRes, err := io.ReadAll(encryptedData.Body)
	if err != nil {
		return nil, err
	}

	var encryptedDataBytes map[string]string
	if err = json.Unmarshal(encryptedDataBytesRes, &encryptedDataBytes); err != nil {
		return nil, err
	}

	data, err := g.decryptAjaxData(encryptedDataBytes["data"])
	if err != nil {
		return nil, err
	}

	source, ok := data["source"].([]interface{})
	// Check if source is found
	if !ok {
		return nil, ErrNoVideoSourceFound
	}

	// Collect every raw URL from the primary and backup source lists.
	var results []*hibikeonlinestream.VideoSource
	urls := make([]string, 0)
	for _, src := range source {
		s := src.(map[string]interface{})
		urls = append(urls, s["file"].(string))
	}
	sourceBK, ok := data["source_bk"].([]interface{})
	if ok {
		for _, src := range sourceBK {
			s := src.(map[string]interface{})
			urls = append(urls, s["file"].(string))
		}
	}

	// Resolve each raw URL into one or more quality-tagged sources.
	// (loop variable renamed from `url`, which shadowed the net/url package)
	for _, u := range urls {
		if sources, ok := g.urlToVideoSource(u, source, sourceBK); ok {
			results = append(results, sources...)
		}
	}
	return results, nil
}
// urlToVideoSource expands a raw file URL into one or more VideoSource
// entries.
//
// HLS URLs (".m3u8"): the master playlist is downloaded and one entry is
// emitted per RESOLUTION variant, plus a "default" entry for the master
// itself. Direct files: the URL is looked up in `source` (labelled
// qualities) and `sourceBK` (backup mirrors).
//
// Any panic (failed type assertion, out-of-range index on a malformed
// playlist or label) is converted into ok=false by the deferred handler.
func (g *GogoCDN) urlToVideoSource(url string, source []interface{}, sourceBK []interface{}) (vs []*hibikeonlinestream.VideoSource, ok bool) {
	defer util.HandlePanicInModuleThen("onlinestream/sources/gogocdn/urlToVideoSource", func() {
		ok = false
	})
	ret := make([]*hibikeonlinestream.VideoSource, 0)
	if strings.Contains(url, ".m3u8") {
		// NOTE(review): http.Get uses the default client with no timeout —
		// a stalled CDN response blocks this call indefinitely.
		resResult, err := http.Get(url)
		if err != nil {
			return nil, false
		}
		defer resResult.Body.Close()
		bodyBytes, err := io.ReadAll(resResult.Body)
		if err != nil {
			return nil, false
		}
		bodyString := string(bodyBytes)
		// Captures "RESOLUTION=WxH,..." and the variant URI that follows
		// (group 2 = WxH..., group 4 = the next whitespace-led token).
		// NOTE(review): compiled on every call — could be hoisted to package
		// scope; also brittle against unusual playlist formatting.
		resolutions := regexp.MustCompile(`(RESOLUTION=)(.*)(\s*?)(\s.*)`).FindAllStringSubmatch(bodyString, -1)
		baseURL := url[:strings.LastIndex(url, "/")]
		for _, res := range resolutions {
			// "1280x720,..." -> "720" (panics on malformed input; recovered above)
			quality := strings.Split(strings.Split(res[2], "x")[1], ",")[0]
			url := fmt.Sprintf("%s/%s", baseURL, strings.TrimSpace(res[4]))
			ret = append(ret, &hibikeonlinestream.VideoSource{URL: url, Type: hibikeonlinestream.VideoSourceM3U8, Quality: quality + "p"})
		}
		ret = append(ret, &hibikeonlinestream.VideoSource{URL: url, Type: hibikeonlinestream.VideoSourceM3U8, Quality: "default"})
	} else {
		// Direct MP4: find the matching entry to recover its quality label.
		for _, src := range source {
			s := src.(map[string]interface{})
			if s["file"].(string) == url {
				quality := strings.Split(s["label"].(string), " ")[0] + "p"
				ret = append(ret, &hibikeonlinestream.VideoSource{URL: url, Type: hibikeonlinestream.VideoSourceMP4, Quality: quality})
			}
		}
		if sourceBK != nil {
			for _, src := range sourceBK {
				s := src.(map[string]interface{})
				if s["file"].(string) == url {
					ret = append(ret, &hibikeonlinestream.VideoSource{URL: url, Type: hibikeonlinestream.VideoSourceMP4, Quality: "backup"})
				}
			}
		}
	}
	return ret, true
}
// generateEncryptedAjaxParams builds the query string for encrypt-ajax.php:
// the episode id is AES-encrypted while the alias token is recovered by
// decrypting the page's script value, both with the shared key and IV.
func (g *GogoCDN) generateEncryptedAjaxParams(id, scriptValue string) string {
	return fmt.Sprintf("id=%s&alias=%s",
		g.encrypt(id, g.keys.iv, g.keys.key),
		g.decrypt(scriptValue, g.keys.iv, g.keys.key))
}
// encrypt AES-CBC-encrypts text with the given key and IV, PKCS#7-padding
// the plaintext first, and returns the ciphertext base64-encoded.
func (g *GogoCDN) encrypt(text string, iv []byte, key []byte) string {
	// Key length is fixed at construction (32 bytes), so the error is ignored.
	block, _ := aes.NewCipher(key)
	padded := pkcs7Padding([]byte(text), aes.BlockSize)
	out := make([]byte, len(padded))
	cipher.NewCBCEncrypter(block, iv).CryptBlocks(out, padded)
	return base64.StdEncoding.EncodeToString(out)
}
// decrypt base64-decodes text and AES-CBC-decrypts it with the given key and
// IV, stripping PKCS#7 padding from the result.
//
// Errors are deliberately ignored: malformed input can make CryptBlocks or
// the padding trim panic, which callers recover via their deferred handlers.
func (g *GogoCDN) decrypt(text string, iv []byte, key []byte) string {
	block, _ := aes.NewCipher(key)
	raw, _ := base64.StdEncoding.DecodeString(text)
	out := make([]byte, len(raw))
	cipher.NewCBCDecrypter(block, iv).CryptBlocks(out, raw)
	return string(pkcs7Trimming(out))
}
// decryptAjaxData decodes and AES-CBC-decrypts the base64 payload returned by
// the provider's encrypted AJAX endpoint using the secondary key, strips the
// PKCS#7 padding, and unmarshals the resulting JSON object.
//
// Returns an error when the payload is not valid base64, is too short, is not
// block-aligned, or does not decrypt to valid JSON.
func (g *GogoCDN) decryptAjaxData(encryptedData string) (map[string]interface{}, error) {
	decodedData, err := base64.StdEncoding.DecodeString(encryptedData)
	if err != nil {
		return nil, err
	}
	block, err := aes.NewCipher(g.keys.secondKey)
	if err != nil {
		return nil, err
	}
	if len(decodedData) < aes.BlockSize {
		return nil, fmt.Errorf("cipher text too short")
	}
	// CryptBlocks panics on input that is not a whole number of blocks, so
	// reject such payloads up front instead of relying on panic recovery.
	if len(decodedData)%aes.BlockSize != 0 {
		return nil, fmt.Errorf("cipher text is not block-aligned")
	}
	mode := cipher.NewCBCDecrypter(block, g.keys.iv)
	// Decrypt in place, then remove the PKCS#7 padding before parsing.
	mode.CryptBlocks(decodedData, decodedData)
	decodedData = pkcs7Trimming(decodedData)
	var data map[string]interface{}
	if err := json.Unmarshal(decodedData, &data); err != nil {
		return nil, err
	}
	return data, nil
}
// pkcs7Padding appends PKCS#7 padding so the result length is a multiple of
// blockSize. Every padding byte holds the padding length itself, so an
// already-aligned input gains one full block of blockSize bytes.
func pkcs7Padding(text []byte, blockSize int) []byte {
	n := blockSize - len(text)%blockSize
	for i := 0; i < n; i++ {
		text = append(text, byte(n))
	}
	return text
}
// pkcs7Trimming removes PKCS#7 padding from text; the last byte encodes how
// many padding bytes to strip.
//
// Invalid input — an empty slice, or a declared padding length larger than
// the data — previously caused an index-out-of-range panic. Such input is now
// returned unchanged and left to the caller (e.g. a subsequent JSON parse)
// to reject.
func pkcs7Trimming(text []byte) []byte {
	length := len(text)
	if length == 0 {
		return text
	}
	unpadding := int(text[length-1])
	if unpadding <= 0 || unpadding > length {
		return text
	}
	return text[:length-unpadding]
}

View File

@@ -1,16 +0,0 @@
package onlinestream_sources
import (
"github.com/davecgh/go-spew/spew"
"github.com/stretchr/testify/assert"
"testing"
)
// TestGogoCDN_Extract is a live integration test: it hits the real embtaku
// embed endpoint over the network and dumps whatever sources it extracts.
// It only asserts that extraction returns no error, so results depend on the
// remote site being up and the episode still existing.
func TestGogoCDN_Extract(t *testing.T) {
	gogo := NewGogoCDN()
	// Real streaming page URL; the id query parameter identifies the episode.
	ret, err := gogo.Extract("https://embtaku.pro/streaming.php?id=MjExNjU5&title=One+Piece+Episode+1075")
	assert.NoError(t, err)
	// Dumped for manual inspection rather than asserting exact sources.
	spew.Dump(ret)
}

View File

@@ -1,350 +0,0 @@
package onlinestream_sources
import (
"crypto/aes"
"crypto/cipher"
"crypto/md5"
"encoding/base64"
"encoding/json"
"errors"
"io"
"net/http"
"regexp"
hibikeonlinestream "seanime/internal/extension/hibike/onlinestream"
"seanime/internal/util"
"strconv"
"strings"
)
// MegaCloud extracts playable video sources (and subtitle tracks) from
// megacloud.tv embed URLs.
type MegaCloud struct {
	// Script is the URL of the player script from which decryption
	// variables are scraped when sources come back encrypted.
	Script string
	// Sources is the getSources AJAX endpoint; the video id is appended.
	Sources string
	// UserAgent is sent with every outgoing request.
	UserAgent string
}
// NewMegaCloud returns a MegaCloud extractor preconfigured with the player
// script URL, the getSources endpoint, and a desktop Chrome User-Agent.
func NewMegaCloud() *MegaCloud {
	m := new(MegaCloud)
	m.Script = "https://megacloud.tv/js/player/a/prod/e1-player.min.js"
	m.Sources = "https://megacloud.tv/embed-2/ajax/e-1/getSources?id="
	m.UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"
	return m
}
// Extract resolves the megacloud embed at uri into playable video sources.
//
// Flow: the video id is the last path segment of uri (query string stripped);
// it is appended to m.Sources and fetched as an XHR. The JSON response carries
// subtitle "tracks" and a "sources" field that is either a ready-to-use array
// of file objects or an encrypted string. In the encrypted case the player
// script at m.Script is downloaded, the (offset, length) pairs hidden in it
// are extracted, the AES secret is carved out of the encrypted string with
// those pairs, and the remainder is decrypted and parsed as JSON.
//
// Any panic caused by an unexpected response shape is converted into
// ErrVideoSourceExtraction by the deferred handler.
func (m *MegaCloud) Extract(uri string) (vs []*hibikeonlinestream.VideoSource, err error) {
	defer util.HandlePanicInModuleThen("onlinestream/sources/megacloud/Extract", func() {
		err = ErrVideoSourceExtraction
	})
	// The embed id is the last path segment, minus any query string.
	videoIdParts := strings.Split(uri, "/")
	videoId := videoIdParts[len(videoIdParts)-1]
	videoId = strings.Split(videoId, "?")[0]
	client := &http.Client{}
	req, err := http.NewRequest("GET", m.Sources+videoId, nil)
	if err != nil {
		return nil, err
	}
	// The endpoint expects XHR-style headers and the embed page as Referer.
	req.Header.Set("Accept", "*/*")
	req.Header.Set("X-Requested-With", "XMLHttpRequest")
	req.Header.Set("User-Agent", m.UserAgent)
	req.Header.Set("Referer", uri)
	res, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	var srcData map[string]interface{}
	err = json.NewDecoder(res.Body).Decode(&srcData)
	if err != nil {
		return nil, err
	}
	// Collect subtitle tracks. Entries without a string "label" are skipped;
	// the first entry overall is marked as the default track.
	subtitles := make([]*hibikeonlinestream.VideoSubtitle, 0)
	for idx, s := range srcData["tracks"].([]interface{}) {
		sub := s.(map[string]interface{})
		label, ok := sub["label"].(string)
		if ok {
			subtitle := &hibikeonlinestream.VideoSubtitle{
				URL:       sub["file"].(string),
				ID:        label,
				Language:  label,
				IsDefault: idx == 0,
			}
			subtitles = append(subtitles, subtitle)
		}
	}
	if encryptedString, ok := srcData["sources"]; ok {
		switch encryptedString.(type) {
		case []interface{}:
			// Unencrypted response: an array of {file: ...} objects.
			// Only the first entry is used.
			if len(encryptedString.([]interface{})) == 0 {
				return nil, ErrNoVideoSourceFound
			}
			videoSources := make([]*hibikeonlinestream.VideoSource, 0)
			if e, ok := encryptedString.([]interface{})[0].(map[string]interface{}); ok {
				file, ok := e["file"].(string)
				if ok {
					// Source type is inferred from the file extension.
					videoSources = append(videoSources, &hibikeonlinestream.VideoSource{
						URL:       file,
						Type:      map[bool]hibikeonlinestream.VideoSourceType{true: hibikeonlinestream.VideoSourceM3U8, false: hibikeonlinestream.VideoSourceMP4}[strings.Contains(file, ".m3u8")],
						Subtitles: subtitles,
						Quality:   QualityAuto,
					})
				}
			}
			if len(videoSources) == 0 {
				return nil, ErrNoVideoSourceFound
			}
			return videoSources, nil
		case []map[string]interface{}:
			// NOTE(review): json decoding into interface{} yields
			// []interface{} for arrays, so this branch looks unreachable —
			// confirm before removing.
			if srcData["encrypted"].(bool) && ok {
				videoSources := make([]*hibikeonlinestream.VideoSource, 0)
				for _, e := range encryptedString.([]map[string]interface{}) {
					videoSources = append(videoSources, &hibikeonlinestream.VideoSource{
						URL:       e["file"].(string),
						Type:      map[bool]hibikeonlinestream.VideoSourceType{true: hibikeonlinestream.VideoSourceM3U8, false: hibikeonlinestream.VideoSourceMP4}[strings.Contains(e["file"].(string), ".m3u8")],
						Subtitles: subtitles,
						Quality:   QualityAuto,
					})
				}
				if len(videoSources) == 0 {
					return nil, ErrNoVideoSourceFound
				}
				return videoSources, nil
			}
		case string:
			// Encrypted response: fetch the player script to recover the
			// values needed to derive the AES secret.
			res, err = client.Get(m.Script)
			if err != nil {
				return nil, err
			}
			defer res.Body.Close()
			text, err := io.ReadAll(res.Body)
			if err != nil {
				return nil, errors.New("couldn't fetch script to decrypt resource")
			}
			values, err := m.extractVariables(string(text))
			if err != nil {
				return nil, err
			}
			// Carve the secret out of the encrypted string; what remains is
			// the actual ciphertext.
			secret, encryptedSource := m.getSecret(encryptedString.(string), values)
			//if err != nil {
			//	return nil, err
			//}
			decrypted, err := m.decrypt(encryptedSource, secret)
			if err != nil {
				return nil, err
			}
			var decryptedData []map[string]interface{}
			err = json.Unmarshal([]byte(decrypted), &decryptedData)
			if err != nil {
				return nil, err
			}
			sources := make([]*hibikeonlinestream.VideoSource, 0)
			for _, e := range decryptedData {
				sources = append(sources, &hibikeonlinestream.VideoSource{
					URL:       e["file"].(string),
					Type:      map[bool]hibikeonlinestream.VideoSourceType{true: hibikeonlinestream.VideoSourceM3U8, false: hibikeonlinestream.VideoSourceMP4}[strings.Contains(e["file"].(string), ".m3u8")],
					Subtitles: subtitles,
					Quality:   QualityAuto,
				})
			}
			if len(sources) == 0 {
				return nil, ErrNoVideoSourceFound
			}
			return sources, nil
		}
	}
	return nil, ErrNoVideoSourceFound
}
// extractVariables scrapes the obfuscated player script for the
// (offset, length) pairs used to pull the decryption secret out of the
// encrypted source string. Each switch case of the form
// `case 0x..: a = X, b = Y;` yields one pair; entries mentioning "partKey"
// or whose variables cannot be resolved to hex integers are skipped.
func (m *MegaCloud) extractVariables(text string) ([][]int, error) {
	caseRe := regexp.MustCompile(`case\s*0x[0-9a-f]+:\s*\w+\s*=\s*(\w+)\s*,\s*\w+\s*=\s*(\w+);`)
	var pairs [][]int
	for _, sub := range caseRe.FindAllStringSubmatch(text, -1) {
		if len(sub) < 3 {
			continue
		}
		if strings.Contains(sub[0], "partKey") {
			continue
		}
		rawA, errA := m.matchingKey(sub[1], text)
		rawB, errB := m.matchingKey(sub[2], text)
		if errA != nil || errB != nil {
			continue
		}
		a, errA := strconv.ParseInt(rawA, 16, 64)
		b, errB := strconv.ParseInt(rawB, 16, 64)
		if errA != nil || errB != nil {
			continue
		}
		pairs = append(pairs, []int{int(a), int(b)})
	}
	return pairs, nil
}
// matchingKey finds the hex value assigned to the given variable name inside
// the player script (pattern ",<name>=0x<hex>") and returns it without the
// "0x" prefix.
func (m *MegaCloud) matchingKey(value, script string) (string, error) {
	re := regexp.MustCompile(`,` + regexp.QuoteMeta(value) + `=((?:0x)?([0-9a-fA-F]+))`)
	if sub := re.FindStringSubmatch(script); len(sub) > 1 {
		return strings.TrimPrefix(sub[1], "0x"), nil
	}
	return "", errors.New("failed to match the key")
}
// getSecret splits the encrypted source string into the AES passphrase and
// the remaining ciphertext. values holds (offset, length) pairs produced by
// extractVariables: each pair, advanced by the running total of already
// consumed lengths, marks a run of characters that belongs to the secret and
// is removed from the ciphertext.
func (m *MegaCloud) getSecret(encryptedString string, values [][]int) (string, string) {
	chars := strings.Split(encryptedString, "")
	var secret strings.Builder
	consumed := 0
	for _, pair := range values {
		from := pair[0] + consumed
		to := from + pair[1]
		for i := from; i < to; i++ {
			secret.WriteByte(encryptedString[i])
			chars[i] = ""
		}
		consumed += pair[1]
	}
	return secret.String(), strings.Join(chars, "")
}
//func (m *MegaCloud) getSecret(encryptedString string, values []int) (string, string, error) {
// var secret string
// var encryptedSource = encryptedString
// var totalInc int
//
// for i := 0; i < values[0]; i++ {
// var start, inc int
//
// switch i {
// case 0:
// start = values[2]
// inc = values[1]
// case 1:
// start = values[4]
// inc = values[3]
// case 2:
// start = values[6]
// inc = values[5]
// case 3:
// start = values[8]
// inc = values[7]
// case 4:
// start = values[10]
// inc = values[9]
// case 5:
// start = values[12]
// inc = values[11]
// case 6:
// start = values[14]
// inc = values[13]
// case 7:
// start = values[16]
// inc = values[15]
// case 8:
// start = values[18]
// inc = values[17]
// default:
// return "", "", errors.New("invalid index")
// }
//
// from := start + totalInc
// to := from + inc
//
// secret += encryptedString[from:to]
// encryptedSource = strings.Replace(encryptedSource, encryptedString[from:to], "", 1)
// totalInc += inc
// }
//
// return secret, encryptedSource, nil
//}
// decrypt implements OpenSSL-style ("Salted__" format) AES-CBC decryption:
// the base64 payload carries an 8-byte salt at offset 8, the 32-byte key and
// 16-byte IV are derived by chained MD5 over (passphrase + salt)
// (EVP_BytesToKey), and the rest of the payload is the ciphertext.
//
// Malformed payloads (bad base64, shorter than the 16-byte header,
// ciphertext not block-aligned, or invalid padding) now return an error
// instead of panicking.
func (m *MegaCloud) decrypt(encrypted, keyOrSecret string) (string, error) {
	cypher, err := base64.StdEncoding.DecodeString(encrypted)
	if err != nil {
		return "", err
	}
	// The payload must at least contain the 16-byte "Salted__<salt>" header;
	// slicing below would panic otherwise.
	if len(cypher) < 16 {
		return "", errors.New("encrypted payload too short")
	}
	salt := cypher[8:16]
	password := append([]byte(keyOrSecret), salt...)
	// Derive 48 bytes of key material: md5(password), md5(prev+password), ...
	md5Hashes := make([][]byte, 3)
	digest := password
	for i := 0; i < 3; i++ {
		hash := md5.Sum(digest)
		md5Hashes[i] = hash[:]
		digest = append(hash[:], password...)
	}
	key := append(md5Hashes[0], md5Hashes[1]...) // 32-byte AES-256 key
	iv := md5Hashes[2]                           // 16-byte IV
	contents := cypher[16:]
	// CryptBlocks panics on partial blocks, so validate alignment first.
	if len(contents)%aes.BlockSize != 0 {
		return "", errors.New("ciphertext is not block-aligned")
	}
	block, err := aes.NewCipher(key)
	if err != nil {
		return "", err
	}
	mode := cipher.NewCBCDecrypter(block, iv)
	mode.CryptBlocks(contents, contents)
	contents, err = pkcs7Unpad(contents, block.BlockSize())
	if err != nil {
		return "", err
	}
	return string(contents), nil
}
// pkcs7Unpad validates and strips PKCS#7 padding from data. It errors on an
// invalid block size, on data that is empty or not block-aligned, and on
// padding bytes inconsistent with the declared padding length.
func pkcs7Unpad(data []byte, blockSize int) ([]byte, error) {
	switch {
	case blockSize <= 0:
		return nil, errors.New("invalid blocksize")
	case len(data) == 0 || len(data)%blockSize != 0:
		return nil, errors.New("invalid PKCS7 data (block size must be a multiple of input length)")
	}
	n := int(data[len(data)-1])
	if n == 0 || n > blockSize {
		return nil, errors.New("invalid PKCS7 padding")
	}
	// Every one of the trailing n bytes must equal n.
	for _, b := range data[len(data)-n:] {
		if b != byte(n) {
			return nil, errors.New("invalid PKCS7 padding")
		}
	}
	return data[:len(data)-n], nil
}

View File

@@ -1,111 +0,0 @@
package onlinestream_sources
import (
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"io"
"net/http"
"seanime/internal/util"
"strings"
hibikeonlinestream "seanime/internal/extension/hibike/onlinestream"
)
// StreamSB extracts video sources from streamsb/watchsb embed pages via the
// provider's sources50 JSON API.
type StreamSB struct {
	// Host is the primary sources50 endpoint.
	Host string
	// Host2 is an alternate mirror of the endpoint.
	// NOTE(review): only Host is used by Extract here — confirm Host2 is
	// still needed.
	Host2 string
	// UserAgent is sent with every request.
	UserAgent string
}
// NewStreamSB returns a StreamSB extractor pointed at the public sources50
// endpoints with a desktop Chrome User-Agent.
func NewStreamSB() *StreamSB {
	s := new(StreamSB)
	s.Host = "https://streamsss.net/sources50"
	s.Host2 = "https://watchsb.com/sources50"
	s.UserAgent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/83.0.4103.116 Safari/537.36"
	return s
}
// Payload wraps the hex-encoded video id into the obfuscated request path the
// sources50 endpoint expects.
//
// NOTE(review): the surrounding constants are themselves hex — e.g. "7c7c"
// decodes to "||" and "73747265616d7362" to "streamsb" — presumably a
// site-side token scheme; they must be updated if the site rotates its
// tokens. Do not alter these strings otherwise.
func (s *StreamSB) Payload(hex string) string {
	return "566d337678566f743674494a7c7c" + hex + "7c7c346b6767586d6934774855537c7c73747265616d7362/6565417268755339773461447c7c346133383438333436313335376136323337373433383634376337633465366534393338373136643732373736343735373237613763376334363733353737303533366236333463353333363534366137633763373337343732363536313664373336327c7c6b586c3163614468645a47617c7c73747265616d7362"
}
// Extract queries the sources50 API for the embed at uri and returns the
// resulting HLS sources: one entry per resolution variant found in the master
// playlist plus an "auto" entry for the master playlist itself.
//
// The embed id is the "/e/<id>" segment of the URL (with any ".html" suffix
// stripped). Panics from unexpected response shapes are converted to
// ErrVideoSourceExtraction by the deferred handler.
func (s *StreamSB) Extract(uri string) (vs []*hibikeonlinestream.VideoSource, err error) {
	defer util.HandlePanicInModuleThen("onlinestream/sources/streamsb/Extract", func() {
		err = ErrVideoSourceExtraction
	})
	var ret []*hibikeonlinestream.VideoSource
	// e.g. https://watchsb.com/e/<id>.html -> <id>
	id := strings.Split(uri, "/e/")[1]
	if strings.Contains(id, "html") {
		id = strings.Split(id, ".html")[0]
	}
	if id == "" {
		return nil, errors.New("cannot find ID")
	}
	client := &http.Client{}
	req, _ := http.NewRequest("GET", fmt.Sprintf("%s/%s", s.Host, s.Payload(hex.EncodeToString([]byte(id)))), nil)
	// The endpoint requires this marker header plus the embed page Referer.
	req.Header.Add("watchsb", "sbstream")
	req.Header.Add("User-Agent", s.UserAgent)
	req.Header.Add("Referer", uri)
	res, err := client.Do(req)
	if err != nil {
		return nil, err
	}
	defer res.Body.Close()
	// The read error was previously discarded; a truncated body would have
	// surfaced as a confusing JSON error instead.
	body, err := io.ReadAll(res.Body)
	if err != nil {
		return nil, err
	}
	var jsonResponse map[string]interface{}
	err = json.Unmarshal(body, &jsonResponse)
	if err != nil {
		return nil, err
	}
	streamData, ok := jsonResponse["stream_data"].(map[string]interface{})
	if !ok {
		return nil, ErrNoVideoSourceFound
	}
	// Fetch the master playlist and derive one source per stream variant.
	m3u8Urls, err := client.Get(streamData["file"].(string))
	if err != nil {
		return nil, err
	}
	defer m3u8Urls.Body.Close()
	m3u8Body, err := io.ReadAll(m3u8Urls.Body)
	if err != nil {
		return nil, err
	}
	videoList := strings.Split(string(m3u8Body), "#EXT-X-STREAM-INF:")
	for _, video := range videoList {
		if !strings.Contains(video, "m3u8") {
			continue
		}
		// The variant URL is on the line after the STREAM-INF tag; quality
		// is the height component of RESOLUTION=WxH.
		url := strings.Split(video, "\n")[1]
		quality := strings.Split(strings.Split(video, "RESOLUTION=")[1], ",")[0]
		quality = strings.Split(quality, "x")[1]
		ret = append(ret, &hibikeonlinestream.VideoSource{
			URL:     url,
			Quality: quality + "p",
			Type:    hibikeonlinestream.VideoSourceM3U8,
		})
	}
	ret = append(ret, &hibikeonlinestream.VideoSource{
		URL:     streamData["file"].(string),
		Quality: "auto",
		Type:    map[bool]hibikeonlinestream.VideoSourceType{true: hibikeonlinestream.VideoSourceM3U8, false: hibikeonlinestream.VideoSourceMP4}[strings.Contains(streamData["file"].(string), ".m3u8")],
	})
	return ret, nil
}

View File

@@ -1,64 +0,0 @@
package onlinestream_sources
import (
"errors"
"io"
"net/http"
"regexp"
"seanime/internal/util"
"strings"
hibikeonlinestream "seanime/internal/extension/hibike/onlinestream"
)
type (
	// Streamtape extracts the direct video URL from streamtape.com embed
	// pages by reassembling the obfuscated "robotlink" value the page
	// builds in client-side JavaScript.
	Streamtape struct {
		// Client is the HTTP client used for all requests.
		Client *http.Client
	}
)
// NewStreamtape returns a Streamtape extractor backed by a default HTTP
// client.
func NewStreamtape() *Streamtape {
	return &Streamtape{Client: new(http.Client)}
}
// Extract downloads the embed page at uri, reassembles the obfuscated
// "robotlink" URL the page builds client-side, and returns it as a single
// auto-quality video source.
//
// The page contains `robotlink').innerHTML = <a> + ('<b>` where the final URL
// is <a> (minus quotes) concatenated with <b> minus its first three
// characters; "https:" is prepended to the result.
func (s *Streamtape) Extract(uri string) (vs []*hibikeonlinestream.VideoSource, err error) {
	defer util.HandlePanicInModuleThen("onlinestream/sources/streamtape/Extract", func() {
		err = ErrVideoSourceExtraction
	})
	var ret []*hibikeonlinestream.VideoSource
	resp, err := s.Client.Get(uri)
	if err != nil {
		return nil, err
	}
	// The response body was previously never closed, leaking the connection.
	defer resp.Body.Close()
	body, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, err
	}
	re := regexp.MustCompile(`robotlink'\).innerHTML = (.*)'`)
	match := re.FindStringSubmatch(string(body))
	if len(match) == 0 {
		return nil, errors.New("could not find robotlink")
	}
	fhsh := strings.Split(match[1], "+ ('")
	// Guard against a robotlink expression that doesn't match the expected
	// "<a> + ('<b>" shape (previously an index panic).
	if len(fhsh) < 2 {
		return nil, errors.New("could not find robotlink")
	}
	fh := strings.ReplaceAll(fhsh[0], "'", "")
	sh := fhsh[1][3:]
	url := "https:" + fh + sh
	ret = append(ret, &hibikeonlinestream.VideoSource{
		URL:     url,
		Type:    map[bool]hibikeonlinestream.VideoSourceType{true: hibikeonlinestream.VideoSourceM3U8, false: hibikeonlinestream.VideoSourceMP4}[strings.Contains(url, ".m3u8")],
		Quality: QualityAuto,
	})
	return ret, nil
}

View File

@@ -227,7 +227,7 @@ func NewCacheLayer(anilistClientRef *util.Ref[anilist.AnilistClient]) anilist.An
fileCacher: fileCacher,
buckets: buckets,
logger: logger,
collectionMediaIDs: result.NewResultMap[int, struct{}](),
collectionMediaIDs: result.NewMap[int, struct{}](),
}
AnilistClient.Store(anilistClientRef.Get())

View File

@@ -41,9 +41,9 @@ func (a *AppContextImpl) BindStorage(vm *goja.Runtime, logger *zerolog.Logger, e
ext: ext,
logger: &storageLogger,
runtime: vm,
pluginDataCache: result.NewResultMap[string, *models.PluginData](),
keyDataCache: result.NewResultMap[string, interface{}](),
keySubscribers: result.NewResultMap[string, []chan interface{}](),
pluginDataCache: result.NewMap[string, *models.PluginData](),
keyDataCache: result.NewMap[string, interface{}](),
keySubscribers: result.NewMap[string, []chan interface{}](),
scheduler: scheduler,
}
storageObj := vm.NewObject()

View File

@@ -32,7 +32,7 @@ type StoreKeySubscriber[K comparable, T any] struct {
func NewStore[K comparable, T any](data map[K]T) *Store[K, T] {
s := &Store[K, T]{
data: make(map[K]T),
keySubscribers: result.NewResultMap[K, []*StoreKeySubscriber[K, T]](),
keySubscribers: result.NewMap[K, []*StoreKeySubscriber[K, T]](),
deleted: 0,
}

View File

@@ -196,13 +196,13 @@ func NewActionManager(ctx *Context) *ActionManager {
return &ActionManager{
ctx: ctx,
animePageButtons: result.NewResultMap[string, *AnimePageButton](),
animeLibraryDropdownItems: result.NewResultMap[string, *AnimeLibraryDropdownMenuItem](),
animePageDropdownItems: result.NewResultMap[string, *AnimePageDropdownMenuItem](),
mangaPageButtons: result.NewResultMap[string, *MangaPageButton](),
mediaCardContextMenuItems: result.NewResultMap[string, *MediaCardContextMenuItem](),
episodeCardContextMenuItems: result.NewResultMap[string, *EpisodeCardContextMenuItem](),
episodeGridItemMenuItems: result.NewResultMap[string, *EpisodeGridItemMenuItem](),
animePageButtons: result.NewMap[string, *AnimePageButton](),
animeLibraryDropdownItems: result.NewMap[string, *AnimeLibraryDropdownMenuItem](),
animePageDropdownItems: result.NewMap[string, *AnimePageDropdownMenuItem](),
mangaPageButtons: result.NewMap[string, *MangaPageButton](),
mediaCardContextMenuItems: result.NewMap[string, *MediaCardContextMenuItem](),
episodeCardContextMenuItems: result.NewMap[string, *EpisodeCardContextMenuItem](),
episodeGridItemMenuItems: result.NewMap[string, *EpisodeGridItemMenuItem](),
}
}

View File

@@ -59,7 +59,7 @@ func NewCommandPaletteManager(ctx *Context) *CommandPaletteManager {
return &CommandPaletteManager{
ctx: ctx,
componentManager: &ComponentManager{ctx: ctx},
items: result.NewResultMap[string, *commandItem](),
items: result.NewMap[string, *commandItem](),
renderedItems: make([]*CommandItemJSON, 0),
}
}

View File

@@ -112,19 +112,19 @@ func NewContext(ui *UI) *Context {
ext: ui.ext,
logger: ui.logger,
vm: ui.vm,
states: result.NewResultMap[string, *State](),
states: result.NewMap[string, *State](),
fetchSem: make(chan struct{}, MaxConcurrentFetchRequests),
stateSubscribers: make([]chan *State, 0),
eventBus: result.NewResultMap[ClientEventType, *result.Map[string, *EventListener]](),
eventBus: result.NewMap[ClientEventType, *result.Map[string, *EventListener]](),
wsEventManager: ui.wsEventManager,
effectStack: make(map[string]bool),
effectCalls: make(map[string][]time.Time),
pendingStateUpdates: make(map[string]struct{}),
lastUIUpdateAt: time.Now().Add(-time.Hour), // Initialize to a time in the past
atomicCleanupCounter: atomic.Int64{},
onCleanupFns: result.NewResultMap[int64, func()](),
onCleanupFns: result.NewMap[int64, func()](),
cron: mo.None[*plugin.Cron](),
registeredInlineEventHandlers: result.NewResultMap[string, *EventListener](),
registeredInlineEventHandlers: result.NewMap[string, *EventListener](),
pendingClientEvents: make([]*ServerPluginEvent, 0, maxEventBatchSize),
eventBatchSize: 0,
}
@@ -239,7 +239,7 @@ func (c *Context) RegisterEventListener(events ...ClientEventType) *EventListene
// Register the listener for each event type
for _, event := range events {
if !c.eventBus.Has(event) {
c.eventBus.Set(event, result.NewResultMap[string, *EventListener]())
c.eventBus.Set(event, result.NewMap[string, *EventListener]())
}
listeners, _ := c.eventBus.Get(event)
listeners.Set(id, listener)

View File

@@ -31,8 +31,8 @@ type DOMEventListener struct {
func NewDOMManager(ctx *Context) *DOMManager {
return &DOMManager{
ctx: ctx,
elementObservers: result.NewResultMap[string, *ElementObserver](),
eventListeners: result.NewResultMap[string, *DOMEventListener](),
elementObservers: result.NewMap[string, *ElementObserver](),
eventListeners: result.NewMap[string, *DOMEventListener](),
}
}

View File

@@ -39,8 +39,8 @@ func NewRepository(opts *NewRepositoryOptions) *Repository {
logger: opts.Logger,
metadataProviderRef: opts.MetadataProviderRef,
extensionBankRef: opts.ExtensionBankRef,
animeProviderSearchCaches: result.NewResultMap[string, *result.Cache[string, *SearchData]](),
animeProviderSmartSearchCaches: result.NewResultMap[string, *result.Cache[string, *SearchData]](),
animeProviderSearchCaches: result.NewMap[string, *result.Cache[string, *SearchData]](),
animeProviderSmartSearchCaches: result.NewMap[string, *result.Cache[string, *SearchData]](),
settings: RepositorySettings{},
mu: sync.Mutex{},
}
@@ -70,8 +70,8 @@ func (r *Repository) OnExtensionReloaded() {
// This is called each time a new extension is added or removed
func (r *Repository) reloadExtensions() {
// Clear the search caches
r.animeProviderSearchCaches = result.NewResultMap[string, *result.Cache[string, *SearchData]]()
r.animeProviderSmartSearchCaches = result.NewResultMap[string, *result.Cache[string, *SearchData]]()
r.animeProviderSearchCaches = result.NewMap[string, *result.Cache[string, *SearchData]]()
r.animeProviderSmartSearchCaches = result.NewMap[string, *result.Cache[string, *SearchData]]()
go func() {
// Create new caches for each provider

View File

@@ -28,7 +28,7 @@ const (
)
var (
metadataCache = result.NewResultMap[string, *TorrentMetadata]()
metadataCache = result.NewMap[string, *TorrentMetadata]()
)
type (

View File

@@ -82,7 +82,7 @@ func NewRepository(opts *NewRepositoryOptions) *Repository {
client: nil,
handler: nil,
settings: mo.Option[Settings]{},
selectionHistoryMap: result.NewResultMap[int, *hibiketorrent.AnimeTorrent](),
selectionHistoryMap: result.NewMap[int, *hibiketorrent.AnimeTorrent](),
torrentRepository: opts.TorrentRepository,
baseAnimeCache: opts.BaseAnimeCache,
completeAnimeCache: opts.CompleteAnimeCache,

View File

@@ -12,7 +12,7 @@ type mapItem[K interface{}, V any] struct {
value V
}
func NewResultMap[K interface{}, V any]() *Map[K, V] {
func NewMap[K interface{}, V any]() *Map[K, V] {
return &Map[K, V]{}
}

View File

@@ -9,7 +9,7 @@ Electron-based desktop client for Seanime. Embeds server and web interface. Succ
</p>
<p align="center">
<img src="../docs/images/4/anime-entry-torrent-stream--sq.jpg" alt="preview" width="70%"/>
<img src="https://seanime.app/bucket/img-2025-10-29-17-13-15.webp?updatedAt=1761758012992" alt="preview" width="80%"/>
</p>
---

Binary file not shown.

Before

Width:  |  Height:  |  Size: 89 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 18 KiB

After

Width:  |  Height:  |  Size: 13 KiB

View File

@@ -250,7 +250,7 @@ export function PluginTray(props: TrayPluginProps) {
}}
data-plugin-tray-popover-content={props.trayIcon.extensionId}
>
<div className="absolute inset-0 top-[-2.5rem]">
<div className="absolute w-full top-[-2.5rem]">
<div className="flex items-center justify-between">
<p
className="text-sm border font-medium text-gray-300 px-1.5 py-0.5 rounded-lg bg-black/60"

View File

@@ -4,6 +4,7 @@ import { useSeaCommandInject } from "@/app/(main)/_features/sea-command/use-inje
import { useServerStatus } from "@/app/(main)/_hooks/use-server-status"
import { MangaHorizontalReader } from "@/app/(main)/manga/_containers/chapter-reader/_components/chapter-horizontal-reader"
import { MangaVerticalReader } from "@/app/(main)/manga/_containers/chapter-reader/_components/chapter-vertical-reader"
import { MangaReaderActionDisplay } from "@/app/(main)/manga/_containers/chapter-reader/manga-reader-action-display"
import { MangaReaderBar } from "@/app/(main)/manga/_containers/chapter-reader/manga-reader-bar"
import {
useCurrentChapter,
@@ -330,6 +331,8 @@ export function ChapterReaderDrawer(props: ChapterDrawerProps) {
hiddenBar && "max-h-dvh",
)} tabIndex={-1}
>
<MangaReaderActionDisplay />
{pageContainerError ? (
<LuffyError
title="Failed to load pages"

View File

@@ -39,6 +39,7 @@ import { AiOutlineColumnHeight, AiOutlineColumnWidth } from "react-icons/ai"
import { BiCog } from "react-icons/bi"
import { FaRedo, FaRegImage } from "react-icons/fa"
import { GiResize } from "react-icons/gi"
import { LuSettings } from "react-icons/lu"
import { MdMenuBook, MdOutlinePhotoSizeSelectLarge } from "react-icons/md"
import { PiArrowCircleLeftDuotone, PiArrowCircleRightDuotone, PiReadCvLogoLight, PiScrollDuotone } from "react-icons/pi"
import { TbArrowAutofitHeight } from "react-icons/tb"
@@ -67,6 +68,12 @@ const radioGroupClasses = {
),
}
export const MANGA_READING_MODE_ICONS = {
[MangaReadingMode.LONG_STRIP]: <PiScrollDuotone className="text-xl" />,
[MangaReadingMode.PAGED]: <PiReadCvLogoLight className="text-xl" />,
[MangaReadingMode.DOUBLE_PAGE]: <MdMenuBook className="text-xl" />,
}
export const MANGA_READING_MODE_OPTIONS = [
{
value: MangaReadingMode.LONG_STRIP,
@@ -82,6 +89,11 @@ export const MANGA_READING_MODE_OPTIONS = [
},
]
export const MANGA_READING_DIRECTION_ICONS = {
[MangaReadingDirection.LTR]: <PiArrowCircleRightDuotone className="text-2xl" />,
[MangaReadingDirection.RTL]: <PiArrowCircleLeftDuotone className="text-2xl" />,
}
export const MANGA_READING_DIRECTION_OPTIONS = [
{
value: MangaReadingDirection.LTR,
@@ -93,6 +105,13 @@ export const MANGA_READING_DIRECTION_OPTIONS = [
},
]
export const MANGA_PAGE_FIT_ICONS = {
[MangaPageFit.CONTAIN]: <AiOutlineColumnHeight className="text-xl" />,
[MangaPageFit.LARGER]: <TbArrowAutofitHeight className="text-xl" />,
[MangaPageFit.COVER]: <AiOutlineColumnWidth className="text-xl" />,
[MangaPageFit.TRUE_SIZE]: <FaRegImage className="text-xl" />,
}
export const MANGA_PAGE_FIT_OPTIONS = [
{
value: MangaPageFit.CONTAIN,
@@ -112,6 +131,11 @@ export const MANGA_PAGE_FIT_OPTIONS = [
},
]
export const MANGA_PAGE_STRETCH_ICONS = {
[MangaPageStretch.NONE]: <MdOutlinePhotoSizeSelectLarge className="text-xl" />,
[MangaPageStretch.STRETCH]: <GiResize className="text-xl" />,
}
export const MANGA_PAGE_STRETCH_OPTIONS = [
{
value: MangaPageStretch.NONE,
@@ -335,9 +359,10 @@ export function ChapterReaderSettings(props: ChapterReaderSettingsProps) {
<DropdownMenu
trigger={<IconButton
data-chapter-reader-settings-dropdown-menu-trigger
icon={<BiCog />}
icon={<LuSettings />}
intent="gray-basic"
className="flex lg:hidden"
tabIndex={-1}
/>}
className="block lg:hidden"
data-chapter-reader-settings-dropdown-menu

View File

@@ -0,0 +1,54 @@
import { cn } from "@/components/ui/core/styling"
import { atom, useAtom } from "jotai"
import React from "react"
// A one-shot message flashed over the reader (e.g. the name of a setting the
// user just toggled with a keyboard shortcut).
type MangaReaderFlashAction = {
    id: string
    message: string
    timestamp: number
}

// Currently displayed flash message, or null when nothing is shown.
export const manga_flashAction = atom<MangaReaderFlashAction | null>(null)
// Pending auto-dismiss timer for the current flash message.
export const manga_flashActionTimeout = atom<ReturnType<typeof setTimeout> | null>(null)

// Write-only atom: shows a flash message and schedules its dismissal.
// A new flash cancels the previous dismissal timer, so rapid toggles keep the
// overlay visible with the latest message.
export const manga_doFlashAction = atom(null, (get, set, payload: { message: string, duration?: number }) => {
    const id = Date.now().toString()
    const timeout = get(manga_flashActionTimeout)
    set(manga_flashAction, { id, message: payload.message, timestamp: Date.now() })
    // Cancel the previous auto-dismiss so it can't hide the new message early.
    if (timeout) {
        clearTimeout(timeout)
    }
    // Auto-dismiss after the requested duration (800ms by default).
    const t = setTimeout(() => {
        set(manga_flashAction, null)
        set(manga_flashActionTimeout, null)
    }, payload.duration ?? 800)
    set(manga_flashActionTimeout, t)
})
// Transient overlay for the chapter reader: flashes the label of whatever
// setting the user just toggled near the top of the viewport. Renders nothing
// between flashes.
export function MangaReaderActionDisplay() {
    const [flash] = useAtom(manga_flashAction)

    if (!flash) return null

    const boxClass = cn(
        "text-white px-3 py-2 !text-lg font-semibold rounded-lg bg-black/50 backdrop-blur-sm tracking-wide",
    )

    return (
        <div className="absolute top-16 left-1/2 transform -translate-x-1/2 z-50 pointer-events-none">
            <div className={boxClass}>
                {flash.message}
            </div>
        </div>
    )
}
// Convenience hook exposing the write end of the flash-action atom.
export function useMangaReaderFlashAction() {
    const flashAction = useAtom(manga_doFlashAction)[1]
    return { flashAction }
}

View File

@@ -2,9 +2,13 @@ import { AL_BaseManga, Manga_PageContainer } from "@/api/generated/types"
import { ___manga_scrollSignalAtom } from "@/app/(main)/manga/_containers/chapter-reader/_components/chapter-vertical-reader"
import {
ChapterReaderSettings,
MANGA_PAGE_FIT_ICONS,
MANGA_PAGE_FIT_OPTIONS,
MANGA_PAGE_STRETCH_ICONS,
MANGA_PAGE_STRETCH_OPTIONS,
MANGA_READING_DIRECTION_ICONS,
MANGA_READING_DIRECTION_OPTIONS,
MANGA_READING_MODE_ICONS,
MANGA_READING_MODE_OPTIONS,
} from "@/app/(main)/manga/_containers/chapter-reader/chapter-reader-settings"
import { __manga_selectedChapterAtom, MangaReader_SelectedChapter, useHandleChapterPageStatus } from "@/app/(main)/manga/_lib/handle-chapter-reader"
@@ -31,7 +35,7 @@ import { useSetAtom } from "jotai"
import { useAtom, useAtomValue } from "jotai/react"
import React from "react"
import { BiX } from "react-icons/bi"
import { LuChevronLeft, LuChevronRight, LuInfo } from "react-icons/lu"
import { LuChevronLeft, LuChevronRight } from "react-icons/lu"
type MangaReaderBarProps = {
children?: React.ReactNode
@@ -280,11 +284,17 @@ export function MangaReaderBar(props: MangaReaderBarProps) {
<div data-manga-reader-bar-info-container className="hidden lg:flex">
<Popover
modal={true}
tabIndex={-1}
trigger={
<IconButton
icon={<LuInfo />}
icon={<span className="flex items-center gap-1">
{MANGA_READING_MODE_ICONS[readingMode]}
{MANGA_PAGE_FIT_ICONS[pageFit]}
{pageStretch !== MangaPageStretch.NONE && MANGA_PAGE_STRETCH_ICONS[pageStretch]}
{readingMode !== MangaReadingMode.LONG_STRIP && MANGA_READING_DIRECTION_ICONS[readingDirection]}
</span>}
intent="gray-basic"
className="opacity-50 outline-0"
className="opacity-50 outline-0 w-fit px-2 focus-visible:outline-0 focus-visible:ring-0 ml-2"
tabIndex={-1}
/>
}

View File

@@ -1,4 +1,5 @@
import { Manga_ChapterContainer, Manga_PageContainer, Nullish } from "@/api/generated/types"
import { manga_doFlashAction } from "@/app/(main)/manga/_containers/chapter-reader/manga-reader-action-display"
import { useMangaEntryDownloadedChapters } from "@/app/(main)/manga/_lib/handle-manga-downloads"
import { getDecimalFromChapter, isChapterAfter, isChapterBefore } from "@/app/(main)/manga/_lib/handle-manga-utils"
import {
@@ -360,26 +361,72 @@ export function useSwitchSettingsWithKeys() {
const [pageFit, setPageFit] = useAtom(__manga_pageFitAtom)
const [pageStretch, setPageStretch] = useAtom(__manga_pageStretchAtom)
const [doublePageOffset, setDoublePageOffset] = useAtom(__manga_doublePageOffsetAtom)
const setFlashAction = useSetAtom(manga_doFlashAction)
const switchValue = (currentValue: string, possibleValues: string[], setValue: (v: any) => void) => {
const getReadingModeLabel = (value: string) => {
const labels: Record<string, string> = {
[MangaReadingMode.LONG_STRIP]: "Long Strip",
[MangaReadingMode.PAGED]: "Single Page",
[MangaReadingMode.DOUBLE_PAGE]: "Double Page",
}
return labels[value] || value
}
const getReadingDirectionLabel = (value: string) => {
const labels: Record<string, string> = {
[MangaReadingDirection.LTR]: "Left to Right",
[MangaReadingDirection.RTL]: "Right to Left",
}
return labels[value] || value
}
const getPageFitLabel = (value: string) => {
const labels: Record<string, string> = {
[MangaPageFit.CONTAIN]: "Contain",
[MangaPageFit.LARGER]: "Overflow",
[MangaPageFit.COVER]: "Cover",
[MangaPageFit.TRUE_SIZE]: "True size",
}
return labels[value] || value
}
const getPageStretchLabel = (value: string) => {
const labels: Record<string, string> = {
[MangaPageStretch.NONE]: "None",
[MangaPageStretch.STRETCH]: "Stretch",
}
return labels[value] || value
}
const switchValue = (currentValue: string, possibleValues: string[], setValue: (v: any) => void, getLabel: (v: string) => string) => {
const currentIndex = possibleValues.indexOf(currentValue)
const nextIndex = (currentIndex + 1) % possibleValues.length
setValue(possibleValues[nextIndex])
const nextValue = possibleValues[nextIndex]
setValue(nextValue)
setFlashAction({ message: getLabel(nextValue) })
}
const incrementOffset = () => {
setDoublePageOffset(prev => Math.max(0, prev + 1))
setDoublePageOffset(prev => {
const newValue = Math.max(0, prev + 1)
setFlashAction({ message: `Double Page Offset: ${newValue}` })
return newValue
})
}
const decrementOffset = () => {
setDoublePageOffset(prev => Math.max(0, prev - 1))
setDoublePageOffset(prev => {
const newValue = Math.max(0, prev - 1)
setFlashAction({ message: `Double Page Offset: ${newValue}` })
return newValue
})
}
React.useEffect(() => {
mousetrap.bind("m", () => switchValue(readingMode, Object.values(MangaReadingMode), setReadingMode))
mousetrap.bind("d", () => switchValue(readingDirection, Object.values(MangaReadingDirection), setReadingDirection))
mousetrap.bind("f", () => switchValue(pageFit, Object.values(MangaPageFit), setPageFit))
mousetrap.bind("s", () => switchValue(pageStretch, Object.values(MangaPageStretch), setPageStretch))
mousetrap.bind("m", () => switchValue(readingMode, Object.values(MangaReadingMode), setReadingMode, getReadingModeLabel))
mousetrap.bind("d", () => switchValue(readingDirection, Object.values(MangaReadingDirection), setReadingDirection, getReadingDirectionLabel))
mousetrap.bind("f", () => switchValue(pageFit, Object.values(MangaPageFit), setPageFit, getPageFitLabel))
mousetrap.bind("s", () => switchValue(pageStretch, Object.values(MangaPageStretch), setPageStretch, getPageStretchLabel))
mousetrap.bind("shift+right", () => incrementOffset())
mousetrap.bind("shift+left", () => decrementOffset())