Fix issues with account switching
This commit is contained in:
+14
-11
@@ -2,6 +2,7 @@ package webdav
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/sirrobot01/decypharr/pkg/debrid/types"
|
||||
"io"
|
||||
"net/http"
|
||||
"os"
|
||||
@@ -29,7 +30,7 @@ type File struct {
|
||||
name string
|
||||
torrentName string
|
||||
link string
|
||||
downloadLink string
|
||||
downloadLink *types.DownloadLink
|
||||
size int64
|
||||
isDir bool
|
||||
fileId string
|
||||
@@ -55,26 +56,26 @@ func (f *File) Close() error {
|
||||
// This is just to satisfy the os.File interface
|
||||
f.content = nil
|
||||
f.children = nil
|
||||
f.downloadLink = ""
|
||||
f.downloadLink = nil
|
||||
f.readOffset = 0
|
||||
return nil
|
||||
}
|
||||
|
||||
func (f *File) getDownloadLink() (string, error) {
|
||||
func (f *File) getDownloadLink() (*types.DownloadLink, error) {
|
||||
// Check if we already have a final URL cached
|
||||
|
||||
if f.downloadLink != "" && isValidURL(f.downloadLink) {
|
||||
if f.downloadLink != nil && isValidURL(f.downloadLink.DownloadLink) {
|
||||
return f.downloadLink, nil
|
||||
}
|
||||
downloadLink, err := f.cache.GetDownloadLink(f.torrentName, f.name, f.link)
|
||||
if err != nil {
|
||||
return "", err
|
||||
return nil, err
|
||||
}
|
||||
if downloadLink != "" && isValidURL(downloadLink) {
|
||||
if downloadLink != nil && isValidURL(downloadLink.DownloadLink) {
|
||||
f.downloadLink = downloadLink
|
||||
return downloadLink, nil
|
||||
}
|
||||
return "", os.ErrNotExist
|
||||
return nil, os.ErrNotExist
|
||||
}
|
||||
|
||||
func (f *File) getDownloadByteRange() (*[2]int64, error) {
|
||||
@@ -138,12 +139,12 @@ func (f *File) streamWithRetry(w http.ResponseWriter, r *http.Request, retryCoun
|
||||
return &streamError{Err: err, StatusCode: http.StatusPreconditionFailed}
|
||||
}
|
||||
|
||||
if downloadLink == "" {
|
||||
if downloadLink == nil {
|
||||
return &streamError{Err: fmt.Errorf("empty download link"), StatusCode: http.StatusNotFound}
|
||||
}
|
||||
|
||||
// Create upstream request with streaming optimizations
|
||||
upstreamReq, err := http.NewRequest("GET", downloadLink, nil)
|
||||
upstreamReq, err := http.NewRequest("GET", downloadLink.DownloadLink, nil)
|
||||
if err != nil {
|
||||
return &streamError{Err: err, StatusCode: http.StatusInternalServerError}
|
||||
}
|
||||
@@ -168,6 +169,7 @@ func (f *File) streamWithRetry(w http.ResponseWriter, r *http.Request, retryCoun
|
||||
// Retry with new download link
|
||||
_log.Debug().
|
||||
Int("retry_count", retryCount+1).
|
||||
Str("account", downloadLink.MaskedToken).
|
||||
Str("file", f.name).
|
||||
Msg("Retrying stream request")
|
||||
return f.streamWithRetry(w, r, retryCount+1)
|
||||
@@ -279,6 +281,7 @@ func (f *File) handleUpstream(resp *http.Response, retryCount, maxRetries int) (
|
||||
_log.Debug().
|
||||
Str("file", f.name).
|
||||
Int("retry_count", retryCount).
|
||||
Str("account", f.downloadLink.MaskedToken).
|
||||
Msg("Bandwidth exceeded. Marking link as invalid")
|
||||
|
||||
f.cache.MarkDownloadLinkAsInvalid(f.link, f.downloadLink, "bandwidth_exceeded")
|
||||
@@ -289,7 +292,7 @@ func (f *File) handleUpstream(resp *http.Response, retryCount, maxRetries int) (
|
||||
}
|
||||
|
||||
return false, &streamError{
|
||||
Err: fmt.Errorf("bandwidth exceeded after %d retries", retryCount),
|
||||
Err: fmt.Errorf("bandwidth exceeded for %s after %d retries", f.downloadLink.MaskedToken, retryCount),
|
||||
StatusCode: http.StatusServiceUnavailable,
|
||||
}
|
||||
}
|
||||
@@ -312,7 +315,7 @@ func (f *File) handleUpstream(resp *http.Response, retryCount, maxRetries int) (
|
||||
// Try to regenerate download link if we haven't exceeded retries
|
||||
if retryCount < maxRetries {
|
||||
// Clear cached link to force regeneration
|
||||
f.downloadLink = ""
|
||||
f.downloadLink = nil
|
||||
return true, nil
|
||||
}
|
||||
|
||||
|
||||
@@ -451,15 +451,15 @@ func (h *Handler) handleGet(w http.ResponseWriter, r *http.Request) {
|
||||
// Handle nginx proxy (X-Accel-Redirect)
|
||||
if file.content == nil && !file.isRar && h.cache.StreamWithRclone() {
|
||||
link, err := file.getDownloadLink()
|
||||
if err != nil || link == "" {
|
||||
if err != nil || link == nil {
|
||||
http.Error(w, "Could not fetch download link", http.StatusPreconditionFailed)
|
||||
return
|
||||
}
|
||||
|
||||
w.Header().Set("Content-Disposition", fmt.Sprintf("attachment; filename=\"%s\"", fi.Name()))
|
||||
w.Header().Set("X-Accel-Redirect", link)
|
||||
w.Header().Set("X-Accel-Redirect", link.DownloadLink)
|
||||
w.Header().Set("X-Accel-Buffering", "no")
|
||||
http.Redirect(w, r, link, http.StatusFound)
|
||||
http.Redirect(w, r, link.DownloadLink, http.StatusFound)
|
||||
return
|
||||
}
|
||||
|
||||
|
||||
Reference in New Issue
Block a user