From ffb1745bf6e213d56ac2cd9b3d2c3d85fcb76388 Mon Sep 17 00:00:00 2001 From: Mukhtar Akere Date: Sun, 11 May 2025 15:20:06 +0100 Subject: [PATCH] Add support for rclone refresh dirs instead of refreshing everything --- internal/config/webdav.go | 7 +- pkg/debrid/debrid/refresh.go | 19 +++-- pkg/web/templates/config.html | 22 ++++-- pkg/webdav/handler.go | 135 ---------------------------------- pkg/webdav/propfind.go | 128 ++++++++++++++++++++++++++++++++ 5 files changed, 159 insertions(+), 152 deletions(-) create mode 100644 pkg/webdav/propfind.go diff --git a/internal/config/webdav.go b/internal/config/webdav.go index 7088920..17998f3 100644 --- a/internal/config/webdav.go +++ b/internal/config/webdav.go @@ -15,9 +15,10 @@ type WebDav struct { FolderNaming string `json:"folder_naming,omitempty"` // Rclone - RcUrl string `json:"rc_url,omitempty"` - RcUser string `json:"rc_user,omitempty"` - RcPass string `json:"rc_pass,omitempty"` + RcUrl string `json:"rc_url,omitempty"` + RcUser string `json:"rc_user,omitempty"` + RcPass string `json:"rc_pass,omitempty"` + RcRefreshDirs string `json:"rc_refresh_dirs,omitempty"` // comma separated list of directories to refresh // Directories Directories map[string]WebdavDirectories `json:"directories,omitempty"` diff --git a/pkg/debrid/debrid/refresh.go b/pkg/debrid/debrid/refresh.go index e197a75..3c9b511 100644 --- a/pkg/debrid/debrid/refresh.go +++ b/pkg/debrid/debrid/refresh.go @@ -138,12 +138,19 @@ func (c *Cache) refreshRclone() error { } // Create form data data := "" - for index, dir := range c.GetDirectories() { - if dir != "" { - if index == 0 { - data += "dir=" + dir - } else { - data += "&dir" + fmt.Sprint(index+1) + "=" + dir + dirs := strings.FieldsFunc(cfg.RcRefreshDirs, func(r rune) bool { + return r == ',' || r == '&' + }) + if len(dirs) == 0 { + data = "dir=__all__" + } else { + for index, dir := range dirs { + if dir != "" { + if index == 0 { + data += "dir=" + dir + } else { + data += "&dir" + 
fmt.Sprint(index+1) + "=" + dir + } } } } diff --git a/pkg/web/templates/config.html b/pkg/web/templates/config.html index a90155a..f4df50d 100644 --- a/pkg/web/templates/config.html +++ b/pkg/web/templates/config.html @@ -372,13 +372,13 @@
Webdav
-
+
How often to refresh the torrents list from debrid(instant when using webdav)
-
- +
+ How often to refresh the download links list from debrid
@@ -387,6 +387,11 @@ How long to keep the links in the webdav before expiring
+
+ + + Number of workers to use for the webdav server(when refreshing) +
How to name each torrent directory in the webdav
-
- - - Number of workers to use for the webdav server(when refreshing) -
Rclone RC URL for the webdav server(speeds up import significantly)
+
+ + + Directories to refresh via RC(comma-separated e.g. __all__, torrents) +
@@ -1090,6 +1095,7 @@ debrid.rc_url = document.querySelector(`[name="debrid[${i}].rc_url"]`).value; debrid.rc_user = document.querySelector(`[name="debrid[${i}].rc_user"]`).value; debrid.rc_pass = document.querySelector(`[name="debrid[${i}].rc_pass"]`).value; + debrid.rc_refresh_dirs = document.querySelector(`[name="debrid[${i}].rc_refresh_dirs"]`).value; //custom folders debrid.directories = {}; diff --git a/pkg/webdav/handler.go b/pkg/webdav/handler.go index 6f5bea3..0dc9d2c 100644 --- a/pkg/webdav/handler.go +++ b/pkg/webdav/handler.go @@ -3,7 +3,6 @@ package webdav import ( "bytes" "context" - "encoding/xml" "fmt" "html/template" "io" @@ -31,41 +30,6 @@ type Handler struct { RootPath string } -type DAVResponse struct { - XMLName xml.Name `xml:"d:response"` - Href string `xml:"d:href"` - PropStat PropStat `xml:"d:propstat"` -} - -type PropStat struct { - XMLName xml.Name `xml:"d:propstat"` - Prop Prop `xml:"d:prop"` - Status string `xml:"d:status"` -} - -type Prop struct { - XMLName xml.Name `xml:"d:prop"` - ResourceType *ResourceType `xml:"d:resourcetype,omitempty"` - LastModified string `xml:"d:getlastmodified,omitempty"` - ContentLength int64 `xml:"d:getcontentlength,omitempty"` - DisplayName string `xml:"d:displayname,omitempty"` -} - -type ResourceType struct { - XMLName xml.Name `xml:"d:resourcetype"` - Collection *Collection `xml:"d:collection,omitempty"` -} - -type Collection struct { - XMLName xml.Name `xml:"d:collection"` -} - -type MultiStatus struct { - XMLName xml.Name `xml:"d:multistatus"` - Namespace string `xml:"xmlns:d,attr"` - Responses []DAVResponse `xml:"d:response"` -} - func NewHandler(name string, cache *debrid.Cache, logger zerolog.Logger) *Handler { h := &Handler{ Name: name, @@ -540,102 +504,3 @@ func (h *Handler) handleOptions(w http.ResponseWriter, r *http.Request) { w.Header().Set("DAV", "1, 2") w.WriteHeader(http.StatusOK) } - -func (h *Handler) handlePropfind(w http.ResponseWriter, r *http.Request) { - // Setup context for metadata 
only - ctx := context.WithValue(r.Context(), "metadataOnly", true) - r = r.WithContext(ctx) - - // Determine depth (default "1") - depth := r.Header.Get("Depth") - if depth == "" { - depth = "1" - } - - cleanPath := path.Clean(r.URL.Path) - - // Build the list of entries - type entry struct { - href string - fi os.FileInfo - } - var entries []entry - - // Always include the resource itself - f, err := h.OpenFile(r.Context(), cleanPath, os.O_RDONLY, 0) - if err == nil { - defer f.Close() - - if fi, err2 := f.Stat(); err2 == nil { - entries = append(entries, entry{ - href: cleanPath, - fi: fi, - }) - - // Add children if directory and depth isn't 0 - if fi.IsDir() { - children := h.getChildren(cleanPath) - for _, child := range children { - entries = append(entries, entry{ - href: path.Join("/", cleanPath, child.Name()) + "/", - fi: child, - }) - } - } - } - } - - // Create MultiStatus response - multiStatus := MultiStatus{ - Namespace: "DAV:", - Responses: []DAVResponse{}, - } - - // Add responses for each entry - for _, e := range entries { - var resourceType *ResourceType - var contentLength int64 - - if e.fi.IsDir() { - resourceType = &ResourceType{ - Collection: &Collection{}, - } - } else { - contentLength = e.fi.Size() - } - - // Format href path properly - raw := e.href - u := &url.URL{Path: raw} - escaped := u.EscapedPath() - - response := DAVResponse{ - Href: escaped, - PropStat: PropStat{ - Prop: Prop{ - ResourceType: resourceType, - LastModified: e.fi.ModTime().Format("2006-01-02T15:04:05.000-07:00"), - ContentLength: contentLength, - DisplayName: e.fi.Name(), - }, - Status: "HTTP/1.1 200 OK", - }, - } - - multiStatus.Responses = append(multiStatus.Responses, response) - } - - // Marshal to XML - body, err := xml.Marshal(multiStatus) - if err != nil { - http.Error(w, "Internal Server Error", http.StatusInternalServerError) - return - } - w.Header().Set("Content-Type", "application/xml; charset=utf-8") - w.Header().Set("Vary", "Accept-Encoding") - - // Set 
status code - w.WriteHeader(207) // MultiStatus - _, _ = w.Write([]byte(xml.Header)) - _, _ = w.Write(body) -} diff --git a/pkg/webdav/propfind.go b/pkg/webdav/propfind.go new file mode 100644 index 0000000..2586bd8 --- /dev/null +++ b/pkg/webdav/propfind.go @@ -0,0 +1,128 @@ +package webdav + +import ( + "context" + "fmt" + "net/http" + "net/url" + "os" + "path" + "strings" +) + +func (h *Handler) handlePropfind(w http.ResponseWriter, r *http.Request) { + // Setup context for metadata only + ctx := context.WithValue(r.Context(), "metadataOnly", true) + r = r.WithContext(ctx) + + // Determine depth (default "1") + depth := r.Header.Get("Depth") + if depth == "" { + depth = "1" + } + + cleanPath := path.Clean(r.URL.Path) + + // Build the list of entries + type entry struct { + href string + fi os.FileInfo + } + + // Always include the resource itself + f, err := h.OpenFile(r.Context(), cleanPath, os.O_RDONLY, 0) + if err != nil { + h.logger.Error().Err(err).Str("path", cleanPath).Msg("Failed to open file") + http.NotFound(w, r) + return + } + defer f.Close() + + fi, err := f.Stat() + if err != nil { + h.logger.Error().Err(err).Msg("Failed to stat file") + http.Error(w, "Server Error", http.StatusInternalServerError) + return + } + + // Collect children if a directory and depth allows + children := make([]os.FileInfo, 0) + if fi.IsDir() && depth != "0" { + children = h.getChildren(cleanPath) + } + + entries := make([]entry, 0, 1+len(children)) + entries = append(entries, entry{href: cleanPath, fi: fi}) + + for _, child := range children { + childHref := path.Join("/", cleanPath, child.Name()) + if child.IsDir() { + childHref += "/" + } + entries = append(entries, entry{href: childHref, fi: child}) + } + + // Use a string builder for creating XML + var sb strings.Builder + + // XML header and main element + sb.WriteString(``) + sb.WriteString(``) + + // Format time once + timeFormat := "2006-01-02T15:04:05.000-07:00" + + // Add responses for each entry + for _, e := 
range entries {
+		// Format href path properly
+		u := &url.URL{Path: e.href}
+		escaped := u.EscapedPath()
+
+		sb.WriteString(`<d:response>`)
+		sb.WriteString(fmt.Sprintf(`<d:href>%s</d:href>`, xmlEscape(escaped)))
+		sb.WriteString(`<d:propstat>`)
+		sb.WriteString(`<d:prop>`)
+
+		// Resource type differs based on directory vs file
+		if e.fi.IsDir() {
+			sb.WriteString(`<d:resourcetype><d:collection/></d:resourcetype>`)
+		} else {
+			sb.WriteString(`<d:resourcetype/>`)
+			sb.WriteString(fmt.Sprintf(`<d:getcontentlength>%d</d:getcontentlength>`, e.fi.Size()))
+		}
+
+		// Always add lastmodified and displayname
+		lastModified := e.fi.ModTime().Format(timeFormat)
+		sb.WriteString(fmt.Sprintf(`<d:getlastmodified>%s</d:getlastmodified>`, xmlEscape(lastModified)))
+		sb.WriteString(fmt.Sprintf(`<d:displayname>%s</d:displayname>`, xmlEscape(e.fi.Name())))
+
+		sb.WriteString(`</d:prop>`)
+		sb.WriteString(`<d:status>HTTP/1.1 200 OK</d:status>`)
+		sb.WriteString(`</d:propstat>`)
+		sb.WriteString(`</d:response>`)
+	}
+
+	// Close root element
+	sb.WriteString(`</d:multistatus>`)
+
+	// Set headers
+	w.Header().Set("Content-Type", "application/xml; charset=utf-8")
+	w.Header().Set("Vary", "Accept-Encoding")
+
+	// Set status code and write response
+	w.WriteHeader(http.StatusMultiStatus) // 207 MultiStatus
+	_, _ = w.Write([]byte(sb.String()))
+}
+
+// Basic XML escaping function
+func xmlEscape(s string) string {
+	s = strings.ReplaceAll(s, "&", "&amp;")
+	s = strings.ReplaceAll(s, "<", "&lt;")
+	s = strings.ReplaceAll(s, ">", "&gt;")
+	s = strings.ReplaceAll(s, "'", "&apos;")
+	s = strings.ReplaceAll(s, "\"", "&quot;")
+	s = strings.ReplaceAll(s, "\n", " ")
+	s = strings.ReplaceAll(s, "\r", " ")
+	s = strings.ReplaceAll(s, "\t", " ")
+	return s
+}