From e123a2fd5e634458b07de30d1ebf3f1fabb071b1 Mon Sep 17 00:00:00 2001 From: Mukhtar Akere Date: Thu, 26 Jun 2025 03:51:28 +0100 Subject: [PATCH] Hotfix issues with 1.0.3 --- .github/ISSUE_TEMPLATE/bug_report.yml | 2 +- pkg/debrid/providers/realdebrid/realdebrid.go | 2 +- pkg/debrid/store/refresh.go | 4 +- pkg/debrid/types/account.go | 31 ++- pkg/repair/repair.go | 185 ++++-------------- pkg/web/api.go | 22 --- pkg/web/routes.go | 1 - pkg/web/templates/config.html | 3 + pkg/web/templates/repair.html | 148 +------------- pkg/webdav/misc.go | 12 -- 10 files changed, 76 insertions(+), 334 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/bug_report.yml b/.github/ISSUE_TEMPLATE/bug_report.yml index 5838303..b6db022 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.yml +++ b/.github/ISSUE_TEMPLATE/bug_report.yml @@ -72,5 +72,5 @@ body: label: Trace Logs have been provided as applicable description: Trace logs are **generally required** and are not optional for all bug reports and contain `trace`. Info logs are invalid for bug reports and do not contain `debug` nor `trace` options: - - label: I have read and followed the steps in the wiki link above and provided the required trace logs - the logs contain `trace` - that are relevant and show this issue. + - label: I have read and followed the steps in the documentation link and provided the required trace logs - the logs contain `trace` - that are relevant and show this issue. 
required: true \ No newline at end of file diff --git a/pkg/debrid/providers/realdebrid/realdebrid.go b/pkg/debrid/providers/realdebrid/realdebrid.go index dedf3ef..1975507 100644 --- a/pkg/debrid/providers/realdebrid/realdebrid.go +++ b/pkg/debrid/providers/realdebrid/realdebrid.go @@ -621,7 +621,7 @@ func (r *RealDebrid) CheckLink(link string) error { func (r *RealDebrid) _getDownloadLink(file *types.File) (*types.DownloadLink, error) { url := fmt.Sprintf("%s/unrestrict/link/", r.Host) _link := file.Link - if strings.HasPrefix(_link, "https://real-debrid.com/d/") { + if strings.HasPrefix(file.Link, "https://real-debrid.com/d/") && len(file.Link) > 39 { _link = file.Link[0:39] } payload := gourl.Values{ diff --git a/pkg/debrid/store/refresh.go b/pkg/debrid/store/refresh.go index 9f77f8b..2682441 100644 --- a/pkg/debrid/store/refresh.go +++ b/pkg/debrid/store/refresh.go @@ -137,10 +137,10 @@ func (c *Cache) refreshRclone() error { } client := &http.Client{ - Timeout: 30 * time.Second, + Timeout: 60 * time.Second, Transport: &http.Transport{ MaxIdleConns: 10, - IdleConnTimeout: 30 * time.Second, + IdleConnTimeout: 60 * time.Second, DisableCompression: false, MaxIdleConnsPerHost: 5, }, diff --git a/pkg/debrid/types/account.go b/pkg/debrid/types/account.go index 9187bca..0533908 100644 --- a/pkg/debrid/types/account.go +++ b/pkg/debrid/types/account.go @@ -18,7 +18,7 @@ func NewAccounts(debridConf config.Debrid) *Accounts { if token == "" { continue } - account := newAccount(token, idx) + account := newAccount(debridConf.Name, token, idx) accounts = append(accounts, account) } @@ -33,6 +33,7 @@ func NewAccounts(debridConf config.Debrid) *Accounts { } type Account struct { + Debrid string // e.g., "realdebrid", "torbox", etc. 
Order int Disabled bool Token string @@ -176,30 +177,31 @@ func (a *Accounts) SetDownloadLinks(links map[string]*DownloadLink) { a.Current().setLinks(links) } -func newAccount(token string, index int) *Account { +func newAccount(debridName, token string, index int) *Account { return &Account{ - Token: token, - Order: index, - links: make(map[string]*DownloadLink), + Debrid: debridName, + Token: token, + Order: index, + links: make(map[string]*DownloadLink), } } func (a *Account) getLink(fileLink string) (*DownloadLink, bool) { a.mu.RLock() defer a.mu.RUnlock() - dl, ok := a.links[fileLink[0:39]] + dl, ok := a.links[a.sliceFileLink(fileLink)] return dl, ok } func (a *Account) setLink(fileLink string, dl *DownloadLink) { a.mu.Lock() defer a.mu.Unlock() - a.links[fileLink[0:39]] = dl + a.links[a.sliceFileLink(fileLink)] = dl } func (a *Account) deleteLink(fileLink string) { a.mu.Lock() defer a.mu.Unlock() - delete(a.links, fileLink[0:39]) + delete(a.links, a.sliceFileLink(fileLink)) } func (a *Account) resetDownloadLinks() { a.mu.Lock() @@ -225,6 +227,17 @@ func (a *Account) setLinks(links map[string]*DownloadLink) { // Expired, continue continue } - a.links[dl.Link[0:39]] = dl + a.links[a.sliceFileLink(dl.Link)] = dl } } + +// slice download link +func (a *Account) sliceFileLink(fileLink string) string { + if a.Debrid != "realdebrid" { + return fileLink + } + if len(fileLink) < 39 { + return fileLink + } + return fileLink[0:39] +} diff --git a/pkg/repair/repair.go b/pkg/repair/repair.go index 63d4cb6..ced38e6 100644 --- a/pkg/repair/repair.go +++ b/pkg/repair/repair.go @@ -75,26 +75,6 @@ type Job struct { ctx context.Context } -func (j *Job) getUnprocessedBrokenItems() map[string][]arr.ContentFile { - items := make(map[string][]arr.ContentFile) - - for arrName, files := range j.BrokenItems { - if len(files) == 0 { - continue // Skip empty file lists - } - items[arrName] = make([]arr.ContentFile, 0, len(files)) - for _, file := range files { - if file.Path != "" && 
file.TargetPath != "" && !file.Processed { - items[arrName] = append(items[arrName], file) - } - } - } - if len(items) == 0 { - return nil // Return nil if no unprocessed items found - } - return items -} - func New(arrs *arr.Storage, engine *debrid.Storage) *Repair { cfg := config.Get() workers := runtime.NumCPU() * 20 @@ -765,7 +745,7 @@ func (r *Repair) ProcessJob(id string) error { return fmt.Errorf("job %s already failed", id) } - brokenItems := job.getUnprocessedBrokenItems() + brokenItems := job.BrokenItems if len(brokenItems) == 0 { r.logger.Info().Msgf("No broken items found for job %s", id) job.CompletedAt = time.Now() @@ -773,144 +753,63 @@ func (r *Repair) ProcessJob(id string) error { return nil } - r.logger.Info().Msgf("Processing job %s with %d broken items", id, len(brokenItems)) - go r.processJob(job, brokenItems) - - return nil -} - -func (r *Repair) processJob(job *Job, brokenItems map[string][]arr.ContentFile) { if job.ctx == nil || job.ctx.Err() != nil { job.ctx, job.cancelFunc = context.WithCancel(r.ctx) } - errs := make([]error, 0) - processedCount := 0 + g, ctx := errgroup.WithContext(job.ctx) + g.SetLimit(r.workers) for arrName, items := range brokenItems { - select { - case <-job.ctx.Done(): - r.logger.Info().Msgf("Job %s cancelled", job.ID) - job.Status = JobCancelled - job.CompletedAt = time.Now() - job.Error = "Job was cancelled" - return - default: - // Continue processing - } + items := items + arrName := arrName + g.Go(func() error { - a := r.arrs.Get(arrName) - if a == nil { - errs = append(errs, fmt.Errorf("arr %s not found", arrName)) - continue - } + select { + case <-ctx.Done(): + return ctx.Err() + default: + } - if err := a.DeleteFiles(items); err != nil { - errs = append(errs, fmt.Errorf("failed to delete broken items for %s: %w", arrName, err)) - continue - } - // Search for missing items - if err := a.SearchMissing(items); err != nil { - errs = append(errs, fmt.Errorf("failed to search missing items for %s: %w", arrName, 
err)) - continue - } - processedCount += len(items) - // Mark this item as processed - for i := range items { - items[i].Processed = true - } - job.BrokenItems[arrName] = items + a := r.arrs.Get(arrName) + if a == nil { + r.logger.Error().Msgf("Arr %s not found", arrName) + return nil + } + + if err := a.DeleteFiles(items); err != nil { + r.logger.Error().Err(err).Msgf("Failed to delete broken items for %s", arrName) + return nil + } + // Search for missing items + if err := a.SearchMissing(items); err != nil { + r.logger.Error().Err(err).Msgf("Failed to search missing items for %s", arrName) + return nil + } + return nil + }) } // Update job status to in-progress job.Status = JobProcessing r.saveToFile() - if len(errs) > 0 { - errMsg := fmt.Sprintf("Job %s encountered errors: %v", job.ID, errs) - job.Error = errMsg - job.FailedAt = time.Now() - job.Status = JobFailed - r.logger.Error().Msg(errMsg) - go func() { - if err := request.SendDiscordMessage("repair_failed", "error", job.discordContext()); err != nil { - r.logger.Error().Msgf("Error sending discord message: %v", err) - } - }() - return - } - remainingItems := job.getUnprocessedBrokenItems() - if len(remainingItems) == 0 { - // All items processed, mark job as completed - job.CompletedAt = time.Now() - job.Status = JobCompleted - r.logger.Info().Msgf("Job %s completed successfully (all items processed)", job.ID) - go func() { - if err := request.SendDiscordMessage("repair_complete", "success", job.discordContext()); err != nil { - r.logger.Error().Msgf("Error sending discord message: %v", err) - } - }() - } else { - // Some items still remain, keep job as pending - job.Status = JobPending - r.logger.Info().Msgf("Job %s: processed %d selected items successfully, %d items remaining", job.ID, processedCount, len(remainingItems)) - go func() { - if err := request.SendDiscordMessage("repair_partial_complete", "info", job.discordContext()); err != nil { - r.logger.Error().Msgf("Error sending discord message: %v", 
err) - } - }() - } - r.saveToFile() -} - -// ProcessJobItems processes the selected items for a job -// selectedItems is the map of arr names to the list of file IDs to process -func (r *Repair) ProcessJobItems(id string, selectedItems map[string][]int) error { - job := r.GetJob(id) - if job == nil { - return fmt.Errorf("job %s not found", id) - } - if job.Status != JobPending { - return fmt.Errorf("job %s not pending", id) - } - if job.StartedAt.IsZero() { - return fmt.Errorf("job %s not started", id) - } - if !job.CompletedAt.IsZero() { - return fmt.Errorf("job %s already completed", id) - } - if !job.FailedAt.IsZero() { - return fmt.Errorf("job %s already failed", id) - } - - brokenItems := job.getUnprocessedBrokenItems() - validatedItems := make(map[string][]arr.ContentFile) - - for arrName, selectedItemsList := range selectedItems { - if jobItems, exists := brokenItems[arrName]; exists { - validItems := make([]arr.ContentFile, 0, len(selectedItemsList)) - for _, item := range selectedItemsList { - // Find the item in the job items - for _, jobItem := range jobItems { - if jobItem.FileId == item { - validItems = append(validItems, jobItem) - break - } - } - } - if len(validItems) > 0 { - validatedItems[arrName] = validItems - } + // Launch a goroutine to wait for completion and update the job + go func() { + if err := g.Wait(); err != nil { + job.FailedAt = time.Now() + job.Error = err.Error() + job.CompletedAt = time.Now() + job.Status = JobFailed + r.logger.Error().Err(err).Msgf("Job %s failed", id) + } else { + job.CompletedAt = time.Now() + job.Status = JobCompleted + r.logger.Info().Msgf("Job %s completed successfully", id) } - } - if len(validatedItems) == 0 { - return fmt.Errorf("no valid items found for job %s", id) - } - job.Status = JobProcessing - r.saveToFile() - - go r.processJob(job, validatedItems) + r.saveToFile() + }() return nil } diff --git a/pkg/web/api.go b/pkg/web/api.go index 7cc6399..99f69af 100644 --- a/pkg/web/api.go +++ 
b/pkg/web/api.go @@ -326,28 +326,6 @@ func (wb *Web) handleProcessRepairJob(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) } -func (wb *Web) handleProcessRepairJobItems(w http.ResponseWriter, r *http.Request) { - id := chi.URLParam(r, "id") - if id == "" { - http.Error(w, "No job ID provided", http.StatusBadRequest) - return - } - var req struct { - Items map[string][]int `json:"items"` - } - if err := json.NewDecoder(r.Body).Decode(&req); err != nil { - http.Error(w, "Invalid request body: "+err.Error(), http.StatusBadRequest) - return - } - _store := store.Get() - if err := _store.Repair().ProcessJobItems(id, req.Items); err != nil { - wb.logger.Error().Err(err).Msg("Failed to process repair job items") - http.Error(w, "Failed to process job items: "+err.Error(), http.StatusInternalServerError) - return - } - w.WriteHeader(http.StatusOK) -} - func (wb *Web) handleDeleteRepairJob(w http.ResponseWriter, r *http.Request) { // Read ids from body var req struct { diff --git a/pkg/web/routes.go b/pkg/web/routes.go index d13b7e5..2a96f2d 100644 --- a/pkg/web/routes.go +++ b/pkg/web/routes.go @@ -28,7 +28,6 @@ func (wb *Web) Routes() http.Handler { r.Post("/repair", wb.handleRepairMedia) r.Get("/repair/jobs", wb.handleGetRepairJobs) r.Post("/repair/jobs/{id}/process", wb.handleProcessRepairJob) - r.Post("/repair/jobs/{id}/process-items", wb.handleProcessRepairJobItems) r.Post("/repair/jobs/{id}/stop", wb.handleStopRepairJob) r.Delete("/repair/jobs", wb.handleDeleteRepairJob) r.Get("/torrents", wb.handleGetTorrents) diff --git a/pkg/web/templates/config.html b/pkg/web/templates/config.html index 15dd472..eb1999e 100644 --- a/pkg/web/templates/config.html +++ b/pkg/web/templates/config.html @@ -1005,6 +1005,9 @@ if (config.max_file_size) { document.querySelector('[name="max_file_size"]').value = config.max_file_size; } + if (config.remove_stalled_after) { + document.querySelector('[name="remove_stalled_after"]').value = config.remove_stalled_after; 
+ } if (config.discord_webhook_url) { document.querySelector('[name="discord_webhook_url"]').value = config.discord_webhook_url; } diff --git a/pkg/web/templates/repair.html b/pkg/web/templates/repair.html index bf342d8..489f26c 100644 --- a/pkg/web/templates/repair.html +++ b/pkg/web/templates/repair.html @@ -130,13 +130,7 @@
Broken Items 0 - 0 selected
-
- -
@@ -171,11 +165,6 @@ - @@ -294,7 +283,7 @@ if (!response.ok) throw new Error(await response.text()); createToast('Repair process initiated successfully!'); - loadJobs(1); // Refresh jobs after submission + await loadJobs(1); // Refresh jobs after submission } catch (error) { createToast(`Error starting repair: ${error.message}`, 'error'); } finally { @@ -391,12 +380,6 @@ let totalItems = job.broken_items ? Object.values(job.broken_items).reduce((sum, arr) => sum + arr.length, 0) : 0; row.innerHTML = ` - @@ -459,9 +442,8 @@ document.querySelectorAll('#jobsPagination a[data-page]').forEach(link => { link.addEventListener('click', (e) => { e.preventDefault(); - const newPage = parseInt(e.currentTarget.dataset.page); - currentPage = newPage; - renderJobsTable(newPage); + currentPage = parseInt(e.currentTarget.dataset.page); + renderJobsTable(currentPage); }); }); @@ -526,7 +508,6 @@ // modal functions function processItemsData(brokenItems) { const items = []; - let itemId = 0; for (const [arrName, itemsArray] of Object.entries(brokenItems)) { if (itemsArray && itemsArray.length > 0) { @@ -601,51 +582,15 @@ row.dataset.itemId = item.id; row.innerHTML = ` - `; - // Make row clickable to toggle selection - row.addEventListener('click', (e) => { - if (e.target.type !== 'checkbox') { - const checkbox = row.querySelector('.item-checkbox'); - checkbox.checked = !checkbox.checked; - checkbox.dispatchEvent(new Event('change')); - } - }); - tableBody.appendChild(row); } - // Add event listeners to checkboxes - document.querySelectorAll('.item-checkbox').forEach(checkbox => { - checkbox.addEventListener('change', (e) => { - const itemId = parseInt(e.target.value); - const row = e.target.closest('tr'); - - if (e.target.checked) { - selectedItems.add(itemId); - row.classList.add('selected'); - } else { - selectedItems.delete(itemId); - row.classList.remove('selected'); - } - - updateItemsStats(); - updateSelectAllStates(); - }); - }); - // Create pagination if (totalPages > 1) { 
const prevLi = document.createElement('li'); @@ -674,45 +619,18 @@ document.querySelectorAll('#itemsPagination a[data-items-page]').forEach(link => { link.addEventListener('click', (e) => { e.preventDefault(); - const newPage = parseInt(e.currentTarget.dataset.itemsPage); - currentItemsPage = newPage; + currentItemsPage = parseInt(e.currentTarget.dataset.itemsPage); renderBrokenItemsTable(); }); }); - - updateSelectAllStates(); } function updateItemsStats() { document.getElementById('totalItemsCount').textContent = allBrokenItems.length; - document.getElementById('selectedItemsCount').textContent = `${selectedItems.size} selected`; - - const processSelectedBtn = document.getElementById('processSelectedItemsBtn'); - processSelectedBtn.disabled = selectedItems.size === 0; // Update footer stats const footerStats = document.getElementById('modalFooterStats'); - footerStats.textContent = `Total: ${allBrokenItems.length} | Filtered: ${filteredItems.length} | Selected: ${selectedItems.size}`; - } - - function updateSelectAllStates() { - const selectAllTableCheckbox = document.getElementById('selectAllItemsTable'); - const visibleCheckboxes = document.querySelectorAll('.item-checkbox'); - const checkedVisible = document.querySelectorAll('.item-checkbox:checked'); - - if (visibleCheckboxes.length === 0) { - selectAllTableCheckbox.indeterminate = false; - selectAllTableCheckbox.checked = false; - } else if (checkedVisible.length === visibleCheckboxes.length) { - selectAllTableCheckbox.indeterminate = false; - selectAllTableCheckbox.checked = true; - } else if (checkedVisible.length > 0) { - selectAllTableCheckbox.indeterminate = true; - selectAllTableCheckbox.checked = false; - } else { - selectAllTableCheckbox.indeterminate = false; - selectAllTableCheckbox.checked = false; - } + footerStats.textContent = `Total: ${allBrokenItems.length} | Filtered: ${filteredItems.length}`; } function populateArrFilter() { @@ -728,62 +646,6 @@ }); } - 
document.getElementById('selectAllItemsTable').addEventListener('change', (e) => { - const visibleCheckboxes = document.querySelectorAll('.item-checkbox'); - visibleCheckboxes.forEach(checkbox => { - const itemId = parseInt(checkbox.value); - const row = checkbox.closest('tr'); - - if (e.target.checked) { - selectedItems.add(itemId); - checkbox.checked = true; - row.classList.add('selected'); - } else { - selectedItems.delete(itemId); - checkbox.checked = false; - row.classList.remove('selected'); - } - }); - updateItemsStats(); - }); - - document.getElementById('processSelectedItemsBtn').addEventListener('click', async () => { - if (selectedItems.size === 0) return; - - const selectedItemsData = allBrokenItems.filter(item => selectedItems.has(item.id)); - - // Group by arr - const itemsByArr = {}; - selectedItemsData.forEach(item => { - if (!itemsByArr[item.arr]) { - itemsByArr[item.arr] = []; - } - itemsByArr[item.arr].push(item.id); - }); - - console.log(itemsByArr); - - try { - const response = await fetcher(`/api/repair/jobs/${currentJob.id}/process-items`, { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify({ items: itemsByArr }) - }); - - if (!response.ok) throw new Error(await response.text()); - createToast(`Processing ${selectedItems.size} selected items`); - - // Close modal and refresh jobs - const modal = bootstrap.Modal.getInstance(document.getElementById('jobDetailsModal')); - modal.hide(); - loadJobs(currentPage); - } catch (error) { - createToast(`Error processing selected items: ${error.message}`, 'error'); - } - }); - // Filter event listeners document.getElementById('itemSearchInput').addEventListener('input', applyFilters); document.getElementById('arrFilterSelect').addEventListener('change', applyFilters); diff --git a/pkg/webdav/misc.go b/pkg/webdav/misc.go index c37e76a..04868b1 100644 --- a/pkg/webdav/misc.go +++ b/pkg/webdav/misc.go @@ -238,17 +238,5 @@ func setVideoResponseHeaders(w 
http.ResponseWriter, resp *http.Response, isRange w.Header().Set("Content-Range", contentRange) } - // Video streaming optimizations - w.Header().Set("Accept-Ranges", "bytes") // Enable seeking - w.Header().Set("Connection", "keep-alive") // Keep connection open - - // Prevent buffering in proxies/CDNs - w.Header().Set("X-Accel-Buffering", "no") // Nginx - w.Header().Set("Proxy-Buffering", "off") // General proxy - - w.Header().Set("Access-Control-Allow-Origin", "*") - w.Header().Set("Access-Control-Allow-Headers", "Range") - w.Header().Set("Access-Control-Expose-Headers", "Content-Length, Content-Range") - w.WriteHeader(resp.StatusCode) }
-
- -
-
Arr Path Type -
- -
-
${job.id.substring(0, 8)} ${job.arrs.join(', ')} ${formattedDate} -
- -
-
${item.arr} ${item.path} ${item.type} ${formatFileSize(item.size)}