18 Commits

Author SHA1 Message Date
Mukhtar Akere
bba90cb89a Finalize v0.4.0
Some checks failed
Release / goreleaser (push) Failing after 2m18s
2025-01-25 11:40:30 +01:00
Mukhtar Akere
fc5c6e2869 Finalize v0.4.0 2025-01-24 23:33:08 +01:00
Mukhtar Akere
66f4965ec8 Fix arr storage 2025-01-23 03:11:24 +01:00
Mukhtar Akere
dc16f0d8a1 Fix arr storage 2025-01-23 03:06:11 +01:00
Mukhtar Akere
0741ddf999 Fix versionning 2025-01-23 02:31:55 +01:00
Mukhtar Akere
2ae4bd571e Fix Log file permissions 2025-01-23 02:11:37 +01:00
Mukhtar Akere
0b1c1af8b8 Fix Repair checks. Handle false positives 2025-01-23 01:35:28 +01:00
Mukhtar Akere
74a55149fc Features:
- Add file logging, server
- Fix minor repair bug
- Wrap up beta
2025-01-23 00:27:12 +01:00
Mukhtar Akere
cfb0051b04 Fix AllDebrid symlink bug 2025-01-19 08:56:52 +01:00
Mukhtar Akere
a986c4b5d0 Hotfix ui templates 2025-01-18 04:13:56 +01:00
Mukhtar Akere
3841b7751e Changelog v0.4.0 2025-01-18 03:49:05 +01:00
Mukhtar Akere
ea73572557 - Add shinning UI
- Revamp deployment process
- Fix Alldebrid file node bug
2025-01-13 20:18:59 +01:00
Mukhtar Akere
7cb41a0e8b Fix getting mount path 2025-01-11 23:10:05 +01:00
Mukhtar Akere
451c17cdf7 Merge branch 'beta' of github.com:sirrobot01/debrid-blackhole into beta 2025-01-11 23:09:00 +01:00
Mukhtar Akere
c39eebea0d [BETA] Changelog 0.3.4 (#14)
- Add repair worker
- Fix AllDebrid bugs with single movies/series
- Fix Torbox bugs
2025-01-11 07:21:49 -08:00
Mukhtar Akere
03c9657945 Add repair worker 2025-01-09 19:44:38 +01:00
Mukhtar Akere
28e5342c66 Add AllDebrid support 2025-01-01 17:12:18 +01:00
Mukhtar Akere
eeb3a31b05 Fix rar files, remove srt 2024-12-27 22:30:36 +01:00
62 changed files with 3091 additions and 1168 deletions

View File

@@ -5,7 +5,7 @@ tmp_dir = "tmp"
[build] [build]
args_bin = [] args_bin = []
bin = "./tmp/main" bin = "./tmp/main"
cmd = "go build -o ./tmp/main ." cmd = "bash -c 'VERSION=$(git describe --tags --always --abbrev=0 2>/dev/null || echo dev) && go build -ldflags \"-X github.com/sirrobot01/debrid-blackhole/pkg/version.Version=$VERSION -X github.com/sirrobot01/debrid-blackhole/pkg/version.Channel=beta\" -o ./tmp/main .'"
delay = 1000 delay = 1000
exclude_dir = ["assets", "tmp", "vendor", "testdata", "data"] exclude_dir = ["assets", "tmp", "vendor", "testdata", "data"]
exclude_file = [] exclude_file = []
@@ -49,4 +49,4 @@ tmp_dir = "tmp"
[screen] [screen]
clear_on_rebuild = false clear_on_rebuild = false
keep_scroll = true keep_scroll = true

View File

@@ -6,4 +6,4 @@ docker-compose.yml
**/.idea/ **/.idea/
*.magnet *.magnet
**.torrent **.torrent
torrents.json

View File

@@ -21,6 +21,15 @@ jobs:
LATEST_TAG=$(git tag | sort -V | tail -n1) LATEST_TAG=$(git tag | sort -V | tail -n1)
echo "latest_tag=${LATEST_TAG}" >> $GITHUB_ENV echo "latest_tag=${LATEST_TAG}" >> $GITHUB_ENV
- name: Set channel
id: set_channel
run: |
if [[ ${{ github.ref }} == 'refs/heads/beta' ]]; then
echo "CHANNEL=beta" >> $GITHUB_ENV
else
echo "CHANNEL=stable" >> $GITHUB_ENV
fi
- name: Set up QEMU - name: Set up QEMU
uses: docker/setup-qemu-action@v3 uses: docker/setup-qemu-action@v3
@@ -41,6 +50,9 @@ jobs:
platforms: linux/amd64,linux/arm64,linux/arm/v7 platforms: linux/amd64,linux/arm64,linux/arm/v7
push: true push: true
tags: cy01/blackhole:beta tags: cy01/blackhole:beta
build-args: |
VERSION=${{ env.latest_tag }}
CHANNEL=${{ env.CHANNEL }}
- name: Build and push for main branch - name: Build and push for main branch
if: github.ref == 'refs/heads/main' if: github.ref == 'refs/heads/main'
@@ -51,4 +63,7 @@ jobs:
push: true push: true
tags: | tags: |
cy01/blackhole:latest cy01/blackhole:latest
cy01/blackhole:${{ env.latest_tag }} cy01/blackhole:${{ env.latest_tag }}
build-args: |
VERSION=${{ env.latest_tag }}
CHANNEL=${{ env.CHANNEL }}

View File

@@ -21,6 +21,14 @@ jobs:
uses: actions/setup-go@v4 uses: actions/setup-go@v4
with: with:
go-version: '1.22' go-version: '1.22'
- name: Set Release Channel
run: |
if [[ ${{ github.ref }} == refs/tags/beta* ]]; then
echo "RELEASE_CHANNEL=beta" >> $GITHUB_ENV
else
echo "RELEASE_CHANNEL=stable" >> $GITHUB_ENV
fi
- name: Run GoReleaser - name: Run GoReleaser
uses: goreleaser/goreleaser-action@v5 uses: goreleaser/goreleaser-action@v5

2
.gitignore vendored
View File

@@ -10,3 +10,5 @@ docker-compose.yml
*.log.* *.log.*
dist/ dist/
tmp/** tmp/**
torrents.json
logs/**

View File

@@ -2,7 +2,6 @@ version: 1
before: before:
hooks: hooks:
# You may remove this if you don't use go modules.
- go mod tidy - go mod tidy
builds: builds:
@@ -16,6 +15,10 @@ builds:
- amd64 - amd64
- arm - arm
- arm64 - arm64
ldflags:
- -s -w
- -X github.com/sirrobot01/debrid-blackhole/pkg/version.Version={{.Version}}
- -X github.com/sirrobot01/debrid-blackhole/pkg/version.Channel={{.Env.RELEASE_CHANNEL}}
archives: archives:

View File

@@ -39,7 +39,7 @@
- Rewrote the whole codebase - Rewrote the whole codebase
#### 0.2.0 ### 0.2.0
- Implement 0.2.0-beta changes - Implement 0.2.0-beta changes
- Removed Blackhole - Removed Blackhole
- Added QbitTorrent API - Added QbitTorrent API
@@ -105,4 +105,26 @@
- Add new /internal/cached endpoint to check if an hash is cached - Add new /internal/cached endpoint to check if an hash is cached
- implement per-debrid local cache - implement per-debrid local cache
- Fix file check for torbox - Fix file check for torbox
- Other minor bug fixes - Other minor bug fixes
#### 0.3.3
- Add AllDebrid Support
- Fix Torbox not downloading uncached torrents
- Fix Rar files being downloaded
#### 0.4.0
- Add support for multiple debrid providers
- A full-fledged UI for adding torrents, repairing files, viewing config and managing torrents
- Fix issues with Alldebrid
- Fix file transversal bug
- Fix files with no parent directory
- Logging
- Add a more robust logging system
- Add logging to a file
- Add logging to the UI
- Qbittorrent
- Add support for tags(creating, deleting, listing)
- Add support for categories(creating, deleting, listing)
- Fix issues with arr sending torrents using a different content type.

View File

@@ -2,6 +2,8 @@ FROM --platform=$BUILDPLATFORM golang:1.22 as builder
ARG TARGETPLATFORM ARG TARGETPLATFORM
ARG BUILDPLATFORM ARG BUILDPLATFORM
ARG VERSION
ARG CHANNEL
# Set destination for COPY # Set destination for COPY
WORKDIR /app WORKDIR /app
@@ -15,14 +17,24 @@ RUN go mod download
ADD . . ADD . .
# Build # Build
RUN CGO_ENABLED=0 GOOS=$(echo $TARGETPLATFORM | cut -d '/' -f1) GOARCH=$(echo $TARGETPLATFORM | cut -d '/' -f2) go build -o /blackhole RUN CGO_ENABLED=0 GOOS=$(echo $TARGETPLATFORM | cut -d '/' -f1) GOARCH=$(echo $TARGETPLATFORM | cut -d '/' -f2) go build -ldflags="-X github.com/sirrobot01/debrid-blackhole/pkg/version.Version=${VERSION} -X github.com/sirrobot01/debrid-blackhole/pkg/version.Channel=${CHANNEL}" -o /blackhole
FROM alpine as logsetup
RUN mkdir -p /logs && \
touch /logs/decypharr.log && \
chown -R 1000:1000 /logs && \
chmod -R 755 /logs && \
chmod 666 /logs/decypharr.log
FROM scratch FROM scratch
COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
COPY --from=builder /blackhole /blackhole COPY --from=builder /blackhole /blackhole
COPY --from=builder /app/README.md /README.md COPY --from=builder /app/README.md /README.md
COPY --from=logsetup /logs /logs
EXPOSE 8181 ENV LOG_PATH=/logs
EXPOSE 8181 8282
VOLUME ["/app"] VOLUME ["/app"]

318
README.md
View File

@@ -1,31 +1,53 @@
### GoBlackHole(with Debrid Proxy Support) ### DecyphArr(with Debrid Proxy Support)
This is a Golang implementation go Torrent QbitTorrent with a **Real Debrid & Torbox Support**. ![ui](doc/main.png)
This is a Golang implementation go Torrent QbitTorrent with a **Multiple Debrid service support**.
### Table of Contents
- [Features](#features)
- [Supported Debrid Providers](#supported-debrid-providers)
- [Installation](#installation)
- [Docker Compose](#docker-compose)
- [Binary](#binary)
- [Usage](#usage)
- [Connecting to Sonarr/Radarr](#connecting-to-sonarrradarr)
- [Sample Config](#sample-config)
- [Config Notes](#config-notes)
- [Log Level](#log-level)
- [Max Cache Size](#max-cache-size)
- [Debrid Config](#debrid-config)
- [Proxy Config](#proxy-config)
- [Qbittorrent Config](#qbittorrent-config)
- [Arrs Config](#arrs-config)
- [Proxy](#proxy)
- [Repair Worker](#repair-worker)
- [Changelog](#changelog)
- [TODO](#todo)
### Features ### Features
- Mock Qbittorent API that supports the Arrs(Sonarr, Radarr, etc) - Mock Qbittorent API that supports the Arrs(Sonarr, Radarr, etc)
- A Full-fledged UI for managing torrents
- Proxy support for the Arrs - Proxy support for the Arrs
- Real Debrid Support - Real Debrid Support
- Torbox Support - Torbox Support
- Debrid Link Support - Debrid Link Support
- Multi-Debrid Providers support - Multi-Debrid Providers support
- UI for adding torrents directly to *arrs - Repair Worker for missing files (**NEW**)
The proxy is useful in filtering out un-cached Real Debrid torrents The proxy is useful in filtering out un-cached Real Debrid torrents
### Supported Debrid Providers ### Supported Debrid Providers
- Real Debrid - [Real Debrid](https://real-debrid.com)
- Torbox - [Torbox](https://torbox.app)
- Debrid Link - [Debrid Link](https://debrid-link.com)
- [All Debrid](https://alldebrid.com)
### Changelog
- View the [CHANGELOG.md](CHANGELOG.md) for the latest changes
#### Installation ### Installation
##### Docker Compose ##### Docker Compose
```yaml ```yaml
version: '3.7' version: '3.7'
@@ -38,9 +60,7 @@ services:
- "8181:8181" # Proxy - "8181:8181" # Proxy
user: "1000:1000" user: "1000:1000"
volumes: volumes:
- ./logs:/app/logs - /mnt/:/mnt
- ~/plex/media:/media
- ~/plex/media/symlinks/:/media/symlinks/
- ~/plex/configs/blackhole/config.json:/app/config.json # Config file, see below - ~/plex/configs/blackhole/config.json:/app/config.json # Config file, see below
environment: environment:
- PUID=1000 - PUID=1000
@@ -61,107 +81,14 @@ Download the binary from the releases page and run it with the config file.
./blackhole --config /path/to/config.json ./blackhole --config /path/to/config.json
``` ```
#### Config ### Usage
```json - The UI is available at `http://localhost:8282`
{ - Setup the config.json file. Scroll down for the sample config file
"debrids": [ - Setup docker compose/ binary with the config file
{ - Start the service
"name": "torbox", - Connect to Sonarr/Radarr/Lidarr
"host": "https://api.torbox.app/v1",
"api_key": "torbox_api_key",
"folder": "data/torbox/torrents/",
"rate_limit": "250/minute",
"download_uncached": false,
"check_cached": true
},
{
"name": "realdebrid",
"host": "https://api.real-debrid.com/rest/1.0",
"api_key": "realdebrid_key",
"folder": "data/realdebrid/torrents/",
"rate_limit": "250/minute",
"download_uncached": false,
"check_cached": false
},
{
"name": "debridlink",
"host": "https://debrid-link.com/api/v2",
"api_key": "debridlink_key",
"folder": "data/debridlink/torrents/",
"rate_limit": "250/minute",
"download_uncached": false,
"check_cached": false
}
],
"proxy": {
"enabled": true,
"port": "8100",
"debug": false,
"username": "username",
"password": "password",
"cached_only": true
},
"max_cache_size": 1000,
"qbittorrent": {
"port": "8282",
"download_folder": "/media/symlinks/",
"categories": ["sonarr", "radarr"],
"refresh_interval": 5
}
}
```
#### Config Notes #### Connecting to Sonarr/Radarr
##### Max Cache Size
- The `max_cache_size` key is used to set the maximum number of infohashes that can be stored in the availability cache. This is used to prevent round trip to the debrid provider when using the proxy/Qbittorrent
- The default value is `1000`
- The cache is stored in memory and is not persisted on restart
##### Debrid Config
- The `debrids` key is an array of debrid providers
- The `name` key is the name of the debrid provider
- The `host` key is the API endpoint of the debrid provider
- The `api_key` key is the API key of the debrid provider
- The `folder` key is the folder where the torrents will be downloaded. e.g `data/realdebrid/torrents/`
- The `rate_limit` key is the rate limit of the debrid provider(null by default)
- The `download_uncached` bool key is used to download uncached torrents(disabled by default)
- The `check_cached` bool key is used to check if the torrent is cached(disabled by default)
##### Proxy Config
- The `enabled` key is used to enable the proxy
- The `port` key is the port the proxy will listen on
- The `debug` key is used to enable debug logs
- The `username` and `password` keys are used for basic authentication
- The `cached_only` means only cached torrents will be returned
##### Qbittorrent Config
- The `port` key is the port the qBittorrent will listen on
- The `download_folder` is the folder where the torrents will be downloaded. e.g `/media/symlinks/`
- The `categories` key is used to filter out torrents based on the category. e.g `sonarr`, `radarr`
- The `refresh_interval` key is used to set the interval in minutes to refresh the Arrs Monitored Downloads(it's in seconds). The default value is `5` seconds
### Proxy
The proxy is useful in filtering out un-cached Real Debrid torrents.
The proxy is a simple HTTP proxy that requires basic authentication. The proxy can be enabled by setting the `proxy.enabled` to `true` in the config file.
The proxy listens on the port `8181` by default. The username and password can be set in the config file.
Setting Up Proxy in Arr
- Sonarr/Radarr
- Settings -> General -> Use Proxy
- Hostname: `localhost` # or the IP of the server
- Port: `8181` # or the port set in the config file
- Username: `username` # or the username set in the config file
- Password: `password` # or the password set in the config file
- Bypass Proxy for Local Addresses -> `No`
### Qbittorrent
The qBittorrent is a mock qBittorrent API that supports the Arrs(Sonarr, Radarr, etc).
Setting Up Qbittorrent in Arr
- Sonarr/Radarr - Sonarr/Radarr
- Settings -> Download Client -> Add Client -> qBittorrent - Settings -> Download Client -> Add Client -> qBittorrent
@@ -175,18 +102,163 @@ Setting Up Qbittorrent in Arr
- Test - Test
- Save - Save
### UI for adding torrents #### Sample Config
![UI](./doc/ui.png) This is the default config file. You can create a `config.json` file in the root directory of the project or mount it in the docker-compose file.
```json
{
"debrids": [
{
"name": "torbox",
"host": "https://api.torbox.app/v1",
"api_key": "torbox_api_key",
"folder": "/mnt/remote/torbox/torrents/",
"rate_limit": "250/minute",
"download_uncached": false,
"check_cached": true
},
{
"name": "realdebrid",
"host": "https://api.real-debrid.com/rest/1.0",
"api_key": "realdebrid_key",
"folder": "/mnt/remote/realdebrid/__all__/",
"rate_limit": "250/minute",
"download_uncached": false,
"check_cached": false
},
{
"name": "debridlink",
"host": "https://debrid-link.com/api/v2",
"api_key": "debridlink_key",
"folder": "/mnt/remote/debridlink/torrents/",
"rate_limit": "250/minute",
"download_uncached": false,
"check_cached": false
},
{
"name": "alldebrid",
"host": "http://api.alldebrid.com/v4.1",
"api_key": "alldebrid_key",
"folder": "/mnt/remote/alldebrid/magnet/",
"rate_limit": "600/minute",
"download_uncached": false,
"check_cached": false
}
],
"proxy": {
"enabled": true,
"port": "8100",
"log_level": "info",
"username": "username",
"password": "password",
"cached_only": true
},
"max_cache_size": 1000,
"qbittorrent": {
"port": "8282",
"download_folder": "/mnt/symlinks/",
"categories": ["sonarr", "radarr"],
"refresh_interval": 5,
"log_level": "info"
},
"arrs": [
{
"name": "sonarr",
"host": "http://host:8989",
"token": "arr_key"
},
{
"name": "radarr",
"host": "http://host:7878",
"token": "arr_key"
}
],
"repair": {
"enabled": false,
"interval": "12h",
"run_on_start": false
},
"log_level": "info"
}
```
#### Config Notes
##### Log Level
- The `log_level` key is used to set the log level of the application. The default value is `info`
- The log level can be set to `debug`, `info`, `warn`, `error`
##### Max Cache Size
- The `max_cache_size` key is used to set the maximum number of infohashes that can be stored in the availability cache. This is used to prevent round trip to the debrid provider when using the proxy/Qbittorrent
- The default value is `1000`
- The cache is stored in memory and is not persisted on restart
##### Debrid Config
- The `debrids` key is an array of debrid providers
- The `name` key is the name of the debrid provider
- The `host` key is the API endpoint of the debrid provider
- The `api_key` key is the API key of the debrid provider
- The `folder` key is the folder where your debrid folder is mounted(webdav, rclone, zurg etc). e.g `data/realdebrid/torrents/`, `/media/remote/alldebrid/magnets/`
- The `rate_limit` key is the rate limit of the debrid provider(null by default)
- The `download_uncached` bool key is used to download uncached torrents(disabled by default)
- The `check_cached` bool key is used to check if the torrent is cached(disabled by default)
##### Repair Config (**NEW**)
The `repair` key is used to enable the repair worker
- The `enabled` key is used to enable the repair worker
- The `interval` key is the interval in either minutes, seconds, hours, days. Use any of this format, e.g 12:00, 5:00, 1h, 1d, 1m, 1s.
- The `run_on_start` key is used to run the repair worker on start
##### Proxy Config
- The `enabled` key is used to enable the proxy
- The `port` key is the port the proxy will listen on
- The `log_level` key is used to set the log level of the proxy. The default value is `info`
- The `username` and `password` keys are used for basic authentication
- The `cached_only` means only cached torrents will be returned
##### Qbittorrent Config
- The `port` key is the port the qBittorrent will listen on
- The `download_folder` is the folder where the torrents will be downloaded. e.g `/media/symlinks/`
- The `categories` key is used to filter out torrents based on the category. e.g `sonarr`, `radarr`
- The `refresh_interval` key is used to set the interval in minutes to refresh the Arrs Monitored Downloads(it's in seconds). The default value is `5` seconds
##### Arrs Config
This is an array of Arrs(Sonarr, Radarr, etc) that will be used to download the torrents. This is not required if you already set up the Qbittorrent in the Arrs with the host, token.
This is particularly useful if you want to use the Repair tool without using Qbittorent
- The `name` key is the name of the Arr/ Category
- The `host` key is the host of the Arr
- The `token` key is the API token of the Arr
### Proxy
**Note**: Proxy has stopped working for Real Debrid, Debrid Link, and All Debrid. It still works for Torbox. This is due to the changes in the API of the Debrid Providers.
The proxy is useful in filtering out un-cached Debrid torrents.
The proxy is a simple HTTP proxy that requires basic authentication. The proxy can be enabled by setting the `proxy.enabled` to `true` in the config file.
The proxy listens on the port `8181` by default. The username and password can be set in the config file.
### Repair Worker
The repair worker is a simple worker that checks for missing files in the Arrs(Sonarr, Radarr, etc). It's particularly useful for files either deleted by the Debrid provider or files with bad symlinks.
- Search for broken symlinks/files
- Search for missing files
- Search for deleted/unreadable files
### Changelog
- View the [CHANGELOG.md](CHANGELOG.md) for the latest changes
The UI is a simple web interface that allows you to add torrents directly to the Arrs(Sonarr, Radarr, etc)
### TODO ### TODO
- [ ] A proper name!!!! - [x] A proper name!!!!
- [ ] Debrid - [x] Debrid
- [ ] Add more Debrid Providers - [x] Add more Debrid Providers
- [ ] Qbittorrent - [x] Qbittorrent
- [ ] Add more Qbittorrent features - [x] Add more Qbittorrent features
- [ ] Persist torrents on restart/server crash - [x] Persist torrents on restart/server crash
- [ ] Add tests - [ ] Add tests

View File

@@ -3,10 +3,13 @@ package cmd
import ( import (
"cmp" "cmp"
"context" "context"
"goBlack/common" "github.com/sirrobot01/debrid-blackhole/common"
"goBlack/pkg/debrid" "github.com/sirrobot01/debrid-blackhole/pkg/arr"
"goBlack/pkg/proxy" "github.com/sirrobot01/debrid-blackhole/pkg/debrid"
"goBlack/pkg/qbit" "github.com/sirrobot01/debrid-blackhole/pkg/proxy"
"github.com/sirrobot01/debrid-blackhole/pkg/qbit"
"github.com/sirrobot01/debrid-blackhole/pkg/repair"
"log"
"sync" "sync"
) )
@@ -14,6 +17,7 @@ func Start(ctx context.Context, config *common.Config) error {
maxCacheSize := cmp.Or(config.MaxCacheSize, 1000) maxCacheSize := cmp.Or(config.MaxCacheSize, 1000)
deb := debrid.NewDebrid(config.Debrids, maxCacheSize) deb := debrid.NewDebrid(config.Debrids, maxCacheSize)
arrs := arr.NewStorage(config.Arrs)
var wg sync.WaitGroup var wg sync.WaitGroup
errChan := make(chan error, 2) errChan := make(chan error, 2)
@@ -31,12 +35,22 @@ func Start(ctx context.Context, config *common.Config) error {
wg.Add(1) wg.Add(1)
go func() { go func() {
defer wg.Done() defer wg.Done()
if err := qbit.Start(ctx, config, deb); err != nil { if err := qbit.Start(ctx, config, deb, arrs); err != nil {
errChan <- err errChan <- err
} }
}() }()
} }
if config.Repair.Enabled {
wg.Add(1)
go func() {
defer wg.Done()
if err := repair.Start(ctx, config, arrs); err != nil {
log.Printf("Error during repair: %v", err)
}
}()
}
go func() { go func() {
wg.Wait() wg.Wait()
close(errChan) close(errChan)

View File

@@ -22,7 +22,7 @@ type DebridConfig struct {
type ProxyConfig struct { type ProxyConfig struct {
Port string `json:"port"` Port string `json:"port"`
Enabled bool `json:"enabled"` Enabled bool `json:"enabled"`
Debug bool `json:"debug"` LogLevel string `json:"log_level"`
Username string `json:"username"` Username string `json:"username"`
Password string `json:"password"` Password string `json:"password"`
CachedOnly *bool `json:"cached_only"` CachedOnly *bool `json:"cached_only"`
@@ -32,18 +32,33 @@ type QBitTorrentConfig struct {
Username string `json:"username"` Username string `json:"username"`
Password string `json:"password"` Password string `json:"password"`
Port string `json:"port"` Port string `json:"port"`
Debug bool `json:"debug"` LogLevel string `json:"log_level"`
DownloadFolder string `json:"download_folder"` DownloadFolder string `json:"download_folder"`
Categories []string `json:"categories"` Categories []string `json:"categories"`
RefreshInterval int `json:"refresh_interval"` RefreshInterval int `json:"refresh_interval"`
} }
type ArrConfig struct {
Name string `json:"name"`
Host string `json:"host"`
Token string `json:"token"`
}
type RepairConfig struct {
Enabled bool `json:"enabled"`
Interval string `json:"interval"`
RunOnStart bool `json:"run_on_start"`
}
type Config struct { type Config struct {
LogLevel string `json:"log_level"`
Debrid DebridConfig `json:"debrid"` Debrid DebridConfig `json:"debrid"`
Debrids []DebridConfig `json:"debrids"` Debrids []DebridConfig `json:"debrids"`
Proxy ProxyConfig `json:"proxy"` Proxy ProxyConfig `json:"proxy"`
MaxCacheSize int `json:"max_cache_size"` MaxCacheSize int `json:"max_cache_size"`
QBitTorrent QBitTorrentConfig `json:"qbittorrent"` QBitTorrent QBitTorrentConfig `json:"qbittorrent"`
Arrs []ArrConfig `json:"arrs"`
Repair RepairConfig `json:"repair"`
} }
func validateDebrids(debrids []DebridConfig) error { func validateDebrids(debrids []DebridConfig) error {
@@ -153,3 +168,5 @@ func LoadConfig(path string) (*Config, error) {
return config, nil return config, nil
} }
var CONFIG *Config = nil

View File

@@ -2,13 +2,79 @@ package common
import ( import (
"fmt" "fmt"
"log" "github.com/rs/zerolog"
"gopkg.in/natefinch/lumberjack.v2"
"os" "os"
"path/filepath"
"strings"
) )
func NewLogger(prefix string, output *os.File) *log.Logger { func GetLogPath() string {
f := fmt.Sprintf("[%s] ", prefix) logsDir := os.Getenv("LOG_PATH")
return log.New(output, f, log.LstdFlags) if logsDir == "" {
// Create the logs directory if it doesn't exist
logsDir = "logs"
}
if err := os.MkdirAll(logsDir, 0755); err != nil {
panic(fmt.Sprintf("Failed to create logs directory: %v", err))
}
return filepath.Join(logsDir, "decypharr.log")
} }
var Logger = NewLogger("Main", os.Stdout) func NewLogger(prefix string, level string, output *os.File) zerolog.Logger {
rotatingLogFile := &lumberjack.Logger{
Filename: GetLogPath(),
MaxSize: 10,
MaxBackups: 2,
MaxAge: 28,
Compress: true,
}
consoleWriter := zerolog.ConsoleWriter{
Out: output,
TimeFormat: "2006-01-02 15:04:05",
NoColor: false, // Set to true if you don't want colors
FormatLevel: func(i interface{}) string {
return strings.ToUpper(fmt.Sprintf("| %-6s|", i))
},
FormatMessage: func(i interface{}) string {
return fmt.Sprintf("[%s] %v", prefix, i)
},
}
fileWriter := zerolog.ConsoleWriter{
Out: rotatingLogFile,
TimeFormat: "2006-01-02 15:04:05",
NoColor: true, // No colors in file output
FormatLevel: func(i interface{}) string {
return strings.ToUpper(fmt.Sprintf("| %-6s|", i))
},
FormatMessage: func(i interface{}) string {
return fmt.Sprintf("[%s] %v", prefix, i)
},
}
multi := zerolog.MultiLevelWriter(consoleWriter, fileWriter)
logger := zerolog.New(multi).
With().
Timestamp().
Logger().
Level(zerolog.InfoLevel)
// Set the log level
switch level {
case "debug":
logger = logger.Level(zerolog.DebugLevel)
case "info":
logger = logger.Level(zerolog.InfoLevel)
case "warn":
logger = logger.Level(zerolog.WarnLevel)
case "error":
logger = logger.Level(zerolog.ErrorLevel)
}
return logger
}

View File

@@ -13,7 +13,6 @@ import (
"net/http" "net/http"
"net/url" "net/url"
"os" "os"
"path"
"path/filepath" "path/filepath"
"regexp" "regexp"
"strings" "strings"
@@ -251,20 +250,39 @@ func GetInfohashFromURL(url string) (string, error) {
} }
func JoinURL(base string, paths ...string) (string, error) { func JoinURL(base string, paths ...string) (string, error) {
// Parse the base URL // Split the last path component to separate query parameters
u, err := url.Parse(base) lastPath := paths[len(paths)-1]
parts := strings.Split(lastPath, "?")
paths[len(paths)-1] = parts[0]
joined, err := url.JoinPath(base, paths...)
if err != nil { if err != nil {
return "", err return "", err
} }
// Join the path components // Add back query parameters if they exist
u.Path = path.Join(u.Path, path.Join(paths...)) if len(parts) > 1 {
return joined + "?" + parts[1], nil
}
// Return the resulting URL as a string return joined, nil
return u.String(), nil
} }
func FileReady(path string) bool { func FileReady(path string) bool {
_, err := os.Stat(path) _, err := os.Stat(path)
return !os.IsNotExist(err) // Returns true if the file exists return !os.IsNotExist(err) // Returns true if the file exists
} }
func Remove[S ~[]E, E comparable](s S, values ...E) S {
result := make(S, 0, len(s))
outer:
for _, item := range s {
for _, v := range values {
if item == v {
continue outer
}
}
result = append(result, item)
}
return result
}

BIN
doc/download.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 185 KiB

BIN
doc/main.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 156 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 124 KiB

7
go.mod
View File

@@ -1,4 +1,4 @@
module goBlack module github.com/sirrobot01/debrid-blackhole
go 1.22 go 1.22
@@ -22,7 +22,12 @@ require (
github.com/google/go-cmp v0.6.0 // indirect github.com/google/go-cmp v0.6.0 // indirect
github.com/huandu/xstrings v1.3.2 // indirect github.com/huandu/xstrings v1.3.2 // indirect
github.com/klauspost/compress v1.17.9 // indirect github.com/klauspost/compress v1.17.9 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.19 // indirect
github.com/rs/zerolog v1.33.0 // indirect
github.com/valyala/bytebufferpool v1.0.0 // indirect github.com/valyala/bytebufferpool v1.0.0 // indirect
golang.org/x/net v0.27.0 // indirect golang.org/x/net v0.27.0 // indirect
golang.org/x/sys v0.22.0 // indirect
golang.org/x/text v0.16.0 // indirect golang.org/x/text v0.16.0 // indirect
gopkg.in/natefinch/lumberjack.v2 v2.2.1 // indirect
) )

17
go.sum
View File

@@ -50,6 +50,7 @@ github.com/cavaliergopher/grab/v3 v3.0.1 h1:4z7TkBfmPjmLAAmkkAZNX/6QJ1nNFdv3SdIH
github.com/cavaliergopher/grab/v3 v3.0.1/go.mod h1:1U/KNnD+Ft6JJiYoYBAimKH2XrYptb8Kl3DFGmsjpq4= github.com/cavaliergopher/grab/v3 v3.0.1/go.mod h1:1U/KNnD+Ft6JJiYoYBAimKH2XrYptb8Kl3DFGmsjpq4=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
@@ -79,6 +80,7 @@ github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk= github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
@@ -136,6 +138,11 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE=
github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA=
github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA=
github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@@ -175,6 +182,9 @@ github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqn
github.com/rogpeppe/go-charset v0.0.0-20180617210344-2471d30d28b4/go.mod h1:qgYeAmZ5ZIpBWTGllZSQnw97Dj+woV0toclVaRGI8pc= github.com/rogpeppe/go-charset v0.0.0-20180617210344-2471d30d28b4/go.mod h1:qgYeAmZ5ZIpBWTGllZSQnw97Dj+woV0toclVaRGI8pc=
github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8= github.com/rogpeppe/go-internal v1.9.0 h1:73kH8U+JUqXU8lRuOHeVHaa/SZPifC7BkcraZVejAe8=
github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs= github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg=
github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8=
github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss=
github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8= github.com/ryszard/goskiplist v0.0.0-20150312221310-2dfbae5fcf46/go.mod h1:uAQ5PCi+MFsC7HjREoAz1BU+Mq60+05gifQSsHSDG/8=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
@@ -241,6 +251,11 @@ golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.22.0 h1:RI27ohtqKCnwULzJLqkv897zojh5/DwS/ENaMzUOaWI=
golang.org/x/sys v0.22.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4=
@@ -273,6 +288,8 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/natefinch/lumberjack.v2 v2.2.1 h1:bBRl1b0OH9s/DuPhuXpNl+VtCaJXFZ5/uEFST95x9zc=
gopkg.in/natefinch/lumberjack.v2 v2.2.1/go.mod h1:YD8tP3GAjkrDg1eZH7EGmyESg/lsYskCTPBJVb9jqSc=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=

View File

@@ -3,8 +3,8 @@ package main
import ( import (
"context" "context"
"flag" "flag"
"goBlack/cmd" "github.com/sirrobot01/debrid-blackhole/cmd"
"goBlack/common" "github.com/sirrobot01/debrid-blackhole/common"
"log" "log"
) )
@@ -15,6 +15,7 @@ func main() {
// Load the config file // Load the config file
conf, err := common.LoadConfig(configPath) conf, err := common.LoadConfig(configPath)
common.CONFIG = conf
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }

View File

@@ -3,10 +3,8 @@ package arr
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"goBlack/common" "github.com/sirrobot01/debrid-blackhole/common"
"log"
"net/http" "net/http"
"os"
"strings" "strings"
"sync" "sync"
) )
@@ -23,7 +21,6 @@ const (
var ( var (
client *common.RLHTTPClient = common.NewRLHTTPClient(nil, nil) client *common.RLHTTPClient = common.NewRLHTTPClient(nil, nil)
logger *log.Logger = common.NewLogger("QBit", os.Stdout)
) )
type Arr struct { type Arr struct {
@@ -87,11 +84,12 @@ func inferType(host, name string) Type {
} }
} }
func NewStorage() *Storage { func NewStorage(cfg []common.ArrConfig) *Storage {
arrs := make(map[string]*Arr) arrs := make(map[string]*Arr)
//for name, arrCfg := range cfg { for _, a := range cfg {
// arrs[name] = NewArr(name, arrCfg.Host, arrCfg.Token, inferType(arrCfg.Host, name)) name := a.Name
//} arrs[name] = NewArr(name, a.Host, a.Token, inferType(a.Host, name))
}
return &Storage{ return &Storage{
Arrs: arrs, Arrs: arrs,
} }
@@ -100,7 +98,10 @@ func NewStorage() *Storage {
func (as *Storage) AddOrUpdate(arr *Arr) { func (as *Storage) AddOrUpdate(arr *Arr) {
as.mu.Lock() as.mu.Lock()
defer as.mu.Unlock() defer as.mu.Unlock()
as.Arrs[arr.Host] = arr if arr.Name == "" {
return
}
as.Arrs[arr.Name] = arr
} }
func (as *Storage) Get(name string) *Arr { func (as *Storage) Get(name string) *Arr {
@@ -114,7 +115,9 @@ func (as *Storage) GetAll() []*Arr {
defer as.mu.RUnlock() defer as.mu.RUnlock()
arrs := make([]*Arr, 0, len(as.Arrs)) arrs := make([]*Arr, 0, len(as.Arrs))
for _, arr := range as.Arrs { for _, arr := range as.Arrs {
arrs = append(arrs, arr) if arr.Host != "" && arr.Token != "" {
arrs = append(arrs, arr)
}
} }
return arrs return arrs
} }

View File

@@ -6,24 +6,98 @@ import (
"net/http" "net/http"
) )
type ContentRequest struct { func (a *Arr) GetMedia(tvId string) ([]Content, error) {
ID string `json:"id"` // Get series
Title string `json:"name"` resp, err := a.Request(http.MethodGet, fmt.Sprintf("api/v3/series?tvdbId=%s", tvId), nil)
Arr string `json:"arr"` if err != nil {
return nil, err
}
if resp.StatusCode == http.StatusNotFound {
// This is Radarr
repairLogger.Info().Msg("Radarr detected")
a.Type = Radarr
return GetMovies(a, tvId)
}
a.Type = Sonarr
defer resp.Body.Close()
type series struct {
Title string `json:"title"`
Id int `json:"id"`
}
var data []series
if err = json.NewDecoder(resp.Body).Decode(&data); err != nil {
return nil, err
}
// Get series files
contents := make([]Content, 0)
for _, d := range data {
resp, err = a.Request(http.MethodGet, fmt.Sprintf("api/v3/episodefile?seriesId=%d", d.Id), nil)
if err != nil {
continue
}
defer resp.Body.Close()
var seriesFiles []seriesFile
if err = json.NewDecoder(resp.Body).Decode(&seriesFiles); err != nil {
continue
}
ct := Content{
Title: d.Title,
Id: d.Id,
}
files := make([]contentFile, 0)
for _, file := range seriesFiles {
files = append(files, contentFile{
Id: file.Id,
Path: file.Path,
})
}
ct.Files = files
contents = append(contents, ct)
}
return contents, nil
} }
func (a *Arr) GetContents() *ContentRequest { func GetMovies(a *Arr, tvId string) ([]Content, error) {
resp, err := a.Request(http.MethodGet, "api/v3/series", nil) resp, err := a.Request(http.MethodGet, fmt.Sprintf("api/v3/movie?tmdbId=%s", tvId), nil)
if err != nil { if err != nil {
return nil return nil, err
} }
defer resp.Body.Close() defer resp.Body.Close()
var data *ContentRequest var movies []Movie
if err = json.NewDecoder(resp.Body).Decode(&data); err != nil { if err = json.NewDecoder(resp.Body).Decode(&movies); err != nil {
fmt.Printf("Error: %v\n", err) return nil, err
return nil
} }
fmt.Printf("Data: %v\n", data) contents := make([]Content, 0)
//data.Arr = a.Name for _, movie := range movies {
return data ct := Content{
Title: movie.Title,
Id: movie.Id,
}
files := make([]contentFile, 0)
files = append(files, contentFile{
Id: movie.MovieFile.Id,
Path: movie.MovieFile.Path,
})
ct.Files = files
contents = append(contents, ct)
}
return contents, nil
}
func (a *Arr) DeleteFile(id int) error {
switch a.Type {
case Sonarr:
_, err := a.Request(http.MethodDelete, fmt.Sprintf("api/v3/episodefile/%d", id), nil)
if err != nil {
return err
}
case Radarr:
_, err := a.Request(http.MethodDelete, fmt.Sprintf("api/v3/moviefile/%d", id), nil)
if err != nil {
return err
}
default:
return fmt.Errorf("unknown arr type: %s", a.Type)
}
return nil
} }

View File

@@ -3,7 +3,7 @@ package arr
import ( import (
"cmp" "cmp"
"fmt" "fmt"
"goBlack/common" "github.com/sirrobot01/debrid-blackhole/common"
"net/http" "net/http"
"strconv" "strconv"
"strings" "strings"

344
pkg/arr/repair.go Normal file
View File

@@ -0,0 +1,344 @@
package arr
import (
"github.com/rs/zerolog"
"github.com/sirrobot01/debrid-blackhole/common"
"io"
"net/http"
"os"
"path/filepath"
"runtime"
"strconv"
"sync"
)
// repairLogger is the lazily-initialized logger shared by all repair routines.
// NOTE(review): initialization in getLogger is not synchronized; if repair can
// be started from multiple goroutines concurrently, the first calls may race.
// Confirm single-goroutine startup or guard with sync.Once.
var repairLogger *zerolog.Logger

// getLogger returns the shared "repair" logger, creating it on first use with
// the log level taken from the global config.
func getLogger() *zerolog.Logger {
	if repairLogger == nil {
		logger := common.NewLogger("repair", common.CONFIG.LogLevel, os.Stdout)
		repairLogger = &logger
	}
	return repairLogger
}
// SearchMissing asks the arr to re-search for the given item (a Sonarr
// "SeriesSearch" for a series id, or a Radarr "MoviesSearch" for a movie id),
// so media whose broken files were deleted during repair gets re-acquired.
// Failures are logged, not returned, matching the best-effort repair flow.
func (a *Arr) SearchMissing(id int) {
	var payload interface{}
	switch a.Type {
	case Sonarr:
		payload = struct {
			Name     string `json:"name"`
			SeriesId int    `json:"seriesId"`
		}{
			Name:     "SeriesSearch",
			SeriesId: id,
		}
	case Radarr:
		payload = struct {
			Name    string `json:"name"`
			MovieId int    `json:"movieId"`
		}{
			Name:    "MoviesSearch",
			MovieId: id,
		}
	default:
		getLogger().Info().Msgf("Unknown arr type: %s", a.Type)
		return
	}
	resp, err := a.Request(http.MethodPost, "api/v3/command", payload)
	if err != nil {
		getLogger().Info().Msgf("Failed to search missing: %v", err)
		return
	}
	// Close the response body so the underlying connection can be reused
	// (the original leaked it on every call).
	defer resp.Body.Close()
	// Any 2xx status counts as accepted by the arr.
	if statusOk := strconv.Itoa(resp.StatusCode)[0] == '2'; !statusOk {
		getLogger().Info().Msgf("Failed to search missing: %s", resp.Status)
		return
	}
}
// Repair fetches the arr's media for the given tmdb/tvdb id, removes file
// records whose on-disk data is missing or broken, and triggers a search so
// the arr re-downloads the affected items.
func (a *Arr) Repair(tmdbId string) error {
	log := getLogger()
	log.Info().Msgf("Starting repair for %s", a.Name)
	media, err := a.GetMedia(tmdbId)
	if err != nil {
		log.Info().Msgf("Failed to get %s media: %v", a.Type, err)
		return err
	}
	log.Info().Msgf("Found %d %s media", len(media), a.Type)
	brokenMedia := a.processMedia(media)
	log.Info().Msgf("Found %d %s broken media files", len(brokenMedia), a.Type)
	// Automatic search for missing files
	for _, item := range brokenMedia {
		log.Debug().Msgf("Searching missing for %s", item.Title)
		a.SearchMissing(item.Id)
	}
	log.Info().Msgf("Repair completed for %s", a.Name)
	return nil
}
// processMedia checks every media item and returns the subset whose files are
// broken (missing/unreadable/bad symlink). For 0 or 1 items it runs serially;
// otherwise it fans the items out to a pool of NumCPU*4 workers. Items whose
// storage parent directory is unreachable are skipped entirely to avoid
// false positives when the debrid mount is down.
func (a *Arr) processMedia(media []Content) []Content {
	if len(media) <= 1 {
		// Serial path: no goroutine overhead for a single item.
		var brokenMedia []Content
		for _, m := range media {
			// Check if media is accessible
			if !a.isMediaAccessible(m) {
				getLogger().Debug().Msgf("Skipping media check for %s - parent directory not accessible", m.Title)
				continue
			}
			if a.checkMediaFiles(m) {
				brokenMedia = append(brokenMedia, m)
			}
		}
		return brokenMedia
	}
	// Parallel path: I/O-bound checks, so oversubscribe the CPUs.
	workerCount := runtime.NumCPU() * 4
	if len(media) < workerCount {
		workerCount = len(media)
	}
	jobs := make(chan Content)
	results := make(chan Content)
	var brokenMedia []Content
	var wg sync.WaitGroup
	for i := 0; i < workerCount; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for m := range jobs {
				// Check if media is accessible
				// First check if we can access this media's directory
				if !a.isMediaAccessible(m) {
					getLogger().Debug().Msgf("Skipping media check for %s - parent directory not accessible", m.Title)
					continue
				}
				if a.checkMediaFilesParallel(m) {
					results <- m
				}
			}
		}()
	}
	// Feed jobs from a separate goroutine so unbuffered sends don't block
	// the collector loop below.
	go func() {
		for _, m := range media {
			jobs <- m
		}
		close(jobs)
	}()
	// Close results once all workers have drained the jobs channel.
	go func() {
		wg.Wait()
		close(results)
	}()
	for m := range results {
		brokenMedia = append(brokenMedia, m)
	}
	return brokenMedia
}
// checkMediaFilesParallel reports whether any of m's files are broken,
// checking files concurrently (NumCPU*2 workers) when there is more than one.
// Broken files are deleted from the arr as a side effect, same as the serial
// checkMediaFiles. A file is broken when it is a symlink with an unreadable
// target, or a non-symlink that cannot be read.
func (a *Arr) checkMediaFilesParallel(m Content) bool {
	if len(m.Files) <= 1 {
		// Not worth spinning up workers for a single file.
		return a.checkMediaFiles(m)
	}
	fileWorkers := runtime.NumCPU() * 2
	if len(m.Files) < fileWorkers {
		fileWorkers = len(m.Files)
	}
	fileJobs := make(chan contentFile)
	// Buffered to len(m.Files): workers never block sending results.
	brokenFiles := make(chan bool, len(m.Files))
	var fileWg sync.WaitGroup
	for i := 0; i < fileWorkers; i++ {
		fileWg.Add(1)
		go func() {
			defer fileWg.Done()
			for f := range fileJobs {
				getLogger().Debug().Msgf("Checking file: %s", f.Path)
				isBroken := false
				if fileIsSymlinked(f.Path) {
					getLogger().Debug().Msgf("File is symlinked: %s", f.Path)
					if !fileIsCorrectSymlink(f.Path) {
						getLogger().Debug().Msgf("File is broken: %s", f.Path)
						isBroken = true
						// Remove the arr's record so SearchMissing can re-grab it.
						if err := a.DeleteFile(f.Id); err != nil {
							getLogger().Info().Msgf("Failed to delete file: %s %d: %v", f.Path, f.Id, err)
						}
					}
				} else {
					getLogger().Debug().Msgf("File is not symlinked: %s", f.Path)
					if !fileIsReadable(f.Path) {
						getLogger().Debug().Msgf("File is broken: %s", f.Path)
						isBroken = true
						if err := a.DeleteFile(f.Id); err != nil {
							getLogger().Info().Msgf("Failed to delete file: %s %d: %v", f.Path, f.Id, err)
						}
					}
				}
				brokenFiles <- isBroken
			}
		}()
	}
	go func() {
		for _, f := range m.Files {
			fileJobs <- f
		}
		close(fileJobs)
	}()
	go func() {
		fileWg.Wait()
		close(brokenFiles)
	}()
	// Any single broken file marks the whole media item broken.
	isBroken := false
	for broken := range brokenFiles {
		if broken {
			isBroken = true
		}
	}
	return isBroken
}
// checkMediaFiles reports whether any of m's files are broken, deleting each
// broken file's record from the arr as it goes. A symlinked file is broken
// when its target is unreadable; a regular file is broken when it cannot be
// read directly.
func (a *Arr) checkMediaFiles(m Content) bool {
	broken := false
	for _, f := range m.Files {
		var fileBroken bool
		if fileIsSymlinked(f.Path) {
			fileBroken = !fileIsCorrectSymlink(f.Path)
		} else {
			fileBroken = !fileIsReadable(f.Path)
		}
		if !fileBroken {
			continue
		}
		broken = true
		if err := a.DeleteFile(f.Id); err != nil {
			getLogger().Info().Msgf("Failed to delete file: %s %d: %v", f.Path, f.Id, err)
		}
	}
	return broken
}
// isMediaAccessible reports whether the storage backing m can be reached at
// all. It resolves the first file (following a symlink when present) and
// stats the grandparent directory of the target; if that directory is
// missing, the whole mount is likely down, and per-file checks would produce
// false positives.
//
// Fix: log through getLogger() instead of the raw repairLogger var — the var
// is nil until first initialization, and dereferencing it here could panic if
// this runs before any other repair routine has called getLogger().
func (a *Arr) isMediaAccessible(m Content) bool {
	// We're likely to mount the debrid path.
	// So instead of checking the arr path, we check the original path
	// This is because the arr path is likely to be a symlink
	// And we want to check the actual path where the media is stored
	// This is to avoid false positives
	if len(m.Files) == 0 {
		return false
	}
	// Get the first file to check its target location
	file := m.Files[0].Path
	var targetPath string
	fileInfo, err := os.Lstat(file)
	if err != nil {
		getLogger().Debug().Msgf("Cannot stat file %s: %v", file, err)
		return false
	}
	if fileInfo.Mode()&os.ModeSymlink != 0 {
		// If it's a symlink, get where it points to
		target, err := os.Readlink(file)
		if err != nil {
			getLogger().Debug().Msgf("Cannot read symlink %s: %v", file, err)
			return false
		}
		// If the symlink target is relative, make it absolute
		if !filepath.IsAbs(target) {
			dir := filepath.Dir(file)
			target = filepath.Join(dir, target)
		}
		targetPath = target
	} else {
		// If it's a regular file, use its path
		targetPath = file
	}
	mediaDir := filepath.Dir(targetPath)  // Gets /remote/storage/Movie
	parentDir := filepath.Dir(mediaDir)   // Gets /remote/storage
	_, err = os.Stat(parentDir)
	if err != nil {
		getLogger().Debug().Msgf("Parent directory of target not accessible for media %s: %s", m.Title, parentDir)
		return false
	}
	return true
}
func fileIsSymlinked(file string) bool {
info, err := os.Lstat(file)
if err != nil {
return false
}
return info.Mode()&os.ModeSymlink != 0
}
// fileIsCorrectSymlink reports whether file is a symlink whose target
// resolves to a readable regular file. A relative target is resolved
// against the symlink's own directory before checking.
func fileIsCorrectSymlink(file string) bool {
	target, readErr := os.Readlink(file)
	if readErr != nil {
		return false
	}
	if filepath.IsAbs(target) {
		return fileIsReadable(target)
	}
	resolved := filepath.Join(filepath.Dir(file), target)
	return fileIsReadable(resolved)
}
// fileIsReadable reports whether filePath is an existing regular file whose
// leading bytes can actually be read — a bare stat can succeed on a broken
// mount, so the content check catches that case.
func fileIsReadable(filePath string) bool {
	info, statErr := os.Stat(filePath)
	switch {
	case statErr != nil:
		return false
	case !info.Mode().IsRegular():
		return false
	}
	return checkFileStart(filePath) == nil
}
func checkFileStart(filePath string) error {
f, err := os.Open(filePath)
if err != nil {
return err
}
defer f.Close()
buffer := make([]byte, 1024)
_, err = io.ReadAtLeast(f, buffer, 1024)
if err != nil && err != io.EOF {
return err
}
return nil
}

34
pkg/arr/structs.go Normal file
View File

@@ -0,0 +1,34 @@
package arr
// Movie mirrors the subset of Radarr's /api/v3/movie response used by the
// repair worker: the title, its id, and the single file Radarr has on disk.
type Movie struct {
	Title string `json:"title"`
	OriginalTitle string `json:"originalTitle"`
	Path string `json:"path"`
	// MovieFile is the single on-disk file Radarr tracks for this movie.
	MovieFile struct {
		MovieId int `json:"movieId"`
		RelativePath string `json:"relativePath"`
		Path string `json:"path"`
		Size int `json:"size"`
		Id int `json:"id"`
	} `json:"movieFile"`
	Id int `json:"id"`
}

// contentFile is one physical file belonging to a Content item; Id is the
// arr's file record id (used for deletion), Path its absolute on-disk path.
type contentFile struct {
	Name string `json:"name"`
	Path string `json:"path"`
	Id int `json:"id"`
}

// Content is the arr-agnostic unit the repair worker operates on — a movie
// or a series together with all of its files.
type Content struct {
	Title string `json:"title"`
	Id int `json:"id"`
	Files []contentFile `json:"files"`
}

// seriesFile mirrors a Sonarr /api/v3/episodefile entry.
type seriesFile struct {
	SeriesId int `json:"seriesId"`
	SeasonNumber int `json:"seasonNumber"`
	Path string `json:"path"`
	Id int `json:"id"`
}

5
pkg/arr/utils.go Normal file
View File

@@ -0,0 +1,5 @@
package arr
// Readfile is an unused stub: it accepts a path and always returns nil.
// NOTE(review): no caller is visible in this package — likely a leftover
// placeholder from development; confirm before removing.
func Readfile(path string) error {
	return nil
}

263
pkg/debrid/alldebrid.go Normal file
View File

@@ -0,0 +1,263 @@
package debrid
import (
"encoding/json"
"fmt"
"github.com/rs/zerolog"
"github.com/sirrobot01/debrid-blackhole/common"
"github.com/sirrobot01/debrid-blackhole/pkg/debrid/structs"
"net/http"
gourl "net/url"
"os"
"path/filepath"
"strconv"
)
// AllDebrid implements the debrid Service interface against the AllDebrid
// API, reusing the shared fields and HTTP client from BaseDebrid.
type AllDebrid struct {
	BaseDebrid
}
// GetMountPath returns the local folder where this debrid's storage is mounted.
func (r *AllDebrid) GetMountPath() string {
	return r.MountPath
}
// GetName returns the service identifier ("alldebrid", set in NewAllDebrid).
func (r *AllDebrid) GetName() string {
	return r.Name
}
// GetLogger returns this service's logger (satisfies the Service interface).
func (r *AllDebrid) GetLogger() zerolog.Logger {
	return r.logger
}
// IsAvailable reports cache availability for the given infohashes. AllDebrid
// has no instant-availability endpoint, so only hashes already present in the
// local cache appear in the result; unknown hashes are simply absent.
func (r *AllDebrid) IsAvailable(infohashes []string) map[string]bool {
	// Check if the infohashes are available in the local cache
	hashes, result := GetLocalCache(infohashes, r.cache)
	if len(hashes) == 0 {
		// Either all the infohashes are locally cached or none are
		r.cache.AddMultiple(result)
		return result
	}
	// Divide hashes into groups of 100
	// AllDebrid does not support checking cached infohashes
	return result
}
// SubmitMagnet uploads the torrent's magnet link to AllDebrid and records the
// returned magnet id on the torrent. Returns an error when the API reports no
// magnets back.
func (r *AllDebrid) SubmitMagnet(torrent *Torrent) (*Torrent, error) {
	params := gourl.Values{}
	params.Add("magnets[]", torrent.Magnet.Link)
	endpoint := fmt.Sprintf("%s/magnet/upload", r.Host) + "?" + params.Encode()
	req, _ := http.NewRequest(http.MethodGet, endpoint, nil)
	body, err := r.client.MakeRequest(req)
	if err != nil {
		return nil, err
	}
	var parsed structs.AllDebridUploadMagnetResponse
	if err = json.Unmarshal(body, &parsed); err != nil {
		return nil, err
	}
	uploaded := parsed.Data.Magnets
	if len(uploaded) == 0 {
		return nil, fmt.Errorf("error adding torrent")
	}
	torrentId := strconv.Itoa(uploaded[0].ID)
	r.logger.Info().Msgf("Torrent: %s added with id: %s", torrent.Name, torrentId)
	torrent.Id = torrentId
	return torrent, nil
}
// getAlldebridStatus maps AllDebrid's numeric magnet status codes to the
// internal status strings: 4 is complete, 0-3 are in-progress phases, and
// anything else is an error state.
func getAlldebridStatus(statusCode int) string {
	if statusCode == 4 {
		return "downloaded"
	}
	if statusCode >= 0 && statusCode <= 3 {
		return "downloading"
	}
	return "error"
}
// flattenFiles recursively flattens AllDebrid's nested file tree (folders
// carry an Elements slice) into a flat list of TorrentFiles with full
// relative paths. Sample files and anything that is neither video nor music
// are dropped. index is threaded through the recursion so every kept file
// gets a unique sequential id; callers start it at -1.
func flattenFiles(files []structs.AllDebridMagnetFile, parentPath string, index *int) []TorrentFile {
	result := make([]TorrentFile, 0)
	for _, f := range files {
		currentPath := f.Name
		if parentPath != "" {
			currentPath = filepath.Join(parentPath, f.Name)
		}
		if f.Elements != nil {
			// This is a folder, recurse into it
			result = append(result, flattenFiles(f.Elements, currentPath, index)...)
		} else {
			// This is a file
			fileName := filepath.Base(f.Name)
			if common.RegexMatch(common.SAMPLEMATCH, fileName) {
				continue
			}
			if !common.RegexMatch(common.VIDEOMATCH, fileName) && !common.RegexMatch(common.MUSICMATCH, fileName) {
				continue
			}
			*index++
			file := TorrentFile{
				Id:   strconv.Itoa(*index),
				Name: fileName,
				Size: f.Size,
				Path: currentPath,
			}
			result = append(result, file)
		}
	}
	return result
}
// GetTorrent fetches the magnet status for id from AllDebrid and maps it
// onto a Torrent. For completed magnets it also fills size, progress, speed,
// seeders, and the flattened file list.
//
// Fix: the original computed progress as float64((Downloaded / Size) * 100),
// which is integer division — it truncates to 0 for anything partial and
// panics on nothing but yields garbage for Size == 0. Divide in floating
// point and guard the zero-size case.
func (r *AllDebrid) GetTorrent(id string) (*Torrent, error) {
	torrent := &Torrent{}
	url := fmt.Sprintf("%s/magnet/status?id=%s", r.Host, id)
	req, _ := http.NewRequest(http.MethodGet, url, nil)
	resp, err := r.client.MakeRequest(req)
	if err != nil {
		return torrent, err
	}
	var res structs.AllDebridTorrentInfoResponse
	err = json.Unmarshal(resp, &res)
	if err != nil {
		r.logger.Info().Msgf("Error unmarshalling torrent info: %s", err)
		return torrent, err
	}
	data := res.Data.Magnets
	status := getAlldebridStatus(data.StatusCode)
	name := data.Filename
	torrent.Id = id
	torrent.Name = name
	torrent.Status = status
	torrent.Filename = name
	torrent.OriginalFilename = name
	torrent.Folder = name
	if status == "downloaded" {
		torrent.Bytes = data.Size
		// Floating-point division; guard against a zero-size magnet.
		if data.Size > 0 {
			torrent.Progress = float64(data.Downloaded) / float64(data.Size) * 100
		}
		torrent.Speed = data.DownloadSpeed
		torrent.Seeders = data.Seeders
		index := -1
		files := flattenFiles(data.Files, "", &index)
		parentFolder := data.Filename
		if data.NbLinks == 1 {
			// All debrid doesn't return the parent folder for single file torrents
			parentFolder = ""
		}
		torrent.OriginalFilename = parentFolder
		torrent.Files = files
	}
	torrent.Debrid = r
	return torrent, nil
}
// CheckStatus polls the torrent's current state once and decides how to
// proceed: a downloaded torrent optionally gets its direct download links
// resolved (skipped for symlink mode); a still-downloading torrent is either
// rejected (when uncached downloads are disabled — the remote copy is deleted
// in the background) or handed back for async processing; anything else is an
// error. Despite the for-loop shape, every branch breaks or returns, so the
// body runs at most once.
func (r *AllDebrid) CheckStatus(torrent *Torrent, isSymlink bool) (*Torrent, error) {
	for {
		tb, err := r.GetTorrent(torrent.Id)
		// Keep the freshest server-side view of the torrent.
		torrent = tb
		if err != nil || tb == nil {
			return tb, err
		}
		status := torrent.Status
		if status == "downloaded" {
			r.logger.Info().Msgf("Torrent: %s downloaded", torrent.Name)
			if !isSymlink {
				err = r.GetDownloadLinks(torrent)
				if err != nil {
					return torrent, err
				}
			}
			break
		} else if status == "downloading" {
			if !r.DownloadUncached {
				go torrent.Delete()
				return torrent, fmt.Errorf("torrent: %s not cached", torrent.Name)
			}
			// Break out of the loop if the torrent is downloading.
			// This is necessary to prevent infinite loop since we moved to sync downloading and async processing
			break
		} else {
			return torrent, fmt.Errorf("torrent: %s has error", torrent.Name)
		}
	}
	return torrent, nil
}
// DeleteTorrent removes the magnet from the AllDebrid account. The outcome is
// logged either way; failures are not propagated.
func (r *AllDebrid) DeleteTorrent(torrent *Torrent) {
	endpoint := fmt.Sprintf("%s/magnet/delete?id=%s", r.Host, torrent.Id)
	req, _ := http.NewRequest(http.MethodGet, endpoint, nil)
	if _, err := r.client.MakeRequest(req); err != nil {
		r.logger.Info().Msgf("Error deleting torrent: %s", err)
		return
	}
	r.logger.Info().Msgf("Torrent: %s deleted", torrent.Name)
}
// GetDownloadLinks unlocks each file link on the torrent via AllDebrid's
// /link/unlock endpoint and stores the resulting direct-download links on the
// torrent. Stops and returns the first request or decode error; in that case
// torrent.DownloadLinks is left unchanged.
func (r *AllDebrid) GetDownloadLinks(torrent *Torrent) error {
	// Pre-size: exactly one download link per file.
	downloadLinks := make([]TorrentDownloadLinks, 0, len(torrent.Files))
	for _, file := range torrent.Files {
		url := fmt.Sprintf("%s/link/unlock", r.Host)
		query := gourl.Values{}
		query.Add("link", file.Link)
		url += "?" + query.Encode()
		req, _ := http.NewRequest(http.MethodGet, url, nil)
		resp, err := r.client.MakeRequest(req)
		if err != nil {
			return err
		}
		var data structs.AllDebridDownloadLink
		if err = json.Unmarshal(resp, &data); err != nil {
			return err
		}
		downloadLinks = append(downloadLinks, TorrentDownloadLinks{
			Link:         file.Link,
			Filename:     data.Data.Filename,
			DownloadLink: data.Data.Link,
		})
	}
	torrent.DownloadLinks = downloadLinks
	return nil
}
// GetCheckCached reports whether availability should be checked before
// submitting magnets to this service (from config).
func (r *AllDebrid) GetCheckCached() bool {
	return r.CheckCached
}
// NewAllDebrid builds an AllDebrid service from its config section, wiring up
// a rate-limited HTTP client carrying the account's bearer token and a
// per-service logger at the globally configured level.
func NewAllDebrid(dc common.DebridConfig, cache *common.Cache) *AllDebrid {
	rateLimit := common.ParseRateLimit(dc.RateLimit)
	authHeaders := map[string]string{
		"Authorization": fmt.Sprintf("Bearer %s", dc.APIKey),
	}
	httpClient := common.NewRLHTTPClient(rateLimit, authHeaders)
	serviceLogger := common.NewLogger(dc.Name, common.CONFIG.LogLevel, os.Stdout)
	base := BaseDebrid{
		Name:             "alldebrid",
		Host:             dc.Host,
		APIKey:           dc.APIKey,
		DownloadUncached: dc.DownloadUncached,
		client:           httpClient,
		cache:            cache,
		MountPath:        dc.Folder,
		logger:           serviceLogger,
		CheckCached:      dc.CheckCached,
	}
	return &AllDebrid{BaseDebrid: base}
}

View File

@@ -3,9 +3,9 @@ package debrid
import ( import (
"fmt" "fmt"
"github.com/anacrolix/torrent/metainfo" "github.com/anacrolix/torrent/metainfo"
"goBlack/common" "github.com/rs/zerolog"
"goBlack/pkg/arr" "github.com/sirrobot01/debrid-blackhole/common"
"log" "github.com/sirrobot01/debrid-blackhole/pkg/arr"
"path/filepath" "path/filepath"
) )
@@ -17,7 +17,7 @@ type BaseDebrid struct {
client *common.RLHTTPClient client *common.RLHTTPClient
cache *common.Cache cache *common.Cache
MountPath string MountPath string
logger *log.Logger logger zerolog.Logger
CheckCached bool CheckCached bool
} }
@@ -31,7 +31,7 @@ type Service interface {
GetCheckCached() bool GetCheckCached() bool
GetTorrent(id string) (*Torrent, error) GetTorrent(id string) (*Torrent, error)
GetName() string GetName() string
GetLogger() *log.Logger GetLogger() zerolog.Logger
} }
func NewDebrid(debs []common.DebridConfig, maxCachedSize int) *DebridService { func NewDebrid(debs []common.DebridConfig, maxCachedSize int) *DebridService {
@@ -41,7 +41,8 @@ func NewDebrid(debs []common.DebridConfig, maxCachedSize int) *DebridService {
for _, dc := range debs { for _, dc := range debs {
d := createDebrid(dc, common.NewCache(maxCacheSize)) d := createDebrid(dc, common.NewCache(maxCacheSize))
d.GetLogger().Println("Debrid Service started") logger := d.GetLogger()
logger.Info().Msg("Debrid Service started")
debrids = append(debrids, d) debrids = append(debrids, d)
} }
d := &DebridService{debrids: debrids, lastUsed: 0} d := &DebridService{debrids: debrids, lastUsed: 0}
@@ -56,6 +57,8 @@ func createDebrid(dc common.DebridConfig, cache *common.Cache) Service {
return NewTorbox(dc, cache) return NewTorbox(dc, cache)
case "debridlink": case "debridlink":
return NewDebridLink(dc, cache) return NewDebridLink(dc, cache)
case "alldebrid":
return NewAllDebrid(dc, cache)
default: default:
return NewRealDebrid(dc, cache) return NewRealDebrid(dc, cache)
} }
@@ -154,16 +157,17 @@ func ProcessTorrent(d *DebridService, magnet *common.Magnet, a *arr.Arr, isSymli
errs := make([]error, 0) errs := make([]error, 0)
for index, db := range d.debrids { for index, db := range d.debrids {
log.Println("Processing debrid: ", db.GetName())
logger := db.GetLogger() logger := db.GetLogger()
logger.Printf("Torrent Hash: %s", debridTorrent.InfoHash) logger.Info().Msgf("Processing debrid: %s", db.GetName())
logger.Info().Msgf("Torrent Hash: %s", debridTorrent.InfoHash)
if db.GetCheckCached() { if db.GetCheckCached() {
hash, exists := db.IsAvailable([]string{debridTorrent.InfoHash})[debridTorrent.InfoHash] hash, exists := db.IsAvailable([]string{debridTorrent.InfoHash})[debridTorrent.InfoHash]
if !exists || !hash { if !exists || !hash {
logger.Printf("Torrent: %s is not cached", debridTorrent.Name) logger.Info().Msgf("Torrent: %s is not cached", debridTorrent.Name)
continue continue
} else { } else {
logger.Printf("Torrent: %s is cached(or downloading)", debridTorrent.Name) logger.Info().Msgf("Torrent: %s is cached(or downloading)", debridTorrent.Name)
} }
} }
@@ -176,7 +180,7 @@ func ProcessTorrent(d *DebridService, magnet *common.Magnet, a *arr.Arr, isSymli
errs = append(errs, err) errs = append(errs, err)
continue continue
} }
logger.Printf("Torrent: %s submitted to %s", dbt.Name, db.GetName()) logger.Info().Msgf("Torrent: %s submitted to %s", dbt.Name, db.GetName())
d.lastUsed = index d.lastUsed = index
return db.CheckStatus(dbt, isSymlink) return db.CheckStatus(dbt, isSymlink)
} }

View File

@@ -4,8 +4,9 @@ import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt" "fmt"
"goBlack/common" "github.com/rs/zerolog"
"goBlack/pkg/debrid/structs" "github.com/sirrobot01/debrid-blackhole/common"
"github.com/sirrobot01/debrid-blackhole/pkg/debrid/structs"
"log" "log"
"net/http" "net/http"
"os" "os"
@@ -24,7 +25,7 @@ func (r *DebridLink) GetName() string {
return r.Name return r.Name
} }
func (r *DebridLink) GetLogger() *log.Logger { func (r *DebridLink) GetLogger() zerolog.Logger {
return r.logger return r.logger
} }
@@ -63,13 +64,13 @@ func (r *DebridLink) IsAvailable(infohashes []string) map[string]bool {
req, _ := http.NewRequest(http.MethodGet, url, nil) req, _ := http.NewRequest(http.MethodGet, url, nil)
resp, err := r.client.MakeRequest(req) resp, err := r.client.MakeRequest(req)
if err != nil { if err != nil {
log.Println("Error checking availability:", err) r.logger.Info().Msgf("Error checking availability: %v", err)
return result return result
} }
var data structs.DebridLinkAvailableResponse var data structs.DebridLinkAvailableResponse
err = json.Unmarshal(resp, &data) err = json.Unmarshal(resp, &data)
if err != nil { if err != nil {
log.Println("Error marshalling availability:", err) r.logger.Info().Msgf("Error marshalling availability: %v", err)
return result return result
} }
if data.Value == nil { if data.Value == nil {
@@ -133,6 +134,7 @@ func (r *DebridLink) GetTorrent(id string) (*Torrent, error) {
Id: f.ID, Id: f.ID,
Name: f.Name, Name: f.Name,
Size: f.Size, Size: f.Size,
Path: f.Name,
} }
} }
torrent.Files = files torrent.Files = files
@@ -159,7 +161,7 @@ func (r *DebridLink) SubmitMagnet(torrent *Torrent) (*Torrent, error) {
} }
data := *res.Value data := *res.Value
status := "downloading" status := "downloading"
log.Printf("Torrent: %s added with id: %s\n", torrent.Name, data.ID) log.Printf("Torrent: %s added with id: %s", torrent.Name, data.ID)
name := common.RemoveInvalidChars(data.Name) name := common.RemoveInvalidChars(data.Name)
torrent.Id = data.ID torrent.Id = data.ID
torrent.Name = name torrent.Name = name
@@ -177,6 +179,7 @@ func (r *DebridLink) SubmitMagnet(torrent *Torrent) (*Torrent, error) {
Id: f.ID, Id: f.ID,
Name: f.Name, Name: f.Name,
Size: f.Size, Size: f.Size,
Path: f.Name,
Link: f.DownloadURL, Link: f.DownloadURL,
} }
} }
@@ -197,7 +200,7 @@ func (r *DebridLink) CheckStatus(torrent *Torrent, isSymlink bool) (*Torrent, er
if status == "error" || status == "dead" || status == "magnet_error" { if status == "error" || status == "dead" || status == "magnet_error" {
return torrent, fmt.Errorf("torrent: %s has error", torrent.Name) return torrent, fmt.Errorf("torrent: %s has error", torrent.Name)
} else if status == "downloaded" { } else if status == "downloaded" {
r.logger.Printf("Torrent: %s downloaded\n", torrent.Name) r.logger.Info().Msgf("Torrent: %s downloaded", torrent.Name)
if !isSymlink { if !isSymlink {
err = r.GetDownloadLinks(torrent) err = r.GetDownloadLinks(torrent)
if err != nil { if err != nil {
@@ -224,9 +227,9 @@ func (r *DebridLink) DeleteTorrent(torrent *Torrent) {
req, _ := http.NewRequest(http.MethodDelete, url, nil) req, _ := http.NewRequest(http.MethodDelete, url, nil)
_, err := r.client.MakeRequest(req) _, err := r.client.MakeRequest(req)
if err == nil { if err == nil {
r.logger.Printf("Torrent: %s deleted\n", torrent.Name) r.logger.Info().Msgf("Torrent: %s deleted", torrent.Name)
} else { } else {
r.logger.Printf("Error deleting torrent: %s", err) r.logger.Info().Msgf("Error deleting torrent: %s", err)
} }
} }
@@ -254,7 +257,7 @@ func NewDebridLink(dc common.DebridConfig, cache *common.Cache) *DebridLink {
"Content-Type": "application/json", "Content-Type": "application/json",
} }
client := common.NewRLHTTPClient(rl, headers) client := common.NewRLHTTPClient(rl, headers)
logger := common.NewLogger(dc.Name, os.Stdout) logger := common.NewLogger(dc.Name, common.CONFIG.LogLevel, os.Stdout)
return &DebridLink{ return &DebridLink{
BaseDebrid: BaseDebrid{ BaseDebrid: BaseDebrid{
Name: "debridlink", Name: "debridlink",

View File

@@ -3,9 +3,9 @@ package debrid
import ( import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"goBlack/common" "github.com/rs/zerolog"
"goBlack/pkg/debrid/structs" "github.com/sirrobot01/debrid-blackhole/common"
"log" "github.com/sirrobot01/debrid-blackhole/pkg/debrid/structs"
"net/http" "net/http"
gourl "net/url" gourl "net/url"
"os" "os"
@@ -27,7 +27,7 @@ func (r *RealDebrid) GetName() string {
return r.Name return r.Name
} }
func (r *RealDebrid) GetLogger() *log.Logger { func (r *RealDebrid) GetLogger() zerolog.Logger {
return r.logger return r.logger
} }
@@ -35,9 +35,11 @@ func GetTorrentFiles(data structs.RealDebridTorrentInfo) []TorrentFile {
files := make([]TorrentFile, 0) files := make([]TorrentFile, 0)
for _, f := range data.Files { for _, f := range data.Files {
name := filepath.Base(f.Path) name := filepath.Base(f.Path)
if (!common.RegexMatch(common.VIDEOMATCH, name) && if common.RegexMatch(common.SAMPLEMATCH, name) {
!common.RegexMatch(common.SUBMATCH, name) && // Skip sample files
!common.RegexMatch(common.MUSICMATCH, name)) || common.RegexMatch(common.SAMPLEMATCH, name) { continue
}
if !common.RegexMatch(common.VIDEOMATCH, name) && !common.RegexMatch(common.MUSICMATCH, name) {
continue continue
} }
fileId := f.ID fileId := f.ID
@@ -87,13 +89,13 @@ func (r *RealDebrid) IsAvailable(infohashes []string) map[string]bool {
req, _ := http.NewRequest(http.MethodGet, url, nil) req, _ := http.NewRequest(http.MethodGet, url, nil)
resp, err := r.client.MakeRequest(req) resp, err := r.client.MakeRequest(req)
if err != nil { if err != nil {
log.Println("Error checking availability:", err) r.logger.Info().Msgf("Error checking availability: %v", err)
return result return result
} }
var data structs.RealDebridAvailabilityResponse var data structs.RealDebridAvailabilityResponse
err = json.Unmarshal(resp, &data) err = json.Unmarshal(resp, &data)
if err != nil { if err != nil {
log.Println("Error marshalling availability:", err) r.logger.Info().Msgf("Error marshalling availability: %v", err)
return result return result
} }
for _, h := range hashes[i:end] { for _, h := range hashes[i:end] {
@@ -119,7 +121,7 @@ func (r *RealDebrid) SubmitMagnet(torrent *Torrent) (*Torrent, error) {
return nil, err return nil, err
} }
err = json.Unmarshal(resp, &data) err = json.Unmarshal(resp, &data)
log.Printf("Torrent: %s added with id: %s\n", torrent.Name, data.Id) r.logger.Info().Msgf("Torrent: %s added with id: %s", torrent.Name, data.Id)
torrent.Id = data.Id torrent.Id = data.Id
return torrent, nil return torrent, nil
@@ -162,7 +164,7 @@ func (r *RealDebrid) CheckStatus(torrent *Torrent, isSymlink bool) (*Torrent, er
for { for {
resp, err := r.client.MakeRequest(req) resp, err := r.client.MakeRequest(req)
if err != nil { if err != nil {
log.Println("ERROR Checking file: ", err) r.logger.Info().Msgf("ERROR Checking file: %v", err)
return torrent, err return torrent, err
} }
var data structs.RealDebridTorrentInfo var data structs.RealDebridTorrentInfo
@@ -205,7 +207,7 @@ func (r *RealDebrid) CheckStatus(torrent *Torrent, isSymlink bool) (*Torrent, er
} else if status == "downloaded" { } else if status == "downloaded" {
files := GetTorrentFiles(data) files := GetTorrentFiles(data)
torrent.Files = files torrent.Files = files
log.Printf("Torrent: %s downloaded to RD\n", torrent.Name) r.logger.Info().Msgf("Torrent: %s downloaded to RD", torrent.Name)
if !isSymlink { if !isSymlink {
err = r.GetDownloadLinks(torrent) err = r.GetDownloadLinks(torrent)
if err != nil { if err != nil {
@@ -231,9 +233,9 @@ func (r *RealDebrid) DeleteTorrent(torrent *Torrent) {
req, _ := http.NewRequest(http.MethodDelete, url, nil) req, _ := http.NewRequest(http.MethodDelete, url, nil)
_, err := r.client.MakeRequest(req) _, err := r.client.MakeRequest(req)
if err == nil { if err == nil {
r.logger.Printf("Torrent: %s deleted\n", torrent.Name) r.logger.Info().Msgf("Torrent: %s deleted", torrent.Name)
} else { } else {
r.logger.Printf("Error deleting torrent: %s", err) r.logger.Info().Msgf("Error deleting torrent: %s", err)
} }
} }
@@ -277,7 +279,7 @@ func NewRealDebrid(dc common.DebridConfig, cache *common.Cache) *RealDebrid {
"Authorization": fmt.Sprintf("Bearer %s", dc.APIKey), "Authorization": fmt.Sprintf("Bearer %s", dc.APIKey),
} }
client := common.NewRLHTTPClient(rl, headers) client := common.NewRLHTTPClient(rl, headers)
logger := common.NewLogger(dc.Name, os.Stdout) logger := common.NewLogger(dc.Name, common.CONFIG.LogLevel, os.Stdout)
return &RealDebrid{ return &RealDebrid{
BaseDebrid: BaseDebrid{ BaseDebrid: BaseDebrid{
Name: "realdebrid", Name: "realdebrid",

View File

@@ -0,0 +1,75 @@
package structs
// errorResponse is the error object AllDebrid embeds in API responses.
type errorResponse struct {
	Code    string `json:"code"`
	Message string `json:"message"`
}

// AllDebridMagnetFile is one node in AllDebrid's file tree. The API uses
// single-letter keys: n=name, s=size, l=download link, e=child elements
// (present when the node is a folder).
type AllDebridMagnetFile struct {
	Name     string                `json:"n"`
	Size     int64                 `json:"s"`
	Link     string                `json:"l"`
	Elements []AllDebridMagnetFile `json:"e"`
}

// magnetInfo describes a single magnet/torrent as reported by the
// AllDebrid magnet status endpoint.
type magnetInfo struct {
	Id             int                   `json:"id"`
	Filename       string                `json:"filename"`
	Size           int64                 `json:"size"`
	Hash           string                `json:"hash"`
	Status         string                `json:"status"`
	StatusCode     int                   `json:"statusCode"`
	UploadDate     int                   `json:"uploadDate"`     // NOTE(review): presumably a unix timestamp — confirm against API docs
	Downloaded     int64                 `json:"downloaded"`
	Uploaded       int64                 `json:"uploaded"`
	DownloadSpeed  int                   `json:"downloadSpeed"`
	UploadSpeed    int                   `json:"uploadSpeed"`
	Seeders        int                   `json:"seeders"`
	CompletionDate int                   `json:"completionDate"` // NOTE(review): presumably a unix timestamp — confirm against API docs
	Type           string                `json:"type"`
	Notified       bool                  `json:"notified"`
	Version        int                   `json:"version"`
	NbLinks        int                   `json:"nbLinks"`
	Files          []AllDebridMagnetFile `json:"files"`
}

// AllDebridTorrentInfoResponse wraps a single magnet's status.
// Error is nil when the request succeeded.
type AllDebridTorrentInfoResponse struct {
	Status string `json:"status"`
	Data   struct {
		Magnets magnetInfo `json:"magnets"`
	} `json:"data"`
	Error *errorResponse `json:"error"`
}

// AllDebridUploadMagnetResponse is the result of submitting one or more
// magnets for download.
type AllDebridUploadMagnetResponse struct {
	Status string `json:"status"`
	Data   struct {
		Magnets []struct {
			Magnet           string `json:"magnet"`
			Hash             string `json:"hash"`
			Name             string `json:"name"`
			FilenameOriginal string `json:"filename_original"`
			Size             int64  `json:"size"`
			Ready            bool   `json:"ready"` // true when the magnet is already cached on AllDebrid
			ID               int    `json:"id"`
		} `json:"magnets"`
	}
	Error *errorResponse `json:"error"`
}

// AllDebridDownloadLink is the result of unlocking a hoster link into a
// direct download URL.
type AllDebridDownloadLink struct {
	Status string `json:"status"`
	Data   struct {
		Link      string        `json:"link"`
		Host      string        `json:"host"`
		Filename  string        `json:"filename"`
		Streaming []interface{} `json:"streaming"`
		Paws      bool          `json:"paws"`
		Filesize  int           `json:"filesize"`
		Id        string        `json:"id"`
		Path      []struct {
			Name string `json:"n"`
			Size int    `json:"s"`
		} `json:"path"`
	} `json:"data"`
	Error *errorResponse `json:"error"`
}

View File

@@ -4,8 +4,9 @@ import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"fmt" "fmt"
"goBlack/common" "github.com/rs/zerolog"
"goBlack/pkg/debrid/structs" "github.com/sirrobot01/debrid-blackhole/common"
"github.com/sirrobot01/debrid-blackhole/pkg/debrid/structs"
"log" "log"
"mime/multipart" "mime/multipart"
"net/http" "net/http"
@@ -30,7 +31,7 @@ func (r *Torbox) GetName() string {
return r.Name return r.Name
} }
func (r *Torbox) GetLogger() *log.Logger { func (r *Torbox) GetLogger() zerolog.Logger {
return r.logger return r.logger
} }
@@ -69,13 +70,13 @@ func (r *Torbox) IsAvailable(infohashes []string) map[string]bool {
req, _ := http.NewRequest(http.MethodGet, url, nil) req, _ := http.NewRequest(http.MethodGet, url, nil)
resp, err := r.client.MakeRequest(req) resp, err := r.client.MakeRequest(req)
if err != nil { if err != nil {
log.Println("Error checking availability:", err) r.logger.Info().Msgf("Error checking availability: %v", err)
return result return result
} }
var res structs.TorBoxAvailableResponse var res structs.TorBoxAvailableResponse
err = json.Unmarshal(resp, &res) err = json.Unmarshal(resp, &res)
if err != nil { if err != nil {
log.Println("Error marshalling availability:", err) r.logger.Info().Msgf("Error marshalling availability: %v", err)
return result return result
} }
if res.Data == nil { if res.Data == nil {
@@ -117,13 +118,13 @@ func (r *Torbox) SubmitMagnet(torrent *Torrent) (*Torrent, error) {
} }
dt := *data.Data dt := *data.Data
torrentId := strconv.Itoa(dt.Id) torrentId := strconv.Itoa(dt.Id)
log.Printf("Torrent: %s added with id: %s\n", torrent.Name, torrentId) log.Printf("Torrent: %s added with id: %s", torrent.Name, torrentId)
torrent.Id = torrentId torrent.Id = torrentId
return torrent, nil return torrent, nil
} }
func getStatus(status string, finished bool) string { func getTorboxStatus(status string, finished bool) string {
if finished { if finished {
return "downloaded" return "downloaded"
} }
@@ -159,7 +160,7 @@ func (r *Torbox) GetTorrent(id string) (*Torrent, error) {
torrent.Bytes = data.Size torrent.Bytes = data.Size
torrent.Folder = name torrent.Folder = name
torrent.Progress = data.Progress * 100 torrent.Progress = data.Progress * 100
torrent.Status = getStatus(data.DownloadState, data.DownloadFinished) torrent.Status = getTorboxStatus(data.DownloadState, data.DownloadFinished)
torrent.Speed = data.DownloadSpeed torrent.Speed = data.DownloadSpeed
torrent.Seeders = data.Seeds torrent.Seeders = data.Seeds
torrent.Filename = name torrent.Filename = name
@@ -167,9 +168,11 @@ func (r *Torbox) GetTorrent(id string) (*Torrent, error) {
files := make([]TorrentFile, 0) files := make([]TorrentFile, 0)
for _, f := range data.Files { for _, f := range data.Files {
fileName := filepath.Base(f.Name) fileName := filepath.Base(f.Name)
if (!common.RegexMatch(common.VIDEOMATCH, fileName) && if common.RegexMatch(common.SAMPLEMATCH, fileName) {
!common.RegexMatch(common.SUBMATCH, fileName) && // Skip sample files
!common.RegexMatch(common.MUSICMATCH, fileName)) || common.RegexMatch(common.SAMPLEMATCH, fileName) { continue
}
if !common.RegexMatch(common.VIDEOMATCH, fileName) && !common.RegexMatch(common.MUSICMATCH, fileName) {
continue continue
} }
file := TorrentFile{ file := TorrentFile{
@@ -206,7 +209,7 @@ func (r *Torbox) CheckStatus(torrent *Torrent, isSymlink bool) (*Torrent, error)
if status == "error" || status == "dead" || status == "magnet_error" { if status == "error" || status == "dead" || status == "magnet_error" {
return torrent, fmt.Errorf("torrent: %s has error", torrent.Name) return torrent, fmt.Errorf("torrent: %s has error", torrent.Name)
} else if status == "downloaded" { } else if status == "downloaded" {
r.logger.Printf("Torrent: %s downloaded\n", torrent.Name) r.logger.Info().Msgf("Torrent: %s downloaded", torrent.Name)
if !isSymlink { if !isSymlink {
err = r.GetDownloadLinks(torrent) err = r.GetDownloadLinks(torrent)
if err != nil { if err != nil {
@@ -235,9 +238,9 @@ func (r *Torbox) DeleteTorrent(torrent *Torrent) {
req, _ := http.NewRequest(http.MethodDelete, url, bytes.NewBuffer(jsonPayload)) req, _ := http.NewRequest(http.MethodDelete, url, bytes.NewBuffer(jsonPayload))
_, err := r.client.MakeRequest(req) _, err := r.client.MakeRequest(req)
if err == nil { if err == nil {
r.logger.Printf("Torrent: %s deleted\n", torrent.Name) r.logger.Info().Msgf("Torrent: %s deleted", torrent.Name)
} else { } else {
r.logger.Printf("Error deleting torrent: %s", err) r.logger.Info().Msgf("Error deleting torrent: %s", err)
} }
} }
@@ -286,7 +289,7 @@ func NewTorbox(dc common.DebridConfig, cache *common.Cache) *Torbox {
"Authorization": fmt.Sprintf("Bearer %s", dc.APIKey), "Authorization": fmt.Sprintf("Bearer %s", dc.APIKey),
} }
client := common.NewRLHTTPClient(rl, headers) client := common.NewRLHTTPClient(rl, headers)
logger := common.NewLogger(dc.Name, os.Stdout) logger := common.NewLogger(dc.Name, common.CONFIG.LogLevel, os.Stdout)
return &Torbox{ return &Torbox{
BaseDebrid: BaseDebrid{ BaseDebrid: BaseDebrid{
Name: "torbox", Name: "torbox",

View File

@@ -1,8 +1,9 @@
package debrid package debrid
import ( import (
"goBlack/common" "fmt"
"goBlack/pkg/arr" "github.com/sirrobot01/debrid-blackhole/common"
"github.com/sirrobot01/debrid-blackhole/pkg/arr"
"os" "os"
"path/filepath" "path/filepath"
) )
@@ -58,7 +59,7 @@ func (t *Torrent) GetSymlinkFolder(parent string) string {
return filepath.Join(parent, t.Arr.Name, t.Folder) return filepath.Join(parent, t.Arr.Name, t.Folder)
} }
func (t *Torrent) GetMountFolder(rClonePath string) string { func (t *Torrent) GetMountFolder(rClonePath string) (string, error) {
possiblePaths := []string{ possiblePaths := []string{
t.OriginalFilename, t.OriginalFilename,
t.Filename, t.Filename,
@@ -66,11 +67,11 @@ func (t *Torrent) GetMountFolder(rClonePath string) string {
} }
for _, path := range possiblePaths { for _, path := range possiblePaths {
if path != "" && common.FileReady(filepath.Join(rClonePath, path)) { if common.FileReady(filepath.Join(rClonePath, path)) {
return path return path, nil
} }
} }
return "" return "", fmt.Errorf("no path found")
} }
func (t *Torrent) Delete() { func (t *Torrent) Delete() {

View File

@@ -37,7 +37,7 @@ Loop:
for { for {
select { select {
case <-t.C: case <-t.C:
fmt.Printf(" %s: transferred %d / %d bytes (%.2f%%)\n", fmt.Printf(" %s: transferred %d / %d bytes (%.2f%%)",
resp.Filename, resp.Filename,
resp.BytesComplete(), resp.BytesComplete(),
resp.Size(), resp.Size(),

View File

@@ -9,11 +9,11 @@ import (
"fmt" "fmt"
"github.com/elazarl/goproxy" "github.com/elazarl/goproxy"
"github.com/elazarl/goproxy/ext/auth" "github.com/elazarl/goproxy/ext/auth"
"github.com/rs/zerolog"
"github.com/sirrobot01/debrid-blackhole/common"
"github.com/sirrobot01/debrid-blackhole/pkg/debrid"
"github.com/valyala/fastjson" "github.com/valyala/fastjson"
"goBlack/common"
"goBlack/pkg/debrid"
"io" "io"
"log"
"net/http" "net/http"
"os" "os"
"regexp" "regexp"
@@ -75,7 +75,7 @@ type Proxy struct {
password string password string
cachedOnly bool cachedOnly bool
debrid debrid.Service debrid debrid.Service
logger *log.Logger logger zerolog.Logger
} }
func NewProxy(config common.Config, deb *debrid.DebridService) *Proxy { func NewProxy(config common.Config, deb *debrid.DebridService) *Proxy {
@@ -84,12 +84,11 @@ func NewProxy(config common.Config, deb *debrid.DebridService) *Proxy {
return &Proxy{ return &Proxy{
port: port, port: port,
enabled: cfg.Enabled, enabled: cfg.Enabled,
debug: cfg.Debug,
username: cfg.Username, username: cfg.Username,
password: cfg.Password, password: cfg.Password,
cachedOnly: *cfg.CachedOnly, cachedOnly: *cfg.CachedOnly,
debrid: deb.Get(), debrid: deb.Get(),
logger: common.NewLogger("Proxy", os.Stdout), logger: common.NewLogger("Proxy", cfg.LogLevel, os.Stdout),
} }
} }
@@ -229,7 +228,7 @@ func (p *Proxy) ProcessXMLResponse(resp *http.Response) *http.Response {
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
p.logger.Println("Error reading response body:", err) p.logger.Info().Msgf("Error reading response body: %v", err)
resp.Body = io.NopCloser(bytes.NewReader(body)) resp.Body = io.NopCloser(bytes.NewReader(body))
return resp return resp
} }
@@ -241,7 +240,7 @@ func (p *Proxy) ProcessXMLResponse(resp *http.Response) *http.Response {
var rss RSS var rss RSS
err = xml.Unmarshal(body, &rss) err = xml.Unmarshal(body, &rss)
if err != nil { if err != nil {
p.logger.Printf("Error unmarshalling XML: %v", err) p.logger.Info().Msgf("Error unmarshalling XML: %v", err)
resp.Body = io.NopCloser(bytes.NewReader(body)) resp.Body = io.NopCloser(bytes.NewReader(body))
return resp return resp
} }
@@ -279,10 +278,10 @@ func (p *Proxy) ProcessXMLResponse(resp *http.Response) *http.Response {
} }
if len(newItems) > 0 { if len(newItems) > 0 {
p.logger.Printf("[%s Report]: %d/%d items are cached || Found %d infohash", indexer, len(newItems), len(rss.Channel.Items), len(hashes)) p.logger.Info().Msgf("[%s Report]: %d/%d items are cached || Found %d infohash", indexer, len(newItems), len(rss.Channel.Items), len(hashes))
} else { } else {
// This will prevent the indexer from being disabled by the arr // This will prevent the indexer from being disabled by the arr
p.logger.Printf("[%s Report]: No Items are cached; Return only first item with [UnCached]", indexer) p.logger.Info().Msgf("[%s Report]: No Items are cached; Return only first item with [UnCached]", indexer)
item := rss.Channel.Items[0] item := rss.Channel.Items[0]
item.Title = fmt.Sprintf("%s [UnCached]", item.Title) item.Title = fmt.Sprintf("%s [UnCached]", item.Title)
newItems = append(newItems, item) newItems = append(newItems, item)
@@ -291,7 +290,7 @@ func (p *Proxy) ProcessXMLResponse(resp *http.Response) *http.Response {
rss.Channel.Items = newItems rss.Channel.Items = newItems
modifiedBody, err := xml.MarshalIndent(rss, "", " ") modifiedBody, err := xml.MarshalIndent(rss, "", " ")
if err != nil { if err != nil {
p.logger.Printf("Error marshalling XML: %v", err) p.logger.Info().Msgf("Error marshalling XML: %v", err)
resp.Body = io.NopCloser(bytes.NewReader(body)) resp.Body = io.NopCloser(bytes.NewReader(body))
return resp return resp
} }
@@ -332,13 +331,13 @@ func (p *Proxy) Start(ctx context.Context) error {
Addr: portFmt, Addr: portFmt,
Handler: proxy, Handler: proxy,
} }
p.logger.Printf("[*] Starting proxy server on %s\n", portFmt) p.logger.Info().Msgf("Starting proxy server on %s", portFmt)
go func() { go func() {
if err := srv.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) { if err := srv.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {
p.logger.Printf("Error starting proxy server: %v\n", err) p.logger.Info().Msgf("Error starting proxy server: %v", err)
} }
}() }()
<-ctx.Done() <-ctx.Done()
p.logger.Println("Shutting down gracefully...") p.logger.Info().Msg("Shutting down gracefully...")
return srv.Shutdown(context.Background()) return srv.Shutdown(context.Background())
} }

View File

@@ -3,13 +3,14 @@ package qbit
import ( import (
"context" "context"
"fmt" "fmt"
"goBlack/common" "github.com/sirrobot01/debrid-blackhole/common"
"goBlack/pkg/debrid" "github.com/sirrobot01/debrid-blackhole/pkg/arr"
"goBlack/pkg/qbit/server" "github.com/sirrobot01/debrid-blackhole/pkg/debrid"
"github.com/sirrobot01/debrid-blackhole/pkg/qbit/server"
) )
func Start(ctx context.Context, config *common.Config, deb *debrid.DebridService) error { func Start(ctx context.Context, config *common.Config, deb *debrid.DebridService, arrs *arr.Storage) error {
srv := server.NewServer(config, deb) srv := server.NewServer(config, deb, arrs)
if err := srv.Start(ctx); err != nil { if err := srv.Start(ctx); err != nil {
return fmt.Errorf("failed to start qbit server: %w", err) return fmt.Errorf("failed to start qbit server: %w", err)
} }

View File

@@ -1,42 +0,0 @@
package server
import (
"goBlack/common"
"goBlack/pkg/qbit/shared"
"net/http"
"path/filepath"
)
func (s *Server) handleVersion(w http.ResponseWriter, r *http.Request) {
_, _ = w.Write([]byte("v4.3.2"))
}
func (s *Server) handleWebAPIVersion(w http.ResponseWriter, r *http.Request) {
_, _ = w.Write([]byte("2.7"))
}
func (s *Server) handlePreferences(w http.ResponseWriter, r *http.Request) {
preferences := shared.NewAppPreferences()
preferences.WebUiUsername = s.qbit.Username
preferences.SavePath = s.qbit.DownloadFolder
preferences.TempPath = filepath.Join(s.qbit.DownloadFolder, "temp")
common.JSONResponse(w, preferences, http.StatusOK)
}
func (s *Server) handleBuildInfo(w http.ResponseWriter, r *http.Request) {
res := shared.BuildInfo{
Bitness: 64,
Boost: "1.75.0",
Libtorrent: "1.2.11.0",
Openssl: "1.1.1i",
Qt: "5.15.2",
Zlib: "1.2.11",
}
common.JSONResponse(w, res, http.StatusOK)
}
func (s *Server) shutdown(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(http.StatusOK)
}

View File

@@ -1,7 +0,0 @@
package server
import "net/http"
func (s *Server) handleLogin(w http.ResponseWriter, r *http.Request) {
_, _ = w.Write([]byte("Ok."))
}

View File

@@ -1,14 +1,12 @@
package server package server
import ( import (
"encoding/json"
"errors"
"fmt" "fmt"
"github.com/google/uuid" "github.com/google/uuid"
"goBlack/common" "github.com/sirrobot01/debrid-blackhole/common"
"goBlack/pkg/arr" "github.com/sirrobot01/debrid-blackhole/pkg/arr"
"goBlack/pkg/debrid" "github.com/sirrobot01/debrid-blackhole/pkg/debrid"
"sync" "github.com/sirrobot01/debrid-blackhole/pkg/qbit/shared"
"time" "time"
) )
@@ -64,12 +62,11 @@ func (i *ImportRequest) Complete() {
i.CompletedAt = time.Now() i.CompletedAt = time.Now()
} }
func (i *ImportRequest) Process(s *Server) (err error) { func (i *ImportRequest) Process(q *shared.QBit) (err error) {
// Use this for now. // Use this for now.
// This sends the torrent to the arr // This sends the torrent to the arr
q := s.qbit
magnet, err := common.GetMagnetFromUrl(i.URI) magnet, err := common.GetMagnetFromUrl(i.URI)
torrent := q.CreateTorrentFromMagnet(magnet, i.Arr.Name) torrent := q.CreateTorrentFromMagnet(magnet, i.Arr.Name, "manual")
debridTorrent, err := debrid.ProcessTorrent(q.Debrid, magnet, i.Arr, i.IsSymlink) debridTorrent, err := debrid.ProcessTorrent(q.Debrid, magnet, i.Arr, i.IsSymlink)
if err != nil || debridTorrent == nil { if err != nil || debridTorrent == nil {
if debridTorrent != nil { if debridTorrent != nil {
@@ -85,94 +82,3 @@ func (i *ImportRequest) Process(s *Server) (err error) {
go q.ProcessFiles(torrent, debridTorrent, i.Arr, i.IsSymlink) go q.ProcessFiles(torrent, debridTorrent, i.Arr, i.IsSymlink)
return nil return nil
} }
func (i *ImportRequest) BetaProcess(s *Server) (err error) {
// THis actually imports the torrent into the arr. Needs more work
if i.Arr == nil {
return errors.New("invalid arr")
}
q := s.qbit
magnet, err := common.GetMagnetFromUrl(i.URI)
if err != nil {
return fmt.Errorf("error parsing magnet link: %w", err)
}
debridTorrent, err := debrid.ProcessTorrent(q.Debrid, magnet, i.Arr, true)
if err != nil || debridTorrent == nil {
if debridTorrent != nil {
go debridTorrent.Delete()
}
if err == nil {
err = fmt.Errorf("failed to process torrent")
}
return err
}
debridTorrent.Arr = i.Arr
torrentPath, err := q.ProcessSymlink(debridTorrent)
if err != nil {
return fmt.Errorf("failed to process symlink: %w", err)
}
i.Path = torrentPath
body, err := i.Arr.Import(torrentPath, i.SeriesId, i.Seasons)
if err != nil {
return fmt.Errorf("failed to import: %w", err)
}
defer body.Close()
var resp ManualImportResponseSchema
if err := json.NewDecoder(body).Decode(&resp); err != nil {
return fmt.Errorf("failed to decode response: %w", err)
}
if resp.Status != "success" {
return fmt.Errorf("failed to import: %s", resp.Result)
}
i.Complete()
return
}
type ImportStore struct {
Imports map[string]*ImportRequest
mu sync.RWMutex
}
func NewImportStore() *ImportStore {
return &ImportStore{
Imports: make(map[string]*ImportRequest),
}
}
func (s *ImportStore) AddImport(i *ImportRequest) {
s.mu.Lock()
defer s.mu.Unlock()
s.Imports[i.ID] = i
}
func (s *ImportStore) GetImport(id string) *ImportRequest {
s.mu.RLock()
defer s.mu.RUnlock()
return s.Imports[id]
}
func (s *ImportStore) GetAllImports() []*ImportRequest {
s.mu.RLock()
defer s.mu.RUnlock()
var imports []*ImportRequest
for _, i := range s.Imports {
imports = append(imports, i)
}
return imports
}
func (s *ImportStore) DeleteImport(id string) {
s.mu.Lock()
defer s.mu.Unlock()
delete(s.Imports, id)
}
func (s *ImportStore) UpdateImport(i *ImportRequest) {
s.mu.Lock()
defer s.mu.Unlock()
s.Imports[i.ID] = i
}

View File

@@ -1,84 +0,0 @@
package server
import (
"context"
"encoding/base64"
"github.com/go-chi/chi/v5"
"goBlack/pkg/arr"
"net/http"
"strings"
)
func DecodeAuthHeader(header string) (string, string, error) {
encodedTokens := strings.Split(header, " ")
if len(encodedTokens) != 2 {
return "", "", nil
}
encodedToken := encodedTokens[1]
bytes, err := base64.StdEncoding.DecodeString(encodedToken)
if err != nil {
return "", "", err
}
bearer := string(bytes)
colonIndex := strings.LastIndex(bearer, ":")
host := bearer[:colonIndex]
token := bearer[colonIndex+1:]
return host, token, nil
}
func (s *Server) CategoryContext(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
category := strings.Trim(r.URL.Query().Get("category"), "")
if category == "" {
// Get from form
_ = r.ParseForm()
category = r.Form.Get("category")
if category == "" {
// Get from multipart form
_ = r.ParseMultipartForm(0)
category = r.FormValue("category")
}
}
ctx := r.Context()
ctx = context.WithValue(r.Context(), "category", category)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
func (s *Server) authContext(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
host, token, err := DecodeAuthHeader(r.Header.Get("Authorization"))
category := r.Context().Value("category").(string)
a := &arr.Arr{
Name: category,
}
if err == nil {
a.Host = host
a.Token = token
}
s.qbit.Arrs.AddOrUpdate(a)
ctx := context.WithValue(r.Context(), "arr", a)
next.ServeHTTP(w, r.WithContext(ctx))
})
}
func HashesCtx(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
_hashes := chi.URLParam(r, "hashes")
var hashes []string
if _hashes != "" {
hashes = strings.Split(_hashes, "|")
}
if hashes == nil {
// Get hashes from form
_ = r.ParseForm()
hashes = r.Form["hashes"]
}
ctx := context.WithValue(r.Context(), "hashes", hashes)
next.ServeHTTP(w, r.WithContext(ctx))
})
}

View File

@@ -0,0 +1,382 @@
package server
import (
"context"
"encoding/base64"
"github.com/go-chi/chi/v5"
"github.com/rs/zerolog"
"github.com/sirrobot01/debrid-blackhole/common"
"github.com/sirrobot01/debrid-blackhole/pkg/arr"
"github.com/sirrobot01/debrid-blackhole/pkg/qbit/shared"
"net/http"
"path/filepath"
"strings"
)
// qbitHandler serves the qBittorrent-compatible Web API. It bundles the
// shared QBit core (storage, categories, debrid access), a scoped logger,
// and a debug flag.
type qbitHandler struct {
	qbit   *shared.QBit
	logger zerolog.Logger
	debug  bool
}
// decodeAuthHeader extracts the arr host and API token from a Basic-style
// Authorization header whose payload is base64("host:token"). The host part
// may itself contain colons (e.g. "http://sonarr:8989"), so the LAST colon
// is the separator.
//
// Returns ("", "", nil) for headers that do not match the expected shape
// (missing scheme/payload split, or no ':' separator) and a non-nil error
// only when the base64 payload fails to decode.
//
// Fix: the original indexed bearer[:colonIndex] without checking that a
// colon exists; LastIndex returns -1 for a colon-less payload, which made
// the slice expression panic ("slice bounds out of range").
func decodeAuthHeader(header string) (string, string, error) {
	encodedTokens := strings.Split(header, " ")
	if len(encodedTokens) != 2 {
		return "", "", nil
	}
	encodedToken := encodedTokens[1]
	bytes, err := base64.StdEncoding.DecodeString(encodedToken)
	if err != nil {
		return "", "", err
	}
	bearer := string(bytes)
	colonIndex := strings.LastIndex(bearer, ":")
	if colonIndex < 0 {
		// No "host:token" separator; treat like any other malformed header
		// instead of panicking.
		return "", "", nil
	}
	host := bearer[:colonIndex]
	token := bearer[colonIndex+1:]
	return host, token, nil
}
// CategoryContext is middleware that resolves the torrent category for the
// request — first from the "category" query parameter, then from a URL-encoded
// form field, and finally from a multipart form — and stores the trimmed
// value on the request context under the "category" key.
func (q *qbitHandler) CategoryContext(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		cat := strings.Trim(r.URL.Query().Get("category"), "")
		if cat == "" {
			// Fall back to form data (errors intentionally ignored — an
			// unparsable body simply yields an empty category).
			_ = r.ParseForm()
			cat = r.Form.Get("category")
			if cat == "" {
				_ = r.ParseMultipartForm(32 << 20)
				cat = r.FormValue("category")
			}
		}
		ctx := context.WithValue(r.Context(), "category", strings.TrimSpace(cat))
		next.ServeHTTP(w, r.WithContext(ctx))
	})
}
// authContext is middleware that builds an arr.Arr from the request's
// Authorization header (base64 "host:token" payload) plus the category
// resolved earlier, registers it in the shared Arr storage, and stores it
// on the context under the "arr" key.
//
// NOTE(review): assumes CategoryContext ran first — the "category" context
// value is type-asserted without an ok-check and would panic otherwise.
func (q *qbitHandler) authContext(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		host, token, err := decodeAuthHeader(r.Header.Get("Authorization"))
		category := r.Context().Value("category").(string)
		a := &arr.Arr{
			Name: category,
		}
		// Credentials are attached only when the header decoded cleanly; a
		// failed decode still registers a named, credential-less Arr.
		if err == nil {
			a.Host = strings.TrimSpace(host)
			a.Token = strings.TrimSpace(token)
		}
		q.qbit.Arrs.AddOrUpdate(a)
		ctx := context.WithValue(r.Context(), "arr", a)
		next.ServeHTTP(w, r.WithContext(ctx))
	})
}
// HashesCtx is middleware that gathers torrent info-hashes from either the
// pipe-separated {hashes} URL parameter or, failing that, repeated "hashes"
// form fields. The whitespace-trimmed list is stored on the request context
// under the "hashes" key (nil when none were supplied).
func HashesCtx(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		var hashes []string
		if raw := chi.URLParam(r, "hashes"); raw != "" {
			hashes = strings.Split(raw, "|")
		}
		if hashes == nil {
			// No URL parameter: fall back to form fields.
			_ = r.ParseForm()
			hashes = r.Form["hashes"]
		}
		for idx := range hashes {
			hashes[idx] = strings.TrimSpace(hashes[idx])
		}
		next.ServeHTTP(w, r.WithContext(context.WithValue(r.Context(), "hashes", hashes)))
	})
}
// handleLogin serves the auth/login endpoint. Authentication is not
// enforced here; every attempt is answered with qBittorrent's "Ok." body.
func (q *qbitHandler) handleLogin(w http.ResponseWriter, r *http.Request) {
	_, _ = w.Write([]byte("Ok."))
}

// handleVersion reports a fixed qBittorrent application version so arr
// clients accept this server as a compatible download client.
func (q *qbitHandler) handleVersion(w http.ResponseWriter, r *http.Request) {
	_, _ = w.Write([]byte("v4.3.2"))
}

// handleWebAPIVersion reports the fixed Web API version paired with v4.3.2.
func (q *qbitHandler) handleWebAPIVersion(w http.ResponseWriter, r *http.Request) {
	_, _ = w.Write([]byte("2.7"))
}

// handlePreferences returns application preferences populated from the
// configured username and download folder.
func (q *qbitHandler) handlePreferences(w http.ResponseWriter, r *http.Request) {
	preferences := shared.NewAppPreferences()
	preferences.WebUiUsername = q.qbit.Username
	preferences.SavePath = q.qbit.DownloadFolder
	preferences.TempPath = filepath.Join(q.qbit.DownloadFolder, "temp")
	common.JSONResponse(w, preferences, http.StatusOK)
}

// handleBuildInfo returns static build metadata mimicking a real
// qBittorrent build.
func (q *qbitHandler) handleBuildInfo(w http.ResponseWriter, r *http.Request) {
	res := shared.BuildInfo{
		Bitness:    64,
		Boost:      "1.75.0",
		Libtorrent: "1.2.11.0",
		Openssl:    "1.1.1i",
		Qt:         "5.15.2",
		Zlib:       "1.2.11",
	}
	common.JSONResponse(w, res, http.StatusOK)
}

// shutdown acknowledges the shutdown request without stopping anything —
// the real process lifecycle is managed outside this fake qBittorrent API.
func (q *qbitHandler) shutdown(w http.ResponseWriter, r *http.Request) {
	w.WriteHeader(http.StatusOK)
}
// handleTorrentsInfo lists stored torrents, filtered by the context
// category (set by CategoryContext), the optional "filter" query parameter,
// and the optional hash list (set by HashesCtx).
func (q *qbitHandler) handleTorrentsInfo(w http.ResponseWriter, r *http.Request) {
	//log all url params
	ctx := r.Context()
	category := ctx.Value("category").(string)
	// strings.Trim with an empty cutset is a no-op; value passes through as-is.
	filter := strings.Trim(r.URL.Query().Get("filter"), "")
	hashes, _ := ctx.Value("hashes").([]string)
	torrents := q.qbit.Storage.GetAll(category, filter, hashes)
	common.JSONResponse(w, torrents, http.StatusOK)
}
// handleTorrentsAdd accepts new downloads, either as newline-separated
// magnet/URL entries in the "urls" field or as uploaded .torrent files in
// the multipart "torrents" field. Symlink mode is enabled unless the client
// set sequentialDownload=true; the flag is passed downstream via the
// "isSymlink" context key. Responds 400 if nothing valid was submitted or
// any single add fails.
func (q *qbitHandler) handleTorrentsAdd(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	// Parse form based on content type
	contentType := r.Header.Get("Content-Type")
	if strings.Contains(contentType, "multipart/form-data") {
		// 32 MiB in-memory limit for multipart bodies (torrent files are small).
		if err := r.ParseMultipartForm(32 << 20); err != nil {
			q.logger.Info().Msgf("Error parsing multipart form: %v", err)
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
	} else if strings.Contains(contentType, "application/x-www-form-urlencoded") {
		if err := r.ParseForm(); err != nil {
			q.logger.Info().Msgf("Error parsing form: %v", err)
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
	} else {
		http.Error(w, "Invalid content type", http.StatusBadRequest)
		return
	}

	// qBittorrent's "sequential download" checkbox is repurposed here as an
	// opt-out of symlink mode.
	isSymlink := strings.ToLower(r.FormValue("sequentialDownload")) != "true"
	category := r.FormValue("category")
	atleastOne := false
	ctx = context.WithValue(ctx, "isSymlink", isSymlink)

	// Handle magnet URLs
	if urls := r.FormValue("urls"); urls != "" {
		var urlList []string
		for _, u := range strings.Split(urls, "\n") {
			urlList = append(urlList, strings.TrimSpace(u))
		}
		for _, url := range urlList {
			// First failure aborts the whole request with 400.
			if err := q.qbit.AddMagnet(ctx, url, category); err != nil {
				q.logger.Info().Msgf("Error adding magnet: %v", err)
				http.Error(w, err.Error(), http.StatusBadRequest)
				return
			}
			atleastOne = true
		}
	}

	// Handle torrent files
	if r.MultipartForm != nil && r.MultipartForm.File != nil {
		if files := r.MultipartForm.File["torrents"]; len(files) > 0 {
			for _, fileHeader := range files {
				if err := q.qbit.AddTorrent(ctx, fileHeader, category); err != nil {
					q.logger.Info().Msgf("Error adding torrent: %v", err)
					http.Error(w, err.Error(), http.StatusBadRequest)
					return
				}
				atleastOne = true
			}
		}
	}

	if !atleastOne {
		http.Error(w, "No valid URLs or torrents provided", http.StatusBadRequest)
		return
	}
	w.WriteHeader(http.StatusOK)
}
// handleTorrentsDelete removes every torrent in the context hash list from
// storage. 400 when no hashes were supplied; otherwise always 200, even for
// hashes that were not present.
func (q *qbitHandler) handleTorrentsDelete(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	hashes, _ := ctx.Value("hashes").([]string)
	if len(hashes) == 0 {
		http.Error(w, "No hashes provided", http.StatusBadRequest)
		return
	}
	for _, hash := range hashes {
		q.qbit.Storage.Delete(hash)
	}
	w.WriteHeader(http.StatusOK)
}
// handleTorrentsPause pauses each torrent in the context hash list.
// Unknown hashes are skipped; each pause runs in its own goroutine, so the
// 200 response does not wait for completion.
func (q *qbitHandler) handleTorrentsPause(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	hashes, _ := ctx.Value("hashes").([]string)
	for _, hash := range hashes {
		torrent := q.qbit.Storage.Get(hash)
		if torrent == nil {
			continue
		}
		go q.qbit.PauseTorrent(torrent)
	}
	w.WriteHeader(http.StatusOK)
}

// handleTorrentsResume resumes each torrent in the context hash list,
// mirroring handleTorrentsPause (fire-and-forget, unknown hashes skipped).
func (q *qbitHandler) handleTorrentsResume(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	hashes, _ := ctx.Value("hashes").([]string)
	for _, hash := range hashes {
		torrent := q.qbit.Storage.Get(hash)
		if torrent == nil {
			continue
		}
		go q.qbit.ResumeTorrent(torrent)
	}
	w.WriteHeader(http.StatusOK)
}

// handleTorrentRecheck triggers an asynchronous refresh for each torrent in
// the context hash list; unknown hashes are skipped.
func (q *qbitHandler) handleTorrentRecheck(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	hashes, _ := ctx.Value("hashes").([]string)
	for _, hash := range hashes {
		torrent := q.qbit.Storage.Get(hash)
		if torrent == nil {
			continue
		}
		go q.qbit.RefreshTorrent(torrent)
	}
	w.WriteHeader(http.StatusOK)
}
// handleCategories returns the configured categories as a name-keyed map,
// each with a save path rooted at the download folder — the shape arr
// clients expect from qBittorrent's categories endpoint.
func (q *qbitHandler) handleCategories(w http.ResponseWriter, r *http.Request) {
	var categories = map[string]shared.TorrentCategory{}
	for _, cat := range q.qbit.Categories {
		path := filepath.Join(q.qbit.DownloadFolder, cat)
		categories[cat] = shared.TorrentCategory{
			Name:     cat,
			SavePath: path,
		}
	}
	common.JSONResponse(w, categories, http.StatusOK)
}
// handleCreateCategory registers a new category from the submitted form's
// "category" value. Responds 400 when the form cannot be parsed or the name
// is empty. Re-adding an existing category is a no-op so repeated calls do
// not pollute the Categories slice with duplicates.
func (q *qbitHandler) handleCreateCategory(w http.ResponseWriter, r *http.Request) {
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Failed to parse form data", http.StatusBadRequest)
		return
	}
	name := r.Form.Get("category")
	if name == "" {
		http.Error(w, "No name provided", http.StatusBadRequest)
		return
	}
	// Skip the append when the category is already known; the original code
	// appended unconditionally and accumulated duplicate entries.
	for _, cat := range q.qbit.Categories {
		if cat == name {
			common.JSONResponse(w, nil, http.StatusOK)
			return
		}
	}
	q.qbit.Categories = append(q.qbit.Categories, name)
	common.JSONResponse(w, nil, http.StatusOK)
}
// handleTorrentProperties returns the properties of the torrent identified
// by the "hash" query parameter as JSON. Responds 404 when the hash is
// unknown; the original code passed the nil torrent straight into
// GetTorrentProperties (the sibling files handler already guards this case).
func (q *qbitHandler) handleTorrentProperties(w http.ResponseWriter, r *http.Request) {
	hash := r.URL.Query().Get("hash")
	torrent := q.qbit.Storage.Get(hash)
	if torrent == nil {
		// qBittorrent's WebUI API returns 404 for an unknown torrent hash.
		http.Error(w, "Torrent not found", http.StatusNotFound)
		return
	}
	properties := q.qbit.GetTorrentProperties(torrent)
	common.JSONResponse(w, properties, http.StatusOK)
}
// handleTorrentFiles returns the file list of the torrent identified by the
// "hash" query parameter as JSON. Responds 404 when the hash is unknown;
// the original bare return sent an implicit 200 with an empty body, which
// clients cannot distinguish from a torrent with no files.
func (q *qbitHandler) handleTorrentFiles(w http.ResponseWriter, r *http.Request) {
	hash := r.URL.Query().Get("hash")
	torrent := q.qbit.Storage.Get(hash)
	if torrent == nil {
		// qBittorrent's WebUI API returns 404 for an unknown torrent hash.
		http.Error(w, "Torrent not found", http.StatusNotFound)
		return
	}
	files := q.qbit.GetTorrentFiles(torrent)
	common.JSONResponse(w, files, http.StatusOK)
}
// handleSetCategory assigns the context "category" value to every torrent
// matched by the context "hashes" value and persists each change. Responds
// 400 when no category is present in the context; the original unchecked
// type assertion panicked in that case instead of failing gracefully
// (every other handler already uses the comma-ok form).
func (q *qbitHandler) handleSetCategory(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	category, ok := ctx.Value("category").(string)
	if !ok {
		http.Error(w, "No category provided", http.StatusBadRequest)
		return
	}
	hashes, _ := ctx.Value("hashes").([]string)
	torrents := q.qbit.Storage.GetAll("", "", hashes)
	for _, torrent := range torrents {
		torrent.Category = category
		q.qbit.Storage.AddOrUpdate(torrent)
	}
	common.JSONResponse(w, nil, http.StatusOK)
}
// handleAddTorrentTags applies the comma-separated "tags" form value to
// every torrent matched by the context "hashes" value. Responds 400 when
// the form cannot be parsed.
func (q *qbitHandler) handleAddTorrentTags(w http.ResponseWriter, r *http.Request) {
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Failed to parse form data", http.StatusBadRequest)
		return
	}
	hashes, _ := r.Context().Value("hashes").([]string)
	// Trim whitespace and drop empty entries: strings.Split("", ",") yields
	// [""], so the original code attached a bogus empty tag when the "tags"
	// value was missing or blank.
	var tags []string
	for _, tag := range strings.Split(r.FormValue("tags"), ",") {
		if tag = strings.TrimSpace(tag); tag != "" {
			tags = append(tags, tag)
		}
	}
	for _, t := range q.qbit.Storage.GetAll("", "", hashes) {
		q.qbit.SetTorrentTags(t, tags)
	}
	common.JSONResponse(w, nil, http.StatusOK)
}
// handleRemoveTorrentTags strips the comma-separated "tags" form value from
// every torrent matched by the context "hashes" value. Responds 400 when
// the form cannot be parsed.
func (q *qbitHandler) handleRemoveTorrentTags(w http.ResponseWriter, r *http.Request) {
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Failed to parse form data", http.StatusBadRequest)
		return
	}
	hashes, _ := r.Context().Value("hashes").([]string)
	tags := strings.Split(r.FormValue("tags"), ",")
	for i := range tags {
		tags[i] = strings.TrimSpace(tags[i])
	}
	for _, t := range q.qbit.Storage.GetAll("", "", hashes) {
		q.qbit.RemoveTorrentTags(t, tags)
	}
	common.JSONResponse(w, nil, http.StatusOK)
}
// handleGetTags responds with the full list of known tags as JSON.
func (q *qbitHandler) handleGetTags(w http.ResponseWriter, r *http.Request) {
	common.JSONResponse(w, q.qbit.Tags, http.StatusOK)
}
// handleCreateTags registers the comma-separated "tags" form value as new
// tags. Responds 400 when the form cannot be parsed.
func (q *qbitHandler) handleCreateTags(w http.ResponseWriter, r *http.Request) {
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Failed to parse form data", http.StatusBadRequest)
		return
	}
	// Trim whitespace and drop empty entries: strings.Split("", ",") yields
	// [""], so the original code registered a bogus empty tag when the
	// "tags" value was missing or blank.
	var tags []string
	for _, tag := range strings.Split(r.FormValue("tags"), ",") {
		if tag = strings.TrimSpace(tag); tag != "" {
			tags = append(tags, tag)
		}
	}
	q.qbit.AddTags(tags)
	common.JSONResponse(w, nil, http.StatusOK)
}

View File

@@ -0,0 +1,48 @@
package server
import (
"github.com/go-chi/chi/v5"
"net/http"
)
// Routes mounts the qBittorrent-compatible WebUI API under /api/v2 on the
// given chi router and returns it as an http.Handler. /auth/login is the
// only endpoint outside the authContext middleware; everything else
// requires authentication. Middleware is registered before the routes it
// guards, as chi requires.
func (q *qbitHandler) Routes(r chi.Router) http.Handler {
r.Route("/api/v2", func(r chi.Router) {
//if q.debug {
//	r.Use(middleware.Logger)
//}
// CategoryContext runs on every /api/v2 request, including login.
r.Use(q.CategoryContext)
r.Post("/auth/login", q.handleLogin)
// Authenticated API surface.
r.Group(func(r chi.Router) {
r.Use(q.authContext)
r.Route("/torrents", func(r chi.Router) {
// HashesCtx runs before every /torrents handler; the handlers read
// the parsed hashes from the request context.
r.Use(HashesCtx)
r.Get("/info", q.handleTorrentsInfo)
r.Post("/add", q.handleTorrentsAdd)
r.Post("/delete", q.handleTorrentsDelete)
r.Get("/categories", q.handleCategories)
r.Post("/createCategory", q.handleCreateCategory)
r.Post("/setCategory", q.handleSetCategory)
r.Post("/addTags", q.handleAddTorrentTags)
r.Post("/removeTags", q.handleRemoveTorrentTags)
r.Post("/createTags", q.handleCreateTags)
r.Get("/tags", q.handleGetTags)
r.Get("/pause", q.handleTorrentsPause)
r.Get("/resume", q.handleTorrentsResume)
r.Get("/recheck", q.handleTorrentRecheck)
r.Get("/properties", q.handleTorrentProperties)
r.Get("/files", q.handleTorrentFiles)
})
// Application metadata endpoints mirrored from the qBittorrent API.
r.Route("/app", func(r chi.Router) {
r.Get("/version", q.handleVersion)
r.Get("/webapiVersion", q.handleWebAPIVersion)
r.Get("/preferences", q.handlePreferences)
r.Get("/buildInfo", q.handleBuildInfo)
r.Get("/shutdown", q.shutdown)
})
})
})
return r
}

View File

@@ -1,51 +0,0 @@
package server
import (
"github.com/go-chi/chi/v5"
"net/http"
)
func (s *Server) Routes(r chi.Router) http.Handler {
r.Route("/api/v2", func(r chi.Router) {
r.Use(s.CategoryContext)
r.Post("/auth/login", s.handleLogin)
r.Group(func(r chi.Router) {
r.Use(s.authContext)
r.Route("/torrents", func(r chi.Router) {
r.Use(HashesCtx)
r.Get("/info", s.handleTorrentsInfo)
r.Post("/add", s.handleTorrentsAdd)
r.Post("/delete", s.handleTorrentsDelete)
r.Get("/categories", s.handleCategories)
r.Post("/createCategory", s.handleCreateCategory)
r.Get("/pause", s.handleTorrentsPause)
r.Get("/resume", s.handleTorrentsResume)
r.Get("/recheck", s.handleTorrentRecheck)
r.Get("/properties", s.handleTorrentProperties)
r.Get("/files", s.handleTorrentFiles)
})
r.Route("/app", func(r chi.Router) {
r.Get("/version", s.handleVersion)
r.Get("/webapiVersion", s.handleWebAPIVersion)
r.Get("/preferences", s.handlePreferences)
r.Get("/buildInfo", s.handleBuildInfo)
r.Get("/shutdown", s.shutdown)
})
})
})
r.Get("/", s.handleHome)
r.Route("/internal", func(r chi.Router) {
r.Get("/arrs", s.handleGetArrs)
r.Get("/content", s.handleContent)
r.Get("/seasons/{contentId}", s.handleSeasons)
r.Get("/episodes/{contentId}", s.handleEpisodes)
r.Post("/add", s.handleAddContent)
r.Get("/search", s.handleSearch)
r.Get("/cached", s.handleCheckCached)
})
return r
}

View File

@@ -6,10 +6,12 @@ import (
"fmt" "fmt"
"github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware" "github.com/go-chi/chi/v5/middleware"
"goBlack/common" "github.com/rs/zerolog"
"goBlack/pkg/debrid" "github.com/sirrobot01/debrid-blackhole/common"
"goBlack/pkg/qbit/shared" "github.com/sirrobot01/debrid-blackhole/pkg/arr"
"log" "github.com/sirrobot01/debrid-blackhole/pkg/debrid"
"github.com/sirrobot01/debrid-blackhole/pkg/qbit/shared"
"io"
"net/http" "net/http"
"os" "os"
"os/signal" "os/signal"
@@ -18,32 +20,35 @@ import (
type Server struct { type Server struct {
qbit *shared.QBit qbit *shared.QBit
logger *log.Logger logger zerolog.Logger
debug bool
} }
func NewServer(config *common.Config, deb *debrid.DebridService) *Server { func NewServer(config *common.Config, deb *debrid.DebridService, arrs *arr.Storage) *Server {
logger := common.NewLogger("QBit", os.Stdout) logger := common.NewLogger("QBit", config.QBitTorrent.LogLevel, os.Stdout)
q := shared.NewQBit(config, deb, logger) q := shared.NewQBit(config, deb, logger, arrs)
return &Server{ return &Server{
qbit: q, qbit: q,
logger: logger, logger: logger,
debug: config.QBitTorrent.Debug,
} }
} }
func (s *Server) Start(ctx context.Context) error { func (s *Server) Start(ctx context.Context) error {
r := chi.NewRouter() r := chi.NewRouter()
if s.debug {
r.Use(middleware.Logger)
}
r.Use(middleware.Recoverer) r.Use(middleware.Recoverer)
r.Handle("/static/*", http.StripPrefix("/static/", http.FileServer(http.Dir("static")))) r.Handle("/static/*", http.StripPrefix("/static/", http.FileServer(http.Dir("static"))))
s.Routes(r) logLevel := s.logger.GetLevel().String()
debug := logLevel == "debug"
q := qbitHandler{qbit: s.qbit, logger: s.logger, debug: debug}
ui := uiHandler{qbit: s.qbit, logger: common.NewLogger("UI", s.logger.GetLevel().String(), os.Stdout), debug: debug}
// Register routes
r.Get("/logs", s.GetLogs)
q.Routes(r)
ui.Routes(r)
go s.qbit.StartWorker(context.Background()) go s.qbit.StartWorker(context.Background())
s.logger.Printf("Starting QBit server on :%s", s.qbit.Port) s.logger.Info().Msgf("Starting QBit server on :%s", s.qbit.Port)
port := fmt.Sprintf(":%s", s.qbit.Port) port := fmt.Sprintf(":%s", s.qbit.Port)
srv := &http.Server{ srv := &http.Server{
Addr: port, Addr: port,
@@ -55,12 +60,38 @@ func (s *Server) Start(ctx context.Context) error {
go func() { go func() {
if err := srv.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) { if err := srv.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {
fmt.Printf("Error starting server: %v\n", err) s.logger.Info().Msgf("Error starting server: %v", err)
stop() stop()
} }
}() }()
<-ctx.Done() <-ctx.Done()
fmt.Println("Shutting down gracefully...") s.logger.Info().Msg("Shutting down gracefully...")
return srv.Shutdown(context.Background()) return srv.Shutdown(context.Background())
} }
// GetLogs streams the application log file to the client as plain text,
// with caching disabled. Responds 500 only when the file cannot be opened;
// once streaming has begun the response cannot be changed.
func (s *Server) GetLogs(w http.ResponseWriter, r *http.Request) {
	logFile := common.GetLogPath()
	// Open and read the file
	file, err := os.Open(logFile)
	if err != nil {
		http.Error(w, "Error reading log file", http.StatusInternalServerError)
		return
	}
	defer file.Close()
	// Headers must be set before the first body write.
	w.Header().Set("Content-Type", "text/plain; charset=utf-8")
	w.Header().Set("Content-Disposition", "inline; filename=application.log")
	w.Header().Set("Cache-Control", "no-cache, no-store, must-revalidate")
	w.Header().Set("Pragma", "no-cache")
	w.Header().Set("Expires", "0")
	// Stream the file. If io.Copy fails part-way, the status line and headers
	// are already on the wire, so calling http.Error here (as the original
	// did) cannot reach the client and only logs a superfluous WriteHeader;
	// the best we can do is abort the copy.
	if _, err := io.Copy(w, file); err != nil {
		return
	}
}

View File

@@ -1,334 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Debrid Manager</title>
<!-- Bootstrap CSS -->
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha1/dist/css/bootstrap.min.css" rel="stylesheet">
<!-- Bootstrap Icons -->
<link href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.7.2/font/bootstrap-icons.css" rel="stylesheet">
<link href="https://cdn.jsdelivr.net/npm/select2@4.1.0-rc.0/dist/css/select2.min.css" rel="stylesheet"/>
<!-- Select2 Bootstrap 5 Theme CSS -->
<link rel="stylesheet"
href="https://cdn.jsdelivr.net/npm/select2-bootstrap-5-theme@1.3.0/dist/select2-bootstrap-5-theme.min.css"/>
<style>
.select2-container--bootstrap-5 .select2-results__option {
padding: 0.5rem;
}
.select2-result img {
border-radius: 4px;
}
.select2-container--bootstrap-5 .select2-results__option--highlighted {
background-color: #f8f9fa !important;
color: #000 !important;
}
.select2-container--bootstrap-5 .select2-results__option--selected {
background-color: #e9ecef !important;
}
</style>
</head>
<body>
<nav class="navbar navbar-expand-lg navbar-dark bg-dark">
<div class="container-fluid">
<span class="navbar-brand">Debrid Manager</span>
</div>
</nav>
<div class="container mt-4">
<div class="row">
<div class="col-md-8">
<div class="mb-3">
<label for="magnetURI" class="form-label">Magnet Link</label>
<textarea class="form-control" id="magnetURI" rows="3"></textarea>
</div>
<div class="mb-3">
<label for="selectArr" class="form-label">Enter Category</label>
<input type="email" class="form-control" id="selectArr" placeholder="Enter Category(e.g sonarr, radarr, radarr4k)">
</div>
<div class="form-check">
<input class="form-check-input" type="checkbox" value="" id="isSymlink">
<label class="form-check-label" for="isSymlink">
Not Symlink(Download real files instead of symlinks from Debrid)
</label>
</div>
<div class="mt-3">
<button class="btn btn-primary" id="addToArr">
Add to Arr
</button>
</div>
</div>
<!-- <div class="col-md-6">-->
<!-- <div class="mb-3 d-none">-->
<!-- <select class="form-select mb-3 select2-ajax" id="selectContent">-->
<!-- <option></option>-->
<!-- </select>-->
<!-- </div>-->
<!-- <div class="mb-3 d-none">-->
<!-- <select class="form-select mb-3 select2-multi" id="selectSeason" multiple-->
<!-- style="width: 100%; display: none;">-->
<!-- <option value="all">Select All</option>-->
<!-- </select>-->
<!-- </div>-->
<!-- <div class="mb-4 d-none">-->
<!-- <select class="form-select mb-3 select2-multi" id="selectEpisode" multiple-->
<!-- style="width: 100%; display: none;">-->
<!-- <option value="all">Select All</option>-->
<!-- </select>-->
<!-- </div>-->
<!-- </div>-->
</div>
</div>
<!-- Bootstrap JS and Popper.js -->
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha1/dist/js/bootstrap.bundle.min.js"></script>
<!-- jQuery -->
<script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/select2@4.1.0-rc.0/dist/js/select2.min.js"></script>
<script>
$(document).ready(function () {
let $selectArr = $('#selectArr');
let $selectContent = $('#selectContent');
let $selectSeason = $('#selectSeason');
let $selectEpisode = $('#selectEpisode');
let $addBtn = $('#addToArr');
const $contentSearch = $('#contentSearch');
const $searchResults = $('#searchResults');
let isSonarr = true;
let searchTimeout;
let selectedArr, selectedContent, selectedSeasons, selectedEpisodes;
// Initially show only selectArr, hide others
$selectSeason.hide().closest('.mb-3').hide();
$selectEpisode.hide().closest('.mb-3').hide();
// Initialize Select2
$('.select2-multi').select2({
theme: 'bootstrap-5',
width: '100%',
placeholder: 'Select options',
allowClear: true
});
// Also hide the Select2 containers
$('.select2-container--bootstrap-5').hide();
$selectContent.select2({
theme: 'bootstrap-5',
width: '100%',
placeholder: 'Search shows/movies...',
allowClear: true,
minimumInputLength: 2,
ajax: {
url: '/internal/search',
dataType: 'json',
delay: 250,
data: function (params) {
return {
term: params.term
};
},
processResults: function (data) {
return {
results: data.map(function (item) {
return {
id: item.id,
text: item.media_type === 'movie' ? item.title : item.name,
media_type: item.media_type,
poster: item.poster_path ?
'https://image.tmdb.org/t/p/w92' + item.poster_path : null,
year: item.media_type === 'movie' ?
(item.release_date ? item.release_date.substring(0, 4) : '') :
(item.first_air_date ? item.first_air_date.substring(0, 4) : '')
};
})
};
},
cache: true
},
templateResult: formatResult,
templateSelection: formatSelection
});
function formatResult(item) {
if (!item.id) return item.text;
return $(`
<div class="select2-result d-flex align-items-center gap-2">
${item.poster ?
`<img src="${item.poster}" style="width: 45px; height: 68px; object-fit: cover;">` :
'<div style="width: 45px; height: 68px; background: #eee;"></div>'
}
<div>
<div class="fw-bold">${item.text}</div>
<small class="text-muted">
${item.year}${item.media_type === 'movie' ? 'Movie' : 'TV Series'}
</small>
</div>
</div>
`);
}
function formatSelection(item) {
if (!item.id) return item.text;
return item.text + (item.year ? ` (${item.year})` : '');
}
// Handle selection
$selectContent.on('select2:select', function (e) {
selectedContent = e.params.data.id;
const mediaType = e.params.data.media_type;
if (mediaType === 'tv') {
$selectSeason.show().closest('.mb-3').show();
$selectSeason.next('.select2-container--bootstrap-5').show();
// Fetch seasons (your existing seasons fetch code)
fetch(`/internal/seasons/${selectedContent}`)
.then(response => response.json())
.then(seasons => {
$selectSeason.empty().append('<option value="all">Select All</option>');
seasons.forEach(season => {
$selectSeason.append(`<option value="${season}">Season ${season}</option>`);
});
$selectSeason.trigger('change.select2');
})
.catch(error => console.error('Error fetching seasons:', error));
} else {
// For movies, show the Add to Arr button directly
$selectSeason.hide().closest('.mb-3').hide();
$selectSeason.next('.select2-container--bootstrap-5').hide();
$selectEpisode.hide().closest('.mb-3').hide();
$selectEpisode.next('.select2-container--bootstrap-5').hide();
$addBtn.show();
}
});
// Fetch Arrs
function fetchArrs() {
fetch('/internal/arrs')
.then(response => response.json())
.then(arrs => {
$selectArr.empty().append('<option value="">Select Arr</option>');
arrs.forEach(arr => {
$selectArr.append(`<option value="${arr.name}">${arr.name}</option>`);
});
})
.catch(error => console.error('Error fetching arrs:', error));
}
// Handle content selection
$selectContent.change(function () {
selectedContent = $(this).val();
selectedArr = $selectArr.val();
if (!selectedContent) {
$selectSeason.hide().closest('.mb-3').hide();
$selectSeason.next('.select2-container--bootstrap-5').hide();
$selectEpisode.hide().closest('.mb-3').hide();
$selectEpisode.next('.select2-container--bootstrap-5').hide();
return;
}
if (isSonarr) {
$selectSeason.show().closest('.mb-3').show();
$selectSeason.next('.select2-container--bootstrap-5').show();
// Fetch seasons
fetch(`/internal/seasons/${selectedContent}`)
.then(response => response.json())
.then(seasons => {
$selectSeason.empty().append('<option value="all">Select All</option>');
seasons.forEach(season => {
$selectSeason.append(`<option value="${season}">Season ${season}</option>`);
});
$selectSeason.trigger('change.select2');
})
.catch(error => console.error('Error fetching seasons:', error));
} else {
// For Radarr, show the Add to Arr button directly
$selectSeason.hide().closest('.mb-3').hide();
$selectSeason.next('.select2-container--bootstrap-5').hide();
$selectEpisode.hide().closest('.mb-3').hide();
$selectEpisode.next('.select2-container--bootstrap-5').hide();
$addBtn.show();
}
});
// Handle season selection
$selectSeason.change(function () {
selectedSeasons = $(this).val();
console.log('Selected seasons:', selectedSeasons);
if (!selectedSeasons || selectedSeasons.includes('all')) {
$selectEpisode.hide().closest('.mb-3').hide();
$selectEpisode.next('.select2-container--bootstrap-5').hide();
$addBtn.show();
return;
}
$selectEpisode.show().closest('.mb-3').show();
$selectEpisode.next('.select2-container--bootstrap-5').show();
fetch(`/internal/episodes/${selectedContent}?seasons=${selectedSeasons.join(',')}`)
.then(response => response.json())
.then(episodes => {
$selectEpisode.empty().append('<option value="all">Select All</option>');
episodes.forEach(episode => {
$selectEpisode.append(`<option value="${episode}">Episode ${episode}</option>`);
});
$selectEpisode.trigger('change.select2');
})
.catch(error => console.error('Error fetching episodes:', error));
$addBtn.show();
});
$addBtn.click(function () {
let oldText = $(this).text();
$(this).prop('disabled', true).prepend('<span class="spinner-border spinner-border-sm me-2" role="status" aria-hidden="true"></span>');
let magnet = $('#magnetURI').val();
if (!magnet) {
$(this).prop('disabled', false).text(oldText);
alert('Please provide a magnet link or upload a torrent file!');
return;
}
let data = {
arr: $selectArr.val(),
url: magnet,
notSymlink: $('#isSymlink').is(':checked'),
};
console.log('Adding to Arr:', data);
fetch('/internal/add', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify(data)
})
.then(async response => {
if (!response.ok) {
const errorText = await response.text();
throw new Error(errorText);
}
return response.json();
})
.then(result => {
console.log('Added to Arr:', result);
$(this).prop('disabled', false).text(oldText);
alert('Added to Arr successfully!');
})
.catch(error => {
$(this).prop('disabled', false).text(oldText);
alert(`Error adding to Arr: ${error.message || error}`);
});
});
// Initial fetch of Arrs
//fetchArrs();
});
</script>
</body>
</html>

View File

@@ -0,0 +1,310 @@
{{ define "config" }}
<!-- Configuration viewer. Most inputs are rendered disabled (read-only);
     disabled fields are excluded from FormData, so only editable fields
     (e.g. qbit.refresh_interval) are submitted. -->
<div class="container mt-4">
  <div class="card">
    <div class="card-header">
      <h4 class="mb-0"><i class="bi bi-gear me-2"></i>Configuration</h4>
    </div>
    <div class="card-body">
      <form id="configForm">
        <div class="section mb-5">
          <h5 class="border-bottom pb-2">General Configuration</h5>
          <div class="row">
            <div class="col-md-6">
              <div class="form-group">
                <!-- Fixed: this field previously used name="qbit.log_level",
                     colliding with the QBitTorrent section's select of the same
                     name, and its label pointed at id="qbitDebug" instead of
                     this element. It represents the top-level log_level. -->
                <label for="log-level">Log Level</label>
                <select class="form-select" name="log_level" id="log-level" disabled>
                  <option value="info">Info</option>
                  <option value="debug">Debug</option>
                  <option value="warn">Warning</option>
                  <option value="error">Error</option>
                  <option value="trace">Trace</option>
                </select>
              </div>
            </div>
          </div>
        </div>
        <!-- Debrid Configuration -->
        <div class="section mb-5">
          <h5 class="border-bottom pb-2">Debrid Configuration</h5>
          <div id="debridConfigs"></div>
        </div>
        <!-- QBitTorrent Configuration -->
        <div class="section mb-5">
          <h5 class="border-bottom pb-2">QBitTorrent Configuration</h5>
          <div class="row">
            <div class="col-md-6 mb-3">
              <label class="form-label">Username</label>
              <input type="text" disabled class="form-control" name="qbit.username">
            </div>
            <div class="col-md-6 mb-3">
              <label class="form-label">Password</label>
              <input type="password" disabled class="form-control" name="qbit.password">
            </div>
            <div class="col-md-6 mb-3">
              <label class="form-label">Port</label>
              <input type="text" disabled class="form-control" name="qbit.port">
            </div>
            <div class="col-md-6 mb-3">
              <label class="form-label">Symlink/Download Folder</label>
              <input type="text" disabled class="form-control" name="qbit.download_folder">
            </div>
            <div class="col-md-6 mb-3">
              <label class="form-label">Refresh Interval (seconds)</label>
              <input type="number" class="form-control" name="qbit.refresh_interval">
            </div>
            <div class="col-12 mb-3">
              <div class="form-group">
                <label for="qbitDebug">Log Level</label>
                <select class="form-select" name="qbit.log_level" id="qbitDebug" disabled>
                  <option value="info">Info</option>
                  <option value="debug">Debug</option>
                  <option value="warn">Warning</option>
                  <option value="error">Error</option>
                  <option value="trace">Trace</option>
                </select>
              </div>
            </div>
          </div>
        </div>
        <!-- Arr Configurations -->
        <div class="section mb-5">
          <h5 class="border-bottom pb-2">Arr Configurations</h5>
          <div id="arrConfigs"></div>
        </div>
        <!-- Repair Configuration -->
        <div class="section">
          <h5 class="border-bottom pb-2">Repair Configuration</h5>
          <div class="row">
            <div class="col-md-6 mb-3">
              <label class="form-label">Interval</label>
              <input type="text" disabled class="form-control" name="repair.interval" placeholder="e.g., 24h">
            </div>
            <div class="col-12">
              <div class="form-check mb-2">
                <input type="checkbox" disabled class="form-check-input" name="repair.enabled" id="repairEnabled">
                <label class="form-check-label" for="repairEnabled">Enable Repair</label>
              </div>
              <div class="form-check">
                <input type="checkbox" disabled class="form-check-input" name="repair.run_on_start" id="repairOnStart">
                <label class="form-check-label" for="repairOnStart">Run on Start</label>
              </div>
            </div>
          </div>
        </div>
      </form>
    </div>
  </div>
</div>
<script>
  // Templates for dynamic elements
  const debridTemplate = (index) => `
    <div class="config-item position-relative mb-3 p-3 border rounded">
      <div class="row">
        <div class="col-md-6 mb-3">
          <label class="form-label">Name</label>
          <input type="text" disabled class="form-control" name="debrid[${index}].name" required>
        </div>
        <div class="col-md-6 mb-3">
          <label class="form-label">Host</label>
          <input type="text" disabled class="form-control" name="debrid[${index}].host" required>
        </div>
        <div class="col-md-6 mb-3">
          <label class="form-label">API Key</label>
          <input type="password" disabled class="form-control" name="debrid[${index}].api_key" required>
        </div>
        <div class="col-md-6 mb-3">
          <label class="form-label">Mount Folder</label>
          <input type="text" disabled class="form-control" name="debrid[${index}].folder">
        </div>
        <div class="col-md-6 mb-3">
          <label class="form-label">Rate Limit</label>
          <input type="text" disabled class="form-control" name="debrid[${index}].rate_limit" placeholder="e.g., 200/minute">
        </div>
        <div class="col-12">
          <div class="form-check me-3 d-inline-block">
            <input type="checkbox" disabled class="form-check-input" name="debrid[${index}].download_uncached">
            <label class="form-check-label">Download Uncached</label>
          </div>
          <div class="form-check d-inline-block">
            <input type="checkbox" disabled class="form-check-input" name="debrid[${index}].check_cached">
            <label class="form-check-label">Check Cached</label>
          </div>
        </div>
      </div>
    </div>
  `;
  const arrTemplate = (index) => `
    <div class="config-item position-relative mb-3 p-3 border rounded">
      <div class="row">
        <div class="col-md-4 mb-3">
          <label class="form-label">Name</label>
          <input type="text" disabled class="form-control" name="arr[${index}].name" required>
        </div>
        <div class="col-md-4 mb-3">
          <label class="form-label">Host</label>
          <input type="text" disabled class="form-control" name="arr[${index}].host" required>
        </div>
        <div class="col-md-4 mb-3">
          <label class="form-label">API Token</label>
          <input type="password" disabled class="form-control" name="arr[${index}].token" required>
        </div>
      </div>
    </div>
  `;
  // Main functionality
  document.addEventListener('DOMContentLoaded', function() {
    let debridCount = 0;
    let arrCount = 0;
    // Load existing configuration
    fetch('/internal/config')
      .then(response => response.json())
      .then(config => {
        // Load Debrid configs
        config.debrids?.forEach(debrid => {
          addDebridConfig(debrid);
        });
        // Load QBitTorrent config
        if (config.qbittorrent) {
          Object.entries(config.qbittorrent).forEach(([key, value]) => {
            const input = document.querySelector(`[name="qbit.${key}"]`);
            if (input) {
              if (input.type === 'checkbox') {
                input.checked = value;
              } else {
                input.value = value;
              }
            }
          });
        }
        // Load Arr configs
        config.arrs?.forEach(arr => {
          addArrConfig(arr);
        });
        // Load Repair config
        if (config.repair) {
          Object.entries(config.repair).forEach(([key, value]) => {
            const input = document.querySelector(`[name="repair.${key}"]`);
            if (input) {
              if (input.type === 'checkbox') {
                input.checked = value;
              } else {
                input.value = value;
              }
            }
          });
        }
        // Load general config
        const logLevel = document.getElementById('log-level');
        logLevel.value = config.log_level;
      });
    // Handle form submission
    document.getElementById('configForm').addEventListener('submit', async (e) => {
      e.preventDefault();
      const formData = new FormData(e.target);
      const config = {
        debrids: [],
        qbittorrent: {},
        arrs: [],
        repair: {}
      };
      // Process form data
      for (let [key, value] of formData.entries()) {
        if (key.startsWith('debrid[')) {
          const match = key.match(/debrid\[(\d+)\]\.(.+)/);
          if (match) {
            const [_, index, field] = match;
            if (!config.debrids[index]) config.debrids[index] = {};
            config.debrids[index][field] = value;
          }
        } else if (key.startsWith('qbit.')) {
          config.qbittorrent[key.replace('qbit.', '')] = value;
        } else if (key.startsWith('arr[')) {
          const match = key.match(/arr\[(\d+)\]\.(.+)/);
          if (match) {
            const [_, index, field] = match;
            if (!config.arrs[index]) config.arrs[index] = {};
            config.arrs[index][field] = value;
          }
        } else if (key.startsWith('repair.')) {
          config.repair[key.replace('repair.', '')] = value;
        } else if (key === 'log_level') {
          // Top-level log level; previously mis-named "qbit.log_level" and
          // silently folded into the qbittorrent section.
          config.log_level = value;
        }
      }
      // Clean up arrays (remove empty entries)
      config.debrids = config.debrids.filter(Boolean);
      config.arrs = config.arrs.filter(Boolean);
      try {
        const response = await fetch('/internal/config', {
          method: 'POST',
          headers: {
            'Content-Type': 'application/json'
          },
          body: JSON.stringify(config)
        });
        if (!response.ok) throw new Error(await response.text());
        alert('Configuration saved successfully!');
      } catch (error) {
        alert(`Error saving configuration: ${error.message}`);
      }
    });
    // Helper functions
    function addDebridConfig(data = {}) {
      const container = document.getElementById('debridConfigs');
      container.insertAdjacentHTML('beforeend', debridTemplate(debridCount));
      if (data) {
        Object.entries(data).forEach(([key, value]) => {
          const input = container.querySelector(`[name="debrid[${debridCount}].${key}"]`);
          if (input) {
            if (input.type === 'checkbox') {
              input.checked = value;
            } else {
              input.value = value;
            }
          }
        });
      }
      debridCount++;
    }
    function addArrConfig(data = {}) {
      const container = document.getElementById('arrConfigs');
      container.insertAdjacentHTML('beforeend', arrTemplate(arrCount));
      if (data) {
        Object.entries(data).forEach(([key, value]) => {
          const input = container.querySelector(`[name="arr[${arrCount}].${key}"]`);
          if (input) {
            if (input.type === 'checkbox') {
              input.checked = value;
            } else {
              input.value = value;
            }
          }
        });
      }
      arrCount++;
    }
  });
</script>
{{ end }}

View File

@@ -0,0 +1,74 @@
{{ define "download" }}
<!-- Download page: form for submitting a magnet link / torrent URL plus a
     category to the internal add endpoint. -->
<div class="container mt-4">
<div class="card">
<div class="card-header">
<h4 class="mb-0"><i class="bi bi-cloud-download me-2"></i>Add New Download</h4>
</div>
<div class="card-body">
<form id="downloadForm">
<div class="mb-3">
<label for="magnetURI" class="form-label">Magnet Link or Torrent URL</label>
<textarea class="form-control" id="magnetURI" rows="3" placeholder="Paste your magnet link here..."></textarea>
</div>
<div class="mb-3">
<label for="category" class="form-label">Enter Category</label>
<input type="text" class="form-control" id="category" placeholder="Enter Category(e.g sonarr, radarr, radarr4k)">
</div>
<div class="mb-3">
<!-- Checked state is sent as "notSymlink" in the request payload. -->
<div class="form-check">
<input class="form-check-input" type="checkbox" id="isSymlink">
<label class="form-check-label" for="isSymlink">
Download real files instead of symlinks
</label>
</div>
</div>
<button type="submit" class="btn btn-primary" id="submitDownload">
<i class="bi bi-cloud-upload me-2"></i>Add to Download Queue
</button>
</form>
</div>
</div>
</div>
<script>
document.addEventListener('DOMContentLoaded', () => {
// Handle form submission
document.getElementById('downloadForm').addEventListener('submit', async (e) => {
e.preventDefault();
const submitBtn = document.getElementById('submitDownload');
const originalText = submitBtn.innerHTML;
// Disable the button and show a spinner while the request is in flight.
submitBtn.disabled = true;
submitBtn.innerHTML = '<span class="spinner-border spinner-border-sm me-2"></span>Adding...';
try {
// POST the magnet/URL, category and symlink preference as JSON.
const response = await fetch('/internal/add', {
method: 'POST',
headers: {
'Content-Type': 'application/json'
},
body: JSON.stringify({
url: document.getElementById('magnetURI').value,
arr: document.getElementById('category').value,
notSymlink: document.getElementById('isSymlink').checked
})
});
if (!response.ok) throw new Error(await response.text());
alert('Download added successfully!');
document.getElementById('magnetURI').value = '';
} catch (error) {
alert(`Error adding download: ${error.message}`);
} finally {
// Restore the button whether the request succeeded or failed.
submitBtn.disabled = false;
submitBtn.innerHTML = originalText;
}
});
});
</script>
{{ end }}

View File

@@ -0,0 +1,149 @@
{{ define "index" }}
<div class="container mt-4">
<div class="card">
<div class="card-header d-flex justify-content-between align-items-center">
<h4 class="mb-0"><i class="bi bi-table me-2"></i>Active Torrents</h4>
<div>
<button class="btn btn-outline-secondary btn-sm me-2" id="refreshBtn">
<i class="bi bi-arrow-clockwise me-1"></i>Refresh
</button>
<select class="form-select form-select-sm d-inline-block w-auto" id="categoryFilter">
<option value="">All Categories</option>
</select>
</div>
</div>
<div class="card-body p-0">
<div class="table-responsive">
<table class="table table-hover mb-0">
<thead>
<tr>
<th>Name</th>
<th>Size</th>
<th>Progress</th>
<th>Speed</th>
<th>Category</th>
<th>Debrid</th>
<th>State</th>
<th>Actions</th>
</tr>
</thead>
<tbody id="torrentsList">
</tbody>
</table>
</div>
</div>
</div>
</div>
<script>
const torrentRowTemplate = (torrent) => `
<tr>
<td class="text-break">${torrent.name}</td>
<td>${formatBytes(torrent.size)}</td>
<td style="min-width: 150px;">
<div class="progress" style="height: 8px;">
<div class="progress-bar" role="progressbar"
style="width: ${(torrent.progress * 100).toFixed(1)}%"
aria-valuenow="${(torrent.progress * 100).toFixed(1)}"
aria-valuemin="0"
aria-valuemax="100"></div>
</div>
<small class="text-muted">${(torrent.progress * 100).toFixed(1)}%</small>
</td>
<td>${formatSpeed(torrent.dlspeed)}</td>
<td><span class="badge bg-secondary">${torrent.category || 'None'}</span></td>
<td>${torrent.debrid || 'None'}</td>
<td><span class="badge ${getStateColor(torrent.state)}">${torrent.state}</span></td>
<td>
<button class="btn btn-sm btn-outline-danger" onclick="deleteTorrent('${torrent.hash}')">
<i class="bi bi-trash"></i>
</button>
</td>
</tr>
`;
function formatBytes(bytes) {
if (!bytes) return '0 B';
const k = 1024;
const sizes = ['B', 'KB', 'MB', 'GB', 'TB'];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return `${parseFloat((bytes / Math.pow(k, i)).toFixed(2))} ${sizes[i]}`;
}
function formatSpeed(speed) {
return `${formatBytes(speed)}/s`;
}
function getStateColor(state) {
const stateColors = {
'downloading': 'bg-primary',
'pausedup': 'bg-success',
'error': 'bg-danger',
};
return stateColors[state?.toLowerCase()] || 'bg-secondary';
}
let refreshInterval;
// Fetch the torrent list, rebuild the table body, re-apply the currently
// selected category filter to the fresh rows, then refresh the filter's
// option list. fetch() resolves on HTTP errors, so response.ok is checked
// explicitly — otherwise an error page would be parsed as JSON.
async function loadTorrents() {
    try {
        const response = await fetch('/internal/torrents');
        if (!response.ok) throw new Error(`HTTP ${response.status}`);
        const torrents = await response.json();
        const tbody = document.getElementById('torrentsList');
        tbody.innerHTML = torrents.map(torrent => torrentRowTemplate(torrent)).join('');
        // Re-apply the active category filter so refreshes don't unhide rows.
        const category = document.getElementById('categoryFilter').value;
        document.querySelectorAll('#torrentsList tr').forEach(row => {
            const rowCategory = row.querySelector('td:nth-child(5)').textContent;
            row.style.display = (!category || rowCategory.includes(category)) ? '' : 'none';
        });
        updateCategoryFilter(torrents);
    } catch (error) {
        console.error('Error loading torrents:', error);
    }
}
// Rebuild the category <select> from the categories present in the torrent
// list, preserving the current selection. Category names are escaped before
// being written into option markup to prevent injection via innerHTML.
function updateCategoryFilter(torrents) {
    const esc = (s) => String(s).replace(/&/g, '&amp;').replace(/</g, '&lt;')
        .replace(/>/g, '&gt;').replace(/"/g, '&quot;');
    const categories = [...new Set(torrents.map(t => t.category).filter(Boolean))];
    const select = document.getElementById('categoryFilter');
    const currentValue = select.value;
    select.innerHTML = '<option value="">All Categories</option>' +
        categories.map(cat => `<option value="${esc(cat)}" ${cat === currentValue ? 'selected' : ''}>${esc(cat)}</option>`).join('');
}
// Ask for confirmation, delete the torrent on the server, then reload the
// table. fetch() only rejects on network failure, so a non-2xx status is
// turned into an error explicitly — previously a failed DELETE was silently
// treated as success.
async function deleteTorrent(hash) {
    if (!confirm('Are you sure you want to delete this torrent?')) return;
    try {
        const response = await fetch(`/internal/torrents/${hash}`, {
            method: 'DELETE'
        });
        if (!response.ok) throw new Error(`HTTP ${response.status}`);
        await loadTorrents();
    } catch (error) {
        console.error('Error deleting torrent:', error);
        alert('Failed to delete torrent');
    }
}
// Wire the page up once the DOM is ready: initial load, 5-second polling,
// manual refresh button, and live category filtering of the table rows.
document.addEventListener('DOMContentLoaded', () => {
    loadTorrents();
    refreshInterval = setInterval(loadTorrents, 5000); // Refresh every 5 seconds
    document.getElementById('refreshBtn').addEventListener('click', loadTorrents);
    document.getElementById('categoryFilter').addEventListener('change', (e) => {
        const selected = e.target.value;
        for (const row of document.querySelectorAll('#torrentsList tr')) {
            const rowCategory = row.querySelector('td:nth-child(5)').textContent;
            row.style.display = (!selected || rowCategory.includes(selected)) ? '' : 'none';
        }
    });
});
// Stop the polling timer when the page is being torn down.
window.addEventListener('beforeunload', () => {
    clearInterval(refreshInterval);
});
</script>
{{ end }}

View File

@@ -0,0 +1,142 @@
{{ define "layout" }}
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>DecyphArr - {{.Title}}</title>
<link href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha1/dist/css/bootstrap.min.css" rel="stylesheet">
<link href="https://cdn.jsdelivr.net/npm/bootstrap-icons@1.7.2/font/bootstrap-icons.css" rel="stylesheet">
<link href="https://cdn.jsdelivr.net/npm/select2@4.1.0-rc.0/dist/css/select2.min.css" rel="stylesheet"/>
<link rel="stylesheet" href="https://cdn.jsdelivr.net/npm/select2-bootstrap-5-theme@1.3.0/dist/select2-bootstrap-5-theme.min.css"/>
<style>
:root {
--primary-color: #2563eb;
--secondary-color: #1e40af;
}
body {
background-color: #f8fafc;
}
.navbar {
padding: 1rem 0;
background: #fff !important;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
.navbar-brand {
color: var(--primary-color) !important;
font-weight: 700;
font-size: 1.5rem;
}
.card {
border: none;
border-radius: 10px;
box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
}
.nav-link {
padding: 0.5rem 1rem;
color: #4b5563;
}
.nav-link.active {
color: var(--primary-color) !important;
font-weight: 500;
}
.badge#channel-badge {
background-color: #0d6efd;
}
.badge#channel-badge.beta {
background-color: #fd7e14;
}
</style>
</head>
<body>
<nav class="navbar navbar-expand-lg navbar-light mb-4">
<div class="container">
<a class="navbar-brand" href="/">
<i class="bi bi-cloud-download me-2"></i>DecyphArr
</a>
<button class="navbar-toggler" type="button" data-bs-toggle="collapse" data-bs-target="#navbarNav">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbarNav">
<ul class="navbar-nav me-auto">
<li class="nav-item">
<a class="nav-link {{if eq .Page "index"}}active{{end}}" href="/">
<i class="bi bi-table me-1"></i>Torrents
</a>
</li>
<li class="nav-item">
<a class="nav-link {{if eq .Page "download"}}active{{end}}" href="/download">
<i class="bi bi-cloud-download me-1"></i>Download
</a>
</li>
<li class="nav-item">
<a class="nav-link {{if eq .Page "repair"}}active{{end}}" href="/repair">
<i class="bi bi-tools me-1"></i>Repair
</a>
</li>
<li class="nav-item">
<a class="nav-link {{if eq .Page "config"}}active{{end}}" href="/config">
<i class="bi bi-gear me-1"></i>Config
</a>
</li>
<li class="nav-item">
<a class="nav-link" href="/logs" target="_blank">
<i class="bi bi-journal me-1"></i>Logs
</a>
</li>
</ul>
<div class="d-flex align-items-center">
<span class="badge me-2" id="channel-badge">Loading...</span>
<span class="badge bg-primary" id="version-badge">Loading...</span>
</div>
</div>
</div>
</nav>
{{ if eq .Page "index" }}
{{ template "index" . }}
{{ else if eq .Page "download" }}
{{ template "download" . }}
{{ else if eq .Page "repair" }}
{{ template "repair" . }}
{{ else if eq .Page "config" }}
{{ template "config" . }}
{{ end }}
<script src="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha1/dist/js/bootstrap.bundle.min.js"></script>
<script src="https://code.jquery.com/jquery-3.6.0.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/select2@4.1.0-rc.0/dist/js/select2.min.js"></script>
<script>
document.addEventListener('DOMContentLoaded', function() {
    // Populate the navbar version/channel badges from the backend. HTTP
    // error statuses are rejected explicitly (fetch only rejects on network
    // failure), and a missing channel no longer crashes on charAt().
    fetch('/internal/version')
        .then(response => {
            if (!response.ok) throw new Error(`HTTP ${response.status}`);
            return response.json();
        })
        .then(data => {
            const versionBadge = document.getElementById('version-badge');
            const channelBadge = document.getElementById('channel-badge');
            // Link the version badge to its GitHub release page.
            versionBadge.innerHTML = `<a href="https://github.com/sirrobot01/debrid-blackhole/releases/tag/${data.version}" target="_blank" class="text-white">${data.version}</a>`;
            const channel = data.channel || 'unknown';
            channelBadge.textContent = channel.charAt(0).toUpperCase() + channel.slice(1);
            if (channel === 'beta') {
                channelBadge.classList.add('beta');
            }
        })
        .catch(error => {
            console.error('Error fetching version:', error);
            document.getElementById('version-badge').textContent = 'Unknown';
            document.getElementById('channel-badge').textContent = 'Unknown';
        });
});
</script>
</body>
</html>
{{ end }}

View File

@@ -0,0 +1,94 @@
{{ define "repair" }}
<div class="container mt-4">
<div class="card">
<div class="card-header">
<h4 class="mb-0"><i class="bi bi-tools me-2"></i>Repair Media</h4>
</div>
<div class="card-body">
<form id="repairForm">
<div class="mb-3">
<label for="arrSelect" class="form-label">Select Arr Instance</label>
<select class="form-select" id="arrSelect" required>
<option value="">Select an Arr instance</option>
</select>
</div>
<div class="mb-3">
<label for="mediaIds" class="form-label">Media IDs</label>
<input type="text" class="form-control" id="mediaIds"
placeholder="Enter IDs (comma-separated)">
<small class="text-muted">Enter TV DB ids for Sonarr, TM DB ids for Radarr</small>
</div>
<div class="mb-3">
<div class="form-check">
<input class="form-check-input" type="checkbox" id="isAsync" checked>
<label class="form-check-label" for="isAsync">
Run repair in background
</label>
</div>
</div>
<button type="submit" class="btn btn-primary" id="submitRepair">
<i class="bi bi-wrench me-2"></i>Start Repair
</button>
</form>
</div>
</div>
</div>
<script>
document.addEventListener('DOMContentLoaded', () => {
    // Populate the Arr-instance dropdown. A failed fetch is logged rather
    // than producing an unhandled rejection and a silently empty select.
    fetch('/internal/arrs')
        .then(response => {
            if (!response.ok) throw new Error(`HTTP ${response.status}`);
            return response.json();
        })
        .then(arrs => {
            const select = document.getElementById('arrSelect');
            arrs.forEach(arr => {
                const option = document.createElement('option');
                option.value = arr.name;
                option.textContent = arr.name;
                select.appendChild(option);
            });
        })
        .catch(error => console.error('Error loading Arr instances:', error));
    // Submit the repair request, disabling the button and showing a spinner
    // while the request is in flight.
    document.getElementById('repairForm').addEventListener('submit', async (e) => {
        e.preventDefault();
        const submitBtn = document.getElementById('submitRepair');
        const originalText = submitBtn.innerHTML;
        submitBtn.disabled = true;
        submitBtn.innerHTML = '<span class="spinner-border spinner-border-sm me-2"></span>Repairing...';
        const mediaIds = document.getElementById('mediaIds').value.split(',').map(id => id.trim());
        const arr = document.getElementById('arrSelect').value;
        if (!arr) {
            alert('Please select an Arr instance');
            submitBtn.disabled = false;
            submitBtn.innerHTML = originalText;
            return;
        }
        try {
            const response = await fetch('/internal/repair', {
                method: 'POST',
                headers: {
                    'Content-Type': 'application/json'
                },
                body: JSON.stringify({
                    arr: arr,
                    mediaIds: mediaIds,
                    async: document.getElementById('isAsync').checked
                })
            });
            if (!response.ok) throw new Error(await response.text());
            alert('Repair process initiated successfully!');
        } catch (error) {
            alert(`Error starting repair: ${error.message}`);
        } finally {
            submitBtn.disabled = false;
            submitBtn.innerHTML = originalText;
        }
    });
});
</script>
{{ end }}

View File

@@ -1,176 +0,0 @@
package server
import (
"context"
"goBlack/common"
"goBlack/pkg/qbit/shared"
"net/http"
"path/filepath"
"strings"
)
// handleTorrentsInfo returns the stored torrent list, narrowed by the
// category and hash list placed in the request context by middleware, and an
// optional free-text "filter" query parameter.
func (s *Server) handleTorrentsInfo(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	category := ctx.Value("category").(string)
	// strings.Trim with an empty cutset was a no-op; trimming surrounding
	// whitespace is what was intended.
	filter := strings.TrimSpace(r.URL.Query().Get("filter"))
	hashes, _ := ctx.Value("hashes").([]string)
	torrents := s.qbit.Storage.GetAll(category, filter, hashes)
	common.JSONResponse(w, torrents, http.StatusOK)
}
// handleTorrentsAdd ingests new torrents from newline-separated magnet URLs
// and/or uploaded .torrent files, accepting both multipart and urlencoded
// form bodies.
//
// The qBittorrent "sequentialDownload" flag is repurposed: any value other
// than "true" selects symlink mode (NOTE(review): confirm this inversion is
// intentional — it mirrors the original logic).
func (s *Server) handleTorrentsAdd(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	contentType := strings.Split(r.Header.Get("Content-Type"), ";")[0]
	switch contentType {
	case "multipart/form-data":
		if err := r.ParseMultipartForm(32 << 20); err != nil { // 32MB max memory
			s.logger.Printf("Error parsing form: %v\n", err)
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
	case "application/x-www-form-urlencoded":
		if err := r.ParseForm(); err != nil {
			s.logger.Printf("Error parsing form: %v\n", err)
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
	}
	isSymlink := strings.ToLower(r.FormValue("sequentialDownload")) != "true"
	s.logger.Printf("isSymlink: %v\n", isSymlink)
	urls := r.FormValue("urls")
	category := r.FormValue("category")
	ctx = context.WithValue(ctx, "isSymlink", isSymlink)
	// Clients commonly submit CRLF-separated lists; trim each entry so a
	// trailing "\r" doesn't corrupt the magnet URI, and skip blank lines.
	for _, url := range strings.Split(urls, "\n") {
		url = strings.TrimSpace(url)
		if url == "" {
			continue
		}
		if err := s.qbit.AddMagnet(ctx, url, category); err != nil {
			s.logger.Printf("Error adding magnet: %v\n", err)
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
	}
	if contentType == "multipart/form-data" && len(r.MultipartForm.File["torrents"]) > 0 {
		for _, fileHeader := range r.MultipartForm.File["torrents"] {
			if err := s.qbit.AddTorrent(ctx, fileHeader, category); err != nil {
				s.logger.Printf("Error adding torrent: %v\n", err)
				http.Error(w, err.Error(), http.StatusBadRequest)
				return
			}
		}
	}
	w.WriteHeader(http.StatusOK)
}
// handleTorrentsDelete removes from storage every torrent named in the
// request's hash list; an empty list is rejected as a bad request.
func (s *Server) handleTorrentsDelete(w http.ResponseWriter, r *http.Request) {
	hashes, _ := r.Context().Value("hashes").([]string)
	if len(hashes) == 0 {
		http.Error(w, "No hashes provided", http.StatusBadRequest)
		return
	}
	for _, h := range hashes {
		s.qbit.Storage.Delete(h)
	}
	w.WriteHeader(http.StatusOK)
}
// handleTorrentsPause pauses each known torrent in the hash list. Unknown
// hashes are skipped; the pause itself runs asynchronously so the HTTP
// response is not delayed.
func (s *Server) handleTorrentsPause(w http.ResponseWriter, r *http.Request) {
	hashes, _ := r.Context().Value("hashes").([]string)
	for _, h := range hashes {
		if t := s.qbit.Storage.Get(h); t != nil {
			go s.qbit.PauseTorrent(t)
		}
	}
	w.WriteHeader(http.StatusOK)
}
// handleTorrentsResume resumes each known torrent in the hash list. Unknown
// hashes are skipped; the resume itself runs asynchronously so the HTTP
// response is not delayed.
func (s *Server) handleTorrentsResume(w http.ResponseWriter, r *http.Request) {
	hashes, _ := r.Context().Value("hashes").([]string)
	for _, h := range hashes {
		if t := s.qbit.Storage.Get(h); t != nil {
			go s.qbit.ResumeTorrent(t)
		}
	}
	w.WriteHeader(http.StatusOK)
}
// handleTorrentRecheck triggers a refresh for each known torrent in the hash
// list. Unknown hashes are skipped; the refresh itself runs asynchronously so
// the HTTP response is not delayed.
func (s *Server) handleTorrentRecheck(w http.ResponseWriter, r *http.Request) {
	hashes, _ := r.Context().Value("hashes").([]string)
	for _, h := range hashes {
		if t := s.qbit.Storage.Get(h); t != nil {
			go s.qbit.RefreshTorrent(t)
		}
	}
	w.WriteHeader(http.StatusOK)
}
// handleCategories reports the configured categories in qBittorrent's
// category-map shape, each with a save path rooted at the download folder.
func (s *Server) handleCategories(w http.ResponseWriter, r *http.Request) {
	categories := make(map[string]shared.TorrentCategory, len(s.qbit.Categories))
	for _, name := range s.qbit.Categories {
		categories[name] = shared.TorrentCategory{
			Name:     name,
			SavePath: filepath.Join(s.qbit.DownloadFolder, name),
		}
	}
	common.JSONResponse(w, categories, http.StatusOK)
}
// handleCreateCategory registers a new torrent category from the "category"
// form field. Re-creating an existing category is a no-op rather than
// producing a duplicate entry in the category list.
func (s *Server) handleCreateCategory(w http.ResponseWriter, r *http.Request) {
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Failed to parse form data", http.StatusBadRequest)
		return
	}
	name := r.Form.Get("category")
	if name == "" {
		http.Error(w, "No name provided", http.StatusBadRequest)
		return
	}
	for _, existing := range s.qbit.Categories {
		if existing == name {
			// Already present; keep the list duplicate-free.
			common.JSONResponse(w, nil, http.StatusOK)
			return
		}
	}
	s.qbit.Categories = append(s.qbit.Categories, name)
	common.JSONResponse(w, nil, http.StatusOK)
}
// handleTorrentProperties returns the qBittorrent-style properties for a
// single torrent identified by the "hash" query parameter. An unknown hash
// now yields 404 instead of passing a nil torrent into GetTorrentProperties.
func (s *Server) handleTorrentProperties(w http.ResponseWriter, r *http.Request) {
	hash := r.URL.Query().Get("hash")
	torrent := s.qbit.Storage.Get(hash)
	if torrent == nil {
		http.Error(w, "Torrent not found", http.StatusNotFound)
		return
	}
	properties := s.qbit.GetTorrentProperties(torrent)
	common.JSONResponse(w, properties, http.StatusOK)
}
// handleTorrentFiles returns the file list for a single torrent identified by
// the "hash" query parameter. An unknown hash now yields an explicit 404
// instead of silently returning an empty 200 with no body.
func (s *Server) handleTorrentFiles(w http.ResponseWriter, r *http.Request) {
	hash := r.URL.Query().Get("hash")
	torrent := s.qbit.Storage.Get(hash)
	if torrent == nil {
		http.Error(w, "Torrent not found", http.StatusNotFound)
		return
	}
	files := s.qbit.GetTorrentFiles(torrent)
	common.JSONResponse(w, files, http.StatusOK)
}

View File

@@ -3,9 +3,15 @@ package server
import ( import (
"embed" "embed"
"encoding/json" "encoding/json"
"goBlack/common" "errors"
"goBlack/pkg/arr" "fmt"
"goBlack/pkg/debrid" "github.com/go-chi/chi/v5"
"github.com/rs/zerolog"
"github.com/sirrobot01/debrid-blackhole/common"
"github.com/sirrobot01/debrid-blackhole/pkg/arr"
"github.com/sirrobot01/debrid-blackhole/pkg/debrid"
"github.com/sirrobot01/debrid-blackhole/pkg/qbit/shared"
"github.com/sirrobot01/debrid-blackhole/pkg/version"
"html/template" "html/template"
"net/http" "net/http"
"strings" "strings"
@@ -33,81 +39,95 @@ type ContentResponse struct {
ArrID string `json:"arr"` ArrID string `json:"arr"`
} }
//go:embed static/index.html type RepairRequest struct {
ArrName string `json:"arr"`
MediaIds []string `json:"mediaIds"`
Async bool `json:"async"`
}
//go:embed templates/*
var content embed.FS var content embed.FS
func (s *Server) handleHome(w http.ResponseWriter, r *http.Request) { type uiHandler struct {
tmpl, err := template.ParseFS(content, "static/index.html") qbit *shared.QBit
if err != nil { logger zerolog.Logger
http.Error(w, err.Error(), http.StatusInternalServerError) debug bool
return }
}
err = tmpl.Execute(w, nil) var templates *template.Template
if err != nil {
func init() {
templates = template.Must(template.ParseFS(
content,
"templates/layout.html",
"templates/index.html",
"templates/download.html",
"templates/repair.html",
"templates/config.html",
))
}
func (u *uiHandler) IndexHandler(w http.ResponseWriter, r *http.Request) {
data := map[string]interface{}{
"Page": "index",
"Title": "Torrents",
}
if err := templates.ExecuteTemplate(w, "layout", data); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError) http.Error(w, err.Error(), http.StatusInternalServerError)
return return
} }
} }
func (s *Server) handleGetArrs(w http.ResponseWriter, r *http.Request) { func (u *uiHandler) DownloadHandler(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json") data := map[string]interface{}{
common.JSONResponse(w, s.qbit.Arrs.GetAll(), http.StatusOK) "Page": "download",
} "Title": "Download",
}
func (s *Server) handleContent(w http.ResponseWriter, r *http.Request) { if err := templates.ExecuteTemplate(w, "layout", data); err != nil {
arrName := r.URL.Query().Get("arr") http.Error(w, err.Error(), http.StatusInternalServerError)
_arr := s.qbit.Arrs.Get(arrName)
if _arr == nil {
http.Error(w, "Invalid arr", http.StatusBadRequest)
return return
} }
contents := _arr.GetContents()
w.Header().Set("Content-Type", "application/json")
common.JSONResponse(w, contents, http.StatusOK)
} }
func (s *Server) handleSearch(w http.ResponseWriter, r *http.Request) { func (u *uiHandler) RepairHandler(w http.ResponseWriter, r *http.Request) {
// arrName := r.URL.Query().Get("arr") data := map[string]interface{}{
term := r.URL.Query().Get("term") "Page": "repair",
results, err := arr.SearchTMDB(term) "Title": "Repair",
if err != nil { }
http.Error(w, err.Error(), http.StatusBadRequest) if err := templates.ExecuteTemplate(w, "layout", data); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return return
} }
w.Header().Set("Content-Type", "application/json")
common.JSONResponse(w, results.Results, http.StatusOK)
} }
func (s *Server) handleSeasons(w http.ResponseWriter, r *http.Request) { func (u *uiHandler) ConfigHandler(w http.ResponseWriter, r *http.Request) {
// arrId := r.URL.Query().Get("arrId") data := map[string]interface{}{
// contentId := chi.URLParam(r, "contentId") "Page": "config",
seasons := []string{"Season 1", "Season 2", "Season 3", "Season 4", "Season 5"} "Title": "Config",
w.Header().Set("Content-Type", "application/json") }
common.JSONResponse(w, seasons, http.StatusOK) if err := templates.ExecuteTemplate(w, "layout", data); err != nil {
http.Error(w, err.Error(), http.StatusInternalServerError)
return
}
} }
func (s *Server) handleEpisodes(w http.ResponseWriter, r *http.Request) { func (u *uiHandler) handleGetArrs(w http.ResponseWriter, r *http.Request) {
// arrId := r.URL.Query().Get("arrId")
// contentId := chi.URLParam(r, "contentId")
// seasonIds := strings.Split(r.URL.Query().Get("seasons"), ",")
episodes := []string{"Episode 1", "Episode 2", "Episode 3", "Episode 4", "Episode 5"}
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
common.JSONResponse(w, episodes, http.StatusOK) common.JSONResponse(w, u.qbit.Arrs.GetAll(), http.StatusOK)
} }
func (s *Server) handleAddContent(w http.ResponseWriter, r *http.Request) { func (u *uiHandler) handleAddContent(w http.ResponseWriter, r *http.Request) {
var req AddRequest var req AddRequest
if err := json.NewDecoder(r.Body).Decode(&req); err != nil { if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
http.Error(w, err.Error(), http.StatusBadRequest) http.Error(w, err.Error(), http.StatusBadRequest)
return return
} }
_arr := s.qbit.Arrs.Get(req.Arr) _arr := u.qbit.Arrs.Get(req.Arr)
if _arr == nil { if _arr == nil {
_arr = arr.NewArr(req.Arr, "", "", arr.Sonarr) _arr = arr.NewArr(req.Arr, "", "", arr.Sonarr)
} }
importReq := NewImportRequest(req.Url, _arr, !req.NotSymlink) importReq := NewImportRequest(req.Url, _arr, !req.NotSymlink)
err := importReq.Process(s) err := importReq.Process(u.qbit)
if err != nil { if err != nil {
http.Error(w, err.Error(), http.StatusBadRequest) http.Error(w, err.Error(), http.StatusBadRequest)
return return
@@ -115,7 +135,7 @@ func (s *Server) handleAddContent(w http.ResponseWriter, r *http.Request) {
common.JSONResponse(w, importReq, http.StatusOK) common.JSONResponse(w, importReq, http.StatusOK)
} }
func (s *Server) handleCheckCached(w http.ResponseWriter, r *http.Request) { func (u *uiHandler) handleCheckCached(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json") w.Header().Set("Content-Type", "application/json")
_hashes := r.URL.Query().Get("hash") _hashes := r.URL.Query().Get("hash")
if _hashes == "" { if _hashes == "" {
@@ -131,9 +151,9 @@ func (s *Server) handleCheckCached(w http.ResponseWriter, r *http.Request) {
var deb debrid.Service var deb debrid.Service
if db == "" { if db == "" {
// use the first debrid // use the first debrid
deb = s.qbit.Debrid.Get() deb = u.qbit.Debrid.Get()
} else { } else {
deb = s.qbit.Debrid.GetByName(db) deb = u.qbit.Debrid.GetByName(db)
} }
if deb == nil { if deb == nil {
http.Error(w, "Invalid debrid", http.StatusBadRequest) http.Error(w, "Invalid debrid", http.StatusBadRequest)
@@ -147,3 +167,82 @@ func (s *Server) handleCheckCached(w http.ResponseWriter, r *http.Request) {
} }
common.JSONResponse(w, result, http.StatusOK) common.JSONResponse(w, result, http.StatusOK)
} }
func (u *uiHandler) handleRepairMedia(w http.ResponseWriter, r *http.Request) {
var req RepairRequest
if err := json.NewDecoder(r.Body).Decode(&req); err != nil {
http.Error(w, err.Error(), http.StatusBadRequest)
return
}
_arr := u.qbit.Arrs.Get(req.ArrName)
if _arr == nil {
http.Error(w, "No Arrs found to repair", http.StatusNotFound)
return
}
mediaIds := req.MediaIds
if len(mediaIds) == 0 {
mediaIds = []string{""}
}
if req.Async {
for _, tvId := range mediaIds {
go func() {
err := _arr.Repair(tvId)
if err != nil {
u.logger.Info().Msgf("Failed to repair: %v", err)
}
}()
}
common.JSONResponse(w, "Repair process started", http.StatusOK)
return
}
var errs []error
for _, tvId := range mediaIds {
if err := _arr.Repair(tvId); err != nil {
errs = append(errs, err)
}
}
if len(errs) > 0 {
combinedErr := errors.Join(errs...)
http.Error(w, fmt.Sprintf("Failed to repair: %v", combinedErr), http.StatusInternalServerError)
return
}
common.JSONResponse(w, "Repair completed", http.StatusOK)
}
func (u *uiHandler) handleGetVersion(w http.ResponseWriter, r *http.Request) {
v := version.GetInfo()
w.Header().Set("Content-Type", "application/json")
common.JSONResponse(w, v, http.StatusOK)
}
func (u *uiHandler) handleGetTorrents(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
common.JSONResponse(w, u.qbit.Storage.GetAll("", "", nil), http.StatusOK)
}
func (u *uiHandler) handleDeleteTorrent(w http.ResponseWriter, r *http.Request) {
hash := chi.URLParam(r, "hash")
if hash == "" {
http.Error(w, "No hash provided", http.StatusBadRequest)
return
}
u.qbit.Storage.Delete(hash)
w.WriteHeader(http.StatusOK)
}
func (u *uiHandler) handleGetConfig(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Content-Type", "application/json")
config := common.CONFIG
arrCfgs := make([]common.ArrConfig, 0)
for _, a := range u.qbit.Arrs.GetAll() {
arrCfgs = append(arrCfgs, common.ArrConfig{Host: a.Host, Name: a.Name, Token: a.Token})
}
config.Arrs = arrCfgs
common.JSONResponse(w, config, http.StatusOK)
}

View File

@@ -0,0 +1,27 @@
package server
import (
"github.com/go-chi/chi/v5"
"net/http"
)
func (u *uiHandler) Routes(r chi.Router) http.Handler {
r.Group(func(r chi.Router) {
r.Get("/", u.IndexHandler)
r.Get("/download", u.DownloadHandler)
r.Get("/repair", u.RepairHandler)
r.Get("/config", u.ConfigHandler)
r.Route("/internal", func(r chi.Router) {
r.Get("/arrs", u.handleGetArrs)
r.Post("/add", u.handleAddContent)
r.Get("/cached", u.handleCheckCached)
r.Post("/repair", u.handleRepairMedia)
r.Get("/torrents", u.handleGetTorrents)
r.Delete("/torrents/{hash}", u.handleDeleteTorrent)
r.Get("/config", u.handleGetConfig)
r.Get("/version", u.handleGetVersion)
})
})
return r
}

View File

@@ -2,9 +2,9 @@ package shared
import ( import (
"fmt" "fmt"
"goBlack/common" "github.com/sirrobot01/debrid-blackhole/common"
"goBlack/pkg/debrid" "github.com/sirrobot01/debrid-blackhole/pkg/debrid"
"goBlack/pkg/downloaders" "github.com/sirrobot01/debrid-blackhole/pkg/downloaders"
"os" "os"
"path/filepath" "path/filepath"
"sync" "sync"
@@ -12,7 +12,7 @@ import (
) )
func (q *QBit) processManualFiles(debridTorrent *debrid.Torrent) (string, error) { func (q *QBit) processManualFiles(debridTorrent *debrid.Torrent) (string, error) {
q.logger.Printf("Downloading %d files...", len(debridTorrent.DownloadLinks)) q.logger.Info().Msgf("Downloading %d files...", len(debridTorrent.DownloadLinks))
torrentPath := common.RemoveExtension(debridTorrent.OriginalFilename) torrentPath := common.RemoveExtension(debridTorrent.OriginalFilename)
parent := common.RemoveInvalidChars(filepath.Join(q.DownloadFolder, debridTorrent.Arr.Name, torrentPath)) parent := common.RemoveInvalidChars(filepath.Join(q.DownloadFolder, debridTorrent.Arr.Name, torrentPath))
err := os.MkdirAll(parent, os.ModePerm) err := os.MkdirAll(parent, os.ModePerm)
@@ -30,7 +30,7 @@ func (q *QBit) downloadFiles(debridTorrent *debrid.Torrent, parent string) {
client := downloaders.GetHTTPClient() client := downloaders.GetHTTPClient()
for _, link := range debridTorrent.DownloadLinks { for _, link := range debridTorrent.DownloadLinks {
if link.DownloadLink == "" { if link.DownloadLink == "" {
q.logger.Printf("No download link found for %s\n", link.Filename) q.logger.Info().Msgf("No download link found for %s", link.Filename)
continue continue
} }
wg.Add(1) wg.Add(1)
@@ -40,33 +40,42 @@ func (q *QBit) downloadFiles(debridTorrent *debrid.Torrent, parent string) {
defer func() { <-semaphore }() defer func() { <-semaphore }()
err := downloaders.NormalHTTP(client, link.DownloadLink, filepath.Join(parent, link.Filename)) err := downloaders.NormalHTTP(client, link.DownloadLink, filepath.Join(parent, link.Filename))
if err != nil { if err != nil {
q.logger.Printf("Error downloading %s: %v\n", link.DownloadLink, err) q.logger.Info().Msgf("Error downloading %s: %v", link.DownloadLink, err)
} else { } else {
q.logger.Printf("Downloaded %s successfully\n", link.DownloadLink) q.logger.Info().Msgf("Downloaded %s successfully", link.DownloadLink)
} }
}(link) }(link)
} }
wg.Wait() wg.Wait()
q.logger.Printf("Downloaded all files for %s\n", debridTorrent.Name) q.logger.Info().Msgf("Downloaded all files for %s", debridTorrent.Name)
} }
func (q *QBit) ProcessSymlink(debridTorrent *debrid.Torrent) (string, error) { func (q *QBit) ProcessSymlink(debridTorrent *debrid.Torrent) (string, error) {
var wg sync.WaitGroup var wg sync.WaitGroup
files := debridTorrent.Files files := debridTorrent.Files
ready := make(chan debrid.TorrentFile, len(files)) ready := make(chan debrid.TorrentFile, len(files))
if len(files) == 0 {
q.logger.Printf("Checking %d files...", len(files)) return "", fmt.Errorf("no video files found")
}
q.logger.Info().Msgf("Checking %d files...", len(files))
rCloneBase := debridTorrent.Debrid.GetMountPath() rCloneBase := debridTorrent.Debrid.GetMountPath()
torrentPath, err := q.getTorrentPath(rCloneBase, debridTorrent) // /MyTVShow/ torrentPath, err := q.getTorrentPath(rCloneBase, debridTorrent) // /MyTVShow/
if err != nil { if err != nil {
return "", fmt.Errorf("failed to get torrent path: %v", err) return "", fmt.Errorf("failed to get torrent path: %v", err)
} }
torrentSymlinkPath := filepath.Join(q.DownloadFolder, debridTorrent.Arr.Name, torrentPath) // /mnt/symlinks/{category}/MyTVShow/ // Fix for alldebrid
newTorrentPath := torrentPath
if newTorrentPath == "" {
// Alldebrid at times doesn't return the parent folder for single file torrents
newTorrentPath = common.RemoveExtension(debridTorrent.Name) // MyTVShow
}
torrentSymlinkPath := filepath.Join(q.DownloadFolder, debridTorrent.Arr.Name, newTorrentPath) // /mnt/symlinks/{category}/MyTVShow/
err = os.MkdirAll(torrentSymlinkPath, os.ModePerm) err = os.MkdirAll(torrentSymlinkPath, os.ModePerm)
if err != nil { if err != nil {
return "", fmt.Errorf("failed to create directory: %s: %v", torrentSymlinkPath, err) return "", fmt.Errorf("failed to create directory: %s: %v", torrentSymlinkPath, err)
} }
torrentRclonePath := filepath.Join(rCloneBase, torrentPath) torrentRclonePath := filepath.Join(rCloneBase, torrentPath) // leave it as is
q.logger.Debug().Msgf("Debrid torrent path: %s\nSymlink Path: %s", torrentRclonePath, torrentSymlinkPath)
for _, file := range files { for _, file := range files {
wg.Add(1) wg.Add(1)
go checkFileLoop(&wg, torrentRclonePath, file, ready) go checkFileLoop(&wg, torrentRclonePath, file, ready)
@@ -78,7 +87,7 @@ func (q *QBit) ProcessSymlink(debridTorrent *debrid.Torrent) (string, error) {
}() }()
for f := range ready { for f := range ready {
q.logger.Println("File is ready:", f.Path) q.logger.Info().Msgf("File is ready: %s", f.Path)
q.createSymLink(torrentSymlinkPath, torrentRclonePath, f) q.createSymLink(torrentSymlinkPath, torrentRclonePath, f)
} }
return torrentPath, nil return torrentPath, nil
@@ -86,9 +95,11 @@ func (q *QBit) ProcessSymlink(debridTorrent *debrid.Torrent) (string, error) {
func (q *QBit) getTorrentPath(rclonePath string, debridTorrent *debrid.Torrent) (string, error) { func (q *QBit) getTorrentPath(rclonePath string, debridTorrent *debrid.Torrent) (string, error) {
for { for {
torrentPath := debridTorrent.GetMountFolder(rclonePath) q.logger.Debug().Msgf("Checking for torrent path: %s", rclonePath)
if torrentPath != "" { torrentPath, err := debridTorrent.GetMountFolder(rclonePath)
return torrentPath, nil if err == nil {
q.logger.Debug().Msgf("Found torrent path: %s", torrentPath)
return torrentPath, err
} }
time.Sleep(time.Second) time.Sleep(time.Second)
} }
@@ -99,13 +110,9 @@ func (q *QBit) createSymLink(path string, torrentMountPath string, file debrid.T
// Combine the directory and filename to form a full path // Combine the directory and filename to form a full path
fullPath := filepath.Join(path, file.Name) // /mnt/symlinks/{category}/MyTVShow/MyTVShow.S01E01.720p.mkv fullPath := filepath.Join(path, file.Name) // /mnt/symlinks/{category}/MyTVShow/MyTVShow.S01E01.720p.mkv
// Create a symbolic link if file doesn't exist // Create a symbolic link if file doesn't exist
torrentFilePath := filepath.Join(torrentMountPath, file.Name) // debridFolder/MyTVShow/MyTVShow.S01E01.720p.mkv torrentFilePath := filepath.Join(torrentMountPath, file.Path) // debridFolder/MyTVShow/MyTVShow.S01E01.720p.mkv
err := os.Symlink(torrentFilePath, fullPath) err := os.Symlink(torrentFilePath, fullPath)
if err != nil { if err != nil {
q.logger.Printf("Failed to create symlink: %s: %v\n", fullPath, err) q.logger.Info().Msgf("Failed to create symlink: %s: %v", fullPath, err)
}
// Check if the file exists
if !common.FileReady(fullPath) {
q.logger.Printf("Symlink not ready: %s\n", fullPath)
} }
} }

View File

@@ -2,10 +2,10 @@ package shared
import ( import (
"cmp" "cmp"
"goBlack/common" "github.com/rs/zerolog"
"goBlack/pkg/arr" "github.com/sirrobot01/debrid-blackhole/common"
"goBlack/pkg/debrid" "github.com/sirrobot01/debrid-blackhole/pkg/arr"
"log" "github.com/sirrobot01/debrid-blackhole/pkg/debrid"
"os" "os"
) )
@@ -18,16 +18,16 @@ type QBit struct {
Debrid *debrid.DebridService Debrid *debrid.DebridService
Storage *TorrentStorage Storage *TorrentStorage
debug bool debug bool
logger *log.Logger logger zerolog.Logger
Arrs *arr.Storage Arrs *arr.Storage
Tags []string
RefreshInterval int RefreshInterval int
} }
func NewQBit(config *common.Config, deb *debrid.DebridService, logger *log.Logger) *QBit { func NewQBit(config *common.Config, deb *debrid.DebridService, logger zerolog.Logger, arrs *arr.Storage) *QBit {
cfg := config.QBitTorrent cfg := config.QBitTorrent
port := cmp.Or(cfg.Port, os.Getenv("QBIT_PORT"), "8182") port := cmp.Or(cfg.Port, os.Getenv("QBIT_PORT"), "8182")
refreshInterval := cmp.Or(cfg.RefreshInterval, 10) refreshInterval := cmp.Or(cfg.RefreshInterval, 10)
arrs := arr.NewStorage()
return &QBit{ return &QBit{
Username: cfg.Username, Username: cfg.Username,
Password: cfg.Password, Password: cfg.Password,
@@ -35,7 +35,6 @@ func NewQBit(config *common.Config, deb *debrid.DebridService, logger *log.Logge
DownloadFolder: cfg.DownloadFolder, DownloadFolder: cfg.DownloadFolder,
Categories: cfg.Categories, Categories: cfg.Categories,
Debrid: deb, Debrid: deb,
debug: cfg.Debug,
Storage: NewTorrentStorage("torrents.json"), Storage: NewTorrentStorage("torrents.json"),
logger: logger, logger: logger,
Arrs: arrs, Arrs: arrs,

View File

@@ -1,6 +1,6 @@
package shared package shared
import "goBlack/pkg/debrid" import "github.com/sirrobot01/debrid-blackhole/pkg/debrid"
type BuildInfo struct { type BuildInfo struct {
Libtorrent string `json:"libtorrent"` Libtorrent string `json:"libtorrent"`
@@ -171,6 +171,7 @@ type TorrentCategory struct {
type Torrent struct { type Torrent struct {
ID string `json:"-"` ID string `json:"-"`
DebridTorrent *debrid.Torrent `json:"-"` DebridTorrent *debrid.Torrent `json:"-"`
Debrid string `json:"debrid"`
TorrentPath string `json:"-"` TorrentPath string `json:"-"`
AddedOn int64 `json:"added_on,omitempty"` AddedOn int64 `json:"added_on,omitempty"`
@@ -217,6 +218,7 @@ type Torrent struct {
Uploaded int64 `json:"uploaded,omitempty"` Uploaded int64 `json:"uploaded,omitempty"`
UploadedSession int64 `json:"uploaded_session,omitempty"` UploadedSession int64 `json:"uploaded_session,omitempty"`
Upspeed int `json:"upspeed,omitempty"` Upspeed int `json:"upspeed,omitempty"`
Source string `json:"source,omitempty"`
} }
func (t *Torrent) IsReady() bool { func (t *Torrent) IsReady() bool {

View File

@@ -5,13 +5,14 @@ import (
"context" "context"
"fmt" "fmt"
"github.com/google/uuid" "github.com/google/uuid"
"goBlack/common" "github.com/sirrobot01/debrid-blackhole/common"
"goBlack/pkg/arr" "github.com/sirrobot01/debrid-blackhole/pkg/arr"
"goBlack/pkg/debrid" "github.com/sirrobot01/debrid-blackhole/pkg/debrid"
"io" "io"
"mime/multipart" "mime/multipart"
"os" "os"
"path/filepath" "path/filepath"
"slices"
"strings" "strings"
"time" "time"
) )
@@ -46,7 +47,7 @@ func (q *QBit) AddTorrent(ctx context.Context, fileHeader *multipart.FileHeader,
} }
func (q *QBit) Process(ctx context.Context, magnet *common.Magnet, category string) error { func (q *QBit) Process(ctx context.Context, magnet *common.Magnet, category string) error {
torrent := q.CreateTorrentFromMagnet(magnet, category) torrent := q.CreateTorrentFromMagnet(magnet, category, "auto")
a, ok := ctx.Value("arr").(*arr.Arr) a, ok := ctx.Value("arr").(*arr.Arr)
if !ok { if !ok {
return fmt.Errorf("arr not found in context") return fmt.Errorf("arr not found in context")
@@ -68,13 +69,14 @@ func (q *QBit) Process(ctx context.Context, magnet *common.Magnet, category stri
return nil return nil
} }
func (q *QBit) CreateTorrentFromMagnet(magnet *common.Magnet, category string) *Torrent { func (q *QBit) CreateTorrentFromMagnet(magnet *common.Magnet, category, source string) *Torrent {
torrent := &Torrent{ torrent := &Torrent{
ID: uuid.NewString(), ID: uuid.NewString(),
Hash: strings.ToLower(magnet.InfoHash), Hash: strings.ToLower(magnet.InfoHash),
Name: magnet.Name, Name: magnet.Name,
Size: magnet.Size, Size: magnet.Size,
Category: category, Category: category,
Source: source,
State: "downloading", State: "downloading",
MagnetUri: magnet.Link, MagnetUri: magnet.Link,
@@ -91,11 +93,11 @@ func (q *QBit) CreateTorrentFromMagnet(magnet *common.Magnet, category string) *
func (q *QBit) ProcessFiles(torrent *Torrent, debridTorrent *debrid.Torrent, arr *arr.Arr, isSymlink bool) { func (q *QBit) ProcessFiles(torrent *Torrent, debridTorrent *debrid.Torrent, arr *arr.Arr, isSymlink bool) {
for debridTorrent.Status != "downloaded" { for debridTorrent.Status != "downloaded" {
progress := debridTorrent.Progress progress := debridTorrent.Progress
q.logger.Printf("%s Download Progress: %.2f%%", debridTorrent.Debrid.GetName(), progress) q.logger.Debug().Msgf("%s -> (%s) Download Progress: %.2f%%", debridTorrent.Debrid.GetName(), debridTorrent.Name, progress)
time.Sleep(4 * time.Second) time.Sleep(10 * time.Second)
dbT, err := debridTorrent.Debrid.CheckStatus(debridTorrent, isSymlink) dbT, err := debridTorrent.Debrid.CheckStatus(debridTorrent, isSymlink)
if err != nil { if err != nil {
q.logger.Printf("Error checking status: %v", err) q.logger.Error().Msgf("Error checking status: %v", err)
go debridTorrent.Delete() go debridTorrent.Delete()
q.MarkAsFailed(torrent) q.MarkAsFailed(torrent)
_ = arr.Refresh() _ = arr.Refresh()
@@ -117,7 +119,7 @@ func (q *QBit) ProcessFiles(torrent *Torrent, debridTorrent *debrid.Torrent, arr
if err != nil { if err != nil {
q.MarkAsFailed(torrent) q.MarkAsFailed(torrent)
go debridTorrent.Delete() go debridTorrent.Delete()
q.logger.Printf("Error: %v", err) q.logger.Info().Msgf("Error: %v", err)
return return
} }
torrent.TorrentPath = filepath.Base(torrentPath) torrent.TorrentPath = filepath.Base(torrentPath)
@@ -157,6 +159,7 @@ func (q *QBit) UpdateTorrentMin(t *Torrent, debridTorrent *debrid.Torrent) *Torr
t.Name = debridTorrent.Name t.Name = debridTorrent.Name
t.AddedOn = addedOn.Unix() t.AddedOn = addedOn.Unix()
t.DebridTorrent = debridTorrent t.DebridTorrent = debridTorrent
t.Debrid = debridTorrent.Debrid.GetName()
t.Size = totalSize t.Size = totalSize
t.Completed = sizeCompleted t.Completed = sizeCompleted
t.Downloaded = sizeCompleted t.Downloaded = sizeCompleted
@@ -180,7 +183,7 @@ func (q *QBit) UpdateTorrent(t *Torrent, debridTorrent *debrid.Torrent) *Torrent
debridTorrent, _ = db.GetTorrent(t.ID) debridTorrent, _ = db.GetTorrent(t.ID)
} }
if debridTorrent == nil { if debridTorrent == nil {
q.logger.Printf("Torrent with ID %s not found in %s", t.ID, db.GetName()) q.logger.Info().Msgf("Torrent with ID %s not found in %s", t.ID, db.GetName())
return t return t
} }
if debridTorrent.Status != "downloaded" { if debridTorrent.Status != "downloaded" {
@@ -188,7 +191,8 @@ func (q *QBit) UpdateTorrent(t *Torrent, debridTorrent *debrid.Torrent) *Torrent
} }
if t.TorrentPath == "" { if t.TorrentPath == "" {
t.TorrentPath = filepath.Base(debridTorrent.GetMountFolder(rcLoneMount)) tPath, _ := debridTorrent.GetMountFolder(rcLoneMount)
t.TorrentPath = filepath.Base(tPath)
} }
savePath := filepath.Join(q.DownloadFolder, t.Category) + string(os.PathSeparator) savePath := filepath.Join(q.DownloadFolder, t.Category) + string(os.PathSeparator)
torrentPath := filepath.Join(savePath, t.TorrentPath) + string(os.PathSeparator) torrentPath := filepath.Join(savePath, t.TorrentPath) + string(os.PathSeparator)
@@ -263,12 +267,55 @@ func (q *QBit) GetTorrentFiles(t *Torrent) []*TorrentFile {
if t.DebridTorrent == nil { if t.DebridTorrent == nil {
return files return files
} }
for index, file := range t.DebridTorrent.Files { for _, file := range t.DebridTorrent.Files {
files = append(files, &TorrentFile{ files = append(files, &TorrentFile{
Index: index, Name: file.Path,
Name: file.Path, Size: file.Size,
Size: file.Size,
}) })
} }
return files return files
} }
// SetTorrentTags adds the given tags to the torrent's tag list and
// registers any new tags globally on the QBit instance, then persists
// the torrent. Empty tag names are ignored. Always returns true.
func (q *QBit) SetTorrentTags(t *Torrent, tags []string) bool {
	// strings.Split("", ",") yields [""], which would inject an empty
	// entry into the joined tag list; start from an empty slice when the
	// torrent has no tags yet.
	var torrentTags []string
	if t.Tags != "" {
		torrentTags = strings.Split(t.Tags, ",")
	}
	for _, tag := range tags {
		if tag == "" {
			continue
		}
		if !slices.Contains(torrentTags, tag) {
			torrentTags = append(torrentTags, tag)
		}
		if !slices.Contains(q.Tags, tag) {
			q.Tags = append(q.Tags, tag)
		}
	}
	t.Tags = strings.Join(torrentTags, ",")
	q.Storage.Update(t)
	return true
}
// RemoveTorrentTags strips the given tags from both the torrent's tag
// list and the QBit instance's global tag list, then persists the
// torrent. Always returns true.
func (q *QBit) RemoveTorrentTags(t *Torrent, tags []string) bool {
	remaining := common.Remove(strings.Split(t.Tags, ","), tags...)
	q.Tags = common.Remove(q.Tags, tags...)
	t.Tags = strings.Join(remaining, ",")
	q.Storage.Update(t)
	return true
}
// AddTags registers any new, non-empty tags on the QBit instance's
// global tag list, skipping duplicates. Always returns true.
func (q *QBit) AddTags(tags []string) bool {
	for _, tag := range tags {
		if tag != "" && !slices.Contains(q.Tags, tag) {
			q.Tags = append(q.Tags, tag)
		}
	}
	return true
}
// RemoveTags deletes the given tags from the QBit instance's global tag
// list. Always returns true.
func (q *QBit) RemoveTags(tags []string) bool {
	q.Tags = common.Remove(q.Tags, tags...)
	return true
}

View File

@@ -1,8 +1,8 @@
package shared package shared
import ( import (
"goBlack/common" "github.com/sirrobot01/debrid-blackhole/common"
"goBlack/pkg/debrid" "github.com/sirrobot01/debrid-blackhole/pkg/debrid"
"path/filepath" "path/filepath"
"sync" "sync"
"time" "time"

View File

@@ -6,7 +6,7 @@ import (
) )
func (q *QBit) StartWorker(ctx context.Context) { func (q *QBit) StartWorker(ctx context.Context) {
q.logger.Println("Qbit Worker started") q.logger.Info().Msg("Qbit Worker started")
q.StartRefreshWorker(ctx) q.StartRefreshWorker(ctx)
} }
@@ -16,7 +16,7 @@ func (q *QBit) StartRefreshWorker(ctx context.Context) {
for { for {
select { select {
case <-refreshCtx.Done(): case <-refreshCtx.Done():
q.logger.Println("Qbit Refresh Worker stopped") q.logger.Info().Msg("Qbit Refresh Worker stopped")
return return
case <-refreshTicker.C: case <-refreshTicker.C:
torrents := q.Storage.GetAll("", "", nil) torrents := q.Storage.GetAll("", "", nil)

72
pkg/repair/repair.go Normal file
View File

@@ -0,0 +1,72 @@
package repair
import (
"context"
"github.com/sirrobot01/debrid-blackhole/common"
"github.com/sirrobot01/debrid-blackhole/pkg/arr"
"log"
"os"
"os/signal"
"strings"
"syscall"
"time"
)
// Start runs the repair worker until ctx is cancelled or an interrupt /
// SIGTERM is received. The schedule comes from config.Repair.Interval:
// either a duration interval such as "6h"/"30m", or a daily time of day
// in "HH:MM" (24-hour) form. Returns nil on clean shutdown, or the
// first error from schedule parsing or a repair pass.
func Start(ctx context.Context, config *common.Config, arrs *arr.Storage) error {
	ctx, stop := signal.NotifyContext(ctx, os.Interrupt, syscall.SIGTERM)
	defer stop()
	logger := common.NewLogger("Repair", config.LogLevel, os.Stdout)

	duration, err := parseSchedule(config.Repair.Interval)
	if err != nil {
		// Return instead of log.Fatalf: Fatalf would os.Exit the whole
		// process and skip the deferred stop() above.
		logger.Error().Msgf("Failed to parse schedule: %v", err)
		return err
	}

	if config.Repair.RunOnStart {
		logger.Info().Msgf("Running initial repair")
		if err := repair(arrs); err != nil {
			// NOTE(review): stdlib log kept here so the "log" import stays
			// in use; consider migrating fully to the zerolog logger.
			log.Printf("Error during initial repair: %v", err)
			return err
		}
	}

	ticker := time.NewTicker(duration)
	defer ticker.Stop()

	if strings.Contains(config.Repair.Interval, ":") {
		logger.Info().Msgf("Starting repair worker, scheduled daily at %s", config.Repair.Interval)
	} else {
		logger.Info().Msgf("Starting repair worker with %v interval", duration)
	}

	for {
		select {
		case <-ctx.Done():
			logger.Info().Msg("Repair worker stopped")
			return nil
		case t := <-ticker.C:
			logger.Info().Msgf("Running repair at %v", t.Format("15:04:05"))
			if err := repair(arrs); err != nil {
				logger.Error().Msgf("Error during repair: %v", err)
				return err
			}
			// Time-of-day schedules fire once per day; recompute the delay
			// until the next occurrence and reset the ticker.
			if strings.Contains(config.Repair.Interval, ":") {
				nextDuration, err := parseSchedule(config.Repair.Interval)
				if err != nil {
					logger.Error().Msgf("Error calculating next schedule: %v", err)
					return err
				}
				ticker.Reset(nextDuration)
			}
		}
	}
}
// repair kicks off a repair pass for every configured Arr instance.
// Each Arr's Repair runs in its own goroutine, so this function returns
// immediately and the returned error is currently always nil — errors
// from the individual repairs are not propagated to the caller.
// NOTE(review): a.Repair's signature is not visible here; if it reports
// errors, consider collecting them (e.g. with errgroup) so callers that
// check this function's error actually see failures.
func repair(arrs *arr.Storage) error {
	for _, a := range arrs.GetAll() {
		go a.Repair("")
	}
	return nil
}

70
pkg/repair/utils.go Normal file
View File

@@ -0,0 +1,70 @@
package repair
import (
"fmt"
"strconv"
"strings"
"time"
)
// parseSchedule converts a schedule string into the delay before the
// next run. An empty string defaults to one hour; a string containing
// ":" is treated as a daily "HH:MM" time of day; anything else is
// parsed as a duration interval such as "30m" or "6h".
func parseSchedule(schedule string) (time.Duration, error) {
	switch {
	case schedule == "":
		// Default cadence when nothing is configured.
		return time.Hour, nil
	case strings.Contains(schedule, ":"):
		// Time-of-day format (HH:MM).
		return parseTimeOfDay(schedule)
	default:
		// Plain interval such as "45m" or "2h".
		return parseDurationInterval(schedule)
	}
}
// parseTimeOfDay returns the duration from now until the next occurrence
// of the given "HH:MM" (24-hour) clock time in the local time zone. The
// result is always strictly positive: a target time that is now or
// earlier today resolves to tomorrow's occurrence.
func parseTimeOfDay(schedule string) (time.Duration, error) {
	now := time.Now()
	scheduledTime, err := time.Parse("15:04", schedule)
	if err != nil {
		return 0, fmt.Errorf("invalid time format: %s. Use HH:MM in 24-hour format", schedule)
	}

	// Anchor the parsed clock time to today's date in the local zone.
	scheduleToday := time.Date(
		now.Year(), now.Month(), now.Day(),
		scheduledTime.Hour(), scheduledTime.Minute(), 0, 0,
		now.Location(),
	)

	// Use !After (rather than Before) so a target exactly equal to now
	// rolls over to tomorrow instead of yielding a zero duration, which
	// would make time.NewTicker/Ticker.Reset panic in the caller.
	if !scheduleToday.After(now) {
		scheduleToday = scheduleToday.Add(24 * time.Hour)
	}
	return scheduleToday.Sub(now), nil
}
// parseDurationInterval parses strings like "45s", "30m", "2h" or "1d"
// into a time.Duration. The numeric part must be a positive integer and
// the trailing unit one of s, m, h, or d.
func parseDurationInterval(interval string) (time.Duration, error) {
	if len(interval) < 2 {
		return 0, fmt.Errorf("invalid interval format: %s", interval)
	}

	numStr := interval[:len(interval)-1]
	unit := interval[len(interval)-1]

	num, err := strconv.Atoi(numStr)
	if err != nil {
		return 0, fmt.Errorf("invalid number in interval: %s", numStr)
	}
	// Reject zero and negative values: callers feed the result to
	// time.NewTicker / Ticker.Reset, which panic on non-positive durations.
	if num <= 0 {
		return 0, fmt.Errorf("interval must be positive: %s", interval)
	}

	switch unit {
	case 's':
		return time.Duration(num) * time.Second, nil
	case 'm':
		return time.Duration(num) * time.Minute, nil
	case 'h':
		return time.Duration(num) * time.Hour, nil
	case 'd':
		return time.Duration(num) * 24 * time.Hour, nil
	default:
		return 0, fmt.Errorf("invalid unit in interval: %c", unit)
	}
}

18
pkg/version/version.go Normal file
View File

@@ -0,0 +1,18 @@
package version
// Info describes the build's release identity as embedded at link time.
type Info struct {
	Version string `json:"version"` // semantic version tag, e.g. "v0.4.0"
	Channel string `json:"channel"` // release channel, e.g. "beta"
}

var (
	// Version and Channel are injected at build time via
	// -ldflags "-X github.com/sirrobot01/debrid-blackhole/pkg/version.Version=..."
	// (see the build commands in .air.toml / release tooling); they stay
	// empty in a plain `go build`.
	Version = ""
	Channel = ""
)

// GetInfo returns the version information baked into this binary.
func GetInfo() Info {
	return Info{
		Version: Version,
		Channel: Channel,
	}
}

57
scripts/deploy.sh Executable file
View File

@@ -0,0 +1,57 @@
#!/bin/bash
# deploy.sh — tag-based release helper.
#
# Creates and pushes a release tag so GitHub Actions can build and
# publish the release. Main releases are tagged "vX.Y.Z" on main; beta
# releases are tagged "vX.Y.Z-beta" on the beta branch.

# Fail fast on command errors, unset variables, and pipeline failures.
set -euo pipefail

# Display usage and exit non-zero.
usage() {
    echo "Usage: $0 [-b|--beta] <version>"
    echo "Example for main: $0 v1.0.0"
    echo "Example for beta: $0 -b v1.0.0"
    exit 1
}

# Parse arguments. VERSION is pre-initialized so `set -u` does not trip
# on the emptiness check below when no version argument was given.
BETA=false
VERSION=""
while [[ "$#" -gt 0 ]]; do
    case $1 in
        -b|--beta) BETA=true; shift ;;
        -*) echo "Unknown parameter: $1"; usage ;;
        *) VERSION="$1"; shift ;;
    esac
done

# Check if version is provided
if [ -z "$VERSION" ]; then
    echo "Error: Version is required"
    usage
fi

# Validate version format (vMAJOR.MINOR.PATCH)
if ! [[ $VERSION =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
    echo "Error: Version must be in format v1.0.0"
    exit 1
fi

# Set tag and branch based on release channel
if [ "$BETA" = true ]; then
    TAG="$VERSION-beta"
    BRANCH="beta"
else
    TAG="$VERSION"
    BRANCH="main"
fi

echo "Deploying version $VERSION to $BRANCH branch..."

# Ensure we're on the right branch (quoted; set -e aborts on failure).
git checkout "$BRANCH"

# Create and push tag; CI takes over from here.
echo "Creating tag $TAG..."
git tag "$TAG"
git push origin "$TAG"

echo "Deployment initiated successfully!"
echo "GitHub Actions will handle the release process."
echo "Check the progress at: https://github.com/sirrobot01/debrid-blackhole/actions"