Changelog 0.2.0 (#1)

* Changelog 0.2.0
This commit is contained in:
Mukhtar Akere
2024-09-12 06:01:10 +01:00
committed by GitHub
parent 60c6cb32d3
commit 9511f3e99e
25 changed files with 1494 additions and 274 deletions

View File

@@ -31,4 +31,16 @@
- Rewrote Report log - Rewrote Report log
- Fix YTS, 1337x not grabbing infohash - Fix YTS, 1337x not grabbing infohash
- Fix Torrent symlink bug - Fix Torrent symlink bug
-
#### 0.2.0-beta
- Switch to qBittorrent API instead of Blackhole
- Rewrote the whole codebase
#### 0.2.0
- Implement 0.2.0-beta changes
- Removed Blackhole
- Added QbitTorrent API
- Cleaned up the code

View File

@@ -1,9 +1,9 @@
### GoBlackHole(with Debrid Proxy Support) ### GoBlackHole(with Debrid Proxy Support)
This is a Golang implementation go Torrent Blackhole with a **Real Debrid Proxy Support**. This is a Golang implementation go Torrent QbitTorrent with a **Real Debrid Proxy Support**.
#### Uses #### Uses
- Torrent Blackhole that supports the Arrs(Sonarr, Radarr, etc) - Mock Qbittorent API that supports the Arrs(Sonarr, Radarr, etc)
- Proxy support for the Arrs - Proxy support for the Arrs
The proxy is useful in filtering out un-cached Real Debrid torrents The proxy is useful in filtering out un-cached Real Debrid torrents
@@ -21,6 +21,9 @@ services:
blackhole: blackhole:
image: cy01/blackhole:latest # or cy01/blackhole:beta image: cy01/blackhole:latest # or cy01/blackhole:beta
container_name: blackhole container_name: blackhole
ports:
- "8282:8282" # qBittorrent
- "8181:8181" # Proxy
user: "1000:1000" user: "1000:1000"
volumes: volumes:
- ./logs:/app/logs - ./logs:/app/logs
@@ -31,6 +34,8 @@ services:
- PUID=1000 - PUID=1000
- PGID=1000 - PGID=1000
- UMASK=002 - UMASK=002
- QBIT_PORT=8282 # qBittorrent Port. This is optional. You can set this in the config file
- PORT=8181 # Proxy Port. This is optional. You can set this in the config file
restart: unless-stopped restart: unless-stopped
``` ```
@@ -52,26 +57,6 @@ Download the binary from the releases page and run it with the config file.
"folder": "data/realdebrid/torrents/", "folder": "data/realdebrid/torrents/",
"rate_limit": "250/minute" "rate_limit": "250/minute"
}, },
"arrs": [
{
"watch_folder": "data/sonarr/",
"completed_folder": "data/sonarr/completed/",
"token": "sonarr_api_key",
"url": "http://localhost:8787"
},
{
"watch_folder": "data/radarr/",
"completed_folder": "data/radarr/completed/",
"token": "radarr_api_key",
"url": "http://localhost:7878"
},
{
"watch_folder": "data/radarr4k/",
"completed_folder": "data/radarr4k/completed/",
"token": "radarr4k_api_key",
"url": "http://localhost:7878"
}
],
"proxy": { "proxy": {
"enabled": true, "enabled": true,
"port": "8181", "port": "8181",
@@ -80,7 +65,14 @@ Download the binary from the releases page and run it with the config file.
"password": "password", "password": "password",
"cached_only": true "cached_only": true
}, },
"max_cache_size": 1000 "max_cache_size": 1000,
"qbittorrent": {
"port": "8282",
"username": "admin",
"password": "admin",
"download_folder": "/media/symlinks/",
"categories": ["sonarr", "radarr"]
}
} }
``` ```
@@ -88,19 +80,20 @@ Download the binary from the releases page and run it with the config file.
##### Debrid Config ##### Debrid Config
- This config key is important as it's used for both Blackhole and Proxy - This config key is important as it's used for both Blackhole and Proxy
##### Arrs Config
- An empty array will disable Blackhole for the Arrs
- The `watch_folder` is the folder where the Blackhole will watch for torrents
- The `completed_folder` is the folder where the Blackhole will move the completed torrents
- The `token` is the API key for the Arr(This is optional, I think)
##### Proxy Config ##### Proxy Config
- The `enabled` key is used to enable the proxy - The `enabled` key is used to enable the proxy
- The `port` key is the port the proxy will listen on - The `port` key is the port the proxy will listen on
- The `debug` key is used to enable debug logs - The `debug` key is used to enable debug logs
- The `username` and `password` keys are used for basic authentication - The `username` and `password` keys are used for basic authentication
- The `cached_only` means only cached torrents will be returned - The `cached_only` means only cached torrents will be returned
-
##### Qbittorrent Config
- The `port` key is the port the qBittorrent will listen on
- The `username` and `password` keys are used for basic authentication
- The `download_folder` is the folder where the torrents will be downloaded. e.g `/media/symlinks/`
- The `categories` key is used to filter out torrents based on the category. e.g `sonarr`, `radarr`
### Proxy ### Proxy
The proxy is useful in filtering out un-cached Real Debrid torrents. The proxy is useful in filtering out un-cached Real Debrid torrents.
@@ -117,7 +110,32 @@ Setting Up Proxy in Arr
- Password: `password` # or the password set in the config file - Password: `password` # or the password set in the config file
- Bypass Proxy for Local Addresses -> `No` - Bypass Proxy for Local Addresses -> `No`
### Qbittorrent
This project exposes a mock qBittorrent API that supports the Arrs (Sonarr, Radarr, etc.).
Setting Up Qbittorrent in Arr
- Sonarr/Radarr
- Settings -> Download Client -> Add Client -> qBittorrent
- Host: `localhost` # or the IP of the server
- Port: `8282` # or the port set in the config file/ docker-compose env
- Username: `admin` # or the username set in the config file
- Password: `admin` # or the password set in the config file
- Category: e.g `sonarr`, `radarr`
- Use SSL -> `No`
- Test
- Save
### TODO ### TODO
- [ ] Add more Debrid Providers - [ ] A proper name!!!!
- [ ] Debrid
- [ ] Add more Debrid Providers
- [ ] Proxy
- [ ] Add more Proxy features - [ ] Add more Proxy features
- [ ] Add more tests
- [ ] Qbittorrent
- [ ] Add more Qbittorrent features
- [ ] Persist torrents on restart/server crash
- [ ] Add tests

View File

@@ -1,186 +0,0 @@
package cmd
import (
"fmt"
"github.com/fsnotify/fsnotify"
"goBlack/common"
"goBlack/debrid"
"log"
"os"
"path/filepath"
"sync"
"time"
)
// Blackhole watches Arr drop folders for torrent files and hands them to a
// debrid service, then mirrors completed downloads back via symlinks.
type Blackhole struct {
	config *common.Config // application configuration (arrs, debrid, cache size)
	deb    debrid.Service // debrid backend used to resolve torrents
	cache  *common.Cache  // shared cache (availability lookups)
}

// NewBlackhole wires a Blackhole with its configuration, debrid service and
// shared cache. It performs no I/O; call Start to begin watching.
func NewBlackhole(config *common.Config, deb debrid.Service, cache *common.Cache) *Blackhole {
	return &Blackhole{
		config: config,
		deb:    deb,
		cache:  cache,
	}
}
// fileReady reports whether the file at path exists and can be stat-ed.
//
// The previous check (!os.IsNotExist(err)) also returned true for stat
// failures such as permission errors, declaring an inaccessible file
// "ready"; a file is only ready when Stat actually succeeds.
func fileReady(path string) bool {
	_, err := os.Stat(path)
	return err == nil
}
// checkFileLoop polls once per second until the torrent file appears under
// dir, then sends it on ready and exits. The WaitGroup is released on return.
//
// NOTE(review): there is no cancellation — if the file never appears this
// goroutine polls forever; consider a context or timeout.
func checkFileLoop(wg *sync.WaitGroup, dir string, file debrid.TorrentFile, ready chan<- debrid.TorrentFile) {
	defer wg.Done()
	ticker := time.NewTicker(1 * time.Second) // Check every second
	defer ticker.Stop()
	path := filepath.Join(dir, file.Path)
	// A single-case select over ticker.C is just a range loop in disguise.
	for range ticker.C {
		if fileReady(path) {
			ready <- file
			return
		}
	}
}
// processFiles waits for every file of the torrent to appear under the
// debrid mount folder, creating symlinks in the Arr's completed folder as
// files become available, then triggers torrent cleanup.
func (b *Blackhole) processFiles(arr *debrid.Arr, torrent *debrid.Torrent) {
	var wg sync.WaitGroup
	files := torrent.Files
	// Buffered so every checker goroutine can report without blocking.
	ready := make(chan debrid.TorrentFile, len(files))
	log.Printf("Checking %d files...", len(files))
	for _, file := range files {
		wg.Add(1)
		go checkFileLoop(&wg, arr.Debrid.Folder, file, ready)
	}
	// Close the channel once all checkers have reported, ending the range below.
	go func() {
		wg.Wait()
		close(ready)
	}()
	for r := range ready {
		log.Println("File is ready:", r.Name)
		// NOTE(review): this re-links ALL files of the torrent once per ready
		// file (harmless since os.Symlink errors are ignored, but redundant);
		// a single call after the loop would likely suffice — confirm before
		// changing.
		b.createSymLink(arr, torrent)
	}
	go torrent.Cleanup(true)
	fmt.Printf("%s downloaded", torrent.Name)
}
// createSymLink mirrors every file of the torrent from the debrid mount into
// the Arr's completed folder as symbolic links, best-effort.
func (b *Blackhole) createSymLink(arr *debrid.Arr, torrent *debrid.Torrent) {
	destDir := filepath.Join(arr.CompletedFolder, torrent.Folder)
	if err := os.MkdirAll(destDir, os.ModePerm); err != nil {
		log.Printf("Failed to create directory: %s\n", destDir)
	}
	for _, f := range torrent.Files {
		// completedFolder/MyTVShow/MyTVShow.S01E01.720p.mkv
		linkPath := filepath.Join(destDir, f.Name)
		// debridFolder/MyTVShow/MyTVShow.S01E01.720p.mkv
		target := filepath.Join(arr.Debrid.Folder, torrent.Folder, f.Name)
		// Best effort: an already-existing link makes Symlink fail, which is fine.
		_ = os.Symlink(target, linkPath)
	}
}
// watcher consumes fsnotify events and records the last write time of every
// .torrent/.magnet file into events. Errors are logged; the loop exits when
// either channel is closed.
//
// NOTE(review): events is written here while processFilesDebounced reads and
// deletes from the same map in another goroutine with no synchronization —
// this looks like a data race; confirm and guard with a mutex if so.
func watcher(watcher *fsnotify.Watcher, events map[string]time.Time) {
	for {
		select {
		case event, ok := <-watcher.Events:
			if !ok {
				return
			}
			if event.Op&fsnotify.Write == fsnotify.Write {
				// Only torrent descriptor files are of interest.
				if filepath.Ext(event.Name) == ".torrent" || filepath.Ext(event.Name) == ".magnet" {
					events[event.Name] = time.Now()
				}
			}
		case err, ok := <-watcher.Errors:
			if !ok {
				return
			}
			log.Println("ERROR:", err)
		}
	}
}
// processFilesDebounced polls the event map once per second and processes any
// torrent file whose most recent write event is at least debouncePeriod old,
// so a file is only picked up after writes have settled. Runs until the
// process exits.
//
// NOTE(review): events is read and deleted here while the watcher goroutine
// writes to it concurrently without locking — likely data race; confirm and
// synchronize if so.
func (b *Blackhole) processFilesDebounced(arr *debrid.Arr, events map[string]time.Time, debouncePeriod time.Duration) {
	ticker := time.NewTicker(1 * time.Second) // Check every second
	defer ticker.Stop()
	for range ticker.C {
		for file, lastEventTime := range events {
			if time.Since(lastEventTime) >= debouncePeriod {
				log.Printf("Torrent file detected: %s", file)
				// Process the torrent file
				torrent, err := b.deb.Process(arr, file)
				if err != nil && torrent != nil {
					// remove torrent file
					torrent.Cleanup(true)
					_ = torrent.MarkAsFailed()
					log.Printf("Error processing torrent file: %s", err)
				}
				if err == nil && torrent != nil && len(torrent.Files) > 0 {
					go b.processFiles(arr, torrent)
				}
				delete(events, file) // remove file from channel
			}
		}
	}
}
// startArr watches the Arr's watch folder for new torrent files and processes
// them with a one-second debounce. It blocks for the watcher's lifetime.
func (b *Blackhole) startArr(arr *debrid.Arr) {
	log.Printf("Watching: %s", arr.WatchFolder)
	w, err := fsnotify.NewWatcher()
	if err != nil {
		// Previously the code fell through here with a nil watcher and would
		// panic on w.Add / the deferred Close. Nothing can be watched, so stop.
		log.Println(err)
		return
	}
	// Ensure the watcher is released when this Arr's loop ends.
	defer func(w *fsnotify.Watcher) {
		if err := w.Close(); err != nil {
			log.Println(err)
		}
	}(w)
	events := make(map[string]time.Time)
	go watcher(w, events)
	if err = w.Add(arr.WatchFolder); err != nil {
		log.Println("Error Watching folder:", err)
		return
	}
	b.processFilesDebounced(arr, events, 1*time.Second)
}
// Start launches one watcher goroutine per configured Arr and blocks until
// they all return.
//
// The previous version deferred wg.Done() inside the loop body, so Done would
// only run when Start returned — but Start cannot return before wg.Wait() is
// satisfied, which requires those Done calls: the counter could never drain.
// Done now runs in each Arr's goroutine.
func (b *Blackhole) Start() {
	log.Println("[*] Starting Blackhole")
	var wg sync.WaitGroup
	for _, conf := range b.config.Arrs {
		wg.Add(1)
		headers := map[string]string{
			"X-Api-Key": conf.Token,
		}
		client := common.NewRLHTTPClient(nil, headers)
		arr := &debrid.Arr{
			Debrid:          b.config.Debrid,
			WatchFolder:     conf.WatchFolder,
			CompletedFolder: conf.CompletedFolder,
			Token:           conf.Token,
			URL:             conf.URL,
			Client:          client,
		}
		// Pass arr explicitly so each goroutine gets its own copy
		// (required before Go 1.22 loop-variable semantics).
		go func(arr *debrid.Arr) {
			defer wg.Done()
			b.startArr(arr)
		}(arr)
	}
	wg.Wait()
}

View File

@@ -3,33 +3,34 @@ package cmd
import ( import (
"cmp" "cmp"
"goBlack/common" "goBlack/common"
"goBlack/debrid" "goBlack/pkg/debrid"
"goBlack/pkg/proxy"
"goBlack/pkg/qbit"
"sync" "sync"
) )
func Start(config *common.Config) { func Start(config *common.Config) {
maxCacheSize := cmp.Or(config.MaxCacheSize, 1000) maxCacheSize := cmp.Or(config.MaxCacheSize, 1000)
cache := common.NewCache(maxCacheSize) cache := common.NewCache(maxCacheSize)
deb := debrid.NewDebrid(config.Debrid, cache) deb := debrid.NewDebrid(config.Debrid, cache)
var wg sync.WaitGroup var wg sync.WaitGroup
if config.Proxy.Enabled { if config.Proxy.Enabled {
proxy := NewProxy(*config, deb, cache) p := proxy.NewProxy(*config, deb, cache)
wg.Add(1) wg.Add(1)
go func() { go func() {
defer wg.Done() defer wg.Done()
proxy.Start() p.Start()
}() }()
} }
if config.QBitTorrent.Port != "" {
if len(config.Arrs) > 0 { qb := qbit.NewQBit(config, deb, cache)
blackhole := NewBlackhole(config, deb, cache)
wg.Add(1) wg.Add(1)
go func() { go func() {
defer wg.Done() defer wg.Done()
blackhole.Start() qb.Start()
}() }()
} }

View File

@@ -17,21 +17,23 @@ type DebridConfig struct {
type Config struct { type Config struct {
Debrid DebridConfig `json:"debrid"` Debrid DebridConfig `json:"debrid"`
Arrs []struct { Proxy struct {
WatchFolder string `json:"watch_folder"`
CompletedFolder string `json:"completed_folder"`
Token string `json:"token"`
URL string `json:"url"`
} `json:"arrs"`
Proxy struct {
Port string `json:"port"` Port string `json:"port"`
Enabled bool `json:"enabled"` Enabled bool `json:"enabled"`
Debug bool `json:"debug"` Debug bool `json:"debug"`
Username string `json:"username"` Username string `json:"username"`
Password string `json:"password"` Password string `json:"password"`
CachedOnly bool `json:"cached_only"` CachedOnly *bool `json:"cached_only"`
} }
MaxCacheSize int `json:"max_cache_size"` MaxCacheSize int `json:"max_cache_size"`
QBitTorrent struct {
Username string `json:"username"`
Password string `json:"password"`
Port string `json:"port"`
Debug bool `json:"debug"`
DownloadFolder string `json:"download_folder"`
Categories []string `json:"categories"`
} `json:"qbittorrent"`
} }
func LoadConfig(path string) (*Config, error) { func LoadConfig(path string) (*Config, error) {
@@ -53,6 +55,10 @@ func LoadConfig(path string) (*Config, error) {
if err != nil { if err != nil {
return nil, err return nil, err
} }
if config.Proxy.CachedOnly == nil {
config.Proxy.CachedOnly = new(bool)
*config.Proxy.CachedOnly = true
}
return config, nil return config, nil
} }

View File

@@ -5,9 +5,9 @@ import (
) )
var ( var (
VIDEOMATCH = "(?i)(\\.)(YUV|WMV|WEBM|VOB|VIV|SVI|ROQ|RMVB|RM|OGV|OGG|NSV|MXF|MTS|M2TS|TS|MPG|MPEG|M2V|MP2|MPE|MPV|MP4|M4P|M4V|MOV|QT|MNG|MKV|FLV|DRC|AVI|ASF|AMV)$" VIDEOMATCH = "(?i)(\\.)(YUV|WMV|WEBM|VOB|VIV|SVI|ROQ|RMVB|RM|OGV|OGG|NSV|MXF|MTS|M2TS|TS|MPG|MPEG|M2V|MP2|MPE|MPV|MP4|M4P|M4V|MOV|QT|MNG|MKV|FLV|DRC|AVI|ASF|AMV|MKA|F4V|3GP|3G2|DIVX|X264|X265)$"
SUBMATCH = "(?i)(\\.)(SRT|SUB|SBV|ASS|VTT|TTML|DFXP|STL|SCC|CAP|SMI|TTXT|TDS|USF|JSS|SSA|PSB|RT|LRC|SSB)$" SUBMATCH = "(?i)(\\.)(SRT|SUB|SBV|ASS|VTT|TTML|DFXP|STL|SCC|CAP|SMI|TTXT|TDS|USF|JSS|SSA|PSB|RT|LRC|SSB)$"
SeasonMatch = "(?i)(?:season|s)[.\\-_\\s]?(\\d+)" SAMPLEMATCH = `(?i)(^|[\\/]|[._-])(sample|trailer|thumb)s?([._-]|$)`
) )
func RegexMatch(regex string, value string) bool { func RegexMatch(regex string, value string) bool {
@@ -16,7 +16,8 @@ func RegexMatch(regex string, value string) bool {
} }
func RemoveExtension(value string) string { func RemoveExtension(value string) string {
re := regexp.MustCompile(VIDEOMATCH) pattern := "(?i)(\\.)(YUV|WMV|WEBM|VOB|VIV|SVI|ROQ|RMVB|RM|OGV|OGG|NSV|MXF|MTS|M2TS|TS|MPG|MPEG|M2V|MP2|MPE|MPV|MP4|M4P|M4V|MOV|QT|MNG|MKV|FLV|DRC|AVI|ASF|AMV|MKA|F4V|3GP|3G2|DIVX|X264|X265)$"
re := regexp.MustCompile(pattern)
// Find the last index of the matched extension // Find the last index of the matched extension
loc := re.FindStringIndex(value) loc := re.FindStringIndex(value)

View File

@@ -6,11 +6,13 @@ import (
"encoding/hex" "encoding/hex"
"fmt" "fmt"
"github.com/anacrolix/torrent/metainfo" "github.com/anacrolix/torrent/metainfo"
"io"
"log" "log"
"math/rand" "math/rand"
"net/http" "net/http"
"net/url" "net/url"
"os" "os"
"path/filepath"
"regexp" "regexp"
"strings" "strings"
) )
@@ -22,6 +24,41 @@ type Magnet struct {
Link string Link string
} }
// GetMagnetFromFile builds a Magnet from an open .torrent or .magnet file.
// filePath is used only to decide the format by its extension; the contents
// are read from file.
func GetMagnetFromFile(file io.Reader, filePath string) (*Magnet, error) {
	if filepath.Ext(filePath) != ".torrent" {
		// Anything else is treated as a .magnet file containing a magnet link.
		return GetMagnetInfo(ReadMagnetFile(file))
	}
	mi, err := metainfo.Load(file)
	if err != nil {
		return nil, err
	}
	hash := mi.HashInfoBytes()
	info, err := mi.UnmarshalInfo()
	if err != nil {
		return nil, err
	}
	return &Magnet{
		InfoHash: hash.HexString(),
		Name:     info.Name,
		Size:     info.Length,
		Link:     mi.Magnet(&hash, &info).String(),
	}, nil
}
// GetMagnetFromUrl resolves either a magnet link or an HTTP(S) URL pointing
// at a torrent into a Magnet. Any other scheme is rejected.
//
// The parameter was renamed from url, which shadowed this file's imported
// net/url package.
func GetMagnetFromUrl(link string) (*Magnet, error) {
	switch {
	case strings.HasPrefix(link, "magnet:"):
		return GetMagnetInfo(link)
	case strings.HasPrefix(link, "http"):
		return OpenMagnetHttpURL(link)
	default:
		return nil, fmt.Errorf("invalid url")
	}
}
func OpenMagnetFile(filePath string) string { func OpenMagnetFile(filePath string) string {
file, err := os.Open(filePath) file, err := os.Open(filePath)
if err != nil { if err != nil {
@@ -34,13 +71,15 @@ func OpenMagnetFile(filePath string) string {
return return
} }
}(file) // Ensure the file is closed after the function ends }(file) // Ensure the file is closed after the function ends
return ReadMagnetFile(file)
}
// Create a scanner to read the file line by line func ReadMagnetFile(file io.Reader) string {
scanner := bufio.NewScanner(file) scanner := bufio.NewScanner(file)
for scanner.Scan() { for scanner.Scan() {
magnetLink := scanner.Text() content := scanner.Text()
if magnetLink != "" { if content != "" {
return magnetLink return content
} }
} }
@@ -167,3 +206,8 @@ func processInfoHash(input string) (string, error) {
// If we get here, it's not a valid infohash and we couldn't convert it // If we get here, it's not a valid infohash and we couldn't convert it
return "", fmt.Errorf("invalid infohash: %s", input) return "", fmt.Errorf("invalid infohash: %s", input)
} }
// NewLogger returns a standard logger whose lines carry the prefix
// "[prefix] " and the log.LstdFlags timestamp.
//
// The destination is an io.Writer rather than *os.File so callers can log to
// buffers, pipes, etc.; existing call sites passing os.Stdout still compile.
func NewLogger(prefix string, output io.Writer) *log.Logger {
	return log.New(output, fmt.Sprintf("[%s] ", prefix), log.LstdFlags)
}

2
go.mod
View File

@@ -7,6 +7,7 @@ require (
github.com/elazarl/goproxy v0.0.0-20240726154733-8b0c20506380 github.com/elazarl/goproxy v0.0.0-20240726154733-8b0c20506380
github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2 github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2
github.com/fsnotify/fsnotify v1.7.0 github.com/fsnotify/fsnotify v1.7.0
github.com/go-chi/chi/v5 v5.1.0
github.com/valyala/fastjson v1.6.4 github.com/valyala/fastjson v1.6.4
golang.org/x/time v0.6.0 golang.org/x/time v0.6.0
) )
@@ -16,6 +17,7 @@ require (
github.com/anacrolix/missinggo/v2 v2.7.3 // indirect github.com/anacrolix/missinggo/v2 v2.7.3 // indirect
github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 // indirect github.com/bradfitz/iter v0.0.0-20191230175014-e8f45d346db8 // indirect
github.com/google/go-cmp v0.6.0 // indirect github.com/google/go-cmp v0.6.0 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/huandu/xstrings v1.3.2 // indirect github.com/huandu/xstrings v1.3.2 // indirect
golang.org/x/net v0.27.0 // indirect golang.org/x/net v0.27.0 // indirect
golang.org/x/sys v0.22.0 // indirect golang.org/x/sys v0.22.0 // indirect

4
go.sum
View File

@@ -70,6 +70,8 @@ github.com/glycerine/go-unsnap-stream v0.0.0-20190901134440-81cf024a9e0a/go.mod
github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24= github.com/glycerine/goconvey v0.0.0-20180728074245-46e3a41ad493/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
github.com/glycerine/goconvey v0.0.0-20190315024820-982ee783a72e/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24= github.com/glycerine/goconvey v0.0.0-20190315024820-982ee783a72e/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24= github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
github.com/go-chi/chi/v5 v5.1.0 h1:acVI1TYaD+hhedDJ3r54HyA6sExp3HfXq7QWEEY/xMw=
github.com/go-chi/chi/v5 v5.1.0/go.mod h1:DslCQbL2OYiznFReuXYUmQ2hGd1aDpCnlMNITLSKoi8=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
@@ -100,6 +102,8 @@ github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gopherjs/gopherjs v0.0.0-20181103185306-d547d1d9531e/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gopherjs/gopherjs v0.0.0-20190309154008-847fc94819f9/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= github.com/gopherjs/gopherjs v0.0.0-20190309154008-847fc94819f9/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=

View File

@@ -1,8 +1,10 @@
package debrid package debrid
import ( import (
"fmt"
"github.com/anacrolix/torrent/metainfo" "github.com/anacrolix/torrent/metainfo"
"goBlack/common" "goBlack/common"
"log"
"path/filepath" "path/filepath"
) )
@@ -12,6 +14,11 @@ type Service interface {
DownloadLink(torrent *Torrent) error DownloadLink(torrent *Torrent) error
Process(arr *Arr, magnet string) (*Torrent, error) Process(arr *Arr, magnet string) (*Torrent, error)
IsAvailable(infohashes []string) map[string]bool IsAvailable(infohashes []string) map[string]bool
GetMountPath() string
GetDownloadUncached() bool
GetTorrent(id string) (*Torrent, error)
GetName() string
GetLogger() *log.Logger
} }
type Debrid struct { type Debrid struct {
@@ -20,6 +27,8 @@ type Debrid struct {
DownloadUncached bool DownloadUncached bool
client *common.RLHTTPClient client *common.RLHTTPClient
cache *common.Cache cache *common.Cache
MountPath string
logger *log.Logger
} }
func NewDebrid(dc common.DebridConfig, cache *common.Cache) Service { func NewDebrid(dc common.DebridConfig, cache *common.Cache) Service {
@@ -110,3 +119,31 @@ func GetLocalCache(infohashes []string, cache *common.Cache) ([]string, map[stri
return hashes, result return hashes, result
} }
// ProcessQBitTorrent submits a magnet to the debrid service on behalf of the
// mock qBittorrent API and waits for its status, returning the resulting
// torrent. category becomes the Arr completed folder for the torrent.
//
// When the service is not configured to download uncached torrents, uncached
// magnets are rejected up front with the partially-built torrent returned for
// cleanup by the caller.
func ProcessQBitTorrent(d Service, magnet *common.Magnet, category string) (*Torrent, error) {
	arr := &Arr{
		CompletedFolder: category,
	}
	debridTorrent := &Torrent{
		InfoHash: magnet.InfoHash,
		Magnet:   magnet,
		Name:     magnet.Name,
		Arr:      arr,
		Size:     magnet.Size,
	}
	logger := d.GetLogger()
	logger.Printf("Torrent Name: %s", debridTorrent.Name)
	if !d.GetDownloadUncached() {
		cached, ok := d.IsAvailable([]string{debridTorrent.InfoHash})[debridTorrent.InfoHash]
		if !ok || !cached {
			return debridTorrent, fmt.Errorf("torrent is not cached")
		}
		logger.Printf("Torrent: %s is cached", debridTorrent.Name)
	}
	debridTorrent, err := d.SubmitMagnet(debridTorrent)
	if err != nil {
		return nil, err
	}
	// Guard against a submission that "succeeds" without an ID; the previous
	// combined check returned (nil, nil) here, leaving callers with neither
	// a torrent nor an error.
	if debridTorrent.Id == "" {
		return nil, fmt.Errorf("submitting magnet %s: no torrent id returned", debridTorrent.Name)
	}
	return d.CheckStatus(debridTorrent)
}

View File

@@ -4,10 +4,11 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"goBlack/common" "goBlack/common"
"goBlack/debrid/structs" "goBlack/pkg/debrid/structs"
"log" "log"
"net/http" "net/http"
gourl "net/url" gourl "net/url"
"os"
"path/filepath" "path/filepath"
"strconv" "strconv"
"strings" "strings"
@@ -19,6 +20,39 @@ type RealDebrid struct {
DownloadUncached bool DownloadUncached bool
client *common.RLHTTPClient client *common.RLHTTPClient
cache *common.Cache cache *common.Cache
MountPath string
logger *log.Logger
}
// GetMountPath returns the local folder where this debrid's files are
// mounted (used to build symlink targets).
func (r *RealDebrid) GetMountPath() string {
	return r.MountPath
}

// GetName returns the provider identifier for this service.
// (The Get prefix is unidiomatic Go but is dictated by the Service interface.)
func (r *RealDebrid) GetName() string {
	return "realdebrid"
}

// GetLogger returns the provider-prefixed logger for this service.
func (r *RealDebrid) GetLogger() *log.Logger {
	return r.logger
}
func GetTorrentFiles(data structs.RealDebridTorrentInfo) []TorrentFile {
files := make([]TorrentFile, 0)
for _, f := range data.Files {
name := filepath.Base(f.Path)
if (!common.RegexMatch(common.VIDEOMATCH, name) && !common.RegexMatch(common.SUBMATCH, name)) || common.RegexMatch(common.SAMPLEMATCH, name) {
continue
}
fileId := f.ID
file := &TorrentFile{
Name: name,
Path: filepath.Join(common.RemoveExtension(data.OriginalFilename), name),
Size: int64(f.Bytes),
Id: strconv.Itoa(fileId),
}
files = append(files, *file)
}
return files
} }
func (r *RealDebrid) Process(arr *Arr, magnet string) (*Torrent, error) { func (r *RealDebrid) Process(arr *Arr, magnet string) (*Torrent, error) {
@@ -114,35 +148,50 @@ func (r *RealDebrid) SubmitMagnet(torrent *Torrent) (*Torrent, error) {
return torrent, nil return torrent, nil
} }
// GetTorrent fetches a torrent's current state from Real-Debrid by id and
// maps it onto a Torrent, filtering its files through GetTorrentFiles.
func (r *RealDebrid) GetTorrent(id string) (*Torrent, error) {
	torrent := &Torrent{}
	endpoint := fmt.Sprintf("%s/torrents/info/%s", r.Host, id)
	resp, err := r.client.MakeRequest(http.MethodGet, endpoint, nil)
	if err != nil {
		return torrent, err
	}
	var info structs.RealDebridTorrentInfo
	if err = json.Unmarshal(resp, &info); err != nil {
		return torrent, err
	}
	// Real-Debrid reports the original filename; strip the media extension to
	// get a folder-style display name.
	name := common.RemoveExtension(info.OriginalFilename)
	torrent.Id = id
	torrent.Name = name
	torrent.Folder = name
	torrent.Bytes = info.Bytes
	torrent.Progress = info.Progress
	torrent.Status = info.Status
	torrent.Files = GetTorrentFiles(info)
	return torrent, nil
}
func (r *RealDebrid) CheckStatus(torrent *Torrent) (*Torrent, error) { func (r *RealDebrid) CheckStatus(torrent *Torrent) (*Torrent, error) {
url := fmt.Sprintf("%s/torrents/info/%s", r.Host, torrent.Id) url := fmt.Sprintf("%s/torrents/info/%s", r.Host, torrent.Id)
for { for {
resp, err := r.client.MakeRequest(http.MethodGet, url, nil) resp, err := r.client.MakeRequest(http.MethodGet, url, nil)
if err != nil { if err != nil {
log.Println("ERROR Checking file: ", err)
return torrent, err return torrent, err
} }
var data structs.RealDebridTorrentInfo var data structs.RealDebridTorrentInfo
err = json.Unmarshal(resp, &data) err = json.Unmarshal(resp, &data)
status := data.Status status := data.Status
torrent.Folder = common.RemoveExtension(data.OriginalFilename) torrent.Folder = common.RemoveExtension(data.OriginalFilename)
torrent.Bytes = data.Bytes
torrent.Progress = data.Progress
torrent.Speed = data.Speed
torrent.Seeders = data.Seeders
if status == "error" || status == "dead" || status == "magnet_error" { if status == "error" || status == "dead" || status == "magnet_error" {
return torrent, fmt.Errorf("torrent: %s has error", torrent.Name) return torrent, fmt.Errorf("torrent: %s has error", torrent.Name)
} else if status == "waiting_files_selection" { } else if status == "waiting_files_selection" {
files := make([]TorrentFile, 0) files := GetTorrentFiles(data)
for _, f := range data.Files {
name := filepath.Base(f.Path)
if !common.RegexMatch(common.VIDEOMATCH, name) && !common.RegexMatch(common.SUBMATCH, name) {
continue
}
fileId := f.ID
file := &TorrentFile{
Name: name,
Path: filepath.Join(torrent.Folder, name),
Size: int64(f.Bytes),
Id: strconv.Itoa(fileId),
}
files = append(files, *file)
}
torrent.Files = files torrent.Files = files
if len(files) == 0 { if len(files) == 0 {
return torrent, fmt.Errorf("no video files found") return torrent, fmt.Errorf("no video files found")
@@ -166,8 +215,6 @@ func (r *RealDebrid) CheckStatus(torrent *Torrent) (*Torrent, error) {
return torrent, err return torrent, err
} }
break break
} else if status == "downloading" {
return torrent, fmt.Errorf("torrent is uncached")
} }
} }
@@ -178,17 +225,24 @@ func (r *RealDebrid) DownloadLink(torrent *Torrent) error {
return nil return nil
} }
// GetDownloadUncached reports whether this service is configured to accept
// torrents that are not already cached on the debrid provider.
func (r *RealDebrid) GetDownloadUncached() bool {
	return r.DownloadUncached
}
func NewRealDebrid(dc common.DebridConfig, cache *common.Cache) *RealDebrid { func NewRealDebrid(dc common.DebridConfig, cache *common.Cache) *RealDebrid {
rl := common.ParseRateLimit(dc.RateLimit) rl := common.ParseRateLimit(dc.RateLimit)
headers := map[string]string{ headers := map[string]string{
"Authorization": fmt.Sprintf("Bearer %s", dc.APIKey), "Authorization": fmt.Sprintf("Bearer %s", dc.APIKey),
} }
client := common.NewRLHTTPClient(rl, headers) client := common.NewRLHTTPClient(rl, headers)
logger := common.NewLogger(dc.Name, os.Stdout)
return &RealDebrid{ return &RealDebrid{
Host: dc.Host, Host: dc.Host,
APIKey: dc.APIKey, APIKey: dc.APIKey,
DownloadUncached: dc.DownloadUncached, DownloadUncached: dc.DownloadUncached,
client: client, client: client,
cache: cache, cache: cache,
MountPath: dc.Folder,
logger: logger,
} }
} }

View File

@@ -74,7 +74,7 @@ type RealDebridTorrentInfo struct {
Filename string `json:"filename"` Filename string `json:"filename"`
OriginalFilename string `json:"original_filename"` OriginalFilename string `json:"original_filename"`
Hash string `json:"hash"` Hash string `json:"hash"`
Bytes int `json:"bytes"` Bytes int64 `json:"bytes"`
OriginalBytes int `json:"original_bytes"` OriginalBytes int `json:"original_bytes"`
Host string `json:"host"` Host string `json:"host"`
Split int `json:"split"` Split int `json:"split"`

View File

@@ -7,6 +7,7 @@ import (
"net/http" "net/http"
gourl "net/url" gourl "net/url"
"os" "os"
"path/filepath"
"strconv" "strconv"
"strings" "strings"
) )
@@ -39,11 +40,20 @@ type Torrent struct {
Folder string `json:"folder"` Folder string `json:"folder"`
Filename string `json:"filename"` Filename string `json:"filename"`
Size int64 `json:"size"` Size int64 `json:"size"`
Bytes int64 `json:"bytes"` // Size of only the files that are downloaded
Magnet *common.Magnet `json:"magnet"` Magnet *common.Magnet `json:"magnet"`
Files []TorrentFile `json:"files"` Files []TorrentFile `json:"files"`
Status string `json:"status"` Status string `json:"status"`
Progress int `json:"progress"`
Speed int `json:"speed"`
Seeders int `json:"seeders"`
Arr *Arr Debrid *Debrid
Arr *Arr
}
// GetSymlinkFolder returns the directory under parent where this torrent's
// symlinks are placed: parent/<arr completed folder>/<torrent folder>.
func (t *Torrent) GetSymlinkFolder(parent string) string {
	return filepath.Join(parent, t.Arr.CompletedFolder, t.Folder)
}
type TorrentFile struct { type TorrentFile struct {

View File

@@ -1,4 +1,4 @@
package cmd package proxy
import ( import (
"bytes" "bytes"
@@ -9,7 +9,7 @@ import (
"github.com/elazarl/goproxy/ext/auth" "github.com/elazarl/goproxy/ext/auth"
"github.com/valyala/fastjson" "github.com/valyala/fastjson"
"goBlack/common" "goBlack/common"
"goBlack/debrid" "goBlack/pkg/debrid"
"io" "io"
"log" "log"
"net/http" "net/http"
@@ -74,6 +74,7 @@ type Proxy struct {
cachedOnly bool cachedOnly bool
debrid debrid.Service debrid debrid.Service
cache *common.Cache cache *common.Cache
logger *log.Logger
} }
func NewProxy(config common.Config, deb debrid.Service, cache *common.Cache) *Proxy { func NewProxy(config common.Config, deb debrid.Service, cache *common.Cache) *Proxy {
@@ -85,9 +86,10 @@ func NewProxy(config common.Config, deb debrid.Service, cache *common.Cache) *Pr
debug: cfg.Debug, debug: cfg.Debug,
username: cfg.Username, username: cfg.Username,
password: cfg.Password, password: cfg.Password,
cachedOnly: cfg.CachedOnly, cachedOnly: *cfg.CachedOnly,
debrid: deb, debrid: deb,
cache: cache, cache: cache,
logger: common.NewLogger("Proxy", os.Stdout),
} }
} }
@@ -227,7 +229,7 @@ func (p *Proxy) ProcessXMLResponse(resp *http.Response) *http.Response {
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
log.Println("Error reading response body:", err) p.logger.Println("Error reading response body:", err)
resp.Body = io.NopCloser(bytes.NewReader(body)) resp.Body = io.NopCloser(bytes.NewReader(body))
return resp return resp
} }
@@ -239,13 +241,17 @@ func (p *Proxy) ProcessXMLResponse(resp *http.Response) *http.Response {
var rss RSS var rss RSS
err = xml.Unmarshal(body, &rss) err = xml.Unmarshal(body, &rss)
if err != nil { if err != nil {
log.Printf("Error unmarshalling XML: %v", err) p.logger.Printf("Error unmarshalling XML: %v", err)
resp.Body = io.NopCloser(bytes.NewReader(body)) resp.Body = io.NopCloser(bytes.NewReader(body))
return resp return resp
} }
indexer := "" indexer := ""
if len(rss.Channel.Items) > 0 { if len(rss.Channel.Items) > 0 {
indexer = rss.Channel.Items[0].ProwlarrIndexer.Text indexer = rss.Channel.Items[0].ProwlarrIndexer.Text
} else {
p.logger.Println("No items found in RSS feed")
resp.Body = io.NopCloser(bytes.NewReader(body))
return resp
} }
// Step 4: Extract infohash or magnet URI, manipulate data // Step 4: Extract infohash or magnet URI, manipulate data
@@ -273,13 +279,20 @@ func (p *Proxy) ProcessXMLResponse(resp *http.Response) *http.Response {
} }
} }
log.Printf("[%s Report]: %d/%d items are cached || Found %d infohash", indexer, len(newItems), len(rss.Channel.Items), len(hashes)) if len(newItems) > 0 {
rss.Channel.Items = newItems p.logger.Printf("[%s Report]: %d/%d items are cached || Found %d infohash", indexer, len(newItems), len(rss.Channel.Items), len(hashes))
} else {
// This will prevent the indexer from being disabled by the arr
p.logger.Printf("[%s Report]: No Items are cached; Return only first item with [UnCached]", indexer)
item := rss.Channel.Items[0]
item.Title = fmt.Sprintf("%s [UnCached]", item.Title)
newItems = append(newItems, item)
}
// rss.Channel.Items = newItems rss.Channel.Items = newItems
modifiedBody, err := xml.MarshalIndent(rss, "", " ") modifiedBody, err := xml.MarshalIndent(rss, "", " ")
if err != nil { if err != nil {
log.Printf("Error marshalling XML: %v", err) p.logger.Printf("Error marshalling XML: %v", err)
resp.Body = io.NopCloser(bytes.NewReader(body)) resp.Body = io.NopCloser(bytes.NewReader(body))
return resp return resp
} }
@@ -306,7 +319,9 @@ func (p *Proxy) Start() {
}) })
} }
proxy.OnRequest(goproxy.ReqHostMatches(regexp.MustCompile("^.443$"))).HandleConnect(goproxy.AlwaysMitm) proxy.OnRequest(
goproxy.ReqHostMatches(regexp.MustCompile("^.443$")),
UrlMatches(regexp.MustCompile("^.*/api\\?t=(search|tvsearch|movie)(&.*)?$"))).HandleConnect(goproxy.AlwaysMitm)
proxy.OnResponse( proxy.OnResponse(
UrlMatches(regexp.MustCompile("^.*/api\\?t=(search|tvsearch|movie)(&.*)?$")), UrlMatches(regexp.MustCompile("^.*/api\\?t=(search|tvsearch|movie)(&.*)?$")),
goproxy.StatusCodeIs(http.StatusOK, http.StatusAccepted)).DoFunc( goproxy.StatusCodeIs(http.StatusOK, http.StatusAccepted)).DoFunc(
@@ -316,6 +331,6 @@ func (p *Proxy) Start() {
proxy.Verbose = p.debug proxy.Verbose = p.debug
portFmt := fmt.Sprintf(":%s", p.port) portFmt := fmt.Sprintf(":%s", p.port)
log.Printf("[*] Starting proxy server on %s\n", portFmt) p.logger.Printf("[*] Starting proxy server on %s\n", portFmt)
log.Fatal(http.ListenAndServe(fmt.Sprintf("%s", portFmt), proxy)) p.logger.Fatal(http.ListenAndServe(fmt.Sprintf("%s", portFmt), proxy))
} }

39
pkg/qbit/handlers.go Normal file
View File

@@ -0,0 +1,39 @@
package qbit
import (
"github.com/go-chi/chi/v5"
"net/http"
)
// AddRoutes mounts the mock qBittorrent Web API (v2) onto the given chi
// router. The login endpoint is public; everything else sits behind the
// basic-auth middleware, and the torrent endpoints additionally have their
// "hashes" parameter extracted into the request context by HashesCtx.
func (q *QBit) AddRoutes(r chi.Router) http.Handler {
	r.Route("/api/v2", func(api chi.Router) {
		// Authentication endpoint is reachable without credentials.
		api.Post("/auth/login", q.handleLogin)
		api.Group(func(private chi.Router) {
			private.Use(q.authMiddleware)
			private.Route("/torrents", func(t chi.Router) {
				t.Use(HashesCtx)
				t.Get("/info", q.handleTorrentsInfo)
				t.Get("/categories", q.handleCategories)
				t.Get("/pause", q.handleTorrentsPause)
				t.Get("/resume", q.handleTorrentsResume)
				t.Get("/recheck", q.handleTorrentRecheck)
				t.Get("/properties", q.handleTorrentProperties)
				t.Post("/add", q.handleTorrentsAdd)
				t.Post("/delete", q.handleTorrentsDelete)
				t.Post("/createCategory", q.handleCreateCategory)
			})
			private.Route("/app", func(app chi.Router) {
				app.Get("/version", q.handleVersion)
				app.Get("/webapiVersion", q.handleWebAPIVersion)
				app.Get("/preferences", q.handlePreferences)
				app.Get("/buildInfo", q.handleBuildInfo)
				app.Get("/shutdown", q.shutdown)
			})
		})
	})
	return r
}

42
pkg/qbit/handlers_app.go Normal file
View File

@@ -0,0 +1,42 @@
package qbit
import (
"net/http"
"path/filepath"
)
// handleVersion reports the emulated qBittorrent application version.
func (q *QBit) handleVersion(w http.ResponseWriter, r *http.Request) {
	// Write implicitly sends a 200 status; the original called WriteHeader
	// afterwards, which is a no-op that logs "superfluous response.WriteHeader".
	_, _ = w.Write([]byte("v4.3.2"))
}
// handleWebAPIVersion reports the emulated qBittorrent Web API version.
func (q *QBit) handleWebAPIVersion(w http.ResponseWriter, r *http.Request) {
	// Write implicitly sends a 200 status; calling WriteHeader afterwards
	// would be a superfluous no-op.
	_, _ = w.Write([]byte("2.7"))
}
// handlePreferences returns a canned qBittorrent preferences payload,
// personalised with this instance's username and download paths.
func (q *QBit) handlePreferences(w http.ResponseWriter, r *http.Request) {
	prefs := NewAppPreferences()
	prefs.WebUiUsername = q.Username
	prefs.SavePath = q.DownloadFolder
	prefs.TempPath = filepath.Join(q.DownloadFolder, "temp")
	JSONResponse(w, prefs, http.StatusOK)
}
// handleBuildInfo returns static build metadata mimicking a real
// qBittorrent 4.3.x binary.
func (q *QBit) handleBuildInfo(w http.ResponseWriter, r *http.Request) {
	info := BuildInfo{
		Qt:         "5.15.2",
		Libtorrent: "1.2.11.0",
		Boost:      "1.75.0",
		Openssl:    "1.1.1i",
		Zlib:       "1.2.11",
		Bitness:    64,
	}
	JSONResponse(w, info, http.StatusOK)
}
// shutdown acknowledges a qBittorrent shutdown request with 200 OK without
// actually stopping the server; clients only need the acknowledgement.
func (q *QBit) shutdown(w http.ResponseWriter, r *http.Request) {
	w.WriteHeader(http.StatusOK)
}

50
pkg/qbit/handlers_auth.go Normal file
View File

@@ -0,0 +1,50 @@
package qbit
import (
"net/http"
"time"
)
// handleLogin implements qBittorrent's /auth/login endpoint: it validates
// the submitted form credentials against the configured username/password
// and issues an SID session cookie on success.
//
// NOTE(review): the SID is stored in the sessions map with a 24h expiry but
// the cookie lives ~10 years and authMiddleware uses basic auth instead of
// the SID — confirm whether session validation is intended elsewhere.
func (q *QBit) handleLogin(w http.ResponseWriter, r *http.Request) {
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Failed to parse form data", http.StatusBadRequest)
		return
	}

	username := r.Form.Get("username")
	password := r.Form.Get("password")

	// A single exact-match comparison also rejects empty credentials, so the
	// original's separate empty-field pre-check was redundant and its
	// "accept any non-empty" comment was wrong.
	if username != q.Username || password != q.Password {
		http.Error(w, "Invalid credentials", http.StatusUnauthorized)
		return
	}

	sid, err := generateSID()
	if err != nil {
		http.Error(w, "Failed to generate session ID", http.StatusInternalServerError)
		return
	}

	http.SetCookie(w, &http.Cookie{
		Name:     cookieName,
		Value:    sid,
		Path:     "/",
		HttpOnly: true,
		MaxAge:   315360000, // ~10 years
	})

	// Record when this session expires.
	sessions.Store(sid, time.Now().Add(24*time.Hour))

	w.WriteHeader(http.StatusOK)
	_, _ = w.Write([]byte("Ok."))
}

View File

@@ -0,0 +1,171 @@
package qbit
import (
"goBlack/common"
"io"
"net/http"
"path/filepath"
"strings"
)
// handleTorrentsInfo implements /torrents/info: it returns the stored
// torrents, optionally narrowed by category, state filter, and the hashes
// that HashesCtx placed in the request context.
func (q *QBit) handleTorrentsInfo(w http.ResponseWriter, r *http.Request) {
	ctx := r.Context()
	// strings.Trim(s, "") is a no-op (empty cutset); TrimSpace was intended.
	category := strings.TrimSpace(r.URL.Query().Get("category"))
	filter := strings.TrimSpace(r.URL.Query().Get("filter"))
	hashes, _ := ctx.Value("hashes").([]string)
	torrents := q.storage.GetAll(category, filter, hashes)
	JSONResponse(w, torrents, http.StatusOK)
}
// handleTorrentsAdd implements /torrents/add. Torrents arrive either as
// newline-separated magnet/URL values ("urls") or as uploaded .torrent
// files ("torrents"); each accepted torrent is processed asynchronously.
func (q *QBit) handleTorrentsAdd(w http.ResponseWriter, r *http.Request) {
	contentType := strings.Split(r.Header.Get("Content-Type"), ";")[0]
	switch contentType {
	case "multipart/form-data":
		if err := r.ParseMultipartForm(32 << 20); err != nil { // 32MB max memory
			q.logger.Printf("Error parsing form: %v\n", err)
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
	case "application/x-www-form-urlencoded":
		if err := r.ParseForm(); err != nil {
			q.logger.Printf("Error parsing form: %v\n", err)
			http.Error(w, err.Error(), http.StatusBadRequest)
			return
		}
	}

	// r.MultipartForm is nil for non-multipart requests; the original
	// dereferenced it unconditionally, panicking on the urlencoded path.
	fileCount := 0
	if r.MultipartForm != nil {
		fileCount = len(r.MultipartForm.File["torrents"])
	}
	urls := r.FormValue("urls")
	category := r.FormValue("category")

	if fileCount == 0 && urls == "" {
		http.Error(w, "No torrent provided", http.StatusBadRequest)
		return
	}

	if urls != "" {
		for _, url := range strings.Split(urls, "\n") {
			magnet, err := common.GetMagnetFromUrl(url)
			if err != nil {
				q.logger.Printf("Error parsing magnet link: %v\n", err)
				http.Error(w, err.Error(), http.StatusBadRequest)
				return
			}
			go q.Process(magnet, category)
		}
	}

	if r.MultipartForm != nil {
		for _, fileHeader := range r.MultipartForm.File["torrents"] {
			file, err := fileHeader.Open()
			if err != nil {
				// The original ignored this error and could pass a nil file on.
				q.logger.Printf("Error opening file: %s", fileHeader.Filename)
				http.Error(w, err.Error(), http.StatusBadRequest)
				return
			}
			var reader io.Reader = file
			magnet, err := common.GetMagnetFromFile(reader, fileHeader.Filename)
			// Close now rather than defer: a defer inside the loop would hold
			// every upload open until the handler returns.
			_ = file.Close()
			if err != nil {
				http.Error(w, err.Error(), http.StatusBadRequest)
				q.logger.Printf("Error reading file: %s", fileHeader.Filename)
				return
			}
			go q.Process(magnet, category)
		}
	}

	w.WriteHeader(http.StatusOK)
}
// handleTorrentsDelete implements /torrents/delete: every hash supplied via
// HashesCtx is removed from storage (content folder included).
func (q *QBit) handleTorrentsDelete(w http.ResponseWriter, r *http.Request) {
	hashes, _ := r.Context().Value("hashes").([]string)
	if len(hashes) == 0 {
		http.Error(w, "No hashes provided", http.StatusBadRequest)
		return
	}
	for _, h := range hashes {
		q.storage.Delete(h)
	}
	w.WriteHeader(http.StatusOK)
}
// handleTorrentsPause implements /torrents/pause for the hashes in context.
// Unknown hashes are skipped silently, mirroring qBittorrent.
func (q *QBit) handleTorrentsPause(w http.ResponseWriter, r *http.Request) {
	hashes, _ := r.Context().Value("hashes").([]string)
	for _, h := range hashes {
		if t := q.storage.Get(h); t != nil {
			go q.PauseTorrent(t)
		}
	}
	w.WriteHeader(http.StatusOK)
}
// handleTorrentsResume implements /torrents/resume for the hashes in
// context. Unknown hashes are skipped silently.
func (q *QBit) handleTorrentsResume(w http.ResponseWriter, r *http.Request) {
	hashes, _ := r.Context().Value("hashes").([]string)
	for _, h := range hashes {
		if t := q.storage.Get(h); t != nil {
			go q.ResumeTorrent(t)
		}
	}
	w.WriteHeader(http.StatusOK)
}
// handleTorrentRecheck implements /torrents/recheck: each known torrent in
// the context hashes is refreshed asynchronously.
func (q *QBit) handleTorrentRecheck(w http.ResponseWriter, r *http.Request) {
	hashes, _ := r.Context().Value("hashes").([]string)
	for _, h := range hashes {
		if t := q.storage.Get(h); t != nil {
			go q.RefreshTorrent(t)
		}
	}
	w.WriteHeader(http.StatusOK)
}
// handleCategories implements /torrents/categories, mapping every known
// category name to its save path beneath the download folder.
func (q *QBit) handleCategories(w http.ResponseWriter, r *http.Request) {
	categories := make(map[string]TorrentCategory, len(q.Categories))
	for _, name := range q.Categories {
		categories[name] = TorrentCategory{
			Name:     name,
			SavePath: filepath.Join(q.DownloadFolder, name),
		}
	}
	JSONResponse(w, categories, http.StatusOK)
}
// handleCreateCategory implements /torrents/createCategory. Re-creating an
// existing category is treated as success without duplicating the entry.
func (q *QBit) handleCreateCategory(w http.ResponseWriter, r *http.Request) {
	if err := r.ParseForm(); err != nil {
		http.Error(w, "Failed to parse form data", http.StatusBadRequest)
		return
	}
	name := r.Form.Get("category")
	if name == "" {
		http.Error(w, "No name provided", http.StatusBadRequest)
		return
	}
	// Guard against unbounded growth of q.Categories: the original appended
	// on every call, so repeated createCategory requests (which Arr clients
	// issue routinely) accumulated duplicates.
	exists := false
	for _, cat := range q.Categories {
		if cat == name {
			exists = true
			break
		}
	}
	if !exists {
		q.Categories = append(q.Categories, name)
	}
	JSONResponse(w, nil, http.StatusOK)
}
// handleTorrentProperties implements /torrents/properties for a single
// hash. Unknown hashes now yield 404 (matching qBittorrent) instead of
// passing a nil torrent into GetTorrentProperties.
func (q *QBit) handleTorrentProperties(w http.ResponseWriter, r *http.Request) {
	hash := r.URL.Query().Get("hash")
	torrent := q.storage.Get(hash)
	if torrent == nil {
		http.Error(w, "Torrent not found", http.StatusNotFound)
		return
	}
	JSONResponse(w, q.GetTorrentProperties(torrent), http.StatusOK)
}

67
pkg/qbit/main.go Normal file
View File

@@ -0,0 +1,67 @@
package qbit
import (
"cmp"
"fmt"
"github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware"
"goBlack/common"
"goBlack/pkg/debrid"
"log"
"net/http"
"os"
"sync"
)
// QBit is the mock qBittorrent server: API credentials and port, the
// category/download layout, the backing debrid service, and the
// JSON-persisted torrent storage.
type QBit struct {
	Username       string   `json:"username"`
	Password       string   `json:"password"`
	Port           string   `json:"port"`            // HTTP listen port, e.g. "8282"
	DownloadFolder string   `json:"download_folder"` // root folder for symlinked completed downloads
	Categories     []string `json:"categories"`
	debrid         debrid.Service
	cache          *common.Cache
	storage        *TorrentStorage
	debug          bool
	logger         *log.Logger
}

// sessions maps a session ID (the SID cookie value) to its expiry time.
var (
	sessions sync.Map
)

const (
	sidLength  = 32    // bytes of entropy used when generating an SID
	cookieName = "SID" // qBittorrent's session cookie name
)
// NewQBit builds a QBit server from the loaded configuration. The listen
// port falls back to the QBIT_PORT environment variable and finally "8182".
func NewQBit(config *common.Config, deb debrid.Service, cache *common.Cache) *QBit {
	cfg := config.QBitTorrent
	return &QBit{
		Username:       cfg.Username,
		Password:       cfg.Password,
		Port:           cmp.Or(cfg.Port, os.Getenv("QBIT_PORT"), "8182"),
		DownloadFolder: cfg.DownloadFolder,
		Categories:     cfg.Categories,
		debrid:         deb,
		cache:          cache,
		debug:          cfg.Debug,
		storage:        NewTorrentStorage("torrents.json"),
		logger:         common.NewLogger("QBit", os.Stdout),
	}
}
// Start runs the qBittorrent API server; it blocks until the listener fails.
func (q *QBit) Start() {
	router := chi.NewRouter()
	router.Use(middleware.Logger, middleware.Recoverer)
	q.AddRoutes(router)
	addr := ":" + q.Port
	q.logger.Printf("Starting QBit server on %s", addr)
	q.logger.Fatal(http.ListenAndServe(addr, router))
}

41
pkg/qbit/middleware.go Normal file
View File

@@ -0,0 +1,41 @@
package qbit
import (
"context"
"crypto/subtle"
"github.com/go-chi/chi/v5"
"net/http"
"strings"
)
// authMiddleware guards API endpoints with HTTP basic auth, comparing the
// supplied credentials against the configured ones in constant time.
func (q *QBit) authMiddleware(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		user, pass, ok := r.BasicAuth()
		userOK := ok && subtle.ConstantTimeCompare([]byte(user), []byte(q.Username)) == 1
		passOK := ok && subtle.ConstantTimeCompare([]byte(pass), []byte(q.Password)) == 1
		if !userOK || !passOK {
			http.Error(w, "Unauthorized", http.StatusUnauthorized)
			return
		}
		next.ServeHTTP(w, r)
	})
}
// HashesCtx extracts the "hashes" parameter — qBittorrent clients send a
// single "|"-separated value — from the chi URL parameter or, failing that,
// the parsed form/query, and stores the split list in the request context
// under the "hashes" key.
func HashesCtx(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		raw := chi.URLParam(r, "hashes")
		if raw == "" {
			// ParseForm also captures the query string for GET requests.
			_ = r.ParseForm()
			raw = r.Form.Get("hashes")
		}
		var hashes []string
		if raw != "" {
			// The form value must be split on "|" exactly like the URL
			// parameter; the original passed the raw form values through,
			// so multi-hash requests were treated as one bogus hash.
			hashes = strings.Split(raw, "|")
		}
		// NOTE(review): a plain string context key triggers a vet warning,
		// but the handlers read ctx.Value("hashes"), so it must stay a string.
		ctx := context.WithValue(r.Context(), "hashes", hashes)
		next.ServeHTTP(w, r.WithContext(ctx))
	})
}

101
pkg/qbit/qbit.go Normal file
View File

@@ -0,0 +1,101 @@
package qbit
import (
"github.com/google/uuid"
"goBlack/common"
"goBlack/pkg/debrid"
"os"
"path/filepath"
"strings"
"sync"
"time"
)
// Process registers a torrent built from the magnet, forwards it to the
// debrid service, and links the resulting files. On any debrid failure the
// torrent stays in storage marked as errored so the Arr can see it failed.
func (q *QBit) Process(magnet *common.Magnet, category string) (*Torrent, error) {
	torrent := q.CreateTorrentFromMagnet(magnet, category)
	go q.storage.AddOrUpdate(torrent)

	debridTorrent, err := debrid.ProcessQBitTorrent(q.debrid, magnet, category)
	if debridTorrent == nil || err != nil {
		return q.MarkAsFailed(torrent), err
	}

	torrent.ID = debridTorrent.Id
	q.processFiles(torrent, debridTorrent)
	return torrent, nil
}
// CreateTorrentFromMagnet builds the initial qBit-style record for a freshly
// submitted magnet, pre-filled with plausible swarm statistics so clients
// treat it like a live download.
func (q *QBit) CreateTorrentFromMagnet(magnet *common.Magnet, category string) *Torrent {
	return &Torrent{
		// Identity.
		ID:        uuid.NewString(),
		Hash:      strings.ToLower(magnet.InfoHash),
		Name:      magnet.Name,
		Size:      magnet.Size,
		Category:  category,
		MagnetUri: magnet.Link,
		// Initial state.
		State:   "downloading",
		AddedOn: time.Now().Unix(),
		Tracker: "udp://tracker.opentrackr.org:1337",
		// Mocked limits and swarm numbers.
		UpLimit:        -1,
		DlLimit:        -1,
		FlPiecePrio:    false,
		ForceStart:     false,
		AutoTmm:        false,
		Availability:   2,
		MaxRatio:       -1,
		MaxSeedingTime: -1,
		NumComplete:    10,
		NumIncomplete:  0,
		NumLeechs:      1,
		Ratio:          1,
		RatioLimit:     1,
	}
}
// processFiles waits concurrently for every file of the debrid torrent to
// appear on the rclone mount, symlinks each file into the Arr's completed
// folder as it becomes ready, and finally marks the torrent updated.
func (q *QBit) processFiles(torrent *Torrent, debridTorrent *debrid.Torrent) {
	var wg sync.WaitGroup
	files := debridTorrent.Files
	// Buffered to len(files) so checker goroutines never block on send.
	ready := make(chan debrid.TorrentFile, len(files))
	q.logger.Printf("Checking %d files...", len(files))
	rCloneMountPath := q.debrid.GetMountPath()
	path := filepath.Join(q.DownloadFolder, debridTorrent.Arr.CompletedFolder, debridTorrent.Folder) // /mnt/symlinks/{category}/MyTVShow/
	err := os.MkdirAll(path, os.ModePerm)
	if err != nil {
		// Best effort: symlink creation below will surface the problem too.
		q.logger.Printf("Failed to create directory: %s\n", path)
	}
	// One poller goroutine per file; each sends on `ready` once its file
	// is visible on the mount.
	for _, file := range files {
		wg.Add(1)
		go checkFileLoop(&wg, rCloneMountPath, file, ready)
	}
	// Close `ready` only after every poller finishes so the range below
	// terminates.
	go func() {
		wg.Wait()
		close(ready)
	}()
	for f := range ready {
		q.logger.Println("File is ready:", f.Path)
		q.createSymLink(path, debridTorrent, f)
	}
	// Update the torrent when all files are ready
	q.UpdateTorrent(torrent, debridTorrent)
	q.logger.Printf("%s COMPLETED \n", debridTorrent.Name)
}
// createSymLink links one debrid-mounted file into the torrent's completed
// folder so the Arrs can import it without copying any data.
func (q *QBit) createSymLink(path string, torrent *debrid.Torrent, file debrid.TorrentFile) {
	// /mnt/symlinks/{category}/MyTVShow/MyTVShow.S01E01.720p.mkv
	fullPath := filepath.Join(path, file.Name)
	// debridFolder/MyTVShow/MyTVShow.S01E01.720p.mkv
	target := filepath.Join(q.debrid.GetMountPath(), torrent.Folder, file.Name)
	// Report real symlink failures instead of discarding the error as the
	// original did; an already-existing link is fine (e.g. reprocessing).
	if err := os.Symlink(target, fullPath); err != nil && !os.IsExist(err) {
		q.logger.Printf("Failed to create symlink: %s: %v\n", fullPath, err)
	}
	if !fileReady(fullPath) {
		q.logger.Printf("Symlink not ready: %s\n", fullPath)
	}
}

137
pkg/qbit/storage.go Normal file
View File

@@ -0,0 +1,137 @@
package qbit
import (
"encoding/json"
"os"
"sync"
)
// TorrentStorage is a mutex-guarded, insertion-ordered collection of
// torrents keyed by info-hash, persisted to JSON via Save.
type TorrentStorage struct {
	torrents map[string]*Torrent // info-hash -> torrent
	mu       sync.RWMutex
	order    []string // insertion order of hashes, for stable listings
}
// loadTorrentsFromJSON reads a hash->torrent map from the given JSON file.
func loadTorrentsFromJSON(filename string) (map[string]*Torrent, error) {
	data, err := os.ReadFile(filename)
	if err != nil {
		return nil, err
	}
	out := make(map[string]*Torrent)
	if err = json.Unmarshal(data, &out); err != nil {
		return nil, err
	}
	return out, nil
}
// NewTorrentStorage restores a TorrentStorage from the given JSON file,
// starting empty when the file is missing or unreadable.
func NewTorrentStorage(filename string) *TorrentStorage {
	torrents, err := loadTorrentsFromJSON(filename)
	if err != nil {
		torrents = map[string]*Torrent{}
	}
	order := make([]string, 0, len(torrents))
	for hash := range torrents {
		order = append(order, hash)
	}
	return &TorrentStorage{
		torrents: torrents,
		order:    order,
	}
}
// Add inserts a torrent. If the hash already exists the entry is replaced
// without appending to the insertion order; the original appended
// unconditionally, so repeated Adds produced duplicate listings in GetAll.
func (ts *TorrentStorage) Add(torrent *Torrent) {
	ts.mu.Lock()
	defer ts.mu.Unlock()
	if _, exists := ts.torrents[torrent.Hash]; !exists {
		ts.order = append(ts.order, torrent.Hash)
	}
	ts.torrents[torrent.Hash] = torrent
}
// AddOrUpdate upserts a torrent, registering its hash in the listing order
// only on first insertion.
func (ts *TorrentStorage) AddOrUpdate(torrent *Torrent) {
	ts.mu.Lock()
	defer ts.mu.Unlock()
	_, seen := ts.torrents[torrent.Hash]
	if !seen {
		ts.order = append(ts.order, torrent.Hash)
	}
	ts.torrents[torrent.Hash] = torrent
}
// GetByID scans linearly for the torrent with the given debrid ID,
// returning nil when no torrent matches.
func (ts *TorrentStorage) GetByID(id string) *Torrent {
	ts.mu.RLock()
	defer ts.mu.RUnlock()
	for _, t := range ts.torrents {
		if t.ID == id {
			return t
		}
	}
	return nil
}
// Get returns the torrent stored under the given info-hash, or nil.
func (ts *TorrentStorage) Get(hash string) *Torrent {
	ts.mu.RLock()
	t := ts.torrents[hash]
	ts.mu.RUnlock()
	return t
}
// GetAll returns torrents matching the optional category and state filter,
// in insertion order. When hashes are supplied, they take precedence: the
// result is the torrents for those hashes, in the order given.
// NOTE(review): the hash path ignores the category/filter arguments —
// confirm callers expect hashes to bypass those filters.
func (ts *TorrentStorage) GetAll(category string, filter string, hashes []string) []*Torrent {
	ts.mu.RLock()
	defer ts.mu.RUnlock()
	torrents := make([]*Torrent, 0, len(ts.torrents))
	for _, id := range ts.order {
		torrent := ts.torrents[id]
		if category != "" && torrent.Category != category {
			continue
		}
		if filter != "" && torrent.State != filter {
			continue
		}
		torrents = append(torrents, torrent)
	}
	if len(hashes) > 0 {
		filtered := make([]*Torrent, 0, len(torrents))
		for _, hash := range hashes {
			if torrent := ts.torrents[hash]; torrent != nil {
				filtered = append(filtered, torrent)
			}
		}
		return filtered
	}
	return torrents
}
// Update replaces the stored torrent for its hash. Unlike AddOrUpdate it
// never registers a new hash in the listing order, so it must only be used
// for torrents that were already added.
func (ts *TorrentStorage) Update(torrent *Torrent) {
	ts.mu.Lock()
	defer ts.mu.Unlock()
	ts.torrents[torrent.Hash] = torrent
}
// Delete removes the torrent with the given hash from both the map and the
// ordered index, then best-effort removes its downloaded content folder.
func (ts *TorrentStorage) Delete(hash string) {
	ts.mu.Lock()
	defer ts.mu.Unlock()
	torrent, ok := ts.torrents[hash]
	if !ok {
		return
	}
	delete(ts.torrents, hash)
	for i, h := range ts.order {
		if h == hash {
			ts.order = append(ts.order[:i], ts.order[i+1:]...)
			break
		}
	}
	if torrent.ContentPath != "" {
		// Best effort: the folder may already be gone.
		os.RemoveAll(torrent.ContentPath)
	}
}
// Save persists the current torrent map to filename as JSON.
func (ts *TorrentStorage) Save(filename string) error {
	ts.mu.RLock()
	defer ts.mu.RUnlock()
	data, err := json.Marshal(ts.torrents)
	if err != nil {
		return err
	}
	// 0644: owner-writable, world-readable state file.
	return os.WriteFile(filename, data, 0644)
}

404
pkg/qbit/structs.go Normal file
View File

@@ -0,0 +1,404 @@
package qbit
// BuildInfo mirrors qBittorrent's /app/buildInfo response payload.
type BuildInfo struct {
	Libtorrent string `json:"libtorrent"`
	Bitness    int    `json:"bitness"`
	Boost      string `json:"boost"`
	Openssl    string `json:"openssl"`
	Qt         string `json:"qt"`
	Zlib       string `json:"zlib"`
}
// AppPreferences mirrors qBittorrent's /app/preferences response payload.
// Only WebUiUsername, SavePath and TempPath are personalised at runtime
// (see handlePreferences); every other field is served with the canned
// defaults from NewAppPreferences.
type AppPreferences struct {
	AddTrackers                        string   `json:"add_trackers"`
	AddTrackersEnabled                 bool     `json:"add_trackers_enabled"`
	AltDlLimit                         int64    `json:"alt_dl_limit"`
	AltUpLimit                         int64    `json:"alt_up_limit"`
	AlternativeWebuiEnabled            bool     `json:"alternative_webui_enabled"`
	AlternativeWebuiPath               string   `json:"alternative_webui_path"`
	AnnounceIp                         string   `json:"announce_ip"`
	AnnounceToAllTiers                 bool     `json:"announce_to_all_tiers"`
	AnnounceToAllTrackers              bool     `json:"announce_to_all_trackers"`
	AnonymousMode                      bool     `json:"anonymous_mode"`
	AsyncIoThreads                     int64    `json:"async_io_threads"`
	AutoDeleteMode                     int64    `json:"auto_delete_mode"`
	AutoTmmEnabled                     bool     `json:"auto_tmm_enabled"`
	AutorunEnabled                     bool     `json:"autorun_enabled"`
	AutorunProgram                     string   `json:"autorun_program"`
	BannedIPs                          string   `json:"banned_IPs"`
	BittorrentProtocol                 int64    `json:"bittorrent_protocol"`
	BypassAuthSubnetWhitelist          string   `json:"bypass_auth_subnet_whitelist"`
	BypassAuthSubnetWhitelistEnabled   bool     `json:"bypass_auth_subnet_whitelist_enabled"`
	BypassLocalAuth                    bool     `json:"bypass_local_auth"`
	CategoryChangedTmmEnabled          bool     `json:"category_changed_tmm_enabled"`
	CheckingMemoryUse                  int64    `json:"checking_memory_use"`
	CreateSubfolderEnabled             bool     `json:"create_subfolder_enabled"`
	CurrentInterfaceAddress            string   `json:"current_interface_address"`
	CurrentNetworkInterface            string   `json:"current_network_interface"`
	Dht                                bool     `json:"dht"`
	DiskCache                          int64    `json:"disk_cache"`
	DiskCacheTtl                       int64    `json:"disk_cache_ttl"`
	DlLimit                            int64    `json:"dl_limit"`
	DontCountSlowTorrents              bool     `json:"dont_count_slow_torrents"`
	DyndnsDomain                       string   `json:"dyndns_domain"`
	DyndnsEnabled                      bool     `json:"dyndns_enabled"`
	DyndnsPassword                     string   `json:"dyndns_password"`
	DyndnsService                      int64    `json:"dyndns_service"`
	DyndnsUsername                     string   `json:"dyndns_username"`
	EmbeddedTrackerPort                int64    `json:"embedded_tracker_port"`
	EnableCoalesceReadWrite            bool     `json:"enable_coalesce_read_write"`
	EnableEmbeddedTracker              bool     `json:"enable_embedded_tracker"`
	EnableMultiConnectionsFromSameIp   bool     `json:"enable_multi_connections_from_same_ip"`
	EnableOsCache                      bool     `json:"enable_os_cache"`
	EnablePieceExtentAffinity          bool     `json:"enable_piece_extent_affinity"`
	EnableSuperSeeding                 bool     `json:"enable_super_seeding"`
	EnableUploadSuggestions            bool     `json:"enable_upload_suggestions"`
	Encryption                         int64    `json:"encryption"`
	ExportDir                          string   `json:"export_dir"`
	ExportDirFin                       string   `json:"export_dir_fin"`
	FilePoolSize                       int64    `json:"file_pool_size"`
	IncompleteFilesExt                 bool     `json:"incomplete_files_ext"`
	IpFilterEnabled                    bool     `json:"ip_filter_enabled"`
	IpFilterPath                       string   `json:"ip_filter_path"`
	IpFilterTrackers                   bool     `json:"ip_filter_trackers"`
	LimitLanPeers                      bool     `json:"limit_lan_peers"`
	LimitTcpOverhead                   bool     `json:"limit_tcp_overhead"`
	LimitUtpRate                       bool     `json:"limit_utp_rate"`
	ListenPort                         int64    `json:"listen_port"`
	Locale                             string   `json:"locale"`
	Lsd                                bool     `json:"lsd"`
	MailNotificationAuthEnabled        bool     `json:"mail_notification_auth_enabled"`
	MailNotificationEmail              string   `json:"mail_notification_email"`
	MailNotificationEnabled            bool     `json:"mail_notification_enabled"`
	MailNotificationPassword           string   `json:"mail_notification_password"`
	MailNotificationSender             string   `json:"mail_notification_sender"`
	MailNotificationSmtp               string   `json:"mail_notification_smtp"`
	MailNotificationSslEnabled         bool     `json:"mail_notification_ssl_enabled"`
	MailNotificationUsername           string   `json:"mail_notification_username"`
	MaxActiveDownloads                 int64    `json:"max_active_downloads"`
	MaxActiveTorrents                  int64    `json:"max_active_torrents"`
	MaxActiveUploads                   int64    `json:"max_active_uploads"`
	MaxConnec                          int64    `json:"max_connec"`
	MaxConnecPerTorrent                int64    `json:"max_connec_per_torrent"`
	MaxRatio                           int64    `json:"max_ratio"`
	MaxRatioAct                        int64    `json:"max_ratio_act"`
	MaxRatioEnabled                    bool     `json:"max_ratio_enabled"`
	MaxSeedingTime                     int64    `json:"max_seeding_time"`
	MaxSeedingTimeEnabled              bool     `json:"max_seeding_time_enabled"`
	MaxUploads                         int64    `json:"max_uploads"`
	MaxUploadsPerTorrent               int64    `json:"max_uploads_per_torrent"`
	OutgoingPortsMax                   int64    `json:"outgoing_ports_max"`
	OutgoingPortsMin                   int64    `json:"outgoing_ports_min"`
	Pex                                bool     `json:"pex"`
	PreallocateAll                     bool     `json:"preallocate_all"`
	ProxyAuthEnabled                   bool     `json:"proxy_auth_enabled"`
	ProxyIp                            string   `json:"proxy_ip"`
	ProxyPassword                      string   `json:"proxy_password"`
	ProxyPeerConnections               bool     `json:"proxy_peer_connections"`
	ProxyPort                          int64    `json:"proxy_port"`
	ProxyTorrentsOnly                  bool     `json:"proxy_torrents_only"`
	ProxyType                          int64    `json:"proxy_type"`
	ProxyUsername                      string   `json:"proxy_username"`
	QueueingEnabled                    bool     `json:"queueing_enabled"`
	RandomPort                         bool     `json:"random_port"`
	RecheckCompletedTorrents           bool     `json:"recheck_completed_torrents"`
	ResolvePeerCountries               bool     `json:"resolve_peer_countries"`
	RssAutoDownloadingEnabled          bool     `json:"rss_auto_downloading_enabled"`
	RssMaxArticlesPerFeed              int64    `json:"rss_max_articles_per_feed"`
	RssProcessingEnabled               bool     `json:"rss_processing_enabled"`
	RssRefreshInterval                 int64    `json:"rss_refresh_interval"`
	SavePath                           string   `json:"save_path"`
	SavePathChangedTmmEnabled          bool     `json:"save_path_changed_tmm_enabled"`
	SaveResumeDataInterval             int64    `json:"save_resume_data_interval"`
	ScanDirs                           ScanDirs `json:"scan_dirs"`
	ScheduleFromHour                   int64    `json:"schedule_from_hour"`
	ScheduleFromMin                    int64    `json:"schedule_from_min"`
	ScheduleToHour                     int64    `json:"schedule_to_hour"`
	ScheduleToMin                      int64    `json:"schedule_to_min"`
	SchedulerDays                      int64    `json:"scheduler_days"`
	SchedulerEnabled                   bool     `json:"scheduler_enabled"`
	SendBufferLowWatermark             int64    `json:"send_buffer_low_watermark"`
	SendBufferWatermark                int64    `json:"send_buffer_watermark"`
	SendBufferWatermarkFactor          int64    `json:"send_buffer_watermark_factor"`
	SlowTorrentDlRateThreshold         int64    `json:"slow_torrent_dl_rate_threshold"`
	SlowTorrentInactiveTimer           int64    `json:"slow_torrent_inactive_timer"`
	SlowTorrentUlRateThreshold         int64    `json:"slow_torrent_ul_rate_threshold"`
	SocketBacklogSize                  int64    `json:"socket_backlog_size"`
	StartPausedEnabled                 bool     `json:"start_paused_enabled"`
	StopTrackerTimeout                 int64    `json:"stop_tracker_timeout"`
	TempPath                           string   `json:"temp_path"`
	TempPathEnabled                    bool     `json:"temp_path_enabled"`
	TorrentChangedTmmEnabled           bool     `json:"torrent_changed_tmm_enabled"`
	UpLimit                            int64    `json:"up_limit"`
	UploadChokingAlgorithm             int64    `json:"upload_choking_algorithm"`
	UploadSlotsBehavior                int64    `json:"upload_slots_behavior"`
	Upnp                               bool     `json:"upnp"`
	UpnpLeaseDuration                  int64    `json:"upnp_lease_duration"`
	UseHttps                           bool     `json:"use_https"`
	UtpTcpMixedMode                    int64    `json:"utp_tcp_mixed_mode"`
	WebUiAddress                       string   `json:"web_ui_address"`
	WebUiBanDuration                   int64    `json:"web_ui_ban_duration"`
	WebUiClickjackingProtectionEnabled bool     `json:"web_ui_clickjacking_protection_enabled"`
	WebUiCsrfProtectionEnabled         bool     `json:"web_ui_csrf_protection_enabled"`
	WebUiDomainList                    string   `json:"web_ui_domain_list"`
	WebUiHostHeaderValidationEnabled   bool     `json:"web_ui_host_header_validation_enabled"`
	WebUiHttpsCertPath                 string   `json:"web_ui_https_cert_path"`
	WebUiHttpsKeyPath                  string   `json:"web_ui_https_key_path"`
	WebUiMaxAuthFailCount              int64    `json:"web_ui_max_auth_fail_count"`
	WebUiPort                          int64    `json:"web_ui_port"`
	WebUiSecureCookieEnabled           bool     `json:"web_ui_secure_cookie_enabled"`
	WebUiSessionTimeout                int64    `json:"web_ui_session_timeout"`
	WebUiUpnp                          bool     `json:"web_ui_upnp"`
	WebUiUsername                      string   `json:"web_ui_username"`
	WebUiPassword                      string   `json:"web_ui_password"`
	SSLKey                             string   `json:"ssl_key"`
	SSLCert                            string   `json:"ssl_cert"`
	RSSDownloadRepack                  string   `json:"rss_download_repack_proper_episodes"`
	RSSSmartEpisodeFilters             string   `json:"rss_smart_episode_filters"`
	WebUiUseCustomHttpHeaders          bool     `json:"web_ui_use_custom_http_headers"`
	WebUiUseCustomHttpHeadersEnabled   bool     `json:"web_ui_use_custom_http_headers_enabled"`
}
// ScanDirs is an intentionally empty placeholder for qBittorrent's
// "scan_dirs" preference object.
type ScanDirs struct{}

// TorrentCategory mirrors one entry of qBittorrent's /torrents/categories
// response: a category name and its save path.
type TorrentCategory struct {
	Name     string `json:"name"`
	SavePath string `json:"savePath"`
}
// Torrent mirrors one entry of qBittorrent's /torrents/info response.
// ID carries the debrid-side torrent identifier and is deliberately
// excluded from JSON (`json:"-"`); Hash is the lowercase info-hash used as
// the storage key.
type Torrent struct {
	ID                string  `json:"-"`
	AddedOn           int64   `json:"added_on,omitempty"`
	AmountLeft        int64   `json:"amount_left,omitempty"`
	AutoTmm           bool    `json:"auto_tmm"`
	Availability      float64 `json:"availability"`
	Category          string  `json:"category,omitempty"`
	Completed         int64   `json:"completed,omitempty"`
	CompletionOn      int64   `json:"completion_on,omitempty"`
	ContentPath       string  `json:"content_path,omitempty"`
	DlLimit           int64   `json:"dl_limit,omitempty"`
	Dlspeed           int64   `json:"dlspeed,omitempty"`
	Downloaded        int64   `json:"downloaded,omitempty"`
	DownloadedSession int64   `json:"downloaded_session,omitempty"`
	Eta               int64   `json:"eta,omitempty"`
	FlPiecePrio       bool    `json:"f_l_piece_prio"`
	ForceStart        bool    `json:"force_start"`
	Hash              string  `json:"hash"`
	LastActivity      int64   `json:"last_activity,omitempty"`
	MagnetUri         string  `json:"magnet_uri,omitempty"`
	MaxRatio          int64   `json:"max_ratio,omitempty"`
	MaxSeedingTime    int64   `json:"max_seeding_time,omitempty"`
	Name              string  `json:"name,omitempty"`
	NumComplete       int64   `json:"num_complete,omitempty"`
	NumIncomplete     int64   `json:"num_incomplete,omitempty"`
	NumLeechs         int64   `json:"num_leechs,omitempty"`
	NumSeeds          int64   `json:"num_seeds,omitempty"`
	Priority          int64   `json:"priority,omitempty"`
	Progress          float32 `json:"progress"`
	Ratio             int64   `json:"ratio,omitempty"`
	RatioLimit        int64   `json:"ratio_limit,omitempty"`
	SavePath          string  `json:"save_path,omitempty"`
	SeedingTimeLimit  int64   `json:"seeding_time_limit,omitempty"`
	SeenComplete      int64   `json:"seen_complete,omitempty"`
	SeqDl             bool    `json:"seq_dl"`
	Size              int64   `json:"size,omitempty"`
	State             string  `json:"state,omitempty"`
	SuperSeeding      bool    `json:"super_seeding"`
	Tags              string  `json:"tags,omitempty"`
	TimeActive        int64   `json:"time_active,omitempty"`
	TotalSize         int64   `json:"total_size,omitempty"`
	Tracker           string  `json:"tracker,omitempty"`
	UpLimit           int64   `json:"up_limit,omitempty"`
	Uploaded          int64   `json:"uploaded,omitempty"`
	UploadedSession   int64   `json:"uploaded_session,omitempty"`
	Upspeed           int64   `json:"upspeed,omitempty"`
}
// TorrentProperties mirrors qBittorrent's /torrents/properties response
// for a single torrent.
type TorrentProperties struct {
	AdditionDate           int64  `json:"addition_date,omitempty"`
	Comment                string `json:"comment,omitempty"`
	CompletionDate         int64  `json:"completion_date,omitempty"`
	CreatedBy              string `json:"created_by,omitempty"`
	CreationDate           int64  `json:"creation_date,omitempty"`
	DlLimit                int64  `json:"dl_limit,omitempty"`
	DlSpeed                int64  `json:"dl_speed,omitempty"`
	DlSpeedAvg             int64  `json:"dl_speed_avg,omitempty"`
	Eta                    int64  `json:"eta,omitempty"`
	LastSeen               int64  `json:"last_seen,omitempty"`
	NbConnections          int64  `json:"nb_connections,omitempty"`
	NbConnectionsLimit     int64  `json:"nb_connections_limit,omitempty"`
	Peers                  int64  `json:"peers,omitempty"`
	PeersTotal             int64  `json:"peers_total,omitempty"`
	PieceSize              int64  `json:"piece_size,omitempty"`
	PiecesHave             int64  `json:"pieces_have,omitempty"`
	PiecesNum              int64  `json:"pieces_num,omitempty"`
	Reannounce             int64  `json:"reannounce,omitempty"`
	SavePath               string `json:"save_path,omitempty"`
	SeedingTime            int64  `json:"seeding_time,omitempty"`
	Seeds                  int64  `json:"seeds,omitempty"`
	SeedsTotal             int64  `json:"seeds_total,omitempty"`
	ShareRatio             int64  `json:"share_ratio,omitempty"`
	TimeElapsed            int64  `json:"time_elapsed,omitempty"`
	TotalDownloaded        int64  `json:"total_downloaded,omitempty"`
	TotalDownloadedSession int64  `json:"total_downloaded_session,omitempty"`
	TotalSize              int64  `json:"total_size,omitempty"`
	TotalUploaded          int64  `json:"total_uploaded,omitempty"`
	TotalUploadedSession   int64  `json:"total_uploaded_session,omitempty"`
	TotalWasted            int64  `json:"total_wasted,omitempty"`
	UpLimit                int64  `json:"up_limit,omitempty"`
	UpSpeed                int64  `json:"up_speed,omitempty"`
	UpSpeedAvg             int64  `json:"up_speed_avg,omitempty"`
}
// NewAppPreferences returns the canned preference payload served by
// handlePreferences. Values imitate a stock qBittorrent install; the
// handler overwrites WebUiUsername, SavePath and TempPath per instance.
func NewAppPreferences() *AppPreferences {
	preferences := &AppPreferences{
		AddTrackers:                        "",
		AddTrackersEnabled:                 false,
		AltDlLimit:                         10240,
		AltUpLimit:                         10240,
		AlternativeWebuiEnabled:            false,
		AlternativeWebuiPath:               "",
		AnnounceIp:                         "",
		AnnounceToAllTiers:                 true,
		AnnounceToAllTrackers:              false,
		AnonymousMode:                      false,
		AsyncIoThreads:                     4,
		AutoDeleteMode:                     0,
		AutoTmmEnabled:                     false,
		AutorunEnabled:                     false,
		AutorunProgram:                     "",
		BannedIPs:                          "",
		BittorrentProtocol:                 0,
		BypassAuthSubnetWhitelist:          "",
		BypassAuthSubnetWhitelistEnabled:   false,
		BypassLocalAuth:                    false,
		CategoryChangedTmmEnabled:          false,
		CheckingMemoryUse:                  32,
		CreateSubfolderEnabled:             true,
		CurrentInterfaceAddress:            "",
		CurrentNetworkInterface:            "",
		Dht:                                true,
		DiskCache:                          -1,
		DiskCacheTtl:                       60,
		DlLimit:                            0,
		DontCountSlowTorrents:              false,
		DyndnsDomain:                       "changeme.dyndns.org",
		DyndnsEnabled:                      false,
		DyndnsPassword:                     "",
		DyndnsService:                      0,
		DyndnsUsername:                     "",
		EmbeddedTrackerPort:                9000,
		EnableCoalesceReadWrite:            true,
		EnableEmbeddedTracker:              false,
		EnableMultiConnectionsFromSameIp:   false,
		EnableOsCache:                      true,
		EnablePieceExtentAffinity:          false,
		EnableSuperSeeding:                 false,
		EnableUploadSuggestions:            false,
		Encryption:                         0,
		ExportDir:                          "",
		ExportDirFin:                       "",
		FilePoolSize:                       40,
		IncompleteFilesExt:                 false,
		IpFilterEnabled:                    false,
		IpFilterPath:                       "",
		IpFilterTrackers:                   false,
		LimitLanPeers:                      true,
		LimitTcpOverhead:                   false,
		LimitUtpRate:                       true,
		ListenPort:                         31193,
		Locale:                             "en",
		Lsd:                                true,
		MailNotificationAuthEnabled:        false,
		MailNotificationEmail:              "",
		MailNotificationEnabled:            false,
		MailNotificationPassword:           "",
		MailNotificationSender:             "qBittorrentNotification@example.com",
		MailNotificationSmtp:               "smtp.changeme.com",
		MailNotificationSslEnabled:         false,
		MailNotificationUsername:           "",
		MaxActiveDownloads:                 3,
		MaxActiveTorrents:                  5,
		MaxActiveUploads:                   3,
		MaxConnec:                          500,
		MaxConnecPerTorrent:                100,
		MaxRatio:                           -1,
		MaxRatioAct:                        0,
		MaxRatioEnabled:                    false,
		MaxSeedingTime:                     -1,
		MaxSeedingTimeEnabled:              false,
		MaxUploads:                         -1,
		MaxUploadsPerTorrent:               -1,
		OutgoingPortsMax:                   0,
		OutgoingPortsMin:                   0,
		Pex:                                true,
		PreallocateAll:                     false,
		ProxyAuthEnabled:                   false,
		ProxyIp:                            "0.0.0.0",
		ProxyPassword:                      "",
		ProxyPeerConnections:               false,
		ProxyPort:                          8080,
		ProxyTorrentsOnly:                  false,
		ProxyType:                          0,
		ProxyUsername:                      "",
		QueueingEnabled:                    false,
		RandomPort:                         false,
		RecheckCompletedTorrents:           false,
		ResolvePeerCountries:               true,
		RssAutoDownloadingEnabled:          false,
		RssMaxArticlesPerFeed:              50,
		RssProcessingEnabled:               false,
		RssRefreshInterval:                 30,
		SavePathChangedTmmEnabled:          false,
		SaveResumeDataInterval:             60,
		ScanDirs:                           ScanDirs{},
		ScheduleFromHour:                   8,
		ScheduleFromMin:                    0,
		ScheduleToHour:                     20,
		ScheduleToMin:                      0,
		SchedulerDays:                      0,
		SchedulerEnabled:                   false,
		SendBufferLowWatermark:             10,
		SendBufferWatermark:                500,
		SendBufferWatermarkFactor:          50,
		SlowTorrentDlRateThreshold:         2,
		SlowTorrentInactiveTimer:           60,
		SlowTorrentUlRateThreshold:         2,
		SocketBacklogSize:                  30,
		StartPausedEnabled:                 false,
		StopTrackerTimeout:                 1,
		TempPathEnabled:                    false,
		TorrentChangedTmmEnabled:           true,
		UpLimit:                            0,
		UploadChokingAlgorithm:             1,
		UploadSlotsBehavior:                0,
		Upnp:                               true,
		UpnpLeaseDuration:                  0,
		UseHttps:                           false,
		UtpTcpMixedMode:                    0,
		WebUiAddress:                       "*",
		WebUiBanDuration:                   3600,
		WebUiClickjackingProtectionEnabled: true,
		WebUiCsrfProtectionEnabled:         true,
		WebUiDomainList:                    "*",
		WebUiHostHeaderValidationEnabled:   true,
		WebUiHttpsCertPath:                 "",
		WebUiHttpsKeyPath:                  "",
		WebUiMaxAuthFailCount:              5,
		WebUiPort:                          8080,
		WebUiSecureCookieEnabled:           true,
		WebUiSessionTimeout:                3600,
		WebUiUpnp:                          false,
		// Fields in the struct but not in the JSON (set to zero values):
		WebUiPassword:                    "",
		SSLKey:                           "",
		SSLCert:                          "",
		RSSDownloadRepack:                "",
		RSSSmartEpisodeFilters:           "",
		WebUiUseCustomHttpHeaders:        false,
		WebUiUseCustomHttpHeadersEnabled: false,
	}
	return preferences
}

102
pkg/qbit/torrent.go Normal file
View File

@@ -0,0 +1,102 @@
package qbit
import (
"cmp"
"goBlack/pkg/debrid"
"os"
"path/filepath"
"time"
)
// All torrent related helpers goes here
// MarkAsFailed transitions the torrent into the qBittorrent "error" state
// and persists the change, so that the Arrs see the download as failed.
// The same (mutated) torrent pointer is returned for call chaining.
func (q *QBit) MarkAsFailed(t *Torrent) *Torrent {
	t.State = "error"
	q.storage.AddOrUpdate(t)
	return t
}
// UpdateTorrent refreshes the qBittorrent-style view of t from the state of
// the corresponding debrid torrent. If debridTorrent is nil it is looked up
// via t.ID; when no debrid torrent can be found the torrent is returned
// unchanged. The updated torrent is persisted asynchronously via the storage.
func (q *QBit) UpdateTorrent(t *Torrent, debridTorrent *debrid.Torrent) *Torrent {
	if debridTorrent == nil && t.ID != "" {
		debridTorrent, _ = q.debrid.GetTorrent(t.ID)
	}
	if debridTorrent == nil {
		q.logger.Printf("Torrent with ID %s not found in %s", t.ID, q.debrid.GetName())
		return t
	}

	// Default to 1 byte so the ETA division below can never divide by zero.
	totalSize := cmp.Or(debridTorrent.Bytes, 1)
	// Progress is a percentage (0-100). Compute the completed byte count in
	// floating point: the previous code truncated progress to int64 and then
	// performed integer division by 100, which yielded 0 for every torrent
	// below 100% and reported all in-flight torrents as having 0 bytes done.
	progress := float64(cmp.Or(debridTorrent.Progress, 100))
	sizeCompleted := int64(float64(totalSize) * progress / 100.0)

	// Trailing separator matches qBittorrent's convention for save paths.
	savePath := filepath.Join(q.DownloadFolder, t.Category) + string(os.PathSeparator)
	torrentPath := filepath.Join(savePath, debridTorrent.Folder) + string(os.PathSeparator)

	var speed int64
	if debridTorrent.Speed != 0 {
		speed = int64(debridTorrent.Speed)
	}
	var eta int64
	if speed != 0 {
		eta = (totalSize - sizeCompleted) / speed
	}

	t.Name = debridTorrent.Name
	t.Size = debridTorrent.Bytes
	t.Completed = sizeCompleted
	t.Downloaded = sizeCompleted
	t.DownloadedSession = sizeCompleted
	t.Uploaded = sizeCompleted
	t.UploadedSession = sizeCompleted
	t.AmountLeft = totalSize - sizeCompleted
	// NOTE(review): Progress is hard-coded to 100 regardless of the actual
	// percentage — presumably so the Arrs treat the torrent as importable;
	// confirm before changing.
	t.Progress = 100
	t.SavePath = savePath
	t.ContentPath = torrentPath
	t.Eta = eta
	t.Dlspeed = speed
	t.Upspeed = speed
	if t.AmountLeft == 0 {
		t.State = "pausedUP"
	}
	// Persist in the background; callers only need the in-memory update.
	go q.storage.AddOrUpdate(t)
	return t
}
// ResumeTorrent always reports success. Presumably a stub: the download is
// managed remotely by the debrid service, so there is nothing local to
// resume — TODO confirm.
func (q *QBit) ResumeTorrent(t *Torrent) bool {
	return true
}
// PauseTorrent always reports success. Presumably a stub: the download is
// managed remotely by the debrid service, so there is nothing local to
// pause — TODO confirm.
func (q *QBit) PauseTorrent(t *Torrent) bool {
	return true
}
// RefreshTorrent always reports success; no local refresh work is performed
// here. NOTE(review): looks like a placeholder for a future re-sync with the
// debrid service — confirm intended behavior.
func (q *QBit) RefreshTorrent(t *Torrent) bool {
	return true
}
// GetTorrentProperties assembles the qBittorrent "torrent properties"
// payload for t. The peer/seed statistics are fixed placeholder values:
// there is no real swarm behind a debrid-backed torrent to report on.
func (q *QBit) GetTorrentProperties(t *Torrent) *TorrentProperties {
	const signature = "Debrid Blackhole <https://github.com/sirrobot01/debrid-blackhole>"
	now := time.Now().Unix()

	props := &TorrentProperties{
		// Dates and provenance.
		AdditionDate: t.AddedOn,
		CreationDate: t.AddedOn,
		Comment:      signature,
		CreatedBy:    signature,
		LastSeen:     now,

		// Transfer counters mirrored from the torrent itself.
		DlSpeed:                t.Dlspeed,
		UpSpeed:                t.Upspeed,
		TotalSize:              t.Size,
		TotalUploaded:          t.Uploaded,
		TotalDownloaded:        t.Downloaded,
		TotalUploadedSession:   t.UploadedSession,
		TotalDownloadedSession: t.DownloadedSession,

		// Static placeholders: unlimited rates, a plausible-looking swarm.
		DlLimit:            -1,
		UpLimit:            -1,
		NbConnectionsLimit: 100,
		Peers:              0,
		PeersTotal:         2,
		SeedingTime:        1,
		Seeds:              100,
		ShareRatio:         100,
	}
	return props
}

48
pkg/qbit/utils.go Normal file
View File

@@ -0,0 +1,48 @@
package qbit
import (
"crypto/rand"
"encoding/hex"
"encoding/json"
"goBlack/pkg/debrid"
"net/http"
"os"
"path/filepath"
"sync"
"time"
)
// generateSID returns a fresh session identifier: sidLength random bytes
// from crypto/rand, hex-encoded (so the string is 2*sidLength characters).
// An error is returned only if the system's entropy source fails.
func generateSID() (string, error) {
	buf := make([]byte, sidLength)
	_, err := rand.Read(buf)
	if err != nil {
		return "", err
	}
	return hex.EncodeToString(buf), nil
}
func JSONResponse(w http.ResponseWriter, data interface{}, code int) {
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(code)
json.NewEncoder(w).Encode(data)
}
func fileReady(path string) bool {
_, err := os.Stat(path)
return !os.IsNotExist(err) // Returns true if the file exists
}
// checkFileLoop polls once per second until the torrent file appears under
// dir, then sends the file on ready and returns. wg is released on return.
//
// NOTE(review): there is no cancellation path — if the file never appears
// this goroutine polls forever; consider threading a context through.
func checkFileLoop(wg *sync.WaitGroup, dir string, file debrid.TorrentFile, ready chan<- debrid.TorrentFile) {
	defer wg.Done()
	ticker := time.NewTicker(1 * time.Second) // Check every second
	defer ticker.Stop()
	path := filepath.Join(dir, file.Path)
	// Ranging over ticker.C replaces the previous single-case for/select
	// loop (staticcheck S1000) with identical behavior.
	for range ticker.C {
		if fileReady(path) {
			ready <- file
			return
		}
	}
}