From a51364d150fc2d9d303031eafcd2da0f8bcc3b33 Mon Sep 17 00:00:00 2001 From: Mukhtar Akere Date: Sat, 30 Nov 2024 15:46:58 +0100 Subject: [PATCH] Changelog 0.3.0 --- .air.toml | 52 +++ .dockerignore | 3 +- .github/workflows/docker.yml | 63 ++++ .github/workflows/release.yml | 32 ++ .gitignore | 1 + CHANGELOG.md | 10 +- Dockerfile | 1 + cmd/main.go | 27 +- common/config.go | 84 +++++ common/logger.go | 14 + common/request.go | 7 + common/utils.go | 5 - main.go | 6 +- pkg/arr/arr.go | 120 +++++++ pkg/arr/content.go | 29 ++ pkg/arr/history.go | 41 +++ pkg/arr/import.go | 209 +++++++++++ pkg/arr/refresh.go | 54 +++ pkg/arr/tmdb.go | 31 ++ pkg/debrid/debrid.go | 21 +- pkg/debrid/debrid_link.go | 4 +- pkg/debrid/realdebrid.go | 10 +- pkg/debrid/structs/debrid_link.go | 4 +- pkg/debrid/structs/realdebrid.go | 12 +- pkg/debrid/structs/torbox.go | 4 +- pkg/debrid/torbox.go | 35 +- pkg/debrid/torrent.go | 30 +- pkg/{qbit => }/downloaders/fasthttp.go | 0 pkg/{qbit => }/downloaders/grab.go | 2 +- pkg/{qbit => }/downloaders/http.go | 0 pkg/proxy/proxy.go | 17 +- pkg/qbit/arr.go | 103 ------ pkg/qbit/handlers.go | 41 --- pkg/qbit/handlers_app.go | 40 --- pkg/qbit/handlers_auth.go | 9 - pkg/qbit/main.go | 75 +--- pkg/qbit/qbit.go | 107 ------ pkg/qbit/server/app_handlers.go | 42 +++ pkg/qbit/server/auth_handlers.go | 7 + pkg/qbit/server/import.go | 172 +++++++++ pkg/qbit/{ => server}/middleware.go | 56 +-- pkg/qbit/server/routes.go | 50 +++ pkg/qbit/server/server.go | 66 ++++ pkg/qbit/server/static/index.html | 334 ++++++++++++++++++ .../torrent_handlers.go} | 84 ++--- pkg/qbit/server/ui_handlers.go | 114 ++++++ pkg/qbit/{ => shared}/downloader.go | 55 +-- pkg/qbit/shared/qbit.go | 46 +++ pkg/qbit/{ => shared}/storage.go | 7 +- pkg/qbit/{ => shared}/structs.go | 180 +++++----- pkg/qbit/{ => shared}/torrent.go | 141 +++++++- pkg/qbit/{ => shared}/utils.go | 18 +- pkg/qbit/{ => shared}/worker.go | 23 +- 53 files changed, 2019 insertions(+), 679 deletions(-) create mode 100644 
.air.toml create mode 100644 .github/workflows/docker.yml create mode 100644 .github/workflows/release.yml create mode 100644 common/logger.go create mode 100644 pkg/arr/arr.go create mode 100644 pkg/arr/content.go create mode 100644 pkg/arr/history.go create mode 100644 pkg/arr/import.go create mode 100644 pkg/arr/refresh.go create mode 100644 pkg/arr/tmdb.go rename pkg/{qbit => }/downloaders/fasthttp.go (100%) rename pkg/{qbit => }/downloaders/grab.go (98%) rename pkg/{qbit => }/downloaders/http.go (100%) delete mode 100644 pkg/qbit/arr.go delete mode 100644 pkg/qbit/handlers.go delete mode 100644 pkg/qbit/handlers_app.go delete mode 100644 pkg/qbit/handlers_auth.go delete mode 100644 pkg/qbit/qbit.go create mode 100644 pkg/qbit/server/app_handlers.go create mode 100644 pkg/qbit/server/auth_handlers.go create mode 100644 pkg/qbit/server/import.go rename pkg/qbit/{ => server}/middleware.go (63%) create mode 100644 pkg/qbit/server/routes.go create mode 100644 pkg/qbit/server/server.go create mode 100644 pkg/qbit/server/static/index.html rename pkg/qbit/{handlers_torrent.go => server/torrent_handlers.go} (52%) create mode 100644 pkg/qbit/server/ui_handlers.go rename pkg/qbit/{ => shared}/downloader.go (72%) create mode 100644 pkg/qbit/shared/qbit.go rename pkg/qbit/{ => shared}/storage.go (97%) rename pkg/qbit/{ => shared}/structs.go (76%) rename pkg/qbit/{ => shared}/torrent.go (50%) rename pkg/qbit/{ => shared}/utils.go (52%) rename pkg/qbit/{ => shared}/worker.go (63%) diff --git a/.air.toml b/.air.toml new file mode 100644 index 0000000..0ff91a4 --- /dev/null +++ b/.air.toml @@ -0,0 +1,52 @@ +root = "." +testdata_dir = "testdata" +tmp_dir = "tmp" + +[build] + args_bin = [] + bin = "./tmp/main" + cmd = "go build -o ./tmp/main ." 
+ delay = 1000 + exclude_dir = ["assets", "tmp", "vendor", "testdata", "data"] + exclude_file = [] + exclude_regex = ["_test.go"] + exclude_unchanged = false + follow_symlink = false + full_bin = "" + include_dir = [] + include_ext = ["go", "tpl", "tmpl", "html"] + include_file = [] + kill_delay = "0s" + log = "build-errors.log" + poll = false + poll_interval = 0 + post_cmd = [] + pre_cmd = [] + rerun = false + rerun_delay = 500 + send_interrupt = false + stop_on_error = false + +[color] + app = "" + build = "yellow" + main = "magenta" + runner = "green" + watcher = "cyan" + +[log] + main_only = false + silent = false + time = false + +[misc] + clean_on_exit = false + +[proxy] + app_port = 0 + enabled = false + proxy_port = 0 + +[screen] + clear_on_rebuild = false + keep_scroll = true diff --git a/.dockerignore b/.dockerignore index ed9222c..3067534 100644 --- a/.dockerignore +++ b/.dockerignore @@ -5,4 +5,5 @@ docker-compose.yml .DS_Store **/.idea/ *.magnet -**.torrent \ No newline at end of file +**.torrent + diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml new file mode 100644 index 0000000..10e4be8 --- /dev/null +++ b/.github/workflows/docker.yml @@ -0,0 +1,63 @@ +name: Docker Build and Push + +on: + push: + branches: + - main + - beta + +jobs: + docker: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Get version + id: get_version + run: | + if [[ ${{ github.ref }} == 'refs/heads/main' ]]; then + VERSION=$(git describe --tags --abbrev=0 2>/dev/null || echo "") + echo "VERSION=$VERSION" >> $GITHUB_OUTPUT + fi + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Login to Docker Hub + uses: docker/login-action@v3 + with: + username: ${{ secrets.DOCKERHUB_USERNAME }} + password: ${{ secrets.DOCKERHUB_TOKEN }} + + - name: Build and push for beta branch + if: github.ref == 'refs/heads/beta' + uses: 
docker/build-push-action@v5 + with: + context: . + platforms: linux/amd64,linux/arm64,linux/arm/v7 + push: true + tags: cy01/blackhole:beta + + - name: Build and push for main branch with version + if: github.ref == 'refs/heads/main' && steps.get_version.outputs.VERSION != '' + uses: docker/build-push-action@v5 + with: + context: . + platforms: linux/amd64,linux/arm64,linux/arm/v7 + push: true + tags: | + cy01/blackhole:latest + cy01/blackhole:${{ steps.get_version.outputs.VERSION }} + + - name: Build and push for main branch without version + if: github.ref == 'refs/heads/main' && steps.get_version.outputs.VERSION == '' + uses: docker/build-push-action@v5 + with: + context: . + platforms: linux/amd64,linux/arm64,linux/arm/v7 + push: true + tags: cy01/blackhole:latest \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..d5efba4 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,32 @@ +name: Release + +on: + push: + tags: + - '*' + +permissions: + contents: write + +jobs: + goreleaser: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v4 + with: + fetch-depth: 0 + + - name: Set up Go + uses: actions/setup-go@v4 + with: + go-version: '1.22' + + - name: Run GoReleaser + uses: goreleaser/goreleaser-action@v5 + with: + distribution: goreleaser + version: latest + args: release --clean + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.gitignore b/.gitignore index 168a7b4..632d6ba 100644 --- a/.gitignore +++ b/.gitignore @@ -9,3 +9,4 @@ docker-compose.yml *.log *.log.* dist/ +tmp/** diff --git a/CHANGELOG.md b/CHANGELOG.md index 7a4fdc5..4ba2e18 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -83,4 +83,12 @@ - Add support for multiple debrid providers - Add Torbox support - Add support for configurable debrid cache checks -- Add support for configurable debrid download uncached torrents \ No newline at end 
of file +- Add support for configurable debrid download uncached torrents + +#### 0.3.0 + +- Add UI for adding torrents +- Refactoring of the code +- Fix Torbox bug +- Update CI/CD +- Update Readme \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index a50aea2..07bec57 100644 --- a/Dockerfile +++ b/Dockerfile @@ -20,6 +20,7 @@ RUN CGO_ENABLED=0 GOOS=$(echo $TARGETPLATFORM | cut -d '/' -f1) GOARCH=$(echo $T FROM scratch COPY --from=builder /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ COPY --from=builder /blackhole /blackhole +COPY --from=builder /app/README.md /README.md EXPOSE 8181 diff --git a/cmd/main.go b/cmd/main.go index 21b4bdc..73920ca 100644 --- a/cmd/main.go +++ b/cmd/main.go @@ -2,6 +2,7 @@ package cmd import ( "cmp" + "context" "goBlack/common" "goBlack/pkg/debrid" "goBlack/pkg/proxy" @@ -9,32 +10,44 @@ import ( "sync" ) -func Start(config *common.Config) { +func Start(ctx context.Context, config *common.Config) error { maxCacheSize := cmp.Or(config.MaxCacheSize, 1000) cache := common.NewCache(maxCacheSize) deb := debrid.NewDebrid(config.Debrids, cache) var wg sync.WaitGroup + errChan := make(chan error, 2) if config.Proxy.Enabled { - p := proxy.NewProxy(*config, deb, cache) wg.Add(1) go func() { defer wg.Done() - p.Start() + if err := proxy.NewProxy(*config, deb, cache).Start(ctx); err != nil { + errChan <- err + } }() } if config.QBitTorrent.Port != "" { - qb := qbit.NewQBit(config, deb, cache) wg.Add(1) go func() { defer wg.Done() - qb.Start() + if err := qbit.Start(ctx, config, deb, cache); err != nil { + errChan <- err + } }() } - // Wait indefinitely - wg.Wait() + go func() { + wg.Wait() + close(errChan) + }() + // Wait for context cancellation or completion or error + select { + case err := <-errChan: + return err + case <-ctx.Done(): + return ctx.Err() + } } diff --git a/common/config.go b/common/config.go index 911a441..89f4b9d 100644 --- a/common/config.go +++ b/common/config.go @@ -2,8 +2,11 @@ package common import (
"encoding/json" + "errors" + "fmt" "log" "os" + "sync" ) type DebridConfig struct { @@ -43,6 +46,82 @@ type Config struct { QBitTorrent QBitTorrentConfig `json:"qbittorrent"` } +func validateDebrids(debrids []DebridConfig) error { + if len(debrids) == 0 { + return errors.New("no debrids configured") + } + + errChan := make(chan error, len(debrids)) + var wg sync.WaitGroup + + for _, debrid := range debrids { + // Basic field validation + if debrid.Host == "" { + return errors.New("debrid host is required") + } + if debrid.APIKey == "" { + return errors.New("debrid api key is required") + } + if debrid.Folder == "" { + return errors.New("debrid folder is required") + } + + // Check folder existence concurrently + wg.Add(1) + go func(folder string) { + defer wg.Done() + if _, err := os.Stat(folder); os.IsNotExist(err) { + errChan <- fmt.Errorf("debrid folder does not exist: %s", folder) + } + }(debrid.Folder) + } + + // Wait for all checks to complete + go func() { + wg.Wait() + close(errChan) + }() + + // Return first error if any + if err := <-errChan; err != nil { + return err + } + + return nil +} + +func validateQbitTorrent(config *QBitTorrentConfig) error { + if config.DownloadFolder == "" { + return errors.New("qbittorent download folder is required") + } + if _, err := os.Stat(config.DownloadFolder); os.IsNotExist(err) { + return errors.New("qbittorent download folder does not exist") + } + return nil +} + +func validateConfig(config *Config) error { + // Run validations concurrently + errChan := make(chan error, 2) + + go func() { + errChan <- validateDebrids(config.Debrids) + }() + + go func() { + errChan <- validateQbitTorrent(&config.QBitTorrent) + }() + + // Check for errors + for i := 0; i < 2; i++ { + if err := <-errChan; err != nil { + return err + } + } + + return nil +} + func LoadConfig(path string) (*Config, error) { // Load the config file file, err := os.Open(path) @@ -67,5 +146,10 @@ func LoadConfig(path string) (*Config, error) { 
config.Debrids = append(config.Debrids, config.Debrid) } + // Validate the config + //if err := validateConfig(config); err != nil { + // return nil, err + //} + return config, nil } diff --git a/common/logger.go b/common/logger.go new file mode 100644 index 0000000..223613a --- /dev/null +++ b/common/logger.go @@ -0,0 +1,14 @@ +package common + +import ( + "fmt" + "log" + "os" +) + +func NewLogger(prefix string, output *os.File) *log.Logger { + f := fmt.Sprintf("[%s] ", prefix) + return log.New(output, f, log.LstdFlags) +} + +var Logger = NewLogger("Main", os.Stdout) diff --git a/common/request.go b/common/request.go index 6a2410a..dce171c 100644 --- a/common/request.go +++ b/common/request.go @@ -2,6 +2,7 @@ package common import ( "crypto/tls" + "encoding/json" "fmt" "golang.org/x/time/rate" "io" @@ -125,3 +126,9 @@ func ParseRateLimit(rateStr string) *rate.Limiter { return nil } } + +func JSONResponse(w http.ResponseWriter, data interface{}, code int) { + w.Header().Set("Content-Type", "application/json") + w.WriteHeader(code) + json.NewEncoder(w).Encode(data) +} diff --git a/common/utils.go b/common/utils.go index 2ce2c5c..44acfc0 100644 --- a/common/utils.go +++ b/common/utils.go @@ -209,11 +209,6 @@ func processInfoHash(input string) (string, error) { return "", fmt.Errorf("invalid infohash: %s", input) } -func NewLogger(prefix string, output *os.File) *log.Logger { - f := fmt.Sprintf("[%s] ", prefix) - return log.New(output, f, log.LstdFlags) -} - func GetInfohashFromURL(url string) (string, error) { // Download the torrent file var magnetLink string diff --git a/main.go b/main.go index ad060a5..bff2e07 100644 --- a/main.go +++ b/main.go @@ -1,6 +1,7 @@ package main import ( + "context" "flag" "goBlack/cmd" "goBlack/common" @@ -17,6 +18,9 @@ func main() { if err != nil { log.Fatal(err) } - cmd.Start(conf) + ctx := context.Background() + if err := cmd.Start(ctx, conf); err != nil { + log.Fatal(err) + } } diff --git a/pkg/arr/arr.go b/pkg/arr/arr.go new file 
mode 100644 index 0000000..ddfae29 --- /dev/null +++ b/pkg/arr/arr.go @@ -0,0 +1,120 @@ +package arr + +import ( + "bytes" + "encoding/json" + "goBlack/common" + "log" + "net/http" + "os" + "strings" + "sync" +) + +// Type is a type of arr +type Type string + +const ( + Sonarr Type = "sonarr" + Radarr Type = "radarr" + Lidarr Type = "lidarr" + Readarr Type = "readarr" +) + +var ( + client *common.RLHTTPClient = common.NewRLHTTPClient(nil, nil) + logger *log.Logger = common.NewLogger("QBit", os.Stdout) +) + +type Arr struct { + Name string `json:"name"` + Host string `json:"host"` + Token string `json:"token"` + Type Type `json:"type"` +} + +func NewArr(name, host, token string, arrType Type) *Arr { + return &Arr{ + Name: name, + Host: host, + Token: token, + Type: arrType, + } +} + +func (a *Arr) Request(method, endpoint string, payload interface{}) (*http.Response, error) { + if a.Token == "" || a.Host == "" { + return nil, nil + } + url, err := common.JoinURL(a.Host, endpoint) + if err != nil { + return nil, err + } + var jsonPayload []byte + + if payload != nil { + jsonPayload, err = json.Marshal(payload) + if err != nil { + return nil, err + } + } + req, err := http.NewRequest(method, url, bytes.NewBuffer(jsonPayload)) + if err != nil { + return nil, err + } + req.Header.Set("Content-Type", "application/json") + req.Header.Set("X-Api-Key", a.Token) + return client.Do(req) +} + +type Storage struct { + Arrs map[string]*Arr // name -> arr + mu sync.RWMutex +} + +func inferType(host, name string) Type { + switch { + case strings.Contains(host, "sonarr") || strings.Contains(name, "sonarr"): + return Sonarr + case strings.Contains(host, "radarr") || strings.Contains(name, "radarr"): + return Radarr + case strings.Contains(host, "lidarr") || strings.Contains(name, "lidarr"): + return Lidarr + case strings.Contains(host, "readarr") || strings.Contains(name, "readarr"): + return Readarr + default: + return "" + } +} + +func NewStorage() *Storage { + arrs := 
make(map[string]*Arr) + //for name, arrCfg := range cfg { + // arrs[name] = NewArr(name, arrCfg.Host, arrCfg.Token, inferType(arrCfg.Host, name)) + //} + return &Storage{ + Arrs: arrs, + } +} + +func (as *Storage) AddOrUpdate(arr *Arr) { + as.mu.Lock() + defer as.mu.Unlock() + as.Arrs[arr.Host] = arr +} + +func (as *Storage) Get(name string) *Arr { + as.mu.RLock() + defer as.mu.RUnlock() + return as.Arrs[name] +} + +func (as *Storage) GetAll() []*Arr { + as.mu.RLock() + defer as.mu.RUnlock() + arrs := make([]*Arr, 0, len(as.Arrs)) + for _, arr := range as.Arrs { + arrs = append(arrs, arr) + } + return arrs +} diff --git a/pkg/arr/content.go b/pkg/arr/content.go new file mode 100644 index 0000000..5dfb34a --- /dev/null +++ b/pkg/arr/content.go @@ -0,0 +1,29 @@ +package arr + +import ( + "encoding/json" + "fmt" + "net/http" +) + +type ContentRequest struct { + ID string `json:"id"` + Title string `json:"name"` + Arr string `json:"arr"` +} + +func (a *Arr) GetContents() *ContentRequest { + resp, err := a.Request(http.MethodGet, "api/v3/series", nil) + if err != nil { + return nil + } + defer resp.Body.Close() + var data *ContentRequest + if err = json.NewDecoder(resp.Body).Decode(&data); err != nil { + fmt.Printf("Error: %v\n", err) + return nil + } + fmt.Printf("Data: %v\n", data) + //data.Arr = a.Name + return data +} diff --git a/pkg/arr/history.go b/pkg/arr/history.go new file mode 100644 index 0000000..e63bf15 --- /dev/null +++ b/pkg/arr/history.go @@ -0,0 +1,41 @@ +package arr + +import ( + "encoding/json" + "net/http" + gourl "net/url" +) + +type HistorySchema struct { + Page int `json:"page"` + PageSize int `json:"pageSize"` + SortKey string `json:"sortKey"` + SortDirection string `json:"sortDirection"` + TotalRecords int `json:"totalRecords"` + Records []struct { + ID int `json:"id"` + DownloadID string `json:"downloadId"` + } `json:"records"` +} + +func (a *Arr) GetHistory(downloadId, eventType string) *HistorySchema { + query := gourl.Values{} + if 
downloadId != "" { + query.Add("downloadId", downloadId) + } + query.Add("eventType", eventType) + query.Add("pageSize", "100") + url := "history" + "?" + query.Encode() + resp, err := a.Request(http.MethodGet, url, nil) + if err != nil { + return nil + } + defer resp.Body.Close() + var data *HistorySchema + + if err = json.NewDecoder(resp.Body).Decode(&data); err != nil { + return nil + } + return data + +} diff --git a/pkg/arr/import.go b/pkg/arr/import.go new file mode 100644 index 0000000..9ef651b --- /dev/null +++ b/pkg/arr/import.go @@ -0,0 +1,209 @@ +package arr + +import ( + "encoding/json" + "fmt" + "io" + "net/http" + gourl "net/url" + "strconv" + "time" +) + +type ImportResponseSchema struct { + Path string `json:"path"` + RelativePath string `json:"relativePath"` + FolderName string `json:"folderName"` + Name string `json:"name"` + Size int `json:"size"` + Series struct { + Title string `json:"title"` + SortTitle string `json:"sortTitle"` + Status string `json:"status"` + Ended bool `json:"ended"` + Overview string `json:"overview"` + Network string `json:"network"` + AirTime string `json:"airTime"` + Images []struct { + CoverType string `json:"coverType"` + RemoteUrl string `json:"remoteUrl"` + } `json:"images"` + OriginalLanguage struct { + Id int `json:"id"` + Name string `json:"name"` + } `json:"originalLanguage"` + Seasons []struct { + SeasonNumber int `json:"seasonNumber"` + Monitored bool `json:"monitored"` + } `json:"seasons"` + Year int `json:"year"` + Path string `json:"path"` + QualityProfileId int `json:"qualityProfileId"` + SeasonFolder bool `json:"seasonFolder"` + Monitored bool `json:"monitored"` + MonitorNewItems string `json:"monitorNewItems"` + UseSceneNumbering bool `json:"useSceneNumbering"` + Runtime int `json:"runtime"` + TvdbId int `json:"tvdbId"` + TvRageId int `json:"tvRageId"` + TvMazeId int `json:"tvMazeId"` + TmdbId int `json:"tmdbId"` + FirstAired time.Time `json:"firstAired"` + LastAired time.Time `json:"lastAired"` + 
SeriesType string `json:"seriesType"` + CleanTitle string `json:"cleanTitle"` + ImdbId string `json:"imdbId"` + TitleSlug string `json:"titleSlug"` + Certification string `json:"certification"` + Genres []string `json:"genres"` + Tags []interface{} `json:"tags"` + Added time.Time `json:"added"` + Ratings struct { + Votes int `json:"votes"` + Value float64 `json:"value"` + } `json:"ratings"` + LanguageProfileId int `json:"languageProfileId"` + Id int `json:"id"` + } `json:"series"` + SeasonNumber int `json:"seasonNumber"` + Episodes []struct { + SeriesId int `json:"seriesId"` + TvdbId int `json:"tvdbId"` + EpisodeFileId int `json:"episodeFileId"` + SeasonNumber int `json:"seasonNumber"` + EpisodeNumber int `json:"episodeNumber"` + Title string `json:"title"` + AirDate string `json:"airDate"` + AirDateUtc time.Time `json:"airDateUtc"` + Runtime int `json:"runtime"` + Overview string `json:"overview"` + HasFile bool `json:"hasFile"` + Monitored bool `json:"monitored"` + AbsoluteEpisodeNumber int `json:"absoluteEpisodeNumber"` + UnverifiedSceneNumbering bool `json:"unverifiedSceneNumbering"` + Id int `json:"id"` + FinaleType string `json:"finaleType,omitempty"` + } `json:"episodes"` + ReleaseGroup string `json:"releaseGroup"` + Quality struct { + Quality struct { + Id int `json:"id"` + Name string `json:"name"` + Source string `json:"source"` + Resolution int `json:"resolution"` + } `json:"quality"` + Revision struct { + Version int `json:"version"` + Real int `json:"real"` + IsRepack bool `json:"isRepack"` + } `json:"revision"` + } `json:"quality"` + Languages []struct { + Id int `json:"id"` + Name string `json:"name"` + } `json:"languages"` + QualityWeight int `json:"qualityWeight"` + CustomFormats []interface{} `json:"customFormats"` + CustomFormatScore int `json:"customFormatScore"` + IndexerFlags int `json:"indexerFlags"` + ReleaseType string `json:"releaseType"` + Rejections []struct { + Reason string `json:"reason"` + Type string `json:"type"` + } 
`json:"rejections"` + Id int `json:"id"` +} + +type ManualImportRequestFile struct { + Path string `json:"path"` + SeriesId int `json:"seriesId"` + SeasonNumber int `json:"seasonNumber"` + EpisodeIds []int `json:"episodeIds"` + Quality struct { + Quality struct { + Id int `json:"id"` + Name string `json:"name"` + Source string `json:"source"` + Resolution int `json:"resolution"` + } `json:"quality"` + Revision struct { + Version int `json:"version"` + Real int `json:"real"` + IsRepack bool `json:"isRepack"` + } `json:"revision"` + } `json:"quality"` + Languages []struct { + Id int `json:"id"` + Name string `json:"name"` + } `json:"languages"` + ReleaseGroup string `json:"releaseGroup"` + CustomFormats []interface{} `json:"customFormats"` + CustomFormatScore int `json:"customFormatScore"` + IndexerFlags int `json:"indexerFlags"` + ReleaseType string `json:"releaseType"` + Rejections []struct { + Reason string `json:"reason"` + Type string `json:"type"` + } `json:"rejections"` +} + +type ManualImportRequestSchema struct { + Name string `json:"name"` + Files []ManualImportRequestFile `json:"files"` + ImportMode string `json:"importMode"` +} + +func (a *Arr) Import(path string, seriesId int, seasons []int) (io.ReadCloser, error) { + query := gourl.Values{} + query.Add("folder", path) + if seriesId != 0 { + query.Add("seriesId", strconv.Itoa(seriesId)) + } + url := "api/v3/manualimport" + "?" 
+ query.Encode() + resp, err := a.Request(http.MethodGet, url, nil) + if err != nil { + return nil, fmt.Errorf("failed to import, invalid file: %w", err) + } + defer resp.Body.Close() + var data []ImportResponseSchema + if err = json.NewDecoder(resp.Body).Decode(&data); err != nil { + return nil, fmt.Errorf("failed to decode response: %w", err) + } + + var files []ManualImportRequestFile + for _, d := range data { + episodesIds := []int{} + for _, e := range d.Episodes { + episodesIds = append(episodesIds, e.Id) + } + file := ManualImportRequestFile{ + Path: d.Path, + SeriesId: d.Series.Id, + SeasonNumber: d.SeasonNumber, + EpisodeIds: episodesIds, + Quality: d.Quality, + Languages: d.Languages, + ReleaseGroup: d.ReleaseGroup, + CustomFormats: d.CustomFormats, + CustomFormatScore: d.CustomFormatScore, + IndexerFlags: d.IndexerFlags, + ReleaseType: d.ReleaseType, + Rejections: d.Rejections, + } + files = append(files, file) + } + request := ManualImportRequestSchema{ + Name: "ManualImport", + Files: files, + ImportMode: "copy", + } + + url = "api/v3/command" + resp, err = a.Request(http.MethodPost, url, request) + if err != nil { + return nil, fmt.Errorf("failed to import: %w", err) + } + defer resp.Body.Close() + return resp.Body, nil + +} diff --git a/pkg/arr/refresh.go b/pkg/arr/refresh.go new file mode 100644 index 0000000..a7cc7f9 --- /dev/null +++ b/pkg/arr/refresh.go @@ -0,0 +1,54 @@ +package arr + +import ( + "cmp" + "fmt" + "goBlack/common" + "net/http" + "strconv" + "strings" +) + +func (a *Arr) Refresh() error { + payload := map[string]string{"name": "RefreshMonitoredDownloads"} + + resp, err := a.Request(http.MethodPost, "api/v3/command", payload) + if err == nil && resp != nil { + statusOk := strconv.Itoa(resp.StatusCode)[0] == '2' + if statusOk { + return nil + } + } + return fmt.Errorf("failed to refresh monitored downloads for %s", cmp.Or(a.Name, a.Host)) +} + +func (a *Arr) MarkAsFailed(infoHash string) error { + downloadId := 
strings.ToUpper(infoHash) + history := a.GetHistory(downloadId, "grabbed") + if history == nil { + return nil + } + torrentId := 0 + for _, record := range history.Records { + if strings.EqualFold(record.DownloadID, downloadId) { + torrentId = record.ID + break + } + } + if torrentId != 0 { + url, err := common.JoinURL(a.Host, "history/failed/", strconv.Itoa(torrentId)) + if err != nil { + return err + } + req, err := http.NewRequest(http.MethodPost, url, nil) + if err != nil { + return err + } + client := &http.Client{} + _, err = client.Do(req) + if err == nil { + return fmt.Errorf("failed to mark %s as failed: %v", cmp.Or(a.Name, a.Host), err) + } + } + return nil +} diff --git a/pkg/arr/tmdb.go b/pkg/arr/tmdb.go new file mode 100644 index 0000000..6313c12 --- /dev/null +++ b/pkg/arr/tmdb.go @@ -0,0 +1,31 @@ +package arr + +import ( + "encoding/json" + "net/http" + url2 "net/url" +) + +type TMDBResponse struct { + Page int `json:"page"` + Results []struct { + ID int `json:"id"` + Name string `json:"name"` + MediaType string `json:"media_type"` + PosterPath string `json:"poster_path"` + } `json:"results"` +} + +func SearchTMDB(term string) (*TMDBResponse, error) { + resp, err := http.Get("https://api.themoviedb.org/3/search/multi?api_key=key&query=" + url2.QueryEscape(term)) + if err != nil { + return nil, err + } + + var data *TMDBResponse + if err = json.NewDecoder(resp.Body).Decode(&data); err != nil { + return nil, err + } + + return data, nil +} diff --git a/pkg/debrid/debrid.go b/pkg/debrid/debrid.go index d18fd14..8ae9291 100644 --- a/pkg/debrid/debrid.go +++ b/pkg/debrid/debrid.go @@ -4,6 +4,7 @@ import ( "fmt" "github.com/anacrolix/torrent/metainfo" "goBlack/common" + "goBlack/pkg/arr" "log" "path/filepath" ) @@ -94,16 +95,17 @@ func getTorrentInfo(filePath string) (*Torrent, error) { if err != nil { return nil, err } + infoLength := info.Length magnet := &common.Magnet{ InfoHash: infoHash, Name: info.Name, - Size: info.Length, + Size: infoLength, Link: 
mi.Magnet(&hash, &info).String(), } torrent := &Torrent{ InfoHash: infoHash, Name: info.Name, - Size: info.Length, + Size: infoLength, Magnet: magnet, Filename: filePath, } @@ -136,12 +138,12 @@ func GetLocalCache(infohashes []string, cache *common.Cache) ([]string, map[stri return infohashes, result } -func ProcessQBitTorrent(d *DebridService, magnet *common.Magnet, arr *Arr, isSymlink bool) (*Torrent, error) { +func ProcessTorrent(d *DebridService, magnet *common.Magnet, a *arr.Arr, isSymlink bool) (*Torrent, error) { debridTorrent := &Torrent{ InfoHash: magnet.InfoHash, Magnet: magnet, Name: magnet.Name, - Arr: arr, + Arr: a, Size: magnet.Size, } @@ -159,15 +161,16 @@ func ProcessQBitTorrent(d *DebridService, magnet *common.Magnet, arr *Arr, isSym } } - debridTorrent, err := db.SubmitMagnet(debridTorrent) - if err != nil || debridTorrent.Id == "" { + dbt, err := db.SubmitMagnet(debridTorrent) + if err != nil || dbt.Id == "" { logger.Printf("Error submitting magnet: %s", err) continue } - logger.Printf("Torrent: %s submitted to %s", debridTorrent.Name, db.GetName()) + logger.Printf("Torrent: %s submitted to %s", dbt.Name, db.GetName()) d.lastUsed = index - debridTorrent.Debrid = db - return db.CheckStatus(debridTorrent, isSymlink) + dbt.Debrid = db + dbt.Arr = a + return db.CheckStatus(dbt, isSymlink) } return nil, fmt.Errorf("failed to process torrent") } diff --git a/pkg/debrid/debrid_link.go b/pkg/debrid/debrid_link.go index 3252e26..6092d27 100644 --- a/pkg/debrid/debrid_link.go +++ b/pkg/debrid/debrid_link.go @@ -107,8 +107,6 @@ func (r *DebridLink) GetTorrent(id string) (*Torrent, error) { return torrent, fmt.Errorf("torrent not found") } dt := *res.Value - fmt.Printf("Length of dt: %d\n", len(dt)) - fmt.Printf("Raw response: %+v\n", res) if len(dt) == 0 { return torrent, fmt.Errorf("torrent not found") @@ -206,7 +204,7 @@ func (r *DebridLink) CheckStatus(torrent *Torrent, isSymlink bool) (*Torrent, er break } else if status == "downloading" { if 
!r.DownloadUncached { - go r.DeleteTorrent(torrent) + go torrent.Delete() return torrent, fmt.Errorf("torrent: %s not cached", torrent.Name) } // Break out of the loop if the torrent is downloading. diff --git a/pkg/debrid/realdebrid.go b/pkg/debrid/realdebrid.go index 18e6cdd..706ac39 100644 --- a/pkg/debrid/realdebrid.go +++ b/pkg/debrid/realdebrid.go @@ -40,13 +40,13 @@ func GetTorrentFiles(data structs.RealDebridTorrentInfo) []TorrentFile { continue } fileId := f.ID - file := &TorrentFile{ + file := TorrentFile{ Name: name, Path: name, - Size: int64(f.Bytes), + Size: f.Bytes, Id: strconv.Itoa(fileId), } - files = append(files, *file) + files = append(files, file) } return files } @@ -185,7 +185,7 @@ func (r *RealDebrid) CheckStatus(torrent *Torrent, isSymlink bool) (*Torrent, er files := GetTorrentFiles(data) torrent.Files = files if len(files) == 0 { - r.DeleteTorrent(torrent) + go torrent.Delete() return torrent, fmt.Errorf("no video files found") } filesId := make([]string, 0) @@ -214,7 +214,7 @@ func (r *RealDebrid) CheckStatus(torrent *Torrent, isSymlink bool) (*Torrent, er break } else if status == "downloading" { if !r.DownloadUncached { - go r.DeleteTorrent(torrent) + go torrent.Delete() return torrent, fmt.Errorf("torrent: %s not cached", torrent.Name) } // Break out of the loop if the torrent is downloading. 
diff --git a/pkg/debrid/structs/debrid_link.go b/pkg/debrid/structs/debrid_link.go index 9ecc28e..e33d537 100644 --- a/pkg/debrid/structs/debrid_link.go +++ b/pkg/debrid/structs/debrid_link.go @@ -36,8 +36,8 @@ type debridLinkTorrentInfo struct { } `json:"trackers"` Created int64 `json:"created"` DownloadPercent float64 `json:"downloadPercent"` - DownloadSpeed int64 `json:"downloadSpeed"` - UploadSpeed int64 `json:"uploadSpeed"` + DownloadSpeed int `json:"downloadSpeed"` + UploadSpeed int `json:"uploadSpeed"` } type DebridLinkTorrentInfo DebridLinkAPIResponse[[]debridLinkTorrentInfo] diff --git a/pkg/debrid/structs/realdebrid.go b/pkg/debrid/structs/realdebrid.go index 0ccc128..c382a5f 100644 --- a/pkg/debrid/structs/realdebrid.go +++ b/pkg/debrid/structs/realdebrid.go @@ -75,7 +75,7 @@ type RealDebridTorrentInfo struct { OriginalFilename string `json:"original_filename"` Hash string `json:"hash"` Bytes int64 `json:"bytes"` - OriginalBytes int `json:"original_bytes"` + OriginalBytes int64 `json:"original_bytes"` Host string `json:"host"` Split int `json:"split"` Progress float64 `json:"progress"` @@ -84,12 +84,12 @@ type RealDebridTorrentInfo struct { Files []struct { ID int `json:"id"` Path string `json:"path"` - Bytes int `json:"bytes"` + Bytes int64 `json:"bytes"` Selected int `json:"selected"` } `json:"files"` Links []string `json:"links"` Ended string `json:"ended,omitempty"` - Speed int64 `json:"speed,omitempty"` + Speed int `json:"speed,omitempty"` Seeders int `json:"seeders,omitempty"` } @@ -97,11 +97,11 @@ type RealDebridUnrestrictResponse struct { Id string `json:"id"` Filename string `json:"filename"` MimeType string `json:"mimeType"` - Filesize int64 `json:"filesize"` + Filesize int `json:"filesize"` Link string `json:"link"` Host string `json:"host"` - Chunks int64 `json:"chunks"` - Crc int64 `json:"crc"` + Chunks int `json:"chunks"` + Crc int `json:"crc"` Download string `json:"download"` Streamable int `json:"streamable"` } diff --git 
a/pkg/debrid/structs/torbox.go b/pkg/debrid/structs/torbox.go index 18ce122..5b81ca8 100644 --- a/pkg/debrid/structs/torbox.go +++ b/pkg/debrid/structs/torbox.go @@ -11,7 +11,7 @@ type TorboxAPIResponse[T any] struct { type TorBoxAvailableResponse TorboxAPIResponse[map[string]struct { Name string `json:"name"` - Size int64 `json:"size"` + Size int `json:"size"` Hash string `json:"hash"` }] @@ -36,7 +36,7 @@ type torboxInfo struct { Peers int `json:"peers"` Ratio int `json:"ratio"` Progress float64 `json:"progress"` - DownloadSpeed int64 `json:"download_speed"` + DownloadSpeed int `json:"download_speed"` UploadSpeed int `json:"upload_speed"` Eta int `json:"eta"` TorrentFile bool `json:"torrent_file"` diff --git a/pkg/debrid/torbox.go b/pkg/debrid/torbox.go index 4b2e2dd..e88c387 100644 --- a/pkg/debrid/torbox.go +++ b/pkg/debrid/torbox.go @@ -12,6 +12,7 @@ import ( gourl "net/url" "os" "path" + "path/filepath" "slices" "strconv" "strings" @@ -157,24 +158,36 @@ func (r *Torbox) GetTorrent(id string) (*Torrent, error) { torrent.Name = name torrent.Bytes = data.Size torrent.Folder = name - torrent.Progress = data.Progress + torrent.Progress = data.Progress * 100 torrent.Status = getStatus(data.DownloadState, data.DownloadFinished) torrent.Speed = data.DownloadSpeed torrent.Seeders = data.Seeds torrent.Filename = name torrent.OriginalFilename = name - files := make([]TorrentFile, len(data.Files)) - for i, f := range data.Files { - files[i] = TorrentFile{ - Id: strconv.Itoa(f.Id), - Name: f.Name, - Size: f.Size, + files := make([]TorrentFile, 0) + if len(data.Files) == 0 { + return torrent, fmt.Errorf("no files found for torrent: %s", name) + } + for _, f := range data.Files { + fileName := filepath.Base(f.Name) + if (!common.RegexMatch(common.VIDEOMATCH, fileName) && + !common.RegexMatch(common.SUBMATCH, fileName) && + !common.RegexMatch(common.MUSICMATCH, fileName)) || common.RegexMatch(common.SAMPLEMATCH, fileName) { + continue } + file := TorrentFile{ + Id: 
strconv.Itoa(f.Id), + Name: fileName, + Size: f.Size, + Path: fileName, + } + files = append(files, file) } - if len(files) > 0 && name == data.Hash { - cleanPath := path.Clean(files[0].Name) - torrent.OriginalFilename = strings.Split(strings.TrimPrefix(cleanPath, "/"), "/")[0] + if len(files) == 0 { + return torrent, fmt.Errorf("no video files found") } + cleanPath := path.Clean(data.Files[0].Name) + torrent.OriginalFilename = strings.Split(cleanPath, "/")[0] torrent.Files = files torrent.Debrid = r return torrent, nil @@ -203,7 +216,7 @@ func (r *Torbox) CheckStatus(torrent *Torrent, isSymlink bool) (*Torrent, error) break } else if status == "downloading" { if !r.DownloadUncached { - go r.DeleteTorrent(torrent) + go torrent.Delete() return torrent, fmt.Errorf("torrent: %s not cached", torrent.Name) } // Break out of the loop if the torrent is downloading. diff --git a/pkg/debrid/torrent.go b/pkg/debrid/torrent.go index 5e58c53..89315b7 100644 --- a/pkg/debrid/torrent.go +++ b/pkg/debrid/torrent.go @@ -2,13 +2,14 @@ package debrid import ( "goBlack/common" + "goBlack/pkg/arr" "os" "path/filepath" ) type Arr struct { Name string `json:"name"` - Token string `json:"token"` + Token string `json:"-"` Host string `json:"host"` } @@ -38,13 +39,13 @@ type Torrent struct { Status string `json:"status"` Added string `json:"added"` Progress float64 `json:"progress"` - Speed int64 `json:"speed"` + Speed int `json:"speed"` Seeders int `json:"seeders"` Links []string `json:"links"` DownloadLinks []TorrentDownloadLinks `json:"download_links"` Debrid Service - Arr *Arr + Arr *arr.Arr } type TorrentDownloadLinks struct { @@ -58,15 +59,22 @@ func (t *Torrent) GetSymlinkFolder(parent string) string { } func (t *Torrent) GetMountFolder(rClonePath string) string { - if common.FileReady(filepath.Join(rClonePath, t.OriginalFilename)) { - return t.OriginalFilename - } else if common.FileReady(filepath.Join(rClonePath, t.Filename)) { - return t.Filename - } else if pathWithNoExt := 
common.RemoveExtension(t.OriginalFilename); common.FileReady(filepath.Join(rClonePath, pathWithNoExt)) { - return pathWithNoExt - } else { - return "" + possiblePaths := []string{ + t.OriginalFilename, + t.Filename, + common.RemoveExtension(t.OriginalFilename), } + + for _, path := range possiblePaths { + if path != "" && common.FileReady(filepath.Join(rClonePath, path)) { + return path + } + } + return "" +} + +func (t *Torrent) Delete() { + t.Debrid.DeleteTorrent(t) } type TorrentFile struct { diff --git a/pkg/qbit/downloaders/fasthttp.go b/pkg/downloaders/fasthttp.go similarity index 100% rename from pkg/qbit/downloaders/fasthttp.go rename to pkg/downloaders/fasthttp.go diff --git a/pkg/qbit/downloaders/grab.go b/pkg/downloaders/grab.go similarity index 98% rename from pkg/qbit/downloaders/grab.go rename to pkg/downloaders/grab.go index 483b12c..d1647e8 100644 --- a/pkg/qbit/downloaders/grab.go +++ b/pkg/downloaders/grab.go @@ -40,7 +40,7 @@ Loop: fmt.Printf(" %s: transferred %d / %d bytes (%.2f%%)\n", resp.Filename, resp.BytesComplete(), - resp.Size, + resp.Size(), 100*resp.Progress()) case <-resp.Done: diff --git a/pkg/qbit/downloaders/http.go b/pkg/downloaders/http.go similarity index 100% rename from pkg/qbit/downloaders/http.go rename to pkg/downloaders/http.go diff --git a/pkg/proxy/proxy.go b/pkg/proxy/proxy.go index 30e312f..9bf1fb9 100644 --- a/pkg/proxy/proxy.go +++ b/pkg/proxy/proxy.go @@ -3,7 +3,9 @@ package proxy import ( "bytes" "cmp" + "context" "encoding/xml" + "errors" "fmt" "github.com/elazarl/goproxy" "github.com/elazarl/goproxy/ext/auth" @@ -308,7 +310,7 @@ func UrlMatches(re *regexp.Regexp) goproxy.ReqConditionFunc { } } -func (p *Proxy) Start() { +func (p *Proxy) Start(ctx context.Context) error { username, password := p.username, p.password proxy := goproxy.NewProxyHttpServer() if username != "" || password != "" { @@ -328,6 +330,17 @@ func (p *Proxy) Start() { proxy.Verbose = p.debug portFmt := fmt.Sprintf(":%s", p.port) + srv := 
&http.Server{ + Addr: portFmt, + Handler: proxy, + } p.logger.Printf("[*] Starting proxy server on %s\n", portFmt) - p.logger.Fatal(http.ListenAndServe(fmt.Sprintf("%s", portFmt), proxy)) + go func() { + if err := srv.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) { + p.logger.Printf("Error starting proxy server: %v\n", err) + } + }() + <-ctx.Done() + p.logger.Println("Shutting down gracefully...") + return srv.Shutdown(context.Background()) } diff --git a/pkg/qbit/arr.go b/pkg/qbit/arr.go deleted file mode 100644 index 089cd06..0000000 --- a/pkg/qbit/arr.go +++ /dev/null @@ -1,103 +0,0 @@ -package qbit - -import ( - "bytes" - "cmp" - "encoding/json" - "goBlack/common" - "goBlack/pkg/debrid" - "net/http" - gourl "net/url" - "strconv" - "strings" -) - -func (q *QBit) RefreshArr(arr *debrid.Arr) { - if arr.Token == "" || arr.Host == "" { - return - } - url, err := common.JoinURL(arr.Host, "api/v3/command") - - if err != nil { - return - } - payload := map[string]string{"name": "RefreshMonitoredDownloads"} - jsonPayload, err := json.Marshal(payload) - if err != nil { - return - } - - client := &http.Client{} - req, err := http.NewRequest("POST", url, bytes.NewBuffer(jsonPayload)) - if err != nil { - return - } - - req.Header.Set("Content-Type", "application/json") - req.Header.Set("X-Api-Key", arr.Token) - - resp, reqErr := client.Do(req) - if reqErr == nil { - statusOk := strconv.Itoa(resp.StatusCode)[0] == '2' - if statusOk { - if q.debug { - q.logger.Printf("Refreshed monitored downloads for %s", cmp.Or(arr.Name, arr.Host)) - } - } - } - if reqErr != nil { - } -} - -func (q *QBit) GetArrHistory(arr *debrid.Arr, downloadId, eventType string) *debrid.ArrHistorySchema { - query := gourl.Values{} - if downloadId != "" { - query.Add("downloadId", downloadId) - } - query.Add("eventType", eventType) - query.Add("pageSize", "100") - url, _ := common.JoinURL(arr.Host, "history") - url += "?" 
+ query.Encode() - resp, err := http.Get(url) - if err != nil { - return nil - } - var data *debrid.ArrHistorySchema - - if err = json.NewDecoder(resp.Body).Decode(&data); err != nil { - return nil - } - return data - -} - -func (q *QBit) MarkArrAsFailed(torrent *Torrent, arr *debrid.Arr) error { - downloadId := strings.ToUpper(torrent.Hash) - history := q.GetArrHistory(arr, downloadId, "grabbed") - if history == nil { - return nil - } - torrentId := 0 - for _, record := range history.Records { - if strings.EqualFold(record.DownloadID, downloadId) { - torrentId = record.ID - break - } - } - if torrentId != 0 { - url, err := common.JoinURL(arr.Host, "history/failed/", strconv.Itoa(torrentId)) - if err != nil { - return err - } - req, err := http.NewRequest(http.MethodPost, url, nil) - if err != nil { - return err - } - client := &http.Client{} - _, err = client.Do(req) - if err == nil { - q.logger.Printf("Marked torrent: %s as failed", torrent.Name) - } - } - return nil -} diff --git a/pkg/qbit/handlers.go b/pkg/qbit/handlers.go deleted file mode 100644 index 6db62ba..0000000 --- a/pkg/qbit/handlers.go +++ /dev/null @@ -1,41 +0,0 @@ -package qbit - -import ( - "github.com/go-chi/chi/v5" - "net/http" -) - -func (q *QBit) AddRoutes(r chi.Router) http.Handler { - r.Route("/api/v2", func(r chi.Router) { - r.Post("/auth/login", q.handleLogin) - - r.Group(func(r chi.Router) { - //r.Use(q.authMiddleware) - r.Use(q.authContext) - r.Route("/torrents", func(r chi.Router) { - r.Use(HashesCtx) - r.Get("/info", q.handleTorrentsInfo) - r.Post("/add", q.handleTorrentsAdd) - r.Post("/delete", q.handleTorrentsDelete) - r.Get("/categories", q.handleCategories) - r.Post("/createCategory", q.handleCreateCategory) - - r.Get("/pause", q.handleTorrentsPause) - r.Get("/resume", q.handleTorrentsResume) - r.Get("/recheck", q.handleTorrentRecheck) - r.Get("/properties", q.handleTorrentProperties) - r.Get("/files", q.handleTorrentFiles) - }) - - r.Route("/app", func(r chi.Router) { - 
r.Get("/version", q.handleVersion) - r.Get("/webapiVersion", q.handleWebAPIVersion) - r.Get("/preferences", q.handlePreferences) - r.Get("/buildInfo", q.handleBuildInfo) - r.Get("/shutdown", q.shutdown) - }) - }) - - }) - return r -} diff --git a/pkg/qbit/handlers_app.go b/pkg/qbit/handlers_app.go deleted file mode 100644 index 0811f62..0000000 --- a/pkg/qbit/handlers_app.go +++ /dev/null @@ -1,40 +0,0 @@ -package qbit - -import ( - "net/http" - "path/filepath" -) - -func (q *QBit) handleVersion(w http.ResponseWriter, r *http.Request) { - _, _ = w.Write([]byte("v4.3.2")) -} - -func (q *QBit) handleWebAPIVersion(w http.ResponseWriter, r *http.Request) { - _, _ = w.Write([]byte("2.7")) -} - -func (q *QBit) handlePreferences(w http.ResponseWriter, r *http.Request) { - preferences := NewAppPreferences() - - preferences.WebUiUsername = q.Username - preferences.SavePath = q.DownloadFolder - preferences.TempPath = filepath.Join(q.DownloadFolder, "temp") - - JSONResponse(w, preferences, http.StatusOK) -} - -func (q *QBit) handleBuildInfo(w http.ResponseWriter, r *http.Request) { - res := BuildInfo{ - Bitness: 64, - Boost: "1.75.0", - Libtorrent: "1.2.11.0", - Openssl: "1.1.1i", - Qt: "5.15.2", - Zlib: "1.2.11", - } - JSONResponse(w, res, http.StatusOK) -} - -func (q *QBit) shutdown(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusOK) -} diff --git a/pkg/qbit/handlers_auth.go b/pkg/qbit/handlers_auth.go deleted file mode 100644 index 1c376f3..0000000 --- a/pkg/qbit/handlers_auth.go +++ /dev/null @@ -1,9 +0,0 @@ -package qbit - -import ( - "net/http" -) - -func (q *QBit) handleLogin(w http.ResponseWriter, r *http.Request) { - _, _ = w.Write([]byte("Ok.")) -} diff --git a/pkg/qbit/main.go b/pkg/qbit/main.go index 55e8e02..00ecdb7 100644 --- a/pkg/qbit/main.go +++ b/pkg/qbit/main.go @@ -1,80 +1,17 @@ package qbit import ( - "cmp" "context" "fmt" - "github.com/go-chi/chi/v5" - "github.com/go-chi/chi/v5/middleware" "goBlack/common" "goBlack/pkg/debrid" - 
"log" - "net/http" - "os" - "sync" - "time" + "goBlack/pkg/qbit/server" ) -type WorkerType struct { - ticker *time.Ticker - ctx context.Context -} - -type Worker struct { - types map[string]WorkerType -} - -type QBit struct { - Username string `json:"username"` - Password string `json:"password"` - Port string `json:"port"` - DownloadFolder string `json:"download_folder"` - Categories []string `json:"categories"` - debrid *debrid.DebridService - cache *common.Cache - storage *TorrentStorage - debug bool - logger *log.Logger - arrs sync.Map // host:token (Used for refreshing in worker) - RefreshInterval int -} - -func NewQBit(config *common.Config, deb *debrid.DebridService, cache *common.Cache) *QBit { - cfg := config.QBitTorrent - storage := NewTorrentStorage("torrents.json") - port := cmp.Or(cfg.Port, os.Getenv("QBIT_PORT"), "8182") - refreshInterval := cmp.Or(cfg.RefreshInterval, 10) - return &QBit{ - Username: cfg.Username, - Password: cfg.Password, - Port: port, - DownloadFolder: cfg.DownloadFolder, - Categories: cfg.Categories, - debrid: deb, - cache: cache, - debug: cfg.Debug, - storage: storage, - logger: common.NewLogger("QBit", os.Stdout), - arrs: sync.Map{}, - RefreshInterval: refreshInterval, +func Start(ctx context.Context, config *common.Config, deb *debrid.DebridService, cache *common.Cache) error { + srv := server.NewServer(config, deb, cache) + if err := srv.Start(ctx); err != nil { + return fmt.Errorf("failed to start qbit server: %w", err) } -} - -func (q *QBit) Start() { - - r := chi.NewRouter() - if q.debug { - r.Use(middleware.Logger) - } - r.Use(middleware.Recoverer) - - q.AddRoutes(r) - - ctx := context.Background() - - go q.StartWorker(ctx) - - q.logger.Printf("Starting QBit server on :%s", q.Port) - port := fmt.Sprintf(":%s", q.Port) - q.logger.Fatal(http.ListenAndServe(port, r)) + return nil } diff --git a/pkg/qbit/qbit.go b/pkg/qbit/qbit.go deleted file mode 100644 index 2f492f4..0000000 --- a/pkg/qbit/qbit.go +++ /dev/null @@ -1,107 
+0,0 @@ -package qbit - -import ( - "context" - "fmt" - "github.com/google/uuid" - "goBlack/common" - "goBlack/pkg/debrid" - "io" - "mime/multipart" - "strings" - "time" -) - -func (q *QBit) AddMagnet(ctx context.Context, url, category string) error { - magnet, err := common.GetMagnetFromUrl(url) - if err != nil { - q.logger.Printf("Error parsing magnet link: %v\n", err) - return err - } - err = q.Process(ctx, magnet, category) - if err != nil { - q.logger.Println("Failed to process magnet:", err) - return err - } - return nil -} - -func (q *QBit) AddTorrent(ctx context.Context, fileHeader *multipart.FileHeader, category string) error { - file, _ := fileHeader.Open() - defer file.Close() - var reader io.Reader = file - magnet, err := common.GetMagnetFromFile(reader, fileHeader.Filename) - if err != nil { - q.logger.Printf("Error reading file: %s", fileHeader.Filename) - return err - } - err = q.Process(ctx, magnet, category) - if err != nil { - q.logger.Println("Failed to process torrent:", err) - return err - } - return nil -} - -func (q *QBit) Process(ctx context.Context, magnet *common.Magnet, category string) error { - torrent := q.CreateTorrentFromMagnet(magnet, category) - arr := &debrid.Arr{ - Name: category, - Token: ctx.Value("token").(string), - Host: ctx.Value("host").(string), - } - isSymlink := ctx.Value("isSymlink").(bool) - debridTorrent, err := debrid.ProcessQBitTorrent(q.debrid, magnet, arr, isSymlink) - if err != nil || debridTorrent == nil { - if err == nil { - err = fmt.Errorf("failed to process torrent") - } - return err - } - torrent = q.UpdateTorrentMin(torrent, debridTorrent) - q.storage.AddOrUpdate(torrent) - go q.processFiles(torrent, debridTorrent, arr, isSymlink) // We can send async for file processing not to delay the response - return nil -} - -func (q *QBit) CreateTorrentFromMagnet(magnet *common.Magnet, category string) *Torrent { - torrent := &Torrent{ - ID: uuid.NewString(), - Hash: strings.ToLower(magnet.InfoHash), - Name: 
magnet.Name, - Size: magnet.Size, - Category: category, - State: "downloading", - MagnetUri: magnet.Link, - - Tracker: "udp://tracker.opentrackr.org:1337", - UpLimit: -1, - DlLimit: -1, - AutoTmm: false, - Ratio: 1, - RatioLimit: 1, - } - return torrent -} - -func (q *QBit) processFiles(torrent *Torrent, debridTorrent *debrid.Torrent, arr *debrid.Arr, isSymlink bool) { - for debridTorrent.Status != "downloaded" { - progress := debridTorrent.Progress - q.logger.Printf("%s Download Progress: %.2f%%", debridTorrent.Debrid.GetName(), progress) - time.Sleep(5 * time.Second) - dbT, err := debridTorrent.Debrid.CheckStatus(debridTorrent, isSymlink) - if err != nil { - q.logger.Printf("Error checking status: %v", err) - q.MarkAsFailed(torrent) - q.RefreshArr(arr) - return - } - debridTorrent = dbT - torrent = q.UpdateTorrentMin(torrent, debridTorrent) - } - if isSymlink { - q.processSymlink(torrent, debridTorrent, arr) - } else { - q.processManualFiles(torrent, debridTorrent, arr) - } -} diff --git a/pkg/qbit/server/app_handlers.go b/pkg/qbit/server/app_handlers.go new file mode 100644 index 0000000..f98df62 --- /dev/null +++ b/pkg/qbit/server/app_handlers.go @@ -0,0 +1,42 @@ +package server + +import ( + "goBlack/common" + "goBlack/pkg/qbit/shared" + "net/http" + "path/filepath" +) + +func (s *Server) handleVersion(w http.ResponseWriter, r *http.Request) { + _, _ = w.Write([]byte("v4.3.2")) +} + +func (s *Server) handleWebAPIVersion(w http.ResponseWriter, r *http.Request) { + _, _ = w.Write([]byte("2.7")) +} + +func (s *Server) handlePreferences(w http.ResponseWriter, r *http.Request) { + preferences := shared.NewAppPreferences() + + preferences.WebUiUsername = s.qbit.Username + preferences.SavePath = s.qbit.DownloadFolder + preferences.TempPath = filepath.Join(s.qbit.DownloadFolder, "temp") + + common.JSONResponse(w, preferences, http.StatusOK) +} + +func (s *Server) handleBuildInfo(w http.ResponseWriter, r *http.Request) { + res := shared.BuildInfo{ + Bitness: 64, + 
Boost: "1.75.0", + Libtorrent: "1.2.11.0", + Openssl: "1.1.1i", + Qt: "5.15.2", + Zlib: "1.2.11", + } + common.JSONResponse(w, res, http.StatusOK) +} + +func (s *Server) shutdown(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) +} diff --git a/pkg/qbit/server/auth_handlers.go b/pkg/qbit/server/auth_handlers.go new file mode 100644 index 0000000..2a95e77 --- /dev/null +++ b/pkg/qbit/server/auth_handlers.go @@ -0,0 +1,7 @@ +package server + +import "net/http" + +func (s *Server) handleLogin(w http.ResponseWriter, r *http.Request) { + _, _ = w.Write([]byte("Ok.")) +} diff --git a/pkg/qbit/server/import.go b/pkg/qbit/server/import.go new file mode 100644 index 0000000..41d046a --- /dev/null +++ b/pkg/qbit/server/import.go @@ -0,0 +1,172 @@ +package server + +import ( + "encoding/json" + "errors" + "fmt" + "github.com/google/uuid" + "goBlack/common" + "goBlack/pkg/arr" + "goBlack/pkg/debrid" + "sync" + "time" +) + +type ImportRequest struct { + ID string `json:"id"` + Path string `json:"path"` + URI string `json:"uri"` + Arr *arr.Arr `json:"arr"` + IsSymlink bool `json:"isSymlink"` + SeriesId int `json:"series"` + Seasons []int `json:"seasons"` + Episodes []string `json:"episodes"` + + Failed bool `json:"failed"` + FailedAt time.Time `json:"failedAt"` + Reason string `json:"reason"` + Completed bool `json:"completed"` + CompletedAt time.Time `json:"completedAt"` + Async bool `json:"async"` +} + +type ManualImportResponseSchema struct { + Priority string `json:"priority"` + Status string `json:"status"` + Result string `json:"result"` + Queued time.Time `json:"queued"` + Trigger string `json:"trigger"` + SendUpdatesToClient bool `json:"sendUpdatesToClient"` + UpdateScheduledTask bool `json:"updateScheduledTask"` + Id int `json:"id"` +} + +func NewImportRequest(uri string, arr *arr.Arr, isSymlink bool) *ImportRequest { + return &ImportRequest{ + ID: uuid.NewString(), + URI: uri, + Arr: arr, + Failed: false, + Completed: false, + Async: false, + 
IsSymlink: isSymlink, + } +} + +func (i *ImportRequest) Fail(reason string) { + i.Failed = true + i.FailedAt = time.Now() + i.Reason = reason +} + +func (i *ImportRequest) Complete() { + i.Completed = true + i.CompletedAt = time.Now() +} + +func (i *ImportRequest) Process(s *Server) (err error) { + // Use this for now. + // This sends the torrent to the arr + q := s.qbit + magnet, err := common.GetMagnetFromUrl(i.URI) + torrent := q.CreateTorrentFromMagnet(magnet, i.Arr.Name) + debridTorrent, err := debrid.ProcessTorrent(q.Debrid, magnet, i.Arr, i.IsSymlink) + if err != nil || debridTorrent == nil { + if err == nil { + err = fmt.Errorf("failed to process torrent") + } + return err + } + torrent = q.UpdateTorrentMin(torrent, debridTorrent) + q.Storage.AddOrUpdate(torrent) + go q.ProcessFiles(torrent, debridTorrent, i.Arr, i.IsSymlink) + return nil +} + +func (i *ImportRequest) BetaProcess(s *Server) (err error) { + // THis actually imports the torrent into the arr. Needs more work + if i.Arr == nil { + return errors.New("invalid arr") + } + q := s.qbit + magnet, err := common.GetMagnetFromUrl(i.URI) + if err != nil { + return fmt.Errorf("error parsing magnet link: %w", err) + } + debridTorrent, err := debrid.ProcessTorrent(q.Debrid, magnet, i.Arr, true) + if err != nil || debridTorrent == nil { + if err == nil { + err = errors.New("failed to process torrent") + } + return err + } + + debridTorrent.Arr = i.Arr + + torrentPath, err := q.ProcessSymlink(debridTorrent) + if err != nil { + return fmt.Errorf("failed to process symlink: %w", err) + } + i.Path = torrentPath + body, err := i.Arr.Import(torrentPath, i.SeriesId, i.Seasons) + if err != nil { + return fmt.Errorf("failed to import: %w", err) + } + defer body.Close() + + var resp ManualImportResponseSchema + if err := json.NewDecoder(body).Decode(&resp); err != nil { + return fmt.Errorf("failed to decode response: %w", err) + } + if resp.Status != "success" { + return fmt.Errorf("failed to import: %s", resp.Result) 
+ } + i.Complete() + + return +} + +type ImportStore struct { + Imports map[string]*ImportRequest + mu sync.RWMutex +} + +func NewImportStore() *ImportStore { + return &ImportStore{ + Imports: make(map[string]*ImportRequest), + } +} + +func (s *ImportStore) AddImport(i *ImportRequest) { + s.mu.Lock() + defer s.mu.Unlock() + s.Imports[i.ID] = i +} + +func (s *ImportStore) GetImport(id string) *ImportRequest { + s.mu.RLock() + defer s.mu.RUnlock() + return s.Imports[id] +} + +func (s *ImportStore) GetAllImports() []*ImportRequest { + s.mu.RLock() + defer s.mu.RUnlock() + var imports []*ImportRequest + for _, i := range s.Imports { + imports = append(imports, i) + } + return imports +} + +func (s *ImportStore) DeleteImport(id string) { + s.mu.Lock() + defer s.mu.Unlock() + delete(s.Imports, id) +} + +func (s *ImportStore) UpdateImport(i *ImportRequest) { + s.mu.Lock() + defer s.mu.Unlock() + s.Imports[i.ID] = i +} diff --git a/pkg/qbit/middleware.go b/pkg/qbit/server/middleware.go similarity index 63% rename from pkg/qbit/middleware.go rename to pkg/qbit/server/middleware.go index 76b06fd..4dd6768 100644 --- a/pkg/qbit/middleware.go +++ b/pkg/qbit/server/middleware.go @@ -1,29 +1,14 @@ -package qbit +package server import ( "context" - "crypto/subtle" "encoding/base64" "github.com/go-chi/chi/v5" + "goBlack/pkg/arr" "net/http" "strings" ) -func (q *QBit) authMiddleware(next http.Handler) http.Handler { - return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - user, pass, ok := r.BasicAuth() - if !ok { - http.Error(w, "Unauthorized", http.StatusUnauthorized) - return - } - if subtle.ConstantTimeCompare([]byte(user), []byte(q.Username)) != 1 || subtle.ConstantTimeCompare([]byte(pass), []byte(q.Password)) != 1 { - http.Error(w, "Unauthorized", http.StatusUnauthorized) - return - } - next.ServeHTTP(w, r) - }) -} - func DecodeAuthHeader(header string) (string, string, error) { encodedTokens := strings.Split(header, " ") if len(encodedTokens) != 2 { @@ 
-45,17 +30,38 @@ func DecodeAuthHeader(header string) (string, string, error) { return host, token, nil } -func (q *QBit) authContext(next http.Handler) http.Handler { +func (s *Server) CategoryContext(next http.Handler) http.Handler { + return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + category := strings.Trim(r.URL.Query().Get("category"), "") + if category == "" { + // Get from form + _ = r.ParseForm() + category = r.Form.Get("category") + if category == "" { + // Get from multipart form + _ = r.ParseMultipartForm(0) + category = r.FormValue("category") + } + } + ctx := r.Context() + ctx = context.WithValue(r.Context(), "category", category) + next.ServeHTTP(w, r.WithContext(ctx)) + }) +} + +func (s *Server) authContext(next http.Handler) http.Handler { return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { host, token, err := DecodeAuthHeader(r.Header.Get("Authorization")) - ctx := r.Context() - if err == nil { - ctx = context.WithValue(r.Context(), "host", host) - ctx = context.WithValue(ctx, "token", token) - q.arrs.Store(host, token) - next.ServeHTTP(w, r.WithContext(ctx)) - return + category := r.Context().Value("category").(string) + a := &arr.Arr{ + Name: category, } + if err == nil { + a.Host = host + a.Token = token + } + s.qbit.Arrs.AddOrUpdate(a) + ctx := context.WithValue(r.Context(), "arr", a) next.ServeHTTP(w, r.WithContext(ctx)) }) } diff --git a/pkg/qbit/server/routes.go b/pkg/qbit/server/routes.go new file mode 100644 index 0000000..2d3334c --- /dev/null +++ b/pkg/qbit/server/routes.go @@ -0,0 +1,50 @@ +package server + +import ( + "github.com/go-chi/chi/v5" + "net/http" +) + +func (s *Server) Routes(r chi.Router) http.Handler { + r.Route("/api/v2", func(r chi.Router) { + r.Use(s.CategoryContext) + r.Post("/auth/login", s.handleLogin) + + r.Group(func(r chi.Router) { + r.Use(s.authContext) + r.Route("/torrents", func(r chi.Router) { + r.Use(HashesCtx) + r.Get("/info", s.handleTorrentsInfo) + r.Post("/add", 
s.handleTorrentsAdd) + r.Post("/delete", s.handleTorrentsDelete) + r.Get("/categories", s.handleCategories) + r.Post("/createCategory", s.handleCreateCategory) + + r.Get("/pause", s.handleTorrentsPause) + r.Get("/resume", s.handleTorrentsResume) + r.Get("/recheck", s.handleTorrentRecheck) + r.Get("/properties", s.handleTorrentProperties) + r.Get("/files", s.handleTorrentFiles) + }) + + r.Route("/app", func(r chi.Router) { + r.Get("/version", s.handleVersion) + r.Get("/webapiVersion", s.handleWebAPIVersion) + r.Get("/preferences", s.handlePreferences) + r.Get("/buildInfo", s.handleBuildInfo) + r.Get("/shutdown", s.shutdown) + }) + }) + + }) + r.Get("/", s.handleHome) + r.Route("/internal", func(r chi.Router) { + r.Get("/arrs", s.handleGetArrs) + r.Get("/content", s.handleContent) + r.Get("/seasons/{contentId}", s.handleSeasons) + r.Get("/episodes/{contentId}", s.handleEpisodes) + r.Post("/add", s.handleAddContent) + r.Get("/search", s.handleSearch) + }) + return r +} diff --git a/pkg/qbit/server/server.go b/pkg/qbit/server/server.go new file mode 100644 index 0000000..2dc0fbb --- /dev/null +++ b/pkg/qbit/server/server.go @@ -0,0 +1,66 @@ +package server + +import ( + "context" + "errors" + "fmt" + "github.com/go-chi/chi/v5" + "github.com/go-chi/chi/v5/middleware" + "goBlack/common" + "goBlack/pkg/debrid" + "goBlack/pkg/qbit/shared" + "log" + "net/http" + "os" + "os/signal" + "syscall" +) + +type Server struct { + qbit *shared.QBit + logger *log.Logger + debug bool +} + +func NewServer(config *common.Config, deb *debrid.DebridService, cache *common.Cache) *Server { + logger := common.NewLogger("QBit", os.Stdout) + q := shared.NewQBit(config, deb, cache, logger) + return &Server{ + qbit: q, + logger: logger, + debug: config.QBitTorrent.Debug, + } +} + +func (s *Server) Start(ctx context.Context) error { + r := chi.NewRouter() + if s.debug { + r.Use(middleware.Logger) + } + r.Use(middleware.Recoverer) + r.Handle("/static/*", http.StripPrefix("/static/", 
http.FileServer(http.Dir("static")))) + s.Routes(r) + + go s.qbit.StartWorker(context.Background()) + + s.logger.Printf("Starting QBit server on :%s", s.qbit.Port) + port := fmt.Sprintf(":%s", s.qbit.Port) + srv := &http.Server{ + Addr: port, + Handler: r, + } + + ctx, stop := signal.NotifyContext(ctx, os.Interrupt, syscall.SIGTERM) + defer stop() + + go func() { + if err := srv.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) { + fmt.Printf("Error starting server: %v\n", err) + stop() + } + }() + + <-ctx.Done() + fmt.Println("Shutting down gracefully...") + return srv.Shutdown(context.Background()) +} diff --git a/pkg/qbit/server/static/index.html b/pkg/qbit/server/static/index.html new file mode 100644 index 0000000..4fc3096 --- /dev/null +++ b/pkg/qbit/server/static/index.html @@ -0,0 +1,334 @@ + + + + + + Debrid Manager + + + + + + + + + + + + +
+
+
+
+ + +
+
+ + +
+
+ + +
+
+ +
+
+ + + + + + + + + + + + + + + + + + + + + +
+
+ + + + + + + + + \ No newline at end of file diff --git a/pkg/qbit/handlers_torrent.go b/pkg/qbit/server/torrent_handlers.go similarity index 52% rename from pkg/qbit/handlers_torrent.go rename to pkg/qbit/server/torrent_handlers.go index 5fea3e9..4ef1e84 100644 --- a/pkg/qbit/handlers_torrent.go +++ b/pkg/qbit/server/torrent_handlers.go @@ -1,44 +1,46 @@ -package qbit +package server import ( "context" + "goBlack/common" + "goBlack/pkg/qbit/shared" "net/http" "path/filepath" "strings" ) -func (q *QBit) handleTorrentsInfo(w http.ResponseWriter, r *http.Request) { +func (s *Server) handleTorrentsInfo(w http.ResponseWriter, r *http.Request) { //log all url params ctx := r.Context() - category := strings.Trim(r.URL.Query().Get("category"), "") + category := ctx.Value("category").(string) filter := strings.Trim(r.URL.Query().Get("filter"), "") hashes, _ := ctx.Value("hashes").([]string) - torrents := q.storage.GetAll(category, filter, hashes) - JSONResponse(w, torrents, http.StatusOK) + torrents := s.qbit.Storage.GetAll(category, filter, hashes) + common.JSONResponse(w, torrents, http.StatusOK) } -func (q *QBit) handleTorrentsAdd(w http.ResponseWriter, r *http.Request) { +func (s *Server) handleTorrentsAdd(w http.ResponseWriter, r *http.Request) { ctx := r.Context() contentType := strings.Split(r.Header.Get("Content-Type"), ";")[0] switch contentType { case "multipart/form-data": err := r.ParseMultipartForm(32 << 20) // 32MB max memory if err != nil { - q.logger.Printf("Error parsing form: %v\n", err) + s.logger.Printf("Error parsing form: %v\n", err) http.Error(w, err.Error(), http.StatusBadRequest) return } case "application/x-www-form-urlencoded": err := r.ParseForm() if err != nil { - q.logger.Printf("Error parsing form: %v\n", err) + s.logger.Printf("Error parsing form: %v\n", err) http.Error(w, err.Error(), http.StatusBadRequest) return } } isSymlink := strings.ToLower(r.FormValue("sequentialDownload")) != "true" - q.logger.Printf("isSymlink: %v\n", isSymlink) + 
s.logger.Printf("isSymlink: %v\n", isSymlink) urls := r.FormValue("urls") category := r.FormValue("category") @@ -50,8 +52,8 @@ func (q *QBit) handleTorrentsAdd(w http.ResponseWriter, r *http.Request) { ctx = context.WithValue(ctx, "isSymlink", isSymlink) for _, url := range urlList { - if err := q.AddMagnet(ctx, url, category); err != nil { - q.logger.Printf("Error adding magnet: %v\n", err) + if err := s.qbit.AddMagnet(ctx, url, category); err != nil { + s.logger.Printf("Error adding magnet: %v\n", err) http.Error(w, err.Error(), http.StatusBadRequest) return } @@ -60,8 +62,8 @@ func (q *QBit) handleTorrentsAdd(w http.ResponseWriter, r *http.Request) { if contentType == "multipart/form-data" { files := r.MultipartForm.File["torrents"] for _, fileHeader := range files { - if err := q.AddTorrent(ctx, fileHeader, category); err != nil { - q.logger.Printf("Error adding torrent: %v\n", err) + if err := s.qbit.AddTorrent(ctx, fileHeader, category); err != nil { + s.logger.Printf("Error adding torrent: %v\n", err) http.Error(w, err.Error(), http.StatusBadRequest) return } @@ -71,7 +73,7 @@ func (q *QBit) handleTorrentsAdd(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) } -func (q *QBit) handleTorrentsDelete(w http.ResponseWriter, r *http.Request) { +func (s *Server) handleTorrentsDelete(w http.ResponseWriter, r *http.Request) { ctx := r.Context() hashes, _ := ctx.Value("hashes").([]string) if len(hashes) == 0 { @@ -79,67 +81,67 @@ func (q *QBit) handleTorrentsDelete(w http.ResponseWriter, r *http.Request) { return } for _, hash := range hashes { - q.storage.Delete(hash) + s.qbit.Storage.Delete(hash) } w.WriteHeader(http.StatusOK) } -func (q *QBit) handleTorrentsPause(w http.ResponseWriter, r *http.Request) { +func (s *Server) handleTorrentsPause(w http.ResponseWriter, r *http.Request) { ctx := r.Context() hashes, _ := ctx.Value("hashes").([]string) for _, hash := range hashes { - torrent := q.storage.Get(hash) + torrent := s.qbit.Storage.Get(hash) 
if torrent == nil { continue } - go q.PauseTorrent(torrent) + go s.qbit.PauseTorrent(torrent) } w.WriteHeader(http.StatusOK) } -func (q *QBit) handleTorrentsResume(w http.ResponseWriter, r *http.Request) { +func (s *Server) handleTorrentsResume(w http.ResponseWriter, r *http.Request) { ctx := r.Context() hashes, _ := ctx.Value("hashes").([]string) for _, hash := range hashes { - torrent := q.storage.Get(hash) + torrent := s.qbit.Storage.Get(hash) if torrent == nil { continue } - go q.ResumeTorrent(torrent) + go s.qbit.ResumeTorrent(torrent) } w.WriteHeader(http.StatusOK) } -func (q *QBit) handleTorrentRecheck(w http.ResponseWriter, r *http.Request) { +func (s *Server) handleTorrentRecheck(w http.ResponseWriter, r *http.Request) { ctx := r.Context() hashes, _ := ctx.Value("hashes").([]string) for _, hash := range hashes { - torrent := q.storage.Get(hash) + torrent := s.qbit.Storage.Get(hash) if torrent == nil { continue } - go q.RefreshTorrent(torrent) + go s.qbit.RefreshTorrent(torrent) } w.WriteHeader(http.StatusOK) } -func (q *QBit) handleCategories(w http.ResponseWriter, r *http.Request) { - var categories = map[string]TorrentCategory{} - for _, cat := range q.Categories { - path := filepath.Join(q.DownloadFolder, cat) - categories[cat] = TorrentCategory{ +func (s *Server) handleCategories(w http.ResponseWriter, r *http.Request) { + var categories = map[string]shared.TorrentCategory{} + for _, cat := range s.qbit.Categories { + path := filepath.Join(s.qbit.DownloadFolder, cat) + categories[cat] = shared.TorrentCategory{ Name: cat, SavePath: path, } } - JSONResponse(w, categories, http.StatusOK) + common.JSONResponse(w, categories, http.StatusOK) } -func (q *QBit) handleCreateCategory(w http.ResponseWriter, r *http.Request) { +func (s *Server) handleCreateCategory(w http.ResponseWriter, r *http.Request) { err := r.ParseForm() if err != nil { http.Error(w, "Failed to parse form data", http.StatusBadRequest) @@ -152,24 +154,24 @@ func (q *QBit) 
handleCreateCategory(w http.ResponseWriter, r *http.Request) { return } - q.Categories = append(q.Categories, name) + s.qbit.Categories = append(s.qbit.Categories, name) - JSONResponse(w, nil, http.StatusOK) + common.JSONResponse(w, nil, http.StatusOK) } -func (q *QBit) handleTorrentProperties(w http.ResponseWriter, r *http.Request) { +func (s *Server) handleTorrentProperties(w http.ResponseWriter, r *http.Request) { hash := r.URL.Query().Get("hash") - torrent := q.storage.Get(hash) - properties := q.GetTorrentProperties(torrent) - JSONResponse(w, properties, http.StatusOK) + torrent := s.qbit.Storage.Get(hash) + properties := s.qbit.GetTorrentProperties(torrent) + common.JSONResponse(w, properties, http.StatusOK) } -func (q *QBit) handleTorrentFiles(w http.ResponseWriter, r *http.Request) { +func (s *Server) handleTorrentFiles(w http.ResponseWriter, r *http.Request) { hash := r.URL.Query().Get("hash") - torrent := q.storage.Get(hash) + torrent := s.qbit.Storage.Get(hash) if torrent == nil { return } - files := q.GetTorrentFiles(torrent) - JSONResponse(w, files, http.StatusOK) + files := s.qbit.GetTorrentFiles(torrent) + common.JSONResponse(w, files, http.StatusOK) } diff --git a/pkg/qbit/server/ui_handlers.go b/pkg/qbit/server/ui_handlers.go new file mode 100644 index 0000000..539e56d --- /dev/null +++ b/pkg/qbit/server/ui_handlers.go @@ -0,0 +1,114 @@ +package server + +import ( + "embed" + "encoding/json" + "goBlack/common" + "goBlack/pkg/arr" + "html/template" + "net/http" +) + +type AddRequest struct { + Url string `json:"url"` + Arr string `json:"arr"` + File string `json:"file"` + NotSymlink bool `json:"notSymlink"` + Content string `json:"content"` + Seasons []string `json:"seasons"` + Episodes []string `json:"episodes"` +} + +type ArrResponse struct { + Name string `json:"name"` + Url string `json:"url"` +} + +type ContentResponse struct { + ID string `json:"id"` + Title string `json:"title"` + Type string `json:"type"` + ArrID string `json:"arr"` +} + 
+//go:embed static/index.html +var content embed.FS + +func (s *Server) handleHome(w http.ResponseWriter, r *http.Request) { + tmpl, err := template.ParseFS(content, "static/index.html") + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } + + err = tmpl.Execute(w, nil) + if err != nil { + http.Error(w, err.Error(), http.StatusInternalServerError) + return + } +} + +func (s *Server) handleGetArrs(w http.ResponseWriter, r *http.Request) { + w.Header().Set("Content-Type", "application/json") + common.JSONResponse(w, s.qbit.Arrs.GetAll(), http.StatusOK) +} + +func (s *Server) handleContent(w http.ResponseWriter, r *http.Request) { + arrName := r.URL.Query().Get("arr") + _arr := s.qbit.Arrs.Get(arrName) + if _arr == nil { + http.Error(w, "Invalid arr", http.StatusBadRequest) + return + } + contents := _arr.GetContents() + w.Header().Set("Content-Type", "application/json") + common.JSONResponse(w, contents, http.StatusOK) +} + +func (s *Server) handleSearch(w http.ResponseWriter, r *http.Request) { + // arrName := r.URL.Query().Get("arr") + term := r.URL.Query().Get("term") + results, err := arr.SearchTMDB(term) + if err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + w.Header().Set("Content-Type", "application/json") + common.JSONResponse(w, results.Results, http.StatusOK) +} + +func (s *Server) handleSeasons(w http.ResponseWriter, r *http.Request) { + // arrId := r.URL.Query().Get("arrId") + // contentId := chi.URLParam(r, "contentId") + seasons := []string{"Season 1", "Season 2", "Season 3", "Season 4", "Season 5"} + w.Header().Set("Content-Type", "application/json") + common.JSONResponse(w, seasons, http.StatusOK) +} + +func (s *Server) handleEpisodes(w http.ResponseWriter, r *http.Request) { + // arrId := r.URL.Query().Get("arrId") + // contentId := chi.URLParam(r, "contentId") + // seasonIds := strings.Split(r.URL.Query().Get("seasons"), ",") + episodes := []string{"Episode 1", "Episode 2", 
"Episode 3", "Episode 4", "Episode 5"} + w.Header().Set("Content-Type", "application/json") + common.JSONResponse(w, episodes, http.StatusOK) +} + +func (s *Server) handleAddContent(w http.ResponseWriter, r *http.Request) { + var req AddRequest + if err := json.NewDecoder(r.Body).Decode(&req); err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + _arr := s.qbit.Arrs.Get(req.Arr) + if _arr == nil { + _arr = arr.NewArr(req.Arr, "", "", arr.Sonarr) + } + importReq := NewImportRequest(req.Url, _arr, !req.NotSymlink) + err := importReq.Process(s) + if err != nil { + http.Error(w, err.Error(), http.StatusBadRequest) + return + } + common.JSONResponse(w, importReq, http.StatusOK) +} diff --git a/pkg/qbit/downloader.go b/pkg/qbit/shared/downloader.go similarity index 72% rename from pkg/qbit/downloader.go rename to pkg/qbit/shared/downloader.go index 38cb176..183c274 100644 --- a/pkg/qbit/downloader.go +++ b/pkg/qbit/shared/downloader.go @@ -1,29 +1,27 @@ -package qbit +package shared import ( + "fmt" "goBlack/common" "goBlack/pkg/debrid" - "goBlack/pkg/qbit/downloaders" + "goBlack/pkg/downloaders" "os" "path/filepath" "sync" "time" ) -func (q *QBit) processManualFiles(torrent *Torrent, debridTorrent *debrid.Torrent, arr *debrid.Arr) { +func (q *QBit) processManualFiles(debridTorrent *debrid.Torrent) (string, error) { q.logger.Printf("Downloading %d files...", len(debridTorrent.DownloadLinks)) torrentPath := common.RemoveExtension(debridTorrent.OriginalFilename) parent := common.RemoveInvalidChars(filepath.Join(q.DownloadFolder, debridTorrent.Arr.Name, torrentPath)) err := os.MkdirAll(parent, os.ModePerm) if err != nil { - q.logger.Printf("Failed to create directory: %s\n", parent) - q.MarkAsFailed(torrent) - return + // add previous error to the error and return + return "", fmt.Errorf("failed to create directory: %s: %v", parent, err) } - torrent.TorrentPath = torrentPath q.downloadFiles(debridTorrent, parent) - q.UpdateTorrent(torrent, 
debridTorrent) - q.RefreshArr(arr) + return torrentPath, nil } func (q *QBit) downloadFiles(debridTorrent *debrid.Torrent, parent string) { @@ -52,7 +50,7 @@ func (q *QBit) downloadFiles(debridTorrent *debrid.Torrent, parent string) { q.logger.Printf("Downloaded all files for %s\n", debridTorrent.Name) } -func (q *QBit) processSymlink(torrent *Torrent, debridTorrent *debrid.Torrent, arr *debrid.Arr) { +func (q *QBit) ProcessSymlink(debridTorrent *debrid.Torrent) (string, error) { var wg sync.WaitGroup files := debridTorrent.Files ready := make(chan debrid.TorrentFile, len(files)) @@ -61,17 +59,12 @@ func (q *QBit) processSymlink(torrent *Torrent, debridTorrent *debrid.Torrent, a rCloneBase := debridTorrent.Debrid.GetMountPath() torrentPath, err := q.getTorrentPath(rCloneBase, debridTorrent) // /MyTVShow/ if err != nil { - q.MarkAsFailed(torrent) - q.logger.Printf("Error: %v", err) - return + return "", fmt.Errorf("failed to get torrent path: %v", err) } - torrentSymlinkPath := filepath.Join(q.DownloadFolder, debridTorrent.Arr.Name, torrentPath) // /mnt/symlinks/{category}/MyTVShow/ err = os.MkdirAll(torrentSymlinkPath, os.ModePerm) if err != nil { - q.logger.Printf("Failed to create directory: %s\n", torrentSymlinkPath) - q.MarkAsFailed(torrent) - return + return "", fmt.Errorf("failed to create directory: %s: %v", torrentSymlinkPath, err) } torrentRclonePath := filepath.Join(rCloneBase, torrentPath) for _, file := range files { @@ -88,32 +81,16 @@ func (q *QBit) processSymlink(torrent *Torrent, debridTorrent *debrid.Torrent, a q.logger.Println("File is ready:", f.Path) q.createSymLink(torrentSymlinkPath, torrentRclonePath, f) } - // Update the torrent when all files are ready - torrent.TorrentPath = filepath.Base(torrentPath) // Quite important - q.UpdateTorrent(torrent, debridTorrent) - q.RefreshArr(arr) + return torrentPath, nil } func (q *QBit) getTorrentPath(rclonePath string, debridTorrent *debrid.Torrent) (string, error) { - pathChan := make(chan string) - 
errChan := make(chan error) - - go func() { - for { - torrentPath := debridTorrent.GetMountFolder(rclonePath) - if torrentPath != "" { - pathChan <- torrentPath - return - } - time.Sleep(time.Second) + for { + torrentPath := debridTorrent.GetMountFolder(rclonePath) + if torrentPath != "" { + return torrentPath, nil } - }() - - select { - case path := <-pathChan: - return path, nil - case err := <-errChan: - return "", err + time.Sleep(time.Second) } } diff --git a/pkg/qbit/shared/qbit.go b/pkg/qbit/shared/qbit.go new file mode 100644 index 0000000..4ae55b2 --- /dev/null +++ b/pkg/qbit/shared/qbit.go @@ -0,0 +1,46 @@ +package shared + +import ( + "cmp" + "goBlack/common" + "goBlack/pkg/arr" + "goBlack/pkg/debrid" + "log" + "os" +) + +type QBit struct { + Username string `json:"username"` + Password string `json:"password"` + Port string `json:"port"` + DownloadFolder string `json:"download_folder"` + Categories []string `json:"categories"` + Debrid *debrid.DebridService + cache *common.Cache + Storage *TorrentStorage + debug bool + logger *log.Logger + Arrs *arr.Storage + RefreshInterval int +} + +func NewQBit(config *common.Config, deb *debrid.DebridService, cache *common.Cache, logger *log.Logger) *QBit { + cfg := config.QBitTorrent + port := cmp.Or(cfg.Port, os.Getenv("QBIT_PORT"), "8182") + refreshInterval := cmp.Or(cfg.RefreshInterval, 10) + arrs := arr.NewStorage() + return &QBit{ + Username: cfg.Username, + Password: cfg.Password, + Port: port, + DownloadFolder: cfg.DownloadFolder, + Categories: cfg.Categories, + Debrid: deb, + cache: cache, + debug: cfg.Debug, + Storage: NewTorrentStorage("torrents.json"), + logger: logger, + Arrs: arrs, + RefreshInterval: refreshInterval, + } +} diff --git a/pkg/qbit/storage.go b/pkg/qbit/shared/storage.go similarity index 97% rename from pkg/qbit/storage.go rename to pkg/qbit/shared/storage.go index 59f880b..bd93e1c 100644 --- a/pkg/qbit/storage.go +++ b/pkg/qbit/shared/storage.go @@ -1,4 +1,4 @@ -package qbit +package 
shared import ( "encoding/json" @@ -122,7 +122,10 @@ func (ts *TorrentStorage) Delete(hash string) { } // Delete the torrent folder if torrent.ContentPath != "" { - os.RemoveAll(torrent.ContentPath) + err := os.RemoveAll(torrent.ContentPath) + if err != nil { + return + } } } diff --git a/pkg/qbit/structs.go b/pkg/qbit/shared/structs.go similarity index 76% rename from pkg/qbit/structs.go rename to pkg/qbit/shared/structs.go index c93906e..5bf2884 100644 --- a/pkg/qbit/structs.go +++ b/pkg/qbit/shared/structs.go @@ -1,4 +1,4 @@ -package qbit +package shared import "goBlack/pkg/debrid" @@ -14,40 +14,40 @@ type BuildInfo struct { type AppPreferences struct { AddTrackers string `json:"add_trackers"` AddTrackersEnabled bool `json:"add_trackers_enabled"` - AltDlLimit int64 `json:"alt_dl_limit"` - AltUpLimit int64 `json:"alt_up_limit"` + AltDlLimit int `json:"alt_dl_limit"` + AltUpLimit int `json:"alt_up_limit"` AlternativeWebuiEnabled bool `json:"alternative_webui_enabled"` AlternativeWebuiPath string `json:"alternative_webui_path"` AnnounceIp string `json:"announce_ip"` AnnounceToAllTiers bool `json:"announce_to_all_tiers"` AnnounceToAllTrackers bool `json:"announce_to_all_trackers"` AnonymousMode bool `json:"anonymous_mode"` - AsyncIoThreads int64 `json:"async_io_threads"` - AutoDeleteMode int64 `json:"auto_delete_mode"` + AsyncIoThreads int `json:"async_io_threads"` + AutoDeleteMode int `json:"auto_delete_mode"` AutoTmmEnabled bool `json:"auto_tmm_enabled"` AutorunEnabled bool `json:"autorun_enabled"` AutorunProgram string `json:"autorun_program"` BannedIPs string `json:"banned_IPs"` - BittorrentProtocol int64 `json:"bittorrent_protocol"` + BittorrentProtocol int `json:"bittorrent_protocol"` BypassAuthSubnetWhitelist string `json:"bypass_auth_subnet_whitelist"` BypassAuthSubnetWhitelistEnabled bool `json:"bypass_auth_subnet_whitelist_enabled"` BypassLocalAuth bool `json:"bypass_local_auth"` CategoryChangedTmmEnabled bool `json:"category_changed_tmm_enabled"` - 
CheckingMemoryUse int64 `json:"checking_memory_use"` + CheckingMemoryUse int `json:"checking_memory_use"` CreateSubfolderEnabled bool `json:"create_subfolder_enabled"` CurrentInterfaceAddress string `json:"current_interface_address"` CurrentNetworkInterface string `json:"current_network_interface"` Dht bool `json:"dht"` - DiskCache int64 `json:"disk_cache"` - DiskCacheTtl int64 `json:"disk_cache_ttl"` - DlLimit int64 `json:"dl_limit"` + DiskCache int `json:"disk_cache"` + DiskCacheTtl int `json:"disk_cache_ttl"` + DlLimit int `json:"dl_limit"` DontCountSlowTorrents bool `json:"dont_count_slow_torrents"` DyndnsDomain string `json:"dyndns_domain"` DyndnsEnabled bool `json:"dyndns_enabled"` DyndnsPassword string `json:"dyndns_password"` - DyndnsService int64 `json:"dyndns_service"` + DyndnsService int `json:"dyndns_service"` DyndnsUsername string `json:"dyndns_username"` - EmbeddedTrackerPort int64 `json:"embedded_tracker_port"` + EmbeddedTrackerPort int `json:"embedded_tracker_port"` EnableCoalesceReadWrite bool `json:"enable_coalesce_read_write"` EnableEmbeddedTracker bool `json:"enable_embedded_tracker"` EnableMultiConnectionsFromSameIp bool `json:"enable_multi_connections_from_same_ip"` @@ -55,10 +55,10 @@ type AppPreferences struct { EnablePieceExtentAffinity bool `json:"enable_piece_extent_affinity"` EnableSuperSeeding bool `json:"enable_super_seeding"` EnableUploadSuggestions bool `json:"enable_upload_suggestions"` - Encryption int64 `json:"encryption"` + Encryption int `json:"encryption"` ExportDir string `json:"export_dir"` ExportDirFin string `json:"export_dir_fin"` - FilePoolSize int64 `json:"file_pool_size"` + FilePoolSize int `json:"file_pool_size"` IncompleteFilesExt bool `json:"incomplete_files_ext"` IpFilterEnabled bool `json:"ip_filter_enabled"` IpFilterPath string `json:"ip_filter_path"` @@ -66,7 +66,7 @@ type AppPreferences struct { LimitLanPeers bool `json:"limit_lan_peers"` LimitTcpOverhead bool `json:"limit_tcp_overhead"` LimitUtpRate bool 
`json:"limit_utp_rate"` - ListenPort int64 `json:"listen_port"` + ListenPort int `json:"listen_port"` Locale string `json:"locale"` Lsd bool `json:"lsd"` MailNotificationAuthEnabled bool `json:"mail_notification_auth_enabled"` @@ -77,79 +77,79 @@ type AppPreferences struct { MailNotificationSmtp string `json:"mail_notification_smtp"` MailNotificationSslEnabled bool `json:"mail_notification_ssl_enabled"` MailNotificationUsername string `json:"mail_notification_username"` - MaxActiveDownloads int64 `json:"max_active_downloads"` - MaxActiveTorrents int64 `json:"max_active_torrents"` - MaxActiveUploads int64 `json:"max_active_uploads"` - MaxConnec int64 `json:"max_connec"` - MaxConnecPerTorrent int64 `json:"max_connec_per_torrent"` - MaxRatio int64 `json:"max_ratio"` - MaxRatioAct int64 `json:"max_ratio_act"` + MaxActiveDownloads int `json:"max_active_downloads"` + MaxActiveTorrents int `json:"max_active_torrents"` + MaxActiveUploads int `json:"max_active_uploads"` + MaxConnec int `json:"max_connec"` + MaxConnecPerTorrent int `json:"max_connec_per_torrent"` + MaxRatio int `json:"max_ratio"` + MaxRatioAct int `json:"max_ratio_act"` MaxRatioEnabled bool `json:"max_ratio_enabled"` - MaxSeedingTime int64 `json:"max_seeding_time"` + MaxSeedingTime int `json:"max_seeding_time"` MaxSeedingTimeEnabled bool `json:"max_seeding_time_enabled"` - MaxUploads int64 `json:"max_uploads"` - MaxUploadsPerTorrent int64 `json:"max_uploads_per_torrent"` - OutgoingPortsMax int64 `json:"outgoing_ports_max"` - OutgoingPortsMin int64 `json:"outgoing_ports_min"` + MaxUploads int `json:"max_uploads"` + MaxUploadsPerTorrent int `json:"max_uploads_per_torrent"` + OutgoingPortsMax int `json:"outgoing_ports_max"` + OutgoingPortsMin int `json:"outgoing_ports_min"` Pex bool `json:"pex"` PreallocateAll bool `json:"preallocate_all"` ProxyAuthEnabled bool `json:"proxy_auth_enabled"` ProxyIp string `json:"proxy_ip"` ProxyPassword string `json:"proxy_password"` ProxyPeerConnections bool 
`json:"proxy_peer_connections"` - ProxyPort int64 `json:"proxy_port"` + ProxyPort int `json:"proxy_port"` ProxyTorrentsOnly bool `json:"proxy_torrents_only"` - ProxyType int64 `json:"proxy_type"` + ProxyType int `json:"proxy_type"` ProxyUsername string `json:"proxy_username"` QueueingEnabled bool `json:"queueing_enabled"` RandomPort bool `json:"random_port"` RecheckCompletedTorrents bool `json:"recheck_completed_torrents"` ResolvePeerCountries bool `json:"resolve_peer_countries"` RssAutoDownloadingEnabled bool `json:"rss_auto_downloading_enabled"` - RssMaxArticlesPerFeed int64 `json:"rss_max_articles_per_feed"` + RssMaxArticlesPerFeed int `json:"rss_max_articles_per_feed"` RssProcessingEnabled bool `json:"rss_processing_enabled"` - RssRefreshInterval int64 `json:"rss_refresh_interval"` + RssRefreshInterval int `json:"rss_refresh_interval"` SavePath string `json:"save_path"` SavePathChangedTmmEnabled bool `json:"save_path_changed_tmm_enabled"` - SaveResumeDataInterval int64 `json:"save_resume_data_interval"` + SaveResumeDataInterval int `json:"save_resume_data_interval"` ScanDirs ScanDirs `json:"scan_dirs"` - ScheduleFromHour int64 `json:"schedule_from_hour"` - ScheduleFromMin int64 `json:"schedule_from_min"` - ScheduleToHour int64 `json:"schedule_to_hour"` - ScheduleToMin int64 `json:"schedule_to_min"` - SchedulerDays int64 `json:"scheduler_days"` + ScheduleFromHour int `json:"schedule_from_hour"` + ScheduleFromMin int `json:"schedule_from_min"` + ScheduleToHour int `json:"schedule_to_hour"` + ScheduleToMin int `json:"schedule_to_min"` + SchedulerDays int `json:"scheduler_days"` SchedulerEnabled bool `json:"scheduler_enabled"` - SendBufferLowWatermark int64 `json:"send_buffer_low_watermark"` - SendBufferWatermark int64 `json:"send_buffer_watermark"` - SendBufferWatermarkFactor int64 `json:"send_buffer_watermark_factor"` - SlowTorrentDlRateThreshold int64 `json:"slow_torrent_dl_rate_threshold"` - SlowTorrentInactiveTimer int64 `json:"slow_torrent_inactive_timer"` - 
SlowTorrentUlRateThreshold int64 `json:"slow_torrent_ul_rate_threshold"` - SocketBacklogSize int64 `json:"socket_backlog_size"` + SendBufferLowWatermark int `json:"send_buffer_low_watermark"` + SendBufferWatermark int `json:"send_buffer_watermark"` + SendBufferWatermarkFactor int `json:"send_buffer_watermark_factor"` + SlowTorrentDlRateThreshold int `json:"slow_torrent_dl_rate_threshold"` + SlowTorrentInactiveTimer int `json:"slow_torrent_inactive_timer"` + SlowTorrentUlRateThreshold int `json:"slow_torrent_ul_rate_threshold"` + SocketBacklogSize int `json:"socket_backlog_size"` StartPausedEnabled bool `json:"start_paused_enabled"` - StopTrackerTimeout int64 `json:"stop_tracker_timeout"` + StopTrackerTimeout int `json:"stop_tracker_timeout"` TempPath string `json:"temp_path"` TempPathEnabled bool `json:"temp_path_enabled"` TorrentChangedTmmEnabled bool `json:"torrent_changed_tmm_enabled"` - UpLimit int64 `json:"up_limit"` - UploadChokingAlgorithm int64 `json:"upload_choking_algorithm"` - UploadSlotsBehavior int64 `json:"upload_slots_behavior"` + UpLimit int `json:"up_limit"` + UploadChokingAlgorithm int `json:"upload_choking_algorithm"` + UploadSlotsBehavior int `json:"upload_slots_behavior"` Upnp bool `json:"upnp"` - UpnpLeaseDuration int64 `json:"upnp_lease_duration"` + UpnpLeaseDuration int `json:"upnp_lease_duration"` UseHttps bool `json:"use_https"` - UtpTcpMixedMode int64 `json:"utp_tcp_mixed_mode"` + UtpTcpMixedMode int `json:"utp_tcp_mixed_mode"` WebUiAddress string `json:"web_ui_address"` - WebUiBanDuration int64 `json:"web_ui_ban_duration"` + WebUiBanDuration int `json:"web_ui_ban_duration"` WebUiClickjackingProtectionEnabled bool `json:"web_ui_clickjacking_protection_enabled"` WebUiCsrfProtectionEnabled bool `json:"web_ui_csrf_protection_enabled"` WebUiDomainList string `json:"web_ui_domain_list"` WebUiHostHeaderValidationEnabled bool `json:"web_ui_host_header_validation_enabled"` WebUiHttpsCertPath string `json:"web_ui_https_cert_path"` 
WebUiHttpsKeyPath string `json:"web_ui_https_key_path"` - WebUiMaxAuthFailCount int64 `json:"web_ui_max_auth_fail_count"` - WebUiPort int64 `json:"web_ui_port"` + WebUiMaxAuthFailCount int `json:"web_ui_max_auth_fail_count"` + WebUiPort int `json:"web_ui_port"` WebUiSecureCookieEnabled bool `json:"web_ui_secure_cookie_enabled"` - WebUiSessionTimeout int64 `json:"web_ui_session_timeout"` + WebUiSessionTimeout int `json:"web_ui_session_timeout"` WebUiUpnp bool `json:"web_ui_upnp"` WebUiUsername string `json:"web_ui_username"` WebUiPassword string `json:"web_ui_password"` @@ -179,44 +179,44 @@ type Torrent struct { Availability float64 `json:"availability,omitempty"` Category string `json:"category,omitempty"` Completed int64 `json:"completed"` - CompletionOn int64 `json:"completion_on,omitempty"` + CompletionOn int `json:"completion_on,omitempty"` ContentPath string `json:"content_path"` - DlLimit int64 `json:"dl_limit"` - Dlspeed int64 `json:"dlspeed"` + DlLimit int `json:"dl_limit"` + Dlspeed int `json:"dlspeed"` Downloaded int64 `json:"downloaded"` DownloadedSession int64 `json:"downloaded_session"` - Eta int64 `json:"eta"` + Eta int `json:"eta"` FlPiecePrio bool `json:"f_l_piece_prio,omitempty"` ForceStart bool `json:"force_start,omitempty"` Hash string `json:"hash"` LastActivity int64 `json:"last_activity,omitempty"` MagnetUri string `json:"magnet_uri,omitempty"` - MaxRatio int64 `json:"max_ratio,omitempty"` - MaxSeedingTime int64 `json:"max_seeding_time,omitempty"` + MaxRatio int `json:"max_ratio,omitempty"` + MaxSeedingTime int `json:"max_seeding_time,omitempty"` Name string `json:"name,omitempty"` - NumComplete int64 `json:"num_complete,omitempty"` - NumIncomplete int64 `json:"num_incomplete,omitempty"` - NumLeechs int64 `json:"num_leechs,omitempty"` - NumSeeds int64 `json:"num_seeds,omitempty"` - Priority int64 `json:"priority,omitempty"` - Progress float32 `json:"progress"` - Ratio int64 `json:"ratio,omitempty"` - RatioLimit int64 
`json:"ratio_limit,omitempty"` + NumComplete int `json:"num_complete,omitempty"` + NumIncomplete int `json:"num_incomplete,omitempty"` + NumLeechs int `json:"num_leechs,omitempty"` + NumSeeds int `json:"num_seeds,omitempty"` + Priority int `json:"priority,omitempty"` + Progress float64 `json:"progress"` + Ratio int `json:"ratio,omitempty"` + RatioLimit int `json:"ratio_limit,omitempty"` SavePath string `json:"save_path"` - SeedingTimeLimit int64 `json:"seeding_time_limit,omitempty"` + SeedingTimeLimit int `json:"seeding_time_limit,omitempty"` SeenComplete int64 `json:"seen_complete,omitempty"` SeqDl bool `json:"seq_dl"` Size int64 `json:"size,omitempty"` State string `json:"state,omitempty"` SuperSeeding bool `json:"super_seeding"` Tags string `json:"tags,omitempty"` - TimeActive int64 `json:"time_active,omitempty"` + TimeActive int `json:"time_active,omitempty"` TotalSize int64 `json:"total_size,omitempty"` Tracker string `json:"tracker,omitempty"` UpLimit int64 `json:"up_limit,omitempty"` Uploaded int64 `json:"uploaded,omitempty"` UploadedSession int64 `json:"uploaded_session,omitempty"` - Upspeed int64 `json:"upspeed,omitempty"` + Upspeed int `json:"upspeed,omitempty"` } func (t *Torrent) IsReady() bool { @@ -229,24 +229,24 @@ type TorrentProperties struct { CompletionDate int64 `json:"completion_date,omitempty"` CreatedBy string `json:"created_by,omitempty"` CreationDate int64 `json:"creation_date,omitempty"` - DlLimit int64 `json:"dl_limit,omitempty"` - DlSpeed int64 `json:"dl_speed,omitempty"` - DlSpeedAvg int64 `json:"dl_speed_avg,omitempty"` - Eta int64 `json:"eta,omitempty"` + DlLimit int `json:"dl_limit,omitempty"` + DlSpeed int `json:"dl_speed,omitempty"` + DlSpeedAvg int `json:"dl_speed_avg,omitempty"` + Eta int `json:"eta,omitempty"` LastSeen int64 `json:"last_seen,omitempty"` - NbConnections int64 `json:"nb_connections,omitempty"` - NbConnectionsLimit int64 `json:"nb_connections_limit,omitempty"` - Peers int64 `json:"peers,omitempty"` - PeersTotal 
int64 `json:"peers_total,omitempty"` + NbConnections int `json:"nb_connections,omitempty"` + NbConnectionsLimit int `json:"nb_connections_limit,omitempty"` + Peers int `json:"peers,omitempty"` + PeersTotal int `json:"peers_total,omitempty"` PieceSize int64 `json:"piece_size,omitempty"` PiecesHave int64 `json:"pieces_have,omitempty"` PiecesNum int64 `json:"pieces_num,omitempty"` - Reannounce int64 `json:"reannounce,omitempty"` + Reannounce int `json:"reannounce,omitempty"` SavePath string `json:"save_path,omitempty"` - SeedingTime int64 `json:"seeding_time,omitempty"` - Seeds int64 `json:"seeds,omitempty"` - SeedsTotal int64 `json:"seeds_total,omitempty"` - ShareRatio int64 `json:"share_ratio,omitempty"` + SeedingTime int `json:"seeding_time,omitempty"` + Seeds int `json:"seeds,omitempty"` + SeedsTotal int `json:"seeds_total,omitempty"` + ShareRatio int `json:"share_ratio,omitempty"` TimeElapsed int64 `json:"time_elapsed,omitempty"` TotalDownloaded int64 `json:"total_downloaded,omitempty"` TotalDownloadedSession int64 `json:"total_downloaded_session,omitempty"` @@ -254,19 +254,19 @@ type TorrentProperties struct { TotalUploaded int64 `json:"total_uploaded,omitempty"` TotalUploadedSession int64 `json:"total_uploaded_session,omitempty"` TotalWasted int64 `json:"total_wasted,omitempty"` - UpLimit int64 `json:"up_limit,omitempty"` - UpSpeed int64 `json:"up_speed,omitempty"` - UpSpeedAvg int64 `json:"up_speed_avg,omitempty"` + UpLimit int `json:"up_limit,omitempty"` + UpSpeed int `json:"up_speed,omitempty"` + UpSpeedAvg int `json:"up_speed_avg,omitempty"` } type TorrentFile struct { Index int `json:"index,omitempty"` Name string `json:"name,omitempty"` Size int64 `json:"size,omitempty"` - Progress int64 `json:"progress,omitempty"` - Priority int64 `json:"priority,omitempty"` + Progress int `json:"progress,omitempty"` + Priority int `json:"priority,omitempty"` IsSeed bool `json:"is_seed,omitempty"` - PieceRange []int64 `json:"piece_range,omitempty"` + PieceRange []int 
`json:"piece_range,omitempty"` Availability float64 `json:"availability,omitempty"` } diff --git a/pkg/qbit/torrent.go b/pkg/qbit/shared/torrent.go similarity index 50% rename from pkg/qbit/torrent.go rename to pkg/qbit/shared/torrent.go index cdd51fc..b0c0407 100644 --- a/pkg/qbit/torrent.go +++ b/pkg/qbit/shared/torrent.go @@ -1,18 +1,133 @@ -package qbit +package shared import ( "cmp" + "context" + "fmt" + "github.com/google/uuid" + "goBlack/common" + "goBlack/pkg/arr" "goBlack/pkg/debrid" + "io" + "mime/multipart" "os" "path/filepath" + "strings" "time" ) // All torrent related helpers goes here +func (q *QBit) AddMagnet(ctx context.Context, url, category string) error { + magnet, err := common.GetMagnetFromUrl(url) + if err != nil { + q.logger.Printf("Error parsing magnet link: %v\n", err) + return err + } + err = q.Process(ctx, magnet, category) + if err != nil { + q.logger.Println("Failed to process magnet:", err) + return err + } + return nil +} + +func (q *QBit) AddTorrent(ctx context.Context, fileHeader *multipart.FileHeader, category string) error { + file, _ := fileHeader.Open() + defer file.Close() + var reader io.Reader = file + magnet, err := common.GetMagnetFromFile(reader, fileHeader.Filename) + if err != nil { + q.logger.Printf("Error reading file: %s", fileHeader.Filename) + return err + } + err = q.Process(ctx, magnet, category) + if err != nil { + q.logger.Println("Failed to process torrent:", err) + return err + } + return nil +} + +func (q *QBit) Process(ctx context.Context, magnet *common.Magnet, category string) error { + torrent := q.CreateTorrentFromMagnet(magnet, category) + a, ok := ctx.Value("arr").(*arr.Arr) + if !ok { + return fmt.Errorf("arr not found in context") + } + isSymlink := ctx.Value("isSymlink").(bool) + debridTorrent, err := debrid.ProcessTorrent(q.Debrid, magnet, a, isSymlink) + if err != nil || debridTorrent == nil { + if err == nil { + err = fmt.Errorf("failed to process torrent") + } + return err + } + torrent = 
q.UpdateTorrentMin(torrent, debridTorrent) + q.Storage.AddOrUpdate(torrent) + go q.ProcessFiles(torrent, debridTorrent, a, isSymlink) // We can send async for file processing not to delay the response + return nil +} + +func (q *QBit) CreateTorrentFromMagnet(magnet *common.Magnet, category string) *Torrent { + torrent := &Torrent{ + ID: uuid.NewString(), + Hash: strings.ToLower(magnet.InfoHash), + Name: magnet.Name, + Size: magnet.Size, + Category: category, + State: "downloading", + MagnetUri: magnet.Link, + + Tracker: "udp://tracker.opentrackr.org:1337", + UpLimit: -1, + DlLimit: -1, + AutoTmm: false, + Ratio: 1, + RatioLimit: 1, + } + return torrent +} + +func (q *QBit) ProcessFiles(torrent *Torrent, debridTorrent *debrid.Torrent, arr *arr.Arr, isSymlink bool) { + for debridTorrent.Status != "downloaded" { + progress := debridTorrent.Progress + q.logger.Printf("%s Download Progress: %.2f%%", debridTorrent.Debrid.GetName(), progress) + time.Sleep(5 * time.Second) + dbT, err := debridTorrent.Debrid.CheckStatus(debridTorrent, isSymlink) + if err != nil { + q.logger.Printf("Error checking status: %v", err) + q.MarkAsFailed(torrent) + _ = arr.Refresh() + return + } + debridTorrent = dbT + torrent = q.UpdateTorrentMin(torrent, debridTorrent) + } + var ( + torrentPath string + err error + ) + debridTorrent.Arr = arr + if isSymlink { + torrentPath, err = q.ProcessSymlink(debridTorrent) + } else { + torrentPath, err = q.processManualFiles(debridTorrent) + } + if err != nil { + q.MarkAsFailed(torrent) + go debridTorrent.Delete() + q.logger.Printf("Error: %v", err) + return + } + torrent.TorrentPath = filepath.Base(torrentPath) + q.UpdateTorrent(torrent, debridTorrent) + _ = arr.Refresh() +} + func (q *QBit) MarkAsFailed(t *Torrent) *Torrent { t.State = "error" - q.storage.AddOrUpdate(t) + q.Storage.AddOrUpdate(t) return t } @@ -25,31 +140,31 @@ func (q *QBit) UpdateTorrentMin(t *Torrent, debridTorrent *debrid.Torrent) *Torr if err != nil { addedOn = time.Now() } - 
totalSize := float64(debridTorrent.Bytes) - progress := cmp.Or(debridTorrent.Progress, 100.0) + totalSize := debridTorrent.Bytes + progress := cmp.Or(debridTorrent.Progress, 100) progress = progress / 100.0 - sizeCompleted := int64(totalSize * progress) + sizeCompleted := int64(float64(totalSize) * progress) - var speed int64 + var speed int if debridTorrent.Speed != 0 { speed = debridTorrent.Speed } - var eta int64 + var eta int if speed != 0 { - eta = int64((totalSize - float64(sizeCompleted)) / float64(speed)) + eta = int(totalSize-sizeCompleted) / speed } t.ID = debridTorrent.Id t.Name = debridTorrent.Name t.AddedOn = addedOn.Unix() t.DebridTorrent = debridTorrent - t.Size = int64(totalSize) + t.Size = totalSize t.Completed = sizeCompleted t.Downloaded = sizeCompleted t.DownloadedSession = sizeCompleted t.Uploaded = sizeCompleted t.UploadedSession = sizeCompleted - t.AmountLeft = int64(totalSize) - sizeCompleted - t.Progress = float32(progress) + t.AmountLeft = totalSize - sizeCompleted + t.Progress = progress t.Eta = eta t.Dlspeed = speed t.Upspeed = speed @@ -82,7 +197,7 @@ func (q *QBit) UpdateTorrent(t *Torrent, debridTorrent *debrid.Torrent) *Torrent if t.IsReady() { t.State = "pausedUP" - q.storage.Update(t) + q.Storage.Update(t) return t } @@ -94,7 +209,7 @@ func (q *QBit) UpdateTorrent(t *Torrent, debridTorrent *debrid.Torrent) *Torrent case <-ticker.C: if t.IsReady() { t.State = "pausedUP" - q.storage.Update(t) + q.Storage.Update(t) return t } updatedT := q.UpdateTorrent(t, debridTorrent) diff --git a/pkg/qbit/utils.go b/pkg/qbit/shared/utils.go similarity index 52% rename from pkg/qbit/utils.go rename to pkg/qbit/shared/utils.go index fd92651..dd45d32 100644 --- a/pkg/qbit/utils.go +++ b/pkg/qbit/shared/utils.go @@ -1,29 +1,13 @@ -package qbit +package shared import ( - "encoding/json" "goBlack/common" "goBlack/pkg/debrid" - "net/http" "path/filepath" "sync" "time" ) -//func generateSID() (string, error) { -// bytes := make([]byte, sidLength) -// if 
_, err := rand.Read(bytes); err != nil { -// return "", err -// } -// return hex.EncodeToString(bytes), nil -//} - -func JSONResponse(w http.ResponseWriter, data interface{}, code int) { - w.Header().Set("Content-Type", "application/json") - w.WriteHeader(code) - json.NewEncoder(w).Encode(data) -} - func checkFileLoop(wg *sync.WaitGroup, dir string, file debrid.TorrentFile, ready chan<- debrid.TorrentFile) { defer wg.Done() ticker := time.NewTicker(1 * time.Second) // Check every second diff --git a/pkg/qbit/worker.go b/pkg/qbit/shared/worker.go similarity index 63% rename from pkg/qbit/worker.go rename to pkg/qbit/shared/worker.go index 14852f9..46f9e3e 100644 --- a/pkg/qbit/worker.go +++ b/pkg/qbit/shared/worker.go @@ -1,8 +1,7 @@ -package qbit +package shared import ( "context" - "goBlack/pkg/debrid" "time" ) @@ -20,7 +19,7 @@ func (q *QBit) StartRefreshWorker(ctx context.Context) { q.logger.Println("Qbit Refresh Worker stopped") return case <-refreshTicker.C: - torrents := q.storage.GetAll("", "", nil) + torrents := q.Storage.GetAll("", "", nil) if len(torrents) > 0 { q.RefreshArrs() } @@ -29,18 +28,10 @@ func (q *QBit) StartRefreshWorker(ctx context.Context) { } func (q *QBit) RefreshArrs() { - q.arrs.Range(func(key, value interface{}) bool { - host, ok := key.(string) - token, ok2 := value.(string) - if !ok || !ok2 { - return true + for _, arr := range q.Arrs.GetAll() { + err := arr.Refresh() + if err != nil { + return } - arr := &debrid.Arr{ - Name: "", - Token: token, - Host: host, - } - q.RefreshArr(arr) - return true - }) + } }