Changelog: 0.1.4

This commit is contained in:
Mukhtar Akere
2024-09-02 02:58:58 +01:00
parent d405e0d8e0
commit 60c6cb32d3
8 changed files with 147 additions and 80 deletions

View File

@@ -25,3 +25,10 @@
- Bug fixes - Bug fixes
- Fixed indexer getting disabled - Fixed indexer getting disabled
- Fixed blackhole not working - Fixed blackhole not working
#### 0.1.4
- Rewrote report log
- Fixed YTS, 1337x not grabbing infohash
- Fixed torrent symlink bug

View File

@@ -19,7 +19,7 @@ The proxy is useful in filtering out un-cached Real Debrid torrents
version: '3.7' version: '3.7'
services: services:
blackhole: blackhole:
image: cy01/blackhole:latest image: cy01/blackhole:latest # or cy01/blackhole:beta
container_name: blackhole container_name: blackhole
user: "1000:1000" user: "1000:1000"
volumes: volumes:

View File

@@ -53,7 +53,7 @@ func (b *Blackhole) processFiles(arr *debrid.Arr, torrent *debrid.Torrent) {
files := torrent.Files files := torrent.Files
ready := make(chan debrid.TorrentFile, len(files)) ready := make(chan debrid.TorrentFile, len(files))
log.Println("Checking files...") log.Printf("Checking %d files...", len(files))
for _, file := range files { for _, file := range files {
wg.Add(1) wg.Add(1)
@@ -80,12 +80,13 @@ func (b *Blackhole) createSymLink(arr *debrid.Arr, torrent *debrid.Torrent) {
if err != nil { if err != nil {
log.Printf("Failed to create directory: %s\n", path) log.Printf("Failed to create directory: %s\n", path)
} }
for _, file := range torrent.Files { for _, file := range torrent.Files {
// Combine the directory and filename to form a full path // Combine the directory and filename to form a full path
fullPath := filepath.Join(arr.CompletedFolder, file.Path) fullPath := filepath.Join(path, file.Name) // completedFolder/MyTVShow/MyTVShow.S01E01.720p.mkv
// Create a symbolic link if file doesn't exist // Create a symbolic link if file doesn't exist
_ = os.Symlink(filepath.Join(arr.Debrid.Folder, file.Path), fullPath) torrentPath := filepath.Join(arr.Debrid.Folder, torrent.Folder, file.Name) // debridFolder/MyTVShow/MyTVShow.S01E01.720p.mkv
_ = os.Symlink(torrentPath, fullPath)
} }
} }

View File

@@ -21,53 +21,48 @@ import (
type RSS struct { type RSS struct {
XMLName xml.Name `xml:"rss"` XMLName xml.Name `xml:"rss"`
Text string `xml:",chardata"`
Version string `xml:"version,attr"` Version string `xml:"version,attr"`
Channel Channel `xml:"channel"` Atom string `xml:"atom,attr"`
} Torznab string `xml:"torznab,attr"`
Channel struct {
type Channel struct { Text string `xml:",chardata"`
XMLName xml.Name `xml:"channel"` Link struct {
Title string `xml:"title"` Text string `xml:",chardata"`
AtomLink AtomLink `xml:"link"` Rel string `xml:"rel,attr"`
Items []Item `xml:"item"` Type string `xml:"type,attr"`
} } `xml:"link"`
Title string `xml:"title"`
type AtomLink struct { Items []Item `xml:"item"`
XMLName xml.Name `xml:"link"` } `xml:"channel"`
Rel string `xml:"rel,attr"`
Type string `xml:"type,attr"`
} }
type Item struct { type Item struct {
XMLName xml.Name `xml:"item"` Text string `xml:",chardata"`
Title string `xml:"title"` Title string `xml:"title"`
Description string `xml:"description"` Description string `xml:"description"`
GUID string `xml:"guid"` GUID string `xml:"guid"`
ProwlarrIndexer ProwlarrIndexer `xml:"prowlarrindexer"` ProwlarrIndexer struct {
Comments string `xml:"comments"` Text string `xml:",chardata"`
PubDate string `xml:"pubDate"` ID string `xml:"id,attr"`
Size int64 `xml:"size"` Type string `xml:"type,attr"`
Link string `xml:"link"` } `xml:"prowlarrindexer"`
Categories []string `xml:"category"` Comments string `xml:"comments"`
Enclosure Enclosure `xml:"enclosure"` PubDate string `xml:"pubDate"`
TorznabAttrs []TorznabAttr `xml:"torznab:attr"` Size string `xml:"size"`
} Link string `xml:"link"`
Category []string `xml:"category"`
type ProwlarrIndexer struct { Enclosure struct {
ID string `xml:"id,attr"` Text string `xml:",chardata"`
Type string `xml:"type,attr"` URL string `xml:"url,attr"`
Value string `xml:",chardata"` Length string `xml:"length,attr"`
} Type string `xml:"type,attr"`
} `xml:"enclosure"`
type Enclosure struct { TorznabAttrs []struct {
URL string `xml:"url,attr"` Text string `xml:",chardata"`
Length int64 `xml:"length,attr"` Name string `xml:"name,attr"`
Type string `xml:"type,attr"` Value string `xml:"value,attr"`
} } `xml:"attr"`
type TorznabAttr struct {
Name string `xml:"name,attr"`
Value string `xml:"value,attr"`
} }
type Proxy struct { type Proxy struct {
@@ -158,7 +153,7 @@ func getItemsHash(items []Item) map[string]string {
wg.Add(1) wg.Add(1)
go func(item Item) { go func(item Item) {
defer wg.Done() defer wg.Done()
hash := strings.ToLower(getItemHash(item)) hash := strings.ToLower(item.getHash())
if hash != "" { if hash != "" {
idHashMap.Store(item.GUID, hash) // Store directly into sync.Map idHashMap.Store(item.GUID, hash) // Store directly into sync.Map
} }
@@ -176,7 +171,7 @@ func getItemsHash(items []Item) map[string]string {
return finalMap return finalMap
} }
func getItemHash(item Item) string { func (item Item) getHash() string {
infohash := "" infohash := ""
for _, attr := range item.TorznabAttrs { for _, attr := range item.TorznabAttrs {
@@ -233,6 +228,7 @@ func (p *Proxy) ProcessXMLResponse(resp *http.Response) *http.Response {
body, err := io.ReadAll(resp.Body) body, err := io.ReadAll(resp.Body)
if err != nil { if err != nil {
log.Println("Error reading response body:", err) log.Println("Error reading response body:", err)
resp.Body = io.NopCloser(bytes.NewReader(body))
return resp return resp
} }
err = resp.Body.Close() err = resp.Body.Close()
@@ -244,8 +240,13 @@ func (p *Proxy) ProcessXMLResponse(resp *http.Response) *http.Response {
err = xml.Unmarshal(body, &rss) err = xml.Unmarshal(body, &rss)
if err != nil { if err != nil {
log.Printf("Error unmarshalling XML: %v", err) log.Printf("Error unmarshalling XML: %v", err)
resp.Body = io.NopCloser(bytes.NewReader(body))
return resp return resp
} }
indexer := ""
if len(rss.Channel.Items) > 0 {
indexer = rss.Channel.Items[0].ProwlarrIndexer.Text
}
// Step 4: Extract infohash or magnet URI, manipulate data // Step 4: Extract infohash or magnet URI, manipulate data
IdsHashMap := getItemsHash(rss.Channel.Items) IdsHashMap := getItemsHash(rss.Channel.Items)
@@ -255,7 +256,6 @@ func (p *Proxy) ProcessXMLResponse(resp *http.Response) *http.Response {
hashes = append(hashes, hash) hashes = append(hashes, hash)
} }
} }
log.Printf("Found %d infohashes/magnet links", len(hashes))
availableHashesMap := p.debrid.IsAvailable(hashes) availableHashesMap := p.debrid.IsAvailable(hashes)
newItems := make([]Item, 0, len(rss.Channel.Items)) newItems := make([]Item, 0, len(rss.Channel.Items))
@@ -273,7 +273,7 @@ func (p *Proxy) ProcessXMLResponse(resp *http.Response) *http.Response {
} }
} }
log.Printf("Report: %d/%d items are cached", len(newItems), len(rss.Channel.Items)) log.Printf("[%s Report]: %d/%d items are cached || Found %d infohash", indexer, len(newItems), len(rss.Channel.Items), len(hashes))
rss.Channel.Items = newItems rss.Channel.Items = newItems
// rss.Channel.Items = newItems // rss.Channel.Items = newItems

View File

@@ -5,8 +5,9 @@ import (
) )
var ( var (
VIDEOMATCH = "(?i)(\\.)(YUV|WMV|WEBM|VOB|VIV|SVI|ROQ|RMVB|RM|OGV|OGG|NSV|MXF|MTS|M2TS|TS|MPG|MPEG|M2V|MP2|MPE|MPV|MP4|M4P|M4V|MOV|QT|MNG|MKV|FLV|DRC|AVI|ASF|AMV)$" VIDEOMATCH = "(?i)(\\.)(YUV|WMV|WEBM|VOB|VIV|SVI|ROQ|RMVB|RM|OGV|OGG|NSV|MXF|MTS|M2TS|TS|MPG|MPEG|M2V|MP2|MPE|MPV|MP4|M4P|M4V|MOV|QT|MNG|MKV|FLV|DRC|AVI|ASF|AMV)$"
SUBMATCH = "(?i)(\\.)(SRT|SUB|SBV|ASS|VTT|TTML|DFXP|STL|SCC|CAP|SMI|TTXT|TDS|USF|JSS|SSA|PSB|RT|LRC|SSB)$" SUBMATCH = "(?i)(\\.)(SRT|SUB|SBV|ASS|VTT|TTML|DFXP|STL|SCC|CAP|SMI|TTXT|TDS|USF|JSS|SSA|PSB|RT|LRC|SSB)$"
SeasonMatch = "(?i)(?:season|s)[.\\-_\\s]?(\\d+)"
) )
func RegexMatch(regex string, value string) bool { func RegexMatch(regex string, value string) bool {
@@ -25,3 +26,13 @@ func RemoveExtension(value string) string {
return value return value
} }
} }
func RegexFind(regex string, value string) string {
re := regexp.MustCompile(regex)
match := re.FindStringSubmatch(value)
if len(match) > 0 {
return match[0]
} else {
return ""
}
}

View File

@@ -84,26 +84,25 @@ func getTorrentInfo(filePath string) (*Torrent, error) {
return torrent, nil return torrent, nil
} }
func GetLocalCache(infohashes []string, cache *common.Cache) (string, map[string]bool) { func GetLocalCache(infohashes []string, cache *common.Cache) ([]string, map[string]bool) {
result := make(map[string]bool) result := make(map[string]bool)
hashes := make([]string, len(infohashes))
if len(infohashes) == 0 { if len(infohashes) == 0 {
return "", result return hashes, result
} }
if len(infohashes) == 1 { if len(infohashes) == 1 {
if cache.Exists(infohashes[0]) { if cache.Exists(infohashes[0]) {
return "", map[string]bool{infohashes[0]: true} return hashes, map[string]bool{infohashes[0]: true}
} }
return infohashes[0], result return infohashes, result
} }
cachedHashes := cache.GetMultiple(infohashes) cachedHashes := cache.GetMultiple(infohashes)
hashes := ""
for _, h := range infohashes { for _, h := range infohashes {
_, exists := cachedHashes[h] _, exists := cachedHashes[h]
if !exists { if !exists {
hashes += h + "/" hashes = append(hashes, h)
} else { } else {
result[h] = true result[h] = true
} }

View File

@@ -47,28 +47,50 @@ func (r *RealDebrid) IsAvailable(infohashes []string) map[string]bool {
// Check if the infohashes are available in the local cache // Check if the infohashes are available in the local cache
hashes, result := GetLocalCache(infohashes, r.cache) hashes, result := GetLocalCache(infohashes, r.cache)
if hashes == "" { if len(hashes) == 0 {
// Either all the infohashes are locally cached or none are // Either all the infohashes are locally cached or none are
r.cache.AddMultiple(result) r.cache.AddMultiple(result)
return result return result
} }
url := fmt.Sprintf("%s/torrents/instantAvailability/%s", r.Host, hashes) // Divide hashes into groups of 100
resp, err := r.client.MakeRequest(http.MethodGet, url, nil) for i := 0; i < len(hashes); i += 200 {
if err != nil { end := i + 200
log.Println("Error checking availability:", err) if end > len(hashes) {
return result end = len(hashes)
} }
var data structs.RealDebridAvailabilityResponse
err = json.Unmarshal(resp, &data) // Filter out empty strings
if err != nil { validHashes := make([]string, 0, end-i)
log.Println("Error marshalling availability:", err) for _, hash := range hashes[i:end] {
return result if hash != "" {
} validHashes = append(validHashes, hash)
for _, h := range infohashes { }
hosters, exists := data[strings.ToLower(h)] }
if exists && len(hosters.Rd) > 0 {
result[h] = true // If no valid hashes in this batch, continue to the next batch
if len(validHashes) == 0 {
continue
}
hashStr := strings.Join(validHashes, "/")
url := fmt.Sprintf("%s/torrents/instantAvailability/%s", r.Host, hashStr)
resp, err := r.client.MakeRequest(http.MethodGet, url, nil)
if err != nil {
log.Println("Error checking availability:", err)
return result
}
var data structs.RealDebridAvailabilityResponse
err = json.Unmarshal(resp, &data)
if err != nil {
log.Println("Error marshalling availability:", err)
return result
}
for _, h := range hashes[i:end] {
hosters, exists := data[strings.ToLower(h)]
if exists && len(hosters.Rd) > 0 {
result[h] = true
}
} }
} }
r.cache.AddMultiple(result) // Add the results to the cache r.cache.AddMultiple(result) // Add the results to the cache
@@ -108,7 +130,7 @@ func (r *RealDebrid) CheckStatus(torrent *Torrent) (*Torrent, error) {
} else if status == "waiting_files_selection" { } else if status == "waiting_files_selection" {
files := make([]TorrentFile, 0) files := make([]TorrentFile, 0)
for _, f := range data.Files { for _, f := range data.Files {
name := f.Path name := filepath.Base(f.Path)
if !common.RegexMatch(common.VIDEOMATCH, name) && !common.RegexMatch(common.SUBMATCH, name) { if !common.RegexMatch(common.VIDEOMATCH, name) && !common.RegexMatch(common.SUBMATCH, name) {
continue continue
} }

View File

@@ -7,6 +7,33 @@ import (
type RealDebridAvailabilityResponse map[string]Hoster type RealDebridAvailabilityResponse map[string]Hoster
func (r *RealDebridAvailabilityResponse) UnmarshalJSON(data []byte) error {
// First, try to unmarshal as an object
var objectData map[string]Hoster
err := json.Unmarshal(data, &objectData)
if err == nil {
*r = objectData
return nil
}
// If that fails, try to unmarshal as an array
var arrayData []map[string]Hoster
err = json.Unmarshal(data, &arrayData)
if err != nil {
return fmt.Errorf("failed to unmarshal as both object and array: %v", err)
}
// If it's an array, use the first element
if len(arrayData) > 0 {
*r = arrayData[0]
return nil
}
// If it's an empty array, initialize as an empty map
*r = make(map[string]Hoster)
return nil
}
type Hoster struct { type Hoster struct {
Rd []map[string]FileVariant `json:"rd"` Rd []map[string]FileVariant `json:"rd"`
} }