Skip to content

Commit

Permalink
Merge branch 'next' into RealVR
Browse files Browse the repository at this point in the history
  • Loading branch information
pops64 authored Jan 6, 2025
2 parents ee6530f + a4bd3c4 commit 6c39f2c
Show file tree
Hide file tree
Showing 15 changed files with 350 additions and 119 deletions.
2 changes: 2 additions & 0 deletions pkg/api/options.go
Original file line number Diff line number Diff line change
Expand Up @@ -73,6 +73,7 @@ type RequestSaveOptionsAdvanced struct {
UseAltSrcInFileMatching bool `json:"useAltSrcInFileMatching"`
UseAltSrcInScriptFilters bool `json:"useAltSrcInScriptFilters"`
IgnoreReleasedBefore time.Time `json:"ignoreReleasedBefore"`
SLRAuthCookie string `json:"SLRAuthCookie"`
}

type RequestSaveOptionsFunscripts struct {
Expand Down Expand Up @@ -497,6 +498,7 @@ func (i ConfigResource) saveOptionsAdvanced(req *restful.Request, resp *restful.
config.Config.Advanced.UseAltSrcInFileMatching = r.UseAltSrcInFileMatching
config.Config.Advanced.UseAltSrcInScriptFilters = r.UseAltSrcInScriptFilters
config.Config.Advanced.IgnoreReleasedBefore = r.IgnoreReleasedBefore
config.Config.Advanced.SLRAuthCookie = r.SLRAuthCookie
config.SaveConfig()

resp.WriteHeaderAndEntity(http.StatusOK, r)
Expand Down
1 change: 1 addition & 0 deletions pkg/config/config.go
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,7 @@ type ObjectConfig struct {
UseAltSrcInFileMatching bool `default:"true" json:"useAltSrcInFileMatching"`
UseAltSrcInScriptFilters bool `default:"true" json:"useAltSrcInScriptFilters"`
IgnoreReleasedBefore time.Time `json:"ignoreReleasedBefore"`
SLRAuthCookie string `default:"" json:"slrAuthCookie"`
} `json:"advanced"`
Funscripts struct {
ScrapeFunscripts bool `default:"false" json:"scrapeFunscripts"`
Expand Down
58 changes: 58 additions & 0 deletions pkg/config/scraper_list.go
Original file line number Diff line number Diff line change
Expand Up @@ -165,3 +165,61 @@ func SetSiteId(configList *[]ScraperConfig, customId string) {
}

}

// MigrateFromOfficalToCustom migrates scenes scraped by a removed official
// scraper (identified by id) over to an equivalent custom scraper entry, then
// deletes the official models.Site row.
//
// For each affected scene it rewrites the scraper id to "<site>-<customId>"
// (stripping any "vrphub-" prefix first, since official VRPHub ids embed the
// hub name), re-labels the site as "<name> <suffix>" and flags the scene for
// re-scraping. It also appends a matching ScraperConfig to the custom scraper
// list in scrapers.json so the replacement scraper exists on next start.
func MigrateFromOfficalToCustom(id string, url string, name string, company string, avatarUrl string, customId string, suffix string) error {

	db, _ := models.GetDB()
	defer db.Close()

	// Check to see if we even have scenes for this scraper. Otherwise there is
	// no need to add a custom site entry.
	var scenes []models.Scene
	db.Where("scraper_id = ?", id).Find(&scenes)
	if len(scenes) != 0 {
		common.Log.Infoln(name + ` Scenes found migration needed`)

		// Update scene data to reflect change
		for _, scene := range scenes {
			// Strip the hub prefix before appending the custom suffix: official
			// VRPHub ids look like "vrphub-vrhush", while custom scraper ids are
			// "<site>-<customId>". (The previous code passed the whole
			// concatenation as the prefix argument, so nothing was ever trimmed
			// and the custom suffix was never appended.)
			scene.ScraperId = strings.TrimPrefix(id, "vrphub-") + "-" + customId
			scene.Site = name + " " + suffix
			scene.NeedsUpdate = true

			if err := db.Save(&scene).Error; err != nil {
				return err
			}
		}

		// Load the custom scrapers
		var scraperConfig ScraperList
		scraperConfig.Load()

		// Data taken from the official scraper definition, updated to fix url change
		scraper := ScraperConfig{URL: url, Name: name, Company: company, AvatarUrl: avatarUrl}
		// Add the new custom site to the matching hub's custom scraper list
		switch customId {
		case "slr":
			scraperConfig.CustomScrapers.SlrScrapers = append(scraperConfig.CustomScrapers.SlrScrapers, scraper)
		case "povr":
			scraperConfig.CustomScrapers.PovrScrapers = append(scraperConfig.CustomScrapers.PovrScrapers, scraper)
		case "vrporn":
			scraperConfig.CustomScrapers.VrpornScrapers = append(scraperConfig.CustomScrapers.VrpornScrapers, scraper)
		case "vrphub":
			scraperConfig.CustomScrapers.VrphubScrapers = append(scraperConfig.CustomScrapers.VrphubScrapers, scraper)
		}

		// Save the new list file; surface marshal/write errors instead of
		// silently dropping them so a failed migration is visible.
		fName := filepath.Join(common.AppDir, "scrapers.json")
		list, err := json.MarshalIndent(scraperConfig, "", " ")
		if err != nil {
			return err
		}
		if err := os.WriteFile(fName, list, 0644); err != nil {
			return err
		}

		common.Log.Infoln(name + ` migration complete. Please restart XBVR and run ` + name + ` scraper to complete migration`)

	} else {

		common.Log.Infoln(`No ` + name + ` Scenes found no migration needed`)

	}

	return db.Delete(&models.Site{ID: id}).Error
}
12 changes: 0 additions & 12 deletions pkg/config/scrapers.json
Original file line number Diff line number Diff line change
Expand Up @@ -176,12 +176,6 @@
"company": "",
"avatar_url": ""
},
{
"url": "https://www.sexlikereal.com/studios/fuckpassvr",
"name": "FuckPassVR",
"company": "FuckPassVR",
"avatar_url": "https://cdn-vr.sexlikereal.com/images/studio_creatives/logotypes/1/352/logo_crop_1635153994.png"
},
{
"url": "https://www.sexlikereal.com/studios/heathering",
"name": "Heathering",
Expand Down Expand Up @@ -320,12 +314,6 @@
"company": "POV Central",
"avatar_url": "https://mcdn.vrporn.com/files/20191125091909/POVCentralLogo.jpg"
},
{
"url": "https://www.sexlikereal.com/studios/ps-porn",
"name": "PS-Porn",
"company": "Paula Shy",
"avatar_url": "https://mcdn.vrporn.com/files/20201221090642/PS-Porn-400x400.jpg"
},
{
"url": "https://www.sexlikereal.com/studios/realhotvr",
"name": "RealHotVR",
Expand Down
12 changes: 12 additions & 0 deletions pkg/migrations/migrations.go
Original file line number Diff line number Diff line change
Expand Up @@ -2072,6 +2072,18 @@ func Migrate() {
return nil
},
},
{
ID: "0081-Offical-Site-Removals-With-Main-Site-Aviable",
Migrate: func(tx *gorm.DB) error {

err := config.MigrateFromOfficalToCustom("ps-porn", "https://www.sexlikereal.com/studios/ps-porn-vr", "PS-Porn", "Paula Shy", "https://mcdn.vrporn.com/files/20201221090642/PS-Porn-400x400.jpg", "slr", "(SLR)")
err = config.MigrateFromOfficalToCustom("fuckpassvr", "https://www.sexlikereal.com/studios/fuckpassvr", "FuckPassVR", "FuckPassVR", "https://cdn-vr.sexlikereal.com/images/studio_creatives/logotypes/1/352/logo_crop_1635153994.png", "slr", "(SLR)")
err = config.MigrateFromOfficalToCustom("vrphub-vrhush", "https://vrphub.com/category/vr-hush", "VRHush", "VRHush", "https://cdn-nexpectation.secure.yourpornpartner.com/sites/vrh/favicon/apple-touch-icon-180x180.png", "vrphub", "(VRP Hub)")
err = config.MigrateFromOfficalToCustom("vrphub-stripzvr", "https://vrphub.com/category/stripzvr/", "StripzVR - VRP Hub", "StripzVR", "https://www.stripzvr.com/wp-content/uploads/2018/09/cropped-favicon-192x192.jpg", "vrphub", "(VRP Hub)")
return err

},
},
})

if err := m.Migrate(); err != nil {
Expand Down
1 change: 1 addition & 0 deletions pkg/scrape/badoink.go
Original file line number Diff line number Diff line change
Expand Up @@ -287,6 +287,7 @@ func VRCosplayX(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out

func BabeVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
return BadoinkSite(wg, updateSite, knownScenes, out, singleSceneURL, "babevr", "BabeVR", "Badoink", "https://babevr.com/vrpornvideos?order=newest", singeScrapeAdditionalInfo, limitScraping, "", true)

}

func init() {
Expand Down
2 changes: 1 addition & 1 deletion pkg/scrape/fuckpassvr.go
Original file line number Diff line number Diff line change
Expand Up @@ -130,5 +130,5 @@ func FuckPassVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out
}

func init() {
registerScraper("fuckpassvr-native", "FuckPassVR", "https://www.fuckpassvr.com/_nuxt/img/logo_bw.1fac7d1.png", "fuckpassvr.com", FuckPassVR)
registerScraper("fuckpassvr-native", "FuckPassVR", "https://www.fuckpassvr.com/favicon.png", "fuckpassvr.com", FuckPassVR)
}
126 changes: 126 additions & 0 deletions pkg/scrape/kinkvr.go
Original file line number Diff line number Diff line change
@@ -0,0 +1,126 @@
package scrape

import (
"strings"

"github.com/gocolly/colly/v2"
"github.com/mozillazg/go-slugify"
"github.com/nleeper/goment"
"github.com/thoas/go-funk"
"github.com/xbapps/xbvr/pkg/models"
)

// KinkVR scrapes kinkvr.com. It walks the paginated video grid (or a single
// scene when singleSceneURL is set), parses each scene page into a
// models.ScrapedScene, and emits results on out. Pagination stops when
// limitScraping is true. Returns nil; scrape errors surface via logging only.
func KinkVR(wg *models.ScrapeWG, updateSite bool, knownScenes []string, out chan<- models.ScrapedScene, singleSceneURL string, singeScrapeAdditionalInfo string, limitScraping bool) error {
	defer wg.Done()
	scraperID := "kinkvr"
	siteID := "KinkVR"
	logScrapeStart(scraperID, siteID)

	sceneCollector := createCollector("kinkvr.com")
	siteCollector := createCollector("kinkvr.com")

	// These cookies are needed for age verification.
	siteCollector.OnRequest(func(r *colly.Request) {
		r.Headers.Set("Cookie", "agreedToDisclaimer=true")
	})

	sceneCollector.OnRequest(func(r *colly.Request) {
		r.Headers.Set("Cookie", "agreedToDisclaimer=true")
	})

	sceneCollector.OnHTML(`html`, func(e *colly.HTMLElement) {
		sc := models.ScrapedScene{}
		sc.ScraperID = scraperID
		sc.SceneType = "VR"
		sc.Studio = "Badoink"
		sc.Site = siteID
		sc.SiteID = ""
		sc.HomepageURL = e.Request.URL.String()

		// Cover Url
		coverURL := e.ChildAttr("div#povVideoContainer dl8-video", "poster")
		sc.Covers = append(sc.Covers, coverURL)

		// Gallery
		e.ForEach(`div.owl-carousel div.item`, func(id int, e *colly.HTMLElement) {
			sc.Gallery = append(sc.Gallery, e.ChildAttr("img", "src"))
		})

		// In case we scrape a single scene, use one of the gallery images for
		// the cover. Guard the index: a page with no carousel items would
		// otherwise panic with index out of range.
		if singleSceneURL != "" && len(sc.Gallery) > 0 {
			sc.Covers = append(sc.Covers, sc.Gallery[0])
		}

		// Cast
		sc.ActorDetails = make(map[string]models.ActorDetails)
		e.ForEach(`table.video-description-list tbody`, func(id int, e *colly.HTMLElement) {
			// Cast
			e.ForEach(`tr:nth-child(1) a`, func(id int, e *colly.HTMLElement) {
				if strings.TrimSpace(e.Text) != "" {
					sc.Cast = append(sc.Cast, strings.TrimSpace(e.Text))
					sc.ActorDetails[strings.TrimSpace(e.Text)] = models.ActorDetails{Source: sc.ScraperID + " scrape", ProfileUrl: e.Request.AbsoluteURL(e.Attr("href"))}
				}
			})

			// Tags
			e.ForEach(`tr:nth-child(2) a`, func(id int, e *colly.HTMLElement) {
				tag := strings.TrimSpace(e.Text)
				sc.Tags = append(sc.Tags, tag)
			})

			// Date
			tmpDate, _ := goment.New(strings.TrimSpace(e.ChildText(`tr:nth-child(3) td:last-child`)), "MMMM DD, YYYY")
			sc.Released = tmpDate.Format("YYYY-MM-DD")
		})

		// Synopsis
		sc.Synopsis = strings.TrimSpace(e.ChildText("div.accordion-body"))

		// Title
		sc.Title = e.ChildText("h1.page-title")

		// Scene ID -- Uses the ending number of the video url instead of the ID used for the directory that the video link is stored in (maintains backwards compatibility with old scenes)
		tmpUrlStr, _ := strings.CutSuffix(e.Request.URL.String(), "/")
		tmp := strings.Split(tmpUrlStr, "/")
		siteIDstr := strings.Split(tmp[len(tmp)-1], "-")
		sc.SiteID = siteIDstr[len(siteIDstr)-1]

		if sc.SiteID != "" {
			sc.SceneID = slugify.Slugify(sc.Site) + "-" + sc.SiteID

			// save only if we got a SceneID
			out <- sc
		}
	})

	// Follow pagination until the "Next" link is disabled (unless limited).
	siteCollector.OnHTML(`a.page-link[aria-label="Next"]:not(.disabled)`, func(e *colly.HTMLElement) {
		if !limitScraping {
			pageURL := e.Request.AbsoluteURL(e.Attr("href"))
			siteCollector.Visit(pageURL)
		}
	})

	siteCollector.OnHTML(`div.video-grid-view a`, func(e *colly.HTMLElement) {
		sceneURL := e.Request.AbsoluteURL(e.Attr("href"))
		// If scene exists in database, there's no need to scrape
		if !funk.ContainsString(knownScenes, sceneURL) {
			sceneCollector.Visit(sceneURL)
		}
	})

	if singleSceneURL != "" {
		sceneCollector.Visit(singleSceneURL)
	} else {
		siteCollector.Visit("https://kinkvr.com/videos/page1")
	}

	if updateSite {
		updateSiteLastUpdate(scraperID)
	}
	logScrapeFinished(scraperID, siteID)
	return nil
}

// Register the KinkVR scraper with the global scraper registry at package load.
func init() {
	const (
		scraperID = "kinkvr"
		siteName  = "KinkVR"
		avatarURL = "https://static.rlcontent.com/shared/KINK/skins/web-10/branding/favicon.png"
		domain    = "kinkvr.com"
	)
	registerScraper(scraperID, siteName, avatarURL, domain, KinkVR)
}
Loading

0 comments on commit 6c39f2c

Please sign in to comment.