Mirror of https://github.com/hexated/cloudstream-extensions-hexated.git, synced 2024-08-15 00:03:22 +00:00

sora: added GdbotMovies

This commit is contained in:
parent 68b5f22b53
commit ff166f1665

4 changed files with 128 additions and 67 deletions
@@ -1,5 +1,5 @@
// use an integer for version numbers
version = 97
version = 98

cloudstream {
@@ -2477,37 +2477,25 @@ object SoraExtractor : SoraStream() {
        callback: (ExtractorLink) -> Unit,
    ) {
        val query = getIndexQuery(title, year, season, episode)
        val (dotSlug, spaceSlug, slashSlug) = getTitleSlug(title)
        val (seasonSlug, episodeSlug) = getEpisodeSlug(season, episode)

        val files = app.get(
            "https://api.tgarchive.superfastsearch.zindex.eu.org/search?name=${encode(query)}&page=1",
            referer = tgarMovieAPI,
            timeout = 600L
        ).parsedSafe<TgarData>()?.documents?.filter { media ->
            (if (season == null) {
                media.name?.contains("$year") == true
            } else {
                media.name?.contains(Regex("(?i)S${seasonSlug}.?E${episodeSlug}")) == true
            }) && media.name?.contains(
                Regex("(?i)(2160p|1080p|720p)")
            ) == true && (media.mime_type in mimeType) && (media.name.replace(
                "-",
                "."
            ).contains(
                "$dotSlug",
            matchingIndex(
                media.name,
                media.mime_type,
                title,
                year,
                season,
                episode,
                true
            ) || media.name.replace(
                "-",
                " "
            ).contains("$spaceSlug", true) || media.name.replace(
                "-",
                "_"
            ).contains("$slashSlug", true) || media.name.contains("${title?.replace(" ", "_")}"))
            )
        }

        files?.map { file ->
            val size = "%.2f GB".format(bytesToGigaBytes(file.size?.toDouble() ?: return@map null))
            val size = "%.2f GB".format(bytesToGigaBytes(file.size ?: return@map null))
            val quality = getIndexQuality(file.name)
            val tags = getIndexQualityTags(file.name)
            callback.invoke(
@@ -2523,6 +2511,58 @@ object SoraExtractor : SoraStream() {

    }

    suspend fun invokeGdbotMovies(
        title: String? = null,
        year: Int? = null,
        season: Int? = null,
        episode: Int? = null,
        callback: (ExtractorLink) -> Unit,
    ) {
        val query = getIndexQuery(title, year, season, episode)
        val files = app.get("$gdbot/search?q=$query").document.select("ul.divide-y li").map {
            Triple(
                it.select("a").attr("href"),
                it.select("a").text(),
                it.select("span").text()
            )
        }.filter {
            matchingIndex(
                it.second,
                null,
                title,
                year,
                season,
                episode,
            )
        }.sortedByDescending {
            it.third.getFileSize()
        }

        files.let { file ->
            listOfNotNull(
                file.find { it.second.contains("2160p", true) },
                file.find { it.second.contains("1080p", true) }
            )
        }.apmap { file ->
            val videoUrl = extractDrivebot(file.first)
            val quality = getIndexQuality(file.second)
            val tags = getIndexQualityTags(file.second)
            val size = Regex("(\\d+\\.?\\d+\\sGB|MB)").find(file.third)?.groupValues?.get(0)?.trim()

            callback.invoke(
                ExtractorLink(
                    "GdbotMovies $tags [$size]",
                    "GdbotMovies $tags [$size]",
                    videoUrl ?: return@apmap null,
                    "",
                    quality,
                )
            )

        }

    }
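For context, a standalone sketch of the selection logic in invokeGdbotMovies above: candidates are sorted by parsed size and at most one 2160p and one 1080p entry are kept. The rows below are made-up stand-ins for the scraped gdbot results (Triple of href, name, size), and the sketch assumes the String.getFileSize() helper added later in this commit is in scope.

fun main() {
    val rows = listOf(
        Triple("/file/1", "Movie.2023.720p.WEB-DL.mkv", "700 MB"),
        Triple("/file/2", "Movie.2023.1080p.BluRay.mkv", "2.5 GB"),
        Triple("/file/3", "Movie.2023.2160p.WEB-DL.mkv", "12.4 GB"),
        Triple("/file/4", "Movie.2023.1080p.WEB-DL.mkv", "4.1 GB"),
    )

    // Largest files first, then keep at most one 2160p and one 1080p candidate,
    // mirroring the sortedByDescending + listOfNotNull(find, find) block above.
    val sorted = rows.sortedByDescending { it.third.getFileSize() ?: 0f }
    val picked = listOfNotNull(
        sorted.find { it.second.contains("2160p", true) },
        sorted.find { it.second.contains("1080p", true) },
    )
    picked.forEach { println("${it.second} -> ${it.third}") }
    // Prints the 2160p entry and the larger (4.1 GB) of the two 1080p entries.
}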

    suspend fun invokeDahmerMovies(
        title: String? = null,
        year: Int? = null,
@@ -2961,7 +3001,7 @@ data class IndexSearch(
data class TgarMedia(
    @JsonProperty("_id") val _id: Int? = null,
    @JsonProperty("name") val name: String? = null,
    @JsonProperty("size") val size: Int? = null,
    @JsonProperty("size") val size: Double? = null,
    @JsonProperty("file_unique_id") val file_unique_id: String? = null,
    @JsonProperty("mime_type") val mime_type: String? = null,
)
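TgarMedia.size is widened from Int to Double here, presumably because byte counts for files larger than about 2 GB exceed Int.MAX_VALUE. A minimal sketch of the size formatting this feeds into (the "%.2f GB" line in invokeTgarMovies above); bytesToGigaBytes below is a hypothetical stand-in for the repo's helper, assumed to divide by 1024^3.

// Hypothetical stand-in; the real bytesToGigaBytes may use a different divisor.
fun bytesToGigaBytes(bytes: Double): Double = bytes / 1_073_741_824.0

fun main() {
    val size = 3_221_225_472.0 // 3 GiB in bytes, already too large for an Int
    println("%.2f GB".format(bytesToGigaBytes(size))) // 3.00 GB
}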
@@ -29,6 +29,7 @@ import com.hexated.SoraExtractor.invokeFDMovies
import com.hexated.SoraExtractor.invokeFlixon
import com.hexated.SoraExtractor.invokeFwatayako
import com.hexated.SoraExtractor.invokeGMovies
import com.hexated.SoraExtractor.invokeGdbotMovies
import com.hexated.SoraExtractor.invokeGomovies
import com.hexated.SoraExtractor.invokeJmdkhMovies
import com.hexated.SoraExtractor.invokeJsmovies
@@ -737,6 +738,9 @@ open class SoraStream : TmdbProvider() {
            {
                if (!res.isAnime) invokeTgarMovies(res.title, res.year, res.season, res.episode, callback)
            },
            {
                if (!res.isAnime) invokeGdbotMovies(res.title, res.year, res.season, res.episode, callback)
            },
            {
                if (!res.isAnime) invokeJmdkhMovies(
                    jmdkhMovieAPI,
@@ -269,30 +269,29 @@ suspend fun extractGdflix(url: String): String? {
        app.get(fixUrl(it, base))
    } ?: return null

    // Drivebot dead
    // val iframeDrivebot2 = gdfDoc?.selectFirst("a.btn.btn-outline-warning")?.attr("href")
    // return getDrivebotLink(iframeDrivebot2)
    val iframeDrivebot2 = req.document.selectFirst("a.btn.btn-outline-warning")?.attr("href")
    return getDrivebotLink(iframeDrivebot2)

    val reqUrl = req.url
    val ssid = req.cookies["PHPSESSID"]
    val script = req.document.selectFirst("script:containsData(formData =)")?.data()
    val key = Regex("append\\(\"key\", \"(\\S+?)\"\\);").find(script ?: return null)?.groupValues?.get(1)

    val body = FormBody.Builder()
        .addEncoded("action", "direct")
        .addEncoded("key", "$key")
        .addEncoded("action_token", "cf_token")
        .build()

    val gdriveUrl = app.post(
        reqUrl, requestBody = body,
        cookies = mapOf("PHPSESSID" to "$ssid"),
        headers = mapOf(
            "x-token" to URI(reqUrl).host
        )
    ).parsedSafe<Gdflix>()?.url

    return getDirectGdrive(gdriveUrl ?: return null)
    // val reqUrl = req.url
    // val ssid = req.cookies["PHPSESSID"]
    // val script = req.document.selectFirst("script:containsData(formData =)")?.data()
    // val key = Regex("append\\(\"key\", \"(\\S+?)\"\\);").find(script ?: return null)?.groupValues?.get(1)
    //
    // val body = FormBody.Builder()
    //     .addEncoded("action", "direct")
    //     .addEncoded("key", "$key")
    //     .addEncoded("action_token", "cf_token")
    //     .build()
    //
    // val gdriveUrl = app.post(
    //     reqUrl, requestBody = body,
    //     cookies = mapOf("PHPSESSID" to "$ssid"),
    //     headers = mapOf(
    //         "x-token" to URI(reqUrl).host
    //     )
    // ).parsedSafe<Gdflix>()?.url
    //
    // return getDirectGdrive(gdriveUrl ?: return null)

}
@@ -804,28 +803,15 @@ fun searchIndex(
    response: String,
    isTrimmed: Boolean = true,
): List<IndexMedia>? {
    val (dotSlug, spaceSlug, slashSlug) = getTitleSlug(title)
    val (seasonSlug, episodeSlug) = getEpisodeSlug(season, episode)
    val files = tryParseJson<IndexSearch>(response)?.data?.files?.filter { media ->
        (if (season == null) {
            media.name?.contains("$year") == true
        } else {
            media.name?.contains(Regex("(?i)S${seasonSlug}.?E${episodeSlug}")) == true
        }) && media.name?.contains(
            Regex("(?i)(2160p|1080p)")
        ) == true && (media.mimeType in mimeType) && (media.name.replace(
            "-",
            "."
        ).contains(
            "$dotSlug",
            true
        ) || media.name.replace(
            "-",
            " "
        ).contains("$spaceSlug", true) || media.name.replace(
            "-",
            "_"
        ).contains("$slashSlug", true))
        matchingIndex(
            media.name ?: return null,
            media.mimeType ?: return null,
            title ?: return null,
            year,
            season,
            episode
        )
    }?.distinctBy { it.name }?.sortedByDescending { it.size?.toLongOrNull() ?: 0 } ?: return null

    return if (isTrimmed) {
@@ -840,6 +826,28 @@ fun searchIndex(
    }
}

fun matchingIndex(
    mediaName: String?,
    mediaMimeType: String?,
    title: String?,
    year: Int?,
    season: Int?,
    episode: Int?,
    include720: Boolean = false
): Boolean {
    val (dotSlug, spaceSlug, slashSlug) = getTitleSlug(title)
    val (seasonSlug, episodeSlug) = getEpisodeSlug(season, episode)
    return (if (season == null) {
        mediaName?.contains("$year") == true
    } else {
        mediaName?.contains(Regex("(?i)S${seasonSlug}.?E${episodeSlug}")) == true
    }) && mediaName?.contains(
        if (include720) Regex("(?i)(2160p|1080p|720p)") else Regex("(?i)(2160p|1080p)")
    ) == true && ((mediaMimeType in mimeType) || mediaName.contains(Regex("\\.mkv|\\.mp4|\\.avi"))) && (mediaName.contains(
        title?.replace(" ", "_").toString()
    ) || mediaName.contains(Regex("(?i)($dotSlug|$spaceSlug|$slashSlug)")))
}
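matchingIndex centralizes the filename check that invokeTgarMovies and searchIndex previously inlined: the name must carry the year (movies) or an SxxExx tag (shows), an accepted resolution, a known container or mime type, and some form of the title slug. A simplified, standalone sketch of that predicate follows; getTitleSlug and getEpisodeSlug are not part of this diff, so the dot-slug and zero-padded episode formatting below are assumptions.

// Simplified illustration only; the real helper also checks the spaceSlug/slashSlug
// variants and the mime type, and its slug helpers may normalize differently.
fun looksLikeIndexMatch(
    name: String,
    title: String,
    year: Int?,
    season: Int?,
    episode: Int?,
): Boolean {
    val dotSlug = title.replace(" ", ".")                 // assumed slug format
    val seasonSlug = season?.toString()?.padStart(2, '0') // assumed zero padding
    val episodeSlug = episode?.toString()?.padStart(2, '0')
    val yearOrEpisode = if (season == null) {
        name.contains("$year")
    } else {
        name.contains(Regex("(?i)S${seasonSlug}.?E${episodeSlug}"))
    }
    val resolution = name.contains(Regex("(?i)(2160p|1080p)"))
    val container = name.contains(Regex("\\.mkv|\\.mp4|\\.avi"))
    val titleHit = name.contains(dotSlug, ignoreCase = true)
    return yearOrEpisode && resolution && container && titleHit
}

fun main() {
    println(looksLikeIndexMatch("The.Last.of.Us.S01E03.1080p.WEB-DL.mkv", "The Last of Us", 2023, 1, 3)) // true
    println(looksLikeIndexMatch("The.Last.of.Us.S01E03.480p.WEB-DL.mkv", "The Last of Us", 2023, 1, 3))  // false: no 1080p/2160p tag
}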

suspend fun getConfig(): BaymoviesConfig {
    val regex = """const country = "(.*?)";
const downloadtime = "(.*?)";
@@ -902,6 +910,15 @@ fun getKisskhTitle(str: String?): String? {
    return str?.replace(Regex("[^a-zA-Z\\d]"), "-")
}

fun String.getFileSize() : Float? {
    val size = Regex("(\\d+\\.?\\d+\\sGB|MB)").find(this)?.groupValues?.get(0)?.trim()
    val num = Regex("(\\d+\\.?\\d+)").find(size ?: return null)?.groupValues?.get(0)?.toFloat() ?: return null
    return when {
        size.contains("GB") -> num * 1000000
        else -> num * 1000
    }
}
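A rough usage check for the helper above: the returned value is only a relative sort key (GB entries are scaled so they outrank everything else), not a real byte count.

fun main() {
    println("Movie 2023 [2.5 GB]".getFileSize())  // 2500000.0
    println("Movie 2023 [1.8 GB]".getFileSize())  // 1800000.0
    println("Episode 3 [700 MB]".getFileSize())   // null: the "MB" alternative matches without the digits, so no number is parsed
}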

fun getIndexQualityTags(str: String?): String {
    return Regex("\\d{3,4}[pP]\\.?(.*?)\\.(mkv|mp4|avi)").find(str ?: "")?.groupValues?.getOrNull(1)
        ?.replace(".", " ")?.trim() ?: ""
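And a quick look at what getIndexQualityTags pulls out of a typical release name: everything between the resolution tag and the container extension, with dots turned into spaces.

fun main() {
    println(getIndexQualityTags("Movie.2023.1080p.BluRay.x264-GROUP.mkv")) // BluRay x264-GROUP
    println(getIndexQualityTags("Movie 2023 2160p WEB-DL.mp4"))            // WEB-DL
}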