mirror of https://github.com/mihonapp/mihon.git
Run default Android Studio formatter on code
(cherry picked from commit 3ecc883944)
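
The diff below is a pure restyle from running the IDE's default Kotlin formatter: a space after `if`/`when`, a space before `:` in class headers and supertype lists, chained calls written as `}.map { ... }` rather than `} .map { ... }`, ranges as `1..n`, and continuation indents normalized. As a minimal sketch of those rules (the `Cache`/`SampleCache` names below are invented for illustration and do not appear in the diff):

// Hypothetical example; only the spacing differs before and after reformatting.
interface Cache {
    fun lookup(key: String): String?
}

// Pre-format style seen in the removed lines: "class SampleCache(...): Cache",
// "if(key.isNotBlank())", "} .mapNotNull {".
class SampleCache(private val store: Map<String, String>) : Cache { // space before ':' in the supertype list
    override fun lookup(key: String): String? =
        if (key.isNotBlank()) store[key] else null // space after 'if'
}

fun main() {
    val cache = SampleCache(mapOf("a" to "1", "b" to "2"))
    val values = listOf(" a ", "b", "c").map {
        it.trim()
    }.mapNotNull { cache.lookup(it) } // "}.mapNotNull", not "} .mapNotNull"
    println(values) // prints [1, 2]
}

Running Code > Reformat Code over a file like this should only change whitespace, which is all the hunks below do.
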
		| @@ -51,6 +51,7 @@ class ChapterCache(private val context: Context) { | ||||
|     /** Cache class used for cache management.  */ | ||||
|     // --> EH | ||||
|     private var diskCache = setupDiskCache(prefs.eh_cacheSize().getOrDefault().toLong()) | ||||
|  | ||||
|     init { | ||||
|         prefs.eh_cacheSize().asObservable().skip(1).subscribe { | ||||
|             // Save old cache for destruction later | ||||
| @@ -83,9 +84,9 @@ class ChapterCache(private val context: Context) { | ||||
|     // Cache size is in MB | ||||
|     private fun setupDiskCache(cacheSize: Long): DiskLruCache { | ||||
|         return DiskLruCache.open(File(context.cacheDir, PARAMETER_CACHE_DIRECTORY), | ||||
|             PARAMETER_APP_VERSION, | ||||
|             PARAMETER_VALUE_COUNT, | ||||
|             cacheSize * 1024 * 1024) | ||||
|                 PARAMETER_APP_VERSION, | ||||
|                 PARAMETER_VALUE_COUNT, | ||||
|                 cacheSize * 1024 * 1024) | ||||
|     } | ||||
|     // <-- EH | ||||
|  | ||||
|   | ||||
| @@ -12,7 +12,7 @@ interface Category : Serializable { | ||||
|  | ||||
|     var flags: Int | ||||
|  | ||||
|     var mangaOrder:List<Long> | ||||
|     var mangaOrder: List<Long> | ||||
|  | ||||
|     val nameLower: String | ||||
|         get() = name.toLowerCase() | ||||
|   | ||||
| @@ -1,505 +0,0 @@ | ||||
| package eu.kanade.tachiyomi.data.track.myanimelist | ||||
|  | ||||
| import android.net.Uri | ||||
| import eu.kanade.tachiyomi.data.database.models.Track | ||||
| import eu.kanade.tachiyomi.data.track.TrackManager | ||||
| import eu.kanade.tachiyomi.data.track.model.TrackSearch | ||||
| import eu.kanade.tachiyomi.network.GET | ||||
| import eu.kanade.tachiyomi.network.POST | ||||
| import eu.kanade.tachiyomi.network.asObservable | ||||
| import eu.kanade.tachiyomi.network.asObservableSuccess | ||||
| import eu.kanade.tachiyomi.util.lang.toCalendar | ||||
| import eu.kanade.tachiyomi.util.selectInt | ||||
| import eu.kanade.tachiyomi.util.selectText | ||||
| import java.io.BufferedReader | ||||
| import java.io.InputStreamReader | ||||
| import java.text.SimpleDateFormat | ||||
| import java.util.Calendar | ||||
| import java.util.GregorianCalendar | ||||
| import java.util.Locale | ||||
| import java.util.zip.GZIPInputStream | ||||
| import okhttp3.FormBody | ||||
| import okhttp3.MediaType.Companion.toMediaTypeOrNull | ||||
| import okhttp3.OkHttpClient | ||||
| import okhttp3.RequestBody | ||||
| import okhttp3.RequestBody.Companion.toRequestBody | ||||
| import okhttp3.Response | ||||
| import org.json.JSONObject | ||||
| import org.jsoup.Jsoup | ||||
| import org.jsoup.nodes.Document | ||||
| import org.jsoup.nodes.Element | ||||
| import org.jsoup.parser.Parser | ||||
| import rx.Observable | ||||
|  | ||||
| class MyAnimeListApi(private val client: OkHttpClient, interceptor: MyAnimeListInterceptor) { | ||||
|  | ||||
|     private val authClient = client.newBuilder().addInterceptor(interceptor).build() | ||||
|  | ||||
|     fun search(query: String): Observable<List<TrackSearch>> { | ||||
|         return if (query.startsWith(PREFIX_MY)) { | ||||
|             val realQuery = query.removePrefix(PREFIX_MY) | ||||
|             getList() | ||||
|                 .flatMap { Observable.from(it) } | ||||
|                 .filter { it.title.contains(realQuery, true) } | ||||
|                 .toList() | ||||
|         } else { | ||||
|             client.newCall(GET(searchUrl(query))) | ||||
|                 .asObservable() | ||||
|                 .flatMap { response -> | ||||
|                     Observable.from( | ||||
|                         Jsoup.parse(response.consumeBody()) | ||||
|                             .select("div.js-categories-seasonal.js-block-list.list") | ||||
|                             .select("table").select("tbody") | ||||
|                             .select("tr").drop(1) | ||||
|                     ) | ||||
|                 } | ||||
|                 .filter { row -> | ||||
|                     row.select(TD)[2].text() != "Novel" | ||||
|                 } | ||||
|                 .map { row -> | ||||
|                     TrackSearch.create(TrackManager.MYANIMELIST).apply { | ||||
|                         title = row.searchTitle() | ||||
|                         media_id = row.searchMediaId() | ||||
|                         total_chapters = row.searchTotalChapters() | ||||
|                         summary = row.searchSummary() | ||||
|                         cover_url = row.searchCoverUrl() | ||||
|                         tracking_url = mangaUrl(media_id) | ||||
|                         publishing_status = row.searchPublishingStatus() | ||||
|                         publishing_type = row.searchPublishingType() | ||||
|                         start_date = row.searchStartDate() | ||||
|                     } | ||||
|                 } | ||||
|                 .toList() | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     fun addLibManga(track: Track): Observable<Track> { | ||||
|         return Observable.defer { | ||||
|             authClient.newCall(POST(url = addUrl(), body = mangaPostPayload(track))) | ||||
|                 .asObservableSuccess() | ||||
|                 .map { track } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     fun updateLibManga(track: Track): Observable<Track> { | ||||
|         return Observable.defer { | ||||
|             // Get track data | ||||
|             val response = authClient.newCall(GET(url = editPageUrl(track.media_id))).execute() | ||||
|             val editData = response.use { | ||||
|                 val page = Jsoup.parse(it.consumeBody()) | ||||
|  | ||||
|                 // Extract track data from MAL page | ||||
|                 extractDataFromEditPage(page).apply { | ||||
|                     // Apply changes to the just fetched data | ||||
|                     copyPersonalFrom(track) | ||||
|                 } | ||||
|             } | ||||
|  | ||||
|             // Update remote | ||||
|             authClient.newCall(POST(url = editPageUrl(track.media_id), body = mangaEditPostBody(editData))) | ||||
|                 .asObservableSuccess() | ||||
|                 .map { | ||||
|                     track | ||||
|                 } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     fun findLibManga(track: Track): Observable<Track?> { | ||||
|         return authClient.newCall(GET(url = editPageUrl(track.media_id))) | ||||
|             .asObservable() | ||||
|             .map { response -> | ||||
|                 var libTrack: Track? = null | ||||
|                 response.use { | ||||
|                     if (it.priorResponse?.isRedirect != true) { | ||||
|                         val trackForm = Jsoup.parse(it.consumeBody()) | ||||
|  | ||||
|                         libTrack = Track.create(TrackManager.MYANIMELIST).apply { | ||||
|                             last_chapter_read = trackForm.select("#add_manga_num_read_chapters").`val`().toInt() | ||||
|                             total_chapters = trackForm.select("#totalChap").text().toInt() | ||||
|                             status = trackForm.select("#add_manga_status > option[selected]").`val`().toInt() | ||||
|                             score = trackForm.select("#add_manga_score > option[selected]").`val`().toFloatOrNull() | ||||
|                                 ?: 0f | ||||
|                             started_reading_date = trackForm.searchDatePicker("#add_manga_start_date") | ||||
|                             finished_reading_date = trackForm.searchDatePicker("#add_manga_finish_date") | ||||
|                         } | ||||
|                     } | ||||
|                 } | ||||
|                 libTrack | ||||
|             } | ||||
|     } | ||||
|  | ||||
|     fun getLibManga(track: Track): Observable<Track> { | ||||
|         return findLibManga(track) | ||||
|             .map { it ?: throw Exception("Could not find manga") } | ||||
|     } | ||||
|  | ||||
|     fun login(username: String, password: String): String { | ||||
|         val csrf = getSessionInfo() | ||||
|  | ||||
|         login(username, password, csrf) | ||||
|  | ||||
|         return csrf | ||||
|     } | ||||
|  | ||||
|     private fun getSessionInfo(): String { | ||||
|         val response = client.newCall(GET(loginUrl())).execute() | ||||
|  | ||||
|         return Jsoup.parse(response.consumeBody()) | ||||
|             .select("meta[name=csrf_token]") | ||||
|             .attr("content") | ||||
|     } | ||||
|  | ||||
|     private fun login(username: String, password: String, csrf: String) { | ||||
|         val response = client.newCall(POST(url = loginUrl(), body = loginPostBody(username, password, csrf))).execute() | ||||
|  | ||||
|         response.use { | ||||
|             if (response.priorResponse?.code != 302) throw Exception("Authentication error") | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     private fun getList(): Observable<List<TrackSearch>> { | ||||
|         return getListUrl() | ||||
|             .flatMap { url -> | ||||
|                 getListXml(url) | ||||
|             } | ||||
|             .flatMap { doc -> | ||||
|                 Observable.from(doc.select("manga")) | ||||
|             } | ||||
|             .map { | ||||
|                 TrackSearch.create(TrackManager.MYANIMELIST).apply { | ||||
|                     title = it.selectText("manga_title")!! | ||||
|                     media_id = it.selectInt("manga_mangadb_id") | ||||
|                     last_chapter_read = it.selectInt("my_read_chapters") | ||||
|                     status = getStatus(it.selectText("my_status")!!) | ||||
|                     score = it.selectInt("my_score").toFloat() | ||||
|                     total_chapters = it.selectInt("manga_chapters") | ||||
|                     tracking_url = mangaUrl(media_id) | ||||
|                     started_reading_date = it.searchDateXml("my_start_date") | ||||
|                     finished_reading_date = it.searchDateXml("my_finish_date") | ||||
|                 } | ||||
|             } | ||||
|             .toList() | ||||
|     } | ||||
|  | ||||
|     private fun getListUrl(): Observable<String> { | ||||
|         return authClient.newCall(POST(url = exportListUrl(), body = exportPostBody())) | ||||
|             .asObservable() | ||||
|             .map { response -> | ||||
|                 baseUrl + Jsoup.parse(response.consumeBody()) | ||||
|                     .select("div.goodresult") | ||||
|                     .select("a") | ||||
|                     .attr("href") | ||||
|             } | ||||
|     } | ||||
|  | ||||
|     private fun getListXml(url: String): Observable<Document> { | ||||
|         return authClient.newCall(GET(url)) | ||||
|             .asObservable() | ||||
|             .map { response -> | ||||
|                 Jsoup.parse(response.consumeXmlBody(), "", Parser.xmlParser()) | ||||
|             } | ||||
|     } | ||||
|  | ||||
|     private fun Response.consumeBody(): String? { | ||||
|         use { | ||||
|             if (it.code != 200) throw Exception("HTTP error ${it.code}") | ||||
|             return it.body?.string() | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     private fun Response.consumeXmlBody(): String? { | ||||
|         use { res -> | ||||
|             if (res.code != 200) throw Exception("Export list error") | ||||
|             BufferedReader(InputStreamReader(GZIPInputStream(res.body?.source()?.inputStream()))).use { reader -> | ||||
|                 val sb = StringBuilder() | ||||
|                 reader.forEachLine { line -> | ||||
|                     sb.append(line) | ||||
|                 } | ||||
|                 return sb.toString() | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     private fun extractDataFromEditPage(page: Document): MyAnimeListEditData { | ||||
|         val tables = page.select("form#main-form table") | ||||
|  | ||||
|         return MyAnimeListEditData( | ||||
|             entry_id = tables[0].select("input[name=entry_id]").`val`(), // Always 0 | ||||
|             manga_id = tables[0].select("#manga_id").`val`(), | ||||
|             status = tables[0].select("#add_manga_status > option[selected]").`val`(), | ||||
|             num_read_volumes = tables[0].select("#add_manga_num_read_volumes").`val`(), | ||||
|             last_completed_vol = tables[0].select("input[name=last_completed_vol]").`val`(), // Always empty | ||||
|             num_read_chapters = tables[0].select("#add_manga_num_read_chapters").`val`(), | ||||
|             score = tables[0].select("#add_manga_score > option[selected]").`val`(), | ||||
|             start_date_month = tables[0].select("#add_manga_start_date_month > option[selected]").`val`(), | ||||
|             start_date_day = tables[0].select("#add_manga_start_date_day > option[selected]").`val`(), | ||||
|             start_date_year = tables[0].select("#add_manga_start_date_year > option[selected]").`val`(), | ||||
|             finish_date_month = tables[0].select("#add_manga_finish_date_month > option[selected]").`val`(), | ||||
|             finish_date_day = tables[0].select("#add_manga_finish_date_day > option[selected]").`val`(), | ||||
|             finish_date_year = tables[0].select("#add_manga_finish_date_year > option[selected]").`val`(), | ||||
|             tags = tables[1].select("#add_manga_tags").`val`(), | ||||
|             priority = tables[1].select("#add_manga_priority > option[selected]").`val`(), | ||||
|             storage_type = tables[1].select("#add_manga_storage_type > option[selected]").`val`(), | ||||
|             num_retail_volumes = tables[1].select("#add_manga_num_retail_volumes").`val`(), | ||||
|             num_read_times = tables[1].select("#add_manga_num_read_times").`val`(), | ||||
|             reread_value = tables[1].select("#add_manga_reread_value > option[selected]").`val`(), | ||||
|             comments = tables[1].select("#add_manga_comments").`val`(), | ||||
|             is_asked_to_discuss = tables[1].select("#add_manga_is_asked_to_discuss > option[selected]").`val`(), | ||||
|             sns_post_type = tables[1].select("#add_manga_sns_post_type > option[selected]").`val`() | ||||
|         ) | ||||
|     } | ||||
|  | ||||
|     companion object { | ||||
|         const val CSRF = "csrf_token" | ||||
|  | ||||
|         private const val baseUrl = "https://myanimelist.net" | ||||
|         private const val baseMangaUrl = "$baseUrl/manga/" | ||||
|         private const val baseModifyListUrl = "$baseUrl/ownlist/manga" | ||||
|         private const val PREFIX_MY = "my:" | ||||
|         private const val TD = "td" | ||||
|  | ||||
|         private fun mangaUrl(remoteId: Int) = baseMangaUrl + remoteId | ||||
|  | ||||
|         private fun loginUrl() = Uri.parse(baseUrl).buildUpon() | ||||
|             .appendPath("login.php") | ||||
|             .toString() | ||||
|  | ||||
|         private fun searchUrl(query: String): String { | ||||
|             val col = "c[]" | ||||
|             return Uri.parse(baseUrl).buildUpon() | ||||
|                 .appendPath("manga.php") | ||||
|                 .appendQueryParameter("q", query) | ||||
|                 .appendQueryParameter(col, "a") | ||||
|                 .appendQueryParameter(col, "b") | ||||
|                 .appendQueryParameter(col, "c") | ||||
|                 .appendQueryParameter(col, "d") | ||||
|                 .appendQueryParameter(col, "e") | ||||
|                 .appendQueryParameter(col, "g") | ||||
|                 .toString() | ||||
|         } | ||||
|  | ||||
|         private fun exportListUrl() = Uri.parse(baseUrl).buildUpon() | ||||
|             .appendPath("panel.php") | ||||
|             .appendQueryParameter("go", "export") | ||||
|             .toString() | ||||
|  | ||||
|         private fun editPageUrl(mediaId: Int) = Uri.parse(baseModifyListUrl).buildUpon() | ||||
|             .appendPath(mediaId.toString()) | ||||
|             .appendPath("edit") | ||||
|             .toString() | ||||
|  | ||||
|         private fun addUrl() = Uri.parse(baseModifyListUrl).buildUpon() | ||||
|             .appendPath("add.json") | ||||
|             .toString() | ||||
|  | ||||
|         private fun loginPostBody(username: String, password: String, csrf: String): RequestBody { | ||||
|             return FormBody.Builder() | ||||
|                 .add("user_name", username) | ||||
|                 .add("password", password) | ||||
|                 .add("cookie", "1") | ||||
|                 .add("sublogin", "Login") | ||||
|                 .add("submit", "1") | ||||
|                 .add(CSRF, csrf) | ||||
|                 .build() | ||||
|         } | ||||
|  | ||||
|         private fun exportPostBody(): RequestBody { | ||||
|             return FormBody.Builder() | ||||
|                 .add("type", "2") | ||||
|                 .add("subexport", "Export My List") | ||||
|                 .build() | ||||
|         } | ||||
|  | ||||
|         private fun mangaPostPayload(track: Track): RequestBody { | ||||
|             val body = JSONObject() | ||||
|                 .put("manga_id", track.media_id) | ||||
|                 .put("status", track.status) | ||||
|                 .put("score", track.score) | ||||
|                 .put("num_read_chapters", track.last_chapter_read) | ||||
|  | ||||
|             return body.toString().toRequestBody("application/json; charset=utf-8".toMediaTypeOrNull()) | ||||
|         } | ||||
|  | ||||
|         private fun mangaEditPostBody(track: MyAnimeListEditData): RequestBody { | ||||
|             return FormBody.Builder() | ||||
|                 .add("entry_id", track.entry_id) | ||||
|                 .add("manga_id", track.manga_id) | ||||
|                 .add("add_manga[status]", track.status) | ||||
|                 .add("add_manga[num_read_volumes]", track.num_read_volumes) | ||||
|                 .add("last_completed_vol", track.last_completed_vol) | ||||
|                 .add("add_manga[num_read_chapters]", track.num_read_chapters) | ||||
|                 .add("add_manga[score]", track.score) | ||||
|                 .add("add_manga[start_date][month]", track.start_date_month) | ||||
|                 .add("add_manga[start_date][day]", track.start_date_day) | ||||
|                 .add("add_manga[start_date][year]", track.start_date_year) | ||||
|                 .add("add_manga[finish_date][month]", track.finish_date_month) | ||||
|                 .add("add_manga[finish_date][day]", track.finish_date_day) | ||||
|                 .add("add_manga[finish_date][year]", track.finish_date_year) | ||||
|                 .add("add_manga[tags]", track.tags) | ||||
|                 .add("add_manga[priority]", track.priority) | ||||
|                 .add("add_manga[storage_type]", track.storage_type) | ||||
|                 .add("add_manga[num_retail_volumes]", track.num_retail_volumes) | ||||
|                 .add("add_manga[num_read_times]", track.num_read_times) | ||||
|                 .add("add_manga[reread_value]", track.reread_value) | ||||
|                 .add("add_manga[comments]", track.comments) | ||||
|                 .add("add_manga[is_asked_to_discuss]", track.is_asked_to_discuss) | ||||
|                 .add("add_manga[sns_post_type]", track.sns_post_type) | ||||
|                 .add("submitIt", track.submitIt) | ||||
|                 .build() | ||||
|         } | ||||
|  | ||||
|         private fun Element.searchDateXml(field: String): Long { | ||||
|             val text = selectText(field, "0000-00-00")!! | ||||
|             // MAL sets the data to 0000-00-00 when date is invalid or missing | ||||
|             if (text == "0000-00-00") { | ||||
|                 return 0L | ||||
|             } | ||||
|  | ||||
|             return SimpleDateFormat("yyyy-MM-dd", Locale.US).parse(text)?.time ?: 0L | ||||
|         } | ||||
|  | ||||
|         private fun Element.searchDatePicker(id: String): Long { | ||||
|             val month = select(id + "_month > option[selected]").`val`().toIntOrNull() | ||||
|             val day = select(id + "_day > option[selected]").`val`().toIntOrNull() | ||||
|             val year = select(id + "_year > option[selected]").`val`().toIntOrNull() | ||||
|             if (year == null || month == null || day == null) { | ||||
|                 return 0L | ||||
|             } | ||||
|  | ||||
|             return GregorianCalendar(year, month - 1, day).timeInMillis | ||||
|         } | ||||
|  | ||||
|         private fun Element.searchTitle() = select("strong").text()!! | ||||
|  | ||||
|         private fun Element.searchTotalChapters() = if (select(TD)[4].text() == "-") 0 else select(TD)[4].text().toInt() | ||||
|  | ||||
|         private fun Element.searchCoverUrl() = select("img") | ||||
|             .attr("data-src") | ||||
|             .split("\\?")[0] | ||||
|             .replace("/r/50x70/", "/") | ||||
|  | ||||
|         private fun Element.searchMediaId() = select("div.picSurround") | ||||
|             .select("a").attr("id") | ||||
|             .replace("sarea", "") | ||||
|             .toInt() | ||||
|  | ||||
|         private fun Element.searchSummary() = select("div.pt4") | ||||
|             .first() | ||||
|             .ownText()!! | ||||
|  | ||||
|         private fun Element.searchPublishingStatus() = if (select(TD).last().text() == "-") "Publishing" else "Finished" | ||||
|  | ||||
|         private fun Element.searchPublishingType() = select(TD)[2].text()!! | ||||
|  | ||||
|         private fun Element.searchStartDate() = select(TD)[6].text()!! | ||||
|  | ||||
|         private fun getStatus(status: String) = when (status) { | ||||
|             "Reading" -> 1 | ||||
|             "Completed" -> 2 | ||||
|             "On-Hold" -> 3 | ||||
|             "Dropped" -> 4 | ||||
|             "Plan to Read" -> 6 | ||||
|             else -> 1 | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     private class MyAnimeListEditData( | ||||
|         // entry_id | ||||
|         var entry_id: String, | ||||
|  | ||||
|         // manga_id | ||||
|         var manga_id: String, | ||||
|  | ||||
|         // add_manga[status] | ||||
|         var status: String, | ||||
|  | ||||
|         // add_manga[num_read_volumes] | ||||
|         var num_read_volumes: String, | ||||
|  | ||||
|         // last_completed_vol | ||||
|         var last_completed_vol: String, | ||||
|  | ||||
|         // add_manga[num_read_chapters] | ||||
|         var num_read_chapters: String, | ||||
|  | ||||
|         // add_manga[score] | ||||
|         var score: String, | ||||
|  | ||||
|         // add_manga[start_date][month] | ||||
|         var start_date_month: String, // [1-12] | ||||
|  | ||||
|         // add_manga[start_date][day] | ||||
|         var start_date_day: String, | ||||
|  | ||||
|         // add_manga[start_date][year] | ||||
|         var start_date_year: String, | ||||
|  | ||||
|         // add_manga[finish_date][month] | ||||
|         var finish_date_month: String, // [1-12] | ||||
|  | ||||
|         // add_manga[finish_date][day] | ||||
|         var finish_date_day: String, | ||||
|  | ||||
|         // add_manga[finish_date][year] | ||||
|         var finish_date_year: String, | ||||
|  | ||||
|         // add_manga[tags] | ||||
|         var tags: String, | ||||
|  | ||||
|         // add_manga[priority] | ||||
|         var priority: String, | ||||
|  | ||||
|         // add_manga[storage_type] | ||||
|         var storage_type: String, | ||||
|  | ||||
|         // add_manga[num_retail_volumes] | ||||
|         var num_retail_volumes: String, | ||||
|  | ||||
|         // add_manga[num_read_times] | ||||
|         var num_read_times: String, | ||||
|  | ||||
|         // add_manga[reread_value] | ||||
|         var reread_value: String, | ||||
|  | ||||
|         // add_manga[comments] | ||||
|         var comments: String, | ||||
|  | ||||
|         // add_manga[is_asked_to_discuss] | ||||
|         var is_asked_to_discuss: String, | ||||
|  | ||||
|         // add_manga[sns_post_type] | ||||
|         var sns_post_type: String, | ||||
|  | ||||
|         // submitIt | ||||
|         val submitIt: String = "0" | ||||
|     ) { | ||||
|         fun copyPersonalFrom(track: Track) { | ||||
|             num_read_chapters = track.last_chapter_read.toString() | ||||
|             val numScore = track.score.toInt() | ||||
|             if (numScore in 1..9) { | ||||
|                 score = numScore.toString() | ||||
|             } | ||||
|             status = track.status.toString() | ||||
|             if (track.started_reading_date == 0L) { | ||||
|                 start_date_month = "" | ||||
|                 start_date_day = "" | ||||
|                 start_date_year = "" | ||||
|             } | ||||
|             if (track.finished_reading_date == 0L) { | ||||
|                 finish_date_month = "" | ||||
|                 finish_date_day = "" | ||||
|                 finish_date_year = "" | ||||
|             } | ||||
|             track.started_reading_date.toCalendar()?.let { cal -> | ||||
|                 start_date_month = (cal[Calendar.MONTH] + 1).toString() | ||||
|                 start_date_day = cal[Calendar.DAY_OF_MONTH].toString() | ||||
|                 start_date_year = cal[Calendar.YEAR].toString() | ||||
|             } | ||||
|             track.finished_reading_date.toCalendar()?.let { cal -> | ||||
|                 finish_date_month = (cal[Calendar.MONTH] + 1).toString() | ||||
|                 finish_date_day = cal[Calendar.DAY_OF_MONTH].toString() | ||||
|                 finish_date_year = cal[Calendar.YEAR].toString() | ||||
|             } | ||||
|         } | ||||
|     } | ||||
| } | ||||
| @@ -138,7 +138,7 @@ class ExtensionManager( | ||||
|     fun <T : Extension> Iterable<T>.filterNotBlacklisted(): List<T> { | ||||
|         val blacklistEnabled = preferences.eh_enableSourceBlacklist().getOrDefault() | ||||
|         return filter { | ||||
|             if(it.isBlacklisted(blacklistEnabled)) { | ||||
|             if (it.isBlacklisted(blacklistEnabled)) { | ||||
|                 XLog.d("[EXH] Removing blacklisted extension: (name: %s, pkgName: %s)!", it.name, it.pkgName) | ||||
|                 false | ||||
|             } else true | ||||
| @@ -304,7 +304,7 @@ class ExtensionManager( | ||||
|      * @param extension The extension to be registered. | ||||
|      */ | ||||
|     private fun registerNewExtension(extension: Extension.Installed) { | ||||
|         if(extension.isBlacklisted()) { | ||||
|         if (extension.isBlacklisted()) { | ||||
|             XLog.d("[EXH] Removing blacklisted extension: (name: String, pkgName: %s)!", extension.name, extension.pkgName) | ||||
|             return | ||||
|         } | ||||
| @@ -320,7 +320,7 @@ class ExtensionManager( | ||||
|      * @param extension The extension to be registered. | ||||
|      */ | ||||
|     private fun registerUpdatedExtension(extension: Extension.Installed) { | ||||
|         if(extension.isBlacklisted()) { | ||||
|         if (extension.isBlacklisted()) { | ||||
|             XLog.d("[EXH] Removing blacklisted extension: (name: String, pkgName: %s)!", extension.name, extension.pkgName) | ||||
|             return | ||||
|         } | ||||
|   | ||||
| @@ -24,7 +24,7 @@ interface SManga : Serializable { | ||||
|  | ||||
|     fun copyFrom(other: SManga) { | ||||
|         // EXH --> | ||||
|         if(other.title.isNotBlank()) | ||||
|         if (other.title.isNotBlank()) | ||||
|             title = other.title | ||||
|         // EXH <-- | ||||
|          | ||||
|   | ||||
| @@ -36,7 +36,8 @@ interface LewdSource<M : RaisedSearchMetadata, I> : CatalogueSource { | ||||
|      */ | ||||
|     private fun newMetaInstance() = metaClass.constructors.find { | ||||
|         it.parameters.isEmpty() | ||||
|     }?.call() ?: error("Could not find no-args constructor for meta class: ${metaClass.qualifiedName}!") | ||||
|     }?.call() | ||||
|             ?: error("Could not find no-args constructor for meta class: ${metaClass.qualifiedName}!") | ||||
|  | ||||
|     /** | ||||
|      * Parses metadata from the input and then copies it into the manga | ||||
| @@ -45,12 +46,12 @@ interface LewdSource<M : RaisedSearchMetadata, I> : CatalogueSource { | ||||
|      */ | ||||
|     fun parseToManga(manga: SManga, input: I): Completable { | ||||
|         val mangaId = (manga as? Manga)?.id | ||||
|         val metaObservable = if(mangaId != null) { | ||||
|         val metaObservable = if (mangaId != null) { | ||||
|             // We have to use fromCallable because StorIO messes up the thread scheduling if we use their rx functions | ||||
|             Single.fromCallable { | ||||
|                 db.getFlatMetadataForManga(mangaId).executeAsBlocking() | ||||
|             } .map { | ||||
|                 if(it != null) it.raise(metaClass) | ||||
|             }.map { | ||||
|                 if (it != null) it.raise(metaClass) | ||||
|                 else newMetaInstance() | ||||
|             } | ||||
|         } else { | ||||
| @@ -62,7 +63,7 @@ interface LewdSource<M : RaisedSearchMetadata, I> : CatalogueSource { | ||||
|             it.copyTo(manga) | ||||
|             it | ||||
|         }.flatMapCompletable { | ||||
|             if(mangaId != null) { | ||||
|             if (mangaId != null) { | ||||
|                 it.mangaId = mangaId | ||||
|                 db.insertFlatMetadata(it.flatten()) | ||||
|             } else Completable.complete() | ||||
| @@ -77,7 +78,7 @@ interface LewdSource<M : RaisedSearchMetadata, I> : CatalogueSource { | ||||
|      * also be saved to the DB. | ||||
|      */ | ||||
|     fun getOrLoadMetadata(mangaId: Long?, inputProducer: () -> Single<I>): Single<M> { | ||||
|         val metaObservable = if(mangaId != null) { | ||||
|         val metaObservable = if (mangaId != null) { | ||||
|             // We have to use fromCallable because StorIO messes up the thread scheduling if we use their rx functions | ||||
|             Single.fromCallable { | ||||
|                 db.getFlatMetadataForManga(mangaId).executeAsBlocking() | ||||
| @@ -87,12 +88,12 @@ interface LewdSource<M : RaisedSearchMetadata, I> : CatalogueSource { | ||||
|         } else Single.just(null) | ||||
|  | ||||
|         return metaObservable.flatMap { existingMeta -> | ||||
|             if(existingMeta == null) { | ||||
|             if (existingMeta == null) { | ||||
|                 inputProducer().flatMap { input -> | ||||
|                     val newMeta = newMetaInstance() | ||||
|                     parseIntoMetadata(newMeta, input) | ||||
|                     val newMetaSingle = Single.just(newMeta) | ||||
|                     if(mangaId != null) { | ||||
|                     if (mangaId != null) { | ||||
|                         newMeta.mangaId = mangaId | ||||
|                         db.insertFlatMetadata(newMeta.flatten()).andThen(newMetaSingle) | ||||
|                     } else newMetaSingle | ||||
|   | ||||
| @@ -56,13 +56,13 @@ class EHentai(override val id: Long, | ||||
|     override val metaClass = EHentaiSearchMetadata::class | ||||
|  | ||||
|     val schema: String | ||||
|         get() = if(prefs.secureEXH().getOrDefault()) | ||||
|         get() = if (prefs.secureEXH().getOrDefault()) | ||||
|             "https" | ||||
|         else | ||||
|             "http" | ||||
|  | ||||
|     val domain: String | ||||
|         get() = if(exh) | ||||
|         get() = if (exh) | ||||
|             "exhentai.org" | ||||
|         else | ||||
|             "e-hentai.org" | ||||
| @@ -81,8 +81,7 @@ class EHentai(override val id: Long, | ||||
|      */ | ||||
|     data class ParsedManga(val fav: Int, val manga: Manga) | ||||
|  | ||||
|     fun extendedGenericMangaParse(doc: Document) | ||||
|             = with(doc) { | ||||
|     fun extendedGenericMangaParse(doc: Document) = with(doc) { | ||||
|         // Parse mangas (supports compact + extended layout) | ||||
|         val parsedMangas = select(".itg > tbody > tr").filter { | ||||
|             // Do not parse header and ads | ||||
| @@ -112,7 +111,7 @@ class EHentai(override val id: Long, | ||||
|         val parsedLocation = doc.location().toHttpUrlOrNull() | ||||
|  | ||||
|         //Add to page if required | ||||
|         val hasNextPage = if(parsedLocation == null | ||||
|         val hasNextPage = if (parsedLocation == null | ||||
|                 || !parsedLocation.queryParameterNames.contains(REVERSE_PARAM)) { | ||||
|             select("a[onclick=return false]").last()?.let { | ||||
|                 it.text() == ">" | ||||
| @@ -126,13 +125,11 @@ class EHentai(override val id: Long, | ||||
|     /** | ||||
|      * Parse a list of galleries | ||||
|      */ | ||||
|     fun genericMangaParse(response: Response) | ||||
|             = extendedGenericMangaParse(response.asJsoup()).let { | ||||
|     fun genericMangaParse(response: Response) = extendedGenericMangaParse(response.asJsoup()).let { | ||||
|         MangasPage(it.first.map { it.manga }, it.second) | ||||
|     } | ||||
|  | ||||
|     override fun fetchChapterList(manga: SManga) | ||||
|             = fetchChapterList(manga) {} | ||||
|     override fun fetchChapterList(manga: SManga) = fetchChapterList(manga) {} | ||||
|  | ||||
|     fun fetchChapterList(manga: SManga, throttleFunc: () -> Unit): Observable<List<SChapter>> { | ||||
|         return Single.fromCallable { | ||||
| @@ -148,7 +145,7 @@ class EHentai(override val id: Long, | ||||
|                     val cachedParent = updateHelper.parentLookupTable.get( | ||||
|                             gid | ||||
|                     ) | ||||
|                     if(cachedParent == null) { | ||||
|                     if (cachedParent == null) { | ||||
|                         throttleFunc() | ||||
|  | ||||
|                         val resp = client.newCall(exGet(baseUrl + url)).execute() | ||||
| @@ -192,7 +189,7 @@ class EHentai(override val id: Long, | ||||
|                 }!!.nextElementSibling().text()).time | ||||
|             } | ||||
|             // Build and append the rest of the galleries | ||||
|             if(DebugToggles.INCLUDE_ONLY_ROOT_WHEN_LOADING_EXH_VERSIONS.enabled) listOf(self) | ||||
|             if (DebugToggles.INCLUDE_ONLY_ROOT_WHEN_LOADING_EXH_VERSIONS.enabled) listOf(self) | ||||
|             else { | ||||
|                 newDisplay.mapIndexed { index, newGallery -> | ||||
|                     val link = newGallery.attr("href") | ||||
| @@ -209,8 +206,7 @@ class EHentai(override val id: Long, | ||||
|         }.toObservable() | ||||
|     } | ||||
|  | ||||
|     override fun fetchPageList(chapter: SChapter) | ||||
|             = fetchChapterPage(chapter, baseUrl + chapter.url).map { | ||||
|     override fun fetchPageList(chapter: SChapter) = fetchChapterPage(chapter, baseUrl + chapter.url).map { | ||||
|         it.mapIndexed { i, s -> | ||||
|             Page(i, s) | ||||
|         } | ||||
| @@ -223,28 +219,28 @@ class EHentai(override val id: Long, | ||||
|             val jsoup = it.asJsoup() | ||||
|             urls += parseChapterPage(jsoup) | ||||
|             val nextUrl = nextPageUrl(jsoup) | ||||
|             if(nextUrl != null) { | ||||
|             if (nextUrl != null) { | ||||
|                 fetchChapterPage(chapter, nextUrl, urls) | ||||
|             } else { | ||||
|                 Observable.just(urls) | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|     private fun parseChapterPage(response: Element) | ||||
|             = with(response) { | ||||
|  | ||||
|     private fun parseChapterPage(response: Element) = with(response) { | ||||
|         select(".gdtm a").map { | ||||
|             Pair(it.child(0).attr("alt").toInt(), it.attr("href")) | ||||
|         }.sortedBy(Pair<Int, String>::first).map { it.second } | ||||
|     } | ||||
|  | ||||
|     private fun chapterPageCall(np: String) = client.newCall(chapterPageRequest(np)).asObservableSuccess() | ||||
|     private fun chapterPageRequest(np: String) = exGet(np, null, headers) | ||||
|  | ||||
|     private fun nextPageUrl(element: Element): String? | ||||
|             = element.select("a[onclick=return false]").last()?.let { | ||||
|     private fun nextPageUrl(element: Element): String? = element.select("a[onclick=return false]").last()?.let { | ||||
|         return if (it.text() == ">") it.attr("href") else null | ||||
|     } | ||||
|  | ||||
|     override fun popularMangaRequest(page: Int) = if(exh) | ||||
|     override fun popularMangaRequest(page: Int) = if (exh) | ||||
|         latestUpdatesRequest(page) | ||||
|     else | ||||
|         exGet("$baseUrl/toplist.php?tl=15&p=${page - 1}", null) // Custom page logic for toplists | ||||
| @@ -254,7 +250,7 @@ class EHentai(override val id: Long, | ||||
|             urlImportFetchSearchManga(query) { | ||||
|                 searchMangaRequestObservable(page, query, filters).flatMap { | ||||
|                     client.newCall(it).asObservableSuccess() | ||||
|                 } .map { response -> | ||||
|                 }.map { response -> | ||||
|                     searchMangaParse(response) | ||||
|                 } | ||||
|             } | ||||
| @@ -263,13 +259,13 @@ class EHentai(override val id: Long, | ||||
|         val uri = Uri.parse("$baseUrl$QUERY_PREFIX").buildUpon() | ||||
|         uri.appendQueryParameter("f_search", query) | ||||
|         filters.forEach { | ||||
|             if(it is UriFilter) it.addToUri(uri) | ||||
|             if (it is UriFilter) it.addToUri(uri) | ||||
|         } | ||||
|  | ||||
|         val request = exGet(uri.toString(), page) | ||||
|  | ||||
|         // Reverse search results on filter | ||||
|         if(filters.any { it is ReverseFilter && it.state }) { | ||||
|         if (filters.any { it is ReverseFilter && it.state }) { | ||||
|             return client.newCall(request) | ||||
|                     .asObservableSuccess() | ||||
|                     .map { | ||||
| @@ -290,8 +286,7 @@ class EHentai(override val id: Long, | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     override fun searchMangaRequest(page: Int, query: String, filters: FilterList) | ||||
|             = throw UnsupportedOperationException() | ||||
|     override fun searchMangaRequest(page: Int, query: String, filters: FilterList) = throw UnsupportedOperationException() | ||||
|  | ||||
|     override fun latestUpdatesRequest(page: Int) = exGet(baseUrl, page) | ||||
|  | ||||
| @@ -299,8 +294,7 @@ class EHentai(override val id: Long, | ||||
|     override fun searchMangaParse(response: Response) = genericMangaParse(response) | ||||
|     override fun latestUpdatesParse(response: Response) = genericMangaParse(response) | ||||
|  | ||||
|     fun exGet(url: String, page: Int? = null, additionalHeaders: Headers? = null, cache: Boolean = true) | ||||
|             = GET(page?.let { | ||||
|     fun exGet(url: String, page: Int? = null, additionalHeaders: Headers? = null, cache: Boolean = true) = GET(page?.let { | ||||
|         addParam(url, "page", Integer.toString(page - 1)) | ||||
|     } ?: url, additionalHeaders?.let { | ||||
|         val headers = headers.newBuilder() | ||||
| @@ -311,7 +305,7 @@ class EHentai(override val id: Long, | ||||
|         } | ||||
|         headers.build() | ||||
|     } ?: headers).let { | ||||
|         if(!cache) | ||||
|         if (!cache) | ||||
|             it.newBuilder().cacheControl(CacheControl.FORCE_NETWORK).build() | ||||
|         else | ||||
|             it | ||||
| @@ -327,11 +321,11 @@ class EHentai(override val id: Long, | ||||
|         return client.newCall(mangaDetailsRequest(manga)) | ||||
|                 .asObservableWithAsyncStacktrace() | ||||
|                 .flatMap { (stacktrace, response) -> | ||||
|                     if(response.isSuccessful) { | ||||
|                     if (response.isSuccessful) { | ||||
|                         // Pull to most recent | ||||
|                         val doc = response.asJsoup() | ||||
|                         val newerGallery = doc.select("#gnd a").lastOrNull() | ||||
|                         val pre = if(newerGallery != null && DebugToggles.PULL_TO_ROOT_WHEN_LOADING_EXH_MANGA_DETAILS.enabled) { | ||||
|                         val pre = if (newerGallery != null && DebugToggles.PULL_TO_ROOT_WHEN_LOADING_EXH_MANGA_DETAILS.enabled) { | ||||
|                             manga.url = EHentaiSearchMetadata.normalizeUrl(newerGallery.attr("href")) | ||||
|                             client.newCall(mangaDetailsRequest(manga)) | ||||
|                                     .asObservableSuccess().map { it.asJsoup() } | ||||
| @@ -388,7 +382,7 @@ class EHentai(override val id: Long, | ||||
|                     val left = it.select(".gdt1").text().nullIfBlank()?.trim() | ||||
|                     val rightElement = it.selectFirst(".gdt2") | ||||
|                     val right = rightElement.text().nullIfBlank()?.trim() | ||||
|                     if(left != null && right != null) { | ||||
|                     if (left != null && right != null) { | ||||
|                         ignore { | ||||
|                             when (left.removeSuffix(":") | ||||
|                                     .toLowerCase()) { | ||||
| @@ -413,10 +407,10 @@ class EHentai(override val id: Long, | ||||
|                 } | ||||
|  | ||||
|                 lastUpdateCheck = System.currentTimeMillis() | ||||
|                 if(datePosted != null | ||||
|                 if (datePosted != null | ||||
|                         && lastUpdateCheck - datePosted!! > EHentaiUpdateWorkerConstants.GALLERY_AGE_TIME) { | ||||
|                     aged = true | ||||
|                         XLog.d("aged %s - too old", title) | ||||
|                     XLog.d("aged %s - too old", title) | ||||
|                 } | ||||
|  | ||||
|                 //Parse ratings | ||||
| @@ -442,7 +436,7 @@ class EHentai(override val id: Long, | ||||
|                         RaisedTag( | ||||
|                                 namespace, | ||||
|                                 element.text().trim(), | ||||
|                                 if(element.hasClass("gtl")) | ||||
|                                 if (element.hasClass("gtl")) | ||||
|                                     TAG_TYPE_LIGHT | ||||
|                                 else | ||||
|                                     TAG_TYPE_NORMAL | ||||
| @@ -458,11 +452,9 @@ class EHentai(override val id: Long, | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     override fun chapterListParse(response: Response) | ||||
|             = throw UnsupportedOperationException("Unused method was called somehow!") | ||||
|     override fun chapterListParse(response: Response) = throw UnsupportedOperationException("Unused method was called somehow!") | ||||
|  | ||||
|     override fun pageListParse(response: Response) | ||||
|             = throw UnsupportedOperationException("Unused method was called somehow!") | ||||
|     override fun pageListParse(response: Response) = throw UnsupportedOperationException("Unused method was called somehow!") | ||||
|  | ||||
|     override fun fetchImageUrl(page: Page): Observable<String> { | ||||
|         return client.newCall(imageUrlRequest(page)) | ||||
| @@ -515,29 +507,29 @@ class EHentai(override val id: Long, | ||||
|         return Pair(result as List<ParsedManga>, favNames!!) | ||||
|     } | ||||
|  | ||||
|     fun spPref() = if(exh) | ||||
|     fun spPref() = if (exh) | ||||
|         prefs.eh_exhSettingsProfile() | ||||
|     else | ||||
|         prefs.eh_ehSettingsProfile() | ||||
|  | ||||
|     fun rawCookies(sp: Int): Map<String, String> { | ||||
|         val cookies: MutableMap<String, String> = mutableMapOf() | ||||
|         if(prefs.enableExhentai().getOrDefault()) { | ||||
|         if (prefs.enableExhentai().getOrDefault()) { | ||||
|             cookies[LoginController.MEMBER_ID_COOKIE] = prefs.memberIdVal().get()!! | ||||
|             cookies[LoginController.PASS_HASH_COOKIE] = prefs.passHashVal().get()!! | ||||
|             cookies[LoginController.IGNEOUS_COOKIE] = prefs.igneousVal().get()!! | ||||
|             cookies["sp"] = sp.toString() | ||||
|  | ||||
|             val sessionKey = prefs.eh_settingsKey().getOrDefault() | ||||
|             if(sessionKey != null) | ||||
|             if (sessionKey != null) | ||||
|                 cookies["sk"] = sessionKey | ||||
|  | ||||
|             val sessionCookie = prefs.eh_sessionCookie().getOrDefault() | ||||
|             if(sessionCookie != null) | ||||
|             if (sessionCookie != null) | ||||
|                 cookies["s"] = sessionCookie | ||||
|  | ||||
|             val hathPerksCookie = prefs.eh_hathPerksCookies().getOrDefault() | ||||
|             if(hathPerksCookie != null) | ||||
|             if (hathPerksCookie != null) | ||||
|                 cookies["hath_perks"] = hathPerksCookie | ||||
|         } | ||||
|  | ||||
| @@ -550,15 +542,12 @@ class EHentai(override val id: Long, | ||||
|         return cookies | ||||
|     } | ||||
|  | ||||
|     fun cookiesHeader(sp: Int = spPref().getOrDefault()) | ||||
|             = buildCookies(rawCookies(sp)) | ||||
|     fun cookiesHeader(sp: Int = spPref().getOrDefault()) = buildCookies(rawCookies(sp)) | ||||
|  | ||||
|     //Headers | ||||
|     override fun headersBuilder() | ||||
|             = super.headersBuilder().add("Cookie", cookiesHeader())!! | ||||
|     override fun headersBuilder() = super.headersBuilder().add("Cookie", cookiesHeader())!! | ||||
|  | ||||
|     fun addParam(url: String, param: String, value: String) | ||||
|             = Uri.parse(url) | ||||
|     fun addParam(url: String, param: String, value: String) = Uri.parse(url) | ||||
|             .buildUpon() | ||||
|             .appendQueryParameter(param, value) | ||||
|             .toString() | ||||
| @@ -586,12 +575,12 @@ class EHentai(override val id: Long, | ||||
|  | ||||
|     class Watched : Filter.CheckBox("Watched List"), UriFilter { | ||||
|         override fun addToUri(builder: Uri.Builder) { | ||||
|             if(state) | ||||
|             if (state) | ||||
|                 builder.appendPath("watched") | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     class GenreOption(name: String, val genreId: Int): Filter.CheckBox(name, false) | ||||
|     class GenreOption(name: String, val genreId: Int) : Filter.CheckBox(name, false) | ||||
|     class GenreGroup : Filter.Group<GenreOption>("Genres", listOf( | ||||
|             GenreOption("Dōjinshi", 2), | ||||
|             GenreOption("Manga", 4), | ||||
| @@ -606,22 +595,22 @@ class EHentai(override val id: Long, | ||||
|     )), UriFilter { | ||||
|         override fun addToUri(builder: Uri.Builder) { | ||||
|             val bits = state.fold(0) { acc, genre -> | ||||
|                 if(!genre.state) acc + genre.genreId else acc | ||||
|                 if (!genre.state) acc + genre.genreId else acc | ||||
|             } | ||||
|             builder.appendQueryParameter("f_cats", bits.toString()) | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     class AdvancedOption(name: String, val param: String, defValue: Boolean = false): Filter.CheckBox(name, defValue), UriFilter { | ||||
|     class AdvancedOption(name: String, val param: String, defValue: Boolean = false) : Filter.CheckBox(name, defValue), UriFilter { | ||||
|         override fun addToUri(builder: Uri.Builder) { | ||||
|             if(state) | ||||
|             if (state) | ||||
|                 builder.appendQueryParameter(param, "on") | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     open class PageOption(name: String, private val queryKey: String) : Filter.Text(name), UriFilter { | ||||
|         override fun addToUri(builder: Uri.Builder) { | ||||
|             if(state.isNotBlank()) { | ||||
|             if (state.isNotBlank()) { | ||||
|                 if (builder.build().getQueryParameters("f_sp").isEmpty()) { | ||||
|                     builder.appendQueryParameter("f_sp", "on") | ||||
|                 } | ||||
| @@ -630,6 +619,7 @@ class EHentai(override val id: Long, | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     class MinPagesOption : PageOption("Minimum Pages", "f_spf") | ||||
|     class MaxPagesOption : PageOption("Maximum Pages", "f_spt") | ||||
|  | ||||
| @@ -641,7 +631,7 @@ class EHentai(override val id: Long, | ||||
|             "5 stars" | ||||
|     )), UriFilter { | ||||
|         override fun addToUri(builder: Uri.Builder) { | ||||
|             if(state > 0) { | ||||
|             if (state > 0) { | ||||
|                 builder.appendQueryParameter("f_srdd", Integer.toString(state + 1)) | ||||
|                 builder.appendQueryParameter("f_sr", "on") | ||||
|             } | ||||
| @@ -664,16 +654,16 @@ class EHentai(override val id: Long, | ||||
|  | ||||
|     class ReverseFilter : Filter.CheckBox("Reverse search results") | ||||
|  | ||||
|     override val name = if(exh) | ||||
|     override val name = if (exh) | ||||
|         "ExHentai" | ||||
|     else | ||||
|         "E-Hentai" | ||||
|  | ||||
|     class GalleryNotFoundException(cause: Throwable): RuntimeException("Gallery not found!", cause) | ||||
|     class GalleryNotFoundException(cause: Throwable) : RuntimeException("Gallery not found!", cause) | ||||
|  | ||||
|      // === URL IMPORT STUFF | ||||
|     // === URL IMPORT STUFF | ||||
|  | ||||
|     override val matchingHosts: List<String> = if(exh) listOf( | ||||
|     override val matchingHosts: List<String> = if (exh) listOf( | ||||
|             "exhentai.org" | ||||
|     ) else listOf( | ||||
|             "g.e-hentai.org", | ||||
| @@ -745,8 +735,7 @@ class EHentai(override val id: Long, | ||||
|                 "e8e" | ||||
|         ) | ||||
|  | ||||
|         fun buildCookies(cookies: Map<String, String>) | ||||
|                 = cookies.entries.joinToString(separator = "; ") { | ||||
|         fun buildCookies(cookies: Map<String, String>) = cookies.entries.joinToString(separator = "; ") { | ||||
|             "${URLEncoder.encode(it.key, "UTF-8")}=${URLEncoder.encode(it.value, "UTF-8")}" | ||||
|         } | ||||
|  | ||||
|   | ||||
| @@ -61,7 +61,7 @@ class Hitomi : HttpSource(), LewdSource<HitomiSearchMetadata, Document>, UrlImpo | ||||
|     private var tagIndexVersionCacheTime: Long = 0 | ||||
|     private fun tagIndexVersion(): Single<Long> { | ||||
|         val sCachedTagIndexVersion = cachedTagIndexVersion | ||||
|         return if(sCachedTagIndexVersion == null | ||||
|         return if (sCachedTagIndexVersion == null | ||||
|                 || tagIndexVersionCacheTime + INDEX_VERSION_CACHE_TIME_MS < System.currentTimeMillis()) { | ||||
|             HitomiNozomi.getIndexVersion(client, "tagindex").subscribeOn(Schedulers.io()).doOnNext { | ||||
|                 cachedTagIndexVersion = it | ||||
| @@ -76,7 +76,7 @@ class Hitomi : HttpSource(), LewdSource<HitomiSearchMetadata, Document>, UrlImpo | ||||
|     private var galleryIndexVersionCacheTime: Long = 0 | ||||
|     private fun galleryIndexVersion(): Single<Long> { | ||||
|         val sCachedGalleryIndexVersion = cachedGalleryIndexVersion | ||||
|         return if(sCachedGalleryIndexVersion == null | ||||
|         return if (sCachedGalleryIndexVersion == null | ||||
|                 || galleryIndexVersionCacheTime + INDEX_VERSION_CACHE_TIME_MS < System.currentTimeMillis()) { | ||||
|             HitomiNozomi.getIndexVersion(client, "galleriesindex").subscribeOn(Schedulers.io()).doOnNext { | ||||
|                 cachedGalleryIndexVersion = it | ||||
| @@ -106,7 +106,7 @@ class Hitomi : HttpSource(), LewdSource<HitomiSearchMetadata, Document>, UrlImpo | ||||
|  | ||||
|             input.select(".gallery-info tr").forEach { | ||||
|                 val content = it.child(1) | ||||
|                 when(it.child(0).text().toLowerCase()) { | ||||
|                 when (it.child(0).text().toLowerCase()) { | ||||
|                     "group" -> { | ||||
|                         group = content.text() | ||||
|                         tags += RaisedTag("group", group!!, TAG_TYPE_VIRTUAL) | ||||
| @@ -133,10 +133,10 @@ class Hitomi : HttpSource(), LewdSource<HitomiSearchMetadata, Document>, UrlImpo | ||||
|                     } | ||||
|                     "tags" -> { | ||||
|                         tags += content.select("a").map { | ||||
|                             val ns = if(it.attr("href").startsWith("/tag/male")) "male" | ||||
|                                      else if (it.attr("href").startsWith("/tag/female")) "female" | ||||
|                                      else "misc" | ||||
|                             RaisedTag(ns, it.text().dropLast(if (ns=="misc") 0 else 2), TAG_TYPE_DEFAULT) | ||||
|                             val ns = if (it.attr("href").startsWith("/tag/male")) "male" | ||||
|                             else if (it.attr("href").startsWith("/tag/female")) "female" | ||||
|                             else "misc" | ||||
|                             RaisedTag(ns, it.text().dropLast(if (ns == "misc") 0 else 2), TAG_TYPE_DEFAULT) | ||||
|                         } | ||||
|                     } | ||||
|                 } | ||||
| @@ -178,8 +178,7 @@ class Hitomi : HttpSource(), LewdSource<HitomiSearchMetadata, Document>, UrlImpo | ||||
|      * @param query the search query. | ||||
|      * @param filters the list of filters to apply. | ||||
|      */ | ||||
|     override fun searchMangaRequest(page: Int, query: String, filters: FilterList) | ||||
|             = throw UnsupportedOperationException() | ||||
|     override fun searchMangaRequest(page: Int, query: String, filters: FilterList) = throw UnsupportedOperationException() | ||||
|  | ||||
|     override fun fetchSearchManga(page: Int, query: String, filters: FilterList): Observable<MangasPage> { | ||||
|         return urlImportFetchSearchManga(query) { | ||||
| @@ -192,7 +191,7 @@ class Hitomi : HttpSource(), LewdSource<HitomiSearchMetadata, Document>, UrlImpo | ||||
|             val hn = Single.zip(tagIndexVersion(), galleryIndexVersion()) { tv, gv -> tv to gv } | ||||
|                     .map { HitomiNozomi(client, it.first, it.second) } | ||||
|  | ||||
|             var base = if(positive.isEmpty()) { | ||||
|             var base = if (positive.isEmpty()) { | ||||
|                 hn.flatMap { n -> n.getGalleryIdsFromNozomi(null, "index", "all").map { n to it.toSet() } } | ||||
|             } else { | ||||
|                 val q = positive.removeAt(0) | ||||
| @@ -275,7 +274,7 @@ class Hitomi : HttpSource(), LewdSource<HitomiSearchMetadata, Document>, UrlImpo | ||||
|  | ||||
|     private fun parseNozomiPage(array: ByteArray): Observable<List<SManga>> { | ||||
|         val cursor = ByteCursor(array) | ||||
|         val ids = (1 .. array.size / 4).map { | ||||
|         val ids = (1..array.size / 4).map { | ||||
|             cursor.nextInt() | ||||
|         } | ||||
|  | ||||
| @@ -297,7 +296,7 @@ class Hitomi : HttpSource(), LewdSource<HitomiSearchMetadata, Document>, UrlImpo | ||||
|         return SManga.create().apply { | ||||
|             val titleElement = doc.selectFirst("h1") | ||||
|             title = titleElement.text() | ||||
|             thumbnail_url = "https:" + if(prefs.eh_hl_useHighQualityThumbs().getOrDefault()) { | ||||
|             thumbnail_url = "https:" + if (prefs.eh_hl_useHighQualityThumbs().getOrDefault()) { | ||||
|                 doc.selectFirst("img").attr("data-srcset").substringBefore(' ') | ||||
|             } else { | ||||
|                 doc.selectFirst("img").attr("data-src") | ||||
| @@ -364,16 +363,16 @@ class Hitomi : HttpSource(), LewdSource<HitomiSearchMetadata, Document>, UrlImpo | ||||
|         val hlId = response.request.url.pathSegments.last().removeSuffix(".js").toLong() | ||||
|         val str = response.body!!.string() | ||||
|         val json = jsonParser.parse(str.removePrefix("var galleryinfo = ")) | ||||
|         return json["files"].array.mapIndexed { index, jsonElement ->  | ||||
|         return json["files"].array.mapIndexed { index, jsonElement -> | ||||
|             val hash = jsonElement["hash"].string | ||||
|             val ext = if (jsonElement["haswebp"].string=="0") jsonElement["name"].string.split('.').last() else "webp" | ||||
|             val path = if (jsonElement["haswebp"].string=="0") "images" else "webp" | ||||
|             val ext = if (jsonElement["haswebp"].string == "0") jsonElement["name"].string.split('.').last() else "webp" | ||||
|             val path = if (jsonElement["haswebp"].string == "0") "images" else "webp" | ||||
|             val hashPath1 = hash.takeLast(1) | ||||
|             val hashPath2 = hash.takeLast(3).take(2) | ||||
|             Page( | ||||
|                     index, | ||||
|                     "", | ||||
|             "https://${subdomainFromGalleryId(hlId)}a.hitomi.la/$path/$hashPath1/$hashPath2/$hash.$ext" | ||||
|                     "https://${subdomainFromGalleryId(hlId)}a.hitomi.la/$path/$hashPath1/$hashPath2/$hash.$ext" | ||||
|             ) | ||||
|         } | ||||
|     } | ||||
| @@ -406,7 +405,7 @@ class Hitomi : HttpSource(), LewdSource<HitomiSearchMetadata, Document>, UrlImpo | ||||
|     override fun mapUrlToMangaUrl(uri: Uri): String? { | ||||
|         val lcFirstPathSegment = uri.pathSegments.firstOrNull()?.toLowerCase() ?: return null | ||||
|  | ||||
|         if(lcFirstPathSegment != "manga" && lcFirstPathSegment != "reader") | ||||
|         if (lcFirstPathSegment != "manga" && lcFirstPathSegment != "reader") | ||||
|             return null | ||||
|  | ||||
|         return "https://hitomi.la/manga/${uri.pathSegments[1].substringBefore('.')}.html" | ||||
| @@ -418,7 +417,7 @@ class Hitomi : HttpSource(), LewdSource<HitomiSearchMetadata, Document>, UrlImpo | ||||
|         private val NUMBER_OF_FRONTENDS = 2 | ||||
|  | ||||
|         private val DATE_FORMAT by lazy { | ||||
|             if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) | ||||
|             if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.N) | ||||
|                 SimpleDateFormat("yyyy-MM-dd HH:mm:ssX", Locale.US) | ||||
|             else | ||||
|                 SimpleDateFormat("yyyy-MM-dd HH:mm:ss'-05'", Locale.US) | ||||
|   | ||||
| @@ -42,7 +42,7 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata | ||||
|     //Support direct URL importing | ||||
|     override fun fetchSearchManga(page: Int, query: String, filters: FilterList): Observable<MangasPage> { | ||||
|         val trimmedIdQuery = query.trim().removePrefix("id:") | ||||
|         val newQuery = if(trimmedIdQuery.toIntOrNull() ?: -1 >= 0) { | ||||
|         val newQuery = if (trimmedIdQuery.toIntOrNull() ?: -1 >= 0) { | ||||
|             "$baseUrl/g/$trimmedIdQuery/" | ||||
|         } else query | ||||
|  | ||||
| @@ -56,15 +56,15 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata | ||||
|     } | ||||
|  | ||||
|     private fun searchMangaRequestObservable(page: Int, query: String, filters: FilterList): Observable<Request> { | ||||
| 	val langFilter = filters.filterIsInstance<filterLang>().firstOrNull() | ||||
|         val langFilter = filters.filterIsInstance<filterLang>().firstOrNull() | ||||
|         var langFilterString = "" | ||||
| 	if (langFilter != null) { | ||||
|             langFilterString = SOURCE_LANG_LIST.first {it.first == langFilter!!.values[langFilter!!.state]}.second | ||||
|         if (langFilter != null) { | ||||
|             langFilterString = SOURCE_LANG_LIST.first { it.first == langFilter!!.values[langFilter!!.state] }.second | ||||
|         } | ||||
|  | ||||
|         val uri = if(query.isNotBlank()) { | ||||
|         val uri = if (query.isNotBlank()) { | ||||
|             Uri.parse("$baseUrl/search/").buildUpon().apply { | ||||
|                 appendQueryParameter("q", query+langFilterString) | ||||
|                 appendQueryParameter("q", query + langFilterString) | ||||
|             } | ||||
|         } else { | ||||
|             Uri.parse(baseUrl).buildUpon() | ||||
| @@ -73,12 +73,12 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata | ||||
|         val sortFilter = filters.filterIsInstance<SortFilter>().firstOrNull()?.state | ||||
|                 ?: defaultSortFilterSelection() | ||||
|  | ||||
|         if(sortFilter.index == 1) { | ||||
|             if(query.isBlank()) error("You must specify a search query if you wish to sort by popularity!") | ||||
|         if (sortFilter.index == 1) { | ||||
|             if (query.isBlank()) error("You must specify a search query if you wish to sort by popularity!") | ||||
|             uri.appendQueryParameter("sort", "popular") | ||||
|         } | ||||
|  | ||||
|         if(sortFilter.ascending) { | ||||
|         if (sortFilter.ascending) { | ||||
|             return client.newCall(nhGet(uri.toString())) | ||||
|                     .asObservableSuccess() | ||||
|                     .map { | ||||
| @@ -103,11 +103,9 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata | ||||
|         return Observable.just(nhGet(uri.toString(), page)) | ||||
|     } | ||||
|  | ||||
|     override fun searchMangaRequest(page: Int, query: String, filters: FilterList) | ||||
|             = throw UnsupportedOperationException() | ||||
|     override fun searchMangaRequest(page: Int, query: String, filters: FilterList) = throw UnsupportedOperationException() | ||||
|  | ||||
|     override fun searchMangaParse(response: Response) | ||||
|             = parseResultPage(response) | ||||
|     override fun searchMangaParse(response: Response) = parseResultPage(response) | ||||
|  | ||||
|     override fun latestUpdatesRequest(page: Int): Request { | ||||
|         val uri = Uri.parse(baseUrl).buildUpon() | ||||
| @@ -115,8 +113,7 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata | ||||
|         return nhGet(uri.toString(), page) | ||||
|     } | ||||
|  | ||||
|     override fun latestUpdatesParse(response: Response) | ||||
|             = parseResultPage(response) | ||||
|     override fun latestUpdatesParse(response: Response) = parseResultPage(response) | ||||
|  | ||||
|     override fun mangaDetailsParse(response: Response) = throw UnsupportedOperationException() | ||||
|  | ||||
| @@ -136,8 +133,7 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata | ||||
|                 } | ||||
|     } | ||||
|  | ||||
|     override fun mangaDetailsRequest(manga: SManga) | ||||
|             = nhGet(baseUrl + manga.url) | ||||
|     override fun mangaDetailsRequest(manga: SManga) = nhGet(baseUrl + manga.url) | ||||
|  | ||||
|     fun parseResultPage(response: Response): MangasPage { | ||||
|         val doc = response.asJsoup() | ||||
| @@ -153,7 +149,7 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata | ||||
|                 // last() is a hack to ignore the lazy-loader placeholder image on the front page | ||||
|                 thumbnail_url = it.select("img").last().attr("src") | ||||
|                 // In some pages, the thumbnail url does not include the protocol | ||||
|                 if(!thumbnail_url!!.startsWith("https:")) thumbnail_url = "https:$thumbnail_url" | ||||
|                 if (!thumbnail_url!!.startsWith("https:")) thumbnail_url = "https:$thumbnail_url" | ||||
|             } | ||||
|         } | ||||
|  | ||||
| @@ -215,16 +211,14 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata | ||||
|                 .toSingle() | ||||
|     } | ||||
|  | ||||
|     override fun fetchChapterList(manga: SManga) | ||||
|             = Observable.just(listOf(SChapter.create().apply { | ||||
|     override fun fetchChapterList(manga: SManga) = Observable.just(listOf(SChapter.create().apply { | ||||
|         url = manga.url | ||||
|         name = "Chapter" | ||||
|         chapter_number = 1f | ||||
|     })) | ||||
|  | ||||
|     override fun fetchPageList(chapter: SChapter) | ||||
|             = getOrLoadMetadata(chapter.mangaId, NHentaiSearchMetadata.nhUrlToId(chapter.url)).map { metadata -> | ||||
|         if(metadata.mediaId == null) emptyList() | ||||
|     override fun fetchPageList(chapter: SChapter) = getOrLoadMetadata(chapter.mangaId, NHentaiSearchMetadata.nhUrlToId(chapter.url)).map { metadata -> | ||||
|         if (metadata.mediaId == null) emptyList() | ||||
|         else | ||||
|             metadata.pageImageTypes.mapIndexed { index, s -> | ||||
|                 val imageUrl = imageUrlFromType(metadata.mediaId!!, index + 1, s) | ||||
| @@ -264,6 +258,7 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata | ||||
|     val appName by lazy { | ||||
|         context.getString(R.string.app_name) | ||||
|     } | ||||
|  | ||||
|     fun nhGet(url: String, tag: Any? = null) = GET(url) | ||||
|             .newBuilder() | ||||
|             .header("User-Agent", | ||||
| @@ -291,7 +286,7 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata | ||||
|     ) | ||||
|  | ||||
|     override fun mapUrlToMangaUrl(uri: Uri): String? { | ||||
|         if(uri.pathSegments.firstOrNull()?.toLowerCase() != "g") | ||||
|         if (uri.pathSegments.firstOrNull()?.toLowerCase() != "g") | ||||
|             return null | ||||
|  | ||||
|         return "$baseUrl/g/${uri.pathSegments[1]}/" | ||||
| @@ -304,10 +299,10 @@ class NHentai(context: Context) : HttpSource(), LewdSource<NHentaiSearchMetadata | ||||
|         private fun defaultSortFilterSelection() = Filter.Sort.Selection(0, false) | ||||
|  | ||||
|         private val SOURCE_LANG_LIST = listOf( | ||||
|             Pair("All", ""), | ||||
|             Pair("English", " english"), | ||||
|             Pair("Japanese", " japanese"), | ||||
|             Pair("Chinese", " chinese") | ||||
|                 Pair("All", ""), | ||||
|                 Pair("English", " english"), | ||||
|                 Pair("Japanese", " japanese"), | ||||
|                 Pair("Chinese", " chinese") | ||||
|         ) | ||||
|  | ||||
|  | ||||
|   | ||||
| @@ -73,7 +73,7 @@ class PervEden(override val id: Long, val pvLang: PervEdenLang) : ParsedHttpSour | ||||
|     override fun searchMangaNextPageSelector() = ".next" | ||||
|  | ||||
|     override fun popularMangaRequest(page: Int): Request { | ||||
|         val urlLang = if(lang == "en") | ||||
|         val urlLang = if (lang == "en") | ||||
|             "eng" | ||||
|         else "it" | ||||
|         return GET("$baseUrl/$urlLang/") | ||||
| @@ -106,7 +106,7 @@ class PervEden(override val id: Long, val pvLang: PervEdenLang) : ParsedHttpSour | ||||
|         uri.appendQueryParameter("page", page.toString()) | ||||
|         uri.appendQueryParameter("title", query) | ||||
|         filters.forEach { | ||||
|             if(it is UriFilter) it.addToUri(uri) | ||||
|             if (it is UriFilter) it.addToUri(uri) | ||||
|         } | ||||
|         return GET(uri.toString()) | ||||
|     } | ||||
| @@ -152,38 +152,38 @@ class PervEden(override val id: Long, val pvLang: PervEdenLang) : ParsedHttpSour | ||||
|             tags.clear() | ||||
|             var inStatus: String? = null | ||||
|             rightBoxElement.childNodes().forEach { | ||||
|                 if(it is Element && it.tagName().toLowerCase() == "h4") { | ||||
|                 if (it is Element && it.tagName().toLowerCase() == "h4") { | ||||
|                     inStatus = it.text().trim() | ||||
|                 } else { | ||||
|                     when(inStatus) { | ||||
|                     when (inStatus) { | ||||
|                         "Alternative name(s)" -> { | ||||
|                             if(it is TextNode) { | ||||
|                             if (it is TextNode) { | ||||
|                                 val text = it.text().trim() | ||||
|                                 if(!text.isBlank()) | ||||
|                                 if (!text.isBlank()) | ||||
|                                     newAltTitles += text | ||||
|                             } | ||||
|                         } | ||||
|                         "Artist" -> { | ||||
|                             if(it is Element && it.tagName() == "a") { | ||||
|                             if (it is Element && it.tagName() == "a") { | ||||
|                                 artist = it.text() | ||||
|                                 tags += RaisedTag("artist", it.text().toLowerCase(), TAG_TYPE_VIRTUAL) | ||||
|                             } | ||||
|                         } | ||||
|                         "Genres" -> { | ||||
|                             if(it is Element && it.tagName() == "a") | ||||
|                             if (it is Element && it.tagName() == "a") | ||||
|                                 tags += RaisedTag(null, it.text().toLowerCase(), TAG_TYPE_DEFAULT) | ||||
|                         } | ||||
|                         "Type" -> { | ||||
|                             if(it is TextNode) { | ||||
|                             if (it is TextNode) { | ||||
|                                 val text = it.text().trim() | ||||
|                                 if(!text.isBlank()) | ||||
|                                 if (!text.isBlank()) | ||||
|                                     type = text | ||||
|                             } | ||||
|                         } | ||||
|                         "Status" -> { | ||||
|                             if(it is TextNode) { | ||||
|                             if (it is TextNode) { | ||||
|                                 val text = it.text().trim() | ||||
|                                 if(!text.isBlank()) | ||||
|                                 if (!text.isBlank()) | ||||
|                                     status = text | ||||
|                             } | ||||
|                         } | ||||
| @@ -197,8 +197,7 @@ class PervEden(override val id: Long, val pvLang: PervEdenLang) : ParsedHttpSour | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     override fun mangaDetailsParse(document: Document): SManga | ||||
|         = throw UnsupportedOperationException() | ||||
|     override fun mangaDetailsParse(document: Document): SManga = throw UnsupportedOperationException() | ||||
|  | ||||
|     override fun latestUpdatesRequest(page: Int): Request { | ||||
|         val num = when (lang) { | ||||
| @@ -226,18 +225,17 @@ class PervEden(override val id: Long, val pvLang: PervEdenLang) : ParsedHttpSour | ||||
|  | ||||
|         try { | ||||
|             date_upload = DATE_FORMAT.parse(element.getElementsByClass("chapterDate").first().text().trim()).time | ||||
|         } catch(ignored: Exception) {} | ||||
|         } catch (ignored: Exception) { | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     override fun pageListParse(document: Document) | ||||
|             = document.getElementById("pageSelect").getElementsByTag("option").map { | ||||
|     override fun pageListParse(document: Document) = document.getElementById("pageSelect").getElementsByTag("option").map { | ||||
|         Page(it.attr("data-page").toInt() - 1, baseUrl + it.attr("value")) | ||||
|     } | ||||
|  | ||||
|     override fun imageUrlParse(document: Document) | ||||
|             = "http:" + document.getElementById("mainImg").attr("src")!! | ||||
|     override fun imageUrlParse(document: Document) = "http:" + document.getElementById("mainImg").attr("src")!! | ||||
|  | ||||
|     override fun getFilterList() = FilterList ( | ||||
|     override fun getFilterList() = FilterList( | ||||
|             AuthorFilter(), | ||||
|             ArtistFilter(), | ||||
|             TypeFilterGroup(), | ||||
| @@ -253,7 +251,7 @@ class PervEden(override val id: Long, val pvLang: PervEdenLang) : ParsedHttpSour | ||||
|  | ||||
|     class StatusFilter(n: String, val id: Int) : Filter.CheckBox(n, false), UriFilter { | ||||
|         override fun addToUri(builder: Uri.Builder) { | ||||
|             if(state) | ||||
|             if (state) | ||||
|                 builder.appendQueryParameter("status", id.toString()) | ||||
|         } | ||||
|     } | ||||
| @@ -302,7 +300,7 @@ class PervEden(override val id: Long, val pvLang: PervEdenLang) : ParsedHttpSour | ||||
|  | ||||
|     class TypeFilter(n: String, val id: Int) : Filter.CheckBox(n, false), UriFilter { | ||||
|         override fun addToUri(builder: Uri.Builder) { | ||||
|             if(state) | ||||
|             if (state) | ||||
|                 builder.appendQueryParameter("type", id.toString()) | ||||
|         } | ||||
|     } | ||||
| @@ -310,7 +308,7 @@ class PervEden(override val id: Long, val pvLang: PervEdenLang) : ParsedHttpSour | ||||
|     override val matchingHosts = listOf("www.perveden.com") | ||||
|  | ||||
|     override fun matchesUri(uri: Uri): Boolean { | ||||
|         return super.matchesUri(uri) && uri.pathSegments.firstOrNull()?.toLowerCase() == when(pvLang) { | ||||
|         return super.matchesUri(uri) && uri.pathSegments.firstOrNull()?.toLowerCase() == when (pvLang) { | ||||
|             PervEdenLang.en -> "en-manga" | ||||
|             PervEdenLang.it -> "it-manga" | ||||
|         } | ||||
|   | ||||
| @@ -32,7 +32,7 @@ import rx.schedulers.Schedulers | ||||
|  | ||||
| typealias SiteMap = NakedTrie<Unit> | ||||
|  | ||||
| class EightMuses: HttpSource(), | ||||
| class EightMuses : HttpSource(), | ||||
|         LewdSource<EightMusesSearchMetadata, Document>, | ||||
|         UrlImportableSource { | ||||
|     override val id = EIGHTMUSES_SOURCE_ID | ||||
| @@ -118,7 +118,7 @@ class EightMuses: HttpSource(), | ||||
|      * @param filters the list of filters to apply. | ||||
|      */ | ||||
|     override fun searchMangaRequest(page: Int, query: String, filters: FilterList): Request { | ||||
|         val urlBuilder = if(!query.isBlank()) { | ||||
|         val urlBuilder = if (!query.isBlank()) { | ||||
|             "$baseUrl/search".toHttpUrlOrNull()!! | ||||
|                     .newBuilder() | ||||
|                     .addQueryParameter("q", query) | ||||
| @@ -161,11 +161,9 @@ class EightMuses: HttpSource(), | ||||
|         throw UnsupportedOperationException("Should not be called!") | ||||
|     } | ||||
|  | ||||
|     override fun fetchLatestUpdates(page: Int) | ||||
|             = fetchListing(latestUpdatesRequest(page), false) | ||||
|     override fun fetchLatestUpdates(page: Int) = fetchListing(latestUpdatesRequest(page), false) | ||||
|  | ||||
|     override fun fetchPopularManga(page: Int) | ||||
|             = fetchListing(popularMangaRequest(page), false) // TODO Dig | ||||
|     override fun fetchPopularManga(page: Int) = fetchListing(popularMangaRequest(page), false) // TODO Dig | ||||
|  | ||||
|     override fun fetchSearchManga(page: Int, query: String, filters: FilterList): Observable<MangasPage> { | ||||
|         return urlImportFetchSearchManga(query) { | ||||
| @@ -190,7 +188,7 @@ class EightMuses: HttpSource(), | ||||
|         val onLastPage = doc.selectFirst(".current:nth-last-child(2)") != null | ||||
|  | ||||
|         return MangasPage( | ||||
|                 if(dig) { | ||||
|                 if (dig) { | ||||
|                     contents.albums.flatMap { | ||||
|                         val href = it.attr("href") | ||||
|                         val splitHref = href.split('/') | ||||
| @@ -265,14 +263,14 @@ class EightMuses: HttpSource(), | ||||
|             val contents = parseSelf(response.asJsoup()) | ||||
|  | ||||
|             val out = mutableListOf<SChapter>() | ||||
|             if(contents.images.isNotEmpty()) { | ||||
|             if (contents.images.isNotEmpty()) { | ||||
|                 out += SChapter.create().apply { | ||||
|                     this.url = url | ||||
|                     this.name = if(prefix.isBlank()) ">" else prefix | ||||
|                     this.name = if (prefix.isBlank()) ">" else prefix | ||||
|                 } | ||||
|             } | ||||
|  | ||||
|             val builtPrefix = if(prefix.isBlank()) "> " else "$prefix > " | ||||
|             val builtPrefix = if (prefix.isBlank()) "> " else "$prefix > " | ||||
|  | ||||
|             out + contents.albums.flatMap { ele -> | ||||
|                 fetchAndParseChapterList(builtPrefix + ele.selectFirst(".title-text").text(), ele.attr("href")) | ||||
| @@ -281,6 +279,7 @@ class EightMuses: HttpSource(), | ||||
|     } | ||||
|  | ||||
|     data class SelfContents(val albums: List<Element>, val images: List<Element>) | ||||
|  | ||||
|     private fun parseSelf(doc: Document): SelfContents { | ||||
|         // Parse self | ||||
|         val gc = doc.select(".gallery .c-tile") | ||||
| @@ -377,7 +376,7 @@ class EightMuses: HttpSource(), | ||||
|  | ||||
|     override fun mapUrlToMangaUrl(uri: Uri): String? { | ||||
|         var path = uri.pathSegments.drop(2) | ||||
|         if(uri.pathSegments[1].toLowerCase() == "picture") { | ||||
|         if (uri.pathSegments[1].toLowerCase() == "picture") { | ||||
|             path = path.dropLast(1) | ||||
|         } | ||||
|         return "/comics/album/${path.joinToString("/")}" | ||||
|   | ||||
| @@ -74,8 +74,7 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm | ||||
|      * | ||||
|      * @param page the page number to retrieve. | ||||
|      */ | ||||
|     override fun popularMangaRequest(page: Int) | ||||
|             = GET("$baseUrl/browse/title/rank/DESC/$page", headers) | ||||
|     override fun popularMangaRequest(page: Int) = GET("$baseUrl/browse/title/rank/DESC/$page", headers) | ||||
|  | ||||
|     private fun parseListing(response: Response): MangasPage { | ||||
|         val doc = response.asJsoup() | ||||
| @@ -125,8 +124,7 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm | ||||
|      * @param query the search query. | ||||
|      * @param filters the list of filters to apply. | ||||
|      */ | ||||
|     override fun searchMangaRequest(page: Int, query: String, filters: FilterList) | ||||
|             = throw UnsupportedOperationException("Should not be called!") | ||||
|     override fun searchMangaRequest(page: Int, query: String, filters: FilterList) = throw UnsupportedOperationException("Should not be called!") | ||||
|  | ||||
|     private fun fetchSearchMangaInternal(page: Int, query: String, filters: FilterList): Observable<MangasPage> { | ||||
|         return RxJavaInterop.toV1Single(GlobalScope.async(Dispatchers.IO) { | ||||
| @@ -138,28 +136,28 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm | ||||
|             // <NS, VALUE, EXCLUDED> | ||||
|             var tagQuery: List<Triple<String, String, Boolean>>? = null | ||||
|  | ||||
|             if(sortFilter != null) { | ||||
|             if (sortFilter != null) { | ||||
|                 sortFilter.state?.let { state -> | ||||
|                     if(query.isNotBlank()) { | ||||
|                     if (query.isNotBlank()) { | ||||
|                         throw IllegalArgumentException("Cannot use sorting while text/tag search is active!") | ||||
|                     } | ||||
|  | ||||
|                     isSortFilter = true | ||||
|                     base = "/browse/title/${SortFilter.SORT_OPTIONS[state.index].first}/${if(state.ascending) "ASC" else "DESC"}" | ||||
|                     base = "/browse/title/${SortFilter.SORT_OPTIONS[state.index].first}/${if (state.ascending) "ASC" else "DESC"}" | ||||
|                 } | ||||
|             } | ||||
|  | ||||
|             if(base == null) { | ||||
|                 base = if(modeFilter != null && modeFilter.state == 1) { | ||||
|             if (base == null) { | ||||
|                 base = if (modeFilter != null && modeFilter.state == 1) { | ||||
|                     tagQuery = searchEngine.parseQuery(query, false).map { | ||||
|                         when (it) { | ||||
|                             is Text -> { | ||||
|                                 var minDist = Int.MAX_VALUE.toDouble() | ||||
|                                 // ns, value | ||||
|                                 var minContent: Pair<String, String> = "" to "" | ||||
|                                 for(ns in ALL_TAGS) { | ||||
|                                 for (ns in ALL_TAGS) { | ||||
|                                     val (v, d) = ns.value.nearest(it.rawTextOnly(), minDist) | ||||
|                                     if(d < minDist) { | ||||
|                                     if (d < minDist) { | ||||
|                                         minDist = d | ||||
|                                         minContent = ns.key to v | ||||
|                                     } | ||||
| @@ -171,7 +169,7 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm | ||||
|                                 val mappedNs = NS_MAPPINGS[it.namespace] ?: it.namespace | ||||
|  | ||||
|                                 var key = mappedNs | ||||
|                                 if(!ALL_TAGS.containsKey(key)) key = ALL_TAGS.keys.sorted().nearest(mappedNs).first | ||||
|                                 if (!ALL_TAGS.containsKey(key)) key = ALL_TAGS.keys.sorted().nearest(mappedNs).first | ||||
|  | ||||
|                                 // Find nearest NS | ||||
|                                 val nsContents = ALL_TAGS[key] | ||||
| @@ -193,18 +191,18 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm | ||||
|  | ||||
|             base += "/$page" | ||||
|  | ||||
|             if(isSortFilter) { | ||||
|             if (isSortFilter) { | ||||
|                 parseListing(client.newCall(GET(baseUrl + base, headers)) | ||||
|                         .asObservableSuccess() | ||||
|                         .toSingle() | ||||
|                         .await(Schedulers.io())) | ||||
|             } else { | ||||
|                 val body = if(tagQuery != null) { | ||||
|                 val body = if (tagQuery != null) { | ||||
|                     FormBody.Builder() | ||||
|                             .add("type", "advance") | ||||
|                             .apply { | ||||
|                                 tagQuery.forEach { | ||||
|                                     add(it.first + "_" + it.second, if(it.third) "n" else "y") | ||||
|                                     add(it.first + "_" + it.second, if (it.third) "n" else "y") | ||||
|                                 } | ||||
|                             } | ||||
|                 } else { | ||||
| @@ -222,7 +220,7 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm | ||||
|                         .toSingle() | ||||
|                         .await(Schedulers.io()) | ||||
|  | ||||
|                 if(!processResponse.isRedirect) | ||||
|                 if (!processResponse.isRedirect) | ||||
|                     throw IllegalStateException("Unexpected process response code!") | ||||
|  | ||||
|                 val sessId = processResponse.headers("Set-Cookie").find { | ||||
| @@ -258,13 +256,13 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm | ||||
|     // Collection must be sorted and cannot be sorted | ||||
|     private fun List<String>.nearest(string: String, maxDist: Double = Int.MAX_VALUE.toDouble()): Pair<String, Double> { | ||||
|         val idx = binarySearch(string) | ||||
|         return if(idx < 0) { | ||||
|         return if (idx < 0) { | ||||
|             val l = Levenshtein() | ||||
|             var minSoFar = maxDist | ||||
|             var minIndexSoFar = 0 | ||||
|             forEachIndexed { index, s -> | ||||
|                 val d = l.distance(string, s, ceil(minSoFar).toInt()) | ||||
|                 if(d < minSoFar) { | ||||
|                 if (d < minSoFar) { | ||||
|                     minSoFar = d | ||||
|                     minIndexSoFar = index | ||||
|                 } | ||||
| @@ -312,7 +310,7 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm | ||||
|  | ||||
|             tags.clear() | ||||
|             (tables[""]!! + tables["categories"]!!).forEach { (k, v) -> | ||||
|                 when(val lowercaseNs = k.toLowerCase()) { | ||||
|                 when (val lowercaseNs = k.toLowerCase()) { | ||||
|                     "title" -> title = v.text() | ||||
|                     "length" -> length = v.text().substringBefore(" ").toInt() | ||||
|                     else -> { | ||||
| @@ -376,8 +374,9 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm | ||||
|         val doc = response.asJsoup() | ||||
|         val basePath = listOf("data") + response.request.url.pathSegments | ||||
|         val scripts = doc.getElementsByTag("script").map { it.data() } | ||||
|         for(script in scripts) { | ||||
|             val totalPages = TOTAL_PAGES_REGEX.find(script)?.groupValues?.getOrNull(1)?.toIntOrNull() ?: continue | ||||
|         for (script in scripts) { | ||||
|             val totalPages = TOTAL_PAGES_REGEX.find(script)?.groupValues?.getOrNull(1)?.toIntOrNull() | ||||
|                     ?: continue | ||||
|             val pageList = PAGE_LIST_REGEX.find(script)?.groupValues?.getOrNull(1) ?: continue | ||||
|  | ||||
|             return jsonParser.parse(pageList).array.take(totalPages).map { | ||||
| @@ -956,6 +955,7 @@ class HBrowse : HttpSource(), LewdSource<HBrowseSearchMetadata, Document>, UrlIm | ||||
|         ).mapValues { it.value.sorted() } | ||||
|  | ||||
|         private val TAGS_AS_MARKDOWN = ALL_TAGS.map { (ns, values) -> | ||||
|             "#### $ns\n" + values.map { "- $it" }.joinToString("\n") }.joinToString("\n\n") | ||||
|             "#### $ns\n" + values.map { "- $it" }.joinToString("\n") | ||||
|         }.joinToString("\n\n") | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -98,7 +98,7 @@ class HentaiCafe(delegate: HttpSource) : DelegatedHttpSource(delegate), | ||||
|     override fun mapUrlToMangaUrl(uri: Uri): String? { | ||||
|         val lcFirstPathSegment = uri.pathSegments.firstOrNull()?.toLowerCase() ?: return null | ||||
|  | ||||
|         return if(lcFirstPathSegment == "manga") | ||||
|         return if (lcFirstPathSegment == "manga") | ||||
|             "https://hentai.cafe/${uri.pathSegments[2]}" | ||||
|         else | ||||
|             "https://hentai.cafe/$lcFirstPathSegment" | ||||
|   | ||||
| @@ -32,7 +32,7 @@ class Pururin(delegate: HttpSource) : DelegatedHttpSource(delegate), | ||||
|     //Support direct URL importing | ||||
|     override fun fetchSearchManga(page: Int, query: String, filters: FilterList): Observable<MangasPage> { | ||||
|         val trimmedIdQuery = query.trim().removePrefix("id:") | ||||
|         val newQuery = if(trimmedIdQuery.toIntOrNull() ?: -1 >= 0) { | ||||
|         val newQuery = if (trimmedIdQuery.toIntOrNull() ?: -1 >= 0) { | ||||
|             "$baseUrl/gallery/$trimmedIdQuery/-" | ||||
|         } else query | ||||
|  | ||||
| @@ -68,7 +68,7 @@ class Pururin(delegate: HttpSource) : DelegatedHttpSource(delegate), | ||||
|             contentWrapper.select(".table-gallery-info > tbody > tr").forEach { ele -> | ||||
|                 val key = ele.child(0).text().toLowerCase() | ||||
|                 val value = ele.child(1) | ||||
|                 when(key) { | ||||
|                 when (key) { | ||||
|                     "pages" -> { | ||||
|                         val split = value.text().split("(").trimAll().dropBlank() | ||||
|  | ||||
|   | ||||
| @@ -25,105 +25,107 @@ import java.text.SimpleDateFormat | ||||
| import java.util.* | ||||
|  | ||||
| class Tsumino(delegate: HttpSource) : DelegatedHttpSource(delegate), | ||||
| LewdSource<TsuminoSearchMetadata, Document>, UrlImportableSource { | ||||
| 	override val metaClass = TsuminoSearchMetadata::class; | ||||
| 	override val lang = "en" | ||||
|         LewdSource<TsuminoSearchMetadata, Document>, UrlImportableSource { | ||||
|     override val metaClass = TsuminoSearchMetadata::class; | ||||
|     override val lang = "en" | ||||
|  | ||||
| 	//Support direct URL importing | ||||
| 	override fun fetchSearchManga(page: Int, query: String, filters: FilterList) = | ||||
| 		urlImportFetchSearchManga(query) { | ||||
| 			super.fetchSearchManga(page, query, filters) | ||||
| 		} | ||||
| 	override fun mapUrlToMangaUrl(uri: Uri): String? { | ||||
| 		val lcFirstPathSegment = uri.pathSegments.firstOrNull()?.toLowerCase() ?: return null | ||||
| 		if(lcFirstPathSegment != "read" && lcFirstPathSegment != "book" && lcFirstPathSegment != "entry") | ||||
| 			return null | ||||
| 		return "https://tsumino.com/Book/Info/${uri.lastPathSegment}" | ||||
| 	} | ||||
|     //Support direct URL importing | ||||
|     override fun fetchSearchManga(page: Int, query: String, filters: FilterList) = | ||||
|             urlImportFetchSearchManga(query) { | ||||
|                 super.fetchSearchManga(page, query, filters) | ||||
|             } | ||||
|  | ||||
| 	override fun fetchMangaDetails(manga: SManga): Observable<SManga> { | ||||
| 		return client.newCall(mangaDetailsRequest(manga)) | ||||
| 			.asObservableSuccess() | ||||
| 			.flatMap { | ||||
| 				parseToManga(manga, it.asJsoup()).andThen(Observable.just(manga)) | ||||
| 			} | ||||
| 	} | ||||
|     override fun mapUrlToMangaUrl(uri: Uri): String? { | ||||
|         val lcFirstPathSegment = uri.pathSegments.firstOrNull()?.toLowerCase() ?: return null | ||||
|         if (lcFirstPathSegment != "read" && lcFirstPathSegment != "book" && lcFirstPathSegment != "entry") | ||||
|             return null | ||||
|         return "https://tsumino.com/Book/Info/${uri.lastPathSegment}" | ||||
|     } | ||||
|  | ||||
| 	override fun parseIntoMetadata(metadata: TsuminoSearchMetadata, input: Document) { | ||||
| 		with(metadata) { | ||||
| 			tmId = TsuminoSearchMetadata.tmIdFromUrl(input.location()).toInt() | ||||
| 			tags.clear() | ||||
|     override fun fetchMangaDetails(manga: SManga): Observable<SManga> { | ||||
|         return client.newCall(mangaDetailsRequest(manga)) | ||||
|                 .asObservableSuccess() | ||||
|                 .flatMap { | ||||
|                     parseToManga(manga, it.asJsoup()).andThen(Observable.just(manga)) | ||||
|                 } | ||||
|     } | ||||
|  | ||||
| 			input.getElementById("Title")?.text()?.let { | ||||
| 				title = it.trim() | ||||
| 			} | ||||
|     override fun parseIntoMetadata(metadata: TsuminoSearchMetadata, input: Document) { | ||||
|         with(metadata) { | ||||
|             tmId = TsuminoSearchMetadata.tmIdFromUrl(input.location()).toInt() | ||||
|             tags.clear() | ||||
|  | ||||
| 			input.getElementById("Artist")?.children()?.first()?.text()?.trim()?.let { | ||||
| 				tags.add(RaisedTag("artist", it, TAG_TYPE_VIRTUAL)) | ||||
| 				artist = it | ||||
| 			} | ||||
|             input.getElementById("Title")?.text()?.let { | ||||
|                 title = it.trim() | ||||
|             } | ||||
|  | ||||
| 			input.getElementById("Uploader")?.children()?.first()?.text()?.trim()?.let { | ||||
| 				uploader = it | ||||
| 			} | ||||
|             input.getElementById("Artist")?.children()?.first()?.text()?.trim()?.let { | ||||
|                 tags.add(RaisedTag("artist", it, TAG_TYPE_VIRTUAL)) | ||||
|                 artist = it | ||||
|             } | ||||
|  | ||||
| 			input.getElementById("Uploaded")?.text()?.let { | ||||
| 				uploadDate = TM_DATE_FORMAT.parse(it.trim()).time | ||||
| 			} | ||||
|             input.getElementById("Uploader")?.children()?.first()?.text()?.trim()?.let { | ||||
|                 uploader = it | ||||
|             } | ||||
|  | ||||
| 			input.getElementById("Pages")?.text()?.let { | ||||
| 				length = it.trim().toIntOrNull() | ||||
| 			} | ||||
|             input.getElementById("Uploaded")?.text()?.let { | ||||
|                 uploadDate = TM_DATE_FORMAT.parse(it.trim()).time | ||||
|             } | ||||
|  | ||||
| 			input.getElementById("Rating")?.text()?.let { | ||||
| 				ratingString = it.trim() | ||||
| 			} | ||||
|             input.getElementById("Pages")?.text()?.let { | ||||
|                 length = it.trim().toIntOrNull() | ||||
|             } | ||||
|  | ||||
| 			input.getElementById("Category")?.children()?.first()?.text()?.let { | ||||
| 				category = it.trim() | ||||
| 				tags.add(RaisedTag("genre", it, TAG_TYPE_VIRTUAL)) | ||||
| 			} | ||||
|             input.getElementById("Rating")?.text()?.let { | ||||
|                 ratingString = it.trim() | ||||
|             } | ||||
|  | ||||
| 			input.getElementById("Collection")?.children()?.first()?.text()?.let { | ||||
| 				collection = it.trim() | ||||
| 			} | ||||
|             input.getElementById("Category")?.children()?.first()?.text()?.let { | ||||
|                 category = it.trim() | ||||
|                 tags.add(RaisedTag("genre", it, TAG_TYPE_VIRTUAL)) | ||||
|             } | ||||
|  | ||||
| 			input.getElementById("Group")?.children()?.first()?.text()?.let { | ||||
| 				group = it.trim() | ||||
| 				tags.add(RaisedTag("group", it, TAG_TYPE_VIRTUAL)) | ||||
| 			} | ||||
|             input.getElementById("Collection")?.children()?.first()?.text()?.let { | ||||
|                 collection = it.trim() | ||||
|             } | ||||
|  | ||||
| 			val newParody = mutableListOf<String>() | ||||
| 			input.getElementById("Parody")?.children()?.forEach { | ||||
| 				val entry = it.text().trim() | ||||
| 				newParody.add(entry) | ||||
| 				tags.add(RaisedTag("parody", entry, TAG_TYPE_VIRTUAL)) | ||||
| 			} | ||||
| 			parody = newParody | ||||
|             input.getElementById("Group")?.children()?.first()?.text()?.let { | ||||
|                 group = it.trim() | ||||
|                 tags.add(RaisedTag("group", it, TAG_TYPE_VIRTUAL)) | ||||
|             } | ||||
|  | ||||
| 			val newCharacter = mutableListOf<String>() | ||||
| 			input.getElementById("Character")?.children()?.forEach { | ||||
| 				val entry = it.text().trim() | ||||
| 				newCharacter.add(entry) | ||||
| 				tags.add(RaisedTag("character", entry, TAG_TYPE_VIRTUAL)) | ||||
| 			} | ||||
| 			character = newCharacter | ||||
|             val newParody = mutableListOf<String>() | ||||
|             input.getElementById("Parody")?.children()?.forEach { | ||||
|                 val entry = it.text().trim() | ||||
|                 newParody.add(entry) | ||||
|                 tags.add(RaisedTag("parody", entry, TAG_TYPE_VIRTUAL)) | ||||
|             } | ||||
|             parody = newParody | ||||
|  | ||||
| 			input.getElementById("Tag")?.children()?.let { | ||||
| 				tags.addAll(it.map { | ||||
| 					RaisedTag(null, it.text().trim(), TAG_TYPE_DEFAULT) | ||||
| 				}) | ||||
| 			} | ||||
| 		} | ||||
| 	} | ||||
| 	override val matchingHosts = listOf( | ||||
| 		"www.tsumino.com", | ||||
| 		"tsumino.com" | ||||
| 	) | ||||
|             val newCharacter = mutableListOf<String>() | ||||
|             input.getElementById("Character")?.children()?.forEach { | ||||
|                 val entry = it.text().trim() | ||||
|                 newCharacter.add(entry) | ||||
|                 tags.add(RaisedTag("character", entry, TAG_TYPE_VIRTUAL)) | ||||
|             } | ||||
|             character = newCharacter | ||||
|  | ||||
| 	companion object { | ||||
| 		val jsonParser by lazy {JsonParser()} | ||||
| 		val TM_DATE_FORMAT = SimpleDateFormat("yyyy MMM dd", Locale.US) | ||||
| 		private val ASP_NET_COOKIE_NAME = "ASP.NET_SessionId" | ||||
| 	} | ||||
|             input.getElementById("Tag")?.children()?.let { | ||||
|                 tags.addAll(it.map { | ||||
|                     RaisedTag(null, it.text().trim(), TAG_TYPE_DEFAULT) | ||||
|                 }) | ||||
|             } | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     override val matchingHosts = listOf( | ||||
|             "www.tsumino.com", | ||||
|             "tsumino.com" | ||||
|     ) | ||||
|  | ||||
|     companion object { | ||||
|         val jsonParser by lazy { JsonParser() } | ||||
|         val TM_DATE_FORMAT = SimpleDateFormat("yyyy MMM dd", Locale.US) | ||||
|         private val ASP_NET_COOKIE_NAME = "ASP.NET_SessionId" | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -19,13 +19,13 @@ class ExtensionFilterController : SettingsController() { | ||||
|         val activeLangs = preferences.enabledLanguages().get() | ||||
|  | ||||
|         val availableLangs = | ||||
|             Injekt.get<ExtensionManager>().availableExtensions.groupBy { | ||||
|                 it.lang | ||||
|             }.keys.minus("all").partition { | ||||
|                 it in activeLangs | ||||
|             }.let { | ||||
|                 it.first + it.second | ||||
|             } | ||||
|                 Injekt.get<ExtensionManager>().availableExtensions.groupBy { | ||||
|                     it.lang | ||||
|                 }.keys.minus("all").partition { | ||||
|                     it in activeLangs | ||||
|                 }.let { | ||||
|                     it.first + it.second | ||||
|                 } | ||||
|  | ||||
|         availableLangs.forEach { | ||||
|             switchPreference { | ||||
|   | ||||
| @@ -427,14 +427,14 @@ open class BrowseSourcePresenter( | ||||
|         return loaded.map { | ||||
|             try { | ||||
|                 val id = it.substringBefore(':').toLong() | ||||
|                 if(id != source.id) return@map null | ||||
|                 if (id != source.id) return@map null | ||||
|                 val content = jsonParser.parse(it.substringAfter(':')).obj | ||||
|                 val originalFilters = source.getFilterList() | ||||
|                 filterSerializer.deserialize(originalFilters, content["filters"].array) | ||||
|                 EXHSavedSearch(content["name"].string, | ||||
|                         content["query"].string, | ||||
|                         originalFilters) | ||||
|             } catch(t: RuntimeException) { | ||||
|             } catch (t: RuntimeException) { | ||||
|                 // Load failed | ||||
|                 Timber.e(t, "Failed to load saved search!") | ||||
|                 t.printStackTrace() | ||||
|   | ||||
| @@ -36,7 +36,9 @@ class LibraryCategoryAdapter(view: LibraryCategoryView) : | ||||
|     // Keep compatibility as searchText field was replaced when we upgraded FlexibleAdapter | ||||
|     var searchText | ||||
|         get() = getFilter(String::class.java) ?: "" | ||||
|         set(value) { setFilter(value) } | ||||
|         set(value) { | ||||
|             setFilter(value) | ||||
|         } | ||||
|     // EXH <-- | ||||
|  | ||||
|     /** | ||||
| @@ -73,7 +75,7 @@ class LibraryCategoryAdapter(view: LibraryCategoryView) : | ||||
|     //    we want to perform a no-op filter) | ||||
|     suspend fun performFilter(cScope: CoroutineScope) { | ||||
|         lastFilterJob?.cancel() | ||||
|         if(mangas.isNotEmpty() && searchText.isNotBlank()) { | ||||
|         if (mangas.isNotEmpty() && searchText.isNotBlank()) { | ||||
|             val savedSearchText = searchText | ||||
|  | ||||
|             val job = cScope.launch(Dispatchers.IO) { | ||||
| @@ -90,7 +92,7 @@ class LibraryCategoryAdapter(view: LibraryCategoryView) : | ||||
|  | ||||
|                     val mangaWithMetaIdsQuery = db.getIdsOfFavoriteMangaWithMetadata().await() | ||||
|                     val mangaWithMetaIds = LongArray(mangaWithMetaIdsQuery.count) | ||||
|                     if(mangaWithMetaIds.isNotEmpty()) { | ||||
|                     if (mangaWithMetaIds.isNotEmpty()) { | ||||
|                         val mangaIdCol = mangaWithMetaIdsQuery.getColumnIndex(MangaTable.COL_ID) | ||||
|                         mangaWithMetaIdsQuery.moveToFirst() | ||||
|                         while (!mangaWithMetaIdsQuery.isAfterLast) { | ||||
| @@ -104,7 +106,7 @@ class LibraryCategoryAdapter(view: LibraryCategoryView) : | ||||
|                     ensureActive() // Fail early when cancelled | ||||
|  | ||||
|                     val convertedResult = LongArray(queryResult.count) | ||||
|                     if(convertedResult.isNotEmpty()) { | ||||
|                     if (convertedResult.isNotEmpty()) { | ||||
|                         val mangaIdCol = queryResult.getColumnIndex(SearchMetadataTable.COL_MANGA_ID) | ||||
|                         queryResult.moveToFirst() | ||||
|                         while (!queryResult.isAfterLast) { | ||||
| @@ -119,11 +121,11 @@ class LibraryCategoryAdapter(view: LibraryCategoryView) : | ||||
|  | ||||
|                     // Flow the mangas to allow cancellation of this filter operation | ||||
|                     mangas.asFlow().cancellable().filter { item -> | ||||
|                         if(isLewdSource(item.manga.source)) { | ||||
|                         if (isLewdSource(item.manga.source)) { | ||||
|                             val mangaId = item.manga.id ?: -1 | ||||
|                             if(convertedResult.binarySearch(mangaId) < 0) { | ||||
|                             if (convertedResult.binarySearch(mangaId) < 0) { | ||||
|                                 // Check if this manga even has metadata | ||||
|                                 if(mangaWithMetaIds.binarySearch(mangaId) < 0) { | ||||
|                                 if (mangaWithMetaIds.binarySearch(mangaId) < 0) { | ||||
|                                     // No meta? Filter using title | ||||
|                                     item.filter(savedSearchText) | ||||
|                                 } else false | ||||
| @@ -134,7 +136,7 @@ class LibraryCategoryAdapter(view: LibraryCategoryView) : | ||||
|                     }.toList() | ||||
|                 } catch (e: Exception) { | ||||
|                     // Do not catch cancellations | ||||
|                     if(e is CancellationException) throw e | ||||
|                     if (e is CancellationException) throw e | ||||
|  | ||||
|                     Timber.w(e, "Could not filter mangas!") | ||||
|                     mangas | ||||
|   | ||||
| @@ -182,10 +182,10 @@ class MangaInfoPresenter( | ||||
|     suspend fun smartSearchMerge(manga: Manga, originalMangaId: Long): Manga { | ||||
|         val originalManga = db.getManga(originalMangaId).await() | ||||
|                 ?: throw IllegalArgumentException("Unknown manga ID: $originalMangaId") | ||||
|         val toInsert = if(originalManga.source == MERGED_SOURCE_ID) { | ||||
|         val toInsert = if (originalManga.source == MERGED_SOURCE_ID) { | ||||
|             originalManga.apply { | ||||
|                 val originalChildren = MergedSource.MangaConfig.readFromUrl(gson, url).children | ||||
|                 if(originalChildren.any { it.source == manga.source && it.url == manga.url }) | ||||
|                 if (originalChildren.any { it.source == manga.source && it.url == manga.url }) | ||||
|                     throw IllegalArgumentException("This manga is already merged with the current manga!") | ||||
|  | ||||
|                 url = MergedSource.MangaConfig(originalChildren + MergedSource.MangaSource( | ||||
| @@ -216,9 +216,9 @@ class MangaInfoPresenter( | ||||
|  | ||||
|         // Note that if the manga are merged in a different order, this won't trigger, but I don't care lol | ||||
|         val existingManga = db.getManga(toInsert.url, toInsert.source).await() | ||||
|         if(existingManga != null) { | ||||
|         if (existingManga != null) { | ||||
|             withContext(NonCancellable) { | ||||
|                 if(toInsert.id != null) { | ||||
|                 if (toInsert.id != null) { | ||||
|                     db.deleteManga(toInsert).await() | ||||
|                 } | ||||
|             } | ||||
| @@ -230,7 +230,7 @@ class MangaInfoPresenter( | ||||
|         toInsert.initialized = false | ||||
|  | ||||
|         val newId = db.insertManga(toInsert).await().insertedId() | ||||
|         if(newId != null) toInsert.id = newId | ||||
|         if (newId != null) toInsert.id = newId | ||||
|  | ||||
|         return toInsert | ||||
|     } | ||||
|   | ||||
| @@ -37,12 +37,12 @@ class SaveImageNotifier(private val context: Context) { | ||||
|      */ | ||||
|     fun onComplete(file: File) { | ||||
|         val bitmap = GlideApp.with(context) | ||||
|             .asBitmap() | ||||
|             .load(file) | ||||
|             .diskCacheStrategy(DiskCacheStrategy.NONE) | ||||
|             .skipMemoryCache(true) | ||||
|             .submit(720, 1280) | ||||
|             .get() | ||||
|                 .asBitmap() | ||||
|                 .load(file) | ||||
|                 .diskCacheStrategy(DiskCacheStrategy.NONE) | ||||
|                 .skipMemoryCache(true) | ||||
|                 .submit(720, 1280) | ||||
|                 .get() | ||||
|  | ||||
|         if (bitmap != null) { | ||||
|             showCompleteNotification(file, bitmap) | ||||
|   | ||||
| @@ -19,16 +19,16 @@ class DirectoryPageLoader(val file: File) : PageLoader() { | ||||
|      */ | ||||
|     override fun getPages(): Observable<List<ReaderPage>> { | ||||
|         return file.listFiles() | ||||
|             .filter { !it.isDirectory && ImageUtil.isImage(it.name) { FileInputStream(it) } } | ||||
|             .sortedWith(Comparator<File> { f1, f2 -> f1.name.compareToCaseInsensitiveNaturalOrder(f2.name) }) | ||||
|             .mapIndexed { i, file -> | ||||
|                 val streamFn = { FileInputStream(file) } | ||||
|                 ReaderPage(i).apply { | ||||
|                     stream = streamFn | ||||
|                     status = Page.READY | ||||
|                 .filter { !it.isDirectory && ImageUtil.isImage(it.name) { FileInputStream(it) } } | ||||
|                 .sortedWith(Comparator<File> { f1, f2 -> f1.name.compareToCaseInsensitiveNaturalOrder(f2.name) }) | ||||
|                 .mapIndexed { i, file -> | ||||
|                     val streamFn = { FileInputStream(file) } | ||||
|                     ReaderPage(i).apply { | ||||
|                         stream = streamFn | ||||
|                         status = Page.READY | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|             .let { Observable.just(it) } | ||||
|                 .let { Observable.just(it) } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|   | ||||
| @@ -30,14 +30,14 @@ class EpubPageLoader(file: File) : PageLoader() { | ||||
|      */ | ||||
|     override fun getPages(): Observable<List<ReaderPage>> { | ||||
|         return epub.getImagesFromPages() | ||||
|             .mapIndexed { i, path -> | ||||
|                 val streamFn = { epub.getInputStream(epub.getEntry(path)!!) } | ||||
|                 ReaderPage(i).apply { | ||||
|                     stream = streamFn | ||||
|                     status = Page.READY | ||||
|                 .mapIndexed { i, path -> | ||||
|                     val streamFn = { epub.getInputStream(epub.getEntry(path)!!) } | ||||
|                     ReaderPage(i).apply { | ||||
|                         stream = streamFn | ||||
|                         status = Page.READY | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|             .let { Observable.just(it) } | ||||
|                 .let { Observable.just(it) } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|   | ||||
| @@ -43,17 +43,17 @@ class RarPageLoader(file: File) : PageLoader() { | ||||
|      */ | ||||
|     override fun getPages(): Observable<List<ReaderPage>> { | ||||
|         return archive.fileHeaders | ||||
|             .filter { !it.isDirectory && ImageUtil.isImage(it.fileNameString) { archive.getInputStream(it) } } | ||||
|             .sortedWith(Comparator<FileHeader> { f1, f2 -> f1.fileNameString.compareToCaseInsensitiveNaturalOrder(f2.fileNameString) }) | ||||
|             .mapIndexed { i, header -> | ||||
|                 val streamFn = { getStream(header) } | ||||
|                 .filter { !it.isDirectory && ImageUtil.isImage(it.fileNameString) { archive.getInputStream(it) } } | ||||
|                 .sortedWith(Comparator<FileHeader> { f1, f2 -> f1.fileNameString.compareToCaseInsensitiveNaturalOrder(f2.fileNameString) }) | ||||
|                 .mapIndexed { i, header -> | ||||
|                     val streamFn = { getStream(header) } | ||||
|  | ||||
|                 ReaderPage(i).apply { | ||||
|                     stream = streamFn | ||||
|                     status = Page.READY | ||||
|                     ReaderPage(i).apply { | ||||
|                         stream = streamFn | ||||
|                         status = Page.READY | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|             .let { Observable.just(it) } | ||||
|                 .let { Observable.just(it) } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|   | ||||
| @@ -33,16 +33,16 @@ class ZipPageLoader(file: File) : PageLoader() { | ||||
|      */ | ||||
|     override fun getPages(): Observable<List<ReaderPage>> { | ||||
|         return zip.entries().toList() | ||||
|             .filter { !it.isDirectory && ImageUtil.isImage(it.name) { zip.getInputStream(it) } } | ||||
|             .sortedWith(Comparator<ZipEntry> { f1, f2 -> f1.name.compareToCaseInsensitiveNaturalOrder(f2.name) }) | ||||
|             .mapIndexed { i, entry -> | ||||
|                 val streamFn = { zip.getInputStream(entry) } | ||||
|                 ReaderPage(i).apply { | ||||
|                     stream = streamFn | ||||
|                     status = Page.READY | ||||
|                 .filter { !it.isDirectory && ImageUtil.isImage(it.name) { zip.getInputStream(it) } } | ||||
|                 .sortedWith(Comparator<ZipEntry> { f1, f2 -> f1.name.compareToCaseInsensitiveNaturalOrder(f2.name) }) | ||||
|                 .mapIndexed { i, entry -> | ||||
|                     val streamFn = { zip.getInputStream(entry) } | ||||
|                     ReaderPage(i).apply { | ||||
|                         stream = streamFn | ||||
|                         status = Page.READY | ||||
|                     } | ||||
|                 } | ||||
|             } | ||||
|             .let { Observable.just(it) } | ||||
|                 .let { Observable.just(it) } | ||||
|     } | ||||
|  | ||||
|     /** | ||||
|   | ||||
| @@ -146,13 +146,13 @@ open class WebtoonRecyclerView @JvmOverloads constructor( | ||||
|         } | ||||
|  | ||||
|         animate() | ||||
|             .apply { | ||||
|                 newX?.let { x(it) } | ||||
|                 newY?.let { y(it) } | ||||
|             } | ||||
|             .setInterpolator(DecelerateInterpolator()) | ||||
|             .setDuration(400) | ||||
|             .start() | ||||
|                 .apply { | ||||
|                     newX?.let { x(it) } | ||||
|                     newY?.let { y(it) } | ||||
|                 } | ||||
|                 .setInterpolator(DecelerateInterpolator()) | ||||
|                 .setDuration(400) | ||||
|                 .start() | ||||
|  | ||||
|         return true | ||||
|     } | ||||
|   | ||||
| @@ -85,7 +85,7 @@ class SettingsEhController : SettingsController() { | ||||
|  | ||||
|             onChange { newVal -> | ||||
|                 newVal as Boolean | ||||
|                 if(!newVal) { | ||||
|                 if (!newVal) { | ||||
|                     preferences.enableExhentai().set(false) | ||||
|                     true | ||||
|                 } else { | ||||
| @@ -226,7 +226,7 @@ class SettingsEhController : SettingsController() { | ||||
|                     defaultValue = "0" | ||||
|  | ||||
|                     preferences.eh_autoUpdateFrequency().asObservable().subscribeUntilDestroy { newVal -> | ||||
|                         summary = if(newVal == 0) { | ||||
|                         summary = if (newVal == 0) { | ||||
|                             "${context.getString(R.string.app_name)} will currently never check galleries in your library for updates." | ||||
|                         } else { | ||||
|                             "${context.getString(R.string.app_name)} checks/updates galleries in batches. " + | ||||
|   | ||||
| @@ -124,9 +124,9 @@ fun syncChaptersWithSource( | ||||
|             } | ||||
|  | ||||
|             // --> EXH (carry over reading progress) | ||||
|             if(manga.source == EH_SOURCE_ID || manga.source == EXH_SOURCE_ID) { | ||||
|             if (manga.source == EH_SOURCE_ID || manga.source == EXH_SOURCE_ID) { | ||||
|                 val finalAdded = toAdd.subtract(readded) | ||||
|                 if(finalAdded.isNotEmpty()) { | ||||
|                 if (finalAdded.isNotEmpty()) { | ||||
|                     val max = dbChapters.maxBy { it.last_page_read } | ||||
|                     if (max != null && max.last_page_read > 0) { | ||||
|                         for (chapter in finalAdded) { | ||||
|   | ||||
| @@ -139,8 +139,8 @@ class EpubFile(file: File) : Closeable { | ||||
|      */ | ||||
|     private fun getPagesFromDocument(document: Document): List<String> { | ||||
|         val pages = document.select("manifest > item") | ||||
|             .filter { "application/xhtml+xml" == it.attr("media-type") } | ||||
|             .associateBy { it.attr("id") } | ||||
|                 .filter { "application/xhtml+xml" == it.attr("media-type") } | ||||
|                 .associateBy { it.attr("id") } | ||||
|  | ||||
|         val spine = document.select("spine > itemref").map { it.attr("idref") } | ||||
|         return spine.mapNotNull { pages[it] }.map { it.attr("href") } | ||||
|   | ||||