Remove getNextPageUrl() function from ListExtractor

wb9688 2020-04-10 10:25:53 +02:00
parent 54d9e5a2f8
commit e3bfdba135
29 changed files with 270 additions and 488 deletions
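
For orientation, the pattern this commit moves ListExtractor towards is that the next-page URL no longer comes from a separate abstract method on the extractor but travels with each returned page. Below is a minimal Java sketch of that shape, using simplified stand-in types rather than the real org.schabi.newpipe.extractor classes:

import java.util.List;

// Simplified stand-ins for ListExtractor and InfoItemsPage; the real classes
// live in org.schabi.newpipe.extractor and carry more behaviour.
abstract class SimpleListExtractor<R> {

    // A page of results that already knows the URL of the page after it.
    static final class InfoItemsPage<T> {
        private final List<T> items;
        private final String nextPageUrl;

        InfoItemsPage(final List<T> items, final String nextPageUrl) {
            this.items = items;
            this.nextPageUrl = nextPageUrl;
        }

        List<T> getItems() {
            return items;
        }

        String getNextPageUrl() {
            return nextPageUrl;
        }

        boolean hasNextPage() {
            return nextPageUrl != null && !nextPageUrl.isEmpty();
        }
    }

    // The next-page URL is no longer a separate abstract method on the
    // extractor; it is read from the page returned here or by getPage().
    abstract InfoItemsPage<R> getInitialPage() throws Exception;

    abstract InfoItemsPage<R> getPage(String pageUrl) throws Exception;
}

The PeerTube extractors below, for instance, now build their first-page URL inside getInitialPage() and delegate to getPage() instead of pre-fetching and caching an initial page in onFetchPage().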


@ -16,7 +16,6 @@ import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
* Base class to extractors that have a list (e.g. playlists, users).
*/
public abstract class ListExtractor<R extends InfoItem> extends Extractor {
/**
* Constant that should be returned whenever
* a list has an unknown number of items.
@ -46,29 +45,15 @@ public abstract class ListExtractor<R extends InfoItem> extends Extractor {
@Nonnull
public abstract InfoItemsPage<R> getInitialPage() throws IOException, ExtractionException;
/**
* Returns an url that can be used to get the next page relative to the initial one.
* <p>Usually, these links will only work in the implementation itself.</p>
*
* @return an url pointing to the next page relative to the initial page
* @see #getPage(String)
*/
public abstract String getNextPageUrl() throws IOException, ExtractionException;
/**
* Get a list of items corresponding to the specific requested page.
*
* @param pageUrl any page url got from the exclusive implementation of the list extractor
* @return a {@link InfoItemsPage} corresponding to the requested page
* @see #getNextPageUrl()
* @see InfoItemsPage#getNextPageUrl()
*/
public abstract InfoItemsPage<R> getPage(final String pageUrl) throws IOException, ExtractionException;
public boolean hasNextPage() throws IOException, ExtractionException {
return !isNullOrEmpty(getNextPageUrl());
}
@Override
public ListLinkHandler getLinkHandler() {
return (ListLinkHandler) super.getLinkHandler();
@ -140,5 +125,4 @@ public abstract class ListExtractor<R extends InfoItem> extends Extractor {
return errors;
}
}
}
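
With getNextPageUrl() and hasNextPage() removed from the extractor, callers read the follow-up URL off each page they receive; the removed Javadoc above already points at InfoItemsPage#getNextPageUrl() for this. A sketch of such a pagination loop, reusing the stand-in types from the sketch above (the real InfoItemsPage exposes similar accessors):

import java.util.ArrayList;
import java.util.List;

// Walks every page of a list extractor using the stand-in types sketched
// earlier; this is an illustration of the pattern, not code from the repository.
final class PageWalker {
    static <R> List<R> collectAllItems(final SimpleListExtractor<R> extractor) throws Exception {
        SimpleListExtractor.InfoItemsPage<R> page = extractor.getInitialPage();
        final List<R> all = new ArrayList<>(page.getItems());
        while (page.hasNextPage()) {
            // Each page carries the URL of its successor, so no separate
            // getNextPageUrl() call on the extractor is needed.
            page = extractor.getPage(page.getNextPageUrl());
            all.addAll(page.getItems());
        }
        return all;
    }
}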


@ -78,11 +78,6 @@ public class MediaCCCConferenceExtractor extends ChannelExtractor {
return new InfoItemsPage<>(collector, null);
}
@Override
public String getNextPageUrl() {
return null;
}
@Override
public InfoItemsPage<StreamInfoItem> getPage(final String pageUrl) {
return null;


@ -40,11 +40,6 @@ public class MediaCCCConferenceKiosk extends KioskExtractor<ChannelInfoItem> {
return new InfoItemsPage<>(collector, "");
}
@Override
public String getNextPageUrl() {
return "";
}
@Override
public InfoItemsPage<ChannelInfoItem> getPage(final String pageUrl) {
return InfoItemsPage.emptyPage();


@ -79,11 +79,6 @@ public class MediaCCCSearchExtractor extends SearchExtractor {
return new InfoItemsPage<>(searchItems, null);
}
@Override
public String getNextPageUrl() {
return "";
}
@Override
public InfoItemsPage<InfoItem> getPage(final String pageUrl) {
return InfoItemsPage.emptyPage();


@ -20,23 +20,21 @@ import org.schabi.newpipe.extractor.utils.Utils;
import java.io.IOException;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.*;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
public class PeertubeAccountExtractor extends ChannelExtractor {
private InfoItemsPage<StreamInfoItem> initPage;
private long total;
private JsonObject json;
private final String baseUrl;
public PeertubeAccountExtractor(StreamingService service, ListLinkHandler linkHandler) throws ParsingException {
public PeertubeAccountExtractor(final StreamingService service, final ListLinkHandler linkHandler) throws ParsingException {
super(service, linkHandler);
this.baseUrl = getBaseUrl();
}
@Override
public String getAvatarUrl() throws ParsingException {
public String getAvatarUrl() {
String value;
try {
value = JsonUtils.getString(json, "avatar.path");
@ -47,7 +45,7 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
}
@Override
public String getBannerUrl() throws ParsingException {
public String getBannerUrl() {
return null;
}
@ -58,12 +56,12 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
@Override
public long getSubscriberCount() throws ParsingException {
Number number = JsonUtils.getNumber(json, "followersCount");
final Number number = JsonUtils.getNumber(json, "followersCount");
return number.longValue();
}
@Override
public String getDescription() throws ParsingException {
public String getDescription() {
try {
return JsonUtils.getString(json, "description");
} catch (ParsingException e) {
@ -72,53 +70,46 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
}
@Override
public String getParentChannelName() throws ParsingException {
public String getParentChannelName() {
return "";
}
@Override
public String getParentChannelUrl() throws ParsingException {
public String getParentChannelUrl() {
return "";
}
@Override
public String getParentChannelAvatarUrl() throws ParsingException {
public String getParentChannelAvatarUrl() {
return "";
}
@Override
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
super.fetchPage();
return initPage;
final String pageUrl = getUrl() + "/videos?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
return getPage(pageUrl);
}
private void collectStreamsFrom(StreamInfoItemsCollector collector, JsonObject json, String pageUrl) throws ParsingException {
JsonArray contents;
private void collectStreamsFrom(StreamInfoItemsCollector collector, JsonObject json) throws ParsingException {
final JsonArray contents;
try {
contents = (JsonArray) JsonUtils.getValue(json, "data");
} catch (Exception e) {
throw new ParsingException("unable to extract channel streams", e);
}
for (Object c : contents) {
for (final Object c : contents) {
if (c instanceof JsonObject) {
final JsonObject item = (JsonObject) c;
PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
final PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
collector.commit(extractor);
}
}
}
@Override
public String getNextPageUrl() throws IOException, ExtractionException {
super.fetchPage();
return initPage.getNextPageUrl();
}
@Override
public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
Response response = getDownloader().get(pageUrl);
public InfoItemsPage<StreamInfoItem> getPage(final String pageUrl) throws IOException, ExtractionException {
final Response response = getDownloader().get(pageUrl);
JsonObject json = null;
if (response != null && !Utils.isBlank(response.responseBody())) {
try {
@ -128,11 +119,12 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
}
}
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
final long total;
if (json != null) {
PeertubeParsingHelper.validate(json);
total = JsonUtils.getNumber(json, "total").longValue();
collectStreamsFrom(collector, json, pageUrl);
collectStreamsFrom(collector, json);
} else {
throw new ExtractionException("Unable to get PeerTube kiosk info");
}
@ -140,19 +132,16 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
}
@Override
public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
Response response = downloader.get(getUrl());
if (null != response && null != response.responseBody()) {
public void onFetchPage(final Downloader downloader) throws IOException, ExtractionException {
final Response response = downloader.get(getUrl());
if (response != null && response.responseBody() != null) {
setInitialData(response.responseBody());
} else {
throw new ExtractionException("Unable to extract PeerTube channel data");
}
String pageUrl = getUrl() + "/videos?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
this.initPage = getPage(pageUrl);
}
private void setInitialData(String responseBody) throws ExtractionException {
private void setInitialData(final String responseBody) throws ExtractionException {
try {
json = JsonParser.object().from(responseBody);
} catch (JsonParserException e) {
@ -170,5 +159,4 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
public String getOriginalUrl() throws ParsingException {
return baseUrl + "/" + getId();
}
}


@ -16,29 +16,25 @@ import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
import org.schabi.newpipe.extractor.utils.JsonUtils;
import org.schabi.newpipe.extractor.utils.Parser;
import org.schabi.newpipe.extractor.utils.Parser.RegexException;
import org.schabi.newpipe.extractor.utils.Utils;
import java.io.IOException;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.*;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
public class PeertubeChannelExtractor extends ChannelExtractor {
private InfoItemsPage<StreamInfoItem> initPage;
private long total;
private JsonObject json;
private final String baseUrl;
public PeertubeChannelExtractor(StreamingService service, ListLinkHandler linkHandler) throws ParsingException {
public PeertubeChannelExtractor(final StreamingService service, final ListLinkHandler linkHandler) throws ParsingException {
super(service, linkHandler);
this.baseUrl = getBaseUrl();
}
@Override
public String getAvatarUrl() throws ParsingException {
public String getAvatarUrl() {
String value;
try {
value = JsonUtils.getString(json, "avatar.path");
@ -49,7 +45,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
}
@Override
public String getBannerUrl() throws ParsingException {
public String getBannerUrl() {
return null;
}
@ -60,12 +56,12 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
@Override
public long getSubscriberCount() throws ParsingException {
Number number = JsonUtils.getNumber(json, "followersCount");
final Number number = JsonUtils.getNumber(json, "followersCount");
return number.longValue();
}
@Override
public String getDescription() throws ParsingException {
public String getDescription() {
try {
return JsonUtils.getString(json, "description");
} catch (ParsingException e) {
@ -84,7 +80,7 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
}
@Override
public String getParentChannelAvatarUrl() throws ParsingException {
public String getParentChannelAvatarUrl() {
String value;
try {
value = JsonUtils.getString(json, "ownerAccount.avatar.path");
@ -96,37 +92,30 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
@Override
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
super.fetchPage();
return initPage;
final String pageUrl = getUrl() + "/videos?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
return getPage(pageUrl);
}
private void collectStreamsFrom(StreamInfoItemsCollector collector, JsonObject json, String pageUrl) throws ParsingException {
JsonArray contents;
private void collectStreamsFrom(final StreamInfoItemsCollector collector, final JsonObject json) throws ParsingException {
final JsonArray contents;
try {
contents = (JsonArray) JsonUtils.getValue(json, "data");
} catch (Exception e) {
throw new ParsingException("unable to extract channel streams", e);
}
for (Object c : contents) {
for (final Object c : contents) {
if (c instanceof JsonObject) {
final JsonObject item = (JsonObject) c;
PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
final PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
collector.commit(extractor);
}
}
}
@Override
public String getNextPageUrl() throws IOException, ExtractionException {
super.fetchPage();
return initPage.getNextPageUrl();
}
@Override
public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
Response response = getDownloader().get(pageUrl);
public InfoItemsPage<StreamInfoItem> getPage(final String pageUrl) throws IOException, ExtractionException {
final Response response = getDownloader().get(pageUrl);
JsonObject json = null;
if (response != null && !Utils.isBlank(response.responseBody())) {
try {
@ -136,11 +125,12 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
}
}
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
final long total;
if (json != null) {
PeertubeParsingHelper.validate(json);
this.total = JsonUtils.getNumber(json, "total").longValue();
collectStreamsFrom(collector, json, pageUrl);
total = JsonUtils.getNumber(json, "total").longValue();
collectStreamsFrom(collector, json);
} else {
throw new ExtractionException("Unable to get PeerTube kiosk info");
}
@ -148,22 +138,20 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
}
@Override
public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
Response response = downloader.get(getUrl());
if (null != response && null != response.responseBody()) {
public void onFetchPage(final Downloader downloader) throws IOException, ExtractionException {
final Response response = downloader.get(getUrl());
if (response != null && response.responseBody() != null) {
setInitialData(response.responseBody());
} else {
throw new ExtractionException("Unable to extract PeerTube channel data");
}
this.initPage = getPage(getUrl() + "/videos?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE);
}
private void setInitialData(String responseBody) throws ExtractionException {
private void setInitialData(final String responseBody) throws ExtractionException {
try {
json = JsonParser.object().from(responseBody);
} catch (JsonParserException e) {
throw new ExtractionException("Unable to extract peertube channel data", e);
throw new ExtractionException("Unable to extract PeerTube channel data", e);
}
if (json == null) throw new ExtractionException("Unable to extract PeerTube channel data");
}
@ -177,5 +165,4 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
public String getOriginalUrl() throws ParsingException {
return baseUrl + "/" + getId();
}
}


@ -24,7 +24,6 @@ import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelp
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
public class PeertubeCommentsExtractor extends CommentsExtractor {
private InfoItemsPage<CommentsInfoItem> initPage;
private long total;
public PeertubeCommentsExtractor(final StreamingService service, final ListLinkHandler uiHandler) {
@ -33,8 +32,8 @@ public class PeertubeCommentsExtractor extends CommentsExtractor {
@Override
public InfoItemsPage<CommentsInfoItem> getInitialPage() throws IOException, ExtractionException {
super.fetchPage();
return initPage;
final String pageUrl = getUrl() + "?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
return getPage(pageUrl);
}
private void collectCommentsFrom(final CommentsInfoItemsCollector collector, final JsonObject json) throws ParsingException {
@ -51,12 +50,6 @@ public class PeertubeCommentsExtractor extends CommentsExtractor {
}
}
@Override
public String getNextPageUrl() throws IOException, ExtractionException {
super.fetchPage();
return initPage.getNextPageUrl();
}
@Override
public InfoItemsPage<CommentsInfoItem> getPage(final String pageUrl) throws IOException, ExtractionException {
final Response response = getDownloader().get(pageUrl);
@ -80,8 +73,5 @@ public class PeertubeCommentsExtractor extends CommentsExtractor {
return new InfoItemsPage<>(collector, PeertubeParsingHelper.getNextPageUrl(pageUrl, total));
}
@Override
public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
this.initPage = getPage(getUrl() + "?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE);
}
public void onFetchPage(final Downloader downloader) throws IOException, ExtractionException { }
}
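
Throughout the PeerTube extractors, the URL of the following page is derived from the current page URL and the reported total, e.g. PeertubeParsingHelper.getNextPageUrl(pageUrl, total) just above. The class below is only a hypothetical sketch of how such a helper can work; the "start" parameter name and the page size of 12 are assumptions, not taken from the real PeertubeParsingHelper:

// Hypothetical stand-in for PeertubeParsingHelper.getNextPageUrl(pageUrl, total);
// parameter key and page size are assumed values.
final class PeertubePaginationSketch {
    private static final String START_KEY = "start"; // assumed query parameter name
    private static final long ITEMS_PER_PAGE = 12;   // assumed page size

    // Returns the URL of the following page, or "" once `total` items are covered.
    static String nextPageUrl(final String pageUrl, final long total) {
        long start = 0;
        final int queryIndex = pageUrl.indexOf('?');
        if (queryIndex >= 0) {
            for (final String param : pageUrl.substring(queryIndex + 1).split("&")) {
                if (param.startsWith(START_KEY + "=")) {
                    start = Long.parseLong(param.substring(START_KEY.length() + 1));
                }
            }
        }
        final long nextStart = start + ITEMS_PER_PAGE;
        if (nextStart >= total) {
            return "";
        }
        return pageUrl.replace(START_KEY + "=" + start, START_KEY + "=" + nextStart);
    }
}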


@ -1,6 +1,7 @@
package org.schabi.newpipe.extractor.services.peertube.extractors;
import com.grack.nanojson.JsonObject;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.schabi.newpipe.extractor.ServiceList;
@ -10,14 +11,12 @@ import org.schabi.newpipe.extractor.localization.DateWrapper;
import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
import org.schabi.newpipe.extractor.utils.JsonUtils;
public class PeertubeCommentsInfoItemExtractor implements CommentsInfoItemExtractor {
private final JsonObject item;
private final String url;
private final String baseUrl;
public PeertubeCommentsInfoItemExtractor(JsonObject item, PeertubeCommentsExtractor extractor) throws ParsingException {
public PeertubeCommentsInfoItemExtractor(final JsonObject item, final PeertubeCommentsExtractor extractor) throws ParsingException {
this.item = item;
this.url = extractor.getUrl();
this.baseUrl = extractor.getBaseUrl();
@ -29,7 +28,7 @@ public class PeertubeCommentsInfoItemExtractor implements CommentsInfoItemExtrac
}
@Override
public String getThumbnailUrl() throws ParsingException {
public String getThumbnailUrl() {
String value;
try {
value = JsonUtils.getString(item, "account.avatar.path");
@ -51,20 +50,20 @@ public class PeertubeCommentsInfoItemExtractor implements CommentsInfoItemExtrac
@Override
public DateWrapper getUploadDate() throws ParsingException {
String textualUploadDate = getTextualUploadDate();
final String textualUploadDate = getTextualUploadDate();
return new DateWrapper(PeertubeParsingHelper.parseDateFrom(textualUploadDate));
}
@Override
public int getLikeCount() throws ParsingException {
public int getLikeCount() {
return -1;
}
@Override
public String getCommentText() throws ParsingException {
String htmlText = JsonUtils.getString(item, "text");
final String htmlText = JsonUtils.getString(item, "text");
try {
Document doc = Jsoup.parse(htmlText);
final Document doc = Jsoup.parse(htmlText);
return doc.body().text();
} catch (Exception e) {
return htmlText.replaceAll("(?s)<[^>]*>(\\s*<[^>]*>)*", "");
@ -73,12 +72,12 @@ public class PeertubeCommentsInfoItemExtractor implements CommentsInfoItemExtrac
@Override
public String getCommentId() throws ParsingException {
Number value = JsonUtils.getNumber(item, "id");
final Number value = JsonUtils.getNumber(item, "id");
return value.toString();
}
@Override
public String getUploaderAvatarUrl() throws ParsingException {
public String getUploaderAvatarUrl() {
String value;
try {
value = JsonUtils.getString(item, "account.avatar.path");
@ -95,9 +94,8 @@ public class PeertubeCommentsInfoItemExtractor implements CommentsInfoItemExtrac
@Override
public String getUploaderUrl() throws ParsingException {
String name = JsonUtils.getString(item, "account.name");
String host = JsonUtils.getString(item, "account.host");
final String name = JsonUtils.getString(item, "account.name");
final String host = JsonUtils.getString(item, "account.host");
return ServiceList.PeerTube.getChannelLHFactory().fromId("accounts/" + name + "@" + host, baseUrl).getUrl();
}
}


@ -4,6 +4,7 @@ import com.grack.nanojson.JsonArray;
import com.grack.nanojson.JsonObject;
import com.grack.nanojson.JsonParser;
import com.grack.nanojson.JsonParserException;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.downloader.Downloader;
import org.schabi.newpipe.extractor.downloader.Response;
@ -16,20 +17,19 @@ import org.schabi.newpipe.extractor.stream.StreamInfoItem;
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
import org.schabi.newpipe.extractor.utils.JsonUtils;
import javax.annotation.Nonnull;
import java.io.IOException;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.*;
import javax.annotation.Nonnull;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
public class PeertubePlaylistExtractor extends PlaylistExtractor {
private JsonObject playlistInfo;
private JsonObject playlistVideos;
private String initialPageUrl;
private long total;
public PeertubePlaylistExtractor(StreamingService service, ListLinkHandler linkHandler) {
public PeertubePlaylistExtractor(final StreamingService service, final ListLinkHandler linkHandler) {
super(service, linkHandler);
}
@ -39,17 +39,17 @@ public class PeertubePlaylistExtractor extends PlaylistExtractor {
}
@Override
public String getBannerUrl() throws ParsingException {
public String getBannerUrl() {
return null;
}
@Override
public String getUploaderUrl() throws ParsingException {
public String getUploaderUrl() {
return playlistInfo.getObject("ownerAccount").getString("url");
}
@Override
public String getUploaderName() throws ParsingException {
public String getUploaderName() {
return playlistInfo.getObject("ownerAccount").getString("displayName");
}
@ -59,19 +59,19 @@ public class PeertubePlaylistExtractor extends PlaylistExtractor {
}
@Override
public long getStreamCount() throws ParsingException {
public long getStreamCount() {
return playlistInfo.getNumber("videosLength").longValue();
}
@Nonnull
@Override
public String getSubChannelName() throws ParsingException {
public String getSubChannelName() {
return playlistInfo.getObject("videoChannel").getString("displayName");
}
@Nonnull
@Override
public String getSubChannelUrl() throws ParsingException {
public String getSubChannelUrl() {
return playlistInfo.getObject("videoChannel").getString("url");
}
@ -88,13 +88,9 @@ public class PeertubePlaylistExtractor extends PlaylistExtractor {
}
@Override
public String getNextPageUrl() throws IOException, ExtractionException {
return PeertubeParsingHelper.getNextPageUrl(initialPageUrl, total);
}
@Override
public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
Response response = getDownloader().get(pageUrl);
public InfoItemsPage<StreamInfoItem> getPage(final String pageUrl) throws IOException, ExtractionException {
final Response response = getDownloader().get(pageUrl);
final JsonObject playlistVideos;
try {
playlistVideos = JsonParser.object().from(response.responseBody());
} catch (JsonParserException jpe) {
@ -102,13 +98,13 @@ public class PeertubePlaylistExtractor extends PlaylistExtractor {
}
PeertubeParsingHelper.validate(playlistVideos);
this.total = JsonUtils.getNumber(playlistVideos, "total").longValue();
final long total = JsonUtils.getNumber(playlistVideos, "total").longValue();
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
JsonArray videos = playlistVideos.getArray("data");
for (Object o : videos) {
JsonObject video = ((JsonObject) o).getObject("video");
final JsonArray videos = playlistVideos.getArray("data");
for (final Object o : videos) {
final JsonObject video = ((JsonObject) o).getObject("video");
collector.commit(new PeertubeStreamInfoItemExtractor(video, getBaseUrl()));
}
@ -116,8 +112,8 @@ public class PeertubePlaylistExtractor extends PlaylistExtractor {
}
@Override
public void onFetchPage(@Nonnull Downloader downloader) throws IOException, ExtractionException {
Response response = downloader.get(getUrl());
public void onFetchPage(@Nonnull final Downloader downloader) throws IOException, ExtractionException {
final Response response = downloader.get(getUrl());
try {
playlistInfo = JsonParser.object().from(response.responseBody());
} catch (JsonParserException jpe) {


@ -17,27 +17,24 @@ import org.schabi.newpipe.extractor.search.InfoItemsSearchCollector;
import org.schabi.newpipe.extractor.search.SearchExtractor;
import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
import org.schabi.newpipe.extractor.utils.JsonUtils;
import org.schabi.newpipe.extractor.utils.Parser;
import org.schabi.newpipe.extractor.utils.Parser.RegexException;
import org.schabi.newpipe.extractor.utils.Utils;
import javax.annotation.Nonnull;
import java.io.IOException;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.*;
import javax.annotation.Nonnull;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
public class PeertubeSearchExtractor extends SearchExtractor {
private InfoItemsPage<InfoItem> initPage;
private long total;
public PeertubeSearchExtractor(StreamingService service, SearchQueryHandler linkHandler) {
super(service, linkHandler);
}
@Nonnull
@Override
public String getSearchSuggestion() throws ParsingException {
public String getSearchSuggestion() {
return "";
}
@ -48,44 +45,37 @@ public class PeertubeSearchExtractor extends SearchExtractor {
@Override
public InfoItemsPage<InfoItem> getInitialPage() throws IOException, ExtractionException {
super.fetchPage();
return initPage;
final String pageUrl = getUrl() + "&" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
return getPage(pageUrl);
}
private InfoItemsCollector<InfoItem, InfoItemExtractor> collectStreamsFrom(JsonObject json) throws ParsingException {
private InfoItemsCollector<InfoItem, InfoItemExtractor> collectStreamsFrom(final JsonObject json) throws ParsingException {
final InfoItemsSearchCollector collector = new InfoItemsSearchCollector(getServiceId());
JsonArray contents;
final JsonArray contents;
try {
contents = (JsonArray) JsonUtils.getValue(json, "data");
} catch (Exception e) {
throw new ParsingException("unable to extract search info", e);
}
String baseUrl = getBaseUrl();
for (Object c : contents) {
final String baseUrl = getBaseUrl();
for (final Object c : contents) {
if (c instanceof JsonObject) {
final JsonObject item = (JsonObject) c;
PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
final PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
collector.commit(extractor);
}
}
return collector;
}
@Override
public String getNextPageUrl() throws IOException, ExtractionException {
super.fetchPage();
return initPage.getNextPageUrl();
}
@Override
public InfoItemsPage<InfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
Response response = getDownloader().get(pageUrl);
public InfoItemsPage<InfoItem> getPage(final String pageUrl) throws IOException, ExtractionException {
final Response response = getDownloader().get(pageUrl);
JsonObject json = null;
if (null != response && !Utils.isBlank(response.responseBody())) {
if (response != null && !Utils.isBlank(response.responseBody())) {
try {
json = JsonParser.object().from(response.responseBody());
} catch (Exception e) {
@ -94,7 +84,7 @@ public class PeertubeSearchExtractor extends SearchExtractor {
}
if (json != null) {
total = JsonUtils.getNumber(json, "total").longValue();
final long total = JsonUtils.getNumber(json, "total").longValue();
return new InfoItemsPage<>(collectStreamsFrom(json), PeertubeParsingHelper.getNextPageUrl(pageUrl, total));
} else {
throw new ExtractionException("Unable to get peertube search info");
@ -102,7 +92,5 @@ public class PeertubeSearchExtractor extends SearchExtractor {
}
@Override
public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
initPage = getPage(getUrl() + "&" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE);
}
public void onFetchPage(@Nonnull final Downloader downloader) throws IOException, ExtractionException { }
}


@ -40,13 +40,11 @@ import java.util.Locale;
import javax.annotation.Nonnull;
public class PeertubeStreamExtractor extends StreamExtractor {
private final String baseUrl;
private JsonObject json;
private List<SubtitlesStream> subtitles = new ArrayList<>();
public PeertubeStreamExtractor(StreamingService service, LinkHandler linkHandler) throws ParsingException {
public PeertubeStreamExtractor(final StreamingService service, final LinkHandler linkHandler) throws ParsingException {
super(service, linkHandler);
this.baseUrl = getBaseUrl();
}
@ -82,10 +80,10 @@ public class PeertubeStreamExtractor extends StreamExtractor {
}
if (text.length() == 250 && text.substring(247).equals("...")) {
//if description is shortened, get full description
Downloader dl = NewPipe.getDownloader();
final Downloader dl = NewPipe.getDownloader();
try {
Response response = dl.get(getUrl() + "/description");
JsonObject jsonObject = JsonParser.object().from(response.responseBody());
final Response response = dl.get(getUrl() + "/description");
final JsonObject jsonObject = JsonParser.object().from(response.responseBody());
text = JsonUtils.getString(jsonObject, "description");
} catch (ReCaptchaException | IOException | JsonParserException e) {
e.printStackTrace();
@ -96,7 +94,7 @@ public class PeertubeStreamExtractor extends StreamExtractor {
@Override
public int getAgeLimit() throws ParsingException {
boolean isNSFW = JsonUtils.getBoolean(json, "nsfw");
final boolean isNSFW = JsonUtils.getBoolean(json, "nsfw");
if (isNSFW) {
return 18;
} else {
@ -106,38 +104,38 @@ public class PeertubeStreamExtractor extends StreamExtractor {
@Override
public long getLength() throws ParsingException {
Number value = JsonUtils.getNumber(json, "duration");
final Number value = JsonUtils.getNumber(json, "duration");
return value.longValue();
}
@Override
public long getTimeStamp() throws ParsingException {
public long getTimeStamp() {
//TODO fetch timestamp from url if present;
return 0;
}
@Override
public long getViewCount() throws ParsingException {
Number value = JsonUtils.getNumber(json, "views");
final Number value = JsonUtils.getNumber(json, "views");
return value.longValue();
}
@Override
public long getLikeCount() throws ParsingException {
Number value = JsonUtils.getNumber(json, "likes");
final Number value = JsonUtils.getNumber(json, "likes");
return value.longValue();
}
@Override
public long getDislikeCount() throws ParsingException {
Number value = JsonUtils.getNumber(json, "dislikes");
final Number value = JsonUtils.getNumber(json, "dislikes");
return value.longValue();
}
@Override
public String getUploaderUrl() throws ParsingException {
String name = JsonUtils.getString(json, "account.name");
String host = JsonUtils.getString(json, "account.host");
final String name = JsonUtils.getString(json, "account.name");
final String host = JsonUtils.getString(json, "account.host");
return getService().getChannelLHFactory().fromId("accounts/" + name + "@" + host, baseUrl).getUrl();
}
@ -147,7 +145,7 @@ public class PeertubeStreamExtractor extends StreamExtractor {
}
@Override
public String getUploaderAvatarUrl() throws ParsingException {
public String getUploaderAvatarUrl() {
String value;
try {
value = JsonUtils.getString(json, "account.avatar.path");
@ -170,7 +168,7 @@ public class PeertubeStreamExtractor extends StreamExtractor {
@Nonnull
@Override
public String getSubChannelAvatarUrl() throws ParsingException {
public String getSubChannelAvatarUrl() {
String value;
try {
value = JsonUtils.getString(json, "channel.avatar.path");
@ -181,35 +179,35 @@ public class PeertubeStreamExtractor extends StreamExtractor {
}
@Override
public String getDashMpdUrl() throws ParsingException {
public String getDashMpdUrl() {
return "";
}
@Override
public String getHlsUrl() throws ParsingException {
public String getHlsUrl() {
return "";
}
@Override
public List<AudioStream> getAudioStreams() throws IOException, ExtractionException {
public List<AudioStream> getAudioStreams() {
return null;
}
@Override
public List<VideoStream> getVideoStreams() throws IOException, ExtractionException {
public List<VideoStream> getVideoStreams() throws ExtractionException {
assertPageFetched();
List<VideoStream> videoStreams = new ArrayList<>();
final List<VideoStream> videoStreams = new ArrayList<>();
try {
JsonArray streams = json.getArray("files");
for (Object s : streams) {
final JsonArray streams = json.getArray("files");
for (final Object s : streams) {
if (!(s instanceof JsonObject)) continue;
JsonObject stream = (JsonObject) s;
String url = JsonUtils.getString(stream, "fileUrl");
String torrentUrl = JsonUtils.getString(stream, "torrentUrl");
String resolution = JsonUtils.getString(stream, "resolution.label");
String extension = url.substring(url.lastIndexOf(".") + 1);
MediaFormat format = MediaFormat.getFromSuffix(extension);
VideoStream videoStream = new VideoStream(url, torrentUrl, format, resolution);
final JsonObject stream = (JsonObject) s;
final String url = JsonUtils.getString(stream, "fileUrl");
final String torrentUrl = JsonUtils.getString(stream, "torrentUrl");
final String resolution = JsonUtils.getString(stream, "resolution.label");
final String extension = url.substring(url.lastIndexOf(".") + 1);
final MediaFormat format = MediaFormat.getFromSuffix(extension);
final VideoStream videoStream = new VideoStream(url, torrentUrl, format, resolution);
if (!Stream.containSimilarStream(videoStream, videoStreams)) {
videoStreams.add(videoStream);
}
@ -223,20 +221,19 @@ public class PeertubeStreamExtractor extends StreamExtractor {
@Override
public List<VideoStream> getVideoOnlyStreams() throws IOException, ExtractionException {
// TODO Auto-generated method stub
public List<VideoStream> getVideoOnlyStreams() {
return null;
}
@Override
public List<SubtitlesStream> getSubtitlesDefault() throws IOException, ExtractionException {
public List<SubtitlesStream> getSubtitlesDefault() {
return subtitles;
}
@Override
public List<SubtitlesStream> getSubtitles(final MediaFormat format) throws IOException, ExtractionException {
List<SubtitlesStream> filteredSubs = new ArrayList<>();
for (SubtitlesStream sub : subtitles) {
public List<SubtitlesStream> getSubtitles(final MediaFormat format) {
final List<SubtitlesStream> filteredSubs = new ArrayList<>();
for (final SubtitlesStream sub : subtitles) {
if (sub.getFormat() == format) {
filteredSubs.add(sub);
}
@ -245,20 +242,20 @@ public class PeertubeStreamExtractor extends StreamExtractor {
}
@Override
public StreamType getStreamType() throws ParsingException {
public StreamType getStreamType() {
return StreamType.VIDEO_STREAM;
}
@Override
public StreamInfoItem getNextStream() throws IOException, ExtractionException {
public StreamInfoItem getNextStream() {
return null;
}
@Override
public StreamInfoItemsCollector getRelatedStreams() throws IOException, ExtractionException {
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
List<String> tags = getTags();
String apiUrl = null;
final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
final List<String> tags = getTags();
final String apiUrl;
if (!tags.isEmpty()) {
apiUrl = getRelatedStreamsUrl(tags);
@ -280,7 +277,7 @@ public class PeertubeStreamExtractor extends StreamExtractor {
@Nonnull
@Override
public String getSupportInfo() throws ParsingException {
public String getSupportInfo() {
try {
return JsonUtils.getString(json, "support");
} catch (ParsingException e) {
@ -288,21 +285,21 @@ public class PeertubeStreamExtractor extends StreamExtractor {
}
}
private String getRelatedStreamsUrl(List<String> tags) throws UnsupportedEncodingException {
String url = baseUrl + PeertubeSearchQueryHandlerFactory.SEARCH_ENDPOINT;
StringBuilder params = new StringBuilder();
private String getRelatedStreamsUrl(final List<String> tags) throws UnsupportedEncodingException {
final String url = baseUrl + PeertubeSearchQueryHandlerFactory.SEARCH_ENDPOINT;
final StringBuilder params = new StringBuilder();
params.append("start=0&count=8&sort=-createdAt");
for (String tag : tags) {
for (final String tag : tags) {
params.append("&tagsOneOf=");
params.append(URLEncoder.encode(tag, "UTF-8"));
}
return url + "?" + params.toString();
}
private void getStreamsFromApi(StreamInfoItemsCollector collector, String apiUrl) throws ReCaptchaException, IOException, ParsingException {
Response response = getDownloader().get(apiUrl);
private void getStreamsFromApi(final StreamInfoItemsCollector collector, final String apiUrl) throws ReCaptchaException, IOException, ParsingException {
final Response response = getDownloader().get(apiUrl);
JsonObject relatedVideosJson = null;
if (null != response && !Utils.isBlank(response.responseBody())) {
if (response != null && !Utils.isBlank(response.responseBody())) {
try {
relatedVideosJson = JsonParser.object().from(response.responseBody());
} catch (JsonParserException e) {
@ -315,35 +312,33 @@ public class PeertubeStreamExtractor extends StreamExtractor {
}
}
private void collectStreamsFrom(StreamInfoItemsCollector collector, JsonObject json) throws ParsingException {
JsonArray contents;
private void collectStreamsFrom(final StreamInfoItemsCollector collector, final JsonObject json) throws ParsingException {
final JsonArray contents;
try {
contents = (JsonArray) JsonUtils.getValue(json, "data");
} catch (Exception e) {
throw new ParsingException("unable to extract related videos", e);
}
for (Object c : contents) {
for (final Object c : contents) {
if (c instanceof JsonObject) {
final JsonObject item = (JsonObject) c;
PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
final PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
//do not add the same stream in related streams
if (!extractor.getUrl().equals(getUrl())) collector.commit(extractor);
}
}
}
@Override
public String getErrorMessage() {
return null;
}
@Override
public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
Response response = downloader.get(getUrl());
if (null != response && null != response.responseBody()) {
public void onFetchPage(final Downloader downloader) throws IOException, ExtractionException {
final Response response = downloader.get(getUrl());
if (response != null && response.responseBody() != null) {
setInitialData(response.responseBody());
} else {
throw new ExtractionException("Unable to extract peertube channel data");
@ -352,29 +347,29 @@ public class PeertubeStreamExtractor extends StreamExtractor {
loadSubtitles();
}
private void setInitialData(String responseBody) throws ExtractionException {
private void setInitialData(final String responseBody) throws ExtractionException {
try {
json = JsonParser.object().from(responseBody);
} catch (JsonParserException e) {
throw new ExtractionException("Unable to extract peertube stream data", e);
}
if (null == json) throw new ExtractionException("Unable to extract peertube stream data");
if (json == null) throw new ExtractionException("Unable to extract peertube stream data");
PeertubeParsingHelper.validate(json);
}
private void loadSubtitles() {
if (subtitles.isEmpty()) {
try {
Response response = getDownloader().get(getUrl() + "/captions");
JsonObject captionsJson = JsonParser.object().from(response.responseBody());
JsonArray captions = JsonUtils.getArray(captionsJson, "data");
for (Object c : captions) {
final Response response = getDownloader().get(getUrl() + "/captions");
final JsonObject captionsJson = JsonParser.object().from(response.responseBody());
final JsonArray captions = JsonUtils.getArray(captionsJson, "data");
for (final Object c : captions) {
if (c instanceof JsonObject) {
JsonObject caption = (JsonObject) c;
String url = baseUrl + JsonUtils.getString(caption, "captionPath");
String languageCode = JsonUtils.getString(caption, "language.id");
String ext = url.substring(url.lastIndexOf(".") + 1);
MediaFormat fmt = MediaFormat.getFromSuffix(ext);
final JsonObject caption = (JsonObject) c;
final String url = baseUrl + JsonUtils.getString(caption, "captionPath");
final String languageCode = JsonUtils.getString(caption, "language.id");
final String ext = url.substring(url.lastIndexOf(".") + 1);
final MediaFormat fmt = MediaFormat.getFromSuffix(ext);
if (fmt != null && languageCode != null)
subtitles.add(new SubtitlesStream(fmt, languageCode, url, false));
}
@ -416,7 +411,7 @@ public class PeertubeStreamExtractor extends StreamExtractor {
}
@Override
public Locale getLanguageInfo() throws ParsingException {
public Locale getLanguageInfo() {
try {
return new Locale(JsonUtils.getString(json, "language.id"));
} catch (ParsingException e) {


@ -1,6 +1,7 @@
package org.schabi.newpipe.extractor.services.peertube.extractors;
import com.grack.nanojson.JsonObject;
import org.schabi.newpipe.extractor.ServiceList;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.localization.DateWrapper;
@ -10,24 +11,23 @@ import org.schabi.newpipe.extractor.stream.StreamType;
import org.schabi.newpipe.extractor.utils.JsonUtils;
public class PeertubeStreamInfoItemExtractor implements StreamInfoItemExtractor {
protected final JsonObject item;
private final String baseUrl;
public PeertubeStreamInfoItemExtractor(JsonObject item, String baseUrl) {
public PeertubeStreamInfoItemExtractor(final JsonObject item, final String baseUrl) {
this.item = item;
this.baseUrl = baseUrl;
}
@Override
public String getUrl() throws ParsingException {
String uuid = JsonUtils.getString(item, "uuid");
final String uuid = JsonUtils.getString(item, "uuid");
return ServiceList.PeerTube.getStreamLHFactory().fromId(uuid, baseUrl).getUrl();
}
@Override
public String getThumbnailUrl() throws ParsingException {
String value = JsonUtils.getString(item, "thumbnailPath");
final String value = JsonUtils.getString(item, "thumbnailPath");
return baseUrl + value;
}
@ -37,20 +37,20 @@ public class PeertubeStreamInfoItemExtractor implements StreamInfoItemExtractor
}
@Override
public boolean isAd() throws ParsingException {
public boolean isAd() {
return false;
}
@Override
public long getViewCount() throws ParsingException {
Number value = JsonUtils.getNumber(item, "views");
final Number value = JsonUtils.getNumber(item, "views");
return value.longValue();
}
@Override
public String getUploaderUrl() throws ParsingException {
String name = JsonUtils.getString(item, "account.name");
String host = JsonUtils.getString(item, "account.host");
final String name = JsonUtils.getString(item, "account.name");
final String host = JsonUtils.getString(item, "account.host");
return ServiceList.PeerTube.getChannelLHFactory().fromId("accounts/" + name + "@" + host, baseUrl).getUrl();
}
@ -77,14 +77,13 @@ public class PeertubeStreamInfoItemExtractor implements StreamInfoItemExtractor
}
@Override
public StreamType getStreamType() throws ParsingException {
public StreamType getStreamType() {
return StreamType.VIDEO_STREAM;
}
@Override
public long getDuration() throws ParsingException {
Number value = JsonUtils.getNumber(item, "duration");
final Number value = JsonUtils.getNumber(item, "duration");
return value.longValue();
}
}


@ -1,21 +0,0 @@
package org.schabi.newpipe.extractor.services.peertube.extractors;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.subscription.SubscriptionExtractor;
import java.util.List;
public class PeertubeSubscriptionExtractor extends SubscriptionExtractor {
public PeertubeSubscriptionExtractor(StreamingService service, List<ContentSource> supportedSources) {
super(service, supportedSources);
// TODO Auto-generated constructor stub
}
@Override
public String getRelatedUrl() {
// TODO Auto-generated method stub
return null;
}
}


@ -1,22 +1,18 @@
package org.schabi.newpipe.extractor.services.peertube.extractors;
import org.schabi.newpipe.extractor.StreamingService;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import org.schabi.newpipe.extractor.suggestion.SuggestionExtractor;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
public class PeertubeSuggestionExtractor extends SuggestionExtractor {
public PeertubeSuggestionExtractor(StreamingService service) {
public PeertubeSuggestionExtractor(final StreamingService service) {
super(service);
}
@Override
public List<String> suggestionList(String query) throws IOException, ExtractionException {
public List<String> suggestionList(final String query) {
return Collections.emptyList();
}
}


@ -19,14 +19,14 @@ import org.schabi.newpipe.extractor.utils.Utils;
import java.io.IOException;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.*;
import javax.annotation.Nonnull;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
private InfoItemsPage<StreamInfoItem> initPage;
private long total;
public PeertubeTrendingExtractor(StreamingService streamingService, ListLinkHandler linkHandler, String kioskId) {
public PeertubeTrendingExtractor(final StreamingService streamingService, final ListLinkHandler linkHandler, final String kioskId) {
super(streamingService, linkHandler, kioskId);
}
@ -37,38 +37,31 @@ public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
@Override
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
super.fetchPage();
return initPage;
final String pageUrl = getUrl() + "&" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE;
return getPage(pageUrl);
}
private void collectStreamsFrom(StreamInfoItemsCollector collector, JsonObject json, String pageUrl) throws ParsingException {
JsonArray contents;
private void collectStreamsFrom(final StreamInfoItemsCollector collector, final JsonObject json) throws ParsingException {
final JsonArray contents;
try {
contents = (JsonArray) JsonUtils.getValue(json, "data");
} catch (Exception e) {
throw new ParsingException("Unable to extract kiosk info", e);
}
String baseUrl = getBaseUrl();
for (Object c : contents) {
final String baseUrl = getBaseUrl();
for (final Object c : contents) {
if (c instanceof JsonObject) {
final JsonObject item = (JsonObject) c;
PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
final PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
collector.commit(extractor);
}
}
}
@Override
public String getNextPageUrl() throws IOException, ExtractionException {
super.fetchPage();
return initPage.getNextPageUrl();
}
@Override
public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) throws IOException, ExtractionException {
Response response = getDownloader().get(pageUrl);
public InfoItemsPage<StreamInfoItem> getPage(final String pageUrl) throws IOException, ExtractionException {
final Response response = getDownloader().get(pageUrl);
JsonObject json = null;
if (response != null && !Utils.isBlank(response.responseBody())) {
try {
@ -78,11 +71,12 @@ public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
}
}
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
final long total;
if (json != null) {
Number number = JsonUtils.getNumber(json, "total");
if (number != null) this.total = number.longValue();
collectStreamsFrom(collector, json, pageUrl);
final Number number = JsonUtils.getNumber(json, "total");
total = number.longValue();
collectStreamsFrom(collector, json);
} else {
throw new ExtractionException("Unable to get peertube kiosk info");
}
@ -90,8 +84,5 @@ public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
}
@Override
public void onFetchPage(Downloader downloader) throws IOException, ExtractionException {
this.initPage = getPage(getUrl() + "&" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE);
}
public void onFetchPage(@Nonnull final Downloader downloader) throws IOException, ExtractionException { }
}


@ -25,9 +25,6 @@ public class SoundcloudChannelExtractor extends ChannelExtractor {
private String userId;
private JsonObject user;
private StreamInfoItemsCollector streamInfoItemsCollector = null;
private String nextPageUrl = null;
public SoundcloudChannelExtractor(StreamingService service, ListLinkHandler linkHandler) {
super(service, linkHandler);
}
@ -102,30 +99,17 @@ public class SoundcloudChannelExtractor extends ChannelExtractor {
@Nonnull
@Override
public InfoItemsPage<StreamInfoItem> getInitialPage() throws ExtractionException {
if (streamInfoItemsCollector == null) {
computeNextPageAndGetStreams();
}
return new InfoItemsPage<>(streamInfoItemsCollector, getNextPageUrl());
}
@Override
public String getNextPageUrl() throws ExtractionException {
if (nextPageUrl == null) {
computeNextPageAndGetStreams();
}
return nextPageUrl;
}
private void computeNextPageAndGetStreams() throws ExtractionException {
try {
streamInfoItemsCollector = new StreamInfoItemsCollector(getServiceId());
StreamInfoItemsCollector streamInfoItemsCollector = new StreamInfoItemsCollector(getServiceId());
String apiUrl = "https://api-v2.soundcloud.com/users/" + getId() + "/tracks"
+ "?client_id=" + SoundcloudParsingHelper.clientId()
+ "&limit=20"
+ "&linked_partitioning=1";
nextPageUrl = SoundcloudParsingHelper.getStreamsFromApiMinItems(15, streamInfoItemsCollector, apiUrl);
String nextPageUrl = SoundcloudParsingHelper.getStreamsFromApiMinItems(15, streamInfoItemsCollector, apiUrl);
return new InfoItemsPage<>(streamInfoItemsCollector, nextPageUrl);
} catch (Exception e) {
throw new ExtractionException("Could not get next page", e);
}


@ -16,9 +16,6 @@ import static org.schabi.newpipe.extractor.ServiceList.SoundCloud;
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
private StreamInfoItemsCollector collector = null;
private String nextPageUrl = null;
public SoundcloudChartsExtractor(StreamingService service,
ListLinkHandler linkHandler,
String kioskId) {
@ -49,7 +46,12 @@ public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
private void computeNextPageAndStreams() throws IOException, ExtractionException {
collector = new StreamInfoItemsCollector(getServiceId());
}
@Nonnull
@Override
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
String apiUrl = "https://api-v2.soundcloud.com/charts" +
"?genre=soundcloud:genres:all-music" +
@ -61,27 +63,11 @@ public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
apiUrl += "&kind=trending";
}
String contentCountry = SoundCloud.getContentCountry().getCountryCode();
apiUrl += "&region=soundcloud:regions:" + contentCountry;
nextPageUrl = SoundcloudParsingHelper.getStreamsFromApi(collector, apiUrl, true);
}
String nextPageUrl = SoundcloudParsingHelper.getStreamsFromApi(collector, apiUrl, true);
@Override
public String getNextPageUrl() throws IOException, ExtractionException {
if (nextPageUrl == null) {
computeNextPageAndStreams();
}
return nextPageUrl;
}
@Nonnull
@Override
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
if (collector == null) {
computeNextPageAndStreams();
}
return new InfoItemsPage<>(collector, getNextPageUrl());
return new InfoItemsPage<>(collector, nextPageUrl);
}
}


@ -33,12 +33,7 @@ public class SoundcloudCommentsExtractor extends CommentsExtractor {
collectStreamsFrom(collector, json.getArray("collection"));
return new InfoItemsPage<>(collector, getNextPageUrl());
}
@Override
public String getNextPageUrl() throws IOException, ExtractionException {
return json.getString("next_href");
return new InfoItemsPage<>(collector, json.getString("next_href"));
}
@Override
@ -54,7 +49,7 @@ public class SoundcloudCommentsExtractor extends CommentsExtractor {
final CommentsInfoItemsCollector collector = new CommentsInfoItemsCollector(getServiceId());
collectStreamsFrom(collector, json.getArray("collection"));
return new InfoItemsPage<>(collector, getNextPageUrl());
return new InfoItemsPage<>(collector, json.getString("next_href"));
}
@Override


@ -30,9 +30,6 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
private String playlistId;
private JsonObject playlist;
private StreamInfoItemsCollector streamInfoItemsCollector;
private String nextPageUrl;
public SoundcloudPlaylistExtractor(StreamingService service, ListLinkHandler linkHandler) {
super(service, linkHandler);
}
@ -137,14 +134,7 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
@Nonnull
@Override
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
if (streamInfoItemsCollector == null) {
computeInitialTracksAndNextPageUrl();
}
return new InfoItemsPage<>(streamInfoItemsCollector, nextPageUrl);
}
private void computeInitialTracksAndNextPageUrl() throws IOException, ExtractionException {
streamInfoItemsCollector = new StreamInfoItemsCollector(getServiceId());
StreamInfoItemsCollector streamInfoItemsCollector = new StreamInfoItemsCollector(getServiceId());
StringBuilder nextPageUrlBuilder = new StringBuilder("https://api-v2.soundcloud.com/tracks?client_id=");
nextPageUrlBuilder.append(SoundcloudParsingHelper.clientId());
nextPageUrlBuilder.append("&ids=");
@ -163,19 +153,12 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
}
nextPageUrlBuilder.setLength(nextPageUrlBuilder.length() - 1); // remove trailing ,
nextPageUrl = nextPageUrlBuilder.toString();
String nextPageUrl = nextPageUrlBuilder.toString();
if (nextPageUrl.endsWith("&ids")) {
// there are no other videos
nextPageUrl = "";
}
}
@Override
public String getNextPageUrl() throws IOException, ExtractionException {
if (nextPageUrl == null) {
computeInitialTracksAndNextPageUrl();
}
return nextPageUrl;
return new InfoItemsPage<>(streamInfoItemsCollector, nextPageUrl);
}
@Override


@ -26,7 +26,6 @@ import static org.schabi.newpipe.extractor.services.soundcloud.linkHandler.Sound
import static org.schabi.newpipe.extractor.utils.JsonUtils.EMPTY_STRING;
public class SoundcloudSearchExtractor extends SearchExtractor {
private JsonArray searchCollection;
public SoundcloudSearchExtractor(StreamingService service, SearchQueryHandler linkHandler) {
@ -47,12 +46,7 @@ public class SoundcloudSearchExtractor extends SearchExtractor {
@Nonnull
@Override
public InfoItemsPage<InfoItem> getInitialPage() throws IOException, ExtractionException {
return new InfoItemsPage<>(collectItems(searchCollection), getNextPageUrl());
}
@Override
public String getNextPageUrl() throws IOException, ExtractionException {
return getNextPageUrlFromCurrentUrl(getUrl());
return new InfoItemsPage<>(collectItems(searchCollection), getNextPageUrlFromCurrentUrl(getUrl()));
}
@Override


@ -104,15 +104,6 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
YoutubeParsingHelper.defaultAlertsCheck(initialData);
}
@Override
public String getNextPageUrl() throws ExtractionException {
if (getVideoTab() == null) return "";
return getNextPageUrlFrom(getVideoTab().getObject("content").getObject("sectionListRenderer")
.getArray("contents").getObject(0).getObject("itemSectionRenderer")
.getArray("contents").getObject(0).getObject("gridRenderer").getArray("continuations"));
}
@Nonnull
@Override
public String getUrl() throws ParsingException {
@ -231,16 +222,21 @@ public class YoutubeChannelExtractor extends ChannelExtractor {
@Nonnull
@Override
public InfoItemsPage<StreamInfoItem> getInitialPage() throws ExtractionException {
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
String nextPageUrl = null;
if (getVideoTab() != null) {
JsonArray videos = getVideoTab().getObject("content").getObject("sectionListRenderer").getArray("contents")
.getObject(0).getObject("itemSectionRenderer").getArray("contents").getObject(0)
.getObject("gridRenderer").getArray("items");
collectStreamsFrom(collector, videos);
final JsonObject gridRenderer = getVideoTab().getObject("content").getObject("sectionListRenderer")
.getArray("contents").getObject(0).getObject("itemSectionRenderer")
.getArray("contents").getObject(0).getObject("gridRenderer");
collectStreamsFrom(collector, gridRenderer.getArray("items"));
nextPageUrl = getNextPageUrlFrom(gridRenderer.getArray("continuations"));
}
return new InfoItemsPage<>(collector, getNextPageUrl());
return new InfoItemsPage<>(collector, nextPageUrl);
}
@Override
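Where an extractor previously walked the same JSON tree twice — once for the items in getInitialPage() and again in getNextPageUrl() — it now navigates it once, keeps the intermediate renderer object, and reads both the items and the continuation data from it, as in the channel extractor above. A simplified sketch of that single-pass shape (videoTab stands in for getVideoTab(); illustrative path, not the full renderer chain):

final JsonObject grid = videoTab.getObject("content")
        .getObject("sectionListRenderer").getArray("contents").getObject(0)
        .getObject("itemSectionRenderer").getArray("contents").getObject(0)
        .getObject("gridRenderer");                                  // navigate once, keep the result
collectStreamsFrom(collector, grid.getArray("items"));               // items for this page
final String nextPageUrl = getNextPageUrlFrom(grid.getArray("continuations")); // URL for the next page
return new InfoItemsPage<>(collector, nextPageUrl);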

View file

@@ -51,15 +51,7 @@ public class YoutubeCommentsExtractor extends CommentsExtractor {
return initPage;
}
@Override
public String getNextPageUrl() throws IOException, ExtractionException {
// initial page does not load any comments but is required to get comments token
super.fetchPage();
return initPage.getNextPageUrl();
}
private String getNextPageUrl(JsonObject ajaxJson) throws IOException, ParsingException {
private String getNextPageUrl(JsonObject ajaxJson) throws ParsingException {
JsonArray arr;
try {
arr = JsonUtils.getArray(ajaxJson, "response.continuationContents.commentSectionContinuation.continuations");

View file

@@ -65,18 +65,8 @@ public class YoutubeFeedExtractor extends FeedExtractor {
return document.select("feed > author > name").first().text();
}
@Override
public String getNextPageUrl() {
return null;
}
@Override
public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) {
return null;
}
@Override
public boolean hasNextPage() {
return false;
}
}
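Extractors without pagination — the feed above, the trending kiosk, and the MediaCCC extractors earlier in the diff — no longer need any next-page overrides: returning a page whose next-page URL is null or empty is enough, since the updated tests below rely on InfoItemsPage reporting hasNextPage() from that value. A hedged sketch of such an extractor's paging methods, with a hypothetical collectStreamsFrom helper:

@Nonnull
@Override
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
    final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
    collectStreamsFrom(collector);               // hypothetical helper that fills the collector
    return new InfoItemsPage<>(collector, null); // null next-page URL => no further pages
}

@Override
public InfoItemsPage<StreamInfoItem> getPage(final String pageUrl) {
    return null; // never reached: the initial page reports no next page
}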

View file

@@ -169,26 +169,19 @@ public class YoutubeMusicSearchExtractor extends SearchExtractor {
final JsonArray contents = initialData.getObject("contents").getObject("sectionListRenderer").getArray("contents");
for (Object content : contents) {
if (((JsonObject) content).has("musicShelfRenderer")) {
collectMusicStreamsFrom(collector, ((JsonObject) content).getObject("musicShelfRenderer").getArray("contents"));
}
}
return new InfoItemsPage<>(collector, getNextPageUrl());
}
@Override
public String getNextPageUrl() throws ExtractionException, IOException {
final JsonArray contents = initialData.getObject("contents").getObject("sectionListRenderer").getArray("contents");
String nextPageUrl = null;
for (Object content : contents) {
if (((JsonObject) content).has("musicShelfRenderer")) {
return getNextPageUrlFrom(((JsonObject) content).getObject("musicShelfRenderer").getArray("continuations"));
final JsonObject musicShelfRenderer = ((JsonObject) content).getObject("musicShelfRenderer");
collectMusicStreamsFrom(collector, musicShelfRenderer.getArray("contents"));
nextPageUrl = getNextPageUrlFrom(musicShelfRenderer.getArray("continuations"));
}
}
return "";
return new InfoItemsPage<>(collector, nextPageUrl);
}
@Override

View file

@@ -77,15 +77,6 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
}
}
@Override
public String getNextPageUrl() {
return getNextPageUrlFrom(initialData.getObject("contents").getObject("twoColumnBrowseResultsRenderer")
.getArray("tabs").getObject(0).getObject("tabRenderer").getObject("content")
.getObject("sectionListRenderer").getArray("contents").getObject(0)
.getObject("itemSectionRenderer").getArray("contents").getObject(0)
.getObject("playlistVideoListRenderer").getArray("continuations"));
}
@Nonnull
@Override
public String getName() throws ParsingException {
@@ -178,6 +169,7 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
@Override
public InfoItemsPage<StreamInfoItem> getInitialPage() {
final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
String nextPageUrl = null;
final JsonArray contents = initialData.getObject("contents").getObject("twoColumnBrowseResultsRenderer")
.getArray("tabs").getObject(0).getObject("tabRenderer").getObject("content")
@@ -193,13 +185,16 @@ public class YoutubePlaylistExtractor extends PlaylistExtractor {
.getObject("videoList").getObject("playlistVideoListRenderer").getArray("contents"));
}
}
return new InfoItemsPage<>(collector, null);
} else if (contents.getObject(0).has("playlistVideoListRenderer")) {
final JsonArray videos = contents.getObject(0)
.getObject("playlistVideoListRenderer").getArray("contents");
collectStreamsFrom(collector, videos);
final JsonObject videos = contents.getObject(0).getObject("playlistVideoListRenderer");
collectStreamsFrom(collector, videos.getArray("contents"));
nextPageUrl = getNextPageUrlFrom(videos.getArray("continuations"));
}
return new InfoItemsPage<>(collector, getNextPageUrl());
return new InfoItemsPage<>(collector, nextPageUrl);
}
@Override

View file

@@ -101,18 +101,17 @@ public class YoutubeSearchExtractor extends SearchExtractor {
final JsonArray sections = initialData.getObject("contents").getObject("twoColumnSearchResultsRenderer")
.getObject("primaryContents").getObject("sectionListRenderer").getArray("contents");
String nextPageUrl = null;
for (Object section : sections) {
collectStreamsFrom(collector, ((JsonObject) section).getObject("itemSectionRenderer").getArray("contents"));
final JsonObject itemSectionRenderer = ((JsonObject) section).getObject("itemSectionRenderer");
collectStreamsFrom(collector, itemSectionRenderer.getArray("contents"));
nextPageUrl = getNextPageUrlFrom(itemSectionRenderer.getArray("continuations"));
}
return new InfoItemsPage<>(collector, getNextPageUrl());
}
@Override
public String getNextPageUrl() throws ExtractionException {
return getNextPageUrlFrom(initialData.getObject("contents").getObject("twoColumnSearchResultsRenderer")
.getObject("primaryContents").getObject("sectionListRenderer").getArray("contents")
.getObject(0).getObject("itemSectionRenderer").getArray("continuations"));
return new InfoItemsPage<>(collector, nextPageUrl);
}
@Override

View file

@@ -60,11 +60,6 @@ public class YoutubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
initialData = ajaxJson.getObject(1).getObject("response");
}
@Override
public String getNextPageUrl() {
return "";
}
@Override
public InfoItemsPage<StreamInfoItem> getPage(String pageUrl) {
return null;
@@ -98,6 +93,7 @@ public class YoutubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
collector.commit(new YoutubeStreamInfoItemExtractor(videoInfo, timeAgoParser));
}
}
return new InfoItemsPage<>(collector, getNextPageUrl());
return new InfoItemsPage<>(collector, null);
}
}

View file

@@ -84,8 +84,9 @@ public final class DefaultTests {
}
public static <T extends InfoItem> void assertNoMoreItems(ListExtractor<T> extractor) throws Exception {
assertFalse("More items available when it shouldn't", extractor.hasNextPage());
final String nextPageUrl = extractor.getNextPageUrl();
final ListExtractor.InfoItemsPage<T> initialPage = extractor.getInitialPage();
assertFalse("More items available when it shouldn't", initialPage.hasNextPage());
final String nextPageUrl = initialPage.getNextPageUrl();
assertTrue("Next page is not empty or null", isNullOrEmpty(nextPageUrl));
}
@@ -118,8 +119,9 @@ public final class DefaultTests {
}
public static <T extends InfoItem> ListExtractor.InfoItemsPage<T> defaultTestMoreItems(ListExtractor<T> extractor) throws Exception {
assertTrue("Doesn't have more items", extractor.hasNextPage());
ListExtractor.InfoItemsPage<T> nextPage = extractor.getPage(extractor.getNextPageUrl());
final ListExtractor.InfoItemsPage<T> initialPage = extractor.getInitialPage();
assertTrue("Doesn't have more items", initialPage.hasNextPage());
ListExtractor.InfoItemsPage<T> nextPage = extractor.getPage(initialPage.getNextPageUrl());
final List<T> items = nextPage.getItems();
assertFalse("Next page is empty", items.isEmpty());
assertEmptyErrors("Next page have errors", nextPage.getErrors());
@@ -129,7 +131,7 @@ public final class DefaultTests {
}
public static void defaultTestGetPageInNewExtractor(ListExtractor<? extends InfoItem> extractor, ListExtractor<? extends InfoItem> newExtractor) throws Exception {
final String nextPageUrl = extractor.getNextPageUrl();
final String nextPageUrl = extractor.getInitialPage().getNextPageUrl();
final ListExtractor.InfoItemsPage<? extends InfoItem> page = newExtractor.getPage(nextPageUrl);
defaultTestListOfItems(extractor.getService(), page.getItems(), page.getErrors());
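On the consumer side, the call sequence now goes through the initial page rather than the extractor, as the updated tests above show. A minimal usage sketch with generic names, assuming the extractor has already been fetched:

final ListExtractor.InfoItemsPage<StreamInfoItem> firstPage = extractor.getInitialPage();
if (firstPage.hasNextPage()) {
    final ListExtractor.InfoItemsPage<StreamInfoItem> secondPage =
            extractor.getPage(firstPage.getNextPageUrl());
    // secondPage.getItems(), secondPage.getErrors(), and secondPage.hasNextPage()
    // drive any further paging from here.
}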

View file

@@ -186,9 +186,10 @@ public class YoutubeSearchExtractorTest {
@Test
public void testMoreRelatedItems() throws Exception {
final ListExtractor.InfoItemsPage<InfoItem> initialPage = extractor().getInitialPage();
// YouTube actually gives us an empty next page, but after that, no more pages.
assertTrue(extractor.hasNextPage());
final ListExtractor.InfoItemsPage<InfoItem> nextEmptyPage = extractor.getPage(extractor.getNextPageUrl());
assertTrue(initialPage.hasNextPage());
final ListExtractor.InfoItemsPage<InfoItem> nextEmptyPage = extractor.getPage(initialPage.getNextPageUrl());
assertEquals(0, nextEmptyPage.getItems().size());
assertEmptyErrors("Empty page has errors", nextEmptyPage.getErrors());