Clean up the code

wb9688 2020-06-13 20:25:38 +02:00
parent 9b6fe1dea6
commit 17ba8a57fa
23 changed files with 104 additions and 177 deletions

ListExtractor.java

@ -2,14 +2,12 @@ package org.schabi.newpipe.extractor;
import org.schabi.newpipe.extractor.exceptions.ExtractionException;
import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
import org.schabi.newpipe.extractor.utils.Utils;
import javax.annotation.Nonnull;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
import javax.annotation.Nonnull;
/**
@ -110,8 +108,7 @@ public abstract class ListExtractor<R extends InfoItem> extends Extractor {
}
public boolean hasNextPage() {
return nextPage != null && (!isNullOrEmpty(nextPage.getUrl())
|| !isNullOrEmpty(nextPage.getIds()));
return Page.isValid(nextPage);
}
public List<T> getItems() {

ListInfo.java

@ -4,8 +4,6 @@ import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
import java.util.List;
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
public abstract class ListInfo<T extends InfoItem> extends Info {
private List<T> relatedItems;
private Page nextPage = null;
@ -39,8 +37,7 @@ public abstract class ListInfo<T extends InfoItem> extends Info {
}
public boolean hasNextPage() {
return nextPage != null && (!isNullOrEmpty(nextPage.getUrl())
|| !isNullOrEmpty(nextPage.getIds()));
return Page.isValid(nextPage);
}
public Page getNextPage() {

Page.java

@ -4,6 +4,8 @@ import java.io.Serializable;
import java.util.List;
import java.util.Map;
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
public class Page implements Serializable {
private final String url;
private final List<String> ids;
@ -42,4 +44,9 @@ public class Page implements Serializable {
public Map<String, String> getCookies() {
return cookies;
}
public static boolean isValid(final Page page) {
return page != null && (!isNullOrEmpty(page.getUrl())
|| !isNullOrEmpty(page.getIds()));
}
}
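For reference, a minimal usage sketch of the new helper (the URL is an example): isValid returns true only when the page carries a non-empty URL or a non-empty id list, the same check the two hasNextPage() implementations above previously inlined.

    Page.isValid(null);                                           // false
    Page.isValid(new Page(""));                                   // false: no URL, no ids
    Page.isValid(new Page("https://example.org/api?start=12"));   // true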

PeertubeParsingHelper.java

@ -1,10 +1,14 @@
package org.schabi.newpipe.extractor.services.peertube;
import com.grack.nanojson.JsonArray;
import com.grack.nanojson.JsonObject;
import org.schabi.newpipe.extractor.InfoItemsCollector;
import org.schabi.newpipe.extractor.Page;
import org.schabi.newpipe.extractor.exceptions.ContentNotAvailableException;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.services.peertube.extractors.PeertubeStreamInfoItemExtractor;
import org.schabi.newpipe.extractor.utils.JsonUtils;
import org.schabi.newpipe.extractor.utils.Parser;
import org.schabi.newpipe.extractor.utils.Utils;
@ -66,4 +70,21 @@ public class PeertubeParsingHelper {
return new Page(prevPageUrl.replace(START_KEY + "=" + prevStart, START_KEY + "=" + nextStart));
}
}
public static void collectStreamsFrom(final InfoItemsCollector collector, final JsonObject json, final String baseUrl) throws ParsingException {
final JsonArray contents;
try {
contents = (JsonArray) JsonUtils.getValue(json, "data");
} catch (Exception e) {
throw new ParsingException("Unable to extract list info", e);
}
for (final Object c : contents) {
if (c instanceof JsonObject) {
final JsonObject item = (JsonObject) c;
final PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
collector.commit(extractor);
}
}
}
}
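This shared method replaces the near-identical private collectStreamsFrom copies removed from the account, channel, playlist, search and trending extractors below. A sketch of the call pattern those extractors now use inside getPage(), assuming json is the parsed PeerTube API response containing a "data" array:

    final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
    PeertubeParsingHelper.collectStreamsFrom(collector, json, getBaseUrl());
    final long total = json.getLong("total");
    return new InfoItemsPage<>(collector, PeertubeParsingHelper.getNextPage(page.getUrl(), total));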

PeertubeAccountExtractor.java

@ -1,6 +1,5 @@
package org.schabi.newpipe.extractor.services.peertube.extractors;
import com.grack.nanojson.JsonArray;
import com.grack.nanojson.JsonObject;
import com.grack.nanojson.JsonParser;
import com.grack.nanojson.JsonParserException;
@ -24,6 +23,7 @@ import java.io.IOException;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.collectStreamsFrom;
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
public class PeertubeAccountExtractor extends ChannelExtractor {
@ -57,9 +57,8 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
}
@Override
public long getSubscriberCount() throws ParsingException {
final Number number = JsonUtils.getNumber(json, "followersCount");
return number.longValue();
public long getSubscriberCount() {
return json.getLong("followersCount");
}
@Override
@ -92,23 +91,6 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
return getPage(new Page(pageUrl));
}
private void collectStreamsFrom(final StreamInfoItemsCollector collector, final JsonObject json) throws ParsingException {
final JsonArray contents;
try {
contents = (JsonArray) JsonUtils.getValue(json, "data");
} catch (Exception e) {
throw new ParsingException("Unable to extract account streams", e);
}
for (final Object c : contents) {
if (c instanceof JsonObject) {
final JsonObject item = (JsonObject) c;
final PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
collector.commit(extractor);
}
}
}
@Override
public InfoItemsPage<StreamInfoItem> getPage(final Page page) throws IOException, ExtractionException {
if (page == null || isNullOrEmpty(page.getUrl())) {
@ -128,10 +110,10 @@ public class PeertubeAccountExtractor extends ChannelExtractor {
if (json != null) {
PeertubeParsingHelper.validate(json);
final long total = JsonUtils.getNumber(json, "total").longValue();
final long total = json.getLong("total");
final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
collectStreamsFrom(collector, json);
collectStreamsFrom(collector, json, getBaseUrl());
return new InfoItemsPage<>(collector, PeertubeParsingHelper.getNextPage(page.getUrl(), total));
} else {
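A note on the accessor swap above (behavior inferred from the two APIs, not stated in the diff): JsonUtils.getNumber(json, key) throws a ParsingException when the key is missing, which is why the old getters declared it, while nanojson's JsonObject.getLong falls back to a default and lets the throws clause be dropped.

    json.getLong("followersCount");      // 0 when the field is absent
    json.getLong("followersCount", -1);  // explicit fallback, as used for "favoritings_count" further down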

PeertubeChannelExtractor.java

@ -1,6 +1,5 @@
package org.schabi.newpipe.extractor.services.peertube.extractors;
import com.grack.nanojson.JsonArray;
import com.grack.nanojson.JsonObject;
import com.grack.nanojson.JsonParser;
import com.grack.nanojson.JsonParserException;
@ -24,6 +23,7 @@ import java.io.IOException;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.collectStreamsFrom;
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
@ -58,9 +58,8 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
}
@Override
public long getSubscriberCount() throws ParsingException {
final Number number = JsonUtils.getNumber(json, "followersCount");
return number.longValue();
public long getSubscriberCount() {
return json.getLong("followersCount");
}
@Override
@ -99,23 +98,6 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
return getPage(new Page(pageUrl));
}
private void collectStreamsFrom(final StreamInfoItemsCollector collector, final JsonObject json) throws ParsingException {
final JsonArray contents;
try {
contents = (JsonArray) JsonUtils.getValue(json, "data");
} catch (Exception e) {
throw new ParsingException("Unable to extract channel streams", e);
}
for (final Object c : contents) {
if (c instanceof JsonObject) {
final JsonObject item = (JsonObject) c;
final PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
collector.commit(extractor);
}
}
}
@Override
public InfoItemsPage<StreamInfoItem> getPage(final Page page) throws IOException, ExtractionException {
if (page == null || isNullOrEmpty(page.getUrl())) {
@ -135,10 +117,10 @@ public class PeertubeChannelExtractor extends ChannelExtractor {
if (json != null) {
PeertubeParsingHelper.validate(json);
final long total = JsonUtils.getNumber(json, "total").longValue();
final long total = json.getLong("total");
final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
collectStreamsFrom(collector, json);
collectStreamsFrom(collector, json, getBaseUrl());
return new InfoItemsPage<>(collector, PeertubeParsingHelper.getNextPage(page.getUrl(), total));
} else {

PeertubeCommentsExtractor.java

@ -69,7 +69,7 @@ public class PeertubeCommentsExtractor extends CommentsExtractor {
if (json != null) {
PeertubeParsingHelper.validate(json);
final long total = JsonUtils.getNumber(json, "total").longValue();
final long total = json.getLong("total");
final CommentsInfoItemsCollector collector = new CommentsInfoItemsCollector(getServiceId());
collectCommentsFrom(collector, json);

PeertubeCommentsInfoItemExtractor.java

@ -11,6 +11,8 @@ import org.schabi.newpipe.extractor.localization.DateWrapper;
import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
import org.schabi.newpipe.extractor.utils.JsonUtils;
import java.util.Objects;
public class PeertubeCommentsInfoItemExtractor implements CommentsInfoItemExtractor {
private final JsonObject item;
private final String url;
@ -71,9 +73,8 @@ public class PeertubeCommentsInfoItemExtractor implements CommentsInfoItemExtractor {
}
@Override
public String getCommentId() throws ParsingException {
final Number value = JsonUtils.getNumber(item, "id");
return value.toString();
public String getCommentId() {
return Objects.toString(item.getLong("id"), null);
}
@Override
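A quick sketch of what the Objects.toString call above evaluates to (values are illustrative): getLong returns a primitive long, so the boxed argument can never be null and the null fallback is purely defensive.

    final long id = item.getLong("id");                    // 0 if "id" is absent
    final String commentId = Objects.toString(id, null);   // e.g. "9328", or "0" when the field is absent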

PeertubePlaylistExtractor.java

@ -1,6 +1,5 @@
package org.schabi.newpipe.extractor.services.peertube.extractors;
import com.grack.nanojson.JsonArray;
import com.grack.nanojson.JsonObject;
import com.grack.nanojson.JsonParser;
import com.grack.nanojson.JsonParserException;
@ -16,7 +15,6 @@ import org.schabi.newpipe.extractor.playlist.PlaylistExtractor;
import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
import org.schabi.newpipe.extractor.utils.JsonUtils;
import org.schabi.newpipe.extractor.utils.Utils;
import java.io.IOException;
@ -26,6 +24,7 @@ import javax.annotation.Nonnull;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.collectStreamsFrom;
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
public class PeertubePlaylistExtractor extends PlaylistExtractor {
@ -62,7 +61,7 @@ public class PeertubePlaylistExtractor extends PlaylistExtractor {
@Override
public long getStreamCount() {
return playlistInfo.getNumber("videosLength").longValue();
return playlistInfo.getLong("videosLength");
}
@Nonnull
@ -89,24 +88,6 @@ public class PeertubePlaylistExtractor extends PlaylistExtractor {
return getPage(new Page(getUrl() + "/videos?" + START_KEY + "=0&" + COUNT_KEY + "=" + ITEMS_PER_PAGE));
}
private void collectStreamsFrom(final StreamInfoItemsCollector collector, final JsonObject json) throws ParsingException {
final JsonArray contents;
try {
contents = (JsonArray) JsonUtils.getValue(json, "data");
} catch (Exception e) {
throw new ParsingException("Unable to extract playlist streams", e);
}
final String baseUrl = getBaseUrl();
for (final Object c : contents) {
if (c instanceof JsonObject) {
final JsonObject item = (JsonObject) c;
final PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
collector.commit(extractor);
}
}
}
@Override
public InfoItemsPage<StreamInfoItem> getPage(final Page page) throws IOException, ExtractionException {
if (page == null || isNullOrEmpty(page.getUrl())) {
@ -126,10 +107,10 @@ public class PeertubePlaylistExtractor extends PlaylistExtractor {
if (json != null) {
PeertubeParsingHelper.validate(json);
final long total = JsonUtils.getNumber(json, "total").longValue();
final long total = json.getLong("total");
final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
collectStreamsFrom(collector, json);
collectStreamsFrom(collector, json, getBaseUrl());
return new InfoItemsPage<>(collector, PeertubeParsingHelper.getNextPage(page.getUrl(), total));
} else {

PeertubeSearchExtractor.java

@ -1,6 +1,5 @@
package org.schabi.newpipe.extractor.services.peertube.extractors;
import com.grack.nanojson.JsonArray;
import com.grack.nanojson.JsonObject;
import com.grack.nanojson.JsonParser;
@ -15,7 +14,6 @@ import org.schabi.newpipe.extractor.linkhandler.SearchQueryHandler;
import org.schabi.newpipe.extractor.search.InfoItemsSearchCollector;
import org.schabi.newpipe.extractor.search.SearchExtractor;
import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
import org.schabi.newpipe.extractor.utils.JsonUtils;
import org.schabi.newpipe.extractor.utils.Utils;
import java.io.IOException;
@ -25,6 +23,7 @@ import javax.annotation.Nonnull;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.collectStreamsFrom;
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
public class PeertubeSearchExtractor extends SearchExtractor {
@ -49,24 +48,6 @@ public class PeertubeSearchExtractor extends SearchExtractor {
return getPage(new Page(pageUrl));
}
private void collectStreamsFrom(final InfoItemsSearchCollector collector, final JsonObject json) throws ParsingException {
final JsonArray contents;
try {
contents = (JsonArray) JsonUtils.getValue(json, "data");
} catch (Exception e) {
throw new ParsingException("unable to extract search info", e);
}
final String baseUrl = getBaseUrl();
for (final Object c : contents) {
if (c instanceof JsonObject) {
final JsonObject item = (JsonObject) c;
final PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
collector.commit(extractor);
}
}
}
@Override
public InfoItemsPage<InfoItem> getPage(final Page page) throws IOException, ExtractionException {
if (page == null || isNullOrEmpty(page.getUrl())) {
@ -86,10 +67,10 @@ public class PeertubeSearchExtractor extends SearchExtractor {
if (json != null) {
PeertubeParsingHelper.validate(json);
final long total = JsonUtils.getNumber(json, "total").longValue();
final long total = json.getLong("total");
final InfoItemsSearchCollector collector = new InfoItemsSearchCollector(getServiceId());
collectStreamsFrom(collector, json);
collectStreamsFrom(collector, json, getBaseUrl());
return new InfoItemsPage<>(collector, PeertubeParsingHelper.getNextPage(page.getUrl(), total));
} else {

PeertubeStreamExtractor.java

@ -103,9 +103,8 @@ public class PeertubeStreamExtractor extends StreamExtractor {
}
@Override
public long getLength() throws ParsingException {
final Number value = JsonUtils.getNumber(json, "duration");
return value.longValue();
public long getLength() {
return json.getLong("duration");
}
@Override
@ -115,21 +114,18 @@ public class PeertubeStreamExtractor extends StreamExtractor {
}
@Override
public long getViewCount() throws ParsingException {
final Number value = JsonUtils.getNumber(json, "views");
return value.longValue();
public long getViewCount() {
return json.getLong("views");
}
@Override
public long getLikeCount() throws ParsingException {
final Number value = JsonUtils.getNumber(json, "likes");
return value.longValue();
public long getLikeCount() {
return json.getLong("likes");
}
@Override
public long getDislikeCount() throws ParsingException {
final Number value = JsonUtils.getNumber(json, "dislikes");
return value.longValue();
public long getDislikeCount() {
return json.getLong("dislikes");
}
@Override
@ -222,7 +218,7 @@ public class PeertubeStreamExtractor extends StreamExtractor {
@Override
public List<VideoStream> getVideoOnlyStreams() {
return null;
return Collections.emptyList();
}
@Override
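A small sketch of why the empty list is friendlier to callers than null here (extractor stands for any PeertubeStreamExtractor instance):

    // iterates zero times instead of risking a NullPointerException on a null result
    for (final VideoStream videoOnlyStream : extractor.getVideoOnlyStreams()) {
        // never reached for PeerTube streams
    }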

PeertubeStreamInfoItemExtractor.java

@ -42,9 +42,8 @@ public class PeertubeStreamInfoItemExtractor implements StreamInfoItemExtractor
}
@Override
public long getViewCount() throws ParsingException {
final Number value = JsonUtils.getNumber(item, "views");
return value.longValue();
public long getViewCount() {
return item.getLong("views");
}
@Override
@ -82,8 +81,7 @@ public class PeertubeStreamInfoItemExtractor implements StreamInfoItemExtractor
}
@Override
public long getDuration() throws ParsingException {
final Number value = JsonUtils.getNumber(item, "duration");
return value.longValue();
public long getDuration() {
return item.getLong("duration");
}
}

PeertubeTrendingExtractor.java

@ -1,6 +1,5 @@
package org.schabi.newpipe.extractor.services.peertube.extractors;
import com.grack.nanojson.JsonArray;
import com.grack.nanojson.JsonObject;
import com.grack.nanojson.JsonParser;
@ -15,7 +14,6 @@ import org.schabi.newpipe.extractor.linkhandler.ListLinkHandler;
import org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper;
import org.schabi.newpipe.extractor.stream.StreamInfoItem;
import org.schabi.newpipe.extractor.stream.StreamInfoItemsCollector;
import org.schabi.newpipe.extractor.utils.JsonUtils;
import org.schabi.newpipe.extractor.utils.Utils;
import java.io.IOException;
@ -25,6 +23,7 @@ import javax.annotation.Nonnull;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.COUNT_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.ITEMS_PER_PAGE;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.START_KEY;
import static org.schabi.newpipe.extractor.services.peertube.PeertubeParsingHelper.collectStreamsFrom;
import static org.schabi.newpipe.extractor.utils.Utils.isNullOrEmpty;
public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
@ -43,24 +42,6 @@ public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
return getPage(new Page(pageUrl));
}
private void collectStreamsFrom(final StreamInfoItemsCollector collector, final JsonObject json) throws ParsingException {
final JsonArray contents;
try {
contents = (JsonArray) JsonUtils.getValue(json, "data");
} catch (Exception e) {
throw new ParsingException("Unable to extract kiosk info", e);
}
final String baseUrl = getBaseUrl();
for (final Object c : contents) {
if (c instanceof JsonObject) {
final JsonObject item = (JsonObject) c;
final PeertubeStreamInfoItemExtractor extractor = new PeertubeStreamInfoItemExtractor(item, baseUrl);
collector.commit(extractor);
}
}
}
@Override
public InfoItemsPage<StreamInfoItem> getPage(final Page page) throws IOException, ExtractionException {
if (page == null || isNullOrEmpty(page.getUrl())) {
@ -80,10 +61,10 @@ public class PeertubeTrendingExtractor extends KioskExtractor<StreamInfoItem> {
if (json != null) {
PeertubeParsingHelper.validate(json);
final long total = JsonUtils.getNumber(json, "total").longValue();
final long total = json.getLong("total");
final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
collectStreamsFrom(collector, json);
collectStreamsFrom(collector, json, getBaseUrl());
return new InfoItemsPage<>(collector, PeertubeParsingHelper.getNextPage(page.getUrl(), total));
} else {

SoundcloudChannelExtractor.java

@ -75,7 +75,7 @@ public class SoundcloudChannelExtractor extends ChannelExtractor {
@Override
public long getSubscriberCount() {
return user.getNumber("followers_count", 0).longValue();
return user.getLong("followers_count", 0);
}
@Override

SoundcloudChannelInfoItemExtractor.java

@ -32,12 +32,12 @@ public class SoundcloudChannelInfoItemExtractor implements ChannelInfoItemExtractor {
@Override
public long getSubscriberCount() {
return itemObject.getNumber("followers_count", 0).longValue();
return itemObject.getLong("followers_count");
}
@Override
public long getStreamCount() {
return itemObject.getNumber("track_count", 0).longValue();
return itemObject.getLong("track_count");
}
@Override

SoundcloudChartsExtractor.java

@ -40,8 +40,8 @@ public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
throw new IllegalArgumentException("Page doesn't contain an URL");
}
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
String nextPageUrl = SoundcloudParsingHelper.getStreamsFromApi(collector, page.getUrl(), true);
final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
final String nextPageUrl = SoundcloudParsingHelper.getStreamsFromApi(collector, page.getUrl(), true);
return new InfoItemsPage<>(collector, new Page(nextPageUrl));
}
@ -49,7 +49,7 @@ public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
@Nonnull
@Override
public InfoItemsPage<StreamInfoItem> getInitialPage() throws IOException, ExtractionException {
StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
final StreamInfoItemsCollector collector = new StreamInfoItemsCollector(getServiceId());
String apiUrl = "https://api-v2.soundcloud.com/charts" +
"?genre=soundcloud:genres:all-music" +
@ -61,10 +61,10 @@ public class SoundcloudChartsExtractor extends KioskExtractor<StreamInfoItem> {
apiUrl += "&kind=trending";
}
String contentCountry = SoundCloud.getContentCountry().getCountryCode();
final String contentCountry = SoundCloud.getContentCountry().getCountryCode();
apiUrl += "&region=soundcloud:regions:" + contentCountry;
String nextPageUrl = SoundcloudParsingHelper.getStreamsFromApi(collector, apiUrl, true);
final String nextPageUrl = SoundcloudParsingHelper.getStreamsFromApi(collector, apiUrl, true);
return new InfoItemsPage<>(collector, new Page(nextPageUrl));
}
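For orientation, the URL those locals assemble for the Trending kiosk looks roughly like this (parameters from the lines elided between the two hunks are omitted; US is an example region):

    https://api-v2.soundcloud.com/charts?genre=soundcloud:genres:all-music&kind=trending&region=soundcloud:regions:US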

SoundcloudCommentsInfoItemExtractor.java

@ -6,10 +6,11 @@ import org.schabi.newpipe.extractor.exceptions.ParsingException;
import org.schabi.newpipe.extractor.localization.DateWrapper;
import org.schabi.newpipe.extractor.services.soundcloud.SoundcloudParsingHelper;
import java.util.Objects;
import javax.annotation.Nullable;
public class SoundcloudCommentsInfoItemExtractor implements CommentsInfoItemExtractor {
private JsonObject json;
private String url;
@ -19,32 +20,32 @@ public class SoundcloudCommentsInfoItemExtractor implements CommentsInfoItemExtractor {
}
@Override
public String getCommentId() throws ParsingException {
return json.getNumber("id").toString();
public String getCommentId() {
return Objects.toString(json.getLong("id"), null);
}
@Override
public String getCommentText() throws ParsingException {
public String getCommentText() {
return json.getString("body");
}
@Override
public String getUploaderName() throws ParsingException {
public String getUploaderName() {
return json.getObject("user").getString("username");
}
@Override
public String getUploaderAvatarUrl() throws ParsingException {
public String getUploaderAvatarUrl() {
return json.getObject("user").getString("avatar_url");
}
@Override
public String getUploaderUrl() throws ParsingException {
public String getUploaderUrl() {
return json.getObject("user").getString("permalink_url");
}
@Override
public String getTextualUploadDate() throws ParsingException {
public String getTextualUploadDate() {
return json.getString("created_at");
}
@ -55,7 +56,7 @@ public class SoundcloudCommentsInfoItemExtractor implements CommentsInfoItemExtractor {
}
@Override
public int getLikeCount() throws ParsingException {
public int getLikeCount() {
return -1;
}
@ -70,7 +71,7 @@ public class SoundcloudCommentsInfoItemExtractor implements CommentsInfoItemExtractor {
}
@Override
public String getThumbnailUrl() throws ParsingException {
public String getThumbnailUrl() {
return json.getObject("user").getString("avatar_url");
}
}

SoundcloudPlaylistExtractor.java

@ -113,7 +113,7 @@ public class SoundcloudPlaylistExtractor extends PlaylistExtractor {
@Override
public long getStreamCount() {
return playlist.getNumber("track_count", 0).longValue();
return playlist.getLong("track_count");
}
@Nonnull

SoundcloudPlaylistInfoItemExtractor.java

@ -81,6 +81,6 @@ public class SoundcloudPlaylistInfoItemExtractor implements PlaylistInfoItemExtractor {
@Override
public long getStreamCount() {
return itemObject.getNumber("track_count", 0).longValue();
return itemObject.getLong("track_count");
}
}

SoundcloudStreamExtractor.java

@ -102,7 +102,7 @@ public class SoundcloudStreamExtractor extends StreamExtractor {
@Override
public long getLength() {
return track.getNumber("duration", 0).longValue() / 1000L;
return track.getLong("duration") / 1000L;
}
@Override
@ -112,12 +112,12 @@ public class SoundcloudStreamExtractor extends StreamExtractor {
@Override
public long getViewCount() {
return track.getNumber("playback_count", 0).longValue();
return track.getLong("playback_count");
}
@Override
public long getLikeCount() {
return track.getNumber("favoritings_count", -1).longValue();
return track.getLong("favoritings_count", -1);
}
@Override

SoundcloudStreamInfoItemExtractor.java

@ -30,7 +30,7 @@ public class SoundcloudStreamInfoItemExtractor implements StreamInfoItemExtractor {
@Override
public long getDuration() {
return itemObject.getNumber("duration", 0).longValue() / 1000L;
return itemObject.getLong("duration") / 1000L;
}
@Override
@ -53,13 +53,9 @@ public class SoundcloudStreamInfoItemExtractor implements StreamInfoItemExtractor {
return new DateWrapper(SoundcloudParsingHelper.parseDateFrom(getTextualUploadDate()));
}
private String getCreatedAt() {
return itemObject.getString("created_at");
}
@Override
public long getViewCount() {
return itemObject.getNumber("playback_count", 0).longValue();
return itemObject.getLong("playback_count");
}
@Override

YoutubeCommentsInfoItemExtractor.java

@ -73,7 +73,7 @@ public class YoutubeCommentsInfoItemExtractor implements CommentsInfoItemExtractor {
@Override
public int getLikeCount() throws ParsingException {
try {
return JsonUtils.getNumber(json, "likeCount").intValue();
return json.getInt("likeCount");
} catch (Exception e) {
throw new ParsingException("Could not get like count", e);
}

UtilsTest.java

@ -1,18 +1,24 @@
package org.schabi.newpipe.extractor.utils;
import com.grack.nanojson.JsonParserException;
import org.junit.Test;
import org.schabi.newpipe.extractor.exceptions.ParsingException;
import java.util.Arrays;
import static org.junit.Assert.assertEquals;
public class UtilsTest {
@Test
public void testMixedNumberWordToLong() throws JsonParserException, ParsingException {
public void testMixedNumberWordToLong() throws ParsingException {
assertEquals(10, Utils.mixedNumberWordToLong("10"));
assertEquals(10.5e3, Utils.mixedNumberWordToLong("10.5K"), 0.0);
assertEquals(10.5e6, Utils.mixedNumberWordToLong("10.5M"), 0.0);
assertEquals(10.5e6, Utils.mixedNumberWordToLong("10,5M"), 0.0);
assertEquals(1.5e9, Utils.mixedNumberWordToLong("1,5B"), 0.0);
}
@Test
public void testJoin() {
assertEquals("some,random,stuff", Utils.join(",", Arrays.asList("some", "random", "stuff")));
}
}