Skip to content

Commit 0b6e37e

Browse files
committed
Improve YoutubeSearchPagingTest
1 parent 64729e5 commit 0b6e37e

1 file changed

Lines changed: 10 additions & 19 deletions

File tree

extractor/src/test/java/org/schabi/newpipe/extractor/services/youtube/search/YoutubeSearchPagingTest.java

Lines changed: 10 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,8 @@ public class YoutubeSearchPagingTest {
2424
private static ListExtractor.InfoItemsPage<InfoItem> page2;
2525
private static Set<String> urlList1;
2626
private static Set<String> urlList2;
27-
private static int pageSize;
27+
private static int page1Size;
28+
private static int page2Size;
2829

2930
@BeforeClass
3031
public static void setUpClass() throws Exception {
@@ -36,14 +37,15 @@ public static void setUpClass() throws Exception {
3637
extractor.fetchPage();
3738
page1 = extractor.getInitialPage();
3839
urlList1 = extractUrls(page1.getItems());
39-
assertTrue("page with items loaded", 15 < page1.getItems().size());
40-
pageSize = page1.getItems().size();
41-
assertEquals("they are all distinct, no repetition", pageSize, urlList1.size());
40+
assertTrue("failed to load search result page one: too few items", 15 < page1.getItems().size());
41+
page1Size = page1.getItems().size();
42+
assertEquals("duplicated items in search result on page one", page1Size, urlList1.size());
4243

43-
assertTrue("has more than one page of results", page1.hasNextPage());
44-
assertNotNull("has next page url", page1.getNextPageUrl());
44+
assertTrue("search result has no second page", page1.hasNextPage());
45+
assertNotNull("next page url is null", page1.getNextPageUrl());
4546
page2 = extractor.getPage(page1.getNextPageUrl());
4647
urlList2 = extractUrls(page2.getItems());
48+
page2Size = page2.getItems().size();
4749
}
4850

4951
private static Set<String> extractUrls(List<InfoItem> list) {
@@ -54,27 +56,16 @@ private static Set<String> extractUrls(List<InfoItem> list) {
5456
return result;
5557
}
5658

57-
@Test
58-
public void firstPageOk() {
59-
assertTrue("first page contains the expected number of items", 15 < page1.getItems().size());
60-
assertEquals("they are all distinct, no repetition", pageSize, urlList1.size());
61-
}
62-
63-
@Test
64-
public void secondPageLength() {
65-
assertEquals("second page contains only the expected number of items", pageSize, page2.getItems().size());
66-
}
67-
6859
@Test
6960
public void secondPageUniqueVideos() {
70-
assertEquals("they are all distinct, no repetition", pageSize, urlList2.size());
61+
assertEquals("Second search result page has duplicated items", page2Size, urlList2.size());
7162
}
7263

7364
@Test
7465
public void noRepeatingVideosInPages() {
7566
Set<String> intersection = new HashSet<>(urlList2);
7667
intersection.retainAll(urlList1);
77-
assertEquals("Found a duplicated video on second search page", 0, intersection.size());
68+
assertEquals("Found the same item on first AND second search page", 0, intersection.size());
7869
}
7970

8071
}

0 commit comments

Comments (0)