package net.pterodactylus.rhynodge;
+import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
-import com.google.common.collect.Lists;
-
/**
* A {@code Reaction} binds together {@link Query}s, {@link Merger}s, and
* {@link Action}s, and it stores the intermediary {@link State}s.
private final Query query;
/** The filters to run. */
- private final List<Filter> filters = Lists.newArrayList();
+ private final List<Filter> filters = new ArrayList<>();
/** The merger merges old and new states. */
private final Merger merger;
package net.pterodactylus.rhynodge.filters;
import static com.google.common.base.Preconditions.checkState;
-import static com.google.common.collect.FluentIterable.from;
import static java.util.Arrays.asList;
import java.util.List;
import net.pterodactylus.rhynodge.states.TorrentState;
import net.pterodactylus.rhynodge.states.TorrentState.TorrentFile;
-import com.google.common.base.Predicate;
import org.jspecify.annotations.NonNull;
/**
*/
public class BlacklistFilter implements Filter {
- private final Iterable<String> filterWords;
+ private final List<String> filterWords;
public BlacklistFilter(List<String> filterWords) {
this.filterWords = filterWords;
checkState(state instanceof TorrentState, "state is not a TorrentState but a %s!", state.getClass());
TorrentState torrentState = (TorrentState) state;
- return new TorrentState(from(torrentState.torrentFiles()).filter(new Predicate<TorrentFile>() {
- @Override
- public boolean apply(TorrentFile torrentFile) {
- return (torrentFile != null) && nameDoesNotMatchAFilterWord(torrentFile.name());
- }
+ return new TorrentState(torrentState.torrentFiles().stream().filter(torrentFile -> torrentFile != null && nameDoesNotMatchAFilterWord(torrentFile.name())).toList());
+ }
- private boolean nameDoesNotMatchAFilterWord(final String name) {
- return !from(filterWords).anyMatch(new Predicate<String>() {
- @Override
- public boolean apply(String word) {
- return name.toLowerCase().contains(word.toLowerCase());
- }
- });
- }
- }).toList());
+ private boolean nameDoesNotMatchAFilterWord(final String name) {
+ return filterWords.stream().noneMatch(word -> name.toLowerCase().contains(word.toLowerCase()));
}
public static BlacklistFilter createDefaultBlacklistFilter() {
import static com.google.common.base.Optional.absent;
import static com.google.common.base.Preconditions.checkState;
-import static com.google.common.collect.FluentIterable.from;
+
import static java.util.Arrays.asList;
+import static java.util.stream.Collectors.toSet;
+import java.util.ArrayList;
import java.util.Collection;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import net.pterodactylus.rhynodge.states.TorrentState;
import net.pterodactylus.rhynodge.states.TorrentState.TorrentFile;
-import com.google.common.base.Function;
import com.google.common.base.Optional;
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.Multimap;
import org.apache.log4j.Logger;
import org.jspecify.annotations.NonNull;
checkState(state instanceof TorrentState, "state is not a TorrentState but a %s!", state.getClass());
TorrentState torrentState = (TorrentState) state;
- final Multimap<Episode, TorrentFile> episodes = HashMultimap.create();
+ Map<Episode, List<TorrentFile>> episodes = new HashMap<>();
for (TorrentFile torrentFile : torrentState) {
Optional<Episode> episode = extractEpisode(torrentFile);
if (!episode.isPresent()) {
continue;
}
- episodes.put(episode.get(), torrentFile);
+ episodes.computeIfAbsent(episode.get(), e -> new ArrayList<>()).add(torrentFile);
}
- return new EpisodeState(from(episodes.keySet()).transform(episodeFiller(episodes)).toSet());
+ return new EpisodeState(episodes.keySet().stream().map(episodeFiller(episodes)).collect(toSet()));
}
//
 * A map mapping episodes to the lists of torrent files belonging to them.
 * @return The function that performs the extraction of torrent files
 */
- private static Function<Episode, Episode> episodeFiller(final Multimap<Episode, TorrentFile> episodeTorrents) {
- return new Function<Episode, Episode>() {
- @Override
- public Episode apply(Episode episode) {
- Episode completeEpisode = new Episode(episode.season(), episode.episode());
- for (TorrentFile torrentFile : episodeTorrents.get(episode)) {
- completeEpisode.addTorrentFile(torrentFile);
- }
- return completeEpisode;
+ private static Function<Episode, Episode> episodeFiller(final Map<Episode, List<TorrentFile>> episodeTorrents) {
+ return episode -> {
+ Episode completeEpisode = new Episode(episode.season(), episode.episode());
+ for (TorrentFile torrentFile : episodeTorrents.get(episode)) {
+ completeEpisode.addTorrentFile(torrentFile);
}
+ return completeEpisode;
};
}
package net.pterodactylus.rhynodge.filters.comics;
-import static com.google.common.collect.FluentIterable.from;
import static java.util.Optional.of;
import java.util.List;
import java.util.Optional;
import net.pterodactylus.rhynodge.filters.ComicSiteFilter;
-import com.google.common.base.Function;
-import com.google.common.collect.FluentIterable;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
@Override
protected List<String> extractImageUrls(Document document) {
- return getComicImages(document).transform(new Function<Element, String>() {
- @Override
- public String apply(Element element) {
- return element.attr("src");
- }
- }).toList();
+ return getComicImages(document).stream().map(element -> element.attr("src")).toList();
}
@Override
protected List<String> extractImageComments(Document document) {
- return getComicImages(document).transform(new Function<Element, String>() {
- @Override
- public String apply(Element element) {
- return element.attr("title");
- }
- }).toList();
+ return getComicImages(document).stream().map(element -> element.attr("title")).toList();
}
- private FluentIterable<Element> getComicImages(Document document) {
- return from(document.select("section img"));
+ private List<Element> getComicImages(Document document) {
+ return document.select("section img");
}
}
import java.util.Optional;
import net.pterodactylus.rhynodge.filters.ComicSiteFilter;
-import com.google.common.base.Function;
-import com.google.common.collect.FluentIterable;
import org.jsoup.nodes.Document;
-import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
/**
@Override
protected List<String> extractImageUrls(Document document) {
Elements imageTags = document.select(".webcomic-object-full img");
- return FluentIterable.from(imageTags).transform(new Function<Element, String>() {
-
- @Override
- public String apply(Element input) {
- return input.attr("src");
- }
- }).toList();
+ return imageTags.stream().map(input -> input.attr("src")).toList();
}
@Override
import java.util.Optional;
import net.pterodactylus.rhynodge.filters.ComicSiteFilter;
-import com.google.common.base.Function;
-import com.google.common.collect.FluentIterable;
import org.jsoup.nodes.Document;
-import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
/**
@Override
protected List<String> extractImageUrls(Document document) {
Elements imageTags = document.select(".MainComic__ComicImage-sc-ndbx87-2 > span noscript img");
- return FluentIterable.from(imageTags).transform(new Function<Element, String>() {
-
- @Override
- public String apply(Element input) {
- String imageUrl = input.attr("src");
- try {
- return new URI(document.baseUri()).resolve(imageUrl).toString();
- } catch (URISyntaxException e) {
- /* ignore. */
- }
- if (!imageUrl.startsWith("/")) {
- return imageUrl;
- }
- String protocol = document.baseUri().substring(0, document.baseUri().indexOf('/'));
- return protocol + imageUrl;
+ return imageTags.stream().map(input -> {
+ String imageUrl = input.attr("src");
+ try {
+ return new URI(document.baseUri()).resolve(imageUrl).toString();
+ } catch (URISyntaxException e) {
+ /* ignore. */
+ }
+ if (!imageUrl.startsWith("/")) {
+ return imageUrl;
}
+ String protocol = document.baseUri().substring(0, document.baseUri().indexOf('/'));
+ return protocol + imageUrl;
}).toList();
}
package net.pterodactylus.rhynodge.filters.comics;
-import static com.google.common.collect.FluentIterable.from;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyList;
import java.util.Optional;
import net.pterodactylus.rhynodge.filters.ComicSiteFilter;
-import com.google.common.base.Function;
import org.jsoup.nodes.Document;
-import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
/**
@Override
protected List<String> extractImageUrls(Document document) {
Elements imageElements = document.select(".content img[alt~=.Comic.for]");
- return from(imageElements).transformAndConcat(new Function<Element, Iterable<String>>() {
- @Override
- public Iterable<String> apply(Element element) {
- return ((element != null) && element.hasAttr("src")) ? asList(element.attr("src")) : Collections.<String>emptyList();
- }
- }).toList();
+ return imageElements.stream()
+ .flatMap(element -> (((element != null) && element.hasAttr("src")) ? asList(element.attr("src")) : Collections.<String>emptyList()).stream())
+ .toList();
}
@Override
package net.pterodactylus.rhynodge.filters.comics;
import java.util.Arrays;
-import java.util.Collections;
import java.util.List;
import java.util.Optional;
+import java.util.stream.Stream;
import net.pterodactylus.rhynodge.filters.ComicSiteFilter;
-import com.google.common.base.Predicate;
-import com.google.common.collect.FluentIterable;
import org.jsoup.nodes.Document;
/**
protected List<String> extractImageUrls(Document document) {
String imageUrl = document.select("img#cc-comic").get(0).attr("src");
String afterImageUrl = document.select("#aftercomic img").attr("src");
- return FluentIterable.from(Arrays.asList(imageUrl, afterImageUrl)).filter(url -> url.length() > 0).toList();
+ return Stream.of(imageUrl, afterImageUrl).filter(url -> !url.isEmpty()).toList();
}
@Override
package net.pterodactylus.rhynodge.filters.comics;
-import com.google.common.base.Function;
import java.util.Optional;
import net.pterodactylus.rhynodge.filters.ComicSiteFilter;
import org.jsoup.nodes.Document;
-import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import java.util.List;
-import static com.google.common.collect.FluentIterable.from;
import static java.util.Collections.emptyList;
/**
@Override
protected List<String> extractImageUrls(Document document) {
- return from(findImageElements(document)).transform(element -> element.attr("src")).toList();
+ return findImageElements(document).stream()
+ .map(element -> element.attr("src"))
+ .toList();
}
@Override
import java.util.List;
+import java.util.stream.Stream;
import net.pterodactylus.rhynodge.filters.ComicSiteFilter;
-import com.google.common.base.Function;
-import com.google.common.collect.FluentIterable;
import org.jsoup.nodes.Document;
-import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import static java.util.Optional.empty;
@Override
protected List<String> extractImageUrls(Document document) {
- return extractImages(document).transform(new Function<String[], String>() {
-
- @Override
- public String apply(String[] input) {
- return input[0];
- }
- }).toList();
+ return extractImages(document).map(input -> input[0]).toList();
}
@Override
protected List<String> extractImageComments(Document document) {
- return extractImages(document).transform(new Function<String[], String>() {
-
- @Override
- public String apply(String[] input) {
- return input[1];
- }
- }).toList();
+ return extractImages(document).map(input -> input[1]).toList();
}
//
*
* @param document
* The document to extract the images from
- * @return An iterable containing all image URL and comment pairs
+ * @return A {@link Stream} containing all image URL and comment pairs
*/
- private FluentIterable<String[]> extractImages(Document document) {
- return FluentIterable.from(document.select("div#comic img")).transform(new Function<Element, String[]>() {
-
- @Override
- public String[] apply(Element image) {
- return new String[] { image.attr("src"), image.attr("title") };
- }
- });
+ private Stream<String[]> extractImages(Document document) {
+ return document.select("div#comic img").stream().map(image -> new String[] { image.attr("src"), image.attr("title") });
}
}
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
-import com.google.common.base.Predicate;
-import com.google.common.collect.Maps;
import com.google.common.util.concurrent.AbstractExecutionThreadService;
import com.google.common.util.concurrent.Uninterruptibles;
+import static java.util.stream.Collectors.toMap;
+
/**
* Watches a directory for chain configuration files and loads and unloads
* {@link Reaction}s from the {@link Engine}.
}
/* filter enabled chains. */
- Map<String, Chain> enabledChains = Maps.filterEntries(chains, new Predicate<Entry<String, Chain>>() {
-
- @Override
- public boolean apply(Entry<String, Chain> chainEntry) {
- return chainEntry.getValue().enabled();
- }
- });
+ Map<String, Chain> enabledChains = chains.entrySet().stream()
+ .filter(chainEntry -> chainEntry.getValue().enabled())
+ .collect(toMap(Entry::getKey, Entry::getValue));
logger.debug(String.format("Found %d enabled Chain(s).", enabledChains.size()));
/* check for removed chains. */
package net.pterodactylus.rhynodge.output;
+import java.util.HashMap;
import java.util.Map;
-import com.google.common.collect.Maps;
-
/**
* {@link Output} implementation that stores texts for arbitrary MIME types.
*
private final String summary;
/** The texts for the different MIME types. */
- private final Map<String, String> mimeTypeTexts = Maps.newHashMap();
+ private final Map<String, String> mimeTypeTexts = new HashMap<>();
/**
* Creates a new default output.
import net.pterodactylus.rhynodge.states.ComicState.Comic;
import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.collect.Lists;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.lang3.StringUtils;
import org.jspecify.annotations.NonNull;
public class ComicState extends AbstractState implements Iterable<Comic> {
@JsonProperty
- private final List<Comic> comics = Lists.newArrayList();
+ private final List<Comic> comics = new ArrayList<>();
private final Set<Comic> newComics = new HashSet<>();
@SuppressWarnings("unused")
private final String title;
@JsonProperty
- private final List<Strip> strips = Lists.newArrayList();
+ private final List<Strip> strips = new ArrayList<>();
public Comic(@JsonProperty("title") String title) {
this.title = title;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
+import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
+import java.util.function.Function;
import net.pterodactylus.rhynodge.Reaction;
import net.pterodactylus.rhynodge.State;
import net.pterodactylus.rhynodge.filters.EpisodeFilter;
import net.pterodactylus.rhynodge.states.TorrentState.TorrentFile;
import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Function;
-import com.google.common.collect.FluentIterable;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.Ordering;
import org.apache.commons.lang3.StringEscapeUtils;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;
+import static java.util.stream.Collectors.groupingBy;
+import static java.util.stream.Collectors.toList;
+
/**
* {@link State} implementation that stores episodes of TV shows, parsed via
* {@link EpisodeFilter} from a previous {@link TorrentState}.
}
/* list all known episodes. */
stringBuilder.append("All Known Episodes\n\n");
- ImmutableMap<Integer, Collection<Episode>> episodesBySeason = FluentIterable.from(episodes).index(Episode::season).asMap();
- for (Map.Entry<Integer, Collection<Episode>> seasonEntry : episodesBySeason.entrySet()) {
+ Map<Integer, List<Episode>> episodesBySeason = episodes.stream().collect(groupingBy(Episode::season));
+ for (Map.Entry<Integer, List<Episode>> seasonEntry : episodesBySeason.entrySet()) {
stringBuilder.append(" Season ").append(seasonEntry.getKey()).append("\n\n");
- for (Episode episode : Ordering.natural().sortedCopy(seasonEntry.getValue())) {
+ for (Episode episode : seasonEntry.getValue().stream().sorted().toList()) {
stringBuilder.append(" Episode ").append(episode.episode()).append("\n");
for (TorrentFile torrentFile : episode) {
stringBuilder.append(" Size: ").append(torrentFile.size());
htmlBuilder.append("</thead>\n");
htmlBuilder.append("<tbody>\n");
Episode lastEpisode = null;
- for (Map.Entry<Integer, Collection<Episode>> seasonEntry : FluentIterable.from(Ordering.natural().reverse().sortedCopy(episodes)).index(Episode.BY_SEASON).asMap().entrySet()) {
+ for (Map.Entry<Integer, List<Episode>> seasonEntry : episodes.stream().sorted(Comparator.<Episode>naturalOrder().reversed()).collect(groupingBy(Episode::season, LinkedHashMap::new, toList())).entrySet()) {
for (Episode episode : seasonEntry.getValue()) {
for (TorrentFile torrentFile : episode) {
if (newEpisodes.contains(episode)) {
*/
public static class Episode implements Comparable<Episode>, Iterable<TorrentFile> {
- /** Function to extract the season of an episode. */
- public static final Function<Episode, Integer> BY_SEASON = new Function<Episode, Integer>() {
-
- @Override
- public Integer apply(Episode episode) {
- return (episode != null ) ? episode.season() : -1;
- }
- };
-
/** The season of the episode. */
@JsonProperty
private final int season;
package net.pterodactylus.rhynodge.states;
import java.nio.charset.StandardCharsets;
+import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
+import java.util.Comparator;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import net.pterodactylus.rhynodge.states.TorrentState.TorrentFile;
import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.collect.Lists;
-import com.google.common.collect.Ordering;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.jspecify.annotations.NonNull;
import org.jspecify.annotations.Nullable;
-import static com.google.common.collect.Ordering.from;
import static java.lang.String.format;
/**
/** The torrent files. */
@JsonProperty
- private List<TorrentFile> files = Lists.newArrayList();
+ private List<TorrentFile> files = new ArrayList<>();
private final Set<TorrentFile> newTorrentFiles = new HashSet<>();
htmlBuilder.append("</tr>\n");
htmlBuilder.append("</thead>\n");
htmlBuilder.append("<tbody>\n");
- for (TorrentFile torrentFile : sortNewFirst().sortedCopy(files)) {
+ for (TorrentFile torrentFile : files.stream().sorted(sortNewFirst()).toList()) {
if (newTorrentFiles.contains(torrentFile)) {
htmlBuilder.append("<tr style=\"color: #008000; font-weight: bold;\">");
} else {
}
/**
- * Returns an ordering that sorts torrent files by whether they are new
- * (according to {@link #files}) or not. New files will be sorted
+ * Returns a {@link Comparator} that sorts torrent files by whether they are new
+ * (according to {@link #newTorrentFiles}) or not. New files will be sorted
* first.
*
- * @return An ordering for “new files first”
+ * @return A {@link Comparator} for “new files first”
*/
- private Ordering<TorrentFile> sortNewFirst() {
- return from((TorrentFile leftTorrentFile, TorrentFile rightTorrentFile) -> {
+ private Comparator<TorrentFile> sortNewFirst() {
+ return (leftTorrentFile, rightTorrentFile) -> {
if (newTorrentFiles.contains(leftTorrentFile) && !newTorrentFiles.contains(rightTorrentFile)) {
return -1;
}
return 1;
}
return 0;
- });
+ };
}
//
import net.pterodactylus.rhynodge.mergers.EpisodeMerger;
import net.pterodactylus.rhynodge.queries.HttpQuery;
-import com.google.common.collect.ImmutableList;
-
/**
* {@link Watcher} implementation that watches Kick Ass Torrents for new
* episodes.
* @return The filters of the watcher
*/
private static List<Filter> createFilters() {
- return ImmutableList.of(new HtmlFilter(), new KickAssTorrentsFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter(), new EpisodeFilter());
+ return List.of(new HtmlFilter(), new KickAssTorrentsFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter(), new EpisodeFilter());
}
}
import net.pterodactylus.rhynodge.mergers.TorrentMerger;
import net.pterodactylus.rhynodge.queries.HttpQuery;
-import com.google.common.collect.ImmutableList;
-
/**
* {@link Watcher} implementation that watches Kick Ass Torrents for new files.
*
* @return The filters of the watcher
*/
private static List<Filter> createFilters() {
- return ImmutableList.of(new HtmlFilter(), new KickAssTorrentsFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter());
+ return List.of(new HtmlFilter(), new KickAssTorrentsFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter());
}
}
package net.pterodactylus.rhynodge.watchers;
+import java.util.ArrayList;
import java.util.List;
import net.pterodactylus.rhynodge.Filter;
import net.pterodactylus.rhynodge.queries.HttpQuery;
import com.google.common.base.Optional;
-import com.google.common.collect.ImmutableList;
import org.jsoup.nodes.Document;
import org.jsoup.select.Elements;
* @return The filters to parse LICD
*/
private static List<Filter> createFilters() {
- ImmutableList.Builder<Filter> filters = ImmutableList.builder();
+ var filters = new ArrayList<Filter>();
filters.add(new HtmlFilter());
filters.add(new ExtractUrlFilter() {
filters.add(new HtmlFilter());
filters.add(new LeastICouldDoComicFilter());
- return filters.build();
+ return filters;
}
}
import net.pterodactylus.rhynodge.queries.FallbackQuery;
import net.pterodactylus.rhynodge.queries.HttpQuery;
-import com.google.common.collect.ImmutableList;
-
/**
* {@link Watcher} implementation that watches The Pirate Bay for new episodes.
*
* @return The filters of the watcher
*/
private static List<Filter> createFilters() {
- return ImmutableList.of(new HtmlFilter(), new PirateBayFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter(), new EpisodeFilter());
+ return List.of(new HtmlFilter(), new PirateBayFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter(), new EpisodeFilter());
}
}
import net.pterodactylus.rhynodge.queries.FallbackQuery;
import net.pterodactylus.rhynodge.queries.HttpQuery;
-import com.google.common.collect.ImmutableList;
-
/**
* {@link Watcher} implementation that watches The Pirate Bay for new files.
*
* @return The filters of the watcher
*/
private static List<Filter> createFilters() {
- return ImmutableList.of(new HtmlFilter(), new PirateBayFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter());
+ return List.of(new HtmlFilter(), new PirateBayFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter());
}
}
import net.pterodactylus.rhynodge.mergers.EpisodeMerger;
import net.pterodactylus.rhynodge.queries.HttpQuery;
-import com.google.common.collect.ImmutableList;
-
/**
* {@link net.pterodactylus.rhynodge.Watcher} implementation that watches
* TorrentHound for new episodes.
* @return The filters of the watcher
*/
private static List<Filter> createFilters() {
- return ImmutableList.of(new HtmlFilter(), new TorrentHoundFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter(), new EpisodeFilter());
+ return List.of(new HtmlFilter(), new TorrentHoundFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter(), new EpisodeFilter());
}
}
import net.pterodactylus.rhynodge.mergers.TorrentMerger;
import net.pterodactylus.rhynodge.queries.HttpQuery;
-import com.google.common.collect.ImmutableList;
-
/**
* {@link net.pterodactylus.rhynodge.Watcher} implementation that watches
* TorrentHound for new files.
* @return The filters of the watcher
*/
private static List<Filter> createFilters() {
- return ImmutableList.of(new HtmlFilter(), new TorrentHoundFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter());
+ return List.of(new HtmlFilter(), new TorrentHoundFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter());
}
}
import net.pterodactylus.rhynodge.mergers.EpisodeMerger;
import net.pterodactylus.rhynodge.queries.HttpQuery;
-import com.google.common.collect.ImmutableList;
-
/**
* {@link net.pterodactylus.rhynodge.Watcher} implementation that watches
* {@code torrentz.eu} for new episodes.
* @return The filters of the watcher
*/
private static List<Filter> createFilters() {
- return ImmutableList.of(new HtmlFilter(), new TorrentzEuFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter(), new EpisodeFilter());
+ return List.of(new HtmlFilter(), new TorrentzEuFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter(), new EpisodeFilter());
}
}
import net.pterodactylus.rhynodge.mergers.TorrentMerger;
import net.pterodactylus.rhynodge.queries.HttpQuery;
-import com.google.common.collect.ImmutableList;
-
/**
* {@link net.pterodactylus.rhynodge.Watcher} implementation that watches
* {@code torrentz.eu} for new files.
* @return The filters of the watcher
*/
private static List<Filter> createFilters() {
- return ImmutableList.of(new HtmlFilter(), new TorrentzEuFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter());
+ return List.of(new HtmlFilter(), new TorrentzEuFilter(), createDefaultBlacklistFilter(), new SizeBlacklistFilter());
}
}
import net.pterodactylus.rhynodge.filters.ComicSiteFilter
import net.pterodactylus.rhynodge.utils.asOptional
import org.jsoup.nodes.Document
-import org.jsoup.nodes.Element
-import java.util.stream.Collectors
/**
* [ComicSiteFilter] implementation that can parse “Heldentage” comics.
import net.pterodactylus.rhynodge.filters.ComicSiteFilter
import net.pterodactylus.rhynodge.utils.asOptional
import org.jsoup.nodes.Document
-import org.jsoup.nodes.Element
-import java.util.stream.Collectors
/**
* [ComicSiteFilter] implementation that can parse Sinfest.