/*
 * © 2009 David ‘Bombe’ Roden
 */
4 package net.pterodactylus.arachne.core;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.logging.Logger;

import net.pterodactylus.arachne.parser.ParserFactory;

import de.ina.util.service.AbstractService;
import de.ina.util.thread.DumpingThreadFactory;
import de.ina.util.validation.Validation;
/**
 * Core of the Arachne crawler: queues pages and hands them to URL fetchers.
 *
 * @author David ‘Bombe’ Roden &lt;bombe@pterodactylus.net&gt;
 */
26 public class Core extends AbstractService {
29 private static final Logger logger = Logger.getLogger(Core.class.getName());
35 /** The host of the freenet node. */
36 private String nodeHost = "localhost";
38 /** The port of the freenet node. */
39 private int nodePort = 8888;
45 /** The parser factory. */
46 private ParserFactory parserFactory = new ParserFactory();
48 /** Thread pool for the URL fetches. */
49 private Executor urlFetcherExecutor = Executors.newFixedThreadPool(1, new DumpingThreadFactory("URLFetcher-"));
51 /** The current list of URLs to crawl. */
52 private final List<Page> pages = new ArrayList<Page>();
59 * Sets the host name of the node.
62 * The node’s host name
64 public void setNodeHost(String nodeHost) {
65 this.nodeHost = nodeHost;
73 * Adds the given URL to the list of pages to crawl.
78 public void addPage(URL url) {
79 Validation.begin().isNotNull("url", url).check().isEqual("url.getHost()", url.getHost(), (Object) nodeHost).isEqual("url.getPort()", url.getPort(), nodePort).check();
83 * Adds the given URL to the list of pages to crawl.
86 * The URL of the page to crawl
87 * @throws MalformedURLException
88 * if the URL is not a valid URL
90 public void addPage(String url) throws MalformedURLException {
91 Validation.begin().isNotNull("url", (Object) url).check();
92 addPage(new URL(url));
96 * Adds the given page to the list of pages to crawl.
101 public void addPage(Page page) {
102 Validation.begin().isNotNull("page", page).check();
114 * @see de.ina.util.service.AbstractService#serviceRun()
117 @SuppressWarnings("null")
118 protected void serviceRun() {
119 while (!shouldStop()) {
120 Page nextPage = null;
121 synchronized (syncObject) {
122 while (!shouldStop() && pages.isEmpty()) {
125 } catch (InterruptedException ie1) {
130 nextPage = pages.remove(0);
136 URL nextURL = nextPage.toURL(nodeHost, nodePort);
137 if (nextURL == null) {
138 logger.log(Level.INFO, "Skipping “" + nextPage + "”.");
141 URLFetcher urlFetcher;
143 logger.log(Level.INFO, "Fetching “" + nextURL + "”...");
144 urlFetcher = new URLFetcher(parserFactory, nextURL);
145 urlFetcherExecutor.execute(urlFetcher);
146 } catch (NoSuchAlgorithmException nsae1) {
147 logger.log(Level.SEVERE, "Could not get “SHA-256” message digest!", nsae1);