import java.net.URL;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.logging.Logger;

import net.pterodactylus.arachne.parser.ParserFactory;

import de.ina.util.service.AbstractService;
import de.ina.util.thread.DumpingThreadFactory;
import de.ina.util.validation.Validation;
// INTERNAL MEMBERS
//
+ /** The parser factory. */
+ private ParserFactory parserFactory = new ParserFactory();
+
/** Thread pool for the URL fetches. */
- private Executor urlFetcherExecutor = Executors.newFixedThreadPool(1, new DumpingThreadFactory("URLFetcher-"));
+ private Executor urlFetcherExecutor = Executors.newSingleThreadExecutor(new DumpingThreadFactory("URLFetcher-"));
/** The current list of URLs to crawl. */
private final List<Page> pages = new ArrayList<Page>();
+ /** Already crawled pages. */
+ private final Set<Page> crawledPages = new HashSet<Page>();
+
//
// ACCESSORS
//
*/
public void addPage(URL url) {
Validation.begin().isNotNull("url", url).check().isEqual("url.getHost()", url.getHost(), (Object) nodeHost).isEqual("url.getPort()", url.getPort(), nodePort).check();
- String path = url.getPath();
- if (path.length() == 0) {
- path = "/";
- }
- String[] pathComponents = path.split("/");
- if (pathComponents.length < 2) {
- throw new IllegalArgumentException("URL “" + url + "” is not a valid freenet page.");
- }
- String siteName = pathComponents[1];
- String[] siteComponents = siteName.split("@");
- if (siteComponents.length != 2) {
- throw new IllegalArgumentException("siteName “" + siteName + "” is not a valid freenet page.");
- }
- if (!"USK".equals(siteComponents[0]) && !"SSK".equals(siteComponents[0]) && !"CHK".equals(siteComponents[0])) {
- throw new IllegalArgumentException("siteName “" + siteName + "” is not a valid freenet page.");
- }
- if ("USK".equals(siteComponents[0])) {
- Site site = new Site(siteComponents[1], pathComponents[2]);
- Edition edition = new Edition(site, Integer.parseInt(pathComponents[3]));
- Page page = new Page(edition, createPath(pathComponents, 4));
- addPage(page);
- }
- if ("SSK".equals(siteComponents[0])) {
- int lastDash = pathComponents[2].lastIndexOf('-');
- String basename = pathComponents[2].substring(0, lastDash);
- int editionNumber = Integer.parseInt(pathComponents[2].substring(lastDash + 1));
- Site site = new Site(siteComponents[1], basename);
- Edition edition = new Edition(site, editionNumber);
- Page page = new Page(edition, createPath(pathComponents, 3));
- addPage(page);
- }
- /* TODO: handle CHK */
+ addPage(Page.fromURL(url));
}
/**
*/
public void addPage(Page page) {
Validation.begin().isNotNull("page", page).check();
- pages.add(page);
- notifySyncObject();
+ if (!crawledPages.contains(page) && !pages.contains(page)) {
+ pages.add(page);
+ notifySyncObject();
+ }
}
//
//
/**
- * {@inheritdoc}
+ * {@inheritDoc}
*
* @see de.ina.util.service.AbstractService#serviceRun()
*/
@Override
protected void serviceRun() {
while (!shouldStop()) {
- Page nextPage = null;
+ final Page nextPage;
+ Page page = null;
synchronized (syncObject) {
while (!shouldStop() && pages.isEmpty()) {
try {
}
}
if (!shouldStop()) {
- nextPage = pages.remove(0);
+ page = pages.remove(0);
}
}
if (shouldStop()) {
break;
}
- URL nextURL = createURL(nextPage);
+ nextPage = page;
+ URL nextURL = nextPage.toURL(nodeHost, nodePort);
if (nextURL == null) {
logger.log(Level.INFO, "Skipping “" + nextPage + "”.");
continue;
}
- URLFetcher urlFetcher;
+ final URLFetcher urlFetcher;
try {
logger.log(Level.INFO, "Fetching “" + nextURL + "”...");
- urlFetcher = new URLFetcher(this, nextURL);
- urlFetcherExecutor.execute(urlFetcher);
+ urlFetcher = new URLFetcher(parserFactory, nextURL);
+ urlFetcherExecutor.execute(new Runnable() {
+
+ @SuppressWarnings("synthetic-access")
+ public void run() {
+ urlFetcher.run();
+ crawledPages.add(nextPage);
+ for (Page page : urlFetcher.getCollectedPages()) {
+ addPage(page);
+ }
+ }
+ });
} catch (NoSuchAlgorithmException nsae1) {
logger.log(Level.SEVERE, "Could not get “SHA-256” message digest!", nsae1);
}
}
}
- //
- // PRIVATE METHODS
- //
-
- /**
- * Creates a path from the given String array, starting at the given index.
- * The path is created by joining all Strings from the array, separating
- * them with a slash (‘/’).
- *
- * @param pathComponents
- * The array of path components
- * @param index
- * The index of the first path components
- * @return The joined path
- */
- private String createPath(String[] pathComponents, int index) {
- Validation.begin().isNotNull("pathComponents", pathComponents).check().isLessOrEqual("index", index, pathComponents.length).check();
- StringBuilder path = new StringBuilder();
- for (int pathComponentIndex = index; pathComponentIndex < pathComponents.length; pathComponentIndex++) {
- if (path.length() > 0) {
- path.append('/');
- }
- path.append(pathComponents[pathComponentIndex]);
- }
- return path.toString();
- }
-
- /**
- * Creates a URL from the given page.
- *
- * @param page
- * The page to create a URL from
- * @return The created URL, or <code>null</code> if the URL could not be
- * created
- */
- private URL createURL(Page page) {
- try {
- return new URL("http://" + nodeHost + ":" + nodePort + "/SSK@" + page.getEdition().getSite().getKey() + "/" + page.getEdition().getSite().getBasename() + "-" + page.getEdition().getNumber() + "/" + page.getPath());
- } catch (MalformedURLException mue1) {
- /* nearly impossible. */
- }
- return null;
- }
-
}