Allow method chaining in Profile.
[Sone.git] / src / main / java / net / pterodactylus / sone / core / SoneDownloader.java
index 1a8f870..091efca 100644 (file)
 
 package net.pterodactylus.sone.core;
 
+import java.io.IOException;
+import java.io.InputStream;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
 import net.pterodactylus.sone.data.Sone;
+import net.pterodactylus.util.io.Closer;
 import net.pterodactylus.util.logging.Logging;
 import net.pterodactylus.util.service.AbstractService;
+import net.pterodactylus.util.xml.SimpleXML;
+import net.pterodactylus.util.xml.XML;
+
+import org.w3c.dom.Document;
+
 import freenet.client.FetchResult;
+import freenet.support.api.Bucket;
 
 /**
  * The Sone downloader is responsible for downloading Sones as they are updated.
@@ -81,6 +90,7 @@ public class SoneDownloader extends AbstractService {
                logger.log(Level.FINE, "Starting fetch for Sone “%s” from %s…", new Object[] { sone, sone.getRequestUri().setMetaString(new String[] { "sone.xml" }) });
                FetchResult fetchResult = freenetInterface.fetchUri(sone.getRequestUri().setMetaString(new String[] { "sone.xml" }));
                logger.log(Level.FINEST, "Got %d bytes back.", fetchResult.size());
+               updateSoneFromXml(sone, fetchResult);
        }
 
        //
@@ -97,4 +107,61 @@ public class SoneDownloader extends AbstractService {
                }
        }
 
+       //
+       // PRIVATE METHODS
+       //
+
+       /**
+        * Updates the contents of the given Sone from the given fetch result.
+        * Parses the fetched bytes as a Sone XML document and validates the
+        * mandatory elements (ID, name, profile) before applying them; on any
+        * validation failure the method logs a warning and returns without
+        * modifying the Sone.
+        *
+        * @param sone
+        *            The Sone to update
+        * @param fetchResult
+        *            The fetch result containing the downloaded XML
+        */
+       private void updateSoneFromXml(Sone sone, FetchResult fetchResult) {
+               logger.log(Level.FINEST, "Parsing FetchResult (%d bytes, %s) for %s…", new Object[] { fetchResult.size(), fetchResult.getMimeType(), sone });
+               /* TODO - impose a size limit? */
+               /* TODO - the XML comes from the network; disable DTDs/external entities in the parser to prevent XXE. */
+               InputStream xmlInputStream = null;
+               Bucket xmlBucket = null;
+               try {
+                       xmlBucket = fetchResult.asBucket();
+                       xmlInputStream = xmlBucket.getInputStream();
+                       Document document = XML.transformToDocument(xmlInputStream);
+                       if (document == null) {
+                               /* TODO - mark Sone as bad. */
+                               logger.log(Level.WARNING, "Could not parse XML for Sone %s!", new Object[] { sone });
+                               return;
+                       }
+                       SimpleXML soneXml = SimpleXML.fromDocument(document);
+
+                       /* check ID: the downloaded document must describe the Sone we asked for. */
+                       String soneId = soneXml.getValue("id", null);
+                       if (!sone.getId().equals(soneId)) {
+                               /* TODO - mark Sone as bad. */
+                               logger.log(Level.WARNING, "Downloaded ID for Sone %s (%s) does not match known ID (%s)!", new Object[] { sone, sone.getId(), soneId });
+                               return;
+                       }
+
+                       /* a Sone without a name is invalid. */
+                       String soneName = soneXml.getValue("name", null);
+                       if (soneName == null) {
+                               /* TODO - mark Sone as bad. */
+                               logger.log(Level.WARNING, "Downloaded name for Sone %s was null!", new Object[] { sone });
+                               return;
+                       }
+
+                       /* a Sone without a profile is invalid. */
+                       SimpleXML profileXml = soneXml.getNode("profile");
+                       if (profileXml == null) {
+                               /* TODO - mark Sone as bad. */
+                               logger.log(Level.WARNING, "Downloaded Sone %s has no profile!", new Object[] { sone });
+                               return;
+                       }
+
+                       /* parse profile. */
+
+               } catch (IOException ioe1) {
+                       logger.log(Level.WARNING, "Could not read XML file from " + sone + "!", ioe1);
+               } finally {
+                       /* close the stream before freeing the bucket that backs it. */
+                       Closer.close(xmlInputStream);
+                       if (xmlBucket != null) {
+                               xmlBucket.free();
+                       }
+               }
+       }
+
 }