Author: uli
Date: Thu Jan 31 20:05:45 2013
New Revision: 1441170
URL: http://svn.apache.org/viewvc?rev=1441170&view=rev
Log:
apparently CXF's version isn't working for us anymore...
Added:
tapestry/tapestry-site/trunk/src/
tapestry/tapestry-site/trunk/src/main/
tapestry/tapestry-site/trunk/src/main/java/
tapestry/tapestry-site/trunk/src/main/java/org/
tapestry/tapestry-site/trunk/src/main/java/org/apache/
tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/
tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/
tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/ConfluenceCleanupWriter.java
tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/Page.java
tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/SiteExporter.java
Added:
tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/ConfluenceCleanupWriter.java
URL:
http://svn.apache.org/viewvc/tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/ConfluenceCleanupWriter.java?rev=1441170&view=auto
==============================================================================
---
tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/ConfluenceCleanupWriter.java
(added)
+++
tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/ConfluenceCleanupWriter.java
Thu Jan 31 20:05:45 2013
@@ -0,0 +1,313 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.cxf.cwiki;
+
+import java.io.Writer;
+import java.net.MalformedURLException;
+import java.net.URL;
+import java.util.LinkedHashMap;
+import java.util.Map;
+import java.util.Stack;
+
+import org.xml.sax.Attributes;
+import org.xml.sax.SAXException;
+
+import org.ccil.cowan.tagsoup.XMLWriter;
+
+/**
+ *
+ */
/**
 * TagSoup {@link XMLWriter} that cleans up HTML exported from Confluence as
 * it is re-serialized for the static site.  It rewrites Confluence-internal
 * links and image references to exported file names (downloading thumbnails
 * and user avatars on demand via the {@link SiteExporter}), strips attributes
 * that are invalid outside Confluence, pads empty table rows, and re-labels
 * the main "ConfluenceContent" div with a configured id/class.
 * One instance is used per exported {@link Page}.
 */
public class ConfluenceCleanupWriter extends XMLWriter {

    // page being rendered; used for warnings and to derive the names of the
    // per-page ".data"/".thumbs"/".userimage" directories
    private final Page page;
    // owner that resolves links to other pages and downloads binary resources
    private final SiteExporter exporter;
    // replacement id/class for the "ConfluenceContent" div (either may be null)
    private final String divId;
    private final String divCls;
    // cell counts for nested <tr> elements; the top of the stack is the count
    // for the enclosing row while a nested row is open
    private final Stack<Integer> trStack = new Stack<Integer>();
    private int curTrCount;

    /**
     * @param exp    exporter that owns the page index
     * @param writer underlying output writer
     * @param page   page whose content is being written
     * @param id     id to put on the main content div, or null
     * @param divCls class to put on the main content div, or null
     */
    public ConfluenceCleanupWriter(SiteExporter exp, Writer writer, Page page,
                                   String id, String divCls) {
        super(writer);
        this.page = page;
        this.exporter = exp;
        this.divId = id;
        this.divCls = divCls;
    }

    //CHECKSTYLE:OFF
    /**
     * Intercepts every start tag and rewrites element-specific attributes
     * before delegating to the underlying writer; unrecognized elements pass
     * through untouched.
     */
    public void startElement(String uri, String localName, String qName, final Attributes atts)
        throws SAXException {
        AttributesWrapper newAtts = new AttributesWrapper(atts);
        if ("a".equals(localName.toLowerCase())
            || "a".equals(qName.toLowerCase())) {
            String href = atts.getValue("href");
            //Confluence sticks this on links from blog entries, but it's invalid
            newAtts.remove("data-username");
            if (href != null && href.startsWith("/confluence/display/")) {
                // split off fragment and query so the bare path can be matched
                // against known page URLs
                String params = "";
                if (href.indexOf('#') != -1) {
                    params = href.substring(href.indexOf('#'));
                    href = href.substring(0, href.indexOf('#'));
                }
                if (href.indexOf('?') != -1) {
                    if (params.length() > 0) {
                        // NOTE(review): params already starts with '#', so this
                        // produces "?query##fragment" — confirm the extra '#'
                        // is intended
                        params = href.substring(href.indexOf('?')) + "#" + params;
                    } else {
                        params = href.substring(href.indexOf('?'));
                    }
                    href = href.substring(0, href.indexOf('?'));
                }
                try {
                    final Page p = exporter.findPageByURL(href);
                    if (p != null) {
                        // link to an exported page: point at its local file
                        newAtts.addMapping("href", p.createFileName() + params);
                    } else {
                        if (href.indexOf('~') == -1) {
                            //link to a user page is OK, don't warn about it
                            System.out.println("Could not find page for " + href
                                               + " linked from " + page.getTitle());
                        }
                        // fall back to the live wiki ("/confluence" is 11 chars)
                        newAtts.addMapping("href", SiteExporter.ROOT + href.substring(11));
                    }
                } catch (Exception e) {
                    throw new SAXException(e);
                }
            } else if (href != null && href.startsWith("/confluence/plugins/")) {
                newAtts.addMapping("href", SiteExporter.ROOT + href.substring(11));
            } else if (href != null && href.contains("/confluence/pages/viewpage.action")) {
                // id-based permalink: resolve through the page index
                int idx = href.indexOf("pageId=");
                String id = href.substring(idx + 7);
                Page p = exporter.findPageByID(id);
                if (p != null) {
                    newAtts.addMapping("href", p.createFileName());
                } else {
                    System.out.println("Could not find page for id: " + id
                                       + " linked from " + page.getTitle());
                }
            } else if (href != null && href.contains("/confluence/download/attachments")) {
                // attachments were downloaded into "<page>.data" by the exporter
                href = href.substring(href.lastIndexOf("/"));
                String dirName = page.createFileName();
                dirName = dirName.substring(0, dirName.lastIndexOf(".")) + ".data";

                newAtts.addMapping("href", dirName + href);
            } else if (href != null && href.contains("/confluence/pages/createpage.action")) {
                // "create page" links must go back to the live wiki
                System.out.println("Adding createpage link for " + href + " from " + page.getTitle());
                newAtts.addMapping("href", SiteExporter.HOST + href);
            } else if (href != null
                && (href.startsWith("http://")
                    || href.startsWith("https://"))) {
                URL url;
                try {
                    url = new URL(href);
                    if (url.getHost().contains("apache.org")) {
                        // drop rel hints (e.g. nofollow) for apache.org links
                        newAtts.remove("rel");
                    }
                    if (url.getHost().equals("cxf.apache.org")
                        && "external-link".equals(newAtts.getValue("class"))) {
                        // links to our own site should not render as "external"
                        newAtts.remove("class");
                    }
                } catch (MalformedURLException e) {
                    //ignore
                }
            }
        } else if ("img".equals(localName.toLowerCase())
            || "img".equals(qName.toLowerCase())) {
            String href = atts.getValue("src");
            if ("absmiddle".equalsIgnoreCase(atts.getValue("align"))) {
                // "absmiddle" is non-standard; map to the standard value
                newAtts.addMapping("align", "middle");
            }
            String cls = atts.getValue("class");
            if (href != null && href.startsWith("/confluence/images/")) {
                newAtts.addMapping("src", SiteExporter.HOST + href);
            } else if (href != null && href.startsWith("/confluence/download/attachments")) {
                if (cls == null) {
                    // plain attachment image: already saved into "<page>.data"
                    href = href.substring(0, href.lastIndexOf('?'));
                    href = href.substring(href.lastIndexOf('/'));
                    String dirName = page.createFileName();
                    dirName = dirName.substring(0, dirName.lastIndexOf(".")) + ".data";

                    newAtts.addMapping("src", dirName + href);
                } else if (cls.contains("userLogo")) {
                    // avatar: download (authenticated) into "<page>.userimage"
                    String name = href;
                    try {
                        name = exporter.loadUserImage(page, href);
                    } catch (Exception ex) {
                        System.out.println("Could not download userLogo " + href
                                           + " linked from " + page.getTitle());
                    }
                    String dirName = page.createFileName();
                    dirName = dirName.substring(0, dirName.lastIndexOf(".")) + ".userimage/";

                    newAtts.addMapping("src", dirName + name);
                } else {
                    newAtts.addMapping("src", SiteExporter.HOST + href);
                }
            } else if (href != null && href.startsWith("/confluence/download/thumbnails")) {
                // thumbnail: download into "<page>.thumbs"
                String name = href;
                try {
                    name = exporter.loadThumbnail(page, href);
                } catch (Exception ex) {
                    System.out.println("Could not download thumbnail " + href
                                       + " linked from " + page.getTitle());
                }
                String dirName = page.createFileName();
                dirName = dirName.substring(0, dirName.lastIndexOf(".")) + ".thumbs/";

                newAtts.addMapping("src", dirName + name);
            } else if (href != null && href.startsWith("/confluence")) {
                newAtts.addMapping("src", SiteExporter.HOST + href);
            }
        } else if ("th".equals(localName.toLowerCase())
            || "th".equals(qName.toLowerCase())) {
            curTrCount++;
        } else if ("td".equals(localName.toLowerCase())
            || "td".equals(qName.toLowerCase())) {
            curTrCount++;
            if (newAtts.getIndex("nowrap") != -1) {
                //make sure nowrap attribute is set to nowrap per HTML spec
                newAtts.addMapping("nowrap", "nowrap");
            }
        } else if ("tr".equals(localName.toLowerCase())
            || "tr".equals(qName.toLowerCase())) {
            // start counting cells for this row; save the enclosing row's count
            trStack.push(curTrCount);
            curTrCount = 0;
        } else if ("div".equals(localName.toLowerCase())
            || "div".equals(qName.toLowerCase())) {
            String id = atts.getValue("id");
            if ("ConfluenceContent".equals(id)) {
                // re-label the main content div with the configured class/id
                if (divCls != null) {
                    newAtts.addMapping("class", divCls);
                    newAtts.remove("id");
                }
                if (divId != null) {
                    newAtts.addMapping("id", divId);
                }
            }
        } else if ("input".equals(localName.toLowerCase())
            || "input".equals(qName.toLowerCase())) {
            String value = atts.getValue("value");
            if (value != null && value.startsWith("/confluence/")) {
                newAtts.addMapping("value", SiteExporter.ROOT + value.substring(11));
            }
        }
        super.startElement(uri, localName, qName, newAtts);
    }



    /**
     * Closes elements, inserting an empty &lt;td&gt; into any &lt;tr&gt; that
     * produced no cells, and restores the enclosing row's cell count.
     */
    public void endElement(String uri, String localName, String qName) throws SAXException {
        if ("tr".equals(localName.toLowerCase())
            || "tr".equals(qName.toLowerCase())) {
            if (curTrCount == 0) {
                super.startElement("td");
                super.endElement("td");
            }
            curTrCount = trStack.pop();
        }
        super.endElement(uri, localName, qName);
    }

    /**
     * Mutable, insertion-ordered copy of a SAX {@link Attributes} set that
     * supports adding, replacing and removing attributes.  Namespace URIs are
     * ignored: all lookups go by name only, every attribute reports an empty
     * URI and type "CDATA".
     */
    final class AttributesWrapper implements Attributes {
        private final Map<String, String> atts = new LinkedHashMap<String, String>();

        private AttributesWrapper(Attributes atts) {
            for (int x = 0; x < atts.getLength(); x++) {
                this.atts.put(atts.getQName(x), atts.getValue(x));
            }
        }
        // linear scan; attribute sets are tiny so O(n) access is fine
        private Map.Entry<String, String> getByIndex(int i) {
            for (Map.Entry<String, String> a : atts.entrySet()) {
                if (i == 0) {
                    return a;
                }
                --i;
            }
            return null;
        }
        private int findIndex(String k) {
            int i = 0;
            for (Map.Entry<String, String> a : atts.entrySet()) {
                if (a.getKey().equals(k)) {
                    return i;
                }
                ++i;
            }
            return -1;
        }

        /** Removes the attribute with the given name, if present. */
        public void remove(String k) {
            atts.remove(k);
        }

        /** Adds the attribute, replacing any existing value for the same name. */
        public void addMapping(String k, String v) {
            atts.put(k, v);
        }

        public int getLength() {
            return atts.size();
        }

        public String getURI(int index) {
            return "";
        }

        public String getLocalName(int index) {
            return getByIndex(index).getKey();
        }

        public String getQName(int index) {
            return getByIndex(index).getKey();
        }

        public String getType(int index) {
            return "CDATA";
        }

        public int getIndex(String uri, String localName) {
            // uri intentionally ignored; lookups are name-only
            return findIndex(localName);
        }

        public int getIndex(String qName) {
            return findIndex(qName);
        }

        public String getType(String uri, String localName) {
            return "CDATA";
        }

        public String getType(String qName) {
            return "CDATA";
        }

        public String getValue(int index) {
            return getByIndex(index).getValue();
        }

        public String getValue(String uri, String localName) {
            return atts.get(localName);
        }

        public String getValue(String qName) {
            return atts.get(qName);
        }
    }

}
Added: tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/Page.java
URL:
http://svn.apache.org/viewvc/tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/Page.java?rev=1441170&view=auto
==============================================================================
--- tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/Page.java
(added)
+++ tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/Page.java
Thu Jan 31 20:05:45 2013
@@ -0,0 +1,201 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.cxf.cwiki;
+
+
+import java.io.Serializable;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+import java.util.concurrent.CopyOnWriteArraySet;
+
+import javax.xml.datatype.DatatypeFactory;
+import javax.xml.datatype.XMLGregorianCalendar;
+
+import org.w3c.dom.Document;
+
+import org.apache.cxf.helpers.DOMUtils;
+
+/**
+ *
+ */
/**
 * Serializable snapshot of one Confluence page: identity, hierarchy, URL,
 * last-modified time, attachments, and which {children}/{include:} macros its
 * raw wiki content uses.  Instances are cached between exporter runs;
 * rendered content is transient and re-fetched each run.
 */
public class Page implements Serializable {
    private static final long serialVersionUID = 1L;

    XMLGregorianCalendar modified;      // last modified (or publish) timestamp
    final String id;                    // Confluence page id
    final String parent;                // parent page id
    final String title;
    final String url;                   // Confluence URL of the page
    Map<String, String> attachments;    // attachment id -> file name (lazy)
    Set<String> includes;               // page titles pulled in via {include:...}
    Map<String, Integer> childrenOf;    // {children:page=X|depth=N} macro targets

    // rendering results for the current run only — never serialized
    transient String renderedContent;
    transient String renderedDivContent;
    transient String divIdForContent;

    /**
     * Parses the page summary out of a Confluence SOAP response document (the
     * fields are children of the document element's first child) and scans
     * the raw wiki content for {children} and {include:} macros so the
     * exporter can track inter-page dependencies.
     *
     * @param doc SOAP response payload for one page
     * @throws Exception on parse failures
     */
    public Page(Document doc) throws Exception {
        //org.apache.cxf.helpers.XMLUtils.printDOM(doc.getDocumentElement());
        id = DOMUtils.getChildContent(doc.getDocumentElement().getFirstChild(), "id");
        parent = DOMUtils.getChildContent(doc.getDocumentElement().getFirstChild(), "parentId");
        title = DOMUtils.getChildContent(doc.getDocumentElement().getFirstChild(), "title");
        url = DOMUtils.getChildContent(doc.getDocumentElement().getFirstChild(), "url");
        String mod = DOMUtils.getChildContent(doc.getDocumentElement().getFirstChild(), "modified");
        if (mod == null)
            // NOTE(review): presumably blog entries carry "publishDate"
            // instead of "modified" — confirm against the SOAP schema
            mod = DOMUtils.getChildContent(doc.getDocumentElement().getFirstChild(),
                                           "publishDate");
        modified = DatatypeFactory.newInstance().newXMLGregorianCalendar(mod);

        String c = DOMUtils.getChildContent(doc.getDocumentElement().getFirstChild(), "content");
        if (c != null) {
            // record every {children[:page=X|depth=N]} macro occurrence
            int idx = c.indexOf("{children");
            while (idx != -1) {
                if (childrenOf == null) {
                    childrenOf = new HashMap<String, Integer>();
                }
                idx += 9;   // skip past "{children"
                if (c.charAt(idx) != '}') {
                    // {children:page=Foo|...}
                    idx++;
                    int idx2 = c.indexOf('}', idx);
                    String paramString = c.substring(idx, idx2);
                    // split "page=Foo|depth=2" into alternating key/value tokens
                    String params[] = paramString.split("\\||=");
                    String page = null;
                    int depth = 1;
                    for (int x = 0; x < params.length; x++) {
                        if ("page".equals(params[x])) {
                            page = params[x + 1];
                            x++;
                        } else if ("depth".equals(params[x])) {
                            depth = Integer.parseInt(params[x + 1]);
                            x++;
                        }
                    }
                    childrenOf.put(page, depth);
                } else {
                    // bare {children} lists this page's own children
                    childrenOf.put(title, 1);
                }
                idx = c.indexOf("{children", idx);
            }

            // record every {include:Other Page} reference
            idx = c.indexOf("{include:");
            while (idx != -1) {
                int idx2 = c.indexOf("}", idx);
                String inc = c.substring(idx + 9, idx2);
                if (includes == null) {
                    includes = new CopyOnWriteArraySet<String>();
                }
                includes.add(inc);
                idx = c.indexOf("{include:", idx2);
            }
        }
    }

    /**
     * Returns true if this page has a {children} macro rooted at the page
     * titled t whose declared depth is at least d.
     */
    public boolean hasChildrenOf(String t, int d) {
        if (childrenOf == null) {
            return false;
        }
        Integer i = childrenOf.get(t);
        if (i == null) {
            return false;
        }
        return d <= i;
    }

    /** Returns true if this page {include:}s the page with the given title. */
    public boolean includesPage(String s) {
        if (includes == null) {
            return false;
        }
        return includes.contains(s);
    }

    public String getId() {
        return id;
    }
    public String getParentId() {
        return parent;
    }
    public String getTitle() {
        return title;
    }
    public XMLGregorianCalendar getModifiedTime() {
        return modified;
    }

    /** Caches the fully rendered HTML for this run (not serialized). */
    public void setContent(String c) {
        renderedContent = c;
    }
    public String getContent() {
        return renderedContent;
    }
    public String getURL() {
        return url;
    }

    /**
     * Derives the local output file name from the title: letters/digits kept
     * (lowercased), runs of whitespace and '-' collapsed to a single '-',
     * everything else dropped, plus ".html".  Falls back to "&lt;id&gt;.html"
     * when the title yields no usable characters.
     */
    public String createFileName() {
        StringBuffer buffer = new StringBuffer();
        char array[] = getTitle().toLowerCase().toCharArray();
        boolean separated = true;  // true while the last emitted char is a '-'
        for (int x = 0; x < array.length; x++) {
            if ("abcdefghijklmnopqrstuvwxyz0123456789".indexOf(array[x]) >= 0) {
                buffer.append(Character.toLowerCase(array[x]));
                separated = false;
            } else if ("\r\n\t -".indexOf(array[x]) >= 0) {
                if (separated) {
                    continue;
                }
                buffer.append('-');
                separated = true;
            }
        }
        if (buffer.length() == 0) {
            return getId() + ".html";
        }
        return buffer.append(".html").toString();
    }

    /** Records an attachment (id -> file name) discovered for this page. */
    public void addAttachment(String aid, String filename) {
        if (attachments == null) {
            attachments = new HashMap<String, String>();
        }
        attachments.put(aid, filename);
    }
    public String getAttachmentFilename(String aid) {
        if (attachments == null) {
            return null;
        }
        return attachments.get(aid);
    }

    /** Caches a rendering restricted to one div id (single-slot cache). */
    public void setContentForDivId(String divId, String content) {
        renderedDivContent = content;
        divIdForContent = divId;
    }

    /**
     * Returns the cached rendering for the given div id, the full content
     * when divId is null, or null when a different div was cached last.
     */
    public String getContentForDivId(String divId) {
        if (divId == null) {
            return renderedContent;
        }
        if (divId.equals(divIdForContent)) {
            return renderedDivContent;
        }
        return null;
    }
}
Added:
tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/SiteExporter.java
URL:
http://svn.apache.org/viewvc/tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/SiteExporter.java?rev=1441170&view=auto
==============================================================================
---
tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/SiteExporter.java
(added)
+++
tapestry/tapestry-site/trunk/src/main/java/org/apache/cxf/cwiki/SiteExporter.java
Thu Jan 31 20:05:45 2013
@@ -0,0 +1,914 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.cxf.cwiki;
+
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+import java.io.FileWriter;
+import java.io.InputStream;
+import java.io.ObjectInputStream;
+import java.io.ObjectOutputStream;
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.io.UnsupportedEncodingException;
+import java.io.Writer;
+import java.net.Authenticator;
+import java.net.PasswordAuthentication;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.LinkedList;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.Map;
+import java.util.Properties;
+import java.util.Set;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentLinkedQueue;
+import java.util.concurrent.CopyOnWriteArraySet;
+import java.util.concurrent.Future;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import javax.xml.datatype.DatatypeFactory;
+import javax.xml.datatype.XMLGregorianCalendar;
+import javax.xml.namespace.QName;
+import javax.xml.ws.AsyncHandler;
+import javax.xml.ws.Dispatch;
+import javax.xml.ws.Response;
+import javax.xml.ws.Service;
+import javax.xml.ws.soap.SOAPBinding;
+
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+
+import org.xml.sax.ContentHandler;
+import org.xml.sax.InputSource;
+import org.xml.sax.XMLReader;
+
+import org.apache.cxf.common.classloader.ClassLoaderUtils;
+import org.apache.cxf.common.util.Base64Utility;
+import org.apache.cxf.helpers.CastUtils;
+import org.apache.cxf.helpers.DOMUtils;
+import org.apache.cxf.helpers.FileUtils;
+import org.apache.cxf.helpers.IOUtils;
+import org.apache.cxf.helpers.XMLUtils;
+import org.apache.cxf.interceptor.LoggingInInterceptor;
+import org.apache.cxf.interceptor.LoggingOutInterceptor;
+import org.apache.cxf.staxutils.StaxUtils;
+import org.apache.cxf.transport.http.HTTPConduit;
+import org.apache.cxf.transports.http.configuration.HTTPClientPolicy;
+import org.apache.velocity.Template;
+import org.apache.velocity.VelocityContext;
+import org.apache.velocity.app.Velocity;
+import org.apache.velocity.runtime.resource.loader.URLResourceLoader;
+import org.ccil.cowan.tagsoup.Parser;
+import org.ccil.cowan.tagsoup.XMLWriter;
+
+/**
+ *
+ */
+public class SiteExporter implements Runnable {
+
    static final String HOST = "https://cwiki.apache.org";
    static final String ROOT = HOST + "/confluence";
    static final String RPC_ROOT = "/rpc/soap-axis/confluenceservice-v1";
    static final String SOAPNS = "http://soap.rpc.confluence.atlassian.com";



    // process-wide settings shared by all exporter instances
    static boolean debug;                       // enable SOAP wire logging
    static String userName = "cxf-export-user"; // wiki credentials for downloads
    static String password;

    static boolean svn;                         // mirror output changes into svn
    static boolean commit;                      // NOTE(review): not read in this chunk
    static StringBuilder svnCommitMessage = new StringBuilder();

    static File rootOutputDir = new File(".");  // parent of each space's output dir
    static String loginToken;                   // Confluence SOAP session token
    static Dispatch<Document> dispatch;         // shared SOAP client, see getDispatch()
    static AtomicInteger asyncCount = new AtomicInteger();

    // per-space page index (id -> Page) and queue of pages needing re-render;
    // concurrent types since exporters can run on worker threads
    Map<String, Page> pages = new ConcurrentHashMap<String, Page>();
    Collection<Page> modifiedPages = new ConcurrentLinkedQueue<Page>();
    // titles whose modification forces re-export of the whole space
    Set<String> globalPages = new CopyOnWriteArraySet<String>();

    // per-space configuration, overridable via the properties file (see ctor)
    String spaceKey = "CXF";
    String pageCacheFile = "pagesConfig.obj";
    String templateName = "template/template.vm";
    String mainDivClass;
    boolean forceAll;          // true => ignore the cache and rebuild everything
    String breadCrumbRoot;     // optional label for the leading breadcrumb

    File outputDir = rootOutputDir;

    Template template;         // compiled Velocity page template
+
    /**
     * Creates an exporter configured from a properties file.
     *
     * Recognized keys: spaceKey, pageCacheFile, templateName, outputDir
     * (relative to the root output dir), mainDivClass, breadCrumbRoot, and
     * globalPages (comma-separated page titles that trigger a full re-export
     * when any of them changes).
     *
     * @param fileName path of the properties file
     * @param force    true to re-render every page regardless of cache state
     * @throws Exception if the properties or the Velocity template cannot be loaded
     */
    public SiteExporter(String fileName, boolean force) throws Exception {
        forceAll = force;

        Properties props = new Properties();
        props.load(new FileInputStream(fileName));

        if (props.containsKey("spaceKey")) {
            spaceKey = props.getProperty("spaceKey");
        }
        if (props.containsKey("pageCacheFile")) {
            pageCacheFile = props.getProperty("pageCacheFile");
        }
        if (props.containsKey("templateName")) {
            templateName = props.getProperty("templateName");
        }
        if (props.containsKey("outputDir")) {
            outputDir = new File(rootOutputDir, props.getProperty("outputDir"));
        }
        if (props.containsKey("mainDivClass")) {
            mainDivClass = props.getProperty("mainDivClass");
        }
        if (props.containsKey("breadCrumbRoot")) {
            breadCrumbRoot = props.getProperty("breadCrumbRoot");
        }
        if (props.containsKey("globalPages")) {
            String globals = props.getProperty("globalPages");
            String[] pgs = globals.split(",");
            globalPages.addAll(Arrays.asList(pgs));
        }

        // configure Velocity to resolve the template by URL so it loads from
        // either the classpath or the filesystem
        props = new Properties();
        String clzName = URLResourceLoader.class.getName();
        props.put("resource.loader", "url");
        props.put("url.resource.loader.class", clzName);
        props.put("url.resource.loader.root", "");
        // Velocity.init mutates engine-global state; serialize across instances
        synchronized (Velocity.class) {
            Velocity.init(props);

            URL url = ClassLoaderUtils.getResource(templateName, this.getClass());
            if (url == null) {
                File file = new File(templateName);
                if (file.exists()) {
                    url = file.toURI().toURL();
                }
            }
            template = Velocity.getTemplate(url.toURI().toString());
        }
        outputDir.mkdirs();
    }
+
    /**
     * Lazily creates the shared JAX-WS {@link Dispatch} used for all
     * Confluence SOAP calls (payload mode, SOAP 1.1), adding wire-logging
     * interceptors in debug mode and a 5 minute receive timeout.
     *
     * @return the process-wide dispatch (never null)
     */
    public static synchronized Dispatch<Document> getDispatch() {
        if (dispatch == null) {
            Service service = Service.create(new QName(SOAPNS, "Service"));
            service.addPort(new QName(SOAPNS, "Port"),
                            SOAPBinding.SOAP11HTTP_BINDING,
                            ROOT + RPC_ROOT);

            dispatch = service.createDispatch(new QName(SOAPNS, "Port"),
                                              Document.class,
                                              Service.Mode.PAYLOAD);
            if (debug) {
                // log raw SOAP traffic in and out
                ((org.apache.cxf.jaxws.DispatchImpl<?>)dispatch).getClient()
                    .getEndpoint().getInInterceptors().add(new LoggingInInterceptor());
                ((org.apache.cxf.jaxws.DispatchImpl<?>)dispatch).getClient()
                    .getEndpoint().getOutInterceptors().add(new LoggingOutInterceptor());
            }
            HTTPConduit c = (HTTPConduit)((org.apache.cxf.jaxws.DispatchImpl<?>)dispatch)
                .getClient().getConduit();
            HTTPClientPolicy clientPol = c.getClient();
            if (clientPol == null) {
                clientPol = new HTTPClientPolicy();
            }
            //CAMEL has a couple of HUGE HUGE pages that take a long time to render
            clientPol.setReceiveTimeout(5 * 60 * 1000);
            c.setClient(clientPol);

        }
        return dispatch;
    }
+
+ public void run() {
+ try {
+ doExport();
+ } catch (Exception e) {
+ e.printStackTrace();
+ }
+ }
+
+ public void forcePage(String s) throws Exception {
+ Page p = findPage(s);
+ if (p != null) {
+ pages.remove(p.getId());
+ modifiedPages.add(p);
+ }
+ }
+
    /**
     * Main export flow: load the cached page index, short-circuit when the
     * space's RSS feed shows nothing changed, otherwise log in, refresh the
     * page list, widen the modified set to everything when a global page
     * changed (or forceAll is set), then render and persist the cache.
     *
     * @throws Exception on any SOAP/IO failure
     */
    public void doExport() throws Exception {
        if (!forceAll) {
            loadPagesCache();
        }

        // debug stuff, force regen of a page
        //forcePage("Navigation");
        //forcePage("Index");
        //forcePage("JavaDoc");
        //forcePage("DOSGi Architecture");

        if (modifiedPages.isEmpty() && checkRSS()) {
            System.out.println("(" + spaceKey + ") No changes detected from RSS");
            return;
        }

        doLogin();
        loadPages();

        // a change to any "global" page (e.g. navigation) affects every page
        for (Page p : modifiedPages) {
            if (globalPages.contains(p.getTitle())) {
                modifiedPages.clear();
                modifiedPages.addAll(pages.values());
                break;
            }
        }
        if (forceAll) {
            modifiedPages.clear();
            modifiedPages.addAll(pages.values());
        }


        if (!modifiedPages.isEmpty()) {
            renderPages();
            savePages();
        }


    }
+
+
    /**
     * Polls the space's Atom change feed.  Returns true when every feed entry
     * is already up to date in the local cache (nothing to export); returns
     * false when a change or an unknown page is seen, or when a full rebuild
     * is in effect (forceAll set or no cache loaded).
     *
     * @throws Exception if the feed cannot be fetched or parsed
     */
    public boolean checkRSS() throws Exception {
        if (forceAll || pages == null || pages.isEmpty()) {
            return false;
        }
        URL url = new URL(ROOT + "/createrssfeed.action?types=page&types=blogpost&types=mail&"
                          //+ "types=comment&" //cannot handle comment updates yet
                          + "types=attachment&statuses=created&statuses=modified"
                          + "&spaces=" + spaceKey + "&rssType=atom&maxResults=20&timeSpan=2"
                          + "&publicFeed=true");
        InputStream ins = url.openStream();
        Document doc = StaxUtils.read(ins);
        ins.close();
        List<Element> els = DOMUtils.getChildrenWithName(doc.getDocumentElement(),
                                                         "http://www.w3.org/2005/Atom",
                                                         "entry");
        //XMLUtils.printDOM(doc);
        for (Element el : els) {
            // entry's <updated> timestamp vs. our cached modified time
            Element e2 = DOMUtils.getFirstChildWithName(el, "http://www.w3.org/2005/Atom", "updated");
            String val = DOMUtils.getContent(e2);
            XMLGregorianCalendar cal = DatatypeFactory.newInstance().newXMLGregorianCalendar(val);
            e2 = DOMUtils.getFirstChildWithName(el, "http://www.w3.org/2005/Atom", "title");
            String title = DOMUtils.getContent(e2);
            Page p = findPage(title);

            if (p != null) {
                //found a modified page - need to rebuild
                if (cal.compare(p.getModifiedTime()) > 0) {
                    System.out.println("(" + spaceKey + ") Changed page found: " + title);
                    return false;
                }
            } else {
                System.out.println("(" + spaceKey + ") Did not find page for: " + title);
                return false;
            }
        }

        return true;
    }
+
+ private void savePages() throws Exception {
+ File file = new File(rootOutputDir, pageCacheFile);
+ file.getParentFile().mkdirs();
+ FileOutputStream fout = new FileOutputStream(file);
+ ObjectOutputStream oout = new ObjectOutputStream(fout);
+ oout.writeObject(pages);
+ oout.close();
+ }
+
    /**
     * Renders every queued page through the Velocity template into the
     * output directory, downloading its attachments first, and records svn
     * add/modify bookkeeping for the commit message.
     *
     * @throws Exception on SOAP, template, or file I/O failure
     */
    private void renderPages() throws Exception {
        int total = modifiedPages.size();
        int count = 0;
        for (Page p : modifiedPages) {
            count++;
            System.out.println("(" + spaceKey + ") Rendering " + p.getTitle()
                               + " (" + count + "/" + total + ")");
            loadAttachments(p);

            // fetch/render the full page content before merging the template
            loadPageContent(p, null, null);
            VelocityContext ctx = new VelocityContext();
            ctx.put("exporter", this);
            ctx.put("page", p);
            ctx.put("confluenceUri", ROOT);

            File file = new File(outputDir, p.createFileName());
            boolean isNew = !file.exists();

            FileWriter writer = new FileWriter(file);
            ctx.put("out", writer);
            template.merge(ctx, writer);
            writer.close();
            if (isNew) {
                //call "svn add"
                callSvn("add", file.getAbsolutePath());
                svnCommitMessage.append("Adding: " + file.getName() + "\n");
            } else {
                svnCommitMessage.append("Modified: " + file.getName() + "\n");
            }
        }
    }
    /** Runs an svn subcommand in this exporter's output directory. */
    void callSvn(String ... commands) throws Exception {
        callSvn(outputDir, commands);
    }
    /**
     * Runs "svn --non-interactive &lt;commands&gt;" in the given directory
     * when svn integration is enabled; no-op otherwise.  Dumps svn's stderr
     * when the command exits non-zero.
     *
     * NOTE(review): stdout/stderr are only drained after waitFor(); a command
     * producing lots of output could fill the pipe and block — confirm the
     * subcommands used here stay quiet.
     */
    static void callSvn(File dir, String ... commands) throws Exception {
        if (svn) {
            List<String> cmds = new ArrayList<String>();
            cmds.add("svn");
            cmds.add("--non-interactive");
            cmds.addAll(Arrays.asList(commands));
            // empty environment; working directory = dir
            Process p = Runtime.getRuntime().exec(cmds.toArray(new String[cmds.size()]),
                                                  new String[0], dir);
            if (p.waitFor() != 0) {
                IOUtils.copy(p.getErrorStream(), System.err);
            }
        }
    }
+
    /**
     * Fetches the attachment list for a page via the Confluence SOAP API,
     * records each attachment on the page, and downloads every attachment
     * into the page's "&lt;page&gt;.data" directory (svn-adding new files).
     * Failures on individual attachments are logged and skipped.
     *
     * @throws Exception if the SOAP call itself fails
     */
    private void loadAttachments(Page p) throws Exception {
        // build <ns1:getAttachments><in0>token</in0><in1>pageId</in1></...>
        Document doc = XMLUtils.newDocument();
        Element el = doc.createElementNS(SOAPNS, "ns1:getAttachments");
        Element el2 = doc.createElement("in0");
        el.appendChild(el2);
        el2.setTextContent(loginToken);
        el2 = doc.createElement("in1");
        el.appendChild(el2);
        el2.setTextContent(p.getId());
        // NOTE(review): re-appends the node added two lines up; redundant
        el.appendChild(el2);
        doc.appendChild(el);

        doc = getDispatch().invoke(doc);
        // walk the returned array of attachment elements
        el = DOMUtils.getFirstElement(DOMUtils.getFirstElement(doc.getDocumentElement()));
        while (el != null) {
            try {
                String filename = DOMUtils.getChildContent(el, "fileName");
                String durl = DOMUtils.getChildContent(el, "url");
                String aid = DOMUtils.getChildContent(el, "id");

                p.addAttachment(aid, filename);

                // attachments live next to the page in "<page>.data"
                String dirName = p.createFileName();
                dirName = dirName.substring(0, dirName.lastIndexOf(".")) + ".data";
                File file = new File(outputDir, dirName);
                if (!file.exists()) {
                    callSvn("mkdir", file.getAbsolutePath());
                    file.mkdirs();
                }
                filename = filename.replace(' ', '-');
                file = new File(file, filename);
                boolean exists = file.exists();
                FileOutputStream out = new FileOutputStream(file);
                URL url = new URL(durl);
                InputStream ins = url.openStream();
                IOUtils.copy(ins, out);
                out.close();
                ins.close();
                if (!exists) {
                    callSvn("add", file.getAbsolutePath());
                    svnCommitMessage.append("Added: " + dirName + "/" + file.getName() + "\n");
                } else {
                    svnCommitMessage.append("Modified: " + dirName + "/" + file.getName() + "\n");
                }
            } catch (Exception e) {
                // best-effort: log and continue with the remaining attachments
                e.printStackTrace();
            }
            el = DOMUtils.getNextElement(el);
        }
    }
    /** Downloads a user avatar (authenticated) for the page; returns the local file name. */
    String loadUserImage(Page p, String href) throws Exception {
        return loadPageBinaryData(p, href, "userimage", true);
    }
    /** Downloads an image thumbnail (no auth) for the page; returns the local file name. */
    String loadThumbnail(Page p, String href) throws Exception {
        return loadPageBinaryData(p, href, "thumbs", false);
    }
+ String loadPageBinaryData(Page p, String href, String type, boolean auth)
throws Exception {
+ String filename = href.substring(href.lastIndexOf('/') + 1);
+ filename = filename.replace(' ', '_');
+ if (filename.indexOf('?') != -1) {
+ filename = filename.substring(0, filename.indexOf('?'));
+ }
+
+ String dirName = p.createFileName();
+ dirName = dirName.substring(0, dirName.lastIndexOf(".")) + "." + type;
+ File file = new File(outputDir, dirName);
+ if (!file.exists()) {
+ callSvn("mkdir", file.getAbsolutePath());
+ file.mkdirs();
+ }
+ file = new File(file, filename);
+ boolean exists = file.exists();
+ FileOutputStream out = new FileOutputStream(file);
+ if (auth) {
+ if (href.indexOf('?') != -1) {
+ href += "&os_authType=basic";
+ } else {
+ href += "?os_authType=basic";
+ }
+ }
+ URL url = new URL(HOST + href);
+ URLConnection con = url.openConnection();
+ if (auth) {
+ con.addRequestProperty("Authorization", getBasicAuthHeader());
+ }
+ InputStream ins = con.getInputStream();
+ IOUtils.copy(ins, out);
+ out.close();
+ ins.close();
+ if (!exists) {
+ callSvn("add", file.getAbsolutePath());
+ svnCommitMessage.append("Added: " + dirName + "/" + file.getName()
+ "\n");
+ } else {
+ svnCommitMessage.append("Modified: " + dirName + "/" +
file.getName() + "\n");
+ }
+ return file.getName();
+ }
+ public String getBasicAuthHeader() {
+ String userAndPass = userName + ":" + password;
+ try {
+ return "Basic " +
Base64Utility.encode(userAndPass.getBytes("ISO-8859-1"));
+ } catch (UnsupportedEncodingException e) {
+ return "Basic";
+ }
+ }
+ public Page findPage(String title) throws Exception {
+ for (Page p : pages.values()) {
+ if (title.equals(p.getTitle())) {
+ return p;
+ }
+ }
+ return null;
+ }
+ public Page findPageByURL(String url) throws Exception {
+ for (Page p : pages.values()) {
+ if (p.getURL().endsWith(url)) {
+ return p;
+ }
+ }
+ return null;
+ }
+ public Page findPageByID(String id) {
+ for (Page p : pages.values()) {
+ if (p.getId().equals(id)) {
+ return p;
+ }
+ }
+ return null;
+ }
+ public String breadcrumbs(Page page) {
+ String separator = ">";
+ String s = " " + separator + " ";
+
+ StringBuffer buffer = new StringBuffer();
+ List<Page> p = new LinkedList<Page>();
+ String parentId = page.getParentId();
+ Page parent = parentId == null ? null : pages.get(parentId);
+ while (parent != null) {
+ p.add(0, parent);
+ parentId = parent.getParentId();
+ parent = pages.get(parentId);
+ }
+ if (breadCrumbRoot != null) {
+ buffer.append("<a href=\"");
+ buffer.append("index.html");
+ buffer.append("\">");
+ buffer.append(breadCrumbRoot);
+ buffer.append("</a>");
+ buffer.append(s);
+ }
+ for (Page p2 : p) {
+ buffer.append("<a href=\"");
+ buffer.append(p2.createFileName());
+ buffer.append("\">");
+ buffer.append(p2.getTitle());
+ buffer.append("</a>");
+ buffer.append(s);
+ }
+ buffer.append("<a href=\"");
+ buffer.append(page.createFileName());
+ buffer.append("\">");
+ buffer.append(page.getTitle());
+ buffer.append("</a>");
+
+ return buffer.toString();
+ }
+
+ public String getPageContent(String title, String divId) throws Exception {
+ Page p = findPage(title);
+ String s = p.getContentForDivId(divId);
+ if (s == null) {
+ s = loadPageContent(p, divId, null);
+ }
+ return s;
+ }
+ public String getPageContent(String title, String divId, String cls)
throws Exception {
+ Page p = findPage(title);
+ String s = p.getContentForDivId(divId);
+ if (s == null) {
+ s = loadPageContent(p, divId, cls);
+ }
+ return s;
+ }
+ public String getPageContent(String title) throws Exception {
+ Page p = findPage(title);
+ String s = p.getContent();
+ if (s == null) {
+ loadPageContent(p, null, null);
+ }
+ return p.getContent();
+ }
    /**
     * Calls the Confluence SOAP renderContent operation for the given page
     * and caches the cleaned-up result on the Page object.
     *
     * Parameter elements follow the RPC convention: in0 = auth token (set by
     * doLogin), in1 = space key, in2 = page id, in3 = empty, in4 = a map
     * containing style=clean.
     *
     * @param p      page to render
     * @param divId  if non-null, result is cached per-div via
     *               setContentForDivId; otherwise via setContent
     * @param divCls div class forwarded to updateContentLinks; when both
     *               divCls and divId are null, mainDivClass is used
     * @return the cleaned content
     */
    private String loadPageContent(Page p, String divId, String divCls) throws Exception {
        Document doc = XMLUtils.newDocument();
        Element el = doc.createElementNS(SOAPNS, "ns1:renderContent");
        Element el2 = doc.createElement("in0");
        el.appendChild(el2);
        el2.setTextContent(loginToken);
        el2 = doc.createElement("in1");
        el.appendChild(el2);
        el2.setTextContent(spaceKey);

        el2 = doc.createElement("in2");
        el.appendChild(el2);
        el2.setTextContent(p.getId());

        // in3 intentionally left empty.
        el2 = doc.createElement("in3");
        el.appendChild(el2);

        el2 = doc.createElement("in4"); //apachesoap map
        el.appendChild(el2);

        Element el4 = doc.createElement("item");
        el2.appendChild(el4);

        // map entry: style=clean (ask Confluence for clean markup)
        Element el3 = doc.createElement("key");
        el3.setTextContent("style");
        el4.appendChild(el3);
        el3 = doc.createElement("value");
        el3.setTextContent("clean");
        el4.appendChild(el3);


        doc.appendChild(el);
        doc = getDispatch().invoke(doc);

        // Response payload is the text of the first child of the root element.
        String content = doc.getDocumentElement().getFirstChild().getTextContent().trim();
        content = updateContentLinks(p, content, divId,
                                     divCls == null && divId == null ? mainDivClass : divCls);
        if (divId == null) {
            p.setContent(content);
        } else {
            p.setContentForDivId(divId, content);
        }
        return content;
    }
+
+ public String unwrap(String v) throws Exception {
+ return v.trim().replaceFirst("^<div[^>]*>",
"").replaceFirst("</div>$", "");
+ }
+
    /**
     * Logs in to the Confluence SOAP API once per JVM, storing the session
     * token in the static loginToken field. Synchronized so concurrent
     * exporter threads perform at most one login. Prompts on the console for
     * any credential not supplied via -user/-password.
     *
     * NOTE(review): System.console() returns null when no console is
     * attached (e.g. piped input) — this would NPE then; presumably the tool
     * is always run interactively in that case.
     */
    private static synchronized void doLogin() throws Exception {
        if (loginToken == null) {
            Document doc = XMLUtils.newDocument();
            Element el = doc.createElementNS(SOAPNS, "ns1:login");
            Element el2 = doc.createElement("in0");

            // in0 = user name (prompt if not given on the command line)
            if (userName == null) {
                System.out.println("Enter username: ");
                el2.setTextContent(System.console().readLine());
            } else {
                el2.setTextContent(userName);
            }
            el.appendChild(el2);
            el2 = doc.createElement("in1");
            el.appendChild(el2);
            // in1 = password (readPassword avoids echoing to the terminal)
            if (password == null) {
                System.out.println("Enter password: ");
                el2.setTextContent(new String(System.console().readPassword()));
            } else {
                el2.setTextContent(password);
            }
            doc.appendChild(el);
            doc = getDispatch().invoke(doc);
            // Response payload: the session token text.
            loginToken = doc.getDocumentElement().getFirstChild().getTextContent();
        }
    }
+
+ public void loadPagesCache() throws Exception {
+ File file = new File(rootOutputDir, pageCacheFile);
+ if (file.exists()) {
+ FileInputStream fin = new FileInputStream(file);
+ ObjectInputStream oin = new ObjectInputStream(fin);
+ pages = CastUtils.cast((Map<?, ?>)oin.readObject());
+ oin.close();
+ }
+ }
+
    /** Fetches the page-summary listing for the space via ns1:getPages. */
    private Document getPagesDocument() throws Exception {
        return getElementsDocument("ns1:getPages");
    }
+
    /** Fetches the blog-entry listing for the space via ns1:getBlogEntries. */
    private Document getBlogEntriesDocument() throws Exception {
        return getElementsDocument("ns1:getBlogEntries");
    }
+
+ private Document getElementsDocument(String function) throws Exception {
+ Document doc = XMLUtils.newDocument();
+ Element el = doc.createElementNS(SOAPNS, function);
+ Element el2 = doc.createElement("in0");
+ el.appendChild(el2);
+ el2.setTextContent(loginToken);
+ el2 = doc.createElement("in1");
+ el.appendChild(el2);
+ el2.setTextContent(spaceKey);
+ doc.appendChild(el);
+ doc = getDispatch().invoke(doc);
+ return doc;
+ }
+
+ private void loadAndAddPages(List<Future<?>> futures, Set<String>
allPages, Set<Page> newPages) throws Exception {
+ Document doc = getPagesDocument();
+ ElementLoader loader = new ElementLoader()
+ {
+ public Future<?> loadElement(Element element, Set<String>
allPages, Set<Page> newPages) throws Exception
+ {
+ return loadPage(element, allPages, newPages);
+ }
+ };
+ loadAndAddElements(futures, loader, doc, allPages, newPages);
+ }
+
+ private void loadAndAddBlogEntries(List<Future<?>> futures, Set<String>
allPages, Set<Page> newPages) throws Exception {
+ Document doc = getBlogEntriesDocument();
+ ElementLoader loader = new ElementLoader()
+ {
+ public Future<?> loadElement(Element element, Set<String>
allPages, Set<Page> newPages) throws Exception
+ {
+ return loadBlogEntry(element, allPages, newPages);
+ }
+ };
+ loadAndAddElements(futures, loader, doc, allPages, newPages);
+ }
+
+ private void loadAndAddElements(List<Future<?>> futures, ElementLoader
loader, Document doc,
+ Set<String> allPages, Set<Page> newPages) throws Exception {
+ Node nd = doc.getDocumentElement().getFirstChild().getFirstChild();
+ while (nd != null) {
+ if (nd instanceof Element) {
+ futures.add(loader.loadElement((Element)nd, allPages,
newPages));
+ }
+ nd = nd.getNextSibling();
+ }
+ }
+
    /**
     * Strategy for turning one summary Element from a listing response into
     * an async fetch; implemented anonymously for pages and blog entries.
     */
    private interface ElementLoader {
        Future<?> loadElement(Element element, Set<String> allPages, Set<Page> newPages) throws Exception;
    }
+
    /**
     * Refreshes the whole page cache: queues async fetches for every page
     * and blog entry, waits for them, then reconciles additions and
     * deletions and re-renders anything whose {include}/{children} content
     * may have changed.
     *
     * allPages starts as the set of previously-cached ids; the async
     * handlers remove each id they see, so what remains afterwards is the
     * set of deleted pages. CopyOnWriteArraySet is used because those
     * handlers mutate the sets concurrently.
     */
    public void loadPages() throws Exception {
        Set<String> allPages = new CopyOnWriteArraySet<String>(pages.keySet());
        Set<Page> newPages = new CopyOnWriteArraySet<Page>();
        List<Future<?>> futures = new ArrayList<Future<?>>(allPages.size());

        loadAndAddPages(futures, allPages, newPages);
        loadAndAddBlogEntries(futures, allPages, newPages);

        for (Future<?> f : futures) {
            //wait for all the pages to be done
            f.get();
        }
        for (Page p : newPages) {
            //pages have been added, need to check parents for re-render
            checkForChildren(p);
        }
        for (String id : allPages) {
            //these pages have been deleted
            Page p = pages.remove(id);
            checkForChildren(p);

            File file = new File(outputDir, p.createFileName());
            if (file.exists()) {
                callSvn("rm", file.getAbsolutePath());
                svnCommitMessage.append("Deleted: " + file.getName() + "\n");
            }
            // Re-check: "svn rm" may already have removed the file;
            // delete it ourselves only if it is still present.
            if (file.exists()) {
                file.delete();
            }
        }
        // checkIncludes re-flags one page per call; loop until a fixed
        // point is reached and no further pages need re-rendering.
        while (checkIncludes()) {
            // nothing
        }
    }
+
+ public boolean checkIncludes() {
+ for (Page p : modifiedPages) {
+ if (checkIncludes(p)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ public boolean checkIncludes(Page p) {
+ for (Page p2 : pages.values()) {
+ if (p2.includesPage(p.getTitle())
+ && !modifiedPages.contains(p2)) {
+ modifiedPages.add(p2);
+ return true;
+ }
+ }
+ return false;
+ }
+ public void checkForChildren(Page p) {
+ String parentId = p.getParentId();
+ Page parent = parentId == null ? null : pages.get(parentId);
+ int d = 1;
+ while (parent != null) {
+ for (Page p2 : pages.values()) {
+ if (p2.hasChildrenOf(parent.getTitle(), d)
+ && !modifiedPages.contains(p2)) {
+ modifiedPages.add(p2);
+ }
+ }
+ parent = pages.get(parent.getParentId());
+ d++;
+ }
+ }
+
+ public Future<?> loadPage(Element pageSumEl,
+ final Set<String> allPages,
+ final Set<Page> newPages) throws Exception {
+ Document doc = XMLUtils.newDocument();
+ Element el = doc.createElementNS(SOAPNS, "ns1:getPage");
+ Element el2 = doc.createElement("in0");
+ el.appendChild(el2);
+ el2.setTextContent(loginToken);
+ el2 = doc.createElement("in1");
+ el.appendChild(el2);
+ el2.setTextContent(DOMUtils.getChildContent(pageSumEl, "id"));
+ doc.appendChild(el);
+
+ return getResponseHandler(doc, allPages, newPages);
+ }
+
+ public Future<?> loadBlogEntry(Element pageSumEl, final Set<String>
allPages,
+ final Set<Page> newPages) throws Exception
+ {
+ Document doc = XMLUtils.newDocument();
+ Element el = doc.createElementNS(SOAPNS, "ns1:getBlogEntry");
+ Element el2 = doc.createElement("in0");
+ el.appendChild(el2);
+ el2.setTextContent(loginToken);
+ el2 = doc.createElement("in1");
+ el.appendChild(el2);
+ el2.setTextContent(DOMUtils.getChildContent(pageSumEl, "id"));
+ doc.appendChild(el);
+
+ return getResponseHandler(doc, allPages, newPages);
+ }
+
    /**
     * Submits the request asynchronously with a handler that folds the
     * response into the shared page cache. Throttles to ~15 in-flight
     * requests via the asyncCount counter (busy-wait with a short sleep).
     *
     * The handler: builds a Page from the response, replaces any cached
     * version, flags it modified if newer (or brand new, in which case it is
     * also added to newPages for ancestor re-render checks), and removes its
     * id from allPages so that set ends up holding only deleted pages.
     * Exceptions are printed, not rethrown — a failed page is skipped.
     */
    private Future<?> getResponseHandler(Document doc, final Set<String> allPages, final Set<Page> newPages) throws Exception {
        // make sure we only fire off about 15-20 or confluence may get a bit overloaded
        while (asyncCount.get() > 15) {
            Thread.sleep(10);
        }
        asyncCount.incrementAndGet();
        return getDispatch().invokeAsync(doc, new AsyncHandler<Document>() {
            public void handleResponse(Response<Document> doc) {
                try {
                    Page page = new Page(doc.get());
                    Page oldPage = pages.put(page.getId(), page);
                    // Flag for re-render when newer than the cached copy.
                    if (oldPage == null || page.getModifiedTime().compare(oldPage.getModifiedTime()) > 0) {
                        modifiedPages.add(page);
                        if (oldPage == null) {
                            //need to check parents to see if it has a {children} tag so we can re-render
                            newPages.add(page);
                        }
                    }
                    // Seen on the server, so it is not a deleted page.
                    if (allPages.contains(page.getId())) {
                        allPages.remove(page.getId());
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                } finally {
                    // Always release the throttle slot.
                    asyncCount.decrementAndGet();
                }
            }
        });
    }
+
+ private String updateContentLinks(Page page, String content,
+ String id, String divCls) throws
Exception {
+ XMLReader parser = createTagSoupParser();
+ StringWriter w = new StringWriter();
+ parser.setContentHandler(createContentHandler(page, w, id, divCls));
+ parser.parse(new InputSource(new StringReader(content)));
+ content = w.toString();
+ content = content.substring("<html><body>".length());
+ content = content.substring(0, content.lastIndexOf("</body></html>"));
+ return content;
+ }
    /**
     * Creates a TagSoup parser with namespace processing disabled and a
     * tweaked HTML schema: "ul" is allowed to act as a list-item member so
     * the nested lists produced by the Confluence {toc} macro parse without
     * being restructured.
     */
    protected XMLReader createTagSoupParser() throws Exception {
        XMLReader reader = new Parser();
        reader.setFeature(Parser.namespacesFeature, false);
        reader.setFeature(Parser.namespacePrefixesFeature, false);
        reader.setProperty(Parser.schemaProperty, new org.ccil.cowan.tagsoup.HTMLSchema() {
            {
                //problem with nested lists that the confluence {toc} macro creates
                elementType("ul", M_LI, M_BLOCK | M_LI, 0);
            }
        });

        return reader;
    }
+ protected ContentHandler createContentHandler(final Page page, Writer w,
+ String id, String divCls) {
+ XMLWriter xmlWriter = new ConfluenceCleanupWriter(this, w, page, id,
divCls);
+ xmlWriter.setOutputProperty(XMLWriter.OMIT_XML_DECLARATION, "yes");
+ xmlWriter.setOutputProperty(XMLWriter.METHOD, "html");
+ return xmlWriter;
+ }
+
+ public static void main(String[] args) throws Exception {
+ Authenticator.setDefault(new Authenticator() {
+ protected PasswordAuthentication getPasswordAuthentication() {
+ return new PasswordAuthentication(userName,
password.toCharArray());
+ }
+ });
+ ListIterator<String> it = Arrays.asList(args).listIterator();
+ List<String> files = new ArrayList<String>();
+ boolean forceAll = false;
+ while (it.hasNext()) {
+ String s = it.next();
+ if ("-debug".equals(s)) {
+ debug = true;
+ } else if ("-user".equals(s)) {
+ userName = it.next();
+ } else if ("-password".equals(s)) {
+ password = it.next();
+ } else if ("-d".equals(s)) {
+ rootOutputDir = new File(it.next());
+ } else if ("-force".equals(s)) {
+ forceAll = true;
+ } else if ("-svn".equals(s)) {
+ svn = true;
+ } else if ("-commit".equals(s)) {
+ commit = true;
+ } else if (s != null && s.length() > 0) {
+ files.add(s);
+ }
+ }
+
+ List<Thread> threads = new ArrayList<Thread>(files.size());
+ for (String file : files) {
+ Thread t = new Thread(new SiteExporter(file, forceAll));
+ threads.add(t);
+ t.start();
+ }
+ for (Thread t : threads) {
+ t.join();
+ }
+
+ if (commit) {
+ File file = FileUtils.createTempFile("svncommit", "txt");
+ FileWriter writer = new FileWriter(file);
+ writer.write(svnCommitMessage.toString());
+ writer.close();
+ callSvn(rootOutputDir, "commit", "-F", file.getAbsolutePath(),
rootOutputDir.getAbsolutePath());
+ svnCommitMessage.setLength(0);
+ }
+ }
+}