Author: amitj
Date: Fri Mar 24 09:31:18 2017
New Revision: 1788387

URL: http://svn.apache.org/viewvc?rev=1788387&view=rev
Log:
OAK-4933: Create a data store implementation that integrates with Microsoft Azure Blob Storage

- New module oak-blob-cloud-azure with DataStore implementation for Azure Blob Storage
- Integration tests
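
For reference, the new backend is configured purely through Properties; the keys it reads are defined in AzureConstants below. A rough, untested sketch of wiring the data store directly (outside OSGi, e.g. in a test) could look like the following; the account name/key, container name and repository home are placeholders, not part of this commit, and StatisticsProvider.NOOP stands in for whatever provider a deployment supplies:

    // Illustrative sketch only (not part of this commit); values are placeholders.
    Properties props = new Properties();
    props.put("accessKey", "<storage-account-name>");   // AzureConstants.AZURE_STORAGE_ACCOUNT_NAME
    props.put("secretKey", "<storage-account-key>");    // AzureConstants.AZURE_STORAGE_ACCOUNT_KEY
    props.put("container", "oak-blobs");                // AzureConstants.AZURE_BLOB_CONTAINER_NAME

    AzureDataStore ds = new AzureDataStore();
    ds.setStatisticsProvider(StatisticsProvider.NOOP);  // the OSGi service sets this via setStatisticsProvider()
    ds.setProperties(props);
    ds.init("/path/to/repository/home");                // DataStore.init(homeDir), throws RepositoryException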

Added:
    jackrabbit/oak/trunk/oak-blob-cloud-azure/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/pom.xml   (with props)
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AbstractAzureDataStoreService.java   (with props)
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java   (with props)
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureConstants.java   (with props)
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStore.java   (with props)
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStoreService.java   (with props)
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/Utils.java   (with props)
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/org/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/org/apache/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/org/apache/jackrabbit/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/org/apache/jackrabbit/oak/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/org/apache/jackrabbit/oak/blob/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/org/apache/jackrabbit/oak/blob/cloud/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/org/apache/jackrabbit/oak/blob/cloud/azure/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStoreTest.java   (with props)
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStoreUtils.java   (with props)
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/TestAzureDS.java   (with props)
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/TestAzureDSWithSmallCache.java   (with props)
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/TestAzureDsCacheOff.java   (with props)
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/resources/
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/resources/azure.properties   (with props)
    jackrabbit/oak/trunk/oak-blob-cloud-azure/src/test/resources/logback-test.xml   (with props)
Modified:
    jackrabbit/oak/trunk/oak-doc/src/site/markdown/osgi_config.md
    jackrabbit/oak/trunk/oak-doc/src/site/markdown/plugins/blobstore.md
    jackrabbit/oak/trunk/oak-parent/pom.xml
    jackrabbit/oak/trunk/pom.xml

Added: jackrabbit/oak/trunk/oak-blob-cloud-azure/pom.xml
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob-cloud-azure/pom.xml?rev=1788387&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-blob-cloud-azure/pom.xml (added)
+++ jackrabbit/oak/trunk/oak-blob-cloud-azure/pom.xml Fri Mar 24 09:31:18 2017
@@ -0,0 +1,174 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+  -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>oak-parent</artifactId>
+        <groupId>org.apache.jackrabbit</groupId>
+        <version>1.8-SNAPSHOT</version>
+        <relativePath>../oak-parent/pom.xml</relativePath>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>oak-blob-cloud-azure</artifactId>
+    <name>Oak Azure Cloud Blob Store</name>
+    <packaging>bundle</packaging>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.felix</groupId>
+                <artifactId>maven-scr-plugin</artifactId>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.felix</groupId>
+                <artifactId>maven-bundle-plugin</artifactId>
+                <configuration>
+                    <instructions>
+                        <Export-Package>
+                            org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage
+                        </Export-Package>
+                        <DynamicImport-Package>sun.io</DynamicImport-Package>
+                        <Embed-Dependency>
+                            azure-storage,
+                            azure-keyvault-core
+                        </Embed-Dependency>
+                    </instructions>
+                </configuration>
+            </plugin>
+        </plugins>
+    </build>
+
+    <!-- ====================================================================== -->
+    <!-- D E P E N D E N C I E S -->
+    <!-- ====================================================================== -->
+    <dependencies>
+        <!-- Optional OSGi dependencies, used only when running within OSGi -->
+        <dependency>
+            <groupId>org.osgi</groupId>
+            <artifactId>org.osgi.core</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.osgi</groupId>
+            <artifactId>org.osgi.compendium</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>biz.aQute.bnd</groupId>
+            <artifactId>bndlib</artifactId>
+            <scope>provided</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.felix</groupId>
+            <artifactId>org.apache.felix.scr.annotations</artifactId>
+            <scope>provided</scope>
+        </dependency>
+
+        <!-- JCR and Jackrabbit dependencies -->
+        <dependency>
+            <groupId>javax.jcr</groupId>
+            <artifactId>jcr</artifactId>
+            <version>2.0</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.jackrabbit</groupId>
+            <artifactId>jackrabbit-jcr-commons</artifactId>
+            <version>${jackrabbit.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.jackrabbit</groupId>
+            <artifactId>jackrabbit-data</artifactId>
+            <version>${jackrabbit.version}</version>
+        </dependency>
+
+        <!-- Dependencies to other Oak components -->
+        <dependency>
+            <groupId>org.apache.jackrabbit</groupId>
+            <artifactId>oak-commons</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <dependency>
+            <groupId>org.apache.jackrabbit</groupId>
+            <artifactId>oak-blob</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.jackrabbit</groupId>
+            <artifactId>oak-core</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+
+        <!-- Azure Blob Storage dependency -->
+        <dependency>
+            <groupId>com.microsoft.azure</groupId>
+            <artifactId>azure-storage</artifactId>
+            <version>5.0.0</version>
+        </dependency>
+        <dependency>
+            <groupId>com.microsoft.azure</groupId>
+            <artifactId>azure-keyvault-core</artifactId>
+            <version>0.9.7</version>
+        </dependency>
+
+        <!-- Test dependencies -->
+        <dependency>
+            <groupId>org.apache.jackrabbit</groupId>
+            <artifactId>jackrabbit-data</artifactId>
+            <version>${jackrabbit.version}</version>
+            <classifier>tests</classifier>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.jackrabbit</groupId>
+            <artifactId>oak-core</artifactId>
+            <version>${project.version}</version>
+            <classifier>tests</classifier>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>junit</groupId>
+            <artifactId>junit</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.slf4j</groupId>
+            <artifactId>jul-to-slf4j</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>ch.qos.logback</groupId>
+            <artifactId>logback-classic</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.apache.sling</groupId>
+            <artifactId>org.apache.sling.testing.osgi-mock</artifactId>
+            <scope>test</scope>
+        </dependency>
+        <dependency>
+            <groupId>org.mockito</groupId>
+            <artifactId>mockito-core</artifactId>
+            <version>1.10.19</version>
+            <scope>test</scope>
+        </dependency>
+    </dependencies>
+
+</project>

Propchange: jackrabbit/oak/trunk/oak-blob-cloud-azure/pom.xml
------------------------------------------------------------------------------
    svn:eol-style = native

Added: jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AbstractAzureDataStoreService.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AbstractAzureDataStoreService.java?rev=1788387&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AbstractAzureDataStoreService.java (added)
+++ jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AbstractAzureDataStoreService.java Fri Mar 24 09:31:18 2017
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage;
+
+import org.apache.jackrabbit.core.data.DataStore;
+import org.apache.jackrabbit.core.data.DataStoreException;
+import org.apache.jackrabbit.oak.plugins.blob.AbstractSharedCachingDataStore;
+import org.apache.jackrabbit.oak.plugins.blob.datastore.AbstractDataStoreService;
+import org.osgi.framework.Constants;
+import org.osgi.framework.ServiceRegistration;
+import org.osgi.service.component.ComponentContext;
+
+import java.util.Dictionary;
+import java.util.Hashtable;
+import java.util.Map;
+import java.util.Properties;
+
+public abstract class AbstractAzureDataStoreService extends AbstractDataStoreService {
+    private static final String DESCRIPTION = "oak.datastore.description";
+
+    private ServiceRegistration delegateReg;
+
+    @Override
+    protected DataStore createDataStore(ComponentContext context, Map<String, Object> config) {
+        Properties properties = new Properties();
+        properties.putAll(config);
+
+        AzureDataStore dataStore = new AzureDataStore();
+        dataStore.setStatisticsProvider(getStatisticsProvider());
+        dataStore.setProperties(properties);
+
+        Dictionary<String, Object> props = new Hashtable<String, Object>();
+        props.put(Constants.SERVICE_PID, dataStore.getClass().getName());
+        props.put(DESCRIPTION, getDescription());
+
+        delegateReg = context.getBundleContext().registerService(new String[] {
+                AbstractSharedCachingDataStore.class.getName(),
+                AbstractSharedCachingDataStore.class.getName()
+        }, dataStore , props);
+
+        return dataStore;
+    }
+
+    protected void deactivate() throws DataStoreException {
+        if (delegateReg != null) {
+            delegateReg.unregister();
+        }
+        super.deactivate();
+    }
+
+    @Override
+    protected String[] getDescription() {
+        return new String[] {"type=AzureBlob"};
+    }
+}

Propchange: jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AbstractAzureDataStoreService.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java?rev=1788387&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java (added)
+++ jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java Fri Mar 24 09:31:18 2017
@@ -0,0 +1,789 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage;
+
+import com.google.common.base.Function;
+import com.google.common.base.Strings;
+import com.google.common.collect.AbstractIterator;
+import com.google.common.collect.Lists;
+import com.microsoft.azure.storage.RequestOptions;
+import com.microsoft.azure.storage.ResultContinuation;
+import com.microsoft.azure.storage.ResultSegment;
+import com.microsoft.azure.storage.RetryPolicy;
+import com.microsoft.azure.storage.StorageException;
+import com.microsoft.azure.storage.blob.BlobListingDetails;
+import com.microsoft.azure.storage.blob.BlobRequestOptions;
+import com.microsoft.azure.storage.blob.CloudBlob;
+import com.microsoft.azure.storage.blob.CloudBlobContainer;
+import com.microsoft.azure.storage.blob.CloudBlobDirectory;
+import com.microsoft.azure.storage.blob.CloudBlockBlob;
+import com.microsoft.azure.storage.blob.CopyStatus;
+import com.microsoft.azure.storage.blob.ListBlobItem;
+import org.apache.jackrabbit.core.data.DataIdentifier;
+import org.apache.jackrabbit.core.data.DataRecord;
+import org.apache.jackrabbit.core.data.DataStoreException;
+import org.apache.jackrabbit.oak.commons.PropertiesUtil;
+import org.apache.jackrabbit.oak.spi.blob.AbstractDataRecord;
+import org.apache.jackrabbit.oak.spi.blob.AbstractSharedBackend;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.UnsupportedEncodingException;
+import java.net.URISyntaxException;
+import java.util.EnumSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Properties;
+import java.util.Queue;
+
+import static java.lang.Thread.currentThread;
+
+public class AzureBlobStoreBackend extends AbstractSharedBackend {
+
+    private static final Logger LOG = LoggerFactory.getLogger(AzureBlobStoreBackend.class);
+
+    private static final String META_DIR_NAME = "META";
+    private static final String META_KEY_PREFIX = META_DIR_NAME + "/";
+
+    private static final long BUFFERED_STREAM_THRESHHOLD = 1024 * 1024;
+
+    private Properties properties;
+    private String containerName;
+    private String connectionString;
+    private int concurrentRequestCount = 1;
+    private RetryPolicy retryPolicy;
+    private Integer requestTimeout;
+
+    private String secret;
+
+    public void setProperties(final Properties properties) {
+        this.properties = properties;
+    }
+
+    protected CloudBlobContainer getAzureContainer() throws DataStoreException {
+        CloudBlobContainer container = Utils.getBlobContainer(connectionString, containerName);
+        RequestOptions requestOptions = container.getServiceClient().getDefaultRequestOptions();
+        if (retryPolicy != null) {
+            requestOptions.setRetryPolicyFactory(retryPolicy);
+        }
+        if (requestTimeout != null) {
+            requestOptions.setTimeoutIntervalInMs(requestTimeout);
+        }
+        return container;
+    }
+
+    @Override
+    public void init() throws DataStoreException {
+        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+        long start = System.currentTimeMillis();
+        try {
+            Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+            LOG.debug("Started backend initialization");
+
+            if (null == properties) {
+                try {
+                    properties = Utils.readConfig(Utils.DEFAULT_CONFIG_FILE);
+                }
+                catch (IOException e) {
+                    throw new DataStoreException("Unable to initialize Azure Data Store from " + Utils.DEFAULT_CONFIG_FILE, e);
+                }
+            }
+            secret = properties.getProperty("secret");
+
+            try {
+                Utils.setProxyIfNeeded(properties);
+                containerName = (String) properties.get(AzureConstants.AZURE_BLOB_CONTAINER_NAME);
+                connectionString = Utils.getConnectionStringFromProperties(properties);
+                concurrentRequestCount = PropertiesUtil.toInteger(properties.get(AzureConstants.AZURE_BLOB_CONCURRENT_REQUESTS_PER_OPERATION), 1);
+                LOG.info("Using concurrentRequestsPerOperation={}", concurrentRequestCount);
+                retryPolicy = Utils.getRetryPolicy((String)properties.get(AzureConstants.AZURE_BLOB_MAX_REQUEST_RETRY));
+                if (properties.getProperty(AzureConstants.AZURE_BLOB_REQUEST_TIMEOUT) != null) {
+                    requestTimeout = PropertiesUtil.toInteger(properties.getProperty(AzureConstants.AZURE_BLOB_REQUEST_TIMEOUT), RetryPolicy.DEFAULT_CLIENT_RETRY_COUNT);
+                }
+
+                CloudBlobContainer azureContainer = getAzureContainer();
+
+                if (azureContainer.createIfNotExists()) {
+                    LOG.info("New container created. containerName={}", containerName);
+                } else {
+                    LOG.info("Reusing existing container. containerName={}", containerName);
+                }
+                LOG.debug("Backend initialized. duration={}",
+                          +(System.currentTimeMillis() - start));
+            }
+            catch (StorageException e) {
+                throw new DataStoreException(e);
+            }
+        }
+        finally {
+            Thread.currentThread().setContextClassLoader(contextClassLoader);
+        }
+    }
+
+    @Override
+    public InputStream read(DataIdentifier identifier) throws DataStoreException {
+        if (null == identifier) throw new NullPointerException("identifier");
+
+        String key = getKeyName(identifier);
+        long start = System.currentTimeMillis();
+        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+        try {
+            Thread.currentThread().setContextClassLoader(
+                    getClass().getClassLoader());
+            CloudBlockBlob blob = getAzureContainer().getBlockBlobReference(key);
+            if (!blob.exists()) {
+                throw new DataStoreException(String.format("Trying to read missing blob. identifier=%s", key));
+            }
+
+            InputStream is = blob.openInputStream();
+            LOG.debug("Got input stream for blob. identifier={} duration={}", key, (System.currentTimeMillis() - start));
+            return is;
+        }
+        catch (StorageException e) {
+            LOG.info("Error reading blob. identifier={}", key);
+            throw new DataStoreException(String.format("Cannot read blob. identifier=%s", key), e);
+        }
+        catch (URISyntaxException e) {
+            LOG.debug("Error reading blob. identifier={}", key);
+            throw new DataStoreException(String.format("Cannot read blob. identifier=%s", key), e);
+        } finally {
+            if (contextClassLoader != null) {
+                Thread.currentThread().setContextClassLoader(contextClassLoader);
+            }
+        }
+    }
+
+    @Override
+    public void write(DataIdentifier identifier, File file) throws DataStoreException {
+        if (null == identifier) {
+            throw new NullPointerException("identifier");
+        }
+        if (null == file) {
+            throw new NullPointerException("file");
+        }
+        String key = getKeyName(identifier);
+        long start = System.currentTimeMillis();
+        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+
+        try {
+            Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+
+            long len = file.length();
+            LOG.debug("Blob write started. identifier={} length={}", key, len);
+            CloudBlockBlob blob = getAzureContainer().getBlockBlobReference(key);
+            if (!blob.exists()) {
+                BlobRequestOptions options = new BlobRequestOptions();
+                options.setConcurrentRequestCount(concurrentRequestCount);
+                boolean useBufferedStream = len < BUFFERED_STREAM_THRESHHOLD;
+                final InputStream in = useBufferedStream  ? new BufferedInputStream(new FileInputStream(file)) : new FileInputStream(file);
+                try {
+                    blob.upload(in, len, null, options, null);
+                    LOG.debug("Blob created. identifier={} length={} duration={} buffered={}", key, len, (System.currentTimeMillis() - start), useBufferedStream);
+                } finally {
+                    in.close();
+                }
+                return;
+            }
+
+            blob.downloadAttributes();
+            if (blob.getProperties().getLength() != len) {
+                throw new DataStoreException("Length Collision. identifier=" + key +
+                                             " new length=" + len +
+                                             " old length=" + blob.getProperties().getLength());
+            }
+            LOG.trace("Blob already exists. identifier={} lastModified={}", key, blob.getProperties().getLastModified().getTime());
+            blob.startCopy(blob);
+            //TODO: better way of updating lastModified (use custom metadata?)
+            if (!waitForCopy(blob)) {
+                throw new DataStoreException(
+                    String.format("Cannot update lastModified for blob. identifier=%s status=%s",
+                                  key, blob.getCopyState().getStatusDescription()));
+            }
+            LOG.debug("Blob updated. identifier={} lastModified={} duration={}", key,
+                      blob.getProperties().getLastModified().getTime(), (System.currentTimeMillis() - start));
+        }
+        catch (StorageException e) {
+            LOG.info("Error writing blob. identifier={}", key, e);
+            throw new DataStoreException(String.format("Cannot write blob. identifier=%s", key), e);
+        }
+        catch (URISyntaxException | IOException e) {
+            LOG.debug("Error writing blob. identifier={}", key, e);
+            throw new DataStoreException(String.format("Cannot write blob. identifier=%s", key), e);
+        } catch (InterruptedException e) {
+            LOG.debug("Error writing blob. identifier={}", key, e);
+            throw new DataStoreException(String.format("Cannot copy blob. identifier=%s", key), e);
+        } finally {
+            if (null != contextClassLoader) {
+                Thread.currentThread().setContextClassLoader(contextClassLoader);
+            }
+        }
+    }
+
+    private static boolean waitForCopy(CloudBlob blob) throws StorageException, InterruptedException {
+        boolean continueLoop = true;
+        CopyStatus status = CopyStatus.PENDING;
+        while (continueLoop) {
+            blob.downloadAttributes();
+            status = blob.getCopyState().getStatus();
+            continueLoop = status == CopyStatus.PENDING;
+            // Sleep if retry is needed
+            if (continueLoop) {
+                Thread.sleep(500);
+            }
+        }
+        return status == CopyStatus.SUCCESS;
+    }
+
+    @Override
+    public byte[] getOrCreateReferenceKey() throws DataStoreException {
+        try {
+            if (!Strings.isNullOrEmpty(secret)) {
+                return secret.getBytes("UTF-8");
+            }
+            LOG.warn("secret not defined");
+            return super.getOrCreateReferenceKey();
+        } catch (UnsupportedEncodingException e) {
+            throw new DataStoreException(e);
+        }
+    }
+
+    @Override
+    public DataRecord getRecord(DataIdentifier identifier) throws DataStoreException {
+        if (null == identifier) {
+            throw new NullPointerException("identifier");
+        }
+        String key = getKeyName(identifier);
+        long start = System.currentTimeMillis();
+        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+        try {
+            Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+
+            CloudBlockBlob blob = getAzureContainer().getBlockBlobReference(key);
+            if (blob.exists()) {
+                blob.downloadAttributes();
+                AzureBlobStoreDataRecord record = new AzureBlobStoreDataRecord(
+                    this,
+                    connectionString,
+                    containerName,
+                    new DataIdentifier(getIdentifierName(blob.getName())),
+                    blob.getProperties().getLastModified().getTime(),
+                    blob.getProperties().getLength());
+                LOG.debug("Data record read for blob. identifier={} duration={} record={}",
+                          key, (System.currentTimeMillis() - start), record);
+                return record;
+            } else {
+                LOG.debug("Blob not found. identifier={} duration={}",
+                          key, (System.currentTimeMillis() - start));
+                throw new DataStoreException(String.format("Cannot find blob. identifier=%s", key));
+            }
+        } catch (StorageException e) {
+            LOG.info("Error getting data record for blob. identifier={}", key, e);
+            throw new DataStoreException(String.format("Cannot retrieve blob. identifier=%s", key), e);
+        }
+        catch (URISyntaxException e) {
+            LOG.debug("Error getting data record for blob. identifier={}", key, e);
+            throw new DataStoreException(String.format("Cannot retrieve blob. identifier=%s", key), e);
+        } finally {
+            if (contextClassLoader != null) {
+                Thread.currentThread().setContextClassLoader(contextClassLoader);
+            }
+        }
+    }
+
+    @Override
+    public Iterator<DataIdentifier> getAllIdentifiers() throws DataStoreException {
+        return new RecordsIterator<DataIdentifier>(
+                new Function<AzureBlobInfo, DataIdentifier>() {
+                    @Override
+                    public DataIdentifier apply(AzureBlobInfo input) {
+                        return new DataIdentifier(getIdentifierName(input.getName()));
+                    }
+                }
+        );
+    }
+
+
+
+    @Override
+    public Iterator<DataRecord> getAllRecords() throws DataStoreException {
+        final AbstractSharedBackend backend = this;
+        return new RecordsIterator<DataRecord>(
+                new Function<AzureBlobInfo, DataRecord>() {
+                    @Override
+                    public DataRecord apply(AzureBlobInfo input) {
+                        return new AzureBlobStoreDataRecord(
+                            backend,
+                            connectionString,
+                            containerName,
+                            new DataIdentifier(getIdentifierName(input.getName())),
+                            input.getLastModified(),
+                            input.getLength());
+                    }
+                }
+        );
+    }
+
+    @Override
+    public boolean exists(DataIdentifier identifier) throws DataStoreException {
+        long start = System.currentTimeMillis();
+        String key = getKeyName(identifier);
+        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+        try {
+            Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+
+            boolean exists = getAzureContainer().getBlockBlobReference(key).exists();
+            LOG.debug("Blob exists={} identifier={} duration={}", exists, key, (System.currentTimeMillis() - start));
+            return exists;
+        }
+        catch (Exception e) {
+            throw new DataStoreException(e);
+        }
+        finally {
+            if (null != contextClassLoader) {
+                Thread.currentThread().setContextClassLoader(contextClassLoader);
+            }
+        }
+    }
+
+    @Override
+    public void close() throws DataStoreException {
+        LOG.info("AzureBlobBackend closed.");
+    }
+
+    @Override
+    public void deleteRecord(DataIdentifier identifier) throws DataStoreException {
+        if (null == identifier) throw new NullPointerException("identifier");
+
+        String key = getKeyName(identifier);
+        long start = System.currentTimeMillis();
+        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+        try {
+            Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+
+            boolean result = getAzureContainer().getBlockBlobReference(key).deleteIfExists();
+            LOG.debug("Blob {}. identifier={} duration={}",
+                    result ? "deleted" : "delete requested, but it does not exist (perhaps already deleted)",
+                    key, (System.currentTimeMillis() - start));
+        }
+        catch (StorageException e) {
+            LOG.info("Error deleting blob. identifier={}", key, e);
+            throw new DataStoreException(e);
+        }
+        catch (URISyntaxException e) {
+            throw new DataStoreException(e);
+        } finally {
+            if (contextClassLoader != null) {
+                Thread.currentThread().setContextClassLoader(contextClassLoader);
+            }
+        }
+    }
+
+    @Override
+    public void addMetadataRecord(InputStream input, String name) throws DataStoreException {
+        if (null == input) {
+            throw new NullPointerException("input");
+        }
+        if (Strings.isNullOrEmpty(name)) {
+            throw new IllegalArgumentException("name");
+        }
+        long start = System.currentTimeMillis();
+        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+        try {
+            Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+
+            addMetadataRecordImpl(input, name, -1L);
+            LOG.debug("Metadata record added. metadataName={} duration={}", name, (System.currentTimeMillis() - start));
+        }
+        finally {
+            if (null != contextClassLoader) {
+                Thread.currentThread().setContextClassLoader(contextClassLoader);
+            }
+        }
+    }
+
+    @Override
+    public void addMetadataRecord(File input, String name) throws DataStoreException {
+        if (null == input) {
+            throw new NullPointerException("input");
+        }
+        if (Strings.isNullOrEmpty(name)) {
+            throw new IllegalArgumentException("name");
+        }
+        long start = System.currentTimeMillis();
+        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+        try {
+            Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+
+            addMetadataRecordImpl(new FileInputStream(input), name, input.length());
+            LOG.debug("Metadata record added. metadataName={} duration={}", name, (System.currentTimeMillis() - start));
+        }
+        catch (FileNotFoundException e) {
+            throw new DataStoreException(e);
+        }
+        finally {
+            if (null != contextClassLoader) {
+                Thread.currentThread().setContextClassLoader(contextClassLoader);
+            }
+        }
+    }
+
+    private void addMetadataRecordImpl(final InputStream input, String name, long recordLength) throws DataStoreException {
+        try {
+            CloudBlobDirectory metaDir = getAzureContainer().getDirectoryReference(META_DIR_NAME);
+            CloudBlockBlob blob = metaDir.getBlockBlobReference(name);
+            blob.upload(input, recordLength);
+        }
+        catch (StorageException e) {
+            LOG.info("Error adding metadata record. metadataName={} length={}", name, recordLength, e);
+            throw new DataStoreException(e);
+        }
+        catch (URISyntaxException |  IOException e) {
+            throw new DataStoreException(e);
+        }
+    }
+
+    @Override
+    public DataRecord getMetadataRecord(String name) {
+        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+        long start = System.currentTimeMillis();
+        try {
+            Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+
+            CloudBlobDirectory metaDir = getAzureContainer().getDirectoryReference(META_DIR_NAME);
+            CloudBlockBlob blob = metaDir.getBlockBlobReference(name);
+            if (!blob.exists()) {
+                LOG.warn("Trying to read missing metadata. metadataName={}", name);
+                return null;
+            }
+            blob.downloadAttributes();
+            long lastModified = blob.getProperties().getLastModified().getTime();
+            long length = blob.getProperties().getLength();
+            AzureBlobStoreDataRecord record =  new AzureBlobStoreDataRecord(this,
+                                                connectionString,
+                                                containerName, new DataIdentifier(name),
+                                                lastModified,
+                                                length,
+                                                true);
+            LOG.debug("Metadata record read. metadataName={} duration={} record={}", name, (System.currentTimeMillis() - start), record);
+            return record;
+
+        } catch (StorageException e) {
+            LOG.info("Error reading metadata record. metadataName={}", name, e);
+            throw new RuntimeException(e);
+        } catch (Exception e) {
+            LOG.debug("Error reading metadata record. metadataName={}", name, e);
+            throw new RuntimeException(e);
+        } finally {
+            if (null != contextClassLoader) {
+                Thread.currentThread().setContextClassLoader(contextClassLoader);
+            }
+        }
+    }
+
+    @Override
+    public List<DataRecord> getAllMetadataRecords(String prefix) {
+        if (null == prefix) {
+            throw new NullPointerException("prefix");
+        }
+        long start = System.currentTimeMillis();
+        final List<DataRecord> records = Lists.newArrayList();
+        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+        try {
+            Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+
+            CloudBlobDirectory metaDir = getAzureContainer().getDirectoryReference(META_DIR_NAME);
+            for (ListBlobItem item : metaDir.listBlobs(prefix)) {
+                if (item instanceof CloudBlob) {
+                    CloudBlob blob = (CloudBlob) item;
+                    records.add(new AzureBlobStoreDataRecord(
+                        this,
+                        connectionString,
+                        containerName,
+                        new DataIdentifier(stripMetaKeyPrefix(blob.getName())),
+                        blob.getProperties().getLastModified().getTime(),
+                        blob.getProperties().getLength(),
+                        true));
+                }
+            }
+            LOG.debug("Metadata records read. recordsRead={} metadataFolder={} duration={}", records.size(), prefix, (System.currentTimeMillis() - start));
+        }
+        catch (StorageException e) {
+            LOG.info("Error reading all metadata records. metadataFolder={}", prefix, e);
+        }
+        catch (DataStoreException | URISyntaxException e) {
+            LOG.debug("Error reading all metadata records. metadataFolder={}", prefix, e);
+        }
+        finally {
+            if (null != contextClassLoader) {
+                Thread.currentThread().setContextClassLoader(contextClassLoader);
+            }
+        }
+        return records;
+    }
+
+    @Override
+    public boolean deleteMetadataRecord(String name) {
+        long start = System.currentTimeMillis();
+        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+        try {
+            Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+
+            CloudBlockBlob blob = getAzureContainer().getBlockBlobReference(addMetaKeyPrefix(name));
+            boolean result = blob.deleteIfExists();
+            LOG.debug("Metadata record {}. metadataName={} duration={}",
+                    result ? "deleted" : "delete requested, but it does not exist (perhaps already deleted)",
+                    name, (System.currentTimeMillis() - start));
+            return result;
+
+        }
+        catch (StorageException e) {
+            LOG.info("Error deleting metadata record. metadataName={}", name, e);
+        }
+        catch (DataStoreException | URISyntaxException e) {
+            LOG.debug("Error deleting metadata record. metadataName={}", name, e);
+        }
+        finally {
+            if (contextClassLoader != null) {
+                Thread.currentThread().setContextClassLoader(contextClassLoader);
+            }
+        }
+        return false;
+    }
+
+    @Override
+    public void deleteAllMetadataRecords(String prefix) {
+        if (null == prefix) {
+            throw new NullPointerException("prefix");
+        }
+        long start = System.currentTimeMillis();
+        ClassLoader contextClassLoader = Thread.currentThread().getContextClassLoader();
+        try {
+            Thread.currentThread().setContextClassLoader(getClass().getClassLoader());
+
+            CloudBlobDirectory metaDir = getAzureContainer().getDirectoryReference(META_DIR_NAME);
+            int total = 0;
+            for (ListBlobItem item : metaDir.listBlobs(prefix)) {
+                if (item instanceof CloudBlob) {
+                    if (((CloudBlob)item).deleteIfExists()) {
+                        total++;
+                    }
+                }
+            }
+            LOG.debug("Metadata records deleted. recordsDeleted={} metadataFolder={} duration={}",
+                    total, prefix, (System.currentTimeMillis() - start));
+
+        }
+        catch (StorageException e) {
+            LOG.info("Error deleting all metadata records. metadataFolder={}", prefix, e);
+        }
+        catch (DataStoreException | URISyntaxException e) {
+            LOG.debug("Error deleting all metadata records. metadataFolder={}", prefix, e);
+        }
+        finally {
+            if (null != contextClassLoader) {
+                Thread.currentThread().setContextClassLoader(contextClassLoader);
+            }
+        }
+    }
+
+
+    /**
+     * Get key from data identifier. Object is stored with key in ADS.
+     */
+    private static String getKeyName(DataIdentifier identifier) {
+        String key = identifier.toString();
+        return key.substring(0, 4) + Utils.DASH + key.substring(4);
+    }
+
+    /**
+     * Get data identifier from key.
+     */
+    private static String getIdentifierName(String key) {
+        if (!key.contains(Utils.DASH)) {
+            return null;
+        } else if (key.contains(META_KEY_PREFIX)) {
+            return key;
+        }
+        return key.substring(0, 4) + key.substring(5);
+    }
+
+    private static String addMetaKeyPrefix(final String key) {
+        return META_KEY_PREFIX + key;
+    }
+
+    private static String stripMetaKeyPrefix(String name) {
+        if (name.startsWith(META_KEY_PREFIX)) {
+            return name.substring(META_KEY_PREFIX.length());
+        }
+        return name;
+    }
+
+    private static class AzureBlobInfo {
+        private final String name;
+        private final long lastModified;
+        private final long length;
+
+        public AzureBlobInfo(String name, long lastModified, long length) {
+            this.name = name;
+            this.lastModified = lastModified;
+            this.length = length;
+        }
+
+        public String getName() {
+            return name;
+        }
+
+        public long getLastModified() {
+            return lastModified;
+        }
+
+        public long getLength() {
+            return length;
+        }
+
+        public static AzureBlobInfo fromCloudBlob(CloudBlob cloudBlob) {
+            return new AzureBlobInfo(cloudBlob.getName(),
+                                     cloudBlob.getProperties().getLastModified().getTime(),
+                                     cloudBlob.getProperties().getLength());
+        }
+    }
+
+    private class RecordsIterator<T> extends AbstractIterator<T> {
+        // Seems to be thread-safe (in 5.0.0)
+        ResultContinuation resultContinuation;
+        boolean firstCall = true;
+        final Function<AzureBlobInfo, T> transformer;
+        final Queue<AzureBlobInfo> items = Lists.newLinkedList();
+
+        public RecordsIterator (Function<AzureBlobInfo, T> transformer) {
+            this.transformer = transformer;
+        }
+
+        @Override
+        protected T computeNext() {
+            if (items.isEmpty()) {
+                loadItems();
+            }
+            if (!items.isEmpty()) {
+                return transformer.apply(items.remove());
+            }
+            return endOfData();
+        }
+
+        private boolean loadItems() {
+            long start = System.currentTimeMillis();
+            ClassLoader contextClassLoader = currentThread().getContextClassLoader();
+            try {
+                currentThread().setContextClassLoader(getClass().getClassLoader());
+
+                CloudBlobContainer container = Utils.getBlobContainer(connectionString, containerName);
+                if (!firstCall && (resultContinuation == null || !resultContinuation.hasContinuation())) {
+                    LOG.trace("No more records in container. containerName={}", container);
+                    return false;
+                }
+                firstCall = false;
+                ResultSegment<ListBlobItem> results = container.listBlobsSegmented(null, false, EnumSet.noneOf(BlobListingDetails.class), null, resultContinuation, null, null);
+                resultContinuation = results.getContinuationToken();
+                for (ListBlobItem item : results.getResults()) {
+                    if (item instanceof CloudBlob) {
+                        items.add(AzureBlobInfo.fromCloudBlob((CloudBlob)item));
+                    }
+                }
+                LOG.debug("Container records batch read. batchSize={} containerName={} duration={}",
+                          results.getLength(), containerName,  (System.currentTimeMillis() - start));
+                return results.getLength() > 0;
+            }
+            catch (StorageException e) {
+                LOG.info("Error listing blobs. containerName={}", containerName, e);
+            }
+            catch (DataStoreException e) {
+                LOG.debug("Cannot list blobs. containerName={}", containerName, e);
+            } finally {
+                if (contextClassLoader != null) {
+                    currentThread().setContextClassLoader(contextClassLoader);
+                }
+            }
+            return false;
+        }
+    }
+
+    static class AzureBlobStoreDataRecord extends AbstractDataRecord {
+        final String connectionString;
+        final String containerName;
+        final long lastModified;
+        final long length;
+        final boolean isMeta;
+
+        public AzureBlobStoreDataRecord(AbstractSharedBackend backend, String connectionString, String containerName,
+                                        DataIdentifier key, long lastModified, long length) {
+            this(backend, connectionString, containerName, key, lastModified, length, false);
+        }
+
+        public AzureBlobStoreDataRecord(AbstractSharedBackend backend, String connectionString, String containerName,
+                                        DataIdentifier key, long lastModified, long length, boolean isMeta) {
+            super(backend, key);
+            this.connectionString = connectionString;
+            this.containerName = containerName;
+            this.lastModified = lastModified;
+            this.length = length;
+            this.isMeta = isMeta;
+        }
+
+        @Override
+        public long getLength() throws DataStoreException {
+            return length;
+        }
+
+        @Override
+        public InputStream getStream() throws DataStoreException {
+            String id = getKeyName(getIdentifier());
+            CloudBlobContainer container = Utils.getBlobContainer(connectionString, containerName);
+            if (isMeta) {
+                id = addMetaKeyPrefix(getIdentifier().toString());
+            }
+            try {
+                return container.getBlockBlobReference(id).openInputStream();
+            } catch (StorageException | URISyntaxException e) {
+                throw new DataStoreException(e);
+            }
+        }
+
+        @Override
+        public long getLastModified() {
+            return lastModified;
+        }
+
+        @Override
+        public String toString() {
+            return "AzureBlobStoreDataRecord{" +
+                   "identifier=" + getIdentifier() +
+                   ", length=" + length +
+                   ", lastModified=" + lastModified +
+                   ", containerName='" + containerName + '\'' +
+                   '}';
+        }
+    }
+}

Propchange: jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureBlobStoreBackend.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureConstants.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureConstants.java?rev=1788387&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureConstants.java (added)
+++ jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureConstants.java Fri Mar 24 09:31:18 2017
@@ -0,0 +1,64 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage;
+
+public final class AzureConstants {
+    /**
+     * Azure Storage Account name
+     */
+    public static final String AZURE_STORAGE_ACCOUNT_NAME = "accessKey";
+
+    /**
+     * Azure Storage Account Key
+     */
+    public static final String AZURE_STORAGE_ACCOUNT_KEY = "secretKey";
+
+    /**
+     * Azure Blob Storage container name
+     */
+    public static final String AZURE_BLOB_CONTAINER_NAME = "container";
+
+    /**
+     * Azure Blob Storage request timeout.
+     */
+    public static final String AZURE_BLOB_REQUEST_TIMEOUT = "socketTimeout";
+
+    /**
+     * Azure Blob Storage maximum retries per request.
+     */
+    public static final String AZURE_BLOB_MAX_REQUEST_RETRY = "maxErrorRetry";
+
+    /**
+     * Azure Blob Storage maximum connections per operation (default 1)
+     */
+    public static final String AZURE_BLOB_CONCURRENT_REQUESTS_PER_OPERATION = "maxConnections";
+
+    /**
+     *  Constant to set proxy host.
+     */
+    public static final String PROXY_HOST = "proxyHost";
+
+    /**
+     *  Constant to set proxy port.
+     */
+    public static final String PROXY_PORT = "proxyPort";
+
+    private AzureConstants() { }
+}

Propchange: jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureConstants.java
------------------------------------------------------------------------------
    svn:eol-style = native

Added: jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStore.java
URL: http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStore.java?rev=1788387&view=auto
==============================================================================
--- jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStore.java (added)
+++ jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStore.java Fri Mar 24 09:31:18 2017
@@ -0,0 +1,59 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage;
+
+import org.apache.jackrabbit.oak.plugins.blob.AbstractSharedCachingDataStore;
+import org.apache.jackrabbit.oak.spi.blob.AbstractSharedBackend;
+import org.apache.jackrabbit.oak.spi.blob.SharedBackend;
+
+import java.util.Properties;
+
+public class AzureDataStore extends AbstractSharedCachingDataStore {
+
+    private int minRecordLength = 16*1024;
+
+    protected Properties properties;
+
+    @Override
+    protected AbstractSharedBackend createBackend() {
+        AzureBlobStoreBackend backend = new AzureBlobStoreBackend();
+        if (null != properties) {
+            backend.setProperties(properties);
+        }
+        return backend;
+    }
+
+    public void setProperties(final Properties properties) {
+        this.properties = properties;
+    }
+
+    public SharedBackend getBackend() {
+        return backend;
+    }
+
+    @Override
+    public int getMinRecordLength() {
+        return minRecordLength;
+    }
+
+    public void setMinRecordLength(int minRecordLength) {
+        this.minRecordLength = minRecordLength;
+    }
+}

Propchange: jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStore.java
------------------------------------------------------------------------------
    svn:eol-style = native
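
A minimal usage sketch for the class above: constructing the data store, handing it the configuration Properties (forwarded to the AzureBlobStoreBackend created in createBackend()), and optionally tuning the minimum record length. The example class is hypothetical, and initialization inherited from AbstractSharedCachingDataStore is deliberately omitted.

    package org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage;
    // Hypothetical example class, not part of this commit.

    import java.util.Properties;

    public class AzureDataStoreExample {

        public static AzureDataStore newDataStore(Properties config) {
            AzureDataStore ds = new AzureDataStore();
            // Forwarded to the AzureBlobStoreBackend created in createBackend().
            ds.setProperties(config);
            // Minimum size of a binary stored in this data store (default here: 16 KB).
            ds.setMinRecordLength(4 * 1024);
            return ds;
        }
    }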

Added: 
jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStoreService.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStoreService.java?rev=1788387&view=auto
==============================================================================
--- 
jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStoreService.java
 (added)
+++ 
jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStoreService.java
 Fri Mar 24 09:31:18 2017
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage;
+
+import org.apache.felix.scr.annotations.Component;
+import org.apache.felix.scr.annotations.ConfigurationPolicy;
+
+@Component(policy = ConfigurationPolicy.REQUIRE, name = AzureDataStoreService.NAME, metatype = true)
+public class AzureDataStoreService extends AbstractAzureDataStoreService {
+    public static final String NAME = "org.apache.jackrabbit.oak.plugins.blob.datastore.AzureDataStore";
+}

Propchange: 
jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/AzureDataStoreService.java
------------------------------------------------------------------------------
    svn:eol-style = native
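
Because the component uses ConfigurationPolicy.REQUIRE, it only activates once a configuration bound to its PID exists. Below is a minimal sketch of supplying such a configuration programmatically through the standard OSGi ConfigurationAdmin API; the example class is hypothetical, the values are placeholders, and it is an assumption that AbstractAzureDataStoreService (not shown in this section) forwards these properties to the data store using the AzureConstants keys.

    package org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage;
    // Hypothetical example class, not part of this commit.

    import java.io.IOException;
    import java.util.Hashtable;

    import org.osgi.service.cm.Configuration;
    import org.osgi.service.cm.ConfigurationAdmin;

    public class AzureDataStoreConfigExample {

        public static void configure(ConfigurationAdmin configAdmin) throws IOException {
            Hashtable<String, Object> props = new Hashtable<String, Object>();
            // Assumed to mirror the AzureConstants keys; values are placeholders.
            props.put(AzureConstants.AZURE_STORAGE_ACCOUNT_NAME, "myaccount");
            props.put(AzureConstants.AZURE_STORAGE_ACCOUNT_KEY, "<base64-account-key>");
            props.put(AzureConstants.AZURE_BLOB_MAX_REQUEST_RETRY, "3");

            // Binding a configuration to the component's PID activates the REQUIRE-policy component.
            Configuration cfg = configAdmin.getConfiguration(AzureDataStoreService.NAME, null);
            cfg.update(props);
        }
    }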

Added: 
jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/Utils.java
URL: 
http://svn.apache.org/viewvc/jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/Utils.java?rev=1788387&view=auto
==============================================================================
--- 
jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/Utils.java
 (added)
+++ 
jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/Utils.java
 Fri Mar 24 09:31:18 2017
@@ -0,0 +1,141 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage;
+
+import com.google.common.base.Strings;
+import com.microsoft.azure.storage.CloudStorageAccount;
+import com.microsoft.azure.storage.OperationContext;
+import com.microsoft.azure.storage.RetryExponentialRetry;
+import com.microsoft.azure.storage.RetryNoRetry;
+import com.microsoft.azure.storage.RetryPolicy;
+import com.microsoft.azure.storage.StorageException;
+import com.microsoft.azure.storage.blob.CloudBlobClient;
+import com.microsoft.azure.storage.blob.CloudBlobContainer;
+import org.apache.jackrabbit.core.data.DataStoreException;
+import org.apache.jackrabbit.oak.commons.PropertiesUtil;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.InetSocketAddress;
+import java.net.Proxy;
+import java.net.SocketAddress;
+import java.net.URISyntaxException;
+import java.security.InvalidKeyException;
+import java.util.Properties;
+
+public final class Utils {
+
+    public static final String DEFAULT_CONFIG_FILE = "azure.properties";
+
+    public static final String DASH = "-";
+
+    /**
+     * Private constructor so that the class cannot be instantiated from outside.
+     */
+    private Utils() {
+    }
+
+    /**
+     * Creates a {@link CloudBlobClient} from an Azure storage connection string.
+     *
+     * @param connectionString the connection string used to configure the {@link CloudBlobClient}
+     * @return the configured {@link CloudBlobClient}
+     * @throws URISyntaxException if the connection string contains an invalid URI
+     * @throws InvalidKeyException if the account key in the connection string is invalid
+     */
+    public static CloudBlobClient getBlobClient(final String connectionString) throws URISyntaxException, InvalidKeyException {
+        CloudStorageAccount account = CloudStorageAccount.parse(connectionString);
+        return account.createCloudBlobClient();
+    }
+
+    public static CloudBlobContainer getBlobContainer(final String connectionString, final String containerName) throws DataStoreException {
+        try {
+            CloudBlobClient client = Utils.getBlobClient(connectionString);
+            return client.getContainerReference(containerName);
+        } catch (InvalidKeyException | URISyntaxException | StorageException e) {
+            throw new DataStoreException(e);
+        }
+    }
+
+    public static void setProxyIfNeeded(final Properties properties) {
+        String proxyHost = properties.getProperty(AzureConstants.PROXY_HOST);
+        String proxyPort = properties.getProperty(AzureConstants.PROXY_PORT);
+
+        // Only set a proxy when both host and port are configured; parsing an empty
+        // port would otherwise fail with a NumberFormatException.
+        if (!Strings.isNullOrEmpty(proxyHost) &&
+            !Strings.isNullOrEmpty(proxyPort)) {
+            int port = Integer.parseInt(proxyPort);
+            SocketAddress proxyAddr = new InetSocketAddress(proxyHost, port);
+            Proxy proxy = new Proxy(Proxy.Type.HTTP, proxyAddr);
+            OperationContext.setDefaultProxy(proxy);
+        }
+    }
+
+    public static RetryPolicy getRetryPolicy(final String maxRequestRetry) {
+        int retries = PropertiesUtil.toInteger(maxRequestRetry, -1);
+        if (retries < 0) {
+            return null;
+        }
+        if (retries == 0) {
+            return new RetryNoRetry();
+        }
+        return new RetryExponentialRetry(RetryPolicy.DEFAULT_CLIENT_BACKOFF, retries);
+    }
+
+
+    public static String getConnectionStringFromProperties(Properties properties) {
+        return getConnectionString(
+            properties.getProperty(AzureConstants.AZURE_STORAGE_ACCOUNT_NAME, ""),
+            properties.getProperty(AzureConstants.AZURE_STORAGE_ACCOUNT_KEY, ""));
+    }
+
+    public static String getConnectionString(final String accountName, final String accountKey) {
+        return String.format(
+            "DefaultEndpointsProtocol=https;AccountName=%s;AccountKey=%s",
+            accountName,
+            accountKey
+        );
+    }
+
+    /**
+     * Reads a configuration properties file.
+     *
+     * @param fileName the properties file name
+     * @return the properties
+     * @throws java.io.IOException if the file doesn't exist or cannot be read
+     */
+    public static Properties readConfig(String fileName) throws IOException {
+        if (!new File(fileName).exists()) {
+            throw new IOException("Config file not found. fileName=" + fileName);
+        }
+        Properties prop = new Properties();
+        try (InputStream in = new FileInputStream(fileName)) {
+            prop.load(in);
+        }
+        return prop;
+    }
+}

Propchange: 
jackrabbit/oak/trunk/oak-blob-cloud-azure/src/main/java/org/apache/jackrabbit/oak/blob/cloud/azure/blobstorage/Utils.java
------------------------------------------------------------------------------
    svn:eol-style = native
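
Putting the helpers above together, a minimal sketch of resolving a blob container from a properties file. The example class is hypothetical; applying the retry policy from Utils.getRetryPolicy to the client's request options is out of scope here, since that wiring presumably lives in AzureBlobStoreBackend, which is not shown in this section.

    package org.apache.jackrabbit.oak.blob.cloud.azure.blobstorage;
    // Hypothetical example class, not part of this commit.

    import java.io.IOException;
    import java.util.Properties;

    import com.microsoft.azure.storage.blob.CloudBlobContainer;
    import org.apache.jackrabbit.core.data.DataStoreException;

    public class UtilsExample {

        public static CloudBlobContainer openContainer(String configFile, String containerName)
                throws IOException, DataStoreException {
            // Throws IOException if the configuration file does not exist.
            Properties props = Utils.readConfig(configFile);
            // Applies an HTTP proxy only when both proxyHost and proxyPort are configured.
            Utils.setProxyIfNeeded(props);
            String connectionString = Utils.getConnectionStringFromProperties(props);
            return Utils.getBlobContainer(connectionString, containerName);
        }
    }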

