package org.apache.commons.vfs2.provider.hdfs;
import java.io.InputStream;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.Collection;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.vfs2.CacheStrategy;
import org.apache.commons.vfs2.Capability;
import org.apache.commons.vfs2.FileName;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileSystemOptions;
import org.apache.commons.vfs2.provider.AbstractFileName;
import org.apache.commons.vfs2.provider.AbstractFileSystem;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
 * A VFS {@link org.apache.commons.vfs2.FileSystem} backed by Apache Hadoop HDFS.
 * <p>
 * The underlying Hadoop {@link FileSystem} client is created lazily on the first
 * call to {@link #resolveFile(FileName)} and released in {@link #close()}.
 * </p>
 */
public class HdfsFileSystem extends AbstractFileSystem {

    private static final Log log = LogFactory.getLog(HdfsFileSystem.class);

    /**
     * Lazily initialized HDFS client. Declared volatile because it is written
     * under the monitor in {@link #resolveFile(FileName)} but read without the
     * monitor in {@link #close()}; without volatile that unlocked read is a
     * data race under the Java Memory Model.
     */
    private volatile FileSystem fs;

    /**
     * Constructs this file system.
     *
     * @param rootName          the name of the root of this file system
     * @param fileSystemOptions options used to build this file system
     */
    protected HdfsFileSystem(final FileName rootName, final FileSystemOptions fileSystemOptions) {
        // No parent layer: HDFS is a root-level file system.
        super(rootName, null, fileSystemOptions);
    }

    /**
     * Adds this file system's capabilities, delegating to the provider's
     * static capability list.
     *
     * @param capabilities the collection to receive the capabilities
     */
    @Override
    protected void addCapabilities(final Collection<Capability> capabilities) {
        capabilities.addAll(HdfsFileProvider.CAPABILITIES);
    }

    /**
     * Closes the underlying HDFS client, then this file system.
     * <p>
     * Fix: {@code super.close()} now runs in a {@code finally} block so the
     * {@link AbstractFileSystem} resources are released even when closing the
     * HDFS client fails; previously an {@link IOException} here skipped
     * {@code super.close()} entirely.
     * </p>
     *
     * @throws RuntimeException wrapping any {@link IOException} raised while
     *         closing the HDFS client
     */
    @Override
    public void close() {
        try {
            if (null != fs) {
                fs.close();
            }
        } catch (final IOException e) {
            throw new RuntimeException("Error closing HDFS client", e);
        } finally {
            // Always release the VFS-side resources, even on failure above.
            super.close();
        }
    }

    /**
     * Direct file creation is not supported; file objects are materialized
     * through {@link #resolveFile(FileName)} instead.
     *
     * @param name the name of the file to create
     * @return never returns normally
     * @throws FileSystemException always
     */
    @Override
    protected FileObject createFile(final AbstractFileName name) throws Exception {
        throw new FileSystemException("Operation not supported");
    }

    /**
     * Resolves a file name to a {@link FileObject}, lazily connecting to HDFS
     * on the first call and consulting the VFS files cache when one is
     * configured.
     *
     * @param name the file name to resolve
     * @return the resolved file object
     * @throws FileSystemException if connecting to HDFS fails
     */
    @Override
    public FileObject resolveFile(final FileName name) throws FileSystemException {
        // Lazily initialize the HDFS client exactly once; the monitor guards
        // the check-then-act on the fs field.
        synchronized (this) {
            if (this.fs == null) {
                connect(name.getRootURI());
            }
        }
        final boolean useCache = null != getContext().getFileSystemManager().getFilesCache();
        FileObject file = useCache ? this.getFileFromCache(name) : null;
        if (null == file) {
            String path;
            try {
                // Names arrive URL-encoded; decode them for the HDFS path.
                path = URLDecoder.decode(name.getPath(), "UTF-8");
            } catch (final UnsupportedEncodingException e) {
                // UTF-8 is guaranteed to be supported on every JVM; this
                // fallback is purely defensive and should be unreachable.
                path = name.getPath();
            }
            file = new HdfsFileObject((AbstractFileName) name, this, fs, new Path(path));
            if (useCache) {
                this.putFileToCache(file);
            }
        }
        if (getFileSystemManager().getCacheStrategy().equals(CacheStrategy.ON_RESOLVE)) {
            file.refresh();
        }
        return file;
    }

    /**
     * Builds a Hadoop {@link Configuration} from the file system options and
     * opens the HDFS client. Must be called while holding this instance's
     * monitor.
     *
     * @param hdfsUri the root URI identifying the HDFS namenode
     * @throws FileSystemException if the connection cannot be established
     */
    private void connect(final String hdfsUri) throws FileSystemException {
        final HdfsFileSystemConfigBuilder builder = HdfsFileSystemConfigBuilder.getInstance();
        final FileSystemOptions options = getFileSystemOptions();
        // Load the default Hadoop resources, then point at the requested URI.
        final Configuration conf = new Configuration(true);
        conf.set(FileSystem.FS_DEFAULT_NAME_KEY, hdfsUri);
        addConfigResources(conf, builder, options);
        try {
            fs = FileSystem.get(conf);
        } catch (final IOException e) {
            log.error("Error connecting to filesystem " + hdfsUri, e);
            throw new FileSystemException("Error connecting to filesystem " + hdfsUri, e);
        }
    }

    /**
     * Applies every user-supplied configuration source (resource names, paths,
     * URLs, an input stream, or another {@link Configuration}) to {@code conf}.
     *
     * @param conf    the configuration being assembled
     * @param builder accessor for the HDFS-specific file system options
     * @param options the options supplied by the caller
     */
    private static void addConfigResources(final Configuration conf,
            final HdfsFileSystemConfigBuilder builder, final FileSystemOptions options) {
        final String[] configNames = builder.getConfigNames(options);
        if (configNames != null) {
            for (final String configName : configNames) {
                log.debug("Adding HDFS configuration resource: " + configName);
                conf.addResource(configName);
            }
        }
        final Path[] configPaths = builder.getConfigPaths(options);
        if (configPaths != null) {
            for (final Path path : configPaths) {
                log.debug("Adding HDFS configuration path: " + path);
                conf.addResource(path);
            }
        }
        final URL[] configURLs = builder.getConfigURLs(options);
        if (configURLs != null) {
            for (final URL url : configURLs) {
                log.debug("Adding HDFS configuration URL: " + url);
                conf.addResource(url);
            }
        }
        final InputStream configStream = builder.getConfigInputStream(options);
        if (configStream != null) {
            log.debug("Adding HDFS configuration stream");
            conf.addResource(configStream);
        }
        final Configuration configConfiguration = builder.getConfigConfiguration(options);
        if (configConfiguration != null) {
            log.debug("Adding HDFS configuration object");
            conf.addResource(configConfiguration);
        }
    }
}