Hadoop: No FileSystem for scheme: hdfs


When I run a Storm topology that contains an HBase bolt, I get the following error:

java.io.IOException: No FileSystem for scheme: hdfs
at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2298) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2305) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:89) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2344) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2326) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:353) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.fs.Path.getFileSystem(Path.java:194) ~[hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.hadoop.hbase.util.DynamicClassLoader.<init>(DynamicClassLoader.java:104) ~[hbase-common-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.protobuf.ProtobufUtil.<clinit>(ProtobufUtil.java:201) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.ClusterId.parseFrom(ClusterId.java:64) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.zookeeper.ZKClusterId.readClusterIdZNode(ZKClusterId.java:69) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.ZooKeeperRegistry.getClusterId(ZooKeeperRegistry.java:83) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.retrieveClusterId(HConnectionManager.java:857) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HConnectionManager$HConnectionImplementation.<init>(HConnectionManager.java:662) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) [na:1.7.0_72]
at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57) [na:1.7.0_72]
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) [na:1.7.0_72]
at java.lang.reflect.Constructor.newInstance(Constructor.java:526) [na:1.7.0_72]
at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:414) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HConnectionManager.createConnection(HConnectionManager.java:393) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HConnectionManager.getConnection(HConnectionManager.java:274) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:194) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.hadoop.hbase.client.HTable.<init>(HTable.java:156) [hbase-client-0.98.1-hadoop2.jar:0.98.1-hadoop2]
at org.apache.storm.hbase.bolt.HBaseBolt$1.run(HBaseBolt.java:97) [storm-hbase-0.1.2.jar:na]
at org.apache.storm.hbase.bolt.HBaseBolt$1.run(HBaseBolt.java:94) [storm-hbase-0.1.2.jar:na]
at java.security.AccessController.doPrivileged(Native Method) [na:1.7.0_72]
at javax.security.auth.Subject.doAs(Subject.java:415) [na:1.7.0_72]
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1438) [hadoop-common-2.0.0-cdh4.7.0.jar:na]
at org.apache.storm.hbase.bolt.HBaseBolt.prepare(HBaseBolt.java:94) [storm-hbase-0.1.2.jar:na]
at backtype.storm.daemon.executor$fn__3352$fn__3364.invoke(executor.clj:690) [storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at backtype.storm.util$async_loop$fn__452.invoke(util.clj:429) [storm-core-0.9.2-incubating.jar:0.9.2-incubating]
at clojure.lang.AFn.run(AFn.java:24) [clojure-1.5.1.jar:na]
at java.lang.Thread.run(Thread.java:745) [na:1.7.0_72]
16:44:32.839 [Thread-31-HbasePersistorBolt] INFO backtype.storm.daemon.executor - Prepared bolt HbasePersistorBolt:5

Here is my pom.xml:

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>
    <parent>
        <groupId>fr.aid.cim</groupId>
        <artifactId>aid-cim</artifactId>
        <version>0.9-1-SNAPSHOT</version>
    </parent>
    <artifactId>event-structure-topology</artifactId>

<dependencies>
    <!-- Hadoop Hbase Storm Kafka dependencies-->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>${org.apache.hadoop.version}</version>
        <exclusions>
            <exclusion>
                <artifactId>com.google.protobuf</artifactId>
                <groupId>protobuf-java</groupId>
            </exclusion>
        </exclusions>
    </dependency>

    <dependency>
        <groupId>com.google.protobuf</groupId>
        <artifactId>protobuf-java</artifactId>
        <version>2.5.0</version>
    </dependency>

    <dependency>
        <groupId>com.github.ptgoetz</groupId>
        <artifactId>storm-hbase</artifactId>
        <version>${storm-hbase.version}</version>
    </dependency>

    <dependency>
        <groupId>org.apache.storm</groupId>
        <artifactId>storm-kafka</artifactId>
        <version>0.9.2-incubating</version>
    </dependency>

    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka_2.10</artifactId>
        <version>0.8.1.1</version>
        <exclusions>
            <exclusion>
                <groupId>org.apache.zookeeper</groupId>
                <artifactId>zookeeper</artifactId>
            </exclusion>
            <exclusion>
                <groupId>log4j</groupId>
                <artifactId>log4j</artifactId>
            </exclusion>
        </exclusions>
    </dependency>
    <!-- END Hadoop Hbase Storm Kafka dependencies-->

    <!-- Project Dependencies -->
    <dependency>
        <groupId>fr.aid.cim</groupId>
        <artifactId>commons</artifactId>
        <version>${project.version}</version>
    </dependency>

    <dependency>
        <groupId>fr.aid.cim</groupId>
        <artifactId>storm-hazelcast</artifactId>
        <version>${project.version}</version>
    </dependency>
    <!-- END Project Dependencies -->

    <!-- Integration TEST Dependencies -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-core</artifactId>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <type>test-jar</type>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase</artifactId>
        <version>${org.apache.hbase.version}</version>
        <type>test-jar</type>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>${org.apache.hadoop.version}</version>
        <type>test-jar</type>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>${org.apache.hadoop.version}</version>
        <scope>test</scope>
    </dependency>

    <dependency>
        <groupId>org.apache.zookeeper</groupId>
        <artifactId>zookeeper</artifactId>
        <scope>test</scope>
    </dependency>
    <!-- END TEST Dependencies -->

    <!-- Other Dependencies -->
    <dependency>
        <groupId>org.json</groupId>
        <artifactId>json</artifactId>
        <version>20140107</version>
    </dependency>

    <dependency>
        <groupId>com.google.guava</groupId>
        <artifactId>guava</artifactId>
        <version>11.0</version>
    </dependency>

    <dependency>
        <groupId>com.fasterxml.jackson.core</groupId>
        <artifactId>jackson-databind</artifactId>
    </dependency>
    <!-- END Other Dependencies -->

</dependencies>

<build>
    <plugins>
        <plugin>
            <artifactId>maven-assembly-plugin</artifactId>
            <configuration>
                <descriptorRefs>
                    <descriptorRef>jar-with-dependencies</descriptorRef>
                </descriptorRefs>
            </configuration>
            <executions>
                <execution>
                    <id>make-assembly</id>
                    <phase>package</phase>
                    <goals>
                        <goal>single</goal>
                    </goals>
                </execution>
            </executions>
        </plugin>
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-dependency-plugin</artifactId>
        </plugin>
    </plugins>
</build>

<profiles>
    <profile>
        <id>local</id>
        <activation>
            <activeByDefault>true</activeByDefault>
        </activation>
        <dependencies>
            <dependency>
                <groupId>org.apache.storm</groupId>
                <artifactId>storm-core</artifactId>
            </dependency>
            <dependency>
                <groupId>org.apache.zookeeper</groupId>
                <artifactId>zookeeper</artifactId>
            </dependency>
        </dependencies>
    </profile>
    <profile>
        <id>cluster</id>
        <dependencies>
            <dependency>
                <groupId>org.apache.storm</groupId>
                <artifactId>storm-core</artifactId>
                <scope>provided</scope>
            </dependency>
            <dependency>
                <groupId>org.apache.zookeeper</groupId>
                <artifactId>zookeeper</artifactId>
                <scope>provided</scope>
            </dependency>
        </dependencies>
    </profile>

</profiles>
Any ideas? Thanks.

Try adding hadoop-hdfs as a dependency with compile scope:

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>${org.apache.hadoop.version}</version>
    </dependency>
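
The DistributedFileSystem class that backs the hdfs:// scheme ships in the hadoop-hdfs artifact, so it has to end up inside the assembled topology jar. As a quick sanity check (a minimal sketch of mine, not part of the original answer), you can confirm the class is actually on the classpath the topology runs with:

    public class HdfsOnClasspathCheck {
        public static void main(String[] args) throws ClassNotFoundException {
            // Throws ClassNotFoundException if hadoop-hdfs is missing from the
            // classpath (e.g. not packaged into the jar-with-dependencies).
            Class.forName("org.apache.hadoop.hdfs.DistributedFileSystem");
            System.out.println("hadoop-hdfs is on the classpath");
        }
    }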
Also try adding the following code:

 import org.apache.hadoop.conf.Configuration;

 Configuration conf = new Configuration();
 conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
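
For illustration only, here is a minimal, self-contained sketch of that setting in context; the NameNode URI hdfs://namenode:8020/ and the extra fs.file.impl mapping are my assumptions, not from the original answer:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import java.net.URI;

    public class HdfsSchemeSmokeTest {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Map the schemes to their implementations explicitly, in case the
            // META-INF/services FileSystem entries were lost when building the fat jar.
            conf.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
            conf.set("fs.file.impl", "org.apache.hadoop.fs.LocalFileSystem");
            // Placeholder NameNode address; replace with your cluster's.
            FileSystem fs = FileSystem.get(URI.create("hdfs://namenode:8020/"), conf);
            System.out.println("Resolved FileSystem: " + fs.getClass().getName());
        }
    }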
Perhaps the following link will give the answer.