Warning: file_get_contents(/data/phpspider/zhask/data//catemap/2/spring/14.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
调用init方法失败;嵌套异常为java.lang.IllegalArgumentException:命名空间属性是必需的_Java_Spring_Spring Boot - Fatal编程技术网

调用init方法失败;嵌套异常为java.lang.IllegalArgumentException:命名空间属性是必需的

调用init方法失败;嵌套异常为java.lang.IllegalArgumentException:命名空间属性是必需的,java,spring,spring-boot,Java,Spring,Spring Boot,我正在尝试读取和写入SpringDataHadoop文档中提到的拼花地板文件,但出现以下错误 创建在中定义了名称为“datasetRepositoryFactory”的bean时出错 类路径资源[com/example/demo/DatasetConfig.class]:调用 初始化方法失败;嵌套异常是 java.lang.IllegalArgumentException:命名空间属性是必需的 spring boot中的参考项目 将数据写入拼花地板弹簧数据hadoop- DataSetConf

我正在尝试读取和写入 Spring Data Hadoop 文档中提到的 Parquet 文件，但出现以下错误

创建在中定义了名称为“datasetRepositoryFactory”的bean时出错 类路径资源[com/example/demo/DatasetConfig.class]:调用 初始化方法失败;嵌套异常是 java.lang.IllegalArgumentException:命名空间属性是必需的

  • spring boot中的参考项目

  • 使用 Spring Data Hadoop 将数据写入 Parquet
  • DataSetConfig.java


    默认情况下,
    DatasetRepositoryFactory
    没有可用的
    命名空间
    ,因此设置
    命名空间

    public void setNamespace(java.lang.String namespace)

    要使用的命名空间。默认为无命名空间（在 Kite SDK API 中对应字面值 "default"）

        // Hadoop/Kite dataset wiring. Without an explicit namespace,
        // DatasetRepositoryFactory fails during bean initialization with
        // "IllegalArgumentException: The namespace property is required"
        // (the exact error reported above).
        @Configuration
        @ImportResource("hadoop-context.xml")
        public class DatasetConfig {

            private @Autowired org.apache.hadoop.conf.Configuration hadoopConfiguration;

            /**
             * Factory for Kite dataset repositories rooted at /tmp on HDFS.
             *
             * FIX: setNamespace("default") is mandatory — the original omitted
             * it, which is what caused the "namespace property is required"
             * startup failure. "default" matches the Kite SDK's own default.
             */
            @Bean
            public DatasetRepositoryFactory datasetRepositoryFactory() {
                DatasetRepositoryFactory factory = new DatasetRepositoryFactory();
                factory.setConf(hadoopConfiguration);
                factory.setBasePath("/tmp");
                factory.setNamespace("default");
                return factory;
            }

            /** Writer that persists FileInfo POJOs per the dataset definition below. */
            @Bean
            public DataStoreWriter<FileInfo> dataStoreWriter() {
                return new AvroPojoDatasetStoreWriter<FileInfo>(FileInfo.class,
                        datasetRepositoryFactory(), fileInfoDatasetDefinition());
            }

            /** Template used for read/count operations over the FileInfo dataset. */
            @Bean
            public DatasetOperations datasetOperations() {
                DatasetTemplate datasetOperations = new DatasetTemplate();
                datasetOperations.setDatasetDefinitions(Arrays.asList(fileInfoDatasetDefinition()));
                datasetOperations.setDatasetRepositoryFactory(datasetRepositoryFactory());
                return datasetOperations;
            }

            /** Dataset definition: Parquet storage format, null values disallowed. */
            @Bean
            public DatasetDefinition fileInfoDatasetDefinition() {
                DatasetDefinition definition = new DatasetDefinition();
                definition.setFormat(Formats.PARQUET.getName());
                definition.setTargetClass(FileInfo.class);
                definition.setAllowNullValues(false);
                return definition;
            }
        }
    
    
    @ComponentScan
    @EnableAutoConfiguration
    public class ParquetReaderApplication implements CommandLineRunner {
    
        private DatasetOperations datasetOperations;
    
        private DataStoreWriter<FileInfo> writer;
    
        private long count;
    
        /** Injects the DatasetOperations bean used to count FileInfo entries. */
        @Autowired
        public  void setDatasetOperations(DatasetOperations datasetOperations) {
            this.datasetOperations = datasetOperations;
        }
    
    
          /**
           * Injects the writer used to persist FileInfo records.
           *
           * FIX: parameter was the raw type DataStoreWriter, producing an
           * unchecked assignment to the DataStoreWriter&lt;FileInfo&gt; field.
           * Typing it explicitly is erasure-compatible (same binary signature),
           * so Spring injection and all callers are unaffected.
           */
          @Autowired
          public void setDataStoreWriter(DataStoreWriter<FileInfo> dataStoreWriter) {
              this.writer = dataStoreWriter;
          }
    
        /** Boots the Spring application; dataset beans come from DatasetConfig. */
        public static  void main(String[] args) {
            SpringApplication.run(ParquetReaderApplication.class, args);
        }
    
        /**
         * Walks the user's home directory writing one FileInfo record per file,
         * closes the writer, then reports how many entries were persisted.
         * IOExceptions during the walk are rethrown as StoreException.
         */
        @Override
        public void run(String... strings) {
            final String homeDir = System.getProperty("user.home");
            System.out.println("Processing " + homeDir + " ...");
            final File root = new File(homeDir);
            try {
                processFile(root);
            } catch (IOException e) {
                // Surface as an unchecked store failure; writer is closed either way.
                throw new StoreException("Error writing FileInfo", e);
            } finally {
                close();
            }
            countFileInfoEntries();
            System.out.println("Done!");
        }
    
        /**
         * Recursively walks {@code file}: descends into directories and writes a
         * FileInfo record for every regular file, logging progress every 10000
         * records.
         *
         * FIX: File.listFiles() returns null when the directory cannot be read
         * (permissions, I/O error); the original dereferenced it unconditionally
         * and could throw NullPointerException. Unreadable directories are now
         * skipped.
         *
         * NOTE(review): file.length() is narrowed to int to fit the FileInfo
         * constructor — files over 2 GiB would overflow; confirm whether
         * FileInfo can accept a long.
         */
        private  void processFile(File file) throws IOException {
            if (file.isDirectory()) {
                File[] children = file.listFiles();
                if (children == null) {
                    return; // unreadable directory: skip rather than NPE
                }
                for (File child : children) {
                    processFile(child);
                }
                return;
            }
            if (++count % 10000 == 0) {
                System.out.println("Writing " + count + " ...");
            }
            FileInfo fileInfo = new FileInfo(file.getName(), file.getParent(),
                    (int) file.length(), file.lastModified());
            writer.write(fileInfo);
        }
    
    hdfs dfs -ls /tmp/*
    Found 2 items
    drwxr-xr-x   - spring supergroup          0 2014-06-09 17:09 /user/spring/fileinfo/.metadata
    -rw-r--r--   3 spring supergroup   13824695 2014-06-09 17:10 /user/spring/fileinfo/6876f250-010a-404a-b8c8-0ce1ee759206.avro
    
    @Bean 
    public DatasetRepositoryFactory datasetRepositoryFactory() {
          DatasetRepositoryFactory datasetRepositoryFactory = new
          DatasetRepositoryFactory();
          datasetRepositoryFactory.setConf(hadoopConfiguration);
          datasetRepositoryFactory.setBasePath("/tmp"); 
          datasetRepositoryFactory.setNamespace("default"); 
          return datasetRepositoryFactory;