Java 如何从Spring集成文件中读取嵌套的txt文件

Java 如何从Spring集成文件中读取嵌套的txt文件,java,spring,spring-integration,spring-integration-sftp,Java,Spring,Spring Integration,Spring Integration Sftp。我有如下配置文件(完整代码见下文)。

我有如下配置文件:-

/**
 * Spring Integration configuration that polls a directory for files, splits each
 * file into lines, and publishes the lines to the Spring Cloud Stream {@code Source}
 * output binding with a random UUID as the Kafka message key.
 *
 * NOTE(review): the inbound adapter below relies on the default DefaultDirectoryScanner,
 * which does NOT descend into subdirectories, so nested txt files are never picked up.
 * To read nested files, configure the adapter with
 * {@code .scanner(new RecursiveDirectoryScanner())}; when an external scanner is used,
 * the filter/preventDuplicates options must be set on the scanner itself, not on the
 * adapter spec (the framework rejects that combination at startup) — confirm against
 * the Spring Integration file-support documentation.
 */
@EnableBinding(Source.class)
@Configuration
@EnableIntegrationManagement
public class FileSourceConfig {

    private static final Logger LOGGER = LoggerFactory.getLogger(FileSourceConfig.class);

    // Externalized settings: directory to poll, poll period, duplicate handling.
    private FileSourceProperties properties;

    // Spring Cloud Stream output binding that the file lines are sent to.
    Source source;

    public FileSourceConfig(FileSourceProperties properties, Source source) {
        this.properties = properties;
        this.source = source;
    }

    /** File-list filter applied to the inbound adapter (project-specific regex filter). */
    @Bean
    public DynamicRegexPatternFilter getFilter(){
        return new DynamicRegexPatternFilter();
    }


    /** Channel intended to carry the individual lines produced by the FileSplitter. */
    @Bean
    public MessageChannel linesChannel() {
        return new DirectChannel();
    }

    /* To poll the file for every given TimeUnit.SECONDS */
    @Bean(name = { "defaultPoller", PollerMetadata.DEFAULT_POLLER })
    public PollerMetadata defaultPoller() {
        PollerMetadata pollerMetadata = new PollerMetadata();
        pollerMetadata.setTrigger(new PeriodicTrigger(properties.getPollPeriod(), TimeUnit.SECONDS));
        return pollerMetadata;
    }

    /**
     * Main flow: poll the configured directory, split each file into lines,
     * stamp a random UUID message key, then route by payload type —
     * String lines to "lines.input", FileSplitter START/END markers to "markers.input".
     */
    @Bean
    public IntegrationFlow fileInboundChannelFlow() {
        // NOTE(review): no scanner is set here, so the default (non-recursive)
        // directory scanner is used; nested txt files are not read.
        FileInboundChannelAdapterSpec messageSourceSpec = Files
                .inboundAdapter(Paths.get(this.properties.getDirectory()).toFile());

        messageSourceSpec = messageSourceSpec.filter(getFilter());

        //messageSourceSpec.regexFilter(this.properties.getFilenameRegex());
        messageSourceSpec.preventDuplicates(this.properties.isPreventDuplicates());

        //Setting random UUID as messagekey
        // FileSplitter(true, true): iterator mode, and emit FileMarker START/END records.
        IntegrationFlowBuilder flowBuilder = IntegrationFlows.from(messageSourceSpec)
                .split(new FileSplitter(true, true))
                .enrichHeaders(h -> h.headerExpression(KafkaHeaders.MESSAGE_KEY,"T(java.util.UUID).randomUUID().toString()"));

        // Route on the runtime payload class: file markers vs. actual line content.
        return flowBuilder.<Object, Class<?>>route(Object::getClass,
                m -> m.channelMapping(FileSplitter.FileMarker.class, "markers.input").channelMapping(String.class,
                        "lines.input"))
                .get();
    }

    /** Drops the file_originalFile header and forwards line messages to the Source output. */
    @Bean
    public IntegrationFlow lines() {
        return f -> f.headerFilter("file_originalFile") .channel(source.output());
    }


    /** Logs failed sends at ERROR level on the "error" logging category. */
    @Bean
    public IntegrationFlow logErrors() {
        return f -> f.log(LoggingHandler.Level.ERROR, "error", m -> "Error in sending message :"+m.getPayload());
    }

    /**
     * On the FileSplitter END marker, invokes the "archive" handler with the message headers.
     * NOTE(review): afterAdvice() is referenced but not defined in this snippet —
     * it must be provided elsewhere in the class/project.
     */
    @Bean
    public IntegrationFlow markers() {
        return f -> f.log().<FileSplitter.FileMarker>filter(m -> m.getMark().equals(FileSplitter.FileMarker.Mark.END))
                .handle(m -> m.getHeaders(), e -> e.id("archive").advice(afterAdvice()));
    }}
@EnableBinding(Source.class)
@Configuration
@EnableIntegrationManagement
public class FileSourceConfig {
    private static final Logger LOGGER = LoggerFactory.getLogger(FileSourceConfig.class);
    private FileSourceProperties properties;
    Source source;
    public FileSourceConfig(FileSourceProperties properties, Source source) {
        this.properties = properties;
        this.source = source;
    }
    @Bean
    public DynamicRegexPatternFilter getFilter() {
        return new DynamicRegexPatternFilter();
    }
    @Bean
    public MessageChannel linesChannel() {
        return new DirectChannel();
    }
    /* 按给定的 TimeUnit.SECONDS 周期轮询文件 */
    @Bean(name = { "defaultPoller", PollerMetadata.DEFAULT_POLLER })
    public PollerMetadata defaultPoller() {
        PollerMetadata pollerMetadata = new PollerMetadata();
        pollerMetadata.setTrigger(new PeriodicTrigger(properties.getPollPeriod(), TimeUnit.SECONDS));
        return pollerMetadata;
    }
    @Bean
    public IntegrationFlow fileInboundChannelFlow() {
        FileInboundChannelAdapterSpec messageSourceSpec = Files
                .inboundAdapter(Paths.get(this.properties.getDirectory()).toFile());
        messageSourceSpec = messageSourceSpec.filter(getFilter());
        //messageSourceSpec.regexFilter(this.properties.getFilenameRegex());
        messageSourceSpec.preventDuplicates(this.properties.isPreventDuplicates());
        // 将随机 UUID 设置为消息键
        IntegrationFlowBuilder flowBuilder = IntegrationFlows.from(messageSourceSpec)
                .split(new FileSplitter(true, true))
                .enrichHeaders(h -> h.headerExpression(KafkaHeaders.MESSAGE_KEY, "T(java.util.UUID).randomUUID().toString()"));

        // …(后续 route 等代码与上文完全相同,此处从略)

回答:请参阅 RecursiveDirectoryScanner。默认情况下,FileReadingMessageSource 附带的是 DefaultDirectoryScanner,它不会递归扫描子目录。因此,您只需用 RecursiveDirectoryScanner 来配置 messageSourceSpec:
FileInboundChannelAdapterSpec messageSourceSpec =
                Files.inboundAdapter(Paths.get(this.properties.getDirectory()).toFile())
                        .scanner(new RecursiveDirectoryScanner());

另请参阅文档:

请参阅 RecursiveDirectoryScanner。默认情况下,FileReadingMessageSource 附带一个 DefaultDirectoryScanner。因此,您只需使用 RecursiveDirectoryScanner 配置 messageSourceSpec:

FileInboundChannelAdapterSpec messageSourceSpec =
                Files.inboundAdapter(Paths.get(this.properties.getDirectory()).toFile())
                        .scanner(new RecursiveDirectoryScanner());

另请参阅文档:

评论:您好,我遇到如下错误:——"当使用外部扫描器(scanner)时,不应再设置 'filter' 和 'locker' 选项;请改为在外部 DirectoryScanner 上设置这些选项。"
答复:那么为什么不遵循该建议呢?另外,我们可以引入一个方便的 recursive() 选项,以提供更好的最终使用体验;看起来我们对该扫描器并没有强制的 bean 要求……以下是修复方法: