Spring Integration：通过 SFTP 下载并处理文件
我使用了下面的建议，能够从远程服务器下载文件。如果我想在下载完成后处理文件，应该如何继续？一旦文件下载到本地系统，有人能告诉我如何/在哪里获得控制权吗？相关配置见 ApplicationContext.xml 和 Quartz 作业。实际上，我想解析下载的文件并将它们插入数据库表中。回答：由于 SFTP 适配器轮询远程目录、将这些文件存储到本地目录，并将它们作为 Message 的 payload（File 对象）逐个发送，因此无需停止流程——可以在下游流中为每个文件执行自定义处理，例如按需提供进一步的自定义解析器。
最后，把 payload 中的数据存入数据库就足够了。请更具体一些，并分享您现有的配置。——嗨 Artem，我已经更新了我的问题：我想解析下载的文件并将它们插入数据库表。谢谢您的快速回复。我需要解析 6 个不同的文件（5 个 .txt 和 1 个 .csv），并分别插入 6 个不同的表。那么，我如何把每个文件存储到不同的表中呢？——针对这一点，可以使用路由器（router）组件，它根据消息内容确定目标目的地；每种文件类型对应一个目标表。——我的 CSV 文件包含带列标题的数据，我在把 CSV 转换为 POJO 时遇到了困难。——可以先用硬编码数据验证入库流程；CSV 转换问题建议另开新问题，也可以看看 Spring Batch 的相关功能。
<?xml version="1.0" encoding="UTF-8"?>
<!--
  Spring Integration context: polls a remote SFTP directory on demand,
  downloads matching files to a local directory, and publishes each one as a
  Message<File> on "receiveChannel". Polling is driven by a one-shot custom
  trigger (ReportsTrigger) that a clustered Quartz cron job re-arms on each run.
-->
<beans xmlns="http://www.springframework.org/schema/beans"
xmlns:context="http://www.springframework.org/schema/context"
xmlns:int="http://www.springframework.org/schema/integration"
xmlns:int-sftp="http://www.springframework.org/schema/integration/sftp"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://www.springframework.org/schema/beans http://www.springframework.org/schema/beans/spring-beans.xsd
http://www.springframework.org/schema/integration http://www.springframework.org/schema/integration/spring-integration.xsd
http://www.springframework.org/schema/integration/sftp http://www.springframework.org/schema/integration/sftp/spring-integration-sftp.xsd
http://www.springframework.org/schema/context http://www.springframework.org/schema/context/spring-context-4.1.xsd">
<context:component-scan base-package="com.reports"/>
<context:property-placeholder location="classpath:reports.properties"/>
<!-- SFTP connection: key-based auth; host/port/user come from reports.properties. -->
<bean class="org.springframework.integration.sftp.session.DefaultSftpSessionFactory"
id="sftpSessionFactory">
<property name="host" value="${sftp.host}"/>
<property name="privateKey" value="${sftp.key.location}"/>
<property name="port" value="${sftp.port}"/>
<property name="user" value="${sftp.user}"/>
</bean>
<!--
  Inbound adapter: auto-startup="false" so it only runs when the Quartz job
  starts it; "customfilter" selects remote files (by today's date — see
  ReportsFileListFilter); "acceptAll" as local-filter means already-downloaded
  files are re-emitted on every poll rather than remembered.
-->
<int-sftp:inbound-channel-adapter
id="sftpInboundAdapter"
auto-startup="false"
auto-create-local-directory="false"
channel="receiveChannel"
delete-remote-files="false"
filter = "customfilter"
remote-file-separator="/"
local-directory="${local.directory}"
remote-directory="${remote.directory}"
session-factory="sftpSessionFactory"
preserve-timestamp="true"
local-filter="acceptAll">
<!-- One-shot trigger: fires immediately once, then returns null until reset. -->
<int:poller trigger="sftp-trigger"/>
</int-sftp:inbound-channel-adapter>
<!-- Downloaded File messages queue here; a downstream consumer must poll this channel. -->
<int:channel id="receiveChannel">
<int:queue/>
</int:channel>
<bean id="customfilter" class="com.reports.filters.ReportsFileListFilter"/>
<bean id="acceptAll" class="org.springframework.integration.file.filters.AcceptAllFileListFilter" />
<bean id="sftp-trigger" class="com.reports.jobs.ReportsTrigger"/>
<!-- Quartz job -->
<!-- (1) Trigger - This is used in the Scheduler below -->
<bean id="ReportCronTrigger" class="org.springframework.scheduling.quartz.CronTriggerFactoryBean">
<property name="jobDetail" ref="ReportsJob"/>
<property name="cronExpression" value="${cron.expression}"/>
</bean>
<!-- (2) Job -->
<bean name="ReportsJob" class="org.springframework.scheduling.quartz.JobDetailFactoryBean">
<property name="jobClass" value="com.reports.jobs.ReportsJob"/>
<property name="name" value="ReportsJob" />
<property name="durability" value="true" />
</bean>
<!-- (3) Scheduler -->
<!--
  Clustered JDBC-backed scheduler. schedulerContextAsMap entries are injected
  into ReportsJob by property name (inputEndpoint / inputEndpointTrigger),
  letting the Quartz job restart the SFTP adapter on each cron firing.
-->
<bean class="org.springframework.scheduling.quartz.SchedulerFactoryBean">
<property name="schedulerContextAsMap">
<map>
<entry key="inputEndpoint"><ref bean="sftpInboundAdapter" /></entry>
<entry key="inputEndpointTrigger"><ref bean="sftp-trigger" /></entry>
</map>
</property>
<property name="dataSource" ref="dataSource" />
<property name="overwriteExistingJobs" value="true" />
<property name="quartzProperties">
<props>
<prop key="org.quartz.scheduler.instanceName">ReportsBatchScheduler</prop>
<prop key="org.quartz.scheduler.instanceId">AUTO</prop>
<prop key="org.quartz.jobStore.misfireThreshold">60000</prop>
<prop key="org.quartz.jobStore.class">org.quartz.impl.jdbcjobstore.JobStoreTX</prop>
<prop key="org.quartz.jobStore.driverDelegateClass">org.quartz.impl.jdbcjobstore.StdJDBCDelegate</prop>
<prop key="org.quartz.jobStore.tablePrefix">dbo.QRTZ_</prop>
<prop key="org.quartz.jobStore.isClustered">true</prop>
<prop key="org.quartz.threadPool.class">org.quartz.simpl.SimpleThreadPool</prop>
<prop key="org.quartz.threadPool.threadCount">1</prop>
<prop key="org.quartz.threadPool.threadPriority">5</prop>
<!-- SQL Server-specific lock query (UPDLOCK hint) for clustered row locking. -->
<prop key="org.quartz.jobStore.selectWithLockSQL">SELECT * FROM {0}LOCKS UPDLOCK WHERE LOCK_NAME = ?</prop>
</props>
</property>
<property name="triggers">
<list>
<ref bean="ReportCronTrigger" />
</list>
</property>
</bean>
<!-- JNDI DataSource (SQL Server) shared by the Quartz job store. -->
<bean id="dataSource" class="org.springframework.jndi.JndiObjectFactoryBean">
<property name="jndiName"><value>java:/MSSQLDS_APP</value></property>
</bean>
</beans>
/**
 * Remote-file filter for the SFTP inbound adapter: accepts only entries whose
 * file name contains today's date in {@code yyyyMMdd} form, skipping the
 * "." and ".." directory entries.
 *
 * <p>Fixes over the previous version:
 * <ul>
 *   <li>{@code SimpleDateFormat} was held in a shared instance field but is
 *       not thread-safe; replaced with the immutable, thread-safe
 *       {@code java.time.format.DateTimeFormatter}.</li>
 *   <li>The {@code file != null} guard came <em>after</em>
 *       {@code file.getFilename()} had already been dereferenced, so it could
 *       never prevent an NPE; the null check now comes first.</li>
 * </ul>
 */
public class ReportsFileListFilter extends AbstractFileListFilter<LsEntry> {
// Thread-safe replacement for SimpleDateFormat; safe as a shared constant.
private static final java.time.format.DateTimeFormatter DATE_FORMAT =
        java.time.format.DateTimeFormatter.ofPattern("yyyyMMdd");
@Override
protected boolean accept(LsEntry file) {
    // Guard before any dereference — the old code checked null too late.
    if (file == null) {
        return false;
    }
    String name = file.getFilename();
    if (".".equals(name) || "..".equals(name)) {
        return false;
    }
    String today = java.time.LocalDate.now().format(DATE_FORMAT);
    if (name != null && name.contains(today)) {
        Log.logDebug(ReportsFileListFilter.class, "Downloading File :: "+name);
        return true;
    }
    return false;
}
}
/**
 * One-shot poller trigger: after each {@link #reset()} it fires exactly once
 * (immediately), then returns {@code null} so the poller goes idle until the
 * Quartz job re-arms it.
 *
 * <p>Fix: {@code done} is written by the Quartz job thread ({@code reset()})
 * and read/written by the Spring Integration poller thread
 * ({@code nextExecutionTime()}). Without {@code volatile} the poller could see
 * a stale value and either never fire or fire repeatedly; {@code volatile}
 * guarantees cross-thread visibility of the flag.
 */
public class ReportsTrigger implements Trigger {
// volatile: flipped by the Quartz thread, observed by the poller thread.
volatile boolean done;
/**
 * @return "now" on the first call after a reset, then {@code null}
 *         (no further executions) until {@link #reset()} is called again.
 */
public Date nextExecutionTime(TriggerContext triggerContext) {
    if (done) {
        return null;
    }
    done = true;
    Log.logDebug(ReportsTrigger.class, "Job started for date :: "+new Date());
    return new Date();
}
/** Re-arms the trigger; called by ReportsJob before restarting the endpoint. */
public void reset() {
    Log.logDebug(ReportsTrigger.class, "Reset is called");
    done = false;
}
}
/**
 * Quartz job that kicks off one SFTP polling cycle per cron firing.
 *
 * <p>Both collaborators are injected by SchedulerFactoryBean from
 * {@code schedulerContextAsMap} — the map keys ("inputEndpoint",
 * "inputEndpointTrigger") must match these setter-derived property names.
 */
public class ReportsJob extends QuartzJobBean {
// The sftpInboundAdapter endpoint (auto-startup="false" in the XML).
private AbstractEndpoint inputEndpoint;
// The one-shot trigger driving the endpoint's poller.
private ReportsTrigger inputEndpointTrigger;
public void setInputEndpoint(final AbstractEndpoint pInputEndpoint) {
this.inputEndpoint = pInputEndpoint;
}
public void setInputEndpointTrigger(final ReportsTrigger pInputEndpointTrigger) {
this.inputEndpointTrigger = pInputEndpointTrigger;
}
@Override
protected void executeInternal(final JobExecutionContext pParamJobExecutionContext)
throws JobExecutionException {
// Order matters: stop the endpoint so the poller is quiescent, re-arm the
// one-shot trigger, then start — the restarted poller fires exactly once.
inputEndpoint.stop();
inputEndpointTrigger.reset();
inputEndpoint.start();
Log.logDebug(ReportsJob.class, "Job runnnnnnning");
}
}