Java autowired variable is null


In my application I have an autowired dataSource object, and it is coming back null. The object is used in a DAO class.

Application flow: 1. OAConsumer.java starts executing, 2. it calls Service.java, 3. insertOffset is invoked in KafkaOffsetDAOImpl.

The IDE used is Eclipse Oxygen. The code and error details are below.

Error shown:

java.lang.NullPointerException
    at com.oa.dao.KafkaOffsetDAOImpl.insertOffset(KafkaOffsetDAOImpl.java:36)
    at com.oa.services.Service.savePayload(Service.java:54)
    at com.oa.consumer.OAConsumer.orderConsumer(OAConsumer.java:30)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
    at java.lang.reflect.Method.invoke(Unknown Source)
    at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.doInvoke(InvocableHandlerMethod.java:170)
    at org.springframework.messaging.handler.invocation.InvocableHandlerMethod.invoke(InvocableHandlerMethod.java:120)
    at org.springframework.kafka.listener.adapter.HandlerAdapter.invoke(HandlerAdapter.java:48)
    at org.springframework.kafka.listener.adapter.MessagingMessageListenerAdapter.invokeHandler(MessagingMessageListenerAdapter.java:283)
    at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.invoke(BatchMessagingMessageListenerAdapter.java:146)
    at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:138)
    at org.springframework.kafka.listener.adapter.BatchMessagingMessageListenerAdapter.onMessage(BatchMessagingMessageListenerAdapter.java:59)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchOnMessage(KafkaMessageListenerContainer.java:1052)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchOnMessage(KafkaMessageListenerContainer.java:1036)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.doInvokeBatchListener(KafkaMessageListenerContainer.java:998)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeBatchListener(KafkaMessageListenerContainer.java:938)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.invokeListener(KafkaMessageListenerContainer.java:921)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.pollAndInvoke(KafkaMessageListenerContainer.java:740)
    at org.springframework.kafka.listener.KafkaMessageListenerContainer$ListenerConsumer.run(KafkaMessageListenerContainer.java:689)
    at java.util.concurrent.Executors$RunnableAdapter.call(Unknown Source)
    at java.util.concurrent.FutureTask.run(Unknown Source)
    at java.lang.Thread.run(Unknown Source)
KafkaOffsetDAOImpl.java

package com.oa.dao;

import java.sql.Connection;
import java.util.HashMap;
import java.util.Map;

import javax.sql.DataSource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
import org.springframework.jdbc.core.simple.SimpleJdbcCall;
import org.springframework.jdbc.datasource.DriverManagerDataSource;

import com.oa.model.KafkaOffsetRecord;


public class KafkaOffsetDAOImpl implements KafkaOffsetDAO {

//  @Autowired
//  JdbcTemplate jdbcTemplate;
    
    @Autowired
    DataSource dataSource;

    @Override
    public boolean insertOffset(KafkaOffsetRecord offsetRecord) {

        boolean status = false;

        // NullPointerException from the stack trace (KafkaOffsetDAOImpl.java:36) occurs here:
        // dataSource was never injected into this instance.
        try (Connection connection = dataSource.getConnection()) {
            if (connection.isValid(10000))
                status = true;
        } catch (Exception e) {
            e.printStackTrace();
        }

        // return the computed status instead of a hard-coded false
        return status;
    }
}

OracleConfig.java

package com.oa.config;

import java.sql.SQLException;

import javax.sql.DataSource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.jdbc.DataSourceBuilder;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.core.JdbcTemplate;

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;


@Configuration
public class OracleConfig {
    

    private static final Logger logger = LoggerFactory.getLogger(OracleConfig.class);

    @Value("${spring.datasource.url}")
    private String url;

    @Value("${spring.datasource.username}")
    private String username;

    @Value("${spring.datasource.password}")
    private String password;

    @Value("${spring.datasource.driver-class-name}")
    private String driverClassName;

    @Value("${spring.datasource.maximumPoolSize}")
    private int maxPoolSize;
    
    @Value("${spring.datasource.minimumIdle}")
    private int minIdle;
    
    @Value("${spring.datasource.connectionTimeout}")
    private long connTimeOut;
    
    @Value("${spring.datasource.idleTimeout}")
    private long idleTimeout;
//
    /**
     * See <a href="http://www.baeldung.com/hikaricp">http://www.baeldung.com/hikaricp</a>
     */
    @Bean
    public HikariDataSource dataSource() {

        HikariConfig hikariConfig = new HikariConfig();
        hikariConfig.setDriverClassName(driverClassName);
        hikariConfig.setUsername(username);
        hikariConfig.setPassword(password);
        hikariConfig.setJdbcUrl(url);
        hikariConfig.setMaximumPoolSize(maxPoolSize);
        hikariConfig.setConnectionTimeout(connTimeOut);
        hikariConfig.setMinimumIdle(minIdle);
        hikariConfig.setIdleTimeout(idleTimeout);

        HikariDataSource hikariDataSource = new HikariDataSource(hikariConfig);

        if (hikariDataSource.isRunning()) {
            logger.info("------> Oracle DB connection created successfully.");
        }

        return hikariDataSource;

    }
}

When using constructor injection, use this:

public class KafkaOffsetDAOImpl implements KafkaOffsetDAO {

    private final DataSource dataSource;

    @Autowired
    KafkaOffsetDAOImpl(final DataSource dataSource) {
        this.dataSource = dataSource;
    }
}

This way you can be sure that Spring actually created the autowired field and injected it.
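
Constructor injection alone only helps if Spring is the one creating the DAO. As the comments below point out, KafkaOffsetDAOImpl also has to be registered as a bean. A minimal sketch, assuming component scanning covers com.oa.dao (the @Repository choice is mine, not from the original post):

package com.oa.dao;

import javax.sql.DataSource;

import org.springframework.stereotype.Repository;

// Sketch only: a stereotype annotation registers the DAO as a Spring bean, which is what
// actually makes the DataSource injectable. Assumes com.oa.dao is under the package scanned
// by @SpringBootApplication.
@Repository
public class KafkaOffsetDAOImpl implements KafkaOffsetDAO {

    private final DataSource dataSource;

    // With a single constructor, recent Spring versions inject it even without @Autowired.
    public KafkaOffsetDAOImpl(final DataSource dataSource) {
        this.dataSource = dataSource;
    }
}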

Comments:

Did you just upload some code straight from your office? Cisco is not going to like that.

Where is the DataSource actually defined? These classes are missing annotations: Service, OAConsumer, KafkaOffsetDAOImpl. For the DataSource to be injected, this KafkaOffsetDAOImpl has to be a bean.

You can only autowire fields into classes that Spring itself manages, so your KafkaOffsetDAOImpl instance must also be a Spring bean.

No, you can't be sure of that either. The problem is that he calls new KafkaOffsetDAOImpl(..) inside the savePayload() method, so no autowiring happens at all. The only benefit of constructor injection in this case is awareness, because he would at least get a compile error in Service.
Service.java

package com.oa.services;

import java.io.IOException;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.oa.dao.KafkaOffsetDAO;
import com.oa.dao.KafkaOffsetDAOImpl;
import com.oa.model.KafkaOffsetRecord;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;

public class Service {

    private static final Logger LOGGER = LoggerFactory.getLogger(Service.class);

    public Boolean checkOAEligible(String message, long offset, String topic) throws IOException {

        
        return true;
    }

    public Boolean savePayload(ConsumerRecord<String, String> record) {
        
        LOGGER.debug("START:: saveOrderPayload ::offset= {} :: topic= {}", record.offset(), record.topic());
        
        Boolean status = false;
        KafkaOffsetDAO dao = new KafkaOffsetDAOImpl();
        
        try {
            if (checkOAEligible(record.value(), record.offset(), record.topic())) {

                KafkaOffsetRecord kafkaOffsetRecord= new KafkaOffsetRecord(record);
                
                status = dao.insertOffset(kafkaOffsetRecord);
                                

            }
        } catch (IOException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }

        LOGGER.debug("END:: saveOrderPayload :: offset={} :: topic={} , status={}", record.offset(), record.topic(), status);
        return status;

    }
}
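
The savePayload method above creates the DAO with new KafkaOffsetDAOImpl(), so the instance it uses is never managed by Spring and nothing gets injected into it. A minimal sketch of the injected alternative, assuming Service is itself registered as a bean and the DAO is annotated as shown earlier:

package com.oa.services;

import org.springframework.stereotype.Component;

import com.oa.dao.KafkaOffsetDAO;

// Sketch only: let Spring hand the DAO to the service instead of constructing it with new.
// @Component is used because a plain @Service annotation would clash with the class name Service;
// it could also be written fully qualified as @org.springframework.stereotype.Service.
@Component
public class Service {

    private final KafkaOffsetDAO dao;

    public Service(KafkaOffsetDAO dao) {
        this.dao = dao;
    }

    // savePayload(...) would then call this.dao.insertOffset(...) instead of
    // new KafkaOffsetDAOImpl().insertOffset(...).
}

OAConsumer would need the same treatment so that it receives this Service as a bean rather than instantiating it directly; otherwise the chain breaks again one level up.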
KafkaOffsetRecord.java

public class KafkaOffsetRecord {

long offsetId;
String jsonMsg;
String kafkaSourceType;
int partitionId;
String kafkaTopicName;


public KafkaOffsetRecord() {
    
}

public KafkaOffsetRecord(ConsumerRecord<String, String> consumerRecord) {
    offsetId = consumerRecord.offset();
    jsonMsg = consumerRecord.value();
    partitionId=consumerRecord.partition();
    kafkaTopicName= consumerRecord.topic();
    kafkaSourceType = "TEST";
    
}

public long getOffsetId() {
    return offsetId;
}

public String getJsonMsg() {
    return jsonMsg;
}

public String getKafkaSourceType() {
    return kafkaSourceType;
}

public int getPartitionId() {
    return partitionId;
}

public String getKafkaTopicName() {
    return kafkaTopicName;
}

public void setOffsetId(long offsetId) {
    this.offsetId = offsetId;
}

public void setJsonMsg(String jsonMsg) {
    this.jsonMsg = jsonMsg;
}

public void setKafkaSourceType(String kafkaSourceType) {
    this.kafkaSourceType = kafkaSourceType;
}

public void setPartitionId(int partitionId) {
    this.partitionId = partitionId;
}

public void setKafkaTopicName(String kafkaTopicName) {
    this.kafkaTopicName = kafkaTopicName;
}
}

application.properties

spring.datasource.url=jdbc:oracle:thin:@(DESCRIPTION=(ADDRESS_LIST=(LOAD_BALANCE=ON)(FAILOVER=OFF)(ADDRESS=(PROTOCOL=TCP)(HOST=**removed**)(PORT=**removed**))(ADDRESS=(PROTOCOL=TCP)(HOST=**removed**)(PORT=**removed**)))(CONNECT_DATA=(SERVICE_NAME=**removed**)(SERVER=DEDICATED)))
spring.datasource.driver-class-name=oracle.jdbc.OracleDriver
spring.datasource.username=**removed**
spring.datasource.password=**removed**
spring.datasource.type=oracle.jdbc.pool.OracleDataSource
spring.datasource.maximumPoolSize=20
spring.datasource.minimumIdle=5
spring.datasource.connectionTimeout=30000
spring.datasource.idleTimeout=1500000
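
If the wiring still looks suspect, it can help to verify at startup that the DataSource bean itself is created and reachable, independently of the DAO. A small sanity-check sketch (the DataSourceCheck class name is made up for illustration):

package com.oa.config;

import java.sql.Connection;

import javax.sql.DataSource;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.CommandLineRunner;
import org.springframework.stereotype.Component;

// Sketch only: logs at startup whether the DataSource bean exists and a connection can be opened.
@Component
public class DataSourceCheck implements CommandLineRunner {

    private static final Logger LOGGER = LoggerFactory.getLogger(DataSourceCheck.class);

    private final DataSource dataSource;

    public DataSourceCheck(DataSource dataSource) {
        this.dataSource = dataSource;
    }

    @Override
    public void run(String... args) throws Exception {
        try (Connection connection = dataSource.getConnection()) {
            LOGGER.info("DataSource bean is wired, connection valid = {}", connection.isValid(10));
        }
    }
}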