java中的Spark-MySQL连接器

java中的Spark-MySQL连接器,java,mysql,apache-spark,intellij-idea,Java,Mysql,Apache Spark,Intellij Idea,我想连接spark和MySQL。我尝试了以下代码: public class Get_Data_From_MySQL implements Serializable { private static final org.apache.log4j.Logger LOGGER = org.apache.log4j.Logger.getLogger(Get_Data_From_MySQL.class); private static final String MYSQL_CONNECTION_UR

我想连接spark和MySQL。我尝试了以下代码:

public class Get_Data_From_MySQL implements Serializable {

private static final org.apache.log4j.Logger LOGGER = org.apache.log4j.Logger.getLogger(Get_Data_From_MySQL.class);

private static final String MYSQL_CONNECTION_URL = "jdbc:mysql://localhost:3306/test";
private static final String MYSQL_USERNAME = "root";
private static final String MYSQL_PWD = "";

private static final SparkSession sparkSession =
        SparkSession.builder().master("local[*]").appName("Spark2JdbcDs").getOrCreate();

public static void main(String[] args) {
    //JDBC connection properties
    final Properties connectionProperties = new Properties();
    connectionProperties.put("user", MYSQL_USERNAME);
    connectionProperties.put("password", MYSQL_PWD);
    connectionProperties.put("driver", "com.mysql.jdbc.Driver");
    final String dbTable =
            "(select age from employe";
      //Load MySQL query result as Dataset
    Dataset<Row> jdbcDF =
            sparkSession.read()
                    .jdbc(MYSQL_CONNECTION_URL, dbTable, "age", 10001, 499999, 10, connectionProperties);
在这一行:

Dataset<Row> jdbcDF =sparkSession.read().jdbc(MYSQL_CONNECTION_URL, dbTable, "age", 10001, 499999, 10, connectionProperties);
Dataset&lt;Row&gt; jdbcDF = sparkSession.read().jdbc(MYSQL_CONNECTION_URL, dbTable, "age", 10001, 499999, 10, connectionProperties);
我检查了MySQL的用户和密码,一切都是正确的


谢谢。

您面临的问题可能意味着根本无法访问数据库。出现此问题的原因可能是:JDBC URL中的IP地址或主机名错误,或者本地DNS服务器无法识别JDBC URL中的主机名,或者缺少端口号,或者DB服务器关闭。可能有很多原因,所以我建议您再次检查您的方法,并检查所有内容。
通过终端连接并检查

您面临的问题可能意味着根本无法访问数据库。出现此问题的原因可能是:JDBC URL中的IP地址或主机名错误,或者本地DNS服务器无法识别JDBC URL中的主机名,或者缺少端口号,或者DB服务器关闭。可能有很多原因,所以我建议您再次检查您的方法,并检查所有内容。
  package JavaSpark.Javs.SQL;

   import java.util.Properties;

   import org.apache.spark.sql.Dataset;
   import org.apache.spark.sql.Row;
   import org.apache.spark.sql.SparkSession;


 public class sparkSqlMysql {


private static final org.apache.log4j.Logger LOGGER = org.apache.log4j.Logger.getLogger(sparkSqlMysql.class);

private static final SparkSession sparkSession =
        SparkSession.builder().master("local[*]").appName("Spark2JdbcDs").getOrCreate();    

public static void main(String[] args) {
    //JDBC connection properties
    
    final Properties connectionProperties = new Properties();
    connectionProperties.put("user", "root");
    connectionProperties.put("password","mypassword");
    connectionProperties.put("driver", "com.mysql.jdbc.Driver");
     // Load MySQL query result as Dataset
    Dataset<Row> jdbcDF2 =
            sparkSession.read()
                    .jdbc("jdbc:mysql://localhost:3306/SQLprep", "customer", connectionProperties);
    
    jdbcDF2.show();
 }
通过终端连接并检查

包JavaSpark.Javs.SQL;
  package JavaSpark.Javs.SQL;

   import java.util.Properties;

   import org.apache.spark.sql.Dataset;
   import org.apache.spark.sql.Row;
   import org.apache.spark.sql.SparkSession;


 public class sparkSqlMysql {


private static final org.apache.log4j.Logger LOGGER = org.apache.log4j.Logger.getLogger(sparkSqlMysql.class);

private static final SparkSession sparkSession =
        SparkSession.builder().master("local[*]").appName("Spark2JdbcDs").getOrCreate();    

public static void main(String[] args) {
    //JDBC connection properties
    
    final Properties connectionProperties = new Properties();
    connectionProperties.put("user", "root");
    connectionProperties.put("password","mypassword");
    connectionProperties.put("driver", "com.mysql.jdbc.Driver");
     // Load MySQL query result as Dataset
    Dataset<Row> jdbcDF2 =
            sparkSession.read()
                    .jdbc("jdbc:mysql://localhost:3306/SQLprep", "customer", connectionProperties);
    
    jdbcDF2.show();
 }
import java.util.Properties;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class sparkSqlMysql {
    private static final org.apache.log4j.Logger LOGGER = org.apache.log4j.Logger.getLogger(sparkSqlMysql.class);
    private static final SparkSession sparkSession =
            SparkSession.builder().master("local[*]").appName("Spark2JdbcDs").getOrCreate();

    public static void main(String[] args) {
        // JDBC连接属性
        final Properties connectionProperties = new Properties();
        connectionProperties.put("user", "root");
        connectionProperties.put("password", "mypassword");
        connectionProperties.put("driver", "com.mysql.jdbc.Driver");
        // 将MySQL查询结果加载为数据集
        Dataset&lt;Row&gt; jdbcDF2 =
                sparkSession.read()
                        .jdbc("jdbc:mysql://localhost:3306/SQLprep", "customer", connectionProperties);
        jdbcDF2.show();
    }
}
package JavaSpark.Javs.SQL;

import java.util.Properties;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SparkSession;

public class sparkSqlMysql {
    private static final org.apache.log4j.Logger LOGGER = org.apache.log4j.Logger.getLogger(sparkSqlMysql.class);
    private static final SparkSession sparkSession =
            SparkSession.builder().master("local[*]").appName("Spark2JdbcDs").getOrCreate();

    public static void main(String[] args) {
        // JDBC连接属性
        final Properties connectionProperties = new Properties();
        connectionProperties.put("user", "root");
        connectionProperties.put("password", "mypassword");
        connectionProperties.put("driver", "com.mysql.jdbc.Driver");
        // 将MySQL查询结果加载为数据集
        Dataset&lt;Row&gt; jdbcDF2 =
                sparkSession.read()
                        .jdbc("jdbc:mysql://localhost:3306/SQLprep", "customer", connectionProperties);
        jdbcDF2.show();
    }
}

我通过更改 mysql-connector 的版本解决了这个问题。实际上这是一个语法错误，我通过做一些修改解决了它——数据库连接本身不是问题。再次感谢你的回复。