Backing up database data after JUnit tests

When the JUnit tests run, they delete the database's original data. How can I take a backup so that the original data can be restored after the JUnit tests have finished? (One possible backup/restore approach is sketched after the test code below.)

@Test
public void testExecute_N001() throws Exception {
    this.importFileName_tsc = "input/input_tsc_" + "1" + EXTENSION_XML;
    this.importFileName_postgre = "input/input_eccube_" + "1"
            + EXTENSION_XML;
    this.resultFileName = "output/output_" + "1" + EXTENSION_XML;
    super.setResultBatchFilePrefix("N-001_");
    super.copyToImportFile("input/N-001" + EXTENSION_TXT);
    checkNoBatchTarget();
}

@Test
public void testExecute_N002() throws Exception {
    this.importFileName_tsc = "input/input_tsc_" + "1" + EXTENSION_XML;
    this.importFileName_postgre = "input/input_eccube_" + "1"
            + EXTENSION_XML;
    this.resultFileName = "output/output_" + "1" + EXTENSION_XML;

    super.setResultBatchFilePrefix("N-002_");
    super.copyToImportFile("input/N-002" + EXTENSION_TXT);
    checkBatchExecute();
}

@Test
public void testExecute_E001() throws Exception {
    this.importFileName_tsc = "input/input_tsc_" + "2" + EXTENSION_XML;
    this.importFileName_postgre = "input/input_eccube_" + "2"
            + EXTENSION_XML;
    this.resultFileName = "output/output_" + "2" + EXTENSION_XML;

    super.setResultBatchFilePrefix("E-001_");
    super.copyToImportFile("input/E-001" + EXTENSION_TXT);
    checkError();
}

private void checkNoBatchTarget() throws Exception {
    IDatabaseConnection ecCubeConn = null; // EC-CUBE (PostgreSQL) DB connection
    IDatabaseConnection connection = null; // TSC DB connection

    try {
        connection = getConnection();

        IDataSet dataSet = getDataSetFromFile(this.importFileName_tsc);

        // Delete the existing data for these tables
        DatabaseOperation.DELETE_ALL.execute(connection, dataSet);

        // Load the test data from the XML file and insert it into the tables
        DatabaseOperation.CLEAN_INSERT.execute(connection, dataSet);
        ecCubeConn = getConnection();

        IDataSet dataSet_postgre = getDataSetFromFile(this.importFileName_postgre);
        DatabaseOperation.DELETE_ALL.execute(ecCubeConn, dataSet_postgre);
        DatabaseOperation.CLEAN_INSERT.execute(ecCubeConn, dataSet_postgre);

        int result = super.batchStart(args);

        // Check the return value
        assertEquals("0", String.valueOf(result));

        // Check the DB table contents
        // Load the expected data
        IDataSet expectedDataSet = getDataSetFromFile(this.resultFileName);

        QueryDataSet queryDataSet = new QueryDataSet(ecCubeConn);
        queryDataSet.addTable(DESTINATION_TABLE_1,
                "SELECT store_cd, store_name, area_id, product_code, goods_code, maker_id, "
                        + "maker_product_code, size, color, stock, season FROM "
                        + DESTINATION_TABLE_1);
        IDataSet databaseDataSet = queryDataSet;

        ITable expectedTableOrd = sortTable(expectedDataSet);
        ITable actualTableOrd = sortTable(databaseDataSet);

        // REMOVE UNNECESSARY COLUMNS FOR TESTING
        expectedTableOrd = DefaultColumnFilter.includedColumnsTable(
                expectedTableOrd, new String[] { "store_cd", "store_name",
                        "area_id", "product_code", "goods_code",
                        "maker_id", "maker_product_code", "size", "color",
                        "stock", "season" });

        // Verify that the table was updated as expected
        Assertion.assertEquals(expectedTableOrd, actualTableOrd);

    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        if (super.isBatchFile()) {
            super.moveToBackup();
        }

        try {
            if (connection != null) {
                connection.close();
            }
            if (ecCubeConn != null) {
                ecCubeConn.close();
            }
        } catch (SQLException e) {
            // Ignore errors while closing the connections
        }
    }
}

private void checkError() throws Exception {
    IDatabaseConnection connection = null; // TSC DB connection
    IDatabaseConnection ecCubeConn = null; // EC-CUBE (PostgreSQL) DB connection

    try {
        connection = getConnection();
        IDataSet dataSet = getDataSetFromFile(this.importFileName_tsc);
        DatabaseOperation.DELETE_ALL.execute(connection, dataSet);
        DatabaseOperation.CLEAN_INSERT.execute(connection, dataSet);
        ecCubeConn = getConnection();
        IDataSet dataSet_postgre = getDataSetFromFile(this.importFileName_postgre);
        DatabaseOperation.DELETE_ALL.execute(ecCubeConn, dataSet_postgre);
        DatabaseOperation.CLEAN_INSERT.execute(ecCubeConn, dataSet_postgre);

        int result = super.batchStart(args);

        assertEquals("-1", String.valueOf(result));

        IDataSet expectedDataSet = getDataSetFromFile(this.resultFileName);

        QueryDataSet queryDataSet = new QueryDataSet(ecCubeConn);
        queryDataSet.addTable(DESTINATION_TABLE_1,
                "SELECT store_cd, store_name, area_id, product_code, goods_code, maker_id, "
                        + "maker_product_code, size, color, stock, season FROM "
                        + DESTINATION_TABLE_1);
        IDataSet databaseDataSet = queryDataSet;

        ITable expectedTableOrd = sortTable(expectedDataSet);
        ITable actualTableOrd = sortTable(databaseDataSet);

        // Keep only the columns relevant to the test
        expectedTableOrd = DefaultColumnFilter.includedColumnsTable(
                expectedTableOrd, new String[] { "store_cd", "store_name",
                        "area_id", "product_code", "goods_code",
                        "maker_id", "maker_product_code", "size", "color",
                        "stock", "season" });

        Assertion.assertEquals(expectedTableOrd, actualTableOrd);
    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        if (super.isBatchFile()) {
            super.moveToBackup();
        }

        try {
            if (connection != null) {
                connection.close();
            }
            if (ecCubeConn != null) {
                ecCubeConn.close();
            }
        } catch (SQLException e) {
            // Ignore errors while closing the connections
        }
    }
}

private void checkBatchExecute() throws Exception {
    IDatabaseConnection connection = null; // TSC DB connection
    IDatabaseConnection ecCubeConn = null; // EC-CUBE (PostgreSQL) DB connection

    try {
        connection = getConnection();
        IDataSet dataSet = getDataSetFromFile(this.importFileName_tsc);
        DatabaseOperation.DELETE_ALL.execute(connection, dataSet);
        DatabaseOperation.CLEAN_INSERT.execute(connection, dataSet);
        ecCubeConn = getConnection();
        IDataSet dataSet_postgre = getDataSetFromFile(this.importFileName_postgre);
        DatabaseOperation.DELETE_ALL.execute(ecCubeConn, dataSet_postgre);
        DatabaseOperation.CLEAN_INSERT.execute(ecCubeConn, dataSet_postgre);
        int result = super.batchStart(args);
        assertEquals("0", String.valueOf(result));

        IDataSet expectedDataSet = getDataSetFromFile(this.resultFileName);
        QueryDataSet queryDataSet = new QueryDataSet(ecCubeConn);
        queryDataSet.addTable(DESTINATION_TABLE_1,
                "SELECT store_cd, store_name, area_id, product_code, goods_code, maker_id, "
                        + "maker_product_code, size, color, stock, season FROM "
                        + DESTINATION_TABLE_1);
        IDataSet databaseDataSet = queryDataSet;

        ITable expectedTableOrd = sortTable(expectedDataSet);
        ITable actualTableOrd = sortTable(databaseDataSet);

        // Keep only the columns relevant to the test
        expectedTableOrd = DefaultColumnFilter.includedColumnsTable(
                expectedTableOrd, new String[] { "store_cd", "store_name",
                        "area_id", "product_code", "goods_code",
                        "maker_id", "maker_product_code", "size", "color",
                        "stock", "season" });
        Assertion.assertEquals(expectedTableOrd, actualTableOrd);

    } catch (Exception e) {
        e.printStackTrace();
        fail(e.getMessage());
    } finally {
        if (super.isBatchFile()) {
            super.moveToBackup();
        }

        try {
            if (connection != null) {
                connection.close();
            }
            if (ecCubeConn != null) {
                ecCubeConn.close();
            }
        } catch (SQLException e) {
            // Ignore errors while closing the connections
        }
    }
}

private ITable sortTable(IDataSet dataSet) throws DataSetException {
    return new SortedTable(dataSet.getTable(DESTINATION_TABLE_1),
            new String[] { "store_cd" });
}
Is there another simple way to check inserts, updates and deletes in the database? Is this approach acceptable? How can this code be improved? Is there another way to do this so that it does not affect the existing database data?

  • If possible, use a separate environment for running the tests. Hopefully you are not running the tests against production.
  • Never run tests against real data. Always set up the test data yourself and run the tests against that; if you rely on real data there is no guarantee it will always be present, so your tests may start failing in the future.
  • Perform the database inserts/updates inside a transaction and roll it back once the test has finished (a minimal sketch of this follows below).
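
A minimal sketch of that transactional approach on top of the DbUnit connection used in the question: auto-commit is switched off before each test and everything is rolled back afterwards. The TransactionalDbTestBase class is an assumption for illustration; this only works if the batch under test runs on the same connection/transaction, otherwise a separate test environment or the export/restore approach is the safer option.

import java.sql.Connection;

import org.dbunit.database.IDatabaseConnection;
import org.junit.After;
import org.junit.Before;

// Hypothetical base class: run every test inside a transaction that is rolled back,
// so neither CLEAN_INSERT nor the code under test permanently changes the data.
public abstract class TransactionalDbTestBase {

    protected IDatabaseConnection dbUnitConnection; // assumed to come from the existing getConnection()

    @Before
    public void openTransaction() throws Exception {
        dbUnitConnection = getConnection();
        Connection jdbc = dbUnitConnection.getConnection();
        jdbc.setAutoCommit(false); // everything the test writes stays uncommitted
    }

    @After
    public void rollbackTransaction() throws Exception {
        if (dbUnitConnection != null) {
            try {
                dbUnitConnection.getConnection().rollback(); // discard all test changes
            } finally {
                dbUnitConnection.close();
            }
        }
    }

    /** Supplied by the concrete test class, as in the question. */
    protected abstract IDatabaseConnection getConnection() throws Exception;
}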