Warning: file_get_contents(/data/phpspider/zhask/data//catemap/1/php/255.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181

Warning: file_get_contents(/data/phpspider/zhask/data//catemap/3/arrays/13.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181

Warning: file_get_contents(/data/phpspider/zhask/data//catemap/7/user-interface/2.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
PHP数组所有重复记录的最后一个条目_Php_Arrays - Fatal编程技术网

PHP数组所有重复记录的最后一个条目

PHP数组所有重复记录的最后一个条目,php,arrays,Php,Arrays,我需要获取数组中所有重复记录的最后一个条目。如何在php中实现这一点 示例数据 Input Array Array ( [0] => Array ( [0] => A ) [1] => Array ( [0] => A ) [2] => Array ( [0] => B ) [3] => Array ( [0] => C ) [4] => Array ( [0] => C ) [5] => …

我需要获取数组中所有重复记录的最后一个条目。如何在php中实现这一点

示例数据

Input Array
Array
(
    [0] => Array ( [0] => A )
    [1] => Array ( [0] => A )
    [2] => Array ( [0] => B )
    [3] => Array ( [0] => C )
    [4] => Array ( [0] => C )
    [5] => Array ( [0] => D )
    [6] => Array ( [0] => F )
    [7] => Array ( [0] => F )
)
结果输出列表1应仅包含所有重复记录的最后一项

Array
(
    [1] => Array ( [0] => A )
    [4] => Array ( [0] => C )
    [7] => Array ( [0] => F )
)
结果输出列表2应包含所有其他条目

Array
(
    [0] => Array ( [0] => A )
    [2] => Array ( [0] => B )
    [3] => Array ( [0] => C )
    [5] => Array ( [0] => D )
    [6] => Array ( [0] => F )
)

很快把这个打出来。还没有机会测试它,但应该会成功

请注意,内部循环是非最优的——它可以得到很大的改进,特别是如果已知数据是经过排序的(如示例数据),但是您没有指定,所以我没有假设它

<?php
// Split $input_array (list of rows) into:
//   $output_dups      - the LAST occurrence of every value that appears more than once
//   $output_remainder - everything else (unique values + the earlier duplicates)
// Bug fix: the original tested `$key < $key2`, which flags entries that have a
// LATER twin — i.e. it collected the *first* occurrences of duplicates, the exact
// opposite of the requested output ([1]=>A, [4]=>C, [7]=>F). Original keys are
// now preserved, matching the expected result lists.
// Note: the inner loop is still O(n^2); fine for small inputs, sort first for large ones.
$output_dups = array();
$output_remainder = array();
foreach ($input_array as $key => $data) {
    $has_earlier = false; // an identical entry exists before this one
    $has_later   = false; // an identical entry exists after this one
    foreach ($input_array as $key2 => $data2) {
        if ($data === $data2) {
            if ($key2 < $key) { $has_earlier = true; }
            if ($key2 > $key) { $has_later = true; }
        }
    }
    // The "last duplicate" has an earlier twin but no later one.
    if ($has_earlier && !$has_later) {
        $output_dups[$key] = $data;
    } else {
        $output_remainder[$key] = $data;
    }
}

?>

不确定您是否希望这样做,但您可以尝试以下方法:

<?php

// Sample input: one single-element inner array per entry.
$array = array
(
    0 => array ( 0 => 'A' ),
    1 => array ( 0 => 'A' ),
    2 => array ( 0 => 'B' ),
    3 => array ( 0 => 'C' ),
    4 => array ( 0 => 'C' ),
    5 => array ( 0 => 'D' ),
    6 => array ( 0 => 'F' ),
    7 => array ( 0 => 'F' )
);

// Flatten the 2-D input into a 1-D array keyed by the outer index,
// echoing each value as we go.
// Bug fix: $new_array was used below without ever being initialised
// (E_NOTICE "Undefined variable", exactly the class of warning this page is
// littered with; a hard warning on PHP 8).
$new_array = array();
foreach ($array as $k => $v){
    echo $k . " --> ";
    if (is_array($v)){
        foreach($v as $k1=>$v1){
            echo $v1."<br />";
            $new_array[$k]=$v1; // last inner value wins if a row had several
        }
    }else{
        echo $v."<br />";
    }
}
echo "<br />";
// Restructured array
print_r($new_array);
echo "<br />";
// Duplicates: later occurrences of each repeated value, original keys preserved
print_r(get_duplicates($new_array));
echo "<br />";
// All entries
// NOTE(review): array_unique() keeps the FIRST occurrence of each value, so
// this prints "first occurrences", not "everything except the last duplicate"
// as the question asked for — confirm this is what the caller wants.
print_r(array_unique($new_array));

// Returns the entries array_unique() would drop: the second-and-later
// occurrences of each duplicated value, de-duplicated again so each repeated
// value appears once (keyed by its second occurrence, not its last).
function get_duplicates( $array ) {
    return array_unique( array_diff_assoc( $array, array_unique( $array ) ) );
}
?>

//我的实现是这样的

// Driver: long-running CSV scan, so raise the execution/memory limits first.
set_time_limit (1500) ;
ini_set("memory_limit","128M");

// Base name: reads "_one.csv", writes "_oneA.csv" / "_oneB.csv" (see process()).
$fileName = "_one";

$objScan = new scanCSV();

// Load every CSV row, then partition rows into duplicate/unique buckets
// based on column index 2 (see scanCSV::getcount).
$objScan->setCSVFileName($fileName);
$objScan->loadCsvFile();
$objScan->separateDuplicateFromUniq();

// Write last-of-each-duplicate to "<base>A.csv" and the remainder to "<base>B.csv".
$objScan->process();



class scanCSV 
{
    private $_csvFile = NULL;        // base file name; ".csv" is appended on read
    private $_arrayListAll = NULL;   // every CSV row, in file order
    private $_allDuplicateRec  = NULL; // every occurrence of rows whose col-2 value repeats
    private $_uniqueRec  = NULL;     // rows whose col-2 value occurs exactly once

    // Remember the base name: "<name>.csv" is read; "<name>A.csv"/"<name>B.csv" are written.
    function setCSVFileName($fileName){
        $this->_csvFile = $fileName;
    }


    //-----------------------------------------------------------------------
    // Read "<base>.csv" into $_arrayListAll (one array per row).
    // Bug fix: the handle was never closed — resource leak on every call.
    function loadCsvFile()
    {
        $arrayListAll = array();
        if (($handle = fopen($this->_csvFile . ".csv", "r")) !== FALSE) {
            while (($data = fgetcsv($handle, 1000, ",")) !== FALSE) {
                $arrayListAll[] = $data;
            }
            fclose($handle); // was missing in the original
        }

        $this->_arrayListAll = $arrayListAll;
    }
    //-----------------------------------------------------------------------
    // Partition all rows: every occurrence of a repeated col-2 value goes into
    // $_allDuplicateRec; rows whose col-2 value is unique go into $_uniqueRec.
    // O(n^2) because getcount() rescans the whole list per row.
    public function separateDuplicateFromUniq(){

        $allDuplicateRec = array();
        $uniqueRec = array();

        foreach($this->_arrayListAll as $data){
             if ($this->getcount($this->_arrayListAll, $data) > 1)
                  $allDuplicateRec[] = $data;
             else 
                  $uniqueRec[] = $data;
        }

        $this->_allDuplicateRec = $allDuplicateRec;
        $this->_uniqueRec = $uniqueRec;

    }
    //-----------------------------------------------------------------------   
    // Write the last occurrence of each duplicated row to "<base>A.csv" and
    // everything else (uniques + earlier duplicates) to "<base>B.csv".
    public function process (){     
        $uniq = $this->removeDuplicate ($this->_allDuplicateRec);
        $this->writeCSVFile ($this->_csvFile . "A.csv", $uniq);

        $restofEntries = $this->removeLastEntries ($this->_arrayListAll, $uniq);
        $this->writeCSVFile ($this->_csvFile . "B.csv", $restofEntries);
    }
    //-----------------------------------------------------------------------
    // Reduce the duplicate bucket to one entry per value — the LAST one.
    // This deliberately relies on PHP semantics: foreach iterates a snapshot of
    // the array, while unset() mutates the live copy that getcount() consults,
    // so earlier occurrences are removed until only the final one counts as 1.
    function removeDuplicate ($allDuplicateRec)
    {

        foreach ($allDuplicateRec as $k => $v) 
        if ( $this->getcount($allDuplicateRec, $v) > 1 )
            unset($allDuplicateRec[$k]);
        return $allDuplicateRec;
    }

    //-----------------------------------------------------------------------
    // Remove each kept "last duplicate" from the full list; what remains is
    // output list B. array_search finds the FIRST matching row, which is fine
    // here because the rows in $uniq are exact-match removals.
    function removeLastEntries ($arrayListAll, $uniq){
        foreach ($uniq as $entry)
            if(in_array($entry, $arrayListAll))
                unset($arrayListAll[array_search($entry, $arrayListAll)]);  

        return $arrayListAll;   

    }
    //-----------------------------------------------------------------------
    // Count rows sharing $data's column-2 value. Rows are compared by that one
    // column only; assumes every row has at least 3 columns — TODO confirm
    // against the real CSV (sample data in the question has a single column).
    function getcount($arrayList1, $data){
           $address = $data[2];
            $count =0;
           foreach ($arrayList1 as $dt)
                if ($address == $dt[2])
                    $count++;

           return $count;
    }
    //-----------------------------------------------------------------------
    // Write rows to $fileName as CSV.
    // Bug fix: fopen() failure was unchecked — fputcsv(false, ...) would then
    // emit warnings and fclose(false) would error; bail out instead.
    function writeCSVFile ($fileName, $data){

        $fp = fopen($fileName, 'w');
        if ($fp === FALSE) {
            return; // cannot open target file; skip writing rather than fatal
        }

        foreach ($data as $k=>$fields) 
            fputcsv($fp, $fields);

        fclose($fp);
    }
    //-----------------------------------------------------------------------
}  // end of scan Optimized

你试过给这个编码吗?你有错误吗?发布一些尝试…为什么你有这个。。。奇怪的结构?这是示例数据。实际数据不同,但逻辑相同。我正在读取csv文件。这里我需要一点逻辑,如何读取数组中的最后一个重复条目我应该将其放入mysql数据库并运行sql select语句吗?
// NOTE(review): this driver is a verbatim scrape-duplicate of the one above;
// if this page were executed as one PHP file it would run the scan twice.
set_time_limit (1500) ;
ini_set("memory_limit","128M");

// Base name: reads "_one.csv", writes "_oneA.csv" / "_oneB.csv".
$fileName = "_one";

$objScan = new scanCSV();

$objScan->setCSVFileName($fileName);
$objScan->loadCsvFile();
$objScan->separateDuplicateFromUniq();

$objScan->process();



// NOTE(review): verbatim scrape-duplicate of the scanCSV class defined earlier
// on this page. Redeclaring a class is a fatal error in PHP — if this content
// were run as one file, delete one copy. Code below is unchanged; comments only.
class scanCSV 
{
    private $_csvFile = NULL;        // base file name; ".csv" appended on read
    private $_arrayListAll = NULL;   // every CSV row, in file order
    private $_allDuplicateRec  = NULL; // every occurrence of rows with a repeated col-2 value
    private $_uniqueRec  = NULL;     // rows whose col-2 value occurs exactly once

    // Remember the base name used by loadCsvFile()/process().
    function setCSVFileName($fileName){
        $this->_csvFile = $fileName;
    }


    //-----------------------------------------------------------------------
    // Read "<base>.csv" into $_arrayListAll. NOTE(review): $handle is never
    // fclose()d here — resource leak.
    function loadCsvFile()
    {
        $arrayListAll = array();
        if (($handle = fopen($this->_csvFile . ".csv", "r")) !== FALSE) {
            while (($data = fgetcsv($handle, 1000, ",")) !== FALSE) {
                $arrayListAll[] = $data;
            }
        }

        $this->_arrayListAll = $arrayListAll;
    }
    //-----------------------------------------------------------------------
    // Partition rows by whether their col-2 value repeats (O(n^2) via getcount).
    public function separateDuplicateFromUniq(){

        $allDuplicateRec = array();
        $uniqueRec = array();

        foreach($this->_arrayListAll as $data){
             if ($this->getcount($this->_arrayListAll, $data) > 1)
                  $allDuplicateRec[] = $data;
             else 
                  $uniqueRec[] = $data;
        }

        $this->_allDuplicateRec = $allDuplicateRec;
        $this->_uniqueRec = $uniqueRec;

    }
    //-----------------------------------------------------------------------   
    // Write last-of-each-duplicate to "<base>A.csv", the remainder to "<base>B.csv".
    public function process (){     
        $uniq = $this->removeDuplicate ($this->_allDuplicateRec);
        $this->writeCSVFile ($this->_csvFile . "A.csv", $uniq);

        $restofEntries = $this->removeLastEntries ($this->_arrayListAll, $uniq);
        $this->writeCSVFile ($this->_csvFile . "B.csv", $restofEntries);
    }
    //-----------------------------------------------------------------------
    // Keeps only the LAST occurrence of each duplicated value: foreach iterates
    // a snapshot while unset() mutates the live array consulted by getcount().
    function removeDuplicate ($allDuplicateRec)
    {

        foreach ($allDuplicateRec as $k => $v) 
        if ( $this->getcount($allDuplicateRec, $v) > 1 )
            unset($allDuplicateRec[$k]);
        return $allDuplicateRec;
    }

    //-----------------------------------------------------------------------
    // Remove each kept last-duplicate from the full list; the rest is list B.
    function removeLastEntries ($arrayListAll, $uniq){
        foreach ($uniq as $entry)
            if(in_array($entry, $arrayListAll))
                unset($arrayListAll[array_search($entry, $arrayListAll)]);  

        return $arrayListAll;   

    }
    //-----------------------------------------------------------------------
    // Count rows sharing $data's column-2 value; assumes >= 3 columns per row
    // — TODO confirm against the real CSV.
    function getcount($arrayList1, $data){
           $address = $data[2];
            $count =0;
           foreach ($arrayList1 as $dt)
                if ($address == $dt[2])
                    $count++;

           return $count;
    }
    //-----------------------------------------------------------------------
    // Write rows as CSV. NOTE(review): fopen() failure is unchecked here.
    function writeCSVFile ($fileName, $data){

        $fp = fopen($fileName, 'w');

        foreach ($data as $k=>$fields) 
            fputcsv($fp, $fields);

        fclose($fp);
    }
    //-----------------------------------------------------------------------
}  // end of scan Optimized