
Golang Azure blob storage: 0 B blobs and overwriting of downloaded blob data


Currently using:

Overview: I am currently downloading blobs from an Azure blob store, parsing them, and uploading the transcribed blobs back to the store into another folder named filtered.

Problem: The uploaded blob does not end up in the filtered folder but in the root of the container, and the blob is 0 B with no data. The upload also appears to corrupt the blob I just downloaded, leaving it at 0 B with no data as well. Downloading the blob works fine and I am able to get a []byte array of the data.

Code:

import (
"bufio"
"fmt"
"os"
"strings"
"strconv"
"math/big"
"bytes"
"io/ioutil"
"github.com/Azure/azure-sdk-for-go/storage"
"compress/gzip"
"encoding/base64"
"crypto/md5"
)

func main() {
    var filter bool = true                                                  //check smart filter
    test := 0
    configfile, err := os.Open("config.txt")                                //open configfile
    check(err)                                                              //check file opened
    ConfigScanner := bufio.NewScanner(configfile)                           //open buffer
    ConfigScanner.Scan()                                                    //get serial number
    serialnum := ConfigScanner.Text()
    configfile.Close()                                                      //close the config file
    CanLUT := ParseDBC("file.dbc")                                          //parse the associated DBC file
    check(err)                                                              //check file opened
    m := make(map[int64]string)                                             //map of last seen message
    //Azure API
    client, err := storage.NewBasicClient(accountName, accountKey)          //get client from azure
    check(err)
    bsc := client.GetBlobService()                                          //access blob service
    cnt := bsc.GetContainerReference("containerblob")                           //get container of the blob
    LBP := storage.ListBlobsParameters{}                                    
    LBP.Prefix = "dev4/dev4"                                                //only get blobs with dev4/dev4 prefix
    list, err := cnt.ListBlobs(LBP)                                         //get list of all matching blobs
    check(err)
    for _, b := range list.Blobs {                                          //read all blobs from azure with prefix dev4/dev4
        oa := make([]byte,0)
        fmt.Println("getting blob: ",b.Name)
        readCloser, err := b.Get(nil)                                       //get blob data
        check(err)
        bytesRead, err := ioutil.ReadAll(readCloser)                        //read blob data to byte[]
        check(err)
        if len(bytesRead) < 1 {
            continue
        }
        br := bytes.NewReader(bytesRead)
        zr, err := gzip.NewReader(br)                                       //use gzip reader for zipped data
        check(err)
        uz, err := ioutil.ReadAll(zr)                                       //uz byte[] of unzipped file
        check(err)
        readCloser.Close()                                                  //close the reader
        zr.Close()                                                          //close gzip reader
        r := bytes.NewReader(uz)
        scanner := bufio.NewScanner(r)
        for scanner.Scan() {                                                //loop on each line in the input file
            temp := ParseToFrame(scanner.Text())                            //parse the line into a usable struct
            _, exists := m[temp.nodeid]                                     //check if the frame has already been seen and is stored in the hashmap
            if exists {                                                     //if exists in the map
                if ChkDuplicate(m, temp) {                                  //is the msg a duplicate? if true, the message is not a duplicate, so add it
                    m[temp.nodeid] = temp.data                              //update the data to the hashmap
                    DecodeFrame(temp, &oa, CanLUT, filter, serialnum)       //decode the frame and output it to another file
                }
            } else {                                                        //DNE in map so add it
                m[temp.nodeid] = temp.data
                DecodeFrame(temp, &oa, CanLUT,filter, serialnum)            //decode the frame and output it to another file
            }
        }//end blob file
        filestr := strings.Split(b.Name, "_")[1]
        filestr = "filtered/filtered_" + filestr
        var buffout bytes.Buffer
        gz := gzip.NewWriter(&buffout)
        _, err = gz.Write(oa)
        check(err)
        gz.Flush()
        gz.Close()
        compressedData := buffout.Bytes()
        //push block blob to azure
        fmt.Println("uploading: ",filestr)
        clientnew, err := storage.NewBasicClient(accountName, accountKey)           //get client from azure
        check(err)
        senderbsc := clientnew.GetBlobService()                                         //access blob service
        sendercnt := senderbsc.GetContainerReference("storeblob")                           //get container of store blob
        bblob := sendercnt.GetBlobReference("filtered_" + strings.Split(b.Name, "/")[1])
        err = bblob.CreateBlockBlob(nil)
        check(err)
        blockID := base64.StdEncoding.EncodeToString([]byte("00000"))
        err = bblob.PutBlock(blockID, compressedData, nil)
        check(err)
        list, err := b.GetBlockList(storage.BlockListTypeUncommitted, nil)
        check(err)
        uncommittedBlocksList := make([]storage.Block, len(list.UncommittedBlocks))
        for i := range list.UncommittedBlocks {
            uncommittedBlocksList[i].ID = list.UncommittedBlocks[i].Name
            uncommittedBlocksList[i].Status = storage.BlockStatusUncommitted
        }
        err = b.PutBlockList(uncommittedBlocksList, nil)
        //check if upload was good.
        CheckHash(&compressedData,filestr,sendercnt)
        check(err)
        if(test == 0){
            break       //test only read one file
        }
        test++
    }//end for blobs     
}//end main
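
The symptoms above look consistent with the block-list calls targeting the wrong blob: GetBlockList and PutBlockList are issued on b, the source blob that was just downloaded, so an empty block list gets committed to it and truncates it to 0 B, while the block staged on bblob is never committed and the new blob stays at 0 B as well; in addition, the destination name "filtered_..." carries no "filtered/" prefix, which is why it shows up at the container root. A minimal sketch of committing the staged block against the destination blob instead (assuming the same github.com/Azure/azure-sdk-for-go/storage package and the filestr/compressedData variables from the loop above):

    dst := sendercnt.GetBlobReference(filestr)                          //"filtered/filtered_..." keeps the folder prefix
    err = dst.CreateBlockBlob(nil)                                      //create the (empty) destination block blob
    check(err)
    blockID := base64.StdEncoding.EncodeToString([]byte("00000"))
    err = dst.PutBlock(blockID, compressedData, nil)                    //stage the compressed data as a single block
    check(err)
    //commit the staged block on dst (the destination), not on the source blob b
    err = dst.PutBlockList([]storage.Block{{ID: blockID, Status: storage.BlockStatusUncommitted}}, nil)
    check(err)
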
accountName := "<your-storage-account-name>"
accountKey := "<your-storage-account-key>"
client, _ := storage.NewBasicClient(accountName, accountKey)
blobClient := client.GetBlobService()
containerName := "mycontainer"
container := blobClient.GetContainerReference(containerName)

// Two sample ways for uploading
// 1. Upload a text blob from string reader
blobName := "upload.txt"
blob := container.GetBlobReference(blobName)
strReader := strings.NewReader("upload text to blob from string reader")
blob.CreateBlockBlobFromReader(strReader, nil)

// 2. Upload a file from file reader
fileName := "hello.png"
file, _ := os.Open(fileName)
blobName := "hello.png"
blob := container.GetBlobReference(blobName)
blob.CreateBlockBlobFromReader(file, nil)
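
Applying CreateBlockBlobFromReader to the loop above, and keeping the filtered/ prefix in the blob name, the upload section becomes the snippet below.
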
    compressedData := buffout.Bytes()
    //push block blob to azure
    fmt.Println("uploading: ",filestr)
    blockID := base64.StdEncoding.EncodeToString([]byte("00001"))
    newblob := cnt.GetBlobReference(filestr)
    err = newblob.CreateBlockBlobFromReader(bytes.NewReader(compressedData),nil)
    check(err)
    err = newblob.PutBlock(blockID, compressedData, nil)
    check(err)
    list, err := newblob.GetBlockList(storage.BlockListTypeUncommitted, nil)
    check(err)
    uncommittedBlocksList := make([]storage.Block, len(list.UncommittedBlocks))
    for i := range list.UncommittedBlocks {
        uncommittedBlocksList[i].ID = list.UncommittedBlocks[i].Name
        uncommittedBlocksList[i].Status = storage.BlockStatusUncommitted
    }
    err = newblob.PutBlockList(uncommittedBlocksList, nil)
    check(err)
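
Note that CreateBlockBlobFromReader already uploads and commits the full payload, so the PutBlock/GetBlockList/PutBlockList sequence after it re-stages and re-commits the same data and appears redundant here; the essential change from the original code is that the block-list calls now target newblob, the destination, rather than the source blob b, and that the blob name keeps its filtered/ prefix.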