Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/java/386.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Java PERMISSION_DENIED:权限 TABLES_UPDATE_DATA 被拒绝_Java_Json_Google Bigquery_Google Cloud Functions - Fatal编程技术网

Java PERMISSION_DENIED:权限 TABLES_UPDATE_DATA 被拒绝

Java 权限\u被拒绝:权限表\u更新\u数据,java,json,google-bigquery,google-cloud-functions,Java,Json,Google Bigquery,Google Cloud Functions,我试图使用Java编写的云函数将JSON对象插入GoogleBigQuery。但是,代码给出了一个奇怪的权限错误,我想确认Cloud函数提供了所有Bigquery权限,以便写入表。ProjectID、数据集名称和表名称也已验证且正确无误 我从谷歌云函数运行下面的代码时遇到运行时异常。请帮忙。 错误: 2021-05-08 22:52:45.674 IstTopicReadergCpJFunctionsAj66v5ty43一个名为:com.google.api.gax.rpc.Permission

我试图使用Java编写的云函数将JSON对象插入GoogleBigQuery。但是,代码给出了一个奇怪的权限错误,我想确认Cloud函数提供了所有Bigquery权限,以便写入表。ProjectID、数据集名称和表名称也已验证且正确无误

我从谷歌云函数运行下面的代码时遇到运行时异常。请帮忙。 错误:

2021-05-08 22:52:45.674 IstTopicReadergCpJFunctionsAj66v5ty43 调用报错:com.google.api.gax.rpc.PermissionDeniedException: io.grpc.StatusRuntimeException: PERMISSION_DENIED: Permission "TABLES_UPDATE_DATA" 在资源 "" 上被拒绝(或者该资源可能不存在)。调用的错误:com.google.api.gax.rpc.PermissionDeniedException: io.grpc.StatusRuntimeException: PERMISSION_DENIED: Permission "TABLES_UPDATE_DATA" 在资源 "" 上被拒绝(或者该资源可能不存在)

包函数;
导入com.google.api.core.ApiFuture;
导入com.google.cloud.bigquery.bigquery;
导入com.google.cloud.bigquery.BigQueryOptions;
导入com.google.cloud.bigquery.Schema;
导入com.google.cloud.bigquery.Table;
导入com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
导入com.google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient;
导入com.google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest;
导入com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
导入com.google.cloud.bigquery.storage.v1beta2.TableName;
导入com.google.cloud.bigquery.storage.v1beta2.WriteStream;
导入com.google.protobuf.Descriptors.DescriptorValidationException;
导入java.io.IOException;
导入java.util.concurrent.ExecutionException;
导入org.json.JSONArray;
导入org.json.JSONObject;
//导入org.json.simple.JSONArray;
//导入org.json.simple.JSONObject;
//导入org.json.simple.parser.JSONParser;
导入java.util.*;
公共类WriteCommitteedStream{
//TODO(开发人员):在运行示例之前替换这些变量。
公共静态布尔writeCommittedStreamToBQ(字符串projectId、字符串datasetName、字符串tableName、,
HashSet streamHashSet)抛出DescriptorValidationException、InterruptedException、IOException{
try(BigQueryWriteClient=BigQueryWriteClient.create()){
迭代器值=streamHashSet.Iterator();
while(value.hasNext()){
//创建一个与表模式兼容的JSON对象。
//JSONObject记录=新的JSONObject();
//record.put(“col1”,String.format(“record%03d”,value.next());
//JSONArray jsonArr=新的JSONArray();
//value.next()。
//get(value.next());
System.out.println(value.next().get(0.toString());
}
//数组->{data}
//数组->[{data}]
System.out.println(“projectId:+projectId”);
System.out.println(“datasetName:+datasetName”);
System.out.println(“tableName:+tableName”);
BigQuery BigQuery=BigQueryOptions.getDefaultInstance().getService();
Table Table=bigquery.getTable(datasetName,tableName);
TableName parentTable=TableName.of(projectId、datasetName、TableName);
Schema Schema=table.getDefinition().getSchema();
System.out.println(“Schema:+Schema.toString());
System.out.println(“表:”+parentTable.toString());
try(JsonStreamWriter writer=JsonStreamWriter.newBuilder(parentTable.toString(),schema).createDefaultStream()
.build()){
//将10个JSON对象附加到流中。
迭代器值2=streamHashSet.Iterator();
/////////////////////
/*
JSONArray jsonArr=value2.next();
System.out.println(“循环内:+jsonArr”);
ApiFuture=writer.append(jsonArr);
AppendRowsResponse response=future.get();
System.out.println(“成功追加记录”。+response.toString());
*/      
JSONObject记录=新的JSONObject();
record.put(“RPdeviceName”,String.format(“记录%03d”,0));
record.put(“rporganizationname”,String.format(“记录%03d”,1));
记录.put(“RPdate”,String.format(“记录%03d”,2));
record.put(“RPtime”,String.format(“record%03d”,3));
记录.put(“RPmacid”,String.format(“记录%03d”,4));
记录.put(“状态”,字符串.format(“记录%03d”,5));
record.put(“mac”,String.format(“record%03d”,6));
记录.put(“日期”,字符串.format(“记录%03d”,7));
record.put(“time”,String.format(“record%03d”,8));
记录.put(“count”,String.format(“记录%03d”,9));
记录.put(“peakadc”,String.format(“记录%03d”,10));
记录.put(“重置”,字符串.format(“记录%03d”,11));
JSONArray jsonArr=新的JSONArray();
jsonArr.put(记录);
System.out.println(“Initially.+jsonArr.toString());
ApiFuture=writer.append(jsonArr);
AppendRowsResponse response=future.get();
System.out.println(“成功追加记录”。+response.toString());
//////////////////////
/*
while(value2.hasNext()){
//创建一个与表模式兼容的JSON对象。
//JSONObject记录=新的JSONObject();
//record.put(“col1”,String.format(“record%03d”,value.next());
JSONArray jsonArr=value2.next();
//put(value2.next());
//System.out.println(jsonArr.get(0.toString());
System.out.println(“循环内:+jsonArr”);
//杰索纳尔=
//要检测重复记录,请将索引作为记录偏移量传递。
//要禁用重复数据消除,请忽略偏移量或使用WriteStream.Type.DEFAULT。
ApiFuture=writer.append(jsonArr);
AppendRowsResponse response=future.get();
System.out.println(“成功追加记录”。+res
package functions;

import com.google.api.core.ApiFuture;
import com.google.cloud.bigquery.BigQuery;
import com.google.cloud.bigquery.BigQueryOptions;
import com.google.cloud.bigquery.Schema;
import com.google.cloud.bigquery.Table;
import com.google.cloud.bigquery.storage.v1beta2.AppendRowsResponse;
import com.google.cloud.bigquery.storage.v1beta2.BigQueryWriteClient;
import com.google.cloud.bigquery.storage.v1beta2.CreateWriteStreamRequest;
import com.google.cloud.bigquery.storage.v1beta2.JsonStreamWriter;
import com.google.cloud.bigquery.storage.v1beta2.TableName;
import com.google.cloud.bigquery.storage.v1beta2.WriteStream;
import com.google.protobuf.Descriptors.DescriptorValidationException;
import java.io.IOException;
import java.util.concurrent.ExecutionException;
import org.json.JSONArray;
import org.json.JSONObject;

//import org.json.simple.JSONArray;
//import org.json.simple.JSONObject;
//import org.json.simple.parser.JSONParser;

import java.util.*;

public class WriteCommittedStream {

  /**
   * Streams the supplied JSON row batches into a BigQuery table via the
   * Storage Write API default stream.
   *
   * <p>Each element of {@code streamHashSet} is an {@code org.json.JSONArray}
   * of JSON objects whose keys must match the destination table's schema
   * (the schema is fetched from BigQuery at runtime).
   *
   * @param projectId     GCP project that owns the destination table
   * @param datasetName   BigQuery dataset name
   * @param tableName     BigQuery table name
   * @param streamHashSet batches of rows to append; empty/null batches are skipped
   * @return {@code true} if every batch was appended successfully,
   *         {@code false} if the table is missing or an append failed
   * @throws DescriptorValidationException if the table schema cannot be mapped
   *                                       to a protobuf descriptor
   * @throws InterruptedException          if the writer is interrupted
   * @throws IOException                   if the stream writer cannot be created
   */
  public static boolean writeCommittedStreamToBQ(String projectId, String datasetName, String tableName,
      HashSet<JSONArray> streamHashSet) throws DescriptorValidationException, InterruptedException, IOException {

    System.out.println("projectId:" + projectId);
    System.out.println("datasetName:" + datasetName);
    System.out.println("tableName:" + tableName);

    // Resolve the destination table and its schema up front; getTable()
    // returns null (rather than throwing) when the table does not exist,
    // which previously surfaced as an NPE on getDefinition().
    BigQuery bigquery = BigQueryOptions.getDefaultInstance().getService();
    Table table = bigquery.getTable(datasetName, tableName);
    if (table == null) {
      System.out.println("Table not found: " + datasetName + "." + tableName);
      return false;
    }

    TableName parentTable = TableName.of(projectId, datasetName, tableName);
    Schema schema = table.getDefinition().getSchema();

    System.out.println("Schema:" + schema.toString());
    System.out.println("Table:" + parentTable.toString());

    // JsonStreamWriter manages its own BigQueryWriteClient internally, so the
    // previously-created (and unused) client has been removed.
    try (JsonStreamWriter writer = JsonStreamWriter.newBuilder(parentTable.toString(), schema)
        .createDefaultStream()
        .build()) {

      // Append every batch supplied by the caller (replaces the hard-coded
      // debug record that was previously written regardless of input).
      for (JSONArray jsonArr : streamHashSet) {
        if (jsonArr == null || jsonArr.length() == 0) {
          continue; // nothing to append for an empty batch
        }
        System.out.println("Appending batch:" + jsonArr.toString());

        // Using the default stream: no offset is passed, so BigQuery does not
        // deduplicate rows. Pass an offset here if exactly-once is required.
        ApiFuture<AppendRowsResponse> future = writer.append(jsonArr);
        AppendRowsResponse response = future.get();
        System.out.println("Appended records successfully." + response.toString());
      }

      return true;
    } catch (ExecutionException e) {
      // If the wrapped exception is a StatusRuntimeException, check the state
      // of the operation. If the state is INTERNAL, CANCELLED, or ABORTED, the
      // append can be retried. See:
      // https://grpc.github.io/grpc-java/javadoc/io/grpc/StatusRuntimeException.html
      System.out.println("Failed to append records. \n");
      e.printStackTrace();
      return false;
    }
  }
}