C# 如何将.net控制台应用程序客户端连接到azure数据工厂
我已创建了 Azure Data Factory 管道,如下所示:
Azure SQL Database --> CSV Blob storage --> Azure Hyperscale Citus.
但是这个管道是在azure datafactory UI上创建的
现在我想通过一个控制台应用程序创建这个管道
1.) 但我无法确定如何对客户端进行身份验证并连接到数据工厂以执行管道。
2.) 这是迁移 SQL DB --> Hyperscale 的最佳方法吗?

回答:如果您想使用 C# 控制台应用程序创建 Azure 数据工厂管道,可以使用以下包
Microsoft.Azure.Management.DataFactory
。有关更多详细信息,请参阅
例如(我创建一个管道,将blob从一个容器复制到另一个容器)
将"参与者"(Contributor) 角色分配给该服务主体 (SP)。如果这对您有帮助,请接受此答案,它可以帮助更多的人。
# Sign in and select the subscription the data factory lives in.
# (Fixed typo: the original snippet said "z login".)
az login
az account set --subscription "<your subscription id>"
# the sp will have Azure Contributor role
az ad sp create-for-rbac -n "readMetric"
# NuGet packages required by the C# sample below (run in the Package Manager Console).
Install-Package Microsoft.Azure.Management.DataFactory
Install-Package Microsoft.Azure.Management.ResourceManager -IncludePrerelease
# NOTE(review): Microsoft.IdentityModel.Clients.ActiveDirectory (ADAL) is deprecated;
# Microsoft recommends MSAL (Microsoft.Identity.Client) for new code — confirm before reuse.
Install-Package Microsoft.IdentityModel.Clients.ActiveDirectory
// ---- Authenticate against Azure AD and create a Data Factory management client ----
// The IDs/keys below come from the service principal created with
// "az ad sp create-for-rbac" and from the target data factory's resource group.
string tenantID = "<your tenant ID>";
string applicationId = "<your application ID>";
string authenticationKey = "<your authentication key for the application>";
string subscriptionId = "<your subscription ID where the data factory resides>";
string resourceGroup = "<your resource group where the data factory resides>";
string region = "<the location of your resource group>";
// Fixed: the original placeholder string was missing its closing ">".
string dataFactoryName = "<specify the name of data factory>";

// Acquire an AAD token for the ARM endpoint. The surrounding code is async
// (later statements use await), so await here instead of blocking on .Result,
// which risks deadlocks and ties up a thread-pool thread.
var context = new AuthenticationContext("https://login.windows.net/" + tenantID);
ClientCredential cc = new ClientCredential(applicationId, authenticationKey);
AuthenticationResult result = await context.AcquireTokenAsync(
    "https://management.azure.com/", cc);

// Wrap the bearer token and scope the management client to the subscription.
ServiceClientCredentials cred = new TokenCredentials(result.AccessToken);
var client = new DataFactoryManagementClient(cred) {
    SubscriptionId = subscriptionId };
// ---- Register an Azure Storage linked service on the data factory ----
// The linked service stores the connection string the copy activity will use.
string storageAccount = "<your storage account name to copy data>";
string storageKey = "<your storage account key>";
string storageLinkedServiceName = "AzureStorageLinkedService";

var connectionString = new SecureString(
    $"DefaultEndpointsProtocol=https;AccountName={storageAccount};AccountKey={storageKey}");
var storageLinkedService = new LinkedServiceResource(
    new AzureStorageLinkedService { ConnectionString = connectionString });

await client.LinkedServices.CreateOrUpdateAsync(
    resourceGroup, dataFactoryName, storageLinkedServiceName, storageLinkedService);
// ---- Define a parameterized blob dataset ----
// One dataset serves as both copy source and sink; the "path" parameter
// selects the actual container/folder at run time via @{dataset().path}.
string blobDatasetName = "BlobDataset";

var datasetDefinition = new AzureBlobDataset
{
    LinkedServiceName = new LinkedServiceReference { ReferenceName = storageLinkedServiceName },
    FolderPath = new Expression { Value = "@{dataset().path}" },
    Parameters = new Dictionary<string, ParameterSpecification>
    {
        ["path"] = new ParameterSpecification { Type = ParameterType.String }
    }
};

await client.Datasets.CreateOrUpdateAsync(
    resourceGroup, dataFactoryName, blobDatasetName, new DatasetResource(datasetDefinition));
// ---- Define and publish the pipeline ----
// A single copy activity moves blobs from inputPath to outputPath; both paths
// are supplied as pipeline parameters when a run is triggered.
string pipelineName = "mypipeline";

var copyActivity = new CopyActivity
{
    Name = "CopyFromBlobToBlob",
    Inputs = new List<DatasetReference>
    {
        new DatasetReference
        {
            ReferenceName = blobDatasetName,
            Parameters = new Dictionary<string, object>
            {
                ["path"] = "@pipeline().parameters.inputPath"
            }
        }
    },
    Outputs = new List<DatasetReference>
    {
        new DatasetReference
        {
            ReferenceName = blobDatasetName,
            Parameters = new Dictionary<string, object>
            {
                ["path"] = "@pipeline().parameters.outputPath"
            }
        }
    },
    Source = new BlobSource(),
    Sink = new BlobSink()
};

var pipeline = new PipelineResource
{
    Parameters = new Dictionary<string, ParameterSpecification>
    {
        ["inputPath"] = new ParameterSpecification { Type = ParameterType.String },
        ["outputPath"] = new ParameterSpecification { Type = ParameterType.String }
    },
    Activities = new List<Activity> { copyActivity }
};

await client.Pipelines.CreateOrUpdateAsync(resourceGroup, dataFactoryName, pipelineName, pipeline);
// ---- Trigger a pipeline run ----
// Source and destination blob paths are passed as run-time parameters.
string inputBlobPath =
    "<path to existing blob(s) to copy data from, e.g. containername/inputdir>";
string outputBlobPath =
    "<the blob path to copy data to, e.g. containername/outputdir>";

var parameters = new Dictionary<string, object>
{
    ["inputPath"] = inputBlobPath,
    ["outputPath"] = outputBlobPath
};

var runResponse = await client.Pipelines.CreateRunWithHttpMessagesAsync(
    resourceGroup, dataFactoryName, pipelineName, parameters: parameters);
// ---- Poll the pipeline run until it leaves the InProgress/Queued states ----
Console.WriteLine("Checking pipeline run status...");
PipelineRun pipelineRun;
while (true)
{
    // Use the async API and a non-blocking delay: the surrounding code is
    // already async, and the original sync Get + Thread.Sleep needlessly
    // blocked a thread-pool thread for 15 s per iteration.
    pipelineRun = await client.PipelineRuns.GetAsync(
        resourceGroup, dataFactoryName, runResponse.Body.RunId);
    Console.WriteLine("Status: " + pipelineRun.Status);
    if (pipelineRun.Status != "InProgress" && pipelineRun.Status != "Queued")
        break;
    await System.Threading.Tasks.Task.Delay(15000);
}
// ---- Inspect the copy activity run ----
// Query activity runs in a +/-10-minute window around now and print either
// the activity output (on success) or its error details.
Console.WriteLine("Checking copy activity run details...");

var filter = new RunFilterParameters(
    DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow.AddMinutes(10));
var activityRuns = await client.ActivityRuns.QueryByPipelineRunAsync(
    resourceGroup, dataFactoryName, runResponse.Body.RunId, filter);

var firstRun = activityRuns.Value.First();
if (pipelineRun.Status == "Succeeded")
{
    Console.WriteLine(firstRun.Output);
}
else
{
    Console.WriteLine(firstRun.Error);
}

Console.WriteLine("\nPress any key to exit...");
Console.ReadKey();