Azure Data Factory Explained

An output dataset slice depends on the input dataset slices that fall within its time window being ready.

The activity that produces it runs only when every input slice inside that window has completed.
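
How large each slice is comes from the dataset's Availability settings. The sketch below uses the classic v1 .NET SDK (Microsoft.Azure.Management.DataFactories) to register one of the input datasets as an external, hourly dataset; the dataset name matches the pipeline further down, while the linked-service name, table name and dataset type (SqlServerTableDataset) are assumptions for illustration only:

// Minimal sketch, ADF v1 .NET SDK. client, resourceGroupName and dataFactoryName
// are assumed to be the same objects used in the pipeline call further down.
client.Datasets.CreateOrUpdate(resourceGroupName, dataFactoryName,
    new DatasetCreateOrUpdateParameters()
    {
        Dataset = new Dataset()
        {
            Name = "PatientOnPrem",
            Properties = new DatasetProperties()
            {
                LinkedServiceName = "OnPremSqlLinkedService",   // assumed linked service name
                TypeProperties = new SqlServerTableDataset()    // pick the dataset type that matches your store
                {
                    TableName = "Patient"                       // placeholder table name
                },
                External = true,               // slices are produced outside ADF; ADF only checks readiness
                Availability = new Availability()
                {
                    Frequency = SchedulePeriod.Hour,            // one slice per hour
                    Interval = 1                                // slice size = the "time window" above
                }
            }
        }
    });

The pipeline is then created with Pipelines.CreateOrUpdate; its copy activity lists the two on-premises datasets as inputs and the ODS dataset as its output: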


client.Pipelines.CreateOrUpdate(resourceGroupName, dataFactoryName,
	new PipelineCreateOrUpdateParameters()
	{
	    Pipeline = new Pipeline()
	    {
	        Name = pip.Pipeline.Name, // pip: a Pipeline object fetched/built earlier (not shown in this snippet)
	        Properties = new PipelineProperties()
	        {
	            //Description = "Description",

	            // Initial value for pipeline's active period. With this, you won't need to set slice status
	            //Start = PipelineActivePeriodStartTime,
	            //End = PipelineActivePeriodEndTime,

	            Activities = new List<Activity>()
	            {
	                new Activity()
	                {
	                    Name = "PatientPayerODSAct",
	                    Inputs = new List<ActivityInput>()
	                    {
	                        new ActivityInput() {
	                            Name = "PatientOnPrem"
	                        },
	                        new ActivityInput() {
	                            Name = "PatientPayerOnPrem"
	                        }
	                    },
	                    Outputs = new List<ActivityOutput>()
	                    {
	                        new ActivityOutput()
	                        {
	                            Name = "PatientPayerODS"
	                        }
	                    },
	                    TypeProperties = new CopyActivity()
	                    {
	                        Source = s, // s: copy source built earlier, e.g. a SqlSource for the on-premises tables (not shown here)
	                        Sink = new SqlSink()
	                        {
	                            WriteBatchSize = 0,
	                            WriteBatchTimeout = TimeSpan.FromMinutes(0)
	                        }
	                    }
	                }
	            }
	        }
	    }
	}
);
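
If the commented-out Start and End above are filled in, they give the pipeline an active period; the service then schedules one activity run per output-slice interval inside that window, so individual slice statuses never have to be set by hand (which is what the comment in the snippet alludes to). A minimal sketch with placeholder dates, assuming UTC timestamps:

// Placeholder active period, one week long. The parsed values would be assigned to
// PipelineProperties.Start / End in the snippet above instead of leaving them commented out.
string PipelineActivePeriodStartTime = "2017-09-01T00:00:00Z";
string PipelineActivePeriodEndTime = "2017-09-08T00:00:00Z";

// Inside PipelineProperties:
//     Start = DateTime.Parse(PipelineActivePeriodStartTime),
//     End = DateTime.Parse(PipelineActivePeriodEndTime),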