@@ -270,6 +270,9 @@ class DataPipeline:
         enabled: bool
         """Whether the data pipeline is enabled"""
 
+        data_source_type: TabularDataSourceType.ValueType
+        """The type of data source for the data pipeline"""
+
         @classmethod
         def from_proto(cls, data_pipeline: ProtoDataPipeline) -> Self:
             return cls(
@@ -281,6 +284,7 @@ def from_proto(cls, data_pipeline: ProtoDataPipeline) -> Self:
                 created_on=data_pipeline.created_on.ToDatetime(),
                 updated_at=data_pipeline.updated_at.ToDatetime(),
                 enabled=data_pipeline.enabled,
+                data_source_type=data_pipeline.data_source_type,
             )
 
     @dataclass
@@ -1883,7 +1887,14 @@ async def list_data_pipelines(self, organization_id: str) -> List[DataPipeline]:
         response: ListDataPipelinesResponse = await self._data_pipelines_client.ListDataPipelines(request, metadata=self._metadata)
         return [DataClient.DataPipeline.from_proto(pipeline) for pipeline in response.data_pipelines]
 
-    async def create_data_pipeline(self, organization_id: str, name: str, mql_binary: List[Dict[str, Any]], schedule: str) -> str:
+    async def create_data_pipeline(
+        self,
+        organization_id: str,
+        name: str,
+        mql_binary: List[Dict[str, Any]],
+        schedule: str,
+        data_source_type: TabularDataSourceType.ValueType = TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD,
+    ) -> str:
         """Create a new data pipeline.
 
         ::
@@ -1892,7 +1903,8 @@ async def create_data_pipeline(self, organization_id: str, name: str, mql_binary
                 organization_id="<YOUR-ORGANIZATION-ID>",
                 name="<YOUR-PIPELINE-NAME>",
                 mql_binary=[<YOUR-MQL-PIPELINE-AGGREGATION>],
-                schedule="<YOUR-SCHEDULE>"
+                schedule="<YOUR-SCHEDULE>",
+                data_source_type=TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD,
             )
 
         Args:
@@ -1902,12 +1914,14 @@ async def create_data_pipeline(self, organization_id: str, name: str, mql_binary
             mql_binary (List[Dict[str, Any]]): The MQL pipeline to run, as a list of MongoDB aggregation pipeline stages.
             schedule (str): A cron expression representing the expected execution schedule in UTC (note this also
                 defines the input time window; an hourly schedule would process 1 hour of data at a time).
+            data_source_type (TabularDataSourceType): The type of data source to use for the pipeline.
+                Defaults to TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD.
 
         Returns:
             str: The ID of the newly created pipeline.
         """
         binary: List[bytes] = [bson.encode(query) for query in mql_binary]
-        request = CreateDataPipelineRequest(organization_id=organization_id, name=name, mql_binary=binary, schedule=schedule)
+        request = CreateDataPipelineRequest(organization_id=organization_id, name=name, mql_binary=binary, schedule=schedule, data_source_type=data_source_type)
         response: CreateDataPipelineResponse = await self._data_pipelines_client.CreateDataPipeline(request, metadata=self._metadata)
         return response.id
 
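For reviewers, a minimal usage sketch of the updated method follows. It assumes an already-connected data_client and that TabularDataSourceType is importable from viam.proto.app.data; both are illustrative assumptions rather than part of this diff. Because the new parameter defaults to TABULAR_DATA_SOURCE_TYPE_STANDARD, existing callers that omit it keep their current behavior.

# Hypothetical usage sketch; the import path below and the pre-built data_client
# are assumptions for illustration, not part of this change.
from viam.proto.app.data import TabularDataSourceType  # assumed import path


async def create_hourly_pipeline(data_client, organization_id: str) -> str:
    # Each dict is one MongoDB aggregation stage; create_data_pipeline bson-encodes them.
    return await data_client.create_data_pipeline(
        organization_id=organization_id,
        name="hourly-count-by-location",
        mql_binary=[{"$group": {"_id": "$location_id", "count": {"$sum": 1}}}],
        schedule="0 * * * *",  # hourly: each run processes one hour of data
        # New in this change; omitting it falls back to the STANDARD default.
        data_source_type=TabularDataSourceType.TABULAR_DATA_SOURCE_TYPE_STANDARD,
    )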