
Commit d3e18e2

AWS SDK for Ruby authored and committed
Updated service API models for release.

1 parent 14d19c7 · commit d3e18e2

File tree: 4 files changed, +68 −28 lines

CHANGELOG.md
Lines changed: 2 additions & 0 deletions

@@ -1,6 +1,8 @@
 Unreleased Changes
 ------------------
 
+* Feature - Aws::IoTAnalytics - Updated the API, and documentation for AWS IoT Analytics.
+
 2.11.198 (2019-01-03)
 ------------------

aws-sdk-core/apis/iotanalytics/2017-11-27/api-2.json
Lines changed: 10 additions & 0 deletions

@@ -1404,6 +1404,16 @@
         "shape":"MaxResults",
         "location":"querystring",
         "locationName":"maxResults"
+      },
+      "scheduledOnOrAfter":{
+        "shape":"Timestamp",
+        "location":"querystring",
+        "locationName":"scheduledOnOrAfter"
+      },
+      "scheduledBefore":{
+        "shape":"Timestamp",
+        "location":"querystring",
+        "locationName":"scheduledBefore"
       }
     }
   },
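The two new querystring members land on the ListDatasetContents request. A minimal sketch of how they would surface through the Ruby SDK, assuming the usual snake_case mapping the SDK generates from each member's locationName; the region, data set name, and time window are placeholders:

    require 'aws-sdk-core'
    require 'time'

    iot = Aws::IoTAnalytics::Client.new(region: 'us-east-1')  # placeholder region

    # Limit results to data set contents whose creation was scheduled on or
    # after Jan 1 but before Jan 7 (maps to the scheduledOnOrAfter and
    # scheduledBefore querystring parameters added above).
    resp = iot.list_dataset_contents(
      dataset_name: 'my_dataset',                             # hypothetical data set
      scheduled_on_or_after: Time.parse('2019-01-01T00:00:00Z'),
      scheduled_before: Time.parse('2019-01-07T00:00:00Z')
    )
    resp.dataset_content_summaries.each { |s| puts s.version }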

aws-sdk-core/apis/iotanalytics/2017-11-27/docs-2.json
Lines changed: 30 additions & 28 deletions

@@ -6,7 +6,7 @@
     "CancelPipelineReprocessing": "<p>Cancels the reprocessing of data through the pipeline.</p>",
     "CreateChannel": "<p>Creates a channel. A channel collects data from an MQTT topic and archives the raw, unprocessed messages before publishing the data to a pipeline.</p>",
     "CreateDataset": "<p>Creates a data set. A data set stores data retrieved from a data store by applying a \"queryAction\" (a SQL query) or a \"containerAction\" (executing a containerized application). This operation creates the skeleton of a data set. The data set can be populated manually by calling \"CreateDatasetContent\" or automatically according to a \"trigger\" you specify.</p>",
-    "CreateDatasetContent": "<p>Creates the content of a data set by applying a SQL action.</p>",
+    "CreateDatasetContent": "<p>Creates the content of a data set by applying a \"queryAction\" (a SQL query) or a \"containerAction\" (executing a containerized application).</p>",
     "CreateDatastore": "<p>Creates a data store, which is a repository for messages.</p>",
     "CreatePipeline": "<p>Creates a pipeline. A pipeline consumes messages from one or more channels and allows you to process the messages before storing them in a data store.</p>",
     "DeleteChannel": "<p>Deletes the specified channel.</p>",
@@ -84,7 +84,7 @@
         "DeviceRegistryEnrichActivity$thingName": "<p>The name of the IoT device whose registry information is added to the message.</p>",
         "DeviceShadowEnrichActivity$attribute": "<p>The name of the attribute that is added to the message.</p>",
         "DeviceShadowEnrichActivity$thingName": "<p>The name of the IoT device whose shadow information is added to the message.</p>",
-        "MathActivity$attribute": "<p>The name of the attribute that will contain the result of the math operation.</p>"
+        "MathActivity$attribute": "<p>The name of the attribute that contains the result of the math operation.</p>"
       }
     },
     "AttributeNameMapping": {
@@ -260,7 +260,7 @@
       }
     },
     "DatasetAction": {
-      "base": "<p>A \"DatasetAction\" object specifying the query that creates the data set content.</p>",
+      "base": "<p>A \"DatasetAction\" object that specifies how data set contents are automatically created.</p>",
      "refs": {
        "DatasetActions$member": null
      }
@@ -306,23 +306,23 @@
       }
     },
     "DatasetContentDeliveryDestination": {
-      "base": null,
+      "base": "<p>The destination to which data set contents are delivered.</p>",
       "refs": {
-        "DatasetContentDeliveryRule$destination": null
+        "DatasetContentDeliveryRule$destination": "<p>The destination to which data set contents are delivered.</p>"
       }
     },
     "DatasetContentDeliveryRule": {
-      "base": null,
+      "base": "<p>When data set contents are created, they are delivered to the destination specified here.</p>",
       "refs": {
         "DatasetContentDeliveryRules$member": null
       }
     },
     "DatasetContentDeliveryRules": {
       "base": null,
       "refs": {
-        "CreateDatasetRequest$contentDeliveryRules": null,
-        "Dataset$contentDeliveryRules": null,
-        "UpdateDatasetRequest$contentDeliveryRules": null
+        "CreateDatasetRequest$contentDeliveryRules": "<p>When data set contents are created, they are delivered to the destinations specified here.</p>",
+        "Dataset$contentDeliveryRules": "<p>When data set contents are created, they are delivered to the destinations specified here.</p>",
+        "UpdateDatasetRequest$contentDeliveryRules": "<p>When data set contents are created, they are delivered to the destinations specified here.</p>"
       }
     },
     "DatasetContentState": {
@@ -360,7 +360,7 @@
       }
     },
     "DatasetContentVersionValue": {
-      "base": "<p>The data set whose latest contents will be used as input to the notebook or application.</p>",
+      "base": "<p>The data set whose latest contents are used as input to the notebook or application.</p>",
       "refs": {
         "Variable$datasetContentVersionValue": "<p>The value of the variable as a structure that specifies a data set content version.</p>"
       }
@@ -384,14 +384,14 @@
         "CreateDatasetRequest$datasetName": "<p>The name of the data set.</p>",
         "CreateDatasetResponse$datasetName": "<p>The name of the data set.</p>",
         "Dataset$name": "<p>The name of the data set.</p>",
-        "DatasetContentVersionValue$datasetName": "<p>The name of the data set whose latest contents will be used as input to the notebook or application.</p>",
+        "DatasetContentVersionValue$datasetName": "<p>The name of the data set whose latest contents are used as input to the notebook or application.</p>",
         "DatasetSummary$datasetName": "<p>The name of the data set.</p>",
         "DeleteDatasetContentRequest$datasetName": "<p>The name of the data set whose content is deleted.</p>",
         "DeleteDatasetRequest$datasetName": "<p>The name of the data set to delete.</p>",
         "DescribeDatasetRequest$datasetName": "<p>The name of the data set whose information is retrieved.</p>",
         "GetDatasetContentRequest$datasetName": "<p>The name of the data set whose contents are retrieved.</p>",
         "ListDatasetContentsRequest$datasetName": "<p>The name of the data set whose contents information you want to list.</p>",
-        "TriggeringDataset$name": "<p>The name of the data set whose content generation will trigger the new data set content generation.</p>",
+        "TriggeringDataset$name": "<p>The name of the data set whose content generation triggers the new data set content generation.</p>",
         "UpdateDatasetRequest$datasetName": "<p>The name of the data set to update.</p>"
       }
     },
@@ -512,9 +512,9 @@
       }
     },
     "DeltaTime": {
-      "base": "<p>When you create data set contents using message data from a specified time frame, some message data may still be \"in flight\" when processing begins, and so will not arrive in time to be processed. Use this field to make allowances for the \"in flight\" time of your message data, so that data not processed from the previous time frame will be included with the next time frame. Without this, missed message data would be excluded from processing during the next time frame as well, because its timestamp places it within the previous time frame.</p>",
+      "base": "<p>Used to limit data to that which has arrived since the last execution of the action.</p>",
       "refs": {
-        "QueryFilter$deltaTime": "<p>Used to limit data to that which has arrived since the last execution of the action. When you create data set contents using message data from a specified time frame, some message data may still be \"in flight\" when processing begins, and so will not arrive in time to be processed. Use this field to make allowances for the \"in flight\" time of you message data, so that data not processed from a previous time frame will be included with the next time frame. Without this, missed message data would be excluded from processing during the next time frame as well, because its timestamp places it within the previous time frame.</p>"
+        "QueryFilter$deltaTime": "<p>Used to limit data to that which has arrived since the last execution of the action.</p>"
       }
     },
     "DescribeChannelRequest": {
@@ -595,7 +595,7 @@
     "EntryName": {
       "base": null,
       "refs": {
-        "DatasetContentDeliveryRule$entryName": null,
+        "DatasetContentDeliveryRule$entryName": "<p>The name of the data set content delivery rules entry.</p>",
         "DatasetEntry$entryName": "<p>The name of the data set item.</p>"
       }
     },
@@ -664,15 +664,15 @@
       }
     },
     "IotEventsDestinationConfiguration": {
-      "base": null,
+      "base": "<p>Configuration information for delivery of data set contents to AWS IoT Events.</p>",
       "refs": {
-        "DatasetContentDeliveryDestination$iotEventsDestinationConfiguration": null
+        "DatasetContentDeliveryDestination$iotEventsDestinationConfiguration": "<p>Configuration information for delivery of data set contents to AWS IoT Events.</p>"
       }
     },
     "IotEventsInputName": {
       "base": null,
       "refs": {
-        "IotEventsDestinationConfiguration$inputName": null
+        "IotEventsDestinationConfiguration$inputName": "<p>The name of the AWS IoT Events input to which data set contents are delivered.</p>"
       }
     },
     "LambdaActivity": {
@@ -836,7 +836,7 @@
     "Messages": {
       "base": null,
       "refs": {
-        "BatchPutMessageRequest$messages": "<p>The list of messages to be sent. Each message has format: '{ \"messageId\": \"string\", \"payload\": \"string\"}'.</p>"
+        "BatchPutMessageRequest$messages": "<p>The list of messages to be sent. Each message has format: '{ \"messageId\": \"string\", \"payload\": \"string\"}'.</p> <p>Note that the field names of message payloads (data) that you send to AWS IoT Analytics:</p> <ul> <li> <p>Must contain only alphanumeric characters and underscores (_); no other special characters are allowed.</p> </li> <li> <p>Must begin with an alphabetic character or single underscore (_).</p> </li> <li> <p>Cannot contain hyphens (-).</p> </li> <li> <p>In regular expression terms: \"^[A-Za-z_]([A-Za-z0-9]*|[A-Za-z0-9][A-Za-z0-9_]*)$\". </p> </li> <li> <p>Cannot be greater than 255 characters.</p> </li> <li> <p>Are case-insensitive. (Fields named \"foo\" and \"FOO\" in the same payload are considered duplicates.)</p> </li> </ul> <p>For example, {\"temp_01\": 29} or {\"_temp_01\": 29} are valid, but {\"temp-01\": 29}, {\"01_temp\": 29} or {\"__temp_01\": 29} are invalid in message payloads. </p>"
       }
     },
     "NextToken": {
@@ -857,7 +857,7 @@
     "OffsetSeconds": {
       "base": null,
       "refs": {
-        "DeltaTime$offsetSeconds": "<p>The number of seconds of estimated \"in flight\" lag time of message data.</p>"
+        "DeltaTime$offsetSeconds": "<p>The number of seconds of estimated \"in flight\" lag time of message data. When you create data set contents using message data from a specified time frame, some message data may still be \"in flight\" when processing begins, and so will not arrive in time to be processed. Use this field to make allowances for the \"in flight\" time of your message data, so that data not processed from a previous time frame will be included with the next time frame. Without this, missed message data would be excluded from processing during the next time frame as well, because its timestamp places it within the previous time frame.</p>"
       }
     },
     "OutputFileName": {
@@ -867,7 +867,7 @@
       }
     },
     "OutputFileUriValue": {
-      "base": "<p>The URI of the location where data set contents are stored, usually the URI of a file in an S3 bucket.</p>",
+      "base": "<p>The value of the variable as a structure that specifies an output file URI.</p>",
       "refs": {
         "Variable$outputFileUriValue": "<p>The value of the variable as a structure that specifies an output file URI.</p>"
       }
@@ -997,8 +997,8 @@
       "base": null,
       "refs": {
         "ListTagsForResourceRequest$resourceArn": "<p>The ARN of the resource whose tags you want to list.</p>",
-        "TagResourceRequest$resourceArn": "<p>The ARN of the resource whose tags will be modified.</p>",
-        "UntagResourceRequest$resourceArn": "<p>The ARN of the resource whose tags will be removed.</p>"
+        "TagResourceRequest$resourceArn": "<p>The ARN of the resource whose tags you want to modify.</p>",
+        "UntagResourceRequest$resourceArn": "<p>The ARN of the resource whose tags you want to remove.</p>"
       }
     },
     "ResourceConfiguration": {
@@ -1041,7 +1041,7 @@
         "ContainerDatasetAction$executionRoleArn": "<p>The ARN of the role which gives permission to the system to access needed resources in order to run the \"containerAction\". This includes, at minimum, permission to retrieve the data set contents which are the input to the containerized application.</p>",
         "DeviceRegistryEnrichActivity$roleArn": "<p>The ARN of the role that allows access to the device's registry information.</p>",
         "DeviceShadowEnrichActivity$roleArn": "<p>The ARN of the role that allows access to the device's shadow.</p>",
-        "IotEventsDestinationConfiguration$roleArn": null,
+        "IotEventsDestinationConfiguration$roleArn": "<p>The ARN of the role which grants AWS IoT Analytics permission to deliver data set contents to an AWS IoT Events input.</p>",
         "LoggingOptions$roleArn": "<p>The ARN of the role that grants permission to AWS IoT Analytics to perform logging.</p>"
       }
     },
@@ -1103,7 +1103,7 @@
     "SqlQueryDatasetAction": {
       "base": "<p>The SQL query to modify the message.</p>",
       "refs": {
-        "DatasetAction$queryAction": "<p>An \"SqlQueryDatasetAction\" object that contains the SQL query to modify the message.</p>"
+        "DatasetAction$queryAction": "<p>An \"SqlQueryDatasetAction\" object that uses an SQL query to automatically create data set contents.</p>"
       }
     },
     "StartPipelineReprocessingRequest": {
@@ -1145,7 +1145,7 @@
     "TagKeyList": {
       "base": null,
       "refs": {
-        "UntagResourceRequest$tagKeys": "<p>The keys of those tags which will be removed.</p>"
+        "UntagResourceRequest$tagKeys": "<p>The keys of those tags which you want to remove.</p>"
       }
     },
     "TagList": {
@@ -1205,6 +1205,8 @@
         "DatastoreSummary$lastUpdateTime": "<p>The last time the data store was updated.</p>",
         "EstimatedResourceSize$estimatedOn": "<p>The time when the estimate of the size of the resource was made.</p>",
         "GetDatasetContentResponse$timestamp": "<p>The time when the request was made.</p>",
+        "ListDatasetContentsRequest$scheduledOnOrAfter": "<p>A filter to limit results to those data set contents whose creation is scheduled on or after the given time. See the field <code>triggers.schedule</code> in the CreateDataset request. (timestamp)</p>",
+        "ListDatasetContentsRequest$scheduledBefore": "<p>A filter to limit results to those data set contents whose creation is scheduled before the given time. See the field <code>triggers.schedule</code> in the CreateDataset request. (timestamp)</p>",
         "Pipeline$creationTime": "<p>When the pipeline was created.</p>",
         "Pipeline$lastUpdateTime": "<p>The last time the pipeline was updated.</p>",
         "PipelineSummary$creationTime": "<p>When the pipeline was created.</p>",
@@ -1213,9 +1215,9 @@
       }
     },
     "TriggeringDataset": {
-      "base": "<p>Information about the data set whose content generation will trigger the new data set content generation.</p>",
+      "base": "<p>Information about the data set whose content generation triggers the new data set content generation.</p>",
       "refs": {
-        "DatasetTrigger$dataset": "<p>The data set whose content creation will trigger the creation of this data set's contents.</p>"
+        "DatasetTrigger$dataset": "<p>The data set whose content creation triggers the creation of this data set's contents.</p>"
       }
     },
     "UnlimitedRetentionPeriod": {

aws-sdk-core/endpoints.json
Lines changed: 26 additions & 0 deletions

@@ -1111,6 +1111,18 @@
         "us-west-2" : { }
       }
     },
+    "fsx" : {
+      "defaults" : {
+        "protocols" : [ "https" ],
+        "sslCommonName" : "fsx.us-west-2.amazonaws.com"
+      },
+      "endpoints" : {
+        "eu-west-1" : { },
+        "us-east-1" : { },
+        "us-east-2" : { },
+        "us-west-2" : { }
+      }
+    },
     "gamelift" : {
       "endpoints" : {
         "ap-northeast-1" : { },
@@ -1762,6 +1774,20 @@
         "us-east-1" : { }
       }
     },
+    "route53resolver" : {
+      "defaults" : {
+        "protocols" : [ "http", "https" ]
+      },
+      "endpoints" : {
+        "ap-northeast-1" : { },
+        "ap-southeast-1" : { },
+        "ap-southeast-2" : { },
+        "eu-west-1" : { },
+        "us-east-1" : { },
+        "us-east-2" : { },
+        "us-west-2" : { }
+      }
+    },
     "runtime.lex" : {
       "defaults" : {
         "credentialScope" : {
