@@ -164,6 +164,14 @@ func getMonitorBaseSchema() map[string]*schema.Schema {
 					Schema: sloBurnRateTriggerConditionSchema,
 				},
 			},
+			logsAnomalyConditionFieldName: {
+				Type:     schema.TypeList,
+				MaxItems: 1,
+				Optional: true,
+				Elem: &schema.Resource{
+					Schema: logsAnomalyTriggerConditionSchema,
+				},
+			},
 		},
 	},
 },
@@ -353,7 +361,6 @@ func getMonitorSchema() map[string]*schema.Schema {

 		"status": {
 			Type:     schema.TypeList,
-			Optional: true,
 			Computed: true,
 			Elem: &schema.Schema{
 				Type: schema.TypeString,
@@ -438,6 +445,7 @@ var (
 		"trigger_conditions.0.metrics_missing_data_condition",
 		"trigger_conditions.0.slo_sli_condition",
 		"trigger_conditions.0.slo_burn_rate_condition",
+		fmt.Sprintf("trigger_conditions.0.%s", logsAnomalyConditionFieldName),
 	}
 	logStaticConditionCriticalOrWarningAtleastOneKeys = []string{
 		"trigger_conditions.0.logs_static_condition.0.warning",
@@ -611,6 +619,38 @@ var sloBurnRateTriggerConditionSchema = map[string]*schema.Schema{
 	}, sloBurnRateConditionCriticalOrWarningAtleastOneKeys),
 }

+var logsAnomalyTriggerConditionSchema = map[string]*schema.Schema{
+	"field": {
+		Type:     schema.TypeString,
+		Required: true,
+	},
+	"direction": {
+		Type:         schema.TypeString,
+		Optional:     true,
+		Default:      "Both",
+		ValidateFunc: validation.StringInSlice([]string{"Both", "Up", "Down"}, false),
+	},
+	"anomaly_detector_type": {
+		Type:         schema.TypeString,
+		Required:     true,
+		ValidateFunc: validation.StringInSlice([]string{"Cluster"}, false),
+	},
+	"critical": nested(false, schemaMap{
+		"sensitivity": {
+			Type:         schema.TypeFloat,
+			Optional:     true,
+			Default:      0.5,
+			ValidateFunc: validation.FloatBetween(0.1, 1.0),
+		},
+		"min_anomaly_count": {
+			Type:     schema.TypeInt,
+			Optional: true,
+			Default:  1,
+		},
+		"time_range": &timeRangeWithAllowedValuesSchema,
+	}),
+}
+
 func getBurnRateSchema(triggerType string) *schema.Schema {
 	burnRateThresholdConflict := fmt.Sprintf("trigger_conditions.0.slo_burn_rate_condition.0.%s.0.burn_rate_threshold", triggerType)
 	timeRangeConflict := fmt.Sprintf("trigger_conditions.0.slo_burn_rate_condition.0.%s.0.time_range", triggerType)
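For orientation, this is roughly how the new `logs_anomaly_condition` block could be used from a monitor configuration. The snippet below is a hypothetical, acceptance-test style sketch and is not part of this patch: the surrounding `sumologic_monitor` fields follow the existing resource, the nested names and defaults mirror `logsAnomalyTriggerConditionSchema` above, and the query and field values are illustrative only.

```go
package main

import "fmt"

// Hypothetical example configuration (not from this change) exercising the new
// logs_anomaly_condition trigger; the nested field names follow the schema above.
const exampleLogsAnomalyMonitorConfig = `
resource "sumologic_monitor" "logs_anomaly" {
  name         = "Logs Anomaly Monitor"
  type         = "MonitorsLibraryMonitor"
  monitor_type = "Logs"

  queries {
    row_id = "A"
    query  = "_sourceCategory=api error"
  }

  trigger_conditions {
    logs_anomaly_condition {
      field                 = "_count"
      anomaly_detector_type = "Cluster"
      direction             = "Both"

      critical {
        sensitivity       = 0.5
        min_anomaly_count = 1
        time_range        = "-15m"
      }
    }
  }
}
`

func main() {
	// Print the sketch so this file compiles and runs on its own.
	fmt.Println(exampleLogsAnomalyMonitorConfig)
}
```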
@@ -714,12 +754,14 @@ var allowedTimeRanges = []string{
 	"-1d", "1d",
 }
 var timeRangeAllowedValuesValidation = validation.StringInSlice(allowedTimeRanges, false)
-var timeRangeFormatValidation = validation.StringMatch(regexp.MustCompile(`^-?(\d)+[smhd]$`), "Time range must be in the format '-?\\d+[smhd]'. Examples: -15m, 1d, etc.")
+var timeRangeFormatValidation = validation.StringMatch(regexp.MustCompile(`^-?(\d)+[smhd]$`),
+	"Time range must be in the format '-?\\d+[smhd]'. Examples: -15m, 1d, etc.")

 var resolutionWindowSchema = schema.Schema{
-	Type:             schema.TypeString,
-	Optional:         true,
-	ValidateFunc:     validation.StringMatch(regexp.MustCompile(`^(\d)+[smhd]`), "Resolution window must be in the format '\\d+[smhd]'. Examples: 0m, 15m, 1d, etc."),
+	Type:     schema.TypeString,
+	Optional: true,
+	ValidateFunc: validation.StringMatch(regexp.MustCompile(`^(\d)+[smhd]`),
+		"Resolution window must be in the format '\\d+[smhd]'. Examples: 0m, 15m, 1d, etc."),
 	DiffSuppressFunc: SuppressEquivalentTimeDiff(false),
 }

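As a side note on the time-range format validated above: the pattern accepts an optional leading minus sign, one or more digits, and a unit of s, m, h, or d. The standalone sketch below (illustration only, not part of the patch) compiles the same regular expression and checks a few sample values:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as timeRangeFormatValidation above: optional '-', digits, then s/m/h/d.
	timeRange := regexp.MustCompile(`^-?(\d)+[smhd]$`)

	for _, v := range []string{"-15m", "1d", "30m", "15", "-1w"} {
		fmt.Printf("%-5s valid=%t\n", v, timeRange.MatchString(v))
	}
	// "-15m", "1d" and "30m" match; "15" (missing unit) and "-1w" (unknown unit) do not.
}
```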
@@ -739,6 +781,7 @@ func resourceSumologicMonitorsLibraryMonitorCreate(d *schema.ResourceData, meta

 	if d.Id() == "" {
 		monitor := resourceToMonitorsLibraryMonitor(d)
+		log.Printf("creating monitor: %+v\n", monitor)
 		if monitor.ParentID == "" {
 			rootFolder, err := c.GetMonitorsLibraryFolder("root")
 			if err != nil {
@@ -774,6 +817,7 @@ func resourceSumologicMonitorsLibraryMonitorRead(d *schema.ResourceData, meta in
 	c := meta.(*Client)

 	monitor, err := c.MonitorsRead(d.Id())
+	log.Printf("read monitor: %+v\n", monitor)
 	if err != nil {
 		return err
 	}
@@ -796,6 +840,10 @@ func resourceSumologicMonitorsLibraryMonitorRead(d *schema.ResourceData, meta in
 		CmfFgpPermStmtsSetToResource(d, fgpResponse.PermissionStatements)
 	}

+	// Always use "Normal" as the status; otherwise state can drift from the backend. For example,
+	// an anomaly monitor's status is initially "GeneratingModel" and only switches to "Normal"
+	// once training completes, which would make the backend drift from the Terraform state.
+	monitor.Status = []string{"Normal"}
 	d.Set("created_by", monitor.CreatedBy)
 	d.Set("created_at", monitor.CreatedAt)
 	d.Set("monitor_type", monitor.MonitorType)
@@ -931,6 +979,7 @@ func resourceSumologicMonitorsLibraryMonitorUpdate(d *schema.ResourceData, meta
 		monitor = *updatedMonitor
 	}
 	monitor.Type = "MonitorsLibraryMonitorUpdate"
+	log.Printf("updating monitor: %+v\n", monitor)
 	err := c.UpdateMonitorsLibraryMonitor(monitor)
 	if err != nil {
 		return err
@@ -1090,6 +1139,9 @@ func triggerConditionsBlockToJson(block map[string]interface{}) []TriggerConditi
 	if sc, ok := fromSingletonArray(block, sloBurnRateConditionFieldName); ok {
 		conditions = append(conditions, sloBurnConditionBlockToJson(sc)...)
 	}
+	if sc, ok := fromSingletonArray(block, logsAnomalyConditionFieldName); ok {
+		conditions = append(conditions, logsAnomalyConditionBlockToJson(sc)...)
+	}

 	return conditions
 }
@@ -1107,7 +1159,7 @@ func metricsStaticConditionBlockToJson(block map[string]interface{}) []TriggerCo
 		DetectionMethod: metricsStaticConditionDetectionMethod,
 	}
 	triggerConditions := base.cloneReadingFromNestedBlocks(block)
-	for i, _ := range triggerConditions {
+	for i := range triggerConditions {
 		if (triggerConditions[i].TriggerType == "ResolvedCritical" && triggerConditions[i].OccurrenceType == "") ||
 			(triggerConditions[i].TriggerType == "ResolvedWarning" && triggerConditions[i].OccurrenceType == "") {
 			triggerConditions[i].OccurrenceType = "Always"
@@ -1198,6 +1250,22 @@ func sloBurnConditionBlockToJson(block map[string]interface{}) []TriggerConditio
 	return base.sloCloneReadingFromNestedBlocks(block)
 }

+func logsAnomalyConditionBlockToJson(block map[string]interface{}) []TriggerCondition {
+	base := TriggerCondition{
+		Field:               block["field"].(string),
+		Direction:           block["direction"].(string),
+		AnomalyDetectorType: block["anomaly_detector_type"].(string),
+		DetectionMethod:     logsAnomalyConditionDetectionMethod,
+	}
+	// The logs anomaly condition has no 'alert' and 'resolution' objects, so generate empty
+	// blocks here for reading to work.
+	if subBlock, ok := fromSingletonArray(block, "critical"); ok {
+		subBlock["alert"] = toSingletonArray(map[string]interface{}{})
+		subBlock["resolution"] = toSingletonArray(map[string]interface{}{})
+	}
+	return base.cloneReadingFromNestedBlocks(block)
+}
+
 // TriggerCondition JSON model to 'trigger_conditions' block
 func jsonToTriggerConditionsBlock(conditions []TriggerCondition) map[string]interface{} {
 	missingDataConditions := make([]TriggerCondition, 0)
@@ -1224,6 +1292,8 @@ func jsonToTriggerConditionsBlock(conditions []TriggerCondition) map[string]inte
 			triggerConditionsBlock[sloSLIConditionFieldName] = toSingletonArray(jsonToSloSliConditionBlock(dataConditions))
 		case sloBurnRateConditionDetectionMethod:
 			triggerConditionsBlock[sloBurnRateConditionFieldName] = toSingletonArray(jsonToSloBurnRateConditionBlock(dataConditions))
+		case logsAnomalyConditionDetectionMethod:
+			triggerConditionsBlock[logsAnomalyConditionFieldName] = toSingletonArray(jsonToLogsAnomalyConditionBlock(dataConditions))
 		}
 	}
 	if len(missingDataConditions) > 0 {
@@ -1312,8 +1382,7 @@ func jsonToMetricsStaticConditionBlock(conditions []TriggerCondition) map[string
 			criticalRslv["threshold_type"] = condition.ThresholdType
 			if condition.OccurrenceType == "AtLeastOnce" {
 				criticalRslv["occurrence_type"] = condition.OccurrenceType
-			} else {
-				// otherwise, the canonical translation is to leave out occurrenceType in the Resolved block
+				// for other cases, the canonical translation is to leave out occurrenceType in the Resolved block
 			}
 		case "Warning":
 			hasWarning = true
@@ -1330,8 +1399,7 @@ func jsonToMetricsStaticConditionBlock(conditions []TriggerCondition) map[string
 			warningRslv["threshold_type"] = condition.ThresholdType
 			if condition.OccurrenceType == "AtLeastOnce" {
 				warningRslv["occurrence_type"] = condition.OccurrenceType
-			} else {
-				// otherwise, the canonical translation is to leave out occurrenceType in the Resolved block
+				// for other cases, the canonical translation is to leave out occurrenceType in the Resolved block
 			}
 		}
 	}
@@ -1481,6 +1549,37 @@ func jsonToSloBurnRateConditionBlock(conditions []TriggerCondition) map[string]i
 	return block
 }

+func jsonToLogsAnomalyConditionBlock(conditions []TriggerCondition) map[string]interface{} {
+	block := map[string]interface{}{}
+
+	block["field"] = conditions[0].Field
+	block["direction"] = conditions[0].Direction
+	block["anomaly_detector_type"] = conditions[0].AnomalyDetectorType
+
+	var criticalDict = dict{}
+	block["critical"] = toSingletonArray(criticalDict)
+
+	var hasCritical = false
+	for _, condition := range conditions {
+		switch condition.TriggerType {
+		case "Critical":
+			hasCritical = true
+			criticalDict["sensitivity"] = condition.Sensitivity
+			criticalDict["min_anomaly_count"] = condition.MinAnomalyCount
+			criticalDict["time_range"] = condition.PositiveTimeRange()
+		case "ResolvedCritical":
+			hasCritical = true
+			criticalDict["sensitivity"] = condition.Sensitivity
+			criticalDict["min_anomaly_count"] = condition.MinAnomalyCount
+			criticalDict["time_range"] = condition.PositiveTimeRange()
+		}
+	}
+	if !hasCritical {
+		delete(block, "critical")
+	}
+	return block
+}
+
 func getAlertBlock(condition TriggerCondition) dict {
 	var alert = dict{}
 	burnRates := make([]interface{}, len(condition.BurnRates))
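To make the JSON-to-block direction easier to follow, here is a minimal standalone sketch of the mapping that `jsonToLogsAnomalyConditionBlock` performs. It uses simplified stand-ins for the provider's `TriggerCondition` and `dict` types and skips the `PositiveTimeRange()` normalization, so it is an approximation under those assumptions rather than the provider's actual code:

```go
package main

import "fmt"

// triggerCondition is a simplified stand-in for the provider's TriggerCondition type.
type triggerCondition struct {
	TriggerType         string
	Field               string
	Direction           string
	AnomalyDetectorType string
	Sensitivity         float64
	MinAnomalyCount     int
	TimeRange           string
}

// toLogsAnomalyBlock mirrors the shape of the conversion above: top-level fields come from the
// first condition, and Critical/ResolvedCritical conditions collapse into one "critical" block.
func toLogsAnomalyBlock(conditions []triggerCondition) map[string]interface{} {
	block := map[string]interface{}{
		"field":                 conditions[0].Field,
		"direction":             conditions[0].Direction,
		"anomaly_detector_type": conditions[0].AnomalyDetectorType,
	}
	critical := map[string]interface{}{}
	hasCritical := false
	for _, c := range conditions {
		if c.TriggerType == "Critical" || c.TriggerType == "ResolvedCritical" {
			hasCritical = true
			critical["sensitivity"] = c.Sensitivity
			critical["min_anomaly_count"] = c.MinAnomalyCount
			critical["time_range"] = c.TimeRange
		}
	}
	if hasCritical {
		block["critical"] = []interface{}{critical}
	}
	return block
}

func main() {
	conditions := []triggerCondition{
		{TriggerType: "Critical", Field: "_count", Direction: "Both", AnomalyDetectorType: "Cluster",
			Sensitivity: 0.5, MinAnomalyCount: 1, TimeRange: "15m"},
		{TriggerType: "ResolvedCritical", Field: "_count", Direction: "Both", AnomalyDetectorType: "Cluster",
			Sensitivity: 0.5, MinAnomalyCount: 1, TimeRange: "15m"},
	}
	fmt.Printf("%+v\n", toLogsAnomalyBlock(conditions))
}
```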
@@ -1522,6 +1621,7 @@ const logsMissingDataConditionFieldName = "logs_missing_data_condition"
 const metricsMissingDataConditionFieldName = "metrics_missing_data_condition"
 const sloSLIConditionFieldName = "slo_sli_condition"
 const sloBurnRateConditionFieldName = "slo_burn_rate_condition"
+const logsAnomalyConditionFieldName = "logs_anomaly_condition"

 const logsStaticConditionDetectionMethod = "LogsStaticCondition"
 const metricsStaticConditionDetectionMethod = "MetricsStaticCondition"
@@ -1531,6 +1631,7 @@ const logsMissingDataConditionDetectionMethod = "LogsMissingDataCondition"
 const metricsMissingDataConditionDetectionMethod = "MetricsMissingDataCondition"
 const sloSLIConditionDetectionMethod = "SloSliCondition"
 const sloBurnRateConditionDetectionMethod = "SloBurnRateCondition"
+const logsAnomalyConditionDetectionMethod = "LogsAnomalyCondition"

 func getQueries(d *schema.ResourceData) []MonitorQuery {
 	rawQueries := d.Get("queries").([]interface{})
@@ -1549,11 +1650,12 @@ func resourceToMonitorsLibraryMonitor(d *schema.ResourceData) MonitorsLibraryMon
 	notifications := getNotifications(d)
 	triggers := getTriggers(d)
 	queries := getQueries(d)
-	rawStatus := d.Get("status").([]interface{})
-	status := make([]string, len(rawStatus))
-	for i := range rawStatus {
-		status[i] = rawStatus[i].(string)
-	}
+
+	// Always use "Normal" as the status; otherwise state can drift from the backend. For example,
+	// an anomaly monitor's status is initially "GeneratingModel" and only switches to "Normal"
+	// once training completes, which would make the backend drift from the Terraform state.
+	status := []string{"Normal"}
+
 	rawGroupFields := d.Get("notification_group_fields").([]interface{})
 	notificationGroupFields := make([]string, len(rawGroupFields))
 	for i := range rawGroupFields {
@@ -1714,6 +1816,14 @@ func (base TriggerCondition) cloneReadingFromNestedBlocks(block map[string]inter
 		// we want the caller to be able to tell whether the resolution block had set its own occurrence type
 		resolvedCriticalCondition.OccurrenceType = ""
 	}
+
+	if criticalCondition.DetectionMethod == logsAnomalyConditionDetectionMethod {
+		criticalCondition.MinAnomalyCount = critical["min_anomaly_count"].(int)
+		criticalCondition.Sensitivity = critical["sensitivity"].(float64)
+		resolvedCriticalCondition.MinAnomalyCount = criticalCondition.MinAnomalyCount
+		resolvedCriticalCondition.Sensitivity = criticalCondition.Sensitivity
+	}
+
 	if alert, ok := fromSingletonArray(critical, "alert"); ok {
 		criticalCondition.readFrom(alert)
 	}