 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+import logging
 import os
 import platform
 import re
@@ -21,6 +22,7 @@
 from google.cloud import monitoring_v3
 
 from opencensus.__version__ import __version__
+from opencensus.common import utils
 from opencensus.common.monitored_resource_util.monitored_resource_util \
     import MonitoredResourceUtil
 from opencensus.common.transports import async_
@@ -29,6 +31,7 @@
 from opencensus.stats.exporters import base
 
 MAX_TIME_SERIES_PER_UPLOAD = 200
+OPENCENSUS_TASK = "opencensus_task"
 OPENCENSUS_TASK_DESCRIPTION = "Opencensus task identifier"
 DEFAULT_DISPLAY_NAME_PREFIX = "OpenCensus"
 ERROR_BLANK_PROJECT_ID = "expecting a non-blank ProjectID"
@@ -107,8 +110,8 @@ def default_monitoring_labels(self):
 
 
 class StackdriverStatsExporter(base.StatsExporter):
-    """ StackdriverStatsExporter exports stats
-    to the Stackdriver Monitoring."""
+    """Stats exporter for the Stackdriver Monitoring backend."""
+
     def __init__(self,
                  options=Options(),
                  client=None,
@@ -164,7 +167,8 @@ def upload_stats(self, view_data):
         """ It receives an array of view_data object
         and create time series for each value
         """
-        requests = self.make_request(view_data, MAX_TIME_SERIES_PER_UPLOAD)
+        view_data_set = utils.uniq(view_data)
+        requests = self.make_request(view_data_set, MAX_TIME_SERIES_PER_UPLOAD)
         for request in requests:
             self.client.create_time_series(request[CONS_NAME],
                                            request[CONS_TIME_SERIES])
@@ -181,7 +185,7 @@ def make_request(self, view_data, limit):
         for v_data in view_data:
             series = self.create_time_series_list(v_data, resource,
                                                   metric_prefix)
-            time_series.append(series)
+            time_series.extend(series)
 
             project_id = self.options.project_id
             request = {}
@@ -197,16 +201,19 @@ def create_time_series_list(self, v_data, option_resource_type,
                                 metric_prefix):
         """ Create the TimeSeries object based on the view data
         """
-        series = monitoring_v3.types.TimeSeries()
-        series.metric.type = namespaced_view_name(v_data.view.name,
-                                                  metric_prefix)
-        set_monitored_resource(series, option_resource_type)
-
+        time_series_list = []
         tag_agg = v_data.tag_value_aggregation_data_map
         for tag_value, agg in tag_agg.items():
+            series = monitoring_v3.types.TimeSeries()
+            series.metric.type = namespaced_view_name(v_data.view.name,
+                                                      metric_prefix)
+            set_metric_labels(series, v_data.view, tag_value)
+            set_monitored_resource(series, option_resource_type)
+
             point = series.points.add()
-            if type(agg) is \
-                    aggregation.aggregation_data.DistributionAggregationData:
+            if isinstance(
+                    agg,
+                    aggregation.aggregation_data.DistributionAggregationData):
                 agg_data = tag_agg.get(tag_value)
                 dist_value = point.value.distribution_value
                 dist_value.count = agg_data.count_data
@@ -228,12 +235,21 @@ def create_time_series_list(self, v_data, option_resource_type,
                 buckets.extend([0])
                 bounds.extend(list(map(float, agg_data.bounds)))
                 buckets.extend(list(map(int, agg_data.counts_per_bucket)))
+            elif isinstance(agg,
+                            aggregation.aggregation_data.CountAggregationData):
+                point.value.int64_value = agg.count_data
+            elif isinstance(
+                    agg, aggregation.aggregation_data.SumAggregationDataFloat):
+                point.value.double_value = agg.sum_data
+            elif not isinstance(
+                    agg, aggregation.aggregation_data
+                    .LastValueAggregationData):  # pragma: NO COVER
+                if isinstance(v_data.view.measure, measure.MeasureInt):
+                    point.value.int64_value = int(agg.value)
+                elif isinstance(v_data.view.measure, measure.MeasureFloat):
+                    point.value.double_value = float(agg.value)
             else:
-                convFloat, isFloat = as_float(tag_value[0])
-                if isFloat:  # pragma: NO COVER
-                    point.value.double_value = convFloat
-                else:  # pragma: NO COVER
-                    point.value.string_value = str(tag_value[0])
+                point.value.string_value = str(tag_value[0])
 
             start = datetime.strptime(v_data.start_time, EPOCH_PATTERN)
             end = datetime.strptime(v_data.end_time, EPOCH_PATTERN)
@@ -244,7 +260,7 @@ def create_time_series_list(self, v_data, option_resource_type,
             point.interval.end_time.seconds = int(timestamp_end)
 
             secs = point.interval.end_time.seconds
-            point.interval.end_time.nanos = int((timestamp_end - secs)*10**9)
+            point.interval.end_time.nanos = int((timestamp_end - secs) * 10**9)
 
             if type(agg) is not aggregation.aggregation_data.\
                     LastValueAggregationData:  # pragma: NO COVER
@@ -256,7 +272,10 @@ def create_time_series_list(self, v_data, option_resource_type,
                 start_time.seconds = int(timestamp_start)
                 start_secs = start_time.seconds
                 start_time.nanos = int((timestamp_start - start_secs) * 1e9)
-        return series
+
+            time_series_list.append(series)
+
+        return time_series_list
 
     def create_metric_descriptor(self, view):
         """ it creates a MetricDescriptor
@@ -300,8 +319,8 @@ def create_metric_descriptor(self, view):
             if isinstance(view_measure, measure.MeasureFloat):
                 value_type = metric_desc.ValueType.DOUBLE
         else:
-            raise Exception("unsupported aggregation type: %s"
-                            % type(view_aggregation))
+            raise Exception(
+                "unsupported aggregation type: %s" % type(view_aggregation))
 
         display_name_prefix = DEFAULT_DISPLAY_NAME_PREFIX
         if self.options.metric_prefix != "":
@@ -406,11 +425,6 @@ def new_stats_exporter(options):
 
     if options.default_monitoring_labels is not None:
         exporter.set_default_labels(options.default_monitoring_labels)
-    else:
-        label = {}
-        key = remove_non_alphanumeric(get_task_value())
-        label[key] = OPENCENSUS_TASK_DESCRIPTION
-        exporter.set_default_labels(label)
     return exporter
 
 
@@ -446,9 +460,24 @@ def new_label_descriptors(defaults, keys):
         label = {}
         label["key"] = remove_non_alphanumeric(tag_key)
         label_descriptors.append(label)
+    label_descriptors.append({"key": OPENCENSUS_TASK,
+                              "description": OPENCENSUS_TASK_DESCRIPTION})
     return label_descriptors
 
 
+def set_metric_labels(series, view, tag_values):
+    if len(view.columns) != len(tag_values):
+        logging.warning(
+            "TagKeys and TagValues don't have same size."
+        )  # pragma: NO COVER
+
+    for ii, tag_value in enumerate(tag_values):
+        if tag_value is not None:
+            metric_label = remove_non_alphanumeric(view.columns[ii])
+            series.metric.labels[metric_label] = tag_value
+    series.metric.labels[OPENCENSUS_TASK] = get_task_value()
+
+
 def remove_non_alphanumeric(text):
     """ Remove characters not accepted in labels key
     """
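
For reviewers skimming the diff: the net effect of the create_time_series_list and set_metric_labels changes is that each tag-value combination in a view's aggregation map now becomes its own TimeSeries, carrying the view's tag keys as sanitized metric labels plus a per-process opencensus_task label, while the old default-label fallback in new_stats_exporter is dropped. A rough standalone sketch of that labeling step, using a plain dict in place of the protobuf series.metric.labels and illustrative helper names (sanitize, labels_for) rather than the module's own functions:

    import re

    def sanitize(key):
        # Illustrative stand-in for remove_non_alphanumeric.
        return re.sub(r"\W+", "", key)

    def labels_for(columns, tag_values, task_value):
        # One metric label per (tag key, tag value) pair; None values are
        # skipped, mirroring the `if tag_value is not None` check above.
        labels = {sanitize(col): val
                  for col, val in zip(columns, tag_values)
                  if val is not None}
        # Every series also carries the per-process task identifier label.
        labels["opencensus_task"] = task_value
        return labels

    # A view with two tag keys yields one label set per recorded combination:
    print(labels_for(["frontend", "os_type"], ["web", None], "py-123@host"))
    # {'frontend': 'web', 'opencensus_task': 'py-123@host'}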