author     Julien Desprez <jdesprez@google.com>  2018-10-22 11:37:22 -0700
committer  android-build-merger <android-build-merger@google.com>  2018-10-22 11:37:22 -0700
commit     13217871fefa43f6d16fbb31b04e9904996d87d5 (patch)
tree       ede84fcf0a9687d4907ae5f8a4788271d62e0922 /exporters/stats
parent     cfbefd32336596ea63784607e4106dc37ce0567f (diff)
parent     6fbc3cf5a1a3369fd354c1e5d9f90c86e4bce0a4 (diff)
download   opencensus-java-13217871fefa43f6d16fbb31b04e9904996d87d5.tar.gz
Merge remote-tracking branch 'aosp/upstream-master' into merge
am: dd3cabeacc
am: 6fbc3cf5a1

Change-Id: I11b0ec1cf561d2a14da78e444b1594f167787fe6
Diffstat (limited to 'exporters/stats')
-rw-r--r--  exporters/stats/prometheus/README.md  81
-rw-r--r--  exporters/stats/prometheus/build.gradle  19
-rw-r--r--  exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtils.java  298
-rw-r--r--  exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollector.java  177
-rw-r--r--  exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsConfiguration.java  81
-rw-r--r--  exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtilsTest.java  326
-rw-r--r--  exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollectorTest.java  168
-rw-r--r--  exporters/stats/signalfx/README.md  76
-rw-r--r--  exporters/stats/signalfx/build.gradle  23
-rw-r--r--  exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxMetricsSenderFactory.java  59
-rw-r--r--  exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptor.java  188
-rw-r--r--  exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsConfiguration.java  153
-rw-r--r--  exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporter.java  109
-rw-r--r--  exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThread.java  105
-rw-r--r--  exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptorTest.java  320
-rw-r--r--  exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsConfigurationTest.java  90
-rw-r--r--  exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterTest.java  93
-rw-r--r--  exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThreadTest.java  149
-rw-r--r--  exporters/stats/stackdriver/README.md  171
-rw-r--r--  exporters/stats/stackdriver/build.gradle  30
-rw-r--r--  exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtils.java  518
-rw-r--r--  exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorker.java  274
-rw-r--r--  exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfiguration.java  159
-rw-r--r--  exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporter.java  363
-rw-r--r--  exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtilsTest.java  568
-rw-r--r--  exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorkerTest.java  310
-rw-r--r--  exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfigurationTest.java  72
-rw-r--r--  exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporterTest.java  129
28 files changed, 5109 insertions, 0 deletions
diff --git a/exporters/stats/prometheus/README.md b/exporters/stats/prometheus/README.md
new file mode 100644
index 00000000..fa19efc9
--- /dev/null
+++ b/exporters/stats/prometheus/README.md
@@ -0,0 +1,81 @@
+# OpenCensus Prometheus Stats Exporter
+
+The *OpenCensus Prometheus Stats Exporter* is a stats exporter that exports data to
+Prometheus. [Prometheus](https://prometheus.io/) is an open-source systems monitoring and alerting
+toolkit originally built at [SoundCloud](https://soundcloud.com/).
+
+## Quickstart
+
+### Prerequisites
+
+To use this exporter, you need to install, configure and start Prometheus first. Follow the
+instructions [here](https://prometheus.io/docs/introduction/first_steps/).
+
+### Hello "Prometheus Stats"
+
+#### Add the dependencies to your project
+
+For Maven add to your `pom.xml`:
+```xml
+<dependencies>
+ <dependency>
+ <groupId>io.opencensus</groupId>
+ <artifactId>opencensus-api</artifactId>
+ <version>0.16.1</version>
+ </dependency>
+ <dependency>
+ <groupId>io.opencensus</groupId>
+ <artifactId>opencensus-exporter-stats-prometheus</artifactId>
+ <version>0.16.1</version>
+ </dependency>
+ <dependency>
+ <groupId>io.opencensus</groupId>
+ <artifactId>opencensus-impl</artifactId>
+ <version>0.16.1</version>
+ <scope>runtime</scope>
+ </dependency>
+</dependencies>
+```
+
+For Gradle add to your dependencies:
+```groovy
+compile 'io.opencensus:opencensus-api:0.16.1'
+compile 'io.opencensus:opencensus-exporter-stats-prometheus:0.16.1'
+runtime 'io.opencensus:opencensus-impl:0.16.1'
+```
+
+#### Register the exporter
+
+```java
+public class MyMainClass {
+ public static void main(String[] args) {
+ // Creates a PrometheusStatsCollector and registers it to the default Prometheus registry.
+ PrometheusStatsCollector.createAndRegister();
+
+ // Uses a simple Prometheus HTTPServer to export metrics.
+ // You can use a Prometheus PushGateway instead, though that's discouraged by Prometheus:
+ // https://prometheus.io/docs/practices/pushing/#should-i-be-using-the-pushgateway.
+ io.prometheus.client.exporter.HTTPServer server =
+ new HTTPServer(/*host*/ "localhost", /*port*/ 9091, /*daemon*/ true);
+
+ // Your code here.
+ // ...
+ }
+}
+```
+
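+By default, the collector registers with the Prometheus `CollectorRegistry.defaultRegistry`. If you
+manage your own registry, you can pass it through a `PrometheusStatsConfiguration` instead (a
+sketch; `myRegistry` stands for whatever `CollectorRegistry` instance your application uses):
+
+```java
+CollectorRegistry myRegistry = new CollectorRegistry();
+PrometheusStatsCollector.createAndRegister(
+    PrometheusStatsConfiguration.builder().setRegistry(myRegistry).build());
+```
+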
+With the HTTPServer example above, you should be able to see all the OpenCensus Prometheus metrics
+by visiting localhost:9091/metrics. Each time you visit localhost:9091/metrics, the metrics are
+collected from the OpenCensus library and refreshed.
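+
+Note that metrics only appear once your application has registered at least one view and recorded
+measurements against it. A minimal sketch (the measure and view names below are only illustrative;
+the classes come from the opencensus-api artifact):
+
+```java
+MeasureLong requestCount = MeasureLong.create("my_app/request_count", "Number of requests", "1");
+View requestCountView =
+    View.create(
+        View.Name.create("my_app/request_count"),
+        "Number of requests",
+        requestCount,
+        Aggregation.Count.create(),
+        Collections.<TagKey>emptyList());
+Stats.getViewManager().registerView(requestCountView);
+
+// Somewhere in your request handling code:
+Stats.getStatsRecorder().newMeasureMap().put(requestCount, 1).record();
+```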
+
+#### Exporting
+
+After collecting stats from OpenCensus, there are multiple options for exporting them.
+See [Exporting via HTTP](https://github.com/prometheus/client_java#http), [Exporting to a Pushgateway](https://github.com/prometheus/client_java#exporting-to-a-pushgateway)
+and [Bridges](https://github.com/prometheus/client_java#bridges).
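+
+For example, pushing the default registry's contents to a Pushgateway with the Prometheus Java
+client looks roughly like this (a sketch; it assumes a Pushgateway reachable at `pushgateway:9091`,
+and keep in mind that Prometheus generally recommends pull over push):
+
+```java
+PushGateway pushGateway = new PushGateway("pushgateway:9091");
+// pushAdd throws IOException; handle or declare it in real code.
+pushGateway.pushAdd(CollectorRegistry.defaultRegistry, "my_app_job");
+```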
+
+#### Java Versions
+
+Java 7 or above is required to use this exporter.
+
+## FAQ
diff --git a/exporters/stats/prometheus/build.gradle b/exporters/stats/prometheus/build.gradle
new file mode 100644
index 00000000..fe8563c4
--- /dev/null
+++ b/exporters/stats/prometheus/build.gradle
@@ -0,0 +1,19 @@
+description = 'OpenCensus Stats Prometheus Exporter'
+
+[compileJava, compileTestJava].each() {
+ it.sourceCompatibility = 1.7
+ it.targetCompatibility = 1.7
+}
+
+dependencies {
+ compileOnly libraries.auto_value
+
+ compile project(':opencensus-api'),
+ libraries.guava,
+ libraries.prometheus_simpleclient
+
+ testCompile project(':opencensus-api')
+
+ signature "org.codehaus.mojo.signature:java17:1.0@signature"
+ signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
+}
\ No newline at end of file
diff --git a/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtils.java b/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtils.java
new file mode 100644
index 00000000..288813d3
--- /dev/null
+++ b/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtils.java
@@ -0,0 +1,298 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.prometheus;
+
+import static io.prometheus.client.Collector.doubleToGoString;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.collect.Lists;
+import io.opencensus.common.Function;
+import io.opencensus.common.Functions;
+import io.opencensus.stats.Aggregation;
+import io.opencensus.stats.Aggregation.Count;
+import io.opencensus.stats.Aggregation.Distribution;
+import io.opencensus.stats.Aggregation.Sum;
+import io.opencensus.stats.AggregationData;
+import io.opencensus.stats.AggregationData.CountData;
+import io.opencensus.stats.AggregationData.DistributionData;
+import io.opencensus.stats.AggregationData.LastValueDataDouble;
+import io.opencensus.stats.AggregationData.LastValueDataLong;
+import io.opencensus.stats.AggregationData.SumDataDouble;
+import io.opencensus.stats.AggregationData.SumDataLong;
+import io.opencensus.stats.View;
+import io.opencensus.stats.ViewData;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagValue;
+import io.prometheus.client.Collector;
+import io.prometheus.client.Collector.MetricFamilySamples;
+import io.prometheus.client.Collector.MetricFamilySamples.Sample;
+import io.prometheus.client.Collector.Type;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.Map.Entry;
+
+/*>>>
+import org.checkerframework.checker.nullness.qual.Nullable;
+*/
+
+/**
+ * Util methods to convert OpenCensus Stats data models to Prometheus data models.
+ *
+ * <p>Each OpenCensus {@link View} will be converted to a Prometheus {@link MetricFamilySamples}
+ * with no {@link Sample}s, and is used for registering Prometheus {@code Metric}s. Only {@code
+ * Cumulative} views are supported. All views are under namespace "opencensus".
+ *
+ * <p>{@link Aggregation} will be converted to a corresponding Prometheus {@link Type}. {@link Sum}
+ * will be {@link Type#UNTYPED}, {@link Count} will be {@link Type#COUNTER}, {@link
+ * Aggregation.Mean} will be {@link Type#SUMMARY}, {@link Aggregation.LastValue} will be {@link
+ * Type#GAUGE} and {@link Distribution} will be {@link Type#HISTOGRAM}. Please note we cannot set
+ * bucket boundaries for custom {@link Type#HISTOGRAM}.
+ *
+ * <p>Each OpenCensus {@link ViewData} will be converted to a Prometheus {@link
+ * MetricFamilySamples}, and each {@code Row} of the {@link ViewData} will be converted to
+ * Prometheus {@link Sample}s.
+ *
+ * <p>{@link SumDataDouble}, {@link SumDataLong}, {@link LastValueDataDouble}, {@link
+ * LastValueDataLong} and {@link CountData} will be converted to a single {@link Sample}. {@link
+ * AggregationData.MeanData} will be converted to two {@link Sample}s sum and count. {@link
+ * DistributionData} will be converted to a list of {@link Sample}s that have the sum, count and
+ * histogram buckets.
+ *
+ * <p>{@link TagKey} and {@link TagValue} will be converted to Prometheus {@code LabelName} and
+ * {@code LabelValue}. {@code Null} {@link TagValue} will be converted to an empty string.
+ *
+ * <p>Please note that Prometheus Metric and Label names can only contain alphanumeric characters
+ * and underscores. All other characters will be replaced with underscores.
+ */
+@SuppressWarnings("deprecation")
+final class PrometheusExportUtils {
+
+ @VisibleForTesting static final String SAMPLE_SUFFIX_BUCKET = "_bucket";
+ @VisibleForTesting static final String SAMPLE_SUFFIX_COUNT = "_count";
+ @VisibleForTesting static final String SAMPLE_SUFFIX_SUM = "_sum";
+ @VisibleForTesting static final String LABEL_NAME_BUCKET_BOUND = "le";
+
+ private static final Function<Object, Type> TYPE_UNTYPED_FUNCTION =
+ Functions.returnConstant(Type.UNTYPED);
+ private static final Function<Object, Type> TYPE_COUNTER_FUNCTION =
+ Functions.returnConstant(Type.COUNTER);
+ private static final Function<Object, Type> TYPE_HISTOGRAM_FUNCTION =
+ Functions.returnConstant(Type.HISTOGRAM);
+ private static final Function<Object, Type> TYPE_GAUGE_FUNCTION =
+ Functions.returnConstant(Type.GAUGE);
+
+ // Converts a ViewData to a Prometheus MetricFamilySamples.
+ static MetricFamilySamples createMetricFamilySamples(ViewData viewData) {
+ View view = viewData.getView();
+ String name = Collector.sanitizeMetricName(view.getName().asString());
+ Type type = getType(view.getAggregation(), view.getWindow());
+ List<String> labelNames = convertToLabelNames(view.getColumns());
+ List<Sample> samples = Lists.newArrayList();
+ for (Entry<List</*@Nullable*/ TagValue>, AggregationData> entry :
+ viewData.getAggregationMap().entrySet()) {
+ samples.addAll(
+ getSamples(name, labelNames, entry.getKey(), entry.getValue(), view.getAggregation()));
+ }
+ return new MetricFamilySamples(name, type, view.getDescription(), samples);
+ }
+
+ // Converts a View to a Prometheus MetricFamilySamples.
+ // Used only for Prometheus metric registry, should not contain any actual samples.
+ static MetricFamilySamples createDescribableMetricFamilySamples(View view) {
+ String name = Collector.sanitizeMetricName(view.getName().asString());
+ Type type = getType(view.getAggregation(), view.getWindow());
+ List<String> labelNames = convertToLabelNames(view.getColumns());
+ if (containsDisallowedLeLabelForHistogram(labelNames, type)) {
+ throw new IllegalStateException(
+ "Prometheus Histogram cannot have a label named 'le', "
+ + "because it is a reserved label for bucket boundaries. "
+ + "Please remove this tag key from your view.");
+ }
+ return new MetricFamilySamples(
+ name, type, view.getDescription(), Collections.<Sample>emptyList());
+ }
+
+ @VisibleForTesting
+ static Type getType(Aggregation aggregation, View.AggregationWindow window) {
+ if (!(window instanceof View.AggregationWindow.Cumulative)) {
+ return Type.UNTYPED;
+ }
+ return aggregation.match(
+ TYPE_UNTYPED_FUNCTION, // SUM
+ TYPE_COUNTER_FUNCTION, // COUNT
+ TYPE_HISTOGRAM_FUNCTION, // DISTRIBUTION
+ TYPE_GAUGE_FUNCTION, // LAST VALUE
+ new Function<Aggregation, Type>() {
+ @Override
+ public Type apply(Aggregation arg) {
+ if (arg instanceof Aggregation.Mean) {
+ return Type.SUMMARY;
+ }
+ return Type.UNTYPED;
+ }
+ });
+ }
+
+ // Converts a row in ViewData (a.k.a Entry<List<TagValue>, AggregationData>) to a list of
+ // Prometheus Samples.
+ @VisibleForTesting
+ static List<Sample> getSamples(
+ final String name,
+ final List<String> labelNames,
+ List</*@Nullable*/ TagValue> tagValues,
+ AggregationData aggregationData,
+ final Aggregation aggregation) {
+ Preconditions.checkArgument(
+ labelNames.size() == tagValues.size(), "Label names and tag values have different sizes.");
+ final List<Sample> samples = Lists.newArrayList();
+ final List<String> labelValues = new ArrayList<String>(tagValues.size());
+ for (TagValue tagValue : tagValues) {
+ String labelValue = tagValue == null ? "" : tagValue.asString();
+ labelValues.add(labelValue);
+ }
+
+ aggregationData.match(
+ new Function<SumDataDouble, Void>() {
+ @Override
+ public Void apply(SumDataDouble arg) {
+ samples.add(new Sample(name, labelNames, labelValues, arg.getSum()));
+ return null;
+ }
+ },
+ new Function<SumDataLong, Void>() {
+ @Override
+ public Void apply(SumDataLong arg) {
+ samples.add(new Sample(name, labelNames, labelValues, arg.getSum()));
+ return null;
+ }
+ },
+ new Function<CountData, Void>() {
+ @Override
+ public Void apply(CountData arg) {
+ samples.add(new Sample(name, labelNames, labelValues, arg.getCount()));
+ return null;
+ }
+ },
+ new Function<DistributionData, Void>() {
+ @Override
+ public Void apply(DistributionData arg) {
+ // For histogram buckets, manually add the bucket boundaries as "le" labels. See
+ // https://github.com/prometheus/client_java/commit/ed184d8e50c82e98bb2706723fff764424840c3a#diff-c505abbde72dd6bf36e89917b3469404R241
+ @SuppressWarnings("unchecked")
+ Distribution distribution = (Distribution) aggregation;
+ List<Double> boundaries = distribution.getBucketBoundaries().getBoundaries();
+ List<String> labelNamesWithLe = new ArrayList<String>(labelNames);
+ labelNamesWithLe.add(LABEL_NAME_BUCKET_BOUND);
+ long cumulativeCount = 0;
+ for (int i = 0; i < arg.getBucketCounts().size(); i++) {
+ List<String> labelValuesWithLe = new ArrayList<String>(labelValues);
+ // The label value of "le" is the upper inclusive bound.
+ // For the last bucket, it should be "+Inf".
+ String bucketBoundary =
+ doubleToGoString(
+ i < boundaries.size() ? boundaries.get(i) : Double.POSITIVE_INFINITY);
+ labelValuesWithLe.add(bucketBoundary);
+ cumulativeCount += arg.getBucketCounts().get(i);
+ samples.add(
+ new MetricFamilySamples.Sample(
+ name + SAMPLE_SUFFIX_BUCKET,
+ labelNamesWithLe,
+ labelValuesWithLe,
+ cumulativeCount));
+ }
+
+ samples.add(
+ new MetricFamilySamples.Sample(
+ name + SAMPLE_SUFFIX_COUNT, labelNames, labelValues, arg.getCount()));
+ samples.add(
+ new MetricFamilySamples.Sample(
+ name + SAMPLE_SUFFIX_SUM,
+ labelNames,
+ labelValues,
+ arg.getCount() * arg.getMean()));
+ return null;
+ }
+ },
+ new Function<LastValueDataDouble, Void>() {
+ @Override
+ public Void apply(LastValueDataDouble arg) {
+ samples.add(new Sample(name, labelNames, labelValues, arg.getLastValue()));
+ return null;
+ }
+ },
+ new Function<LastValueDataLong, Void>() {
+ @Override
+ public Void apply(LastValueDataLong arg) {
+ samples.add(new Sample(name, labelNames, labelValues, arg.getLastValue()));
+ return null;
+ }
+ },
+ new Function<AggregationData, Void>() {
+ @Override
+ public Void apply(AggregationData arg) {
+ // TODO(songya): remove this once Mean aggregation is completely removed. Before that
+ // we need to continue supporting Mean, since it could still be used by users and some
+ // deprecated RPC views.
+ if (arg instanceof AggregationData.MeanData) {
+ AggregationData.MeanData meanData = (AggregationData.MeanData) arg;
+ samples.add(
+ new MetricFamilySamples.Sample(
+ name + SAMPLE_SUFFIX_COUNT, labelNames, labelValues, meanData.getCount()));
+ samples.add(
+ new MetricFamilySamples.Sample(
+ name + SAMPLE_SUFFIX_SUM,
+ labelNames,
+ labelValues,
+ meanData.getCount() * meanData.getMean()));
+ return null;
+ }
+ throw new IllegalArgumentException("Unknown Aggregation.");
+ }
+ });
+
+ return samples;
+ }
+
+ // Converts the list of tag keys to a list of string label names. Also sanitizes the tag keys.
+ @VisibleForTesting
+ static List<String> convertToLabelNames(List<TagKey> tagKeys) {
+ final List<String> labelNames = new ArrayList<String>(tagKeys.size());
+ for (TagKey tagKey : tagKeys) {
+ labelNames.add(Collector.sanitizeMetricName(tagKey.getName()));
+ }
+ return labelNames;
+ }
+
+ // Returns true if there is an "le" label name in histogram label names, returns false otherwise.
+ // Similar check to
+ // https://github.com/prometheus/client_java/commit/ed184d8e50c82e98bb2706723fff764424840c3a#diff-c505abbde72dd6bf36e89917b3469404R78
+ static boolean containsDisallowedLeLabelForHistogram(List<String> labelNames, Type type) {
+ if (!Type.HISTOGRAM.equals(type)) {
+ return false;
+ }
+ for (String label : labelNames) {
+ if (LABEL_NAME_BUCKET_BOUND.equals(label)) {
+ return true;
+ }
+ }
+ return false;
+ }
+
+ private PrometheusExportUtils() {}
+}
diff --git a/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollector.java b/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollector.java
new file mode 100644
index 00000000..d555c92b
--- /dev/null
+++ b/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollector.java
@@ -0,0 +1,177 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.prometheus;
+
+import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.containsDisallowedLeLabelForHistogram;
+import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.convertToLabelNames;
+import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.getType;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.Lists;
+import io.opencensus.common.Scope;
+import io.opencensus.stats.Stats;
+import io.opencensus.stats.View;
+import io.opencensus.stats.ViewData;
+import io.opencensus.stats.ViewManager;
+import io.opencensus.trace.Sampler;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.samplers.Samplers;
+import io.prometheus.client.Collector;
+import io.prometheus.client.CollectorRegistry;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * OpenCensus Stats {@link Collector} for Prometheus.
+ *
+ * @since 0.12
+ */
+@SuppressWarnings("deprecation")
+public final class PrometheusStatsCollector extends Collector implements Collector.Describable {
+
+ private static final Logger logger = Logger.getLogger(PrometheusStatsCollector.class.getName());
+ private static final Tracer tracer = Tracing.getTracer();
+ private static final Sampler probabilitySampler = Samplers.probabilitySampler(0.0001);
+
+ private final ViewManager viewManager;
+
+ /**
+ * Creates a {@link PrometheusStatsCollector} and registers it to Prometheus {@link
+ * CollectorRegistry#defaultRegistry}.
+ *
+ * <p>This is equivalent to:
+ *
+ * <pre>{@code
+ * PrometheusStatsCollector.createAndRegister(PrometheusStatsConfiguration.builder().build());
+ * }</pre>
+ *
+ * @throws IllegalArgumentException if a {@code PrometheusStatsCollector} has already been created
+ * and registered.
+ * @since 0.12
+ */
+ public static void createAndRegister() {
+ new PrometheusStatsCollector(Stats.getViewManager()).register();
+ }
+
+ /**
+ * Creates a {@link PrometheusStatsCollector} and registers it to the given Prometheus {@link
+ * CollectorRegistry} in the {@link PrometheusStatsConfiguration}.
+ *
+ * <p>If {@code CollectorRegistry} of the configuration is not set, the collector will use {@link
+ * CollectorRegistry#defaultRegistry}.
+ *
+ * @throws IllegalArgumentException if a {@code PrometheusStatsCollector} has already been created
+ * and registered.
+ * @since 0.13
+ */
+ public static void createAndRegister(PrometheusStatsConfiguration configuration) {
+ CollectorRegistry registry = configuration.getRegistry();
+ if (registry == null) {
+ registry = CollectorRegistry.defaultRegistry;
+ }
+ new PrometheusStatsCollector(Stats.getViewManager()).register(registry);
+ }
+
+ @Override
+ public List<MetricFamilySamples> collect() {
+ List<MetricFamilySamples> samples = Lists.newArrayList();
+ Span span =
+ tracer
+ .spanBuilder("ExportStatsToPrometheus")
+ .setSampler(probabilitySampler)
+ .setRecordEvents(true)
+ .startSpan();
+ span.addAnnotation("Collect Prometheus Metric Samples.");
+ Scope scope = tracer.withSpan(span);
+ try {
+ for (View view : viewManager.getAllExportedViews()) {
+ if (containsDisallowedLeLabelForHistogram(
+ convertToLabelNames(view.getColumns()),
+ getType(view.getAggregation(), view.getWindow()))) {
+ continue; // silently skip Distribution views with "le" tag key
+ }
+ try {
+ ViewData viewData = viewManager.getView(view.getName());
+ if (viewData == null) {
+ continue;
+ } else {
+ samples.add(PrometheusExportUtils.createMetricFamilySamples(viewData));
+ }
+ } catch (Throwable e) {
+ logger.log(Level.WARNING, "Exception thrown when collecting metric samples.", e);
+ span.setStatus(
+ Status.UNKNOWN.withDescription(
+ "Exception thrown when collecting Prometheus Metric Samples: "
+ + exceptionMessage(e)));
+ }
+ }
+ span.addAnnotation("Finish collecting Prometheus Metric Samples.");
+ } finally {
+ scope.close();
+ span.end();
+ }
+ return samples;
+ }
+
+ @Override
+ public List<MetricFamilySamples> describe() {
+ List<MetricFamilySamples> samples = Lists.newArrayList();
+ Span span =
+ tracer
+ .spanBuilder("DescribeMetricsForPrometheus")
+ .setSampler(probabilitySampler)
+ .setRecordEvents(true)
+ .startSpan();
+ span.addAnnotation("Describe Prometheus Metrics.");
+ Scope scope = tracer.withSpan(span);
+ try {
+ for (View view : viewManager.getAllExportedViews()) {
+ try {
+ samples.add(PrometheusExportUtils.createDescribableMetricFamilySamples(view));
+ } catch (Throwable e) {
+ logger.log(Level.WARNING, "Exception thrown when describing metrics.", e);
+ span.setStatus(
+ Status.UNKNOWN.withDescription(
+ "Exception thrown when describing Prometheus Metrics: " + exceptionMessage(e)));
+ }
+ }
+ span.addAnnotation("Finish describing Prometheus Metrics.");
+ } finally {
+ scope.close();
+ span.end();
+ }
+ return samples;
+ }
+
+ @VisibleForTesting
+ PrometheusStatsCollector(ViewManager viewManager) {
+ this.viewManager = viewManager;
+ Tracing.getExportComponent()
+ .getSampledSpanStore()
+ .registerSpanNamesForCollection(
+ ImmutableList.of("DescribeMetricsForPrometheus", "ExportStatsToPrometheus"));
+ }
+
+ private static String exceptionMessage(Throwable e) {
+ return e.getMessage() != null ? e.getMessage() : e.getClass().getName();
+ }
+}
diff --git a/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsConfiguration.java b/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsConfiguration.java
new file mode 100644
index 00000000..3e8b95ed
--- /dev/null
+++ b/exporters/stats/prometheus/src/main/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsConfiguration.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.prometheus;
+
+import com.google.auto.value.AutoValue;
+import io.prometheus.client.CollectorRegistry;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * Configurations for {@link PrometheusStatsCollector}.
+ *
+ * @since 0.13
+ */
+@AutoValue
+@Immutable
+public abstract class PrometheusStatsConfiguration {
+
+ PrometheusStatsConfiguration() {}
+
+ /**
+ * Returns the Prometheus {@link CollectorRegistry}.
+ *
+ * @return the Prometheus {@code CollectorRegistry}.
+ * @since 0.13
+ */
+ @Nullable
+ public abstract CollectorRegistry getRegistry();
+
+ /**
+ * Returns a new {@link Builder}.
+ *
+ * @return a {@code Builder}.
+ * @since 0.13
+ */
+ public static Builder builder() {
+ return new AutoValue_PrometheusStatsConfiguration.Builder();
+ }
+
+ /**
+ * Builder for {@link PrometheusStatsConfiguration}.
+ *
+ * @since 0.13
+ */
+ @AutoValue.Builder
+ public abstract static class Builder {
+
+ Builder() {}
+
+ /**
+ * Sets the given Prometheus {@link CollectorRegistry}.
+ *
+ * @param registry the Prometheus {@code CollectorRegistry}.
+ * @return this.
+ * @since 0.13
+ */
+ public abstract Builder setRegistry(CollectorRegistry registry);
+
+ /**
+ * Builds a new {@link PrometheusStatsConfiguration} with current settings.
+ *
+ * @return a {@code PrometheusStatsConfiguration}.
+ * @since 0.13
+ */
+ public abstract PrometheusStatsConfiguration build();
+ }
+}
diff --git a/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtilsTest.java b/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtilsTest.java
new file mode 100644
index 00000000..ca8315b9
--- /dev/null
+++ b/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusExportUtilsTest.java
@@ -0,0 +1,326 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.prometheus;
+
+import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.LABEL_NAME_BUCKET_BOUND;
+import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.SAMPLE_SUFFIX_BUCKET;
+import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.SAMPLE_SUFFIX_COUNT;
+import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.SAMPLE_SUFFIX_SUM;
+import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.convertToLabelNames;
+
+import com.google.common.collect.ImmutableMap;
+import io.opencensus.common.Duration;
+import io.opencensus.common.Timestamp;
+import io.opencensus.stats.Aggregation.Count;
+import io.opencensus.stats.Aggregation.Distribution;
+import io.opencensus.stats.Aggregation.LastValue;
+import io.opencensus.stats.Aggregation.Mean;
+import io.opencensus.stats.Aggregation.Sum;
+import io.opencensus.stats.AggregationData.CountData;
+import io.opencensus.stats.AggregationData.DistributionData;
+import io.opencensus.stats.AggregationData.LastValueDataDouble;
+import io.opencensus.stats.AggregationData.LastValueDataLong;
+import io.opencensus.stats.AggregationData.MeanData;
+import io.opencensus.stats.AggregationData.SumDataDouble;
+import io.opencensus.stats.AggregationData.SumDataLong;
+import io.opencensus.stats.BucketBoundaries;
+import io.opencensus.stats.Measure.MeasureDouble;
+import io.opencensus.stats.View;
+import io.opencensus.stats.View.AggregationWindow.Cumulative;
+import io.opencensus.stats.View.AggregationWindow.Interval;
+import io.opencensus.stats.ViewData;
+import io.opencensus.stats.ViewData.AggregationWindowData.CumulativeData;
+import io.opencensus.stats.ViewData.AggregationWindowData.IntervalData;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagValue;
+import io.prometheus.client.Collector.MetricFamilySamples;
+import io.prometheus.client.Collector.MetricFamilySamples.Sample;
+import io.prometheus.client.Collector.Type;
+import java.util.Arrays;
+import java.util.Collections;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link PrometheusExportUtils}. */
+@RunWith(JUnit4.class)
+public class PrometheusExportUtilsTest {
+
+ @Rule public final ExpectedException thrown = ExpectedException.none();
+
+ private static final Duration ONE_SECOND = Duration.create(1, 0);
+ private static final Cumulative CUMULATIVE = Cumulative.create();
+ private static final Interval INTERVAL = Interval.create(ONE_SECOND);
+ private static final Sum SUM = Sum.create();
+ private static final Count COUNT = Count.create();
+ private static final Mean MEAN = Mean.create();
+ private static final BucketBoundaries BUCKET_BOUNDARIES =
+ BucketBoundaries.create(Arrays.asList(-5.0, 0.0, 5.0));
+ private static final Distribution DISTRIBUTION = Distribution.create(BUCKET_BOUNDARIES);
+ private static final LastValue LAST_VALUE = LastValue.create();
+ private static final View.Name VIEW_NAME_1 = View.Name.create("view1");
+ private static final View.Name VIEW_NAME_2 = View.Name.create("view2");
+ private static final View.Name VIEW_NAME_3 = View.Name.create("view-3");
+ private static final View.Name VIEW_NAME_4 = View.Name.create("-view4");
+ private static final String DESCRIPTION = "View description";
+ private static final MeasureDouble MEASURE_DOUBLE =
+ MeasureDouble.create("measure", "description", "1");
+ private static final TagKey K1 = TagKey.create("k1");
+ private static final TagKey K2 = TagKey.create("k2");
+ private static final TagKey K3 = TagKey.create("k-3");
+ private static final TagKey TAG_KEY_LE = TagKey.create(LABEL_NAME_BUCKET_BOUND);
+ private static final TagValue V1 = TagValue.create("v1");
+ private static final TagValue V2 = TagValue.create("v2");
+ private static final TagValue V3 = TagValue.create("v-3");
+ private static final SumDataDouble SUM_DATA_DOUBLE = SumDataDouble.create(-5.5);
+ private static final SumDataLong SUM_DATA_LONG = SumDataLong.create(123456789);
+ private static final CountData COUNT_DATA = CountData.create(12345);
+ private static final MeanData MEAN_DATA = MeanData.create(3.4, 22);
+ private static final DistributionData DISTRIBUTION_DATA =
+ DistributionData.create(4.4, 5, -3.2, 15.7, 135.22, Arrays.asList(0L, 2L, 2L, 1L));
+ private static final LastValueDataDouble LAST_VALUE_DATA_DOUBLE = LastValueDataDouble.create(7.9);
+ private static final LastValueDataLong LAST_VALUE_DATA_LONG = LastValueDataLong.create(66666666);
+ private static final View VIEW1 =
+ View.create(
+ VIEW_NAME_1, DESCRIPTION, MEASURE_DOUBLE, COUNT, Arrays.asList(K1, K2), CUMULATIVE);
+ private static final View VIEW2 =
+ View.create(VIEW_NAME_2, DESCRIPTION, MEASURE_DOUBLE, MEAN, Arrays.asList(K3), CUMULATIVE);
+ private static final View VIEW3 =
+ View.create(
+ VIEW_NAME_3, DESCRIPTION, MEASURE_DOUBLE, DISTRIBUTION, Arrays.asList(K1), CUMULATIVE);
+ private static final View VIEW4 =
+ View.create(VIEW_NAME_4, DESCRIPTION, MEASURE_DOUBLE, COUNT, Arrays.asList(K1), INTERVAL);
+ private static final View DISTRIBUTION_VIEW_WITH_LE_KEY =
+ View.create(
+ VIEW_NAME_1,
+ DESCRIPTION,
+ MEASURE_DOUBLE,
+ DISTRIBUTION,
+ Arrays.asList(K1, TAG_KEY_LE),
+ CUMULATIVE);
+ private static final CumulativeData CUMULATIVE_DATA =
+ CumulativeData.create(Timestamp.fromMillis(1000), Timestamp.fromMillis(2000));
+ private static final IntervalData INTERVAL_DATA = IntervalData.create(Timestamp.fromMillis(1000));
+ private static final String SAMPLE_NAME = "view";
+
+ @Test
+ public void testConstants() {
+ assertThat(SAMPLE_SUFFIX_BUCKET).isEqualTo("_bucket");
+ assertThat(SAMPLE_SUFFIX_COUNT).isEqualTo("_count");
+ assertThat(SAMPLE_SUFFIX_SUM).isEqualTo("_sum");
+ assertThat(LABEL_NAME_BUCKET_BOUND).isEqualTo("le");
+ }
+
+ @Test
+ public void getType() {
+ assertThat(PrometheusExportUtils.getType(COUNT, INTERVAL)).isEqualTo(Type.UNTYPED);
+ assertThat(PrometheusExportUtils.getType(COUNT, CUMULATIVE)).isEqualTo(Type.COUNTER);
+ assertThat(PrometheusExportUtils.getType(DISTRIBUTION, CUMULATIVE)).isEqualTo(Type.HISTOGRAM);
+ assertThat(PrometheusExportUtils.getType(SUM, CUMULATIVE)).isEqualTo(Type.UNTYPED);
+ assertThat(PrometheusExportUtils.getType(MEAN, CUMULATIVE)).isEqualTo(Type.SUMMARY);
+ assertThat(PrometheusExportUtils.getType(LAST_VALUE, CUMULATIVE)).isEqualTo(Type.GAUGE);
+ }
+
+ @Test
+ public void createDescribableMetricFamilySamples() {
+ assertThat(PrometheusExportUtils.createDescribableMetricFamilySamples(VIEW1))
+ .isEqualTo(
+ new MetricFamilySamples(
+ "view1", Type.COUNTER, DESCRIPTION, Collections.<Sample>emptyList()));
+ assertThat(PrometheusExportUtils.createDescribableMetricFamilySamples(VIEW2))
+ .isEqualTo(
+ new MetricFamilySamples(
+ "view2", Type.SUMMARY, DESCRIPTION, Collections.<Sample>emptyList()));
+ assertThat(PrometheusExportUtils.createDescribableMetricFamilySamples(VIEW3))
+ .isEqualTo(
+ new MetricFamilySamples(
+ "view_3", Type.HISTOGRAM, DESCRIPTION, Collections.<Sample>emptyList()));
+ assertThat(PrometheusExportUtils.createDescribableMetricFamilySamples(VIEW4))
+ .isEqualTo(
+ new MetricFamilySamples(
+ "_view4", Type.UNTYPED, DESCRIPTION, Collections.<Sample>emptyList()));
+ }
+
+ @Test
+ public void getSamples() {
+ assertThat(
+ PrometheusExportUtils.getSamples(
+ SAMPLE_NAME,
+ convertToLabelNames(Arrays.asList(K1, K2)),
+ Arrays.asList(V1, V2),
+ SUM_DATA_DOUBLE,
+ SUM))
+ .containsExactly(
+ new Sample(SAMPLE_NAME, Arrays.asList("k1", "k2"), Arrays.asList("v1", "v2"), -5.5));
+ assertThat(
+ PrometheusExportUtils.getSamples(
+ SAMPLE_NAME,
+ convertToLabelNames(Arrays.asList(K3)),
+ Arrays.asList(V3),
+ SUM_DATA_LONG,
+ SUM))
+ .containsExactly(
+ new Sample(SAMPLE_NAME, Arrays.asList("k_3"), Arrays.asList("v-3"), 123456789));
+ assertThat(
+ PrometheusExportUtils.getSamples(
+ SAMPLE_NAME,
+ convertToLabelNames(Arrays.asList(K1, K3)),
+ Arrays.asList(V1, null),
+ COUNT_DATA,
+ COUNT))
+ .containsExactly(
+ new Sample(SAMPLE_NAME, Arrays.asList("k1", "k_3"), Arrays.asList("v1", ""), 12345));
+ assertThat(
+ PrometheusExportUtils.getSamples(
+ SAMPLE_NAME,
+ convertToLabelNames(Arrays.asList(K3)),
+ Arrays.asList(V3),
+ MEAN_DATA,
+ MEAN))
+ .containsExactly(
+ new Sample(SAMPLE_NAME + "_count", Arrays.asList("k_3"), Arrays.asList("v-3"), 22),
+ new Sample(SAMPLE_NAME + "_sum", Arrays.asList("k_3"), Arrays.asList("v-3"), 74.8))
+ .inOrder();
+ assertThat(
+ PrometheusExportUtils.getSamples(
+ SAMPLE_NAME,
+ convertToLabelNames(Arrays.asList(K1)),
+ Arrays.asList(V1),
+ DISTRIBUTION_DATA,
+ DISTRIBUTION))
+ .containsExactly(
+ new Sample(
+ SAMPLE_NAME + "_bucket", Arrays.asList("k1", "le"), Arrays.asList("v1", "-5.0"), 0),
+ new Sample(
+ SAMPLE_NAME + "_bucket", Arrays.asList("k1", "le"), Arrays.asList("v1", "0.0"), 2),
+ new Sample(
+ SAMPLE_NAME + "_bucket", Arrays.asList("k1", "le"), Arrays.asList("v1", "5.0"), 4),
+ new Sample(
+ SAMPLE_NAME + "_bucket", Arrays.asList("k1", "le"), Arrays.asList("v1", "+Inf"), 5),
+ new Sample(SAMPLE_NAME + "_count", Arrays.asList("k1"), Arrays.asList("v1"), 5),
+ new Sample(SAMPLE_NAME + "_sum", Arrays.asList("k1"), Arrays.asList("v1"), 22.0))
+ .inOrder();
+ assertThat(
+ PrometheusExportUtils.getSamples(
+ SAMPLE_NAME,
+ convertToLabelNames(Arrays.asList(K1, K2)),
+ Arrays.asList(V1, V2),
+ LAST_VALUE_DATA_DOUBLE,
+ LAST_VALUE))
+ .containsExactly(
+ new Sample(SAMPLE_NAME, Arrays.asList("k1", "k2"), Arrays.asList("v1", "v2"), 7.9));
+ assertThat(
+ PrometheusExportUtils.getSamples(
+ SAMPLE_NAME,
+ convertToLabelNames(Arrays.asList(K3)),
+ Arrays.asList(V3),
+ LAST_VALUE_DATA_LONG,
+ LAST_VALUE))
+ .containsExactly(
+ new Sample(SAMPLE_NAME, Arrays.asList("k_3"), Arrays.asList("v-3"), 66666666));
+ }
+
+ @Test
+ public void getSamples_KeysAndValuesHaveDifferentSizes() {
+ thrown.expect(IllegalArgumentException.class);
+ thrown.expectMessage("Label names and tag values have different sizes.");
+ PrometheusExportUtils.getSamples(
+ SAMPLE_NAME,
+ convertToLabelNames(Arrays.asList(K1, K2, K3)),
+ Arrays.asList(V1, V2),
+ DISTRIBUTION_DATA,
+ DISTRIBUTION);
+ }
+
+ @Test
+ public void createDescribableMetricFamilySamples_Histogram_DisallowLeLabelName() {
+ thrown.expect(IllegalStateException.class);
+ thrown.expectMessage(
+ "Prometheus Histogram cannot have a label named 'le', "
+ + "because it is a reserved label for bucket boundaries. "
+ + "Please remove this tag key from your view.");
+ PrometheusExportUtils.createDescribableMetricFamilySamples(DISTRIBUTION_VIEW_WITH_LE_KEY);
+ }
+
+ @Test
+ public void createMetricFamilySamples() {
+ assertThat(
+ PrometheusExportUtils.createMetricFamilySamples(
+ ViewData.create(
+ VIEW1, ImmutableMap.of(Arrays.asList(V1, V2), COUNT_DATA), CUMULATIVE_DATA)))
+ .isEqualTo(
+ new MetricFamilySamples(
+ "view1",
+ Type.COUNTER,
+ DESCRIPTION,
+ Arrays.asList(
+ new Sample(
+ "view1", Arrays.asList("k1", "k2"), Arrays.asList("v1", "v2"), 12345))));
+ assertThat(
+ PrometheusExportUtils.createMetricFamilySamples(
+ ViewData.create(
+ VIEW2, ImmutableMap.of(Arrays.asList(V1), MEAN_DATA), CUMULATIVE_DATA)))
+ .isEqualTo(
+ new MetricFamilySamples(
+ "view2",
+ Type.SUMMARY,
+ DESCRIPTION,
+ Arrays.asList(
+ new Sample("view2_count", Arrays.asList("k_3"), Arrays.asList("v1"), 22),
+ new Sample("view2_sum", Arrays.asList("k_3"), Arrays.asList("v1"), 74.8))));
+ assertThat(
+ PrometheusExportUtils.createMetricFamilySamples(
+ ViewData.create(
+ VIEW3, ImmutableMap.of(Arrays.asList(V3), DISTRIBUTION_DATA), CUMULATIVE_DATA)))
+ .isEqualTo(
+ new MetricFamilySamples(
+ "view_3",
+ Type.HISTOGRAM,
+ DESCRIPTION,
+ Arrays.asList(
+ new Sample(
+ "view_3_bucket",
+ Arrays.asList("k1", "le"),
+ Arrays.asList("v-3", "-5.0"),
+ 0),
+ new Sample(
+ "view_3_bucket", Arrays.asList("k1", "le"), Arrays.asList("v-3", "0.0"), 2),
+ new Sample(
+ "view_3_bucket", Arrays.asList("k1", "le"), Arrays.asList("v-3", "5.0"), 4),
+ new Sample(
+ "view_3_bucket",
+ Arrays.asList("k1", "le"),
+ Arrays.asList("v-3", "+Inf"),
+ 5),
+ new Sample("view_3_count", Arrays.asList("k1"), Arrays.asList("v-3"), 5),
+ new Sample("view_3_sum", Arrays.asList("k1"), Arrays.asList("v-3"), 22.0))));
+ assertThat(
+ PrometheusExportUtils.createMetricFamilySamples(
+ ViewData.create(
+ VIEW4, ImmutableMap.of(Arrays.asList(V1), COUNT_DATA), INTERVAL_DATA)))
+ .isEqualTo(
+ new MetricFamilySamples(
+ "_view4",
+ Type.UNTYPED,
+ DESCRIPTION,
+ Arrays.asList(
+ new Sample("_view4", Arrays.asList("k1"), Arrays.asList("v1"), 12345))));
+ }
+}
diff --git a/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollectorTest.java b/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollectorTest.java
new file mode 100644
index 00000000..3bd98451
--- /dev/null
+++ b/exporters/stats/prometheus/src/test/java/io/opencensus/exporter/stats/prometheus/PrometheusStatsCollectorTest.java
@@ -0,0 +1,168 @@
+/*
+ * Copyright 2018, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.prometheus;
+
+import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.exporter.stats.prometheus.PrometheusExportUtils.LABEL_NAME_BUCKET_BOUND;
+import static org.mockito.Mockito.doReturn;
+
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import io.opencensus.common.Timestamp;
+import io.opencensus.stats.Aggregation.Distribution;
+import io.opencensus.stats.AggregationData.DistributionData;
+import io.opencensus.stats.BucketBoundaries;
+import io.opencensus.stats.Measure.MeasureDouble;
+import io.opencensus.stats.Stats;
+import io.opencensus.stats.View;
+import io.opencensus.stats.View.AggregationWindow.Cumulative;
+import io.opencensus.stats.ViewData;
+import io.opencensus.stats.ViewData.AggregationWindowData.CumulativeData;
+import io.opencensus.stats.ViewManager;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagValue;
+import io.prometheus.client.Collector.MetricFamilySamples;
+import io.prometheus.client.Collector.MetricFamilySamples.Sample;
+import io.prometheus.client.Collector.Type;
+import java.util.Arrays;
+import java.util.Collections;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+/** Unit tests for {@link PrometheusStatsCollector}. */
+@RunWith(JUnit4.class)
+public class PrometheusStatsCollectorTest {
+
+ private static final Cumulative CUMULATIVE = Cumulative.create();
+ private static final BucketBoundaries BUCKET_BOUNDARIES =
+ BucketBoundaries.create(Arrays.asList(-5.0, 0.0, 5.0));
+ private static final Distribution DISTRIBUTION = Distribution.create(BUCKET_BOUNDARIES);
+ private static final View.Name VIEW_NAME = View.Name.create("view1");
+ private static final String DESCRIPTION = "View description";
+ private static final MeasureDouble MEASURE_DOUBLE =
+ MeasureDouble.create("measure", "description", "1");
+ private static final TagKey K1 = TagKey.create("k1");
+ private static final TagKey K2 = TagKey.create("k2");
+ private static final TagKey LE_TAG_KEY = TagKey.create(LABEL_NAME_BUCKET_BOUND);
+ private static final TagValue V1 = TagValue.create("v1");
+ private static final TagValue V2 = TagValue.create("v2");
+ private static final DistributionData DISTRIBUTION_DATA =
+ DistributionData.create(4.4, 5, -3.2, 15.7, 135.22, Arrays.asList(0L, 2L, 2L, 1L));
+ private static final View VIEW =
+ View.create(
+ VIEW_NAME, DESCRIPTION, MEASURE_DOUBLE, DISTRIBUTION, Arrays.asList(K1, K2), CUMULATIVE);
+ private static final View VIEW_WITH_LE_TAG_KEY =
+ View.create(
+ VIEW_NAME,
+ DESCRIPTION,
+ MEASURE_DOUBLE,
+ DISTRIBUTION,
+ Arrays.asList(K1, LE_TAG_KEY),
+ CUMULATIVE);
+ private static final CumulativeData CUMULATIVE_DATA =
+ CumulativeData.create(Timestamp.fromMillis(1000), Timestamp.fromMillis(2000));
+ private static final ViewData VIEW_DATA =
+ ViewData.create(
+ VIEW, ImmutableMap.of(Arrays.asList(V1, V2), DISTRIBUTION_DATA), CUMULATIVE_DATA);
+ private static final ViewData VIEW_DATA_WITH_LE_TAG_KEY =
+ ViewData.create(
+ VIEW_WITH_LE_TAG_KEY,
+ ImmutableMap.of(Arrays.asList(V1, V2), DISTRIBUTION_DATA),
+ CUMULATIVE_DATA);
+
+ @Mock private ViewManager mockViewManager;
+
+ @Before
+ public void setUp() {
+ MockitoAnnotations.initMocks(this);
+ doReturn(ImmutableSet.of(VIEW)).when(mockViewManager).getAllExportedViews();
+ doReturn(VIEW_DATA).when(mockViewManager).getView(VIEW_NAME);
+ }
+
+ @Test
+ public void testCollect() {
+ PrometheusStatsCollector collector = new PrometheusStatsCollector(mockViewManager);
+ String name = "view1";
+ assertThat(collector.collect())
+ .containsExactly(
+ new MetricFamilySamples(
+ "view1",
+ Type.HISTOGRAM,
+ "View description",
+ Arrays.asList(
+ new Sample(
+ name + "_bucket",
+ Arrays.asList("k1", "k2", "le"),
+ Arrays.asList("v1", "v2", "-5.0"),
+ 0),
+ new Sample(
+ name + "_bucket",
+ Arrays.asList("k1", "k2", "le"),
+ Arrays.asList("v1", "v2", "0.0"),
+ 2),
+ new Sample(
+ name + "_bucket",
+ Arrays.asList("k1", "k2", "le"),
+ Arrays.asList("v1", "v2", "5.0"),
+ 4),
+ new Sample(
+ name + "_bucket",
+ Arrays.asList("k1", "k2", "le"),
+ Arrays.asList("v1", "v2", "+Inf"),
+ 5),
+ new Sample(
+ name + "_count", Arrays.asList("k1", "k2"), Arrays.asList("v1", "v2"), 5),
+ new Sample(
+ name + "_sum",
+ Arrays.asList("k1", "k2"),
+ Arrays.asList("v1", "v2"),
+ 22.0))));
+ }
+
+ @Test
+ public void testCollect_SkipDistributionViewWithLeTagKey() {
+ doReturn(ImmutableSet.of(VIEW_WITH_LE_TAG_KEY)).when(mockViewManager).getAllExportedViews();
+ doReturn(VIEW_DATA_WITH_LE_TAG_KEY).when(mockViewManager).getView(VIEW_NAME);
+ PrometheusStatsCollector collector = new PrometheusStatsCollector(mockViewManager);
+ assertThat(collector.collect()).isEmpty();
+ }
+
+ @Test
+ public void testDescribe() {
+ PrometheusStatsCollector collector = new PrometheusStatsCollector(mockViewManager);
+ assertThat(collector.describe())
+ .containsExactly(
+ new MetricFamilySamples(
+ "view1", Type.HISTOGRAM, "View description", Collections.<Sample>emptyList()));
+ }
+
+ @Test
+ public void testCollect_WithNoopViewManager() {
+ PrometheusStatsCollector collector = new PrometheusStatsCollector(Stats.getViewManager());
+ assertThat(collector.collect()).isEmpty();
+ }
+
+ @Test
+ public void testDescribe_WithNoopViewManager() {
+ PrometheusStatsCollector collector = new PrometheusStatsCollector(Stats.getViewManager());
+ assertThat(collector.describe()).isEmpty();
+ }
+}
diff --git a/exporters/stats/signalfx/README.md b/exporters/stats/signalfx/README.md
new file mode 100644
index 00000000..7c61f896
--- /dev/null
+++ b/exporters/stats/signalfx/README.md
@@ -0,0 +1,76 @@
+# OpenCensus SignalFx Stats Exporter
+
+The _OpenCensus SignalFx Stats Exporter_ is a stats exporter that
+exports data to [SignalFx](https://signalfx.com), a real-time monitoring
+solution for cloud and distributed applications. SignalFx ingests that
+data and offers various visualizations on charts, dashboards and service
+maps, as well as real-time anomaly detection.
+
+## Quickstart
+
+### Prerequisites
+
+To use this exporter, you must have a [SignalFx](https://signalfx.com)
+account and corresponding [data ingest
+token](https://docs.signalfx.com/en/latest/admin-guide/tokens.html).
+
+#### Java versions
+
+This exporter requires Java 7 or above.
+
+### Add the dependencies to your project
+
+For Maven add to your `pom.xml`:
+
+```xml
+<dependencies>
+ <dependency>
+ <groupId>io.opencensus</groupId>
+ <artifactId>opencensus-api</artifactId>
+ <version>0.16.1</version>
+ </dependency>
+ <dependency>
+ <groupId>io.opencensus</groupId>
+ <artifactId>opencensus-exporter-stats-signalfx</artifactId>
+ <version>0.16.1</version>
+ </dependency>
+ <dependency>
+ <groupId>io.opencensus</groupId>
+ <artifactId>opencensus-impl</artifactId>
+ <version>0.16.1</version>
+ <scope>runtime</scope>
+ </dependency>
+</dependencies>
+```
+
+For Gradle add to your dependencies:
+
+```groovy
+compile 'io.opencensus:opencensus-api:0.16.1'
+compile 'io.opencensus:opencensus-exporter-stats-signalfx:0.16.1'
+runtime 'io.opencensus:opencensus-impl:0.16.1'
+```
+
+### Register the exporter
+
+```java
+public class MyMainClass {
+ public static void main(String[] args) {
+ // SignalFx token is read from Java system properties.
+ // Stats will be reported every second by default.
+ SignalFxStatsExporter.create(SignalFxStatsConfiguration.builder().build());
+ }
+}
+```
+
+If you want to pass in the token yourself, or set a different reporting
+interval, use:
+
+```java
+// Use token "your_signalfx_token" and report every 5 seconds.
+SignalFxStatsExporter.create(
+ SignalFxStatsConfiguration.builder()
+ .setToken("your_signalfx_token")
+ .setExportInterval(Duration.create(5, 0))
+ .build());
+```
diff --git a/exporters/stats/signalfx/build.gradle b/exporters/stats/signalfx/build.gradle
new file mode 100644
index 00000000..d496b1e5
--- /dev/null
+++ b/exporters/stats/signalfx/build.gradle
@@ -0,0 +1,23 @@
+description = 'OpenCensus SignalFx Stats Exporter'
+
+[compileJava, compileTestJava].each() {
+ it.sourceCompatibility = 1.7
+ it.targetCompatibility = 1.7
+}
+
+dependencies {
+ compileOnly libraries.auto_value
+
+ compile project(':opencensus-api'),
+ libraries.guava
+
+ compile (libraries.signalfx_java) {
+ // Prefer library version.
+ exclude group: 'com.google.guava', module: 'guava'
+ }
+
+ testCompile project(':opencensus-api')
+
+ signature "org.codehaus.mojo.signature:java17:1.0@signature"
+ signature "net.sf.androidscents.signature:android-api-level-14:4.0_r4@signature"
+}
diff --git a/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxMetricsSenderFactory.java b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxMetricsSenderFactory.java
new file mode 100644
index 00000000..5601a54c
--- /dev/null
+++ b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxMetricsSenderFactory.java
@@ -0,0 +1,59 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.signalfx;
+
+import com.signalfx.endpoint.SignalFxEndpoint;
+import com.signalfx.metrics.auth.StaticAuthToken;
+import com.signalfx.metrics.connection.HttpDataPointProtobufReceiverFactory;
+import com.signalfx.metrics.connection.HttpEventProtobufReceiverFactory;
+import com.signalfx.metrics.errorhandler.OnSendErrorHandler;
+import com.signalfx.metrics.flush.AggregateMetricSender;
+import java.net.URI;
+import java.util.Collections;
+
+/** Interface for creators of {@link AggregateMetricSender}. */
+interface SignalFxMetricsSenderFactory {
+
+ /**
+ * Creates a new SignalFx metrics sender instance.
+ *
+ * @param endpoint The SignalFx ingest endpoint URL.
+ * @param token The SignalFx ingest token.
+ * @param errorHandler An {@link OnSendErrorHandler} through which errors when sending data to
+ * SignalFx will be communicated.
+ * @return The created {@link AggregateMetricSender} instance.
+ */
+ AggregateMetricSender create(URI endpoint, String token, OnSendErrorHandler errorHandler);
+
+ /** The default, concrete implementation of this interface. */
+ SignalFxMetricsSenderFactory DEFAULT =
+ new SignalFxMetricsSenderFactory() {
+ @Override
+ @SuppressWarnings("nullness")
+ public AggregateMetricSender create(
+ URI endpoint, String token, OnSendErrorHandler errorHandler) {
+ SignalFxEndpoint sfx =
+ new SignalFxEndpoint(endpoint.getScheme(), endpoint.getHost(), endpoint.getPort());
+ return new AggregateMetricSender(
+ null,
+ new HttpDataPointProtobufReceiverFactory(sfx).setVersion(2),
+ new HttpEventProtobufReceiverFactory(sfx),
+ new StaticAuthToken(token),
+ Collections.singleton(errorHandler));
+ }
+ };
+}
diff --git a/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptor.java b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptor.java
new file mode 100644
index 00000000..2eb75c4c
--- /dev/null
+++ b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptor.java
@@ -0,0 +1,188 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.signalfx;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.DataPoint;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Datum;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Dimension;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.MetricType;
+import io.opencensus.common.Function;
+import io.opencensus.stats.Aggregation;
+import io.opencensus.stats.AggregationData;
+import io.opencensus.stats.AggregationData.CountData;
+import io.opencensus.stats.AggregationData.DistributionData;
+import io.opencensus.stats.AggregationData.LastValueDataDouble;
+import io.opencensus.stats.AggregationData.LastValueDataLong;
+import io.opencensus.stats.AggregationData.SumDataDouble;
+import io.opencensus.stats.AggregationData.SumDataLong;
+import io.opencensus.stats.View;
+import io.opencensus.stats.ViewData;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagValue;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.List;
+import java.util.ListIterator;
+import java.util.Map;
+
+/*>>>
+import org.checkerframework.checker.nullness.qual.Nullable;
+*/
+
+/** Adapts the contents of a {@code ViewData} into SignalFx datapoints. */
+@SuppressWarnings("deprecation")
+final class SignalFxSessionAdaptor {
+
+ private SignalFxSessionAdaptor() {}
+
+ /**
+ * Converts the given view data into datapoints that can be sent to SignalFx.
+ *
+ * <p>The view name is used as the metric name, and the aggregation type and aggregation window
+ * type determine the metric type.
+ *
+ * @param data The {@link ViewData} containing the aggregation data of each combination of tag
+ * values.
+ * @return A list of datapoints for the corresponding metric timeseries of this view's metric.
+ */
+ static List<DataPoint> adapt(ViewData data) {
+ View view = data.getView();
+ List<TagKey> keys = view.getColumns();
+
+ MetricType metricType = getMetricTypeForAggregation(view.getAggregation(), view.getWindow());
+ if (metricType == null) {
+ return Collections.emptyList();
+ }
+
+ List<DataPoint> datapoints = new ArrayList<>(data.getAggregationMap().size());
+ for (Map.Entry<List</*@Nullable*/ TagValue>, AggregationData> entry :
+ data.getAggregationMap().entrySet()) {
+ datapoints.add(
+ DataPoint.newBuilder()
+ .setMetric(view.getName().asString())
+ .setMetricType(metricType)
+ .addAllDimensions(createDimensions(keys, entry.getKey()))
+ .setValue(createDatum(entry.getValue()))
+ .build());
+ }
+ return datapoints;
+ }
+
+ @VisibleForTesting
+ @javax.annotation.Nullable
+ static MetricType getMetricTypeForAggregation(
+ Aggregation aggregation, View.AggregationWindow window) {
+ if (aggregation instanceof Aggregation.Mean || aggregation instanceof Aggregation.LastValue) {
+ return MetricType.GAUGE;
+ } else if (aggregation instanceof Aggregation.Count || aggregation instanceof Aggregation.Sum) {
+ if (window instanceof View.AggregationWindow.Cumulative) {
+ return MetricType.CUMULATIVE_COUNTER;
+ }
+ // TODO(mpetazzoni): support incremental counters when AggregationWindow.Interval is ready
+ }
+
+ // TODO(mpetazzoni): add support for histograms (Aggregation.Distribution).
+ return null;
+ }
+
+ @VisibleForTesting
+ static Iterable<Dimension> createDimensions(
+ List<TagKey> keys, List</*@Nullable*/ TagValue> values) {
+ Preconditions.checkArgument(
+ keys.size() == values.size(), "TagKeys and TagValues don't have the same size.");
+ List<Dimension> dimensions = new ArrayList<>(keys.size());
+ for (ListIterator<TagKey> it = keys.listIterator(); it.hasNext(); ) {
+ TagKey key = it.next();
+ TagValue value = values.get(it.previousIndex());
+ if (value == null || Strings.isNullOrEmpty(value.asString())) {
+ continue;
+ }
+ dimensions.add(createDimension(key, value));
+ }
+ return dimensions;
+ }
+
+ @VisibleForTesting
+ static Dimension createDimension(TagKey key, TagValue value) {
+ return Dimension.newBuilder().setKey(key.getName()).setValue(value.asString()).build();
+ }
+
+ @VisibleForTesting
+ static Datum createDatum(AggregationData data) {
+ final Datum.Builder builder = Datum.newBuilder();
+ data.match(
+ new Function<SumDataDouble, Void>() {
+ @Override
+ public Void apply(SumDataDouble arg) {
+ builder.setDoubleValue(arg.getSum());
+ return null;
+ }
+ },
+ new Function<SumDataLong, Void>() {
+ @Override
+ public Void apply(SumDataLong arg) {
+ builder.setIntValue(arg.getSum());
+ return null;
+ }
+ },
+ new Function<CountData, Void>() {
+ @Override
+ public Void apply(CountData arg) {
+ builder.setIntValue(arg.getCount());
+ return null;
+ }
+ },
+ new Function<DistributionData, Void>() {
+ @Override
+ public Void apply(DistributionData arg) {
+ // TODO(mpetazzoni): add histogram support.
+ throw new IllegalArgumentException("Distribution aggregations are not supported");
+ }
+ },
+ new Function<LastValueDataDouble, Void>() {
+ @Override
+ public Void apply(LastValueDataDouble arg) {
+ builder.setDoubleValue(arg.getLastValue());
+ return null;
+ }
+ },
+ new Function<LastValueDataLong, Void>() {
+ @Override
+ public Void apply(LastValueDataLong arg) {
+ builder.setIntValue(arg.getLastValue());
+ return null;
+ }
+ },
+ new Function<AggregationData, Void>() {
+ @Override
+ public Void apply(AggregationData arg) {
+ // TODO(songya): remove this once Mean aggregation is completely removed. Before that
+ // we need to continue supporting Mean, since it could still be used by users and some
+ // deprecated RPC views.
+ if (arg instanceof AggregationData.MeanData) {
+ builder.setDoubleValue(((AggregationData.MeanData) arg).getMean());
+ return null;
+ }
+ throw new IllegalArgumentException("Unknown Aggregation.");
+ }
+ });
+ return builder.build();
+ }
+}
diff --git a/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsConfiguration.java b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsConfiguration.java
new file mode 100644
index 00000000..e8b4d756
--- /dev/null
+++ b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsConfiguration.java
@@ -0,0 +1,153 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.signalfx;
+
+import com.google.auto.value.AutoValue;
+import com.google.common.base.Preconditions;
+import com.google.common.base.Strings;
+import io.opencensus.common.Duration;
+import java.net.URI;
+import java.net.URISyntaxException;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * Configurations for {@link SignalFxStatsExporter}.
+ *
+ * @since 0.11
+ */
+@AutoValue
+@Immutable
+public abstract class SignalFxStatsConfiguration {
+
+ /**
+ * The default SignalFx ingest API URL.
+ *
+ * @since 0.11
+ */
+ public static final URI DEFAULT_SIGNALFX_ENDPOINT;
+
+ static {
+ try {
+ DEFAULT_SIGNALFX_ENDPOINT = new URI("https://ingest.signalfx.com");
+ } catch (URISyntaxException e) {
+ // This shouldn't happen if DEFAULT_SIGNALFX_ENDPOINT was typed in correctly.
+ throw new IllegalStateException(e);
+ }
+ }
+
+ /**
+ * The default stats export interval.
+ *
+ * @since 0.11
+ */
+ public static final Duration DEFAULT_EXPORT_INTERVAL = Duration.create(1, 0);
+
+ private static final Duration ZERO = Duration.create(0, 0);
+
+ SignalFxStatsConfiguration() {}
+
+ /**
+ * Returns the SignalFx ingest API URL.
+ *
+ * @return the SignalFx ingest API URL.
+ * @since 0.11
+ */
+ public abstract URI getIngestEndpoint();
+
+ /**
+ * Returns the authentication token.
+ *
+ * @return the authentication token.
+ * @since 0.11
+ */
+ public abstract String getToken();
+
+ /**
+ * Returns the export interval between pushes to SignalFx.
+ *
+ * @return the export interval.
+ * @since 0.11
+ */
+ public abstract Duration getExportInterval();
+
+ /**
+ * Returns a new {@link Builder}.
+ *
+ * @return a {@code Builder}.
+ * @since 0.11
+ */
+ public static Builder builder() {
+ return new AutoValue_SignalFxStatsConfiguration.Builder()
+ .setIngestEndpoint(DEFAULT_SIGNALFX_ENDPOINT)
+ .setExportInterval(DEFAULT_EXPORT_INTERVAL);
+ }
+
+ /**
+ * Builder for {@link SignalFxStatsConfiguration}.
+ *
+ * @since 0.11
+ */
+ @AutoValue.Builder
+ public abstract static class Builder {
+
+ Builder() {}
+
+ /**
+ * Sets the given SignalFx ingest API URL.
+ *
+ * @param url the SignalFx ingest API URL.
+ * @return this.
+ * @since 0.11
+ */
+ public abstract Builder setIngestEndpoint(URI url);
+
+ /**
+ * Sets the given authentication token.
+ *
+ * @param token the authentication token.
+ * @return this.
+ * @since 0.11
+ */
+ public abstract Builder setToken(String token);
+
+ /**
+ * Sets the export interval.
+ *
+ * @param exportInterval the export interval between pushes to SignalFx.
+ * @return this.
+ * @since 0.11
+ */
+ public abstract Builder setExportInterval(Duration exportInterval);
+
+ abstract SignalFxStatsConfiguration autoBuild();
+
+ /**
+ * Builds a new {@link SignalFxStatsConfiguration} with current settings.
+ *
+ * @return a {@code SignalFxStatsConfiguration}.
+ * @since 0.11
+ */
+ public SignalFxStatsConfiguration build() {
+ SignalFxStatsConfiguration config = autoBuild();
+ Preconditions.checkArgument(
+ !Strings.isNullOrEmpty(config.getToken()), "Invalid SignalFx token");
+ Preconditions.checkArgument(
+ config.getExportInterval().compareTo(ZERO) > 0, "Interval duration must be positive");
+ return config;
+ }
+ }
+}
diff --git a/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporter.java b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporter.java
new file mode 100644
index 00000000..f7915b71
--- /dev/null
+++ b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporter.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.signalfx;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Preconditions;
+import io.opencensus.stats.Stats;
+import io.opencensus.stats.ViewManager;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.GuardedBy;
+
+/**
+ * Exporter to SignalFx.
+ *
+ * <p>Example of usage:
+ *
+ * <pre><code>
+ * public static void main(String[] args) {
+ *   SignalFxStatsExporter.create(
+ *       SignalFxStatsConfiguration.builder().setToken("your_signalfx_token").build());
+ * ... // Do work.
+ * }
+ * </code></pre>
+ *
+ * @since 0.11
+ */
+public final class SignalFxStatsExporter {
+
+ private static final Object monitor = new Object();
+
+ private final SignalFxStatsConfiguration configuration;
+ private final SignalFxStatsExporterWorkerThread workerThread;
+
+ @GuardedBy("monitor")
+ @Nullable
+ private static SignalFxStatsExporter exporter = null;
+
+ private SignalFxStatsExporter(SignalFxStatsConfiguration configuration, ViewManager viewManager) {
+ Preconditions.checkNotNull(configuration, "SignalFx stats exporter configuration");
+ this.configuration = configuration;
+ this.workerThread =
+ new SignalFxStatsExporterWorkerThread(
+ SignalFxMetricsSenderFactory.DEFAULT,
+ configuration.getIngestEndpoint(),
+ configuration.getToken(),
+ configuration.getExportInterval(),
+ viewManager);
+ }
+
+ /**
+ * Creates a SignalFx Stats exporter from the given {@link SignalFxStatsConfiguration}.
+ *
+ * <p>If {@code ingestEndpoint} is not set on the configuration, the exporter will use {@link
+ * SignalFxStatsConfiguration#DEFAULT_SIGNALFX_ENDPOINT}.
+ *
+ * <p>If {@code exportInterval} is not set on the configuration, the exporter will use {@link
+ * SignalFxStatsConfiguration#DEFAULT_EXPORT_INTERVAL}.
+ *
+ * @param configuration the {@code SignalFxStatsConfiguration}.
+ * @throws IllegalStateException if a SignalFx exporter is already created.
+ * @since 0.11
+ */
+ public static void create(SignalFxStatsConfiguration configuration) {
+ synchronized (monitor) {
+ Preconditions.checkState(exporter == null, "SignalFx stats exporter is already created.");
+ exporter = new SignalFxStatsExporter(configuration, Stats.getViewManager());
+ exporter.workerThread.start();
+ }
+ }
+
+ @VisibleForTesting
+ static void unsafeResetExporter() {
+ synchronized (monitor) {
+ if (exporter != null) {
+ SignalFxStatsExporterWorkerThread workerThread = exporter.workerThread;
+ if (workerThread != null && workerThread.isAlive()) {
+ try {
+ workerThread.interrupt();
+ workerThread.join();
+ } catch (InterruptedException e) {
+ Thread.currentThread().interrupt();
+ }
+ }
+ exporter = null;
+ }
+ }
+ }
+
+ @VisibleForTesting
+ @Nullable
+ static SignalFxStatsConfiguration unsafeGetConfig() {
+ synchronized (monitor) {
+ return exporter != null ? exporter.configuration : null;
+ }
+ }
+}
diff --git a/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThread.java b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThread.java
new file mode 100644
index 00000000..348778e2
--- /dev/null
+++ b/exporters/stats/signalfx/src/main/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThread.java
@@ -0,0 +1,105 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.signalfx;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.signalfx.metrics.errorhandler.MetricError;
+import com.signalfx.metrics.errorhandler.OnSendErrorHandler;
+import com.signalfx.metrics.flush.AggregateMetricSender;
+import com.signalfx.metrics.flush.AggregateMetricSender.Session;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.DataPoint;
+import io.opencensus.common.Duration;
+import io.opencensus.stats.View;
+import io.opencensus.stats.ViewData;
+import io.opencensus.stats.ViewManager;
+import java.io.IOException;
+import java.net.URI;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/**
+ * Worker {@code Thread} that polls {@code ViewData} from the stats library's {@code ViewManager}
+ * and exports it to SignalFx.
+ *
+ * <p>{@code SignalFxStatsExporterWorkerThread} is a daemon {@code Thread}.
+ */
+final class SignalFxStatsExporterWorkerThread extends Thread {
+
+ private static final Logger logger =
+ Logger.getLogger(SignalFxStatsExporterWorkerThread.class.getName());
+
+ private static final OnSendErrorHandler ERROR_HANDLER =
+ new OnSendErrorHandler() {
+ @Override
+ public void handleError(MetricError error) {
+ logger.log(Level.WARNING, "Unable to send metrics to SignalFx: {0}", error.getMessage());
+ }
+ };
+
+ private final long intervalMs;
+ private final ViewManager views;
+ private final AggregateMetricSender sender;
+
+ SignalFxStatsExporterWorkerThread(
+ SignalFxMetricsSenderFactory factory,
+ URI endpoint,
+ String token,
+ Duration interval,
+ ViewManager views) {
+ this.intervalMs = interval.toMillis();
+ this.views = views;
+ this.sender = factory.create(endpoint, token, ERROR_HANDLER);
+
+ setDaemon(true);
+ setName(getClass().getSimpleName());
+ logger.log(Level.FINE, "Initialized SignalFx exporter to {0}.", endpoint);
+ }
+
+ @VisibleForTesting
+ void export() throws IOException {
+ Session session = sender.createSession();
+ try {
+ for (View view : views.getAllExportedViews()) {
+ ViewData data = views.getView(view.getName());
+ if (data == null) {
+ continue;
+ }
+
+ for (DataPoint datapoint : SignalFxSessionAdaptor.adapt(data)) {
+ session.setDatapoint(datapoint);
+ }
+ }
+ } finally {
+ session.close();
+ }
+ }
+
+ @Override
+ public void run() {
+ while (true) {
+ try {
+ export();
+ Thread.sleep(intervalMs);
+ } catch (InterruptedException ie) {
+ Thread.currentThread().interrupt();
+ break;
+ } catch (Throwable e) {
+ logger.log(Level.WARNING, "Exception thrown by the SignalFx stats exporter", e);
+ }
+ }
+ logger.log(Level.INFO, "SignalFx stats exporter stopped.");
+ }
+}
diff --git a/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptorTest.java b/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptorTest.java
new file mode 100644
index 00000000..34f4dfa7
--- /dev/null
+++ b/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxSessionAdaptorTest.java
@@ -0,0 +1,320 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.signalfx;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.Lists;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.DataPoint;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Datum;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Dimension;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.MetricType;
+import io.opencensus.common.Duration;
+import io.opencensus.stats.Aggregation;
+import io.opencensus.stats.AggregationData;
+import io.opencensus.stats.AggregationData.CountData;
+import io.opencensus.stats.AggregationData.DistributionData;
+import io.opencensus.stats.AggregationData.LastValueDataDouble;
+import io.opencensus.stats.AggregationData.LastValueDataLong;
+import io.opencensus.stats.AggregationData.MeanData;
+import io.opencensus.stats.AggregationData.SumDataDouble;
+import io.opencensus.stats.AggregationData.SumDataLong;
+import io.opencensus.stats.BucketBoundaries;
+import io.opencensus.stats.View;
+import io.opencensus.stats.View.AggregationWindow;
+import io.opencensus.stats.View.Name;
+import io.opencensus.stats.ViewData;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagValue;
+import java.util.List;
+import java.util.Map;
+import org.junit.Before;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.runners.MockitoJUnitRunner;
+
+@RunWith(MockitoJUnitRunner.class)
+public class SignalFxSessionAdaptorTest {
+
+ private static final Duration ONE_SECOND = Duration.create(1, 0);
+
+ @Rule public final ExpectedException thrown = ExpectedException.none();
+
+ @Mock private View view;
+
+ @Mock private ViewData viewData;
+
+ @Before
+ public void setUp() {
+ Mockito.when(view.getName()).thenReturn(Name.create("view-name"));
+ Mockito.when(view.getColumns()).thenReturn(ImmutableList.of(TagKey.create("animal")));
+ Mockito.when(viewData.getView()).thenReturn(view);
+ }
+
+ @Test
+ public void checkMetricTypeFromAggregation() {
+ assertNull(SignalFxSessionAdaptor.getMetricTypeForAggregation(null, null));
+ assertNull(
+ SignalFxSessionAdaptor.getMetricTypeForAggregation(
+ null, AggregationWindow.Cumulative.create()));
+ assertEquals(
+ MetricType.GAUGE,
+ SignalFxSessionAdaptor.getMetricTypeForAggregation(
+ Aggregation.Mean.create(), AggregationWindow.Cumulative.create()));
+ assertEquals(
+ MetricType.GAUGE,
+ SignalFxSessionAdaptor.getMetricTypeForAggregation(
+ Aggregation.Mean.create(), AggregationWindow.Interval.create(ONE_SECOND)));
+ assertEquals(
+ MetricType.CUMULATIVE_COUNTER,
+ SignalFxSessionAdaptor.getMetricTypeForAggregation(
+ Aggregation.Count.create(), AggregationWindow.Cumulative.create()));
+ assertEquals(
+ MetricType.CUMULATIVE_COUNTER,
+ SignalFxSessionAdaptor.getMetricTypeForAggregation(
+ Aggregation.Sum.create(), AggregationWindow.Cumulative.create()));
+ assertNull(
+ SignalFxSessionAdaptor.getMetricTypeForAggregation(Aggregation.Count.create(), null));
+ assertNull(SignalFxSessionAdaptor.getMetricTypeForAggregation(Aggregation.Sum.create(), null));
+ assertNull(
+ SignalFxSessionAdaptor.getMetricTypeForAggregation(
+ Aggregation.Count.create(), AggregationWindow.Interval.create(ONE_SECOND)));
+ assertNull(
+ SignalFxSessionAdaptor.getMetricTypeForAggregation(
+ Aggregation.Sum.create(), AggregationWindow.Interval.create(ONE_SECOND)));
+ assertNull(
+ SignalFxSessionAdaptor.getMetricTypeForAggregation(
+ Aggregation.Distribution.create(BucketBoundaries.create(ImmutableList.of(3.15d))),
+ AggregationWindow.Cumulative.create()));
+ assertEquals(
+ MetricType.GAUGE,
+ SignalFxSessionAdaptor.getMetricTypeForAggregation(
+ Aggregation.LastValue.create(), AggregationWindow.Cumulative.create()));
+ assertEquals(
+ MetricType.GAUGE,
+ SignalFxSessionAdaptor.getMetricTypeForAggregation(
+ Aggregation.LastValue.create(), AggregationWindow.Interval.create(ONE_SECOND)));
+ }
+
+ @Test
+ public void createDimensionsWithNonMatchingListSizes() {
+ thrown.expect(IllegalArgumentException.class);
+ thrown.expectMessage("don't have the same size");
+ SignalFxSessionAdaptor.createDimensions(
+ ImmutableList.of(TagKey.create("animal"), TagKey.create("color")),
+ ImmutableList.of(TagValue.create("dog")));
+ }
+
+ @Test
+ public void createDimensionsIgnoresEmptyValues() {
+ List<Dimension> dimensions =
+ Lists.newArrayList(
+ SignalFxSessionAdaptor.createDimensions(
+ ImmutableList.of(TagKey.create("animal"), TagKey.create("color")),
+ ImmutableList.of(TagValue.create("dog"), TagValue.create(""))));
+ assertEquals(1, dimensions.size());
+ assertEquals("animal", dimensions.get(0).getKey());
+ assertEquals("dog", dimensions.get(0).getValue());
+ }
+
+ @Test
+ public void createDimension() {
+ Dimension dimension =
+ SignalFxSessionAdaptor.createDimension(TagKey.create("animal"), TagValue.create("dog"));
+ assertEquals("animal", dimension.getKey());
+ assertEquals("dog", dimension.getValue());
+ }
+
+ @Test
+ public void unsupportedAggregationYieldsNoDatapoints() {
+ Mockito.when(view.getAggregation())
+ .thenReturn(
+ Aggregation.Distribution.create(BucketBoundaries.create(ImmutableList.of(3.15d))));
+ Mockito.when(view.getWindow()).thenReturn(AggregationWindow.Cumulative.create());
+ List<DataPoint> datapoints = SignalFxSessionAdaptor.adapt(viewData);
+ assertEquals(0, datapoints.size());
+ }
+
+ @Test
+ public void noAggregationDataYieldsNoDatapoints() {
+ Mockito.when(view.getAggregation()).thenReturn(Aggregation.Count.create());
+ Mockito.when(view.getWindow()).thenReturn(AggregationWindow.Cumulative.create());
+ List<DataPoint> datapoints = SignalFxSessionAdaptor.adapt(viewData);
+ assertEquals(0, datapoints.size());
+ }
+
+ @Test
+ public void createDatumFromDoubleSum() {
+ SumDataDouble data = SumDataDouble.create(3.15d);
+ Datum datum = SignalFxSessionAdaptor.createDatum(data);
+ assertTrue(datum.hasDoubleValue());
+ assertFalse(datum.hasIntValue());
+ assertFalse(datum.hasStrValue());
+ assertEquals(3.15d, datum.getDoubleValue(), 0d);
+ }
+
+ @Test
+ public void createDatumFromLongSum() {
+ SumDataLong data = SumDataLong.create(42L);
+ Datum datum = SignalFxSessionAdaptor.createDatum(data);
+ assertFalse(datum.hasDoubleValue());
+ assertTrue(datum.hasIntValue());
+ assertFalse(datum.hasStrValue());
+ assertEquals(42L, datum.getIntValue());
+ }
+
+ @Test
+ public void createDatumFromCount() {
+ CountData data = CountData.create(42L);
+ Datum datum = SignalFxSessionAdaptor.createDatum(data);
+ assertFalse(datum.hasDoubleValue());
+ assertTrue(datum.hasIntValue());
+ assertFalse(datum.hasStrValue());
+ assertEquals(42L, datum.getIntValue());
+ }
+
+ @Test
+ public void createDatumFromMean() {
+ MeanData data = MeanData.create(3.15d, 2L);
+ Datum datum = SignalFxSessionAdaptor.createDatum(data);
+ assertTrue(datum.hasDoubleValue());
+ assertFalse(datum.hasIntValue());
+ assertFalse(datum.hasStrValue());
+ assertEquals(3.15d, datum.getDoubleValue(), 0d);
+ }
+
+ @Test
+ public void createDatumFromDistributionThrows() {
+ thrown.expect(IllegalArgumentException.class);
+ thrown.expectMessage("Distribution aggregations are not supported");
+ SignalFxSessionAdaptor.createDatum(
+ DistributionData.create(5, 2, 0, 10, 40, ImmutableList.of(1L)));
+ }
+
+ @Test
+ public void createDatumFromLastValueDouble() {
+ LastValueDataDouble data = LastValueDataDouble.create(12.2);
+ Datum datum = SignalFxSessionAdaptor.createDatum(data);
+ assertTrue(datum.hasDoubleValue());
+ assertFalse(datum.hasIntValue());
+ assertFalse(datum.hasStrValue());
+ assertEquals(12.2, datum.getDoubleValue(), 0d);
+ }
+
+ @Test
+ public void createDatumFromLastValueLong() {
+ LastValueDataLong data = LastValueDataLong.create(100000);
+ Datum datum = SignalFxSessionAdaptor.createDatum(data);
+ assertFalse(datum.hasDoubleValue());
+ assertTrue(datum.hasIntValue());
+ assertFalse(datum.hasStrValue());
+ assertEquals(100000, datum.getIntValue());
+ }
+
+ @Test
+ public void adaptViewIntoDatapoints() {
+ Map<List<TagValue>, AggregationData> map =
+ ImmutableMap.<List<TagValue>, AggregationData>of(
+ ImmutableList.of(TagValue.create("dog")),
+ SumDataLong.create(2L),
+ ImmutableList.of(TagValue.create("cat")),
+ SumDataLong.create(3L));
+ Mockito.when(viewData.getAggregationMap()).thenReturn(map);
+ Mockito.when(view.getAggregation()).thenReturn(Aggregation.Count.create());
+ Mockito.when(view.getWindow()).thenReturn(AggregationWindow.Cumulative.create());
+
+ List<DataPoint> datapoints = SignalFxSessionAdaptor.adapt(viewData);
+ assertEquals(2, datapoints.size());
+ for (DataPoint dp : datapoints) {
+ assertEquals("view-name", dp.getMetric());
+ assertEquals(MetricType.CUMULATIVE_COUNTER, dp.getMetricType());
+ assertEquals(1, dp.getDimensionsCount());
+ assertTrue(dp.hasValue());
+ assertFalse(dp.hasSource());
+
+ Datum datum = dp.getValue();
+ assertTrue(datum.hasIntValue());
+ assertFalse(datum.hasDoubleValue());
+ assertFalse(datum.hasStrValue());
+
+ Dimension dimension = dp.getDimensions(0);
+ assertEquals("animal", dimension.getKey());
+ switch (dimension.getValue()) {
+ case "dog":
+ assertEquals(2L, datum.getIntValue());
+ break;
+ case "cat":
+ assertEquals(3L, datum.getIntValue());
+ break;
+ default:
+ fail("unexpected dimension value");
+ }
+ }
+ }
+
+ @Test
+ public void adaptViewWithEmptyTagValueIntoDatapoints() {
+ Map<List<TagValue>, AggregationData> map =
+ ImmutableMap.<List<TagValue>, AggregationData>of(
+ ImmutableList.of(TagValue.create("dog")),
+ SumDataLong.create(2L),
+ ImmutableList.of(TagValue.create("")),
+ SumDataLong.create(3L));
+ Mockito.when(viewData.getAggregationMap()).thenReturn(map);
+ Mockito.when(view.getAggregation()).thenReturn(Aggregation.Count.create());
+ Mockito.when(view.getWindow()).thenReturn(AggregationWindow.Cumulative.create());
+
+ List<DataPoint> datapoints = SignalFxSessionAdaptor.adapt(viewData);
+ assertEquals(2, datapoints.size());
+ for (DataPoint dp : datapoints) {
+ assertEquals("view-name", dp.getMetric());
+ assertEquals(MetricType.CUMULATIVE_COUNTER, dp.getMetricType());
+ assertTrue(dp.hasValue());
+ assertFalse(dp.hasSource());
+
+ Datum datum = dp.getValue();
+ assertTrue(datum.hasIntValue());
+ assertFalse(datum.hasDoubleValue());
+ assertFalse(datum.hasStrValue());
+
+ switch (dp.getDimensionsCount()) {
+ case 0:
+ assertEquals(3L, datum.getIntValue());
+ break;
+ case 1:
+ Dimension dimension = dp.getDimensions(0);
+ assertEquals("animal", dimension.getKey());
+ assertEquals("dog", dimension.getValue());
+ assertEquals(2L, datum.getIntValue());
+ break;
+ default:
+ fail("Unexpected number of dimensions on the created datapoint");
+ break;
+ }
+ }
+ }
+}
diff --git a/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsConfigurationTest.java b/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsConfigurationTest.java
new file mode 100644
index 00000000..1d3508fb
--- /dev/null
+++ b/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsConfigurationTest.java
@@ -0,0 +1,90 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.signalfx;
+
+import static org.junit.Assert.assertEquals;
+
+import io.opencensus.common.Duration;
+import java.net.URI;
+import java.net.URISyntaxException;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link SignalFxStatsConfiguration}. */
+@RunWith(JUnit4.class)
+public class SignalFxStatsConfigurationTest {
+
+ private static final String TEST_TOKEN = "token";
+
+ @Rule public final ExpectedException thrown = ExpectedException.none();
+
+ @Test
+ public void buildWithDefaults() {
+ SignalFxStatsConfiguration configuration =
+ SignalFxStatsConfiguration.builder().setToken(TEST_TOKEN).build();
+ assertEquals(TEST_TOKEN, configuration.getToken());
+ assertEquals(
+ SignalFxStatsConfiguration.DEFAULT_SIGNALFX_ENDPOINT, configuration.getIngestEndpoint());
+ assertEquals(
+ SignalFxStatsConfiguration.DEFAULT_EXPORT_INTERVAL, configuration.getExportInterval());
+ }
+
+ @Test
+ public void buildWithFields() throws URISyntaxException {
+ URI url = new URI("http://example.com");
+ Duration duration = Duration.create(5, 0);
+ SignalFxStatsConfiguration configuration =
+ SignalFxStatsConfiguration.builder()
+ .setToken(TEST_TOKEN)
+ .setIngestEndpoint(url)
+ .setExportInterval(duration)
+ .build();
+ assertEquals(TEST_TOKEN, configuration.getToken());
+ assertEquals(url, configuration.getIngestEndpoint());
+ assertEquals(duration, configuration.getExportInterval());
+ }
+
+ @Test
+ public void sameConfigurationsAreEqual() {
+ SignalFxStatsConfiguration config1 =
+ SignalFxStatsConfiguration.builder().setToken(TEST_TOKEN).build();
+ SignalFxStatsConfiguration config2 =
+ SignalFxStatsConfiguration.builder().setToken(TEST_TOKEN).build();
+ assertEquals(config1, config2);
+ assertEquals(config1.hashCode(), config2.hashCode());
+ }
+
+ @Test
+ public void buildWithEmptyToken() {
+ thrown.expect(IllegalArgumentException.class);
+ thrown.expectMessage("Invalid SignalFx token");
+ SignalFxStatsConfiguration.builder().setToken("").build();
+ }
+
+ @Test
+ public void buildWithNegativeDuration() {
+ thrown.expect(IllegalArgumentException.class);
+ thrown.expectMessage("Interval duration must be positive");
+ SignalFxStatsConfiguration.builder()
+ .setToken(TEST_TOKEN)
+ .setExportInterval(Duration.create(-1, 0))
+ .build();
+ }
+}
diff --git a/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterTest.java b/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterTest.java
new file mode 100644
index 00000000..cc5730b1
--- /dev/null
+++ b/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterTest.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.signalfx;
+
+import static org.junit.Assert.assertEquals;
+
+import io.opencensus.common.Duration;
+import java.net.URI;
+import java.net.URISyntaxException;
+import org.junit.After;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link SignalFxStatsExporter}. */
+@RunWith(JUnit4.class)
+public class SignalFxStatsExporterTest {
+
+ private static final String TEST_TOKEN = "token";
+ private static final String TEST_ENDPOINT = "https://example.com";
+ private static final Duration ONE_SECOND = Duration.create(1, 0);
+
+ @Rule public final ExpectedException thrown = ExpectedException.none();
+
+ @After
+ public void tearDown() {
+ SignalFxStatsExporter.unsafeResetExporter();
+ }
+
+ @Test
+ public void createWithNullConfiguration() {
+ thrown.expect(NullPointerException.class);
+ thrown.expectMessage("configuration");
+ SignalFxStatsExporter.create(null);
+ }
+
+ @Test
+ public void createWithNullHostUsesDefault() {
+ SignalFxStatsExporter.create(SignalFxStatsConfiguration.builder().setToken(TEST_TOKEN).build());
+ assertEquals(
+ SignalFxStatsConfiguration.DEFAULT_SIGNALFX_ENDPOINT,
+ SignalFxStatsExporter.unsafeGetConfig().getIngestEndpoint());
+ }
+
+ @Test
+ public void createWithNullIntervalUsesDefault() {
+ SignalFxStatsExporter.create(SignalFxStatsConfiguration.builder().setToken(TEST_TOKEN).build());
+ assertEquals(
+ SignalFxStatsConfiguration.DEFAULT_EXPORT_INTERVAL,
+ SignalFxStatsExporter.unsafeGetConfig().getExportInterval());
+ }
+
+ @Test
+ public void createExporterTwice() {
+ SignalFxStatsConfiguration config =
+ SignalFxStatsConfiguration.builder()
+ .setToken(TEST_TOKEN)
+ .setExportInterval(ONE_SECOND)
+ .build();
+ SignalFxStatsExporter.create(config);
+ thrown.expect(IllegalStateException.class);
+ thrown.expectMessage("SignalFx stats exporter is already created.");
+ SignalFxStatsExporter.create(config);
+ }
+
+ @Test
+ public void createWithConfiguration() throws URISyntaxException {
+ SignalFxStatsConfiguration config =
+ SignalFxStatsConfiguration.builder()
+ .setToken(TEST_TOKEN)
+ .setIngestEndpoint(new URI(TEST_ENDPOINT))
+ .setExportInterval(ONE_SECOND)
+ .build();
+ SignalFxStatsExporter.create(config);
+ assertEquals(config, SignalFxStatsExporter.unsafeGetConfig());
+ }
+}
diff --git a/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThreadTest.java b/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThreadTest.java
new file mode 100644
index 00000000..d8852d5f
--- /dev/null
+++ b/exporters/stats/signalfx/src/test/java/io/opencensus/exporter/stats/signalfx/SignalFxStatsExporterWorkerThreadTest.java
@@ -0,0 +1,149 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.signalfx;
+
+import static org.hamcrest.CoreMatchers.startsWith;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThat;
+import static org.junit.Assert.assertTrue;
+
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.signalfx.metrics.errorhandler.OnSendErrorHandler;
+import com.signalfx.metrics.flush.AggregateMetricSender;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.DataPoint;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Datum;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.Dimension;
+import com.signalfx.metrics.protobuf.SignalFxProtocolBuffers.MetricType;
+import io.opencensus.common.Duration;
+import io.opencensus.stats.Aggregation;
+import io.opencensus.stats.AggregationData;
+import io.opencensus.stats.AggregationData.MeanData;
+import io.opencensus.stats.View;
+import io.opencensus.stats.View.AggregationWindow;
+import io.opencensus.stats.View.Name;
+import io.opencensus.stats.ViewData;
+import io.opencensus.stats.ViewManager;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagValue;
+import java.io.IOException;
+import java.net.URI;
+import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.mockito.Mock;
+import org.mockito.Mockito;
+import org.mockito.invocation.InvocationOnMock;
+import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.stubbing.Answer;
+
+@RunWith(MockitoJUnitRunner.class)
+public class SignalFxStatsExporterWorkerThreadTest {
+
+ private static final String TEST_TOKEN = "token";
+ private static final Duration ONE_SECOND = Duration.create(1, 0);
+
+ @Mock private AggregateMetricSender.Session session;
+
+ @Mock private ViewManager viewManager;
+
+ @Mock private SignalFxMetricsSenderFactory factory;
+
+ private URI endpoint;
+
+ @Before
+ public void setUp() throws Exception {
+ endpoint = new URI("http://example.com");
+
+ Mockito.when(
+ factory.create(
+ Mockito.any(URI.class), Mockito.anyString(), Mockito.any(OnSendErrorHandler.class)))
+ .thenAnswer(
+ new Answer<AggregateMetricSender>() {
+ @Override
+ public AggregateMetricSender answer(InvocationOnMock invocation) {
+ Object[] args = invocation.getArguments();
+ AggregateMetricSender sender =
+ SignalFxMetricsSenderFactory.DEFAULT.create(
+ (URI) args[0], (String) args[1], (OnSendErrorHandler) args[2]);
+ AggregateMetricSender spy = Mockito.spy(sender);
+ Mockito.doReturn(session).when(spy).createSession();
+ return spy;
+ }
+ });
+ }
+
+ @Test
+ public void createThread() {
+ SignalFxStatsExporterWorkerThread thread =
+ new SignalFxStatsExporterWorkerThread(
+ factory, endpoint, TEST_TOKEN, ONE_SECOND, viewManager);
+ assertTrue(thread.isDaemon());
+ assertThat(thread.getName(), startsWith("SignalFx"));
+ }
+
+ @Test
+ public void senderThreadInterruptStopsLoop() throws InterruptedException {
+ Mockito.when(session.setDatapoint(Mockito.any(DataPoint.class))).thenReturn(session);
+ Mockito.when(viewManager.getAllExportedViews()).thenReturn(ImmutableSet.<View>of());
+
+ SignalFxStatsExporterWorkerThread thread =
+ new SignalFxStatsExporterWorkerThread(
+ factory, endpoint, TEST_TOKEN, ONE_SECOND, viewManager);
+ thread.start();
+ thread.interrupt();
+ thread.join(5000, 0);
+ assertFalse("Worker thread should have stopped", thread.isAlive());
+ }
+
+ @Test
+ public void setsDatapointsFromViewOnSession() throws IOException {
+ View view = Mockito.mock(View.class);
+ Name viewName = Name.create("test");
+ Mockito.when(view.getName()).thenReturn(viewName);
+ Mockito.when(view.getAggregation()).thenReturn(Aggregation.Mean.create());
+ Mockito.when(view.getWindow()).thenReturn(AggregationWindow.Cumulative.create());
+ Mockito.when(view.getColumns()).thenReturn(ImmutableList.of(TagKey.create("animal")));
+
+ ViewData viewData = Mockito.mock(ViewData.class);
+ Mockito.when(viewData.getView()).thenReturn(view);
+ Mockito.when(viewData.getAggregationMap())
+ .thenReturn(
+ ImmutableMap.<List<TagValue>, AggregationData>of(
+ ImmutableList.of(TagValue.create("cat")), MeanData.create(3.15d, 1)));
+
+ Mockito.when(viewManager.getAllExportedViews()).thenReturn(ImmutableSet.of(view));
+ Mockito.when(viewManager.getView(Mockito.eq(viewName))).thenReturn(viewData);
+
+ SignalFxStatsExporterWorkerThread thread =
+ new SignalFxStatsExporterWorkerThread(
+ factory, endpoint, TEST_TOKEN, ONE_SECOND, viewManager);
+ thread.export();
+
+ DataPoint datapoint =
+ DataPoint.newBuilder()
+ .setMetric("test")
+ .setMetricType(MetricType.GAUGE)
+ .addDimensions(Dimension.newBuilder().setKey("animal").setValue("cat").build())
+ .setValue(Datum.newBuilder().setDoubleValue(3.15d).build())
+ .build();
+ Mockito.verify(session).setDatapoint(Mockito.eq(datapoint));
+ Mockito.verify(session).close();
+ }
+}
diff --git a/exporters/stats/stackdriver/README.md b/exporters/stats/stackdriver/README.md
new file mode 100644
index 00000000..1b35c635
--- /dev/null
+++ b/exporters/stats/stackdriver/README.md
@@ -0,0 +1,171 @@
+# OpenCensus Stackdriver Stats Exporter
+
+The *OpenCensus Stackdriver Stats Exporter* is a stats exporter that exports data to
+Stackdriver Monitoring. [Stackdriver Monitoring][stackdriver-monitoring] provides visibility into
+the performance, uptime, and overall health of cloud-powered applications. Stackdriver ingests that
+data and generates insights via dashboards, charts, and alerts.
+
+## Quickstart
+
+### Prerequisites
+
+To use this exporter, you must have an application that you'd like to monitor. The app can be on
+Google Cloud Platform, on-premise, or another cloud platform.
+
+In order to be able to push your stats to [Stackdriver Monitoring][stackdriver-monitoring], you must:
+
+1. [Create a Cloud project](https://support.google.com/cloud/answer/6251787?hl=en).
+2. [Enable billing](https://support.google.com/cloud/answer/6288653#new-billing).
+3. [Enable the Stackdriver Monitoring API](https://console.cloud.google.com/apis/dashboard).
+
+These steps enable the API but don't require that your app is hosted on Google Cloud Platform.
+
+### Hello "Stackdriver Stats"
+
+#### Add the dependencies to your project
+
+For Maven add to your `pom.xml`:
+```xml
+<dependencies>
+ <dependency>
+ <groupId>io.opencensus</groupId>
+ <artifactId>opencensus-api</artifactId>
+ <version>0.16.1</version>
+ </dependency>
+ <dependency>
+ <groupId>io.opencensus</groupId>
+ <artifactId>opencensus-exporter-stats-stackdriver</artifactId>
+ <version>0.16.1</version>
+ </dependency>
+ <dependency>
+ <groupId>io.opencensus</groupId>
+ <artifactId>opencensus-impl</artifactId>
+ <version>0.16.1</version>
+ <scope>runtime</scope>
+ </dependency>
+</dependencies>
+```
+
+For Gradle add to your dependencies:
+```groovy
+compile 'io.opencensus:opencensus-api:0.16.1'
+compile 'io.opencensus:opencensus-exporter-stats-stackdriver:0.16.1'
+runtime 'io.opencensus:opencensus-impl:0.16.1'
+```
+
+#### Register the exporter
+
+This uses the default configuration for authentication and a given project ID.
+
+```java
+public class MyMainClass {
+ public static void main(String[] args) {
+ StackdriverStatsExporter.createAndRegister(
+ StackdriverStatsConfiguration.builder().build());
+ }
+}
+```
+
+#### Set Monitored Resource for exporter
+
+By default, the Stackdriver Stats Exporter tries to automatically detect the environment if your
+application is running on GCE, GKE or AWS EC2, and generates a corresponding Stackdriver GCE/GKE/EC2
+monitored resource. For GKE in particular, you may want to set some environment variables so that
+the exporter can correctly identify your pod, cluster and container. Follow the Kubernetes instructions
+[here](https://cloud.google.com/kubernetes-engine/docs/tutorials/custom-metrics-autoscaling#exporting_metrics_from_the_application)
+and [here](https://kubernetes.io/docs/tasks/inject-data-application/environment-variable-expose-pod-information/).
+
+Otherwise, the exporter will use [a global Stackdriver monitored resource with a project_id label](https://cloud.google.com/monitoring/api/resources#tag_global),
+which works fine when you have only one exporter running.
+
+If you want multiple processes to export stats for the same metric concurrently, and your
+application is running in an environment other than GCE, GKE or AWS EC2 (for example Dataflow),
+please associate a unique monitored resource with each exporter if possible.
+Note that there is also an "opencensus_task" metric label that uniquely identifies the
+uploaded stats.
+
+To set a custom MonitoredResource:
+
+```java
+public class MyMainClass {
+ public static void main(String[] args) {
+ // A sample DataFlow monitored resource.
+ MonitoredResource myResource = MonitoredResource.newBuilder()
+ .setType("dataflow_job")
+ .putLabels("project_id", "my_project")
+ .putLabels("job_name", "my_job")
+ .putLabels("region", "us-east1")
+ .build();
+
+ // Set a custom MonitoredResource. Please make sure each Stackdriver Stats Exporter has a
+ // unique MonitoredResource.
+ StackdriverStatsExporter.createAndRegister(
+ StackdriverStatsConfiguration.builder().setMonitoredResource(myResource).build());
+ }
+}
+```
+
+For a complete list of valid Stackdriver monitored resources, please refer to [Stackdriver
+Documentation](https://cloud.google.com/monitoring/custom-metrics/creating-metrics#which-resource).
+Please also note that although many monitored resources are available on [Stackdriver](https://cloud.google.com/monitoring/api/resources),
+only [a small subset of them](https://cloud.google.com/monitoring/custom-metrics/creating-metrics#which-resource)
+are compatible with the OpenCensus Stackdriver Stats Exporter.
+
+#### Authentication
+
+This exporter uses [google-cloud-java](https://github.com/GoogleCloudPlatform/google-cloud-java).
+For details about how to configure authentication, see [here](https://github.com/GoogleCloudPlatform/google-cloud-java#authentication).
+
+If you prefer to manually set the credentials use:
+```java
+StackdriverStatsExporter.createAndRegister(
+ StackdriverStatsConfiguration.builder()
+ .setCredentials(new GoogleCredentials(new AccessToken(accessToken, expirationTime)))
+ .setProjectId("MyStackdriverProjectId")
+ .setExportInterval(Duration.create(10, 0))
+ .build());
+```
+
+#### Specifying a Project ID
+
+This exporter uses [google-cloud-java](https://github.com/GoogleCloudPlatform/google-cloud-java).
+For details about how to configure the project ID, see [here](https://github.com/GoogleCloudPlatform/google-cloud-java#specifying-a-project-id).
+
+If you prefer to manually set the project ID use:
+```java
+StackdriverStatsExporter.createAndRegister(
+ StackdriverStatsConfiguration.builder().setProjectId("MyStackdriverProjectId").build());
+```
+
+#### Java Versions
+
+Java 7 or above is required to use this exporter.
+
+## FAQ
+### Why did I get a PERMISSION_DENIED error from Stackdriver when using this exporter?
+To use our Stackdriver Stats exporter, you need to set up billing for your cloud project, since
+creating and uploading custom metrics to Stackdriver Monitoring is
+[not free](https://cloud.google.com/stackdriver/pricing_v2#monitoring-costs).
+
+To enable billing, follow the instructions [here](https://support.google.com/cloud/answer/6288653#new-billing).
+
+### What is "opencensus_task" metric label ?
+Stackdriver requires that each Timeseries to be updated only by one task at a time. A
+`Timeseries` is uniquely identified by the `MonitoredResource` and the `Metric`'s labels.
+Stackdriver exporter adds a new `Metric` label for each custom `Metric` to ensure the uniqueness
+of the `Timeseries`. The format of the label is: `{LANGUAGE}-{PID}@{HOSTNAME}`, if `{PID}` is not
+available a random number will be used.
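+For example, the Java exporter running as process 1234 on host `my-host` would add the label value
+`java-1234@my-host` (see `generateDefaultTaskValue()` in `StackdriverExportUtils` later in this
+change).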
+
+### Why did I get an error like "java.lang.NoSuchMethodError: com.google.common.base.Throwables.throwIfInstanceOf"?
+This is probably because there is a version conflict on Guava in the dependency tree.
+
+For example, `com.google.common.base.Throwables.throwIfInstanceOf` was introduced in Guava 20.0.
+If your application has a dependency that pulls in Guava 19.0 or below
+(for example, gRPC 1.10.0), you may get a `NoSuchMethodError` since
+`com.google.common.base.Throwables.throwIfInstanceOf` doesn't exist before Guava 20.0.
+
+In this case, either add an explicit dependency on a newer version of Guava that has the
+new method (20.0 in the previous example), as shown in the sketch below, or, if possible, upgrade
+the dependency that pulls in Guava to a newer version that itself depends on the newer Guava (for
+example, upgrade to gRPC 1.12.0).
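+
+For example, with Gradle you could pin Guava explicitly (a minimal sketch of the first option; the
+exact version should match whatever your dependency tree requires):
+```groovy
+// Force a Guava version that contains the newer method.
+compile 'com.google.guava:guava:20.0'
+```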
+
+[stackdriver-monitoring]: https://cloud.google.com/monitoring/
diff --git a/exporters/stats/stackdriver/build.gradle b/exporters/stats/stackdriver/build.gradle
new file mode 100644
index 00000000..0bc302a6
--- /dev/null
+++ b/exporters/stats/stackdriver/build.gradle
@@ -0,0 +1,30 @@
+description = 'OpenCensus Stats Stackdriver Exporter'
+
+[compileJava, compileTestJava].each() {
+ it.sourceCompatibility = 1.7
+ it.targetCompatibility = 1.7
+}
+
+dependencies {
+ compileOnly libraries.auto_value
+
+ compile project(':opencensus-api'),
+ project(':opencensus-contrib-monitored-resource-util'),
+ libraries.google_auth,
+ libraries.guava
+
+ compile (libraries.google_cloud_monitoring) {
+ // Prefer library version.
+ exclude group: 'com.google.guava', module: 'guava'
+
+ // Prefer library version.
+ exclude group: 'com.google.code.findbugs', module: 'jsr305'
+
+ // We will always be more up to date.
+ exclude group: 'io.opencensus', module: 'opencensus-api'
+ }
+
+ testCompile project(':opencensus-api')
+
+ signature "org.codehaus.mojo.signature:java17:1.0@signature"
+} \ No newline at end of file
diff --git a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtils.java b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtils.java
new file mode 100644
index 00000000..4f8715b0
--- /dev/null
+++ b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtils.java
@@ -0,0 +1,518 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.stackdriver;
+
+import static com.google.common.base.Preconditions.checkArgument;
+
+import com.google.api.Distribution;
+import com.google.api.Distribution.BucketOptions;
+import com.google.api.Distribution.BucketOptions.Explicit;
+import com.google.api.LabelDescriptor;
+import com.google.api.LabelDescriptor.ValueType;
+import com.google.api.Metric;
+import com.google.api.MetricDescriptor;
+import com.google.api.MetricDescriptor.MetricKind;
+import com.google.api.MonitoredResource;
+import com.google.cloud.MetadataConfig;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.collect.Lists;
+import com.google.common.collect.Maps;
+import com.google.monitoring.v3.Point;
+import com.google.monitoring.v3.TimeInterval;
+import com.google.monitoring.v3.TimeSeries;
+import com.google.monitoring.v3.TypedValue;
+import com.google.monitoring.v3.TypedValue.Builder;
+import com.google.protobuf.Timestamp;
+import io.opencensus.common.Function;
+import io.opencensus.common.Functions;
+import io.opencensus.contrib.monitoredresource.util.MonitoredResource.AwsEc2InstanceMonitoredResource;
+import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGceInstanceMonitoredResource;
+import io.opencensus.contrib.monitoredresource.util.MonitoredResource.GcpGkeContainerMonitoredResource;
+import io.opencensus.contrib.monitoredresource.util.MonitoredResourceUtils;
+import io.opencensus.contrib.monitoredresource.util.ResourceType;
+import io.opencensus.stats.Aggregation;
+import io.opencensus.stats.Aggregation.LastValue;
+import io.opencensus.stats.AggregationData;
+import io.opencensus.stats.AggregationData.CountData;
+import io.opencensus.stats.AggregationData.DistributionData;
+import io.opencensus.stats.AggregationData.LastValueDataDouble;
+import io.opencensus.stats.AggregationData.LastValueDataLong;
+import io.opencensus.stats.AggregationData.SumDataDouble;
+import io.opencensus.stats.AggregationData.SumDataLong;
+import io.opencensus.stats.BucketBoundaries;
+import io.opencensus.stats.Measure;
+import io.opencensus.stats.View;
+import io.opencensus.stats.ViewData;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagValue;
+import java.lang.management.ManagementFactory;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
+import java.security.SecureRandom;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+/*>>>
+import org.checkerframework.checker.nullness.qual.Nullable;
+*/
+
+/** Util methods to convert OpenCensus Stats data models to Stackdriver monitoring data models. */
+@SuppressWarnings("deprecation")
+final class StackdriverExportUtils {
+
+ // TODO(songya): do we want these constants to be customizable?
+ @VisibleForTesting static final String LABEL_DESCRIPTION = "OpenCensus TagKey";
+ @VisibleForTesting static final String OPENCENSUS_TASK = "opencensus_task";
+ @VisibleForTesting static final String OPENCENSUS_TASK_DESCRIPTION = "Opencensus task identifier";
+ private static final String GCP_GKE_CONTAINER = "k8s_container";
+ private static final String GCP_GCE_INSTANCE = "gce_instance";
+ private static final String AWS_EC2_INSTANCE = "aws_ec2_instance";
+ private static final String GLOBAL = "global";
+
+ private static final Logger logger = Logger.getLogger(StackdriverExportUtils.class.getName());
+ private static final String OPENCENSUS_TASK_VALUE_DEFAULT = generateDefaultTaskValue();
+ private static final String PROJECT_ID_LABEL_KEY = "project_id";
+
+ // Constant functions for ValueType.
+ private static final Function<Object, MetricDescriptor.ValueType> VALUE_TYPE_DOUBLE_FUNCTION =
+ Functions.returnConstant(MetricDescriptor.ValueType.DOUBLE);
+ private static final Function<Object, MetricDescriptor.ValueType> VALUE_TYPE_INT64_FUNCTION =
+ Functions.returnConstant(MetricDescriptor.ValueType.INT64);
+ private static final Function<Object, MetricDescriptor.ValueType>
+ VALUE_TYPE_UNRECOGNIZED_FUNCTION =
+ Functions.returnConstant(MetricDescriptor.ValueType.UNRECOGNIZED);
+ private static final Function<Object, MetricDescriptor.ValueType>
+ VALUE_TYPE_DISTRIBUTION_FUNCTION =
+ Functions.returnConstant(MetricDescriptor.ValueType.DISTRIBUTION);
+ private static final Function<Aggregation, MetricDescriptor.ValueType> valueTypeMeanFunction =
+ new Function<Aggregation, MetricDescriptor.ValueType>() {
+ @Override
+ public MetricDescriptor.ValueType apply(Aggregation arg) {
+ // TODO(songya): remove this once Mean aggregation is completely removed. Before that
+ // we need to continue supporting Mean, since it could still be used by users and some
+ // deprecated RPC views.
+ if (arg instanceof Aggregation.Mean) {
+ return MetricDescriptor.ValueType.DOUBLE;
+ }
+ return MetricDescriptor.ValueType.UNRECOGNIZED;
+ }
+ };
+
+ // Constant functions for MetricKind.
+ private static final Function<Object, MetricKind> METRIC_KIND_CUMULATIVE_FUNCTION =
+ Functions.returnConstant(MetricKind.CUMULATIVE);
+ private static final Function<Object, MetricKind> METRIC_KIND_UNRECOGNIZED_FUNCTION =
+ Functions.returnConstant(MetricKind.UNRECOGNIZED);
+
+ // Constant functions for TypedValue.
+ private static final Function<SumDataDouble, TypedValue> typedValueSumDoubleFunction =
+ new Function<SumDataDouble, TypedValue>() {
+ @Override
+ public TypedValue apply(SumDataDouble arg) {
+ Builder builder = TypedValue.newBuilder();
+ builder.setDoubleValue(arg.getSum());
+ return builder.build();
+ }
+ };
+ private static final Function<SumDataLong, TypedValue> typedValueSumLongFunction =
+ new Function<SumDataLong, TypedValue>() {
+ @Override
+ public TypedValue apply(SumDataLong arg) {
+ Builder builder = TypedValue.newBuilder();
+ builder.setInt64Value(arg.getSum());
+ return builder.build();
+ }
+ };
+ private static final Function<CountData, TypedValue> typedValueCountFunction =
+ new Function<CountData, TypedValue>() {
+ @Override
+ public TypedValue apply(CountData arg) {
+ Builder builder = TypedValue.newBuilder();
+ builder.setInt64Value(arg.getCount());
+ return builder.build();
+ }
+ };
+ private static final Function<LastValueDataDouble, TypedValue> typedValueLastValueDoubleFunction =
+ new Function<LastValueDataDouble, TypedValue>() {
+ @Override
+ public TypedValue apply(LastValueDataDouble arg) {
+ Builder builder = TypedValue.newBuilder();
+ builder.setDoubleValue(arg.getLastValue());
+ return builder.build();
+ }
+ };
+ private static final Function<LastValueDataLong, TypedValue> typedValueLastValueLongFunction =
+ new Function<LastValueDataLong, TypedValue>() {
+ @Override
+ public TypedValue apply(LastValueDataLong arg) {
+ Builder builder = TypedValue.newBuilder();
+ builder.setInt64Value(arg.getLastValue());
+ return builder.build();
+ }
+ };
+ private static final Function<AggregationData, TypedValue> typedValueMeanFunction =
+ new Function<AggregationData, TypedValue>() {
+ @Override
+ public TypedValue apply(AggregationData arg) {
+ Builder builder = TypedValue.newBuilder();
+ // TODO(songya): remove this once Mean aggregation is completely removed. Before that
+ // we need to continue supporting Mean, since it could still be used by users and some
+ // deprecated RPC views.
+ if (arg instanceof AggregationData.MeanData) {
+ builder.setDoubleValue(((AggregationData.MeanData) arg).getMean());
+ return builder.build();
+ }
+ throw new IllegalArgumentException("Unknown Aggregation");
+ }
+ };
+
+ private static String generateDefaultTaskValue() {
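+    // The returned value (e.g. "java-12345@my-host", illustrative) is attached to every exported
+    // Metric as the "opencensus_task" label (see createMetric) so the exporting process can be
+    // identified.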
+    // jvmName is something like '<pid>@<hostname>', at least on Oracle and OpenJDK JVMs.
+ final String jvmName = ManagementFactory.getRuntimeMXBean().getName();
+ // If not the expected format then generate a random number.
+ if (jvmName.indexOf('@') < 1) {
+ String hostname = "localhost";
+ try {
+ hostname = InetAddress.getLocalHost().getHostName();
+ } catch (UnknownHostException e) {
+ logger.log(Level.INFO, "Unable to get the hostname.", e);
+ }
+ // Generate a random number and use the same format "random_number@hostname".
+ return "java-" + new SecureRandom().nextInt() + "@" + hostname;
+ }
+ return "java-" + jvmName;
+ }
+
+ // Construct a MetricDescriptor using a View.
+ @javax.annotation.Nullable
+ static MetricDescriptor createMetricDescriptor(
+ View view, String projectId, String domain, String displayNamePrefix) {
+ if (!(view.getWindow() instanceof View.AggregationWindow.Cumulative)) {
+ // TODO(songya): Only Cumulative view will be exported to Stackdriver in this version.
+ return null;
+ }
+
+ MetricDescriptor.Builder builder = MetricDescriptor.newBuilder();
+ String viewName = view.getName().asString();
+ String type = generateType(viewName, domain);
+ // Name format refers to
+ // cloud.google.com/monitoring/api/ref_v3/rest/v3/projects.metricDescriptors/create
+ builder.setName(String.format("projects/%s/metricDescriptors/%s", projectId, type));
+ builder.setType(type);
+ builder.setDescription(view.getDescription());
+ String displayName = createDisplayName(viewName, displayNamePrefix);
+ builder.setDisplayName(displayName);
+ for (TagKey tagKey : view.getColumns()) {
+ builder.addLabels(createLabelDescriptor(tagKey));
+ }
+ builder.addLabels(
+ LabelDescriptor.newBuilder()
+ .setKey(OPENCENSUS_TASK)
+ .setDescription(OPENCENSUS_TASK_DESCRIPTION)
+ .setValueType(ValueType.STRING)
+ .build());
+ builder.setUnit(createUnit(view.getAggregation(), view.getMeasure()));
+ builder.setMetricKind(createMetricKind(view.getWindow(), view.getAggregation()));
+ builder.setValueType(createValueType(view.getAggregation(), view.getMeasure()));
+ return builder.build();
+ }
+
+ private static String generateType(String viewName, String domain) {
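+    // For example (illustrative), domain "custom.googleapis.com/opencensus/" and view name
+    // "my_view" yield "custom.googleapis.com/opencensus/my_view".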
+ return domain + viewName;
+ }
+
+ private static String createDisplayName(String viewName, String displayNamePrefix) {
+ return displayNamePrefix + viewName;
+ }
+
+ // Construct a LabelDescriptor from a TagKey
+ @VisibleForTesting
+ static LabelDescriptor createLabelDescriptor(TagKey tagKey) {
+ LabelDescriptor.Builder builder = LabelDescriptor.newBuilder();
+ builder.setKey(tagKey.getName());
+ builder.setDescription(LABEL_DESCRIPTION);
+ // Now we only support String tags
+ builder.setValueType(ValueType.STRING);
+ return builder.build();
+ }
+
+  // Construct a MetricKind from an AggregationWindow and an Aggregation
+ @VisibleForTesting
+ static MetricKind createMetricKind(View.AggregationWindow window, Aggregation aggregation) {
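+    // LastValue aggregations map to GAUGE; otherwise the kind follows the window:
+    // Cumulative -> CUMULATIVE, Interval -> UNRECOGNIZED (Interval stats are not exported).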
+ if (aggregation instanceof LastValue) {
+ return MetricKind.GAUGE;
+ }
+ return window.match(
+ METRIC_KIND_CUMULATIVE_FUNCTION, // Cumulative
+ // TODO(songya): We don't support exporting Interval stats to StackDriver in this version.
+ METRIC_KIND_UNRECOGNIZED_FUNCTION, // Interval
+ METRIC_KIND_UNRECOGNIZED_FUNCTION);
+ }
+
+  // Construct a unit string from an Aggregation and a Measure
+ @VisibleForTesting
+ static String createUnit(Aggregation aggregation, final Measure measure) {
+ if (aggregation instanceof Aggregation.Count) {
+ return "1";
+ }
+ return measure.getUnit();
+ }
+
+ // Construct a MetricDescriptor.ValueType from an Aggregation and a Measure
+ @VisibleForTesting
+ static MetricDescriptor.ValueType createValueType(
+ Aggregation aggregation, final Measure measure) {
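+    // Sum and LastValue follow the underlying measure type (DOUBLE or INT64), Count is always
+    // INT64, Distribution maps to DISTRIBUTION, and the deprecated Mean maps to DOUBLE.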
+ return aggregation.match(
+ Functions.returnConstant(
+ measure.match(
+ VALUE_TYPE_DOUBLE_FUNCTION, // Sum Double
+ VALUE_TYPE_INT64_FUNCTION, // Sum Long
+ VALUE_TYPE_UNRECOGNIZED_FUNCTION)),
+ VALUE_TYPE_INT64_FUNCTION, // Count
+ VALUE_TYPE_DISTRIBUTION_FUNCTION, // Distribution
+ Functions.returnConstant(
+ measure.match(
+ VALUE_TYPE_DOUBLE_FUNCTION, // LastValue Double
+ VALUE_TYPE_INT64_FUNCTION, // LastValue Long
+ VALUE_TYPE_UNRECOGNIZED_FUNCTION)),
+ valueTypeMeanFunction);
+ }
+
+ // Convert ViewData to a list of TimeSeries, so that ViewData can be uploaded to Stackdriver.
+ static List<TimeSeries> createTimeSeriesList(
+ @javax.annotation.Nullable ViewData viewData,
+ MonitoredResource monitoredResource,
+ String domain) {
+ List<TimeSeries> timeSeriesList = Lists.newArrayList();
+ if (viewData == null) {
+ return timeSeriesList;
+ }
+ View view = viewData.getView();
+ if (!(view.getWindow() instanceof View.AggregationWindow.Cumulative)) {
+ // TODO(songya): Only Cumulative view will be exported to Stackdriver in this version.
+ return timeSeriesList;
+ }
+
+ // Shared fields for all TimeSeries generated from the same ViewData
+ TimeSeries.Builder shared = TimeSeries.newBuilder();
+ shared.setMetricKind(createMetricKind(view.getWindow(), view.getAggregation()));
+ shared.setResource(monitoredResource);
+ shared.setValueType(createValueType(view.getAggregation(), view.getMeasure()));
+
+ // Each entry in AggregationMap will be converted into an independent TimeSeries object
+ for (Entry<List</*@Nullable*/ TagValue>, AggregationData> entry :
+ viewData.getAggregationMap().entrySet()) {
+ TimeSeries.Builder builder = shared.clone();
+ builder.setMetric(createMetric(view, entry.getKey(), domain));
+ builder.addPoints(
+ createPoint(entry.getValue(), viewData.getWindowData(), view.getAggregation()));
+ timeSeriesList.add(builder.build());
+ }
+
+ return timeSeriesList;
+ }
+
+ // Create a Metric using the TagKeys and TagValues.
+ @VisibleForTesting
+ static Metric createMetric(View view, List</*@Nullable*/ TagValue> tagValues, String domain) {
+ Metric.Builder builder = Metric.newBuilder();
+ // TODO(songya): use pre-defined metrics for canonical views
+ builder.setType(generateType(view.getName().asString(), domain));
+ Map<String, String> stringTagMap = Maps.newHashMap();
+ List<TagKey> columns = view.getColumns();
+ checkArgument(
+ tagValues.size() == columns.size(), "TagKeys and TagValues don't have same size.");
+ for (int i = 0; i < tagValues.size(); i++) {
+ TagKey key = columns.get(i);
+ TagValue value = tagValues.get(i);
+ if (value == null) {
+ continue;
+ }
+ stringTagMap.put(key.getName(), value.asString());
+ }
+ stringTagMap.put(OPENCENSUS_TASK, OPENCENSUS_TASK_VALUE_DEFAULT);
+ builder.putAllLabels(stringTagMap);
+ return builder.build();
+ }
+
+ // Create Point from AggregationData, AggregationWindowData and Aggregation.
+ @VisibleForTesting
+ static Point createPoint(
+ AggregationData aggregationData,
+ ViewData.AggregationWindowData windowData,
+ Aggregation aggregation) {
+ Point.Builder builder = Point.newBuilder();
+ builder.setInterval(createTimeInterval(windowData, aggregation));
+ builder.setValue(createTypedValue(aggregation, aggregationData));
+ return builder.build();
+ }
+
+  // Convert AggregationWindowData to TimeInterval; currently only CumulativeData is supported.
+ @VisibleForTesting
+ static TimeInterval createTimeInterval(
+ ViewData.AggregationWindowData windowData, final Aggregation aggregation) {
+ return windowData.match(
+ new Function<ViewData.AggregationWindowData.CumulativeData, TimeInterval>() {
+ @Override
+ public TimeInterval apply(ViewData.AggregationWindowData.CumulativeData arg) {
+ TimeInterval.Builder builder = TimeInterval.newBuilder();
+ builder.setEndTime(convertTimestamp(arg.getEnd()));
+ if (!(aggregation instanceof LastValue)) {
+ builder.setStartTime(convertTimestamp(arg.getStart()));
+ }
+ return builder.build();
+ }
+ },
+ Functions.<TimeInterval>throwIllegalArgumentException(),
+ Functions.<TimeInterval>throwIllegalArgumentException());
+ }
+
+ // Create a TypedValue using AggregationData and Aggregation
+  // Note TypedValue is "A single strongly-typed value", i.e. only one field should be set.
+ @VisibleForTesting
+ static TypedValue createTypedValue(
+ final Aggregation aggregation, AggregationData aggregationData) {
+ return aggregationData.match(
+ typedValueSumDoubleFunction,
+ typedValueSumLongFunction,
+ typedValueCountFunction,
+ new Function<DistributionData, TypedValue>() {
+ @Override
+ public TypedValue apply(DistributionData arg) {
+ TypedValue.Builder builder = TypedValue.newBuilder();
+ checkArgument(
+ aggregation instanceof Aggregation.Distribution,
+ "Aggregation and AggregationData mismatch.");
+ builder.setDistributionValue(
+ createDistribution(
+ arg, ((Aggregation.Distribution) aggregation).getBucketBoundaries()));
+ return builder.build();
+ }
+ },
+ typedValueLastValueDoubleFunction,
+ typedValueLastValueLongFunction,
+ typedValueMeanFunction);
+ }
+
+ // Create a StackDriver Distribution from DistributionData and BucketBoundaries
+ @VisibleForTesting
+ static Distribution createDistribution(
+ DistributionData distributionData, BucketBoundaries bucketBoundaries) {
+ return Distribution.newBuilder()
+ .setBucketOptions(createBucketOptions(bucketBoundaries))
+ .addAllBucketCounts(distributionData.getBucketCounts())
+ .setCount(distributionData.getCount())
+ .setMean(distributionData.getMean())
+ // TODO(songya): uncomment this once Stackdriver supports setting max and min.
+ // .setRange(
+ // Range.newBuilder()
+ // .setMax(distributionData.getMax())
+ // .setMin(distributionData.getMin())
+ // .build())
+ .setSumOfSquaredDeviation(distributionData.getSumOfSquaredDeviations())
+ .build();
+ }
+
+ // Create BucketOptions from BucketBoundaries
+ @VisibleForTesting
+ static BucketOptions createBucketOptions(BucketBoundaries bucketBoundaries) {
+ return BucketOptions.newBuilder()
+ .setExplicitBuckets(Explicit.newBuilder().addAllBounds(bucketBoundaries.getBoundaries()))
+ .build();
+ }
+
+ // Convert a Census Timestamp to a StackDriver Timestamp
+ @VisibleForTesting
+ static Timestamp convertTimestamp(io.opencensus.common.Timestamp censusTimestamp) {
+ if (censusTimestamp.getSeconds() < 0) {
+ // Stackdriver doesn't handle negative timestamps.
+ return Timestamp.newBuilder().build();
+ }
+ return Timestamp.newBuilder()
+ .setSeconds(censusTimestamp.getSeconds())
+ .setNanos(censusTimestamp.getNanos())
+ .build();
+ }
+
+ /* Return a self-configured Stackdriver monitored resource. */
+ static MonitoredResource getDefaultResource() {
+ MonitoredResource.Builder builder = MonitoredResource.newBuilder();
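+    // Fall back to the "global" resource type when no GCE, GKE or EC2 environment is detected.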
+ io.opencensus.contrib.monitoredresource.util.MonitoredResource autoDetectedResource =
+ MonitoredResourceUtils.getDefaultResource();
+ if (autoDetectedResource == null) {
+ builder.setType(GLOBAL);
+ if (MetadataConfig.getProjectId() != null) {
+ // For default global resource, always use the project id from MetadataConfig. This allows
+ // stats from other projects (e.g from GAE running in another project) to be collected.
+ builder.putLabels(PROJECT_ID_LABEL_KEY, MetadataConfig.getProjectId());
+ }
+ return builder.build();
+ }
+ builder.setType(mapToStackdriverResourceType(autoDetectedResource.getResourceType()));
+ setMonitoredResourceLabelsForBuilder(builder, autoDetectedResource);
+ return builder.build();
+ }
+
+ private static String mapToStackdriverResourceType(ResourceType resourceType) {
+ switch (resourceType) {
+ case GCP_GCE_INSTANCE:
+ return GCP_GCE_INSTANCE;
+ case GCP_GKE_CONTAINER:
+ return GCP_GKE_CONTAINER;
+ case AWS_EC2_INSTANCE:
+ return AWS_EC2_INSTANCE;
+ }
+ throw new IllegalArgumentException("Unknown resource type.");
+ }
+
+ private static void setMonitoredResourceLabelsForBuilder(
+ MonitoredResource.Builder builder,
+ io.opencensus.contrib.monitoredresource.util.MonitoredResource autoDetectedResource) {
+ switch (autoDetectedResource.getResourceType()) {
+ case GCP_GCE_INSTANCE:
+ GcpGceInstanceMonitoredResource gcpGceInstanceMonitoredResource =
+ (GcpGceInstanceMonitoredResource) autoDetectedResource;
+ builder.putLabels(PROJECT_ID_LABEL_KEY, gcpGceInstanceMonitoredResource.getAccount());
+ builder.putLabels("instance_id", gcpGceInstanceMonitoredResource.getInstanceId());
+ builder.putLabels("zone", gcpGceInstanceMonitoredResource.getZone());
+ return;
+ case GCP_GKE_CONTAINER:
+ GcpGkeContainerMonitoredResource gcpGkeContainerMonitoredResource =
+ (GcpGkeContainerMonitoredResource) autoDetectedResource;
+ builder.putLabels(PROJECT_ID_LABEL_KEY, gcpGkeContainerMonitoredResource.getAccount());
+ builder.putLabels("cluster_name", gcpGkeContainerMonitoredResource.getClusterName());
+ builder.putLabels("container_name", gcpGkeContainerMonitoredResource.getContainerName());
+ builder.putLabels("namespace_name", gcpGkeContainerMonitoredResource.getNamespaceId());
+ builder.putLabels("pod_name", gcpGkeContainerMonitoredResource.getPodId());
+ builder.putLabels("location", gcpGkeContainerMonitoredResource.getZone());
+ return;
+ case AWS_EC2_INSTANCE:
+ AwsEc2InstanceMonitoredResource awsEc2InstanceMonitoredResource =
+ (AwsEc2InstanceMonitoredResource) autoDetectedResource;
+ builder.putLabels("aws_account", awsEc2InstanceMonitoredResource.getAccount());
+ builder.putLabels("instance_id", awsEc2InstanceMonitoredResource.getInstanceId());
+ builder.putLabels("region", "aws:" + awsEc2InstanceMonitoredResource.getRegion());
+ return;
+ }
+ throw new IllegalArgumentException("Unknown subclass of MonitoredResource.");
+ }
+
+ private StackdriverExportUtils() {}
+}
diff --git a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorker.java b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorker.java
new file mode 100644
index 00000000..5ffed9d5
--- /dev/null
+++ b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorker.java
@@ -0,0 +1,274 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.stackdriver;
+
+import com.google.api.MetricDescriptor;
+import com.google.api.MonitoredResource;
+import com.google.api.gax.rpc.ApiException;
+import com.google.cloud.monitoring.v3.MetricServiceClient;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.base.Strings;
+import com.google.common.collect.Lists;
+import com.google.monitoring.v3.CreateMetricDescriptorRequest;
+import com.google.monitoring.v3.CreateTimeSeriesRequest;
+import com.google.monitoring.v3.ProjectName;
+import com.google.monitoring.v3.TimeSeries;
+import io.opencensus.common.Duration;
+import io.opencensus.common.Scope;
+import io.opencensus.stats.View;
+import io.opencensus.stats.ViewData;
+import io.opencensus.stats.ViewManager;
+import io.opencensus.trace.Sampler;
+import io.opencensus.trace.Span;
+import io.opencensus.trace.Status;
+import io.opencensus.trace.Tracer;
+import io.opencensus.trace.Tracing;
+import io.opencensus.trace.samplers.Samplers;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+import javax.annotation.concurrent.NotThreadSafe;
+
+/*>>>
+import org.checkerframework.checker.nullness.qual.Nullable;
+*/
+
+/**
+ * Worker {@code Runnable} that polls ViewData from the Stats library and batch exports it to
+ * Stackdriver.
+ *
+ * <p>{@code StackdriverExporterWorker} will be started in a daemon {@code Thread}.
+ *
+ * <p>The state of this class should only be accessed from the thread in which the {@link
+ * StackdriverExporterWorker} runs.
+ */
+@NotThreadSafe
+final class StackdriverExporterWorker implements Runnable {
+
+ private static final Logger logger = Logger.getLogger(StackdriverExporterWorker.class.getName());
+
+ // Stackdriver Monitoring v3 only accepts up to 200 TimeSeries per CreateTimeSeries call.
+ @VisibleForTesting static final int MAX_BATCH_EXPORT_SIZE = 200;
+
+ @VisibleForTesting static final String DEFAULT_DISPLAY_NAME_PREFIX = "OpenCensus/";
+ @VisibleForTesting static final String CUSTOM_METRIC_DOMAIN = "custom.googleapis.com/";
+
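+  // With the default domain, a view named "vm_latency" (illustrative) is exported with the
+  // metric type "custom.googleapis.com/opencensus/vm_latency".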
+ @VisibleForTesting
+ static final String CUSTOM_OPENCENSUS_DOMAIN = CUSTOM_METRIC_DOMAIN + "opencensus/";
+
+ private final long scheduleDelayMillis;
+ private final String projectId;
+ private final ProjectName projectName;
+ private final MetricServiceClient metricServiceClient;
+ private final ViewManager viewManager;
+ private final MonitoredResource monitoredResource;
+ private final String domain;
+ private final String displayNamePrefix;
+ private final Map<View.Name, View> registeredViews = new HashMap<View.Name, View>();
+
+ private static final Tracer tracer = Tracing.getTracer();
+ private static final Sampler probabilitySampler = Samplers.probabilitySampler(0.0001);
+
+ StackdriverExporterWorker(
+ String projectId,
+ MetricServiceClient metricServiceClient,
+ Duration exportInterval,
+ ViewManager viewManager,
+ MonitoredResource monitoredResource,
+ @javax.annotation.Nullable String metricNamePrefix) {
+ this.scheduleDelayMillis = exportInterval.toMillis();
+ this.projectId = projectId;
+ projectName = ProjectName.newBuilder().setProject(projectId).build();
+ this.metricServiceClient = metricServiceClient;
+ this.viewManager = viewManager;
+ this.monitoredResource = monitoredResource;
+ this.domain = getDomain(metricNamePrefix);
+ this.displayNamePrefix = getDisplayNamePrefix(metricNamePrefix);
+
+ Tracing.getExportComponent()
+ .getSampledSpanStore()
+ .registerSpanNamesForCollection(
+ Collections.singletonList("ExportStatsToStackdriverMonitoring"));
+ }
+
+ // Returns true if the given view is successfully registered to Stackdriver Monitoring, or the
+ // exact same view has already been registered. Returns false otherwise.
+ @VisibleForTesting
+ boolean registerView(View view) {
+ View existing = registeredViews.get(view.getName());
+ if (existing != null) {
+ if (existing.equals(view)) {
+ // Ignore views that are already registered.
+ return true;
+ } else {
+ // If we upload a view that has the same name with a registered view but with different
+ // attributes, Stackdriver client will throw an exception.
+ logger.log(
+ Level.WARNING,
+ "A different view with the same name is already registered: " + existing);
+ return false;
+ }
+ }
+ registeredViews.put(view.getName(), view);
+
+ Span span = tracer.getCurrentSpan();
+ span.addAnnotation("Create Stackdriver Metric.");
+    // TODO(songya): don't need to create MetricDescriptor for RpcViewConstants once we define
+ // canonical metrics. Registration is required only for custom view definitions. Canonical
+ // views should be pre-registered.
+ MetricDescriptor metricDescriptor =
+ StackdriverExportUtils.createMetricDescriptor(view, projectId, domain, displayNamePrefix);
+ if (metricDescriptor == null) {
+ // Don't register interval views in this version.
+ return false;
+ }
+
+ CreateMetricDescriptorRequest request =
+ CreateMetricDescriptorRequest.newBuilder()
+ .setName(projectName.toString())
+ .setMetricDescriptor(metricDescriptor)
+ .build();
+ try {
+ metricServiceClient.createMetricDescriptor(request);
+ span.addAnnotation("Finish creating MetricDescriptor.");
+ return true;
+ } catch (ApiException e) {
+ logger.log(Level.WARNING, "ApiException thrown when creating MetricDescriptor.", e);
+ span.setStatus(
+ Status.CanonicalCode.valueOf(e.getStatusCode().getCode().name())
+ .toStatus()
+ .withDescription(
+ "ApiException thrown when creating MetricDescriptor: " + exceptionMessage(e)));
+ return false;
+ } catch (Throwable e) {
+ logger.log(Level.WARNING, "Exception thrown when creating MetricDescriptor.", e);
+ span.setStatus(
+ Status.UNKNOWN.withDescription(
+ "Exception thrown when creating MetricDescriptor: " + exceptionMessage(e)));
+ return false;
+ }
+ }
+
+  // Polls ViewData from the Stats library for all exported views, and uploads them as TimeSeries
+  // to Stackdriver.
+ @VisibleForTesting
+ void export() {
+ List</*@Nullable*/ ViewData> viewDataList = Lists.newArrayList();
+ for (View view : viewManager.getAllExportedViews()) {
+ if (registerView(view)) {
+ // Only upload stats for valid views.
+ viewDataList.add(viewManager.getView(view.getName()));
+ }
+ }
+
+ List<TimeSeries> timeSeriesList = Lists.newArrayList();
+ for (/*@Nullable*/ ViewData viewData : viewDataList) {
+ timeSeriesList.addAll(
+ StackdriverExportUtils.createTimeSeriesList(viewData, monitoredResource, domain));
+ }
+ for (List<TimeSeries> batchedTimeSeries :
+ Lists.partition(timeSeriesList, MAX_BATCH_EXPORT_SIZE)) {
+ Span span = tracer.getCurrentSpan();
+ span.addAnnotation("Export Stackdriver TimeSeries.");
+ try {
+ CreateTimeSeriesRequest request =
+ CreateTimeSeriesRequest.newBuilder()
+ .setName(projectName.toString())
+ .addAllTimeSeries(batchedTimeSeries)
+ .build();
+ metricServiceClient.createTimeSeries(request);
+ span.addAnnotation("Finish exporting TimeSeries.");
+ } catch (ApiException e) {
+ logger.log(Level.WARNING, "ApiException thrown when exporting TimeSeries.", e);
+ span.setStatus(
+ Status.CanonicalCode.valueOf(e.getStatusCode().getCode().name())
+ .toStatus()
+ .withDescription(
+ "ApiException thrown when exporting TimeSeries: " + exceptionMessage(e)));
+ } catch (Throwable e) {
+ logger.log(Level.WARNING, "Exception thrown when exporting TimeSeries.", e);
+ span.setStatus(
+ Status.UNKNOWN.withDescription(
+ "Exception thrown when exporting TimeSeries: " + exceptionMessage(e)));
+ }
+ }
+ }
+
+ @Override
+ public void run() {
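+    // Poll and export stats repeatedly, sleeping scheduleDelayMillis between runs, until the
+    // worker thread is interrupted.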
+ while (true) {
+ Span span =
+ tracer
+ .spanBuilder("ExportStatsToStackdriverMonitoring")
+ .setRecordEvents(true)
+ .setSampler(probabilitySampler)
+ .startSpan();
+ Scope scope = tracer.withSpan(span);
+ try {
+ export();
+ } catch (Throwable e) {
+ logger.log(Level.WARNING, "Exception thrown by the Stackdriver stats exporter.", e);
+ span.setStatus(
+ Status.UNKNOWN.withDescription(
+ "Exception from Stackdriver Exporter: " + exceptionMessage(e)));
+ } finally {
+ scope.close();
+ span.end();
+ }
+ try {
+ Thread.sleep(scheduleDelayMillis);
+ } catch (InterruptedException ie) {
+ // Preserve the interruption status as per guidance and stop doing any work.
+ Thread.currentThread().interrupt();
+ return;
+ }
+ }
+ }
+
+ private static String exceptionMessage(Throwable e) {
+ return e.getMessage() != null ? e.getMessage() : e.getClass().getName();
+ }
+
+ @VisibleForTesting
+ static String getDomain(@javax.annotation.Nullable String metricNamePrefix) {
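+    // A null or empty prefix falls back to "custom.googleapis.com/opencensus/"; otherwise the
+    // prefix is used as-is, with a trailing '/' appended if missing (illustrative example:
+    // "myorg.com/metrics" -> "myorg.com/metrics/").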
+ String domain;
+ if (Strings.isNullOrEmpty(metricNamePrefix)) {
+ domain = CUSTOM_OPENCENSUS_DOMAIN;
+ } else {
+ if (!metricNamePrefix.endsWith("/")) {
+ domain = metricNamePrefix + '/';
+ } else {
+ domain = metricNamePrefix;
+ }
+ }
+ return domain;
+ }
+
+ @VisibleForTesting
+ static String getDisplayNamePrefix(@javax.annotation.Nullable String metricNamePrefix) {
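+    // A null prefix falls back to "OpenCensus/"; a non-empty prefix gets a trailing '/' appended
+    // if missing, and an empty prefix is returned unchanged.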
+ if (metricNamePrefix == null) {
+ return DEFAULT_DISPLAY_NAME_PREFIX;
+ } else {
+ if (!metricNamePrefix.endsWith("/") && !metricNamePrefix.isEmpty()) {
+ metricNamePrefix += '/';
+ }
+ return metricNamePrefix;
+ }
+ }
+}
diff --git a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfiguration.java b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfiguration.java
new file mode 100644
index 00000000..c4008ca1
--- /dev/null
+++ b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfiguration.java
@@ -0,0 +1,159 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.stackdriver;
+
+import com.google.api.MonitoredResource;
+import com.google.auth.Credentials;
+import com.google.auto.value.AutoValue;
+import io.opencensus.common.Duration;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.Immutable;
+
+/**
+ * Configurations for {@link StackdriverStatsExporter}.
+ *
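+ * <p>A minimal usage sketch (values are illustrative):
+ *
+ * <pre>{@code
+ * StackdriverStatsConfiguration configuration =
+ *     StackdriverStatsConfiguration.builder()
+ *         .setProjectId("my-project-id")
+ *         .setExportInterval(Duration.create(60, 0))
+ *         .build();
+ * StackdriverStatsExporter.createAndRegister(configuration);
+ * }</pre>
+ *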
+ * @since 0.11
+ */
+@AutoValue
+@Immutable
+public abstract class StackdriverStatsConfiguration {
+
+ StackdriverStatsConfiguration() {}
+
+ /**
+ * Returns the {@link Credentials}.
+ *
+ * @return the {@code Credentials}.
+ * @since 0.11
+ */
+ @Nullable
+ public abstract Credentials getCredentials();
+
+ /**
+ * Returns the project id.
+ *
+ * @return the project id.
+ * @since 0.11
+ */
+ @Nullable
+ public abstract String getProjectId();
+
+ /**
+ * Returns the export interval between pushes to StackDriver.
+ *
+ * @return the export interval.
+ * @since 0.11
+ */
+ @Nullable
+ public abstract Duration getExportInterval();
+
+ /**
+ * Returns the Stackdriver {@link MonitoredResource}.
+ *
+ * @return the {@code MonitoredResource}.
+ * @since 0.11
+ */
+ @Nullable
+ public abstract MonitoredResource getMonitoredResource();
+
+ /**
+ * Returns the name prefix for Stackdriver metrics.
+ *
+ * @return the metric name prefix.
+ * @since 0.16
+ */
+ @Nullable
+ public abstract String getMetricNamePrefix();
+
+ /**
+ * Returns a new {@link Builder}.
+ *
+ * @return a {@code Builder}.
+ * @since 0.11
+ */
+ public static Builder builder() {
+ return new AutoValue_StackdriverStatsConfiguration.Builder();
+ }
+
+ /**
+ * Builder for {@link StackdriverStatsConfiguration}.
+ *
+ * @since 0.11
+ */
+ @AutoValue.Builder
+ public abstract static class Builder {
+
+ Builder() {}
+
+ /**
+ * Sets the given {@link Credentials}.
+ *
+ * @param credentials the {@code Credentials}.
+ * @return this.
+ * @since 0.11
+ */
+ public abstract Builder setCredentials(Credentials credentials);
+
+ /**
+ * Sets the given project id.
+ *
+ * @param projectId the cloud project id.
+ * @return this.
+ * @since 0.11
+ */
+ public abstract Builder setProjectId(String projectId);
+
+ /**
+ * Sets the export interval.
+ *
+ * @param exportInterval the export interval between pushes to StackDriver.
+ * @return this.
+ * @since 0.11
+ */
+ public abstract Builder setExportInterval(Duration exportInterval);
+
+ /**
+ * Sets the {@link MonitoredResource}.
+ *
+ * @param monitoredResource the Stackdriver {@code MonitoredResource}.
+ * @return this.
+ * @since 0.11
+ */
+ public abstract Builder setMonitoredResource(MonitoredResource monitoredResource);
+
+ /**
+     * Sets the name prefix for Stackdriver metrics.
+ *
+     * <p>It is suggested to use a prefix with a custom or external domain name, for example
+     * "custom.googleapis.com/myorg/" or "external.googleapis.com/prometheus/". If the given
+     * prefix doesn't start with a valid domain, we will add "custom.googleapis.com/" before the
+     * prefix.
+ *
+ * @param prefix the metric name prefix.
+ * @return this.
+ * @since 0.16
+ */
+ public abstract Builder setMetricNamePrefix(String prefix);
+
+ /**
+ * Builds a new {@link StackdriverStatsConfiguration} with current settings.
+ *
+ * @return a {@code StackdriverStatsConfiguration}.
+ * @since 0.11
+ */
+ public abstract StackdriverStatsConfiguration build();
+ }
+}
diff --git a/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporter.java b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporter.java
new file mode 100644
index 00000000..51c54916
--- /dev/null
+++ b/exporters/stats/stackdriver/src/main/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporter.java
@@ -0,0 +1,363 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.stackdriver;
+
+import static com.google.common.base.Preconditions.checkArgument;
+import static com.google.common.base.Preconditions.checkNotNull;
+import static com.google.common.base.Preconditions.checkState;
+
+import com.google.api.MonitoredResource;
+import com.google.api.gax.core.FixedCredentialsProvider;
+import com.google.auth.Credentials;
+import com.google.auth.oauth2.GoogleCredentials;
+import com.google.cloud.ServiceOptions;
+import com.google.cloud.monitoring.v3.MetricServiceClient;
+import com.google.cloud.monitoring.v3.MetricServiceSettings;
+import com.google.common.annotations.VisibleForTesting;
+import com.google.common.util.concurrent.MoreExecutors;
+import io.opencensus.common.Duration;
+import io.opencensus.stats.Stats;
+import io.opencensus.stats.ViewManager;
+import java.io.IOException;
+import java.util.concurrent.ThreadFactory;
+import javax.annotation.Nullable;
+import javax.annotation.concurrent.GuardedBy;
+
+/**
+ * Exporter to Stackdriver Monitoring Client API v3.
+ *
+ * <p>Example of usage on Google Cloud VMs:
+ *
+ * <pre><code>
+ * public static void main(String[] args) throws IOException {
+ * StackdriverStatsExporter.createAndRegister(
+ * StackdriverStatsConfiguration
+ * .builder()
+ * .setProjectId("MyStackdriverProjectId")
+ * .setExportInterval(Duration.fromMillis(100000))
+ * .build());
+ * ... // Do work.
+ * }
+ * </code></pre>
+ *
+ * @since 0.9
+ */
+public final class StackdriverStatsExporter {
+
+ private static final Object monitor = new Object();
+
+ private final Thread workerThread;
+
+ @GuardedBy("monitor")
+ @Nullable
+ private static StackdriverStatsExporter exporter = null;
+
+ private static final Duration ZERO = Duration.create(0, 0);
+
+ @VisibleForTesting static final Duration DEFAULT_INTERVAL = Duration.create(60, 0);
+
+ private static final MonitoredResource DEFAULT_RESOURCE =
+ StackdriverExportUtils.getDefaultResource();
+
+ @VisibleForTesting
+ StackdriverStatsExporter(
+ String projectId,
+ MetricServiceClient metricServiceClient,
+ Duration exportInterval,
+ ViewManager viewManager,
+ MonitoredResource monitoredResource,
+ @Nullable String metricNamePrefix) {
+ checkArgument(exportInterval.compareTo(ZERO) > 0, "Duration must be positive");
+ StackdriverExporterWorker worker =
+ new StackdriverExporterWorker(
+ projectId,
+ metricServiceClient,
+ exportInterval,
+ viewManager,
+ monitoredResource,
+ metricNamePrefix);
+ this.workerThread = new DaemonThreadFactory().newThread(worker);
+ }
+
+ /**
+   * Creates a StackdriverStatsExporter for an explicit project ID, using explicit credentials and
+   * the default Monitored Resource.
+ *
+ * <p>Only one Stackdriver exporter can be created.
+ *
+   * @param credentials the credentials used to authenticate API calls.
+ * @param projectId the cloud project id.
+ * @param exportInterval the interval between pushing stats to StackDriver.
+ * @throws IllegalStateException if a Stackdriver exporter already exists.
+ * @deprecated in favor of {@link #createAndRegister(StackdriverStatsConfiguration)}.
+ * @since 0.9
+ */
+ @Deprecated
+ public static void createAndRegisterWithCredentialsAndProjectId(
+ Credentials credentials, String projectId, Duration exportInterval) throws IOException {
+ checkNotNull(credentials, "credentials");
+ checkNotNull(projectId, "projectId");
+ checkNotNull(exportInterval, "exportInterval");
+ createInternal(credentials, projectId, exportInterval, null, null);
+ }
+
+ /**
+ * Creates a Stackdriver Stats exporter for an explicit project ID, with default Monitored
+ * Resource.
+ *
+ * <p>Only one Stackdriver exporter can be created.
+ *
+ * <p>This uses the default application credentials. See {@link
+ * GoogleCredentials#getApplicationDefault}.
+ *
+   * <p>This is equivalent to:
+   *
+   * <pre>{@code
+   * StackdriverStatsExporter.createAndRegisterWithCredentialsAndProjectId(
+   *     GoogleCredentials.getApplicationDefault(), projectId, exportInterval);
+ * }</pre>
+ *
+ * @param projectId the cloud project id.
+ * @param exportInterval the interval between pushing stats to StackDriver.
+ * @throws IllegalStateException if a Stackdriver exporter is already created.
+ * @deprecated in favor of {@link #createAndRegister(StackdriverStatsConfiguration)}.
+ * @since 0.9
+ */
+ @Deprecated
+ public static void createAndRegisterWithProjectId(String projectId, Duration exportInterval)
+ throws IOException {
+ checkNotNull(projectId, "projectId");
+ checkNotNull(exportInterval, "exportInterval");
+ createInternal(null, projectId, exportInterval, null, null);
+ }
+
+ /**
+ * Creates a Stackdriver Stats exporter with a {@link StackdriverStatsConfiguration}.
+ *
+ * <p>Only one Stackdriver exporter can be created.
+ *
+ * <p>If {@code credentials} of the configuration is not set, the exporter will use the default
+ * application credentials. See {@link GoogleCredentials#getApplicationDefault}.
+ *
+ * <p>If {@code projectId} of the configuration is not set, the exporter will use the default
+ * project ID configured. See {@link ServiceOptions#getDefaultProjectId}.
+ *
+ * <p>If {@code exportInterval} of the configuration is not set, the exporter will use the default
+ * interval of one minute.
+ *
+   * <p>If {@code monitoredResource} of the configuration is not set, the exporter will try to
+   * create an appropriate {@code MonitoredResource} based on the environment variables. In
+ * addition, please refer to
+ * cloud.google.com/monitoring/custom-metrics/creating-metrics#which-resource for a list of valid
+ * {@code MonitoredResource}s.
+ *
+   * <p>If {@code metricNamePrefix} of the configuration is not set, the exporter will use the
+   * default domain "custom.googleapis.com/opencensus/" and display name prefix "OpenCensus/".
+ *
+ * @param configuration the {@code StackdriverStatsConfiguration}.
+ * @throws IllegalStateException if a Stackdriver exporter is already created.
+ * @since 0.11.0
+ */
+ public static void createAndRegister(StackdriverStatsConfiguration configuration)
+ throws IOException {
+ checkNotNull(configuration, "configuration");
+ createInternal(
+ configuration.getCredentials(),
+ configuration.getProjectId(),
+ configuration.getExportInterval(),
+ configuration.getMonitoredResource(),
+ configuration.getMetricNamePrefix());
+ }
+
+ /**
+ * Creates a Stackdriver Stats exporter with default settings.
+ *
+ * <p>Only one Stackdriver exporter can be created.
+ *
+   * <p>This is equivalent to:
+ *
+ * <pre>{@code
+ * StackdriverStatsExporter.createAndRegister(StackdriverStatsConfiguration.builder().build());
+ * }</pre>
+ *
+ * <p>This method uses the default application credentials. See {@link
+ * GoogleCredentials#getApplicationDefault}.
+ *
+ * <p>This method uses the default project ID configured. See {@link
+ * ServiceOptions#getDefaultProjectId}.
+ *
+ * <p>This method uses the default interval of one minute.
+ *
+ * <p>This method uses the default resource created from the environment variables.
+ *
+   * <p>This method uses the default display name prefix "OpenCensus/".
+ *
+ * @throws IllegalStateException if a Stackdriver exporter is already created.
+ * @since 0.11.0
+ */
+ public static void createAndRegister() throws IOException {
+ createInternal(null, null, null, null, null);
+ }
+
+ /**
+ * Creates a Stackdriver Stats exporter with default Monitored Resource.
+ *
+ * <p>Only one Stackdriver exporter can be created.
+ *
+ * <p>This uses the default application credentials. See {@link
+ * GoogleCredentials#getApplicationDefault}.
+ *
+   * <p>This uses the default project ID configured; see {@link ServiceOptions#getDefaultProjectId}.
+ *
+   * <p>This is equivalent to:
+   *
+   * <pre>{@code
+   * StackdriverStatsExporter.createAndRegisterWithProjectId(
+   *     ServiceOptions.getDefaultProjectId(), exportInterval);
+ * }</pre>
+ *
+ * @param exportInterval the interval between pushing stats to StackDriver.
+ * @throws IllegalStateException if a Stackdriver exporter is already created.
+ * @deprecated in favor of {@link #createAndRegister(StackdriverStatsConfiguration)}.
+ * @since 0.9
+ */
+ @Deprecated
+ public static void createAndRegister(Duration exportInterval) throws IOException {
+ checkNotNull(exportInterval, "exportInterval");
+ createInternal(null, null, exportInterval, null, null);
+ }
+
+ /**
+ * Creates a Stackdriver Stats exporter with an explicit project ID and a custom Monitored
+ * Resource.
+ *
+ * <p>Only one Stackdriver exporter can be created.
+ *
+ * <p>Please refer to cloud.google.com/monitoring/custom-metrics/creating-metrics#which-resource
+ * for a list of valid {@code MonitoredResource}s.
+ *
+ * <p>This uses the default application credentials. See {@link
+ * GoogleCredentials#getApplicationDefault}.
+ *
+ * @param projectId the cloud project id.
+ * @param exportInterval the interval between pushing stats to StackDriver.
+ * @param monitoredResource the Monitored Resource used by exporter.
+ * @throws IllegalStateException if a Stackdriver exporter is already created.
+ * @deprecated in favor of {@link #createAndRegister(StackdriverStatsConfiguration)}.
+ * @since 0.10
+ */
+ @Deprecated
+ public static void createAndRegisterWithProjectIdAndMonitoredResource(
+ String projectId, Duration exportInterval, MonitoredResource monitoredResource)
+ throws IOException {
+ checkNotNull(projectId, "projectId");
+ checkNotNull(exportInterval, "exportInterval");
+ checkNotNull(monitoredResource, "monitoredResource");
+ createInternal(null, projectId, exportInterval, monitoredResource, null);
+ }
+
+ /**
+ * Creates a Stackdriver Stats exporter with a custom Monitored Resource.
+ *
+ * <p>Only one Stackdriver exporter can be created.
+ *
+ * <p>Please refer to cloud.google.com/monitoring/custom-metrics/creating-metrics#which-resource
+ * for a list of valid {@code MonitoredResource}s.
+ *
+ * <p>This uses the default application credentials. See {@link
+ * GoogleCredentials#getApplicationDefault}.
+ *
+   * <p>This uses the default project ID configured; see {@link ServiceOptions#getDefaultProjectId}.
+ *
+ * @param exportInterval the interval between pushing stats to StackDriver.
+ * @param monitoredResource the Monitored Resource used by exporter.
+ * @throws IllegalStateException if a Stackdriver exporter is already created.
+ * @deprecated in favor of {@link #createAndRegister(StackdriverStatsConfiguration)}.
+ * @since 0.10
+ */
+ @Deprecated
+ public static void createAndRegisterWithMonitoredResource(
+ Duration exportInterval, MonitoredResource monitoredResource) throws IOException {
+ checkNotNull(exportInterval, "exportInterval");
+ checkNotNull(monitoredResource, "monitoredResource");
+ createInternal(null, null, exportInterval, monitoredResource, null);
+ }
+
+ // Use createInternal() (instead of constructor) to enforce singleton.
+ private static void createInternal(
+ @Nullable Credentials credentials,
+ @Nullable String projectId,
+ @Nullable Duration exportInterval,
+ @Nullable MonitoredResource monitoredResource,
+ @Nullable String metricNamePrefix)
+ throws IOException {
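+    // Fill in library defaults for any parameter the caller left unset.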
+ projectId = projectId == null ? ServiceOptions.getDefaultProjectId() : projectId;
+ exportInterval = exportInterval == null ? DEFAULT_INTERVAL : exportInterval;
+ monitoredResource = monitoredResource == null ? DEFAULT_RESOURCE : monitoredResource;
+ synchronized (monitor) {
+ checkState(exporter == null, "Stackdriver stats exporter is already created.");
+ MetricServiceClient metricServiceClient;
+ // Initialize MetricServiceClient inside lock to avoid creating multiple clients.
+ if (credentials == null) {
+ metricServiceClient = MetricServiceClient.create();
+ } else {
+ metricServiceClient =
+ MetricServiceClient.create(
+ MetricServiceSettings.newBuilder()
+ .setCredentialsProvider(FixedCredentialsProvider.create(credentials))
+ .build());
+ }
+ exporter =
+ new StackdriverStatsExporter(
+ projectId,
+ metricServiceClient,
+ exportInterval,
+ Stats.getViewManager(),
+ monitoredResource,
+ metricNamePrefix);
+ exporter.workerThread.start();
+ }
+ }
+
+ // Resets exporter to null. Used only for unit tests.
+ @VisibleForTesting
+ static void unsafeResetExporter() {
+ synchronized (monitor) {
+ StackdriverStatsExporter.exporter = null;
+ }
+ }
+
+ /** A lightweight {@link ThreadFactory} to spawn threads in a GAE-Java7-compatible way. */
+ // TODO(Hailong): Remove this once we use a callback to implement the exporter.
+ static final class DaemonThreadFactory implements ThreadFactory {
+ // AppEngine runtimes have constraints on threading and socket handling
+ // that need to be accommodated.
+ public static final boolean IS_RESTRICTED_APPENGINE =
+ System.getProperty("com.google.appengine.runtime.environment") != null
+ && "1.7".equals(System.getProperty("java.specification.version"));
+ private static final ThreadFactory threadFactory = MoreExecutors.platformThreadFactory();
+
+ @Override
+ public Thread newThread(Runnable r) {
+ Thread thread = threadFactory.newThread(r);
+ if (!IS_RESTRICTED_APPENGINE) {
+ thread.setName("ExportWorkerThread");
+ thread.setDaemon(true);
+ }
+ return thread;
+ }
+ }
+}
diff --git a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtilsTest.java b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtilsTest.java
new file mode 100644
index 00000000..cd536e8f
--- /dev/null
+++ b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExportUtilsTest.java
@@ -0,0 +1,568 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.stackdriver;
+
+import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExporterWorker.CUSTOM_OPENCENSUS_DOMAIN;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExporterWorker.DEFAULT_DISPLAY_NAME_PREFIX;
+
+import com.google.api.Distribution.BucketOptions;
+import com.google.api.Distribution.BucketOptions.Explicit;
+import com.google.api.LabelDescriptor;
+import com.google.api.LabelDescriptor.ValueType;
+import com.google.api.Metric;
+import com.google.api.MetricDescriptor;
+import com.google.api.MetricDescriptor.MetricKind;
+import com.google.api.MonitoredResource;
+import com.google.common.collect.ImmutableMap;
+import com.google.monitoring.v3.Point;
+import com.google.monitoring.v3.TimeInterval;
+import com.google.monitoring.v3.TimeSeries;
+import com.google.monitoring.v3.TypedValue;
+import io.opencensus.common.Duration;
+import io.opencensus.common.Timestamp;
+import io.opencensus.stats.Aggregation.Count;
+import io.opencensus.stats.Aggregation.Distribution;
+import io.opencensus.stats.Aggregation.LastValue;
+import io.opencensus.stats.Aggregation.Mean;
+import io.opencensus.stats.Aggregation.Sum;
+import io.opencensus.stats.AggregationData.CountData;
+import io.opencensus.stats.AggregationData.DistributionData;
+import io.opencensus.stats.AggregationData.LastValueDataDouble;
+import io.opencensus.stats.AggregationData.LastValueDataLong;
+import io.opencensus.stats.AggregationData.MeanData;
+import io.opencensus.stats.AggregationData.SumDataDouble;
+import io.opencensus.stats.AggregationData.SumDataLong;
+import io.opencensus.stats.BucketBoundaries;
+import io.opencensus.stats.Measure.MeasureDouble;
+import io.opencensus.stats.Measure.MeasureLong;
+import io.opencensus.stats.View;
+import io.opencensus.stats.View.AggregationWindow.Cumulative;
+import io.opencensus.stats.View.AggregationWindow.Interval;
+import io.opencensus.stats.View.Name;
+import io.opencensus.stats.ViewData;
+import io.opencensus.stats.ViewData.AggregationWindowData.CumulativeData;
+import io.opencensus.stats.ViewData.AggregationWindowData.IntervalData;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagValue;
+import java.lang.management.ManagementFactory;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link StackdriverExportUtils}. */
+@RunWith(JUnit4.class)
+public class StackdriverExportUtilsTest {
+
+ @Rule public final ExpectedException thrown = ExpectedException.none();
+
+ private static final TagKey KEY = TagKey.create("KEY");
+ private static final TagKey KEY_2 = TagKey.create("KEY2");
+ private static final TagKey KEY_3 = TagKey.create("KEY3");
+ private static final TagValue VALUE_1 = TagValue.create("VALUE1");
+ private static final TagValue VALUE_2 = TagValue.create("VALUE2");
+ private static final String MEASURE_UNIT = "us";
+ private static final String MEASURE_DESCRIPTION = "measure description";
+ private static final MeasureDouble MEASURE_DOUBLE =
+ MeasureDouble.create("measure1", MEASURE_DESCRIPTION, MEASURE_UNIT);
+ private static final MeasureLong MEASURE_LONG =
+ MeasureLong.create("measure2", MEASURE_DESCRIPTION, MEASURE_UNIT);
+ private static final String VIEW_NAME = "view";
+ private static final String VIEW_DESCRIPTION = "view description";
+ private static final Duration TEN_SECONDS = Duration.create(10, 0);
+ private static final Cumulative CUMULATIVE = Cumulative.create();
+ private static final Interval INTERVAL = Interval.create(TEN_SECONDS);
+ private static final BucketBoundaries BUCKET_BOUNDARIES =
+ BucketBoundaries.create(Arrays.asList(0.0, 1.0, 3.0, 5.0));
+ private static final Sum SUM = Sum.create();
+ private static final Count COUNT = Count.create();
+ private static final Mean MEAN = Mean.create();
+ private static final Distribution DISTRIBUTION = Distribution.create(BUCKET_BOUNDARIES);
+ private static final LastValue LAST_VALUE = LastValue.create();
+ private static final String PROJECT_ID = "id";
+ private static final MonitoredResource DEFAULT_RESOURCE =
+ MonitoredResource.newBuilder().setType("global").build();
+ private static final String DEFAULT_TASK_VALUE =
+ "java-" + ManagementFactory.getRuntimeMXBean().getName();
+
+ @Test
+ public void testConstant() {
+ assertThat(StackdriverExportUtils.LABEL_DESCRIPTION).isEqualTo("OpenCensus TagKey");
+ }
+
+ @Test
+ public void createLabelDescriptor() {
+ assertThat(StackdriverExportUtils.createLabelDescriptor(TagKey.create("string")))
+ .isEqualTo(
+ LabelDescriptor.newBuilder()
+ .setKey("string")
+ .setDescription(StackdriverExportUtils.LABEL_DESCRIPTION)
+ .setValueType(ValueType.STRING)
+ .build());
+ }
+
+ @Test
+ public void createMetricKind() {
+ assertThat(StackdriverExportUtils.createMetricKind(CUMULATIVE, SUM))
+ .isEqualTo(MetricKind.CUMULATIVE);
+ assertThat(StackdriverExportUtils.createMetricKind(INTERVAL, COUNT))
+ .isEqualTo(MetricKind.UNRECOGNIZED);
+ assertThat(StackdriverExportUtils.createMetricKind(CUMULATIVE, LAST_VALUE))
+ .isEqualTo(MetricKind.GAUGE);
+ assertThat(StackdriverExportUtils.createMetricKind(INTERVAL, LAST_VALUE))
+ .isEqualTo(MetricKind.GAUGE);
+ }
+
+ @Test
+ public void createValueType() {
+ assertThat(StackdriverExportUtils.createValueType(SUM, MEASURE_DOUBLE))
+ .isEqualTo(MetricDescriptor.ValueType.DOUBLE);
+ assertThat(StackdriverExportUtils.createValueType(SUM, MEASURE_LONG))
+ .isEqualTo(MetricDescriptor.ValueType.INT64);
+ assertThat(StackdriverExportUtils.createValueType(COUNT, MEASURE_DOUBLE))
+ .isEqualTo(MetricDescriptor.ValueType.INT64);
+ assertThat(StackdriverExportUtils.createValueType(COUNT, MEASURE_LONG))
+ .isEqualTo(MetricDescriptor.ValueType.INT64);
+ assertThat(StackdriverExportUtils.createValueType(MEAN, MEASURE_DOUBLE))
+ .isEqualTo(MetricDescriptor.ValueType.DOUBLE);
+ assertThat(StackdriverExportUtils.createValueType(MEAN, MEASURE_LONG))
+ .isEqualTo(MetricDescriptor.ValueType.DOUBLE);
+ assertThat(StackdriverExportUtils.createValueType(DISTRIBUTION, MEASURE_DOUBLE))
+ .isEqualTo(MetricDescriptor.ValueType.DISTRIBUTION);
+ assertThat(StackdriverExportUtils.createValueType(DISTRIBUTION, MEASURE_LONG))
+ .isEqualTo(MetricDescriptor.ValueType.DISTRIBUTION);
+ assertThat(StackdriverExportUtils.createValueType(LAST_VALUE, MEASURE_DOUBLE))
+ .isEqualTo(MetricDescriptor.ValueType.DOUBLE);
+ assertThat(StackdriverExportUtils.createValueType(LAST_VALUE, MEASURE_LONG))
+ .isEqualTo(MetricDescriptor.ValueType.INT64);
+ }
+
+ @Test
+ public void createUnit() {
+ assertThat(StackdriverExportUtils.createUnit(SUM, MEASURE_DOUBLE)).isEqualTo(MEASURE_UNIT);
+ assertThat(StackdriverExportUtils.createUnit(COUNT, MEASURE_DOUBLE)).isEqualTo("1");
+ assertThat(StackdriverExportUtils.createUnit(MEAN, MEASURE_DOUBLE)).isEqualTo(MEASURE_UNIT);
+ assertThat(StackdriverExportUtils.createUnit(DISTRIBUTION, MEASURE_DOUBLE))
+ .isEqualTo(MEASURE_UNIT);
+ assertThat(StackdriverExportUtils.createUnit(LAST_VALUE, MEASURE_DOUBLE))
+ .isEqualTo(MEASURE_UNIT);
+ }
+
+ @Test
+ public void createMetric() {
+ View view =
+ View.create(
+ Name.create(VIEW_NAME),
+ VIEW_DESCRIPTION,
+ MEASURE_DOUBLE,
+ DISTRIBUTION,
+ Arrays.asList(KEY),
+ CUMULATIVE);
+ assertThat(
+ StackdriverExportUtils.createMetric(
+ view, Arrays.asList(VALUE_1), CUSTOM_OPENCENSUS_DOMAIN))
+ .isEqualTo(
+ Metric.newBuilder()
+ .setType("custom.googleapis.com/opencensus/" + VIEW_NAME)
+ .putLabels("KEY", "VALUE1")
+ .putLabels(StackdriverExportUtils.OPENCENSUS_TASK, DEFAULT_TASK_VALUE)
+ .build());
+ }
+
+ @Test
+ public void createMetric_WithExternalMetricDomain() {
+ View view =
+ View.create(
+ Name.create(VIEW_NAME),
+ VIEW_DESCRIPTION,
+ MEASURE_DOUBLE,
+ DISTRIBUTION,
+ Arrays.asList(KEY),
+ CUMULATIVE);
+ String prometheusDomain = "external.googleapis.com/prometheus/";
+ assertThat(StackdriverExportUtils.createMetric(view, Arrays.asList(VALUE_1), prometheusDomain))
+ .isEqualTo(
+ Metric.newBuilder()
+ .setType(prometheusDomain + VIEW_NAME)
+ .putLabels("KEY", "VALUE1")
+ .putLabels(StackdriverExportUtils.OPENCENSUS_TASK, DEFAULT_TASK_VALUE)
+ .build());
+ }
+
+ @Test
+ public void createMetric_skipNullTagValue() {
+ View view =
+ View.create(
+ Name.create(VIEW_NAME),
+ VIEW_DESCRIPTION,
+ MEASURE_DOUBLE,
+ DISTRIBUTION,
+ Arrays.asList(KEY, KEY_2, KEY_3),
+ CUMULATIVE);
+ assertThat(
+ StackdriverExportUtils.createMetric(
+ view, Arrays.asList(VALUE_1, null, VALUE_2), CUSTOM_OPENCENSUS_DOMAIN))
+ .isEqualTo(
+ Metric.newBuilder()
+ .setType("custom.googleapis.com/opencensus/" + VIEW_NAME)
+ .putLabels("KEY", "VALUE1")
+ .putLabels("KEY3", "VALUE2")
+ .putLabels(StackdriverExportUtils.OPENCENSUS_TASK, DEFAULT_TASK_VALUE)
+ .build());
+ }
+
+ @Test
+ public void createMetric_throwWhenTagKeysAndValuesHaveDifferentSize() {
+ View view =
+ View.create(
+ Name.create(VIEW_NAME),
+ VIEW_DESCRIPTION,
+ MEASURE_DOUBLE,
+ DISTRIBUTION,
+ Arrays.asList(KEY, KEY_2, KEY_3),
+ CUMULATIVE);
+ List<TagValue> tagValues = Arrays.asList(VALUE_1, null);
+ thrown.expect(IllegalArgumentException.class);
+ thrown.expectMessage("TagKeys and TagValues don't have same size.");
+ StackdriverExportUtils.createMetric(view, tagValues, CUSTOM_OPENCENSUS_DOMAIN);
+ }
+
+ @Test
+ public void convertTimestamp() {
+ Timestamp censusTimestamp1 = Timestamp.create(100, 3000);
+ assertThat(StackdriverExportUtils.convertTimestamp(censusTimestamp1))
+ .isEqualTo(
+ com.google.protobuf.Timestamp.newBuilder().setSeconds(100).setNanos(3000).build());
+
+    // Stackdriver doesn't allow negative timestamps; convertTimestamp replaces negative values
+    // with a default (empty) Timestamp instance.
+ Timestamp censusTimestamp2 = Timestamp.create(-100, 3000);
+ assertThat(StackdriverExportUtils.convertTimestamp(censusTimestamp2))
+ .isEqualTo(com.google.protobuf.Timestamp.newBuilder().build());
+ }
+
+ @Test
+ public void createTimeInterval_cumulative() {
+ Timestamp censusTimestamp1 = Timestamp.create(100, 3000);
+ Timestamp censusTimestamp2 = Timestamp.create(200, 0);
+ assertThat(
+ StackdriverExportUtils.createTimeInterval(
+ CumulativeData.create(censusTimestamp1, censusTimestamp2), DISTRIBUTION))
+ .isEqualTo(
+ TimeInterval.newBuilder()
+ .setStartTime(StackdriverExportUtils.convertTimestamp(censusTimestamp1))
+ .setEndTime(StackdriverExportUtils.convertTimestamp(censusTimestamp2))
+ .build());
+ assertThat(
+ StackdriverExportUtils.createTimeInterval(
+ CumulativeData.create(censusTimestamp1, censusTimestamp2), LAST_VALUE))
+ .isEqualTo(
+ TimeInterval.newBuilder()
+ .setEndTime(StackdriverExportUtils.convertTimestamp(censusTimestamp2))
+ .build());
+ }
+
+ @Test
+ public void createTimeInterval_interval() {
+ IntervalData intervalData = IntervalData.create(Timestamp.create(200, 0));
+    // Only Cumulative views are supported in this version.
+ thrown.expect(IllegalArgumentException.class);
+ StackdriverExportUtils.createTimeInterval(intervalData, SUM);
+ }
+
+ @Test
+ public void createBucketOptions() {
+ assertThat(StackdriverExportUtils.createBucketOptions(BUCKET_BOUNDARIES))
+ .isEqualTo(
+ BucketOptions.newBuilder()
+ .setExplicitBuckets(
+ Explicit.newBuilder().addAllBounds(Arrays.asList(0.0, 1.0, 3.0, 5.0)))
+ .build());
+ }
+
+ @Test
+ public void createDistribution() {
+ DistributionData distributionData =
+ DistributionData.create(2, 3, 0, 5, 14, Arrays.asList(0L, 1L, 1L, 0L, 1L));
+ assertThat(StackdriverExportUtils.createDistribution(distributionData, BUCKET_BOUNDARIES))
+ .isEqualTo(
+ com.google.api.Distribution.newBuilder()
+ .setMean(2)
+ .setCount(3)
+ // TODO(songya): uncomment this once Stackdriver supports setting max and min.
+ // .setRange(
+ // com.google.api.Distribution.Range.newBuilder().setMin(0).setMax(5).build())
+ .setBucketOptions(StackdriverExportUtils.createBucketOptions(BUCKET_BOUNDARIES))
+ .addAllBucketCounts(Arrays.asList(0L, 1L, 1L, 0L, 1L))
+ .setSumOfSquaredDeviation(14)
+ .build());
+ }
+
+ @Test
+ public void createTypedValue() {
+ assertThat(StackdriverExportUtils.createTypedValue(SUM, SumDataDouble.create(1.1)))
+ .isEqualTo(TypedValue.newBuilder().setDoubleValue(1.1).build());
+ assertThat(StackdriverExportUtils.createTypedValue(SUM, SumDataLong.create(10000)))
+ .isEqualTo(TypedValue.newBuilder().setInt64Value(10000).build());
+ assertThat(StackdriverExportUtils.createTypedValue(COUNT, CountData.create(55)))
+ .isEqualTo(TypedValue.newBuilder().setInt64Value(55).build());
+ assertThat(StackdriverExportUtils.createTypedValue(MEAN, MeanData.create(7.7, 8)))
+ .isEqualTo(TypedValue.newBuilder().setDoubleValue(7.7).build());
+ DistributionData distributionData =
+ DistributionData.create(2, 3, 0, 5, 14, Arrays.asList(0L, 1L, 1L, 0L, 1L));
+ assertThat(StackdriverExportUtils.createTypedValue(DISTRIBUTION, distributionData))
+ .isEqualTo(
+ TypedValue.newBuilder()
+ .setDistributionValue(
+ StackdriverExportUtils.createDistribution(distributionData, BUCKET_BOUNDARIES))
+ .build());
+ assertThat(StackdriverExportUtils.createTypedValue(LAST_VALUE, LastValueDataDouble.create(9.9)))
+ .isEqualTo(TypedValue.newBuilder().setDoubleValue(9.9).build());
+ assertThat(StackdriverExportUtils.createTypedValue(LAST_VALUE, LastValueDataLong.create(90000)))
+ .isEqualTo(TypedValue.newBuilder().setInt64Value(90000).build());
+ }
+
+ @Test
+ public void createPoint_cumulative() {
+ Timestamp censusTimestamp1 = Timestamp.create(100, 3000);
+ Timestamp censusTimestamp2 = Timestamp.create(200, 0);
+ CumulativeData cumulativeData = CumulativeData.create(censusTimestamp1, censusTimestamp2);
+ SumDataDouble sumDataDouble = SumDataDouble.create(33.3);
+
+ assertThat(StackdriverExportUtils.createPoint(sumDataDouble, cumulativeData, SUM))
+ .isEqualTo(
+ Point.newBuilder()
+ .setInterval(StackdriverExportUtils.createTimeInterval(cumulativeData, SUM))
+ .setValue(StackdriverExportUtils.createTypedValue(SUM, sumDataDouble))
+ .build());
+ }
+
+ @Test
+ public void createPoint_interval() {
+ IntervalData intervalData = IntervalData.create(Timestamp.create(200, 0));
+ SumDataDouble sumDataDouble = SumDataDouble.create(33.3);
+    // Only Cumulative views are supported in this version.
+ thrown.expect(IllegalArgumentException.class);
+ StackdriverExportUtils.createPoint(sumDataDouble, intervalData, SUM);
+ }
+
+ @Test
+ public void createMetricDescriptor_cumulative() {
+ View view =
+ View.create(
+ Name.create(VIEW_NAME),
+ VIEW_DESCRIPTION,
+ MEASURE_DOUBLE,
+ DISTRIBUTION,
+ Arrays.asList(KEY),
+ CUMULATIVE);
+ MetricDescriptor metricDescriptor =
+ StackdriverExportUtils.createMetricDescriptor(
+ view, PROJECT_ID, "custom.googleapis.com/myorg/", "myorg/");
+ assertThat(metricDescriptor.getName())
+ .isEqualTo(
+ "projects/"
+ + PROJECT_ID
+ + "/metricDescriptors/custom.googleapis.com/myorg/"
+ + VIEW_NAME);
+ assertThat(metricDescriptor.getDescription()).isEqualTo(VIEW_DESCRIPTION);
+ assertThat(metricDescriptor.getDisplayName()).isEqualTo("myorg/" + VIEW_NAME);
+ assertThat(metricDescriptor.getType()).isEqualTo("custom.googleapis.com/myorg/" + VIEW_NAME);
+ assertThat(metricDescriptor.getUnit()).isEqualTo(MEASURE_UNIT);
+ assertThat(metricDescriptor.getMetricKind()).isEqualTo(MetricKind.CUMULATIVE);
+ assertThat(metricDescriptor.getValueType()).isEqualTo(MetricDescriptor.ValueType.DISTRIBUTION);
+ assertThat(metricDescriptor.getLabelsList())
+ .containsExactly(
+ LabelDescriptor.newBuilder()
+ .setKey(KEY.getName())
+ .setDescription(StackdriverExportUtils.LABEL_DESCRIPTION)
+ .setValueType(ValueType.STRING)
+ .build(),
+ LabelDescriptor.newBuilder()
+ .setKey(StackdriverExportUtils.OPENCENSUS_TASK)
+ .setDescription(StackdriverExportUtils.OPENCENSUS_TASK_DESCRIPTION)
+ .setValueType(ValueType.STRING)
+ .build());
+ }
+
+ @Test
+ public void createMetricDescriptor_cumulative_count() {
+ View view =
+ View.create(
+ Name.create(VIEW_NAME),
+ VIEW_DESCRIPTION,
+ MEASURE_DOUBLE,
+ COUNT,
+ Arrays.asList(KEY),
+ CUMULATIVE);
+ MetricDescriptor metricDescriptor =
+ StackdriverExportUtils.createMetricDescriptor(
+ view, PROJECT_ID, CUSTOM_OPENCENSUS_DOMAIN, DEFAULT_DISPLAY_NAME_PREFIX);
+ assertThat(metricDescriptor.getName())
+ .isEqualTo(
+ "projects/"
+ + PROJECT_ID
+ + "/metricDescriptors/custom.googleapis.com/opencensus/"
+ + VIEW_NAME);
+ assertThat(metricDescriptor.getDescription()).isEqualTo(VIEW_DESCRIPTION);
+ assertThat(metricDescriptor.getDisplayName()).isEqualTo("OpenCensus/" + VIEW_NAME);
+ assertThat(metricDescriptor.getType())
+ .isEqualTo("custom.googleapis.com/opencensus/" + VIEW_NAME);
+ assertThat(metricDescriptor.getUnit()).isEqualTo("1");
+ assertThat(metricDescriptor.getMetricKind()).isEqualTo(MetricKind.CUMULATIVE);
+ assertThat(metricDescriptor.getValueType()).isEqualTo(MetricDescriptor.ValueType.INT64);
+ assertThat(metricDescriptor.getLabelsList())
+ .containsExactly(
+ LabelDescriptor.newBuilder()
+ .setKey(KEY.getName())
+ .setDescription(StackdriverExportUtils.LABEL_DESCRIPTION)
+ .setValueType(ValueType.STRING)
+ .build(),
+ LabelDescriptor.newBuilder()
+ .setKey(StackdriverExportUtils.OPENCENSUS_TASK)
+ .setDescription(StackdriverExportUtils.OPENCENSUS_TASK_DESCRIPTION)
+ .setValueType(ValueType.STRING)
+ .build());
+ }
+
+ @Test
+ public void createMetricDescriptor_interval() {
+ View view =
+ View.create(
+ Name.create(VIEW_NAME),
+ VIEW_DESCRIPTION,
+ MEASURE_DOUBLE,
+ DISTRIBUTION,
+ Arrays.asList(KEY),
+ INTERVAL);
+ assertThat(
+ StackdriverExportUtils.createMetricDescriptor(
+ view, PROJECT_ID, CUSTOM_OPENCENSUS_DOMAIN, DEFAULT_DISPLAY_NAME_PREFIX))
+ .isNull();
+ }
+
+ @Test
+ public void createTimeSeriesList_cumulative() {
+ View view =
+ View.create(
+ Name.create(VIEW_NAME),
+ VIEW_DESCRIPTION,
+ MEASURE_DOUBLE,
+ DISTRIBUTION,
+ Arrays.asList(KEY),
+ CUMULATIVE);
+ DistributionData distributionData1 =
+ DistributionData.create(2, 3, 0, 5, 14, Arrays.asList(0L, 1L, 1L, 0L, 1L));
+ DistributionData distributionData2 =
+ DistributionData.create(-1, 1, -1, -1, 0, Arrays.asList(1L, 0L, 0L, 0L, 0L));
+ Map<List<TagValue>, DistributionData> aggregationMap =
+ ImmutableMap.of(
+ Arrays.asList(VALUE_1), distributionData1, Arrays.asList(VALUE_2), distributionData2);
+ CumulativeData cumulativeData =
+ CumulativeData.create(Timestamp.fromMillis(1000), Timestamp.fromMillis(2000));
+ ViewData viewData = ViewData.create(view, aggregationMap, cumulativeData);
+ List<TimeSeries> timeSeriesList =
+ StackdriverExportUtils.createTimeSeriesList(
+ viewData, DEFAULT_RESOURCE, CUSTOM_OPENCENSUS_DOMAIN);
+ assertThat(timeSeriesList).hasSize(2);
+ TimeSeries expected1 =
+ TimeSeries.newBuilder()
+ .setMetricKind(MetricKind.CUMULATIVE)
+ .setValueType(MetricDescriptor.ValueType.DISTRIBUTION)
+ .setMetric(
+ StackdriverExportUtils.createMetric(
+ view, Arrays.asList(VALUE_1), CUSTOM_OPENCENSUS_DOMAIN))
+ .setResource(MonitoredResource.newBuilder().setType("global"))
+ .addPoints(
+ StackdriverExportUtils.createPoint(distributionData1, cumulativeData, DISTRIBUTION))
+ .build();
+ TimeSeries expected2 =
+ TimeSeries.newBuilder()
+ .setMetricKind(MetricKind.CUMULATIVE)
+ .setValueType(MetricDescriptor.ValueType.DISTRIBUTION)
+ .setMetric(
+ StackdriverExportUtils.createMetric(
+ view, Arrays.asList(VALUE_2), CUSTOM_OPENCENSUS_DOMAIN))
+ .setResource(MonitoredResource.newBuilder().setType("global"))
+ .addPoints(
+ StackdriverExportUtils.createPoint(distributionData2, cumulativeData, DISTRIBUTION))
+ .build();
+ assertThat(timeSeriesList).containsExactly(expected1, expected2);
+ }
+
+ @Test
+ public void createTimeSeriesList_interval() {
+ View view =
+ View.create(
+ Name.create(VIEW_NAME),
+ VIEW_DESCRIPTION,
+ MEASURE_DOUBLE,
+ DISTRIBUTION,
+ Arrays.asList(KEY),
+ INTERVAL);
+ Map<List<TagValue>, DistributionData> aggregationMap =
+ ImmutableMap.of(
+ Arrays.asList(VALUE_1),
+ DistributionData.create(2, 3, 0, 5, 14, Arrays.asList(0L, 1L, 1L, 0L, 1L)),
+ Arrays.asList(VALUE_2),
+ DistributionData.create(-1, 1, -1, -1, 0, Arrays.asList(1L, 0L, 0L, 0L, 0L)));
+ ViewData viewData =
+ ViewData.create(view, aggregationMap, IntervalData.create(Timestamp.fromMillis(2000)));
+ assertThat(
+ StackdriverExportUtils.createTimeSeriesList(
+ viewData, DEFAULT_RESOURCE, CUSTOM_OPENCENSUS_DOMAIN))
+ .isEmpty();
+ }
+
+ @Test
+ public void createTimeSeriesList_withCustomMonitoredResource() {
+ MonitoredResource resource =
+ MonitoredResource.newBuilder().setType("global").putLabels("key", "value").build();
+ View view =
+ View.create(
+ Name.create(VIEW_NAME),
+ VIEW_DESCRIPTION,
+ MEASURE_DOUBLE,
+ SUM,
+ Arrays.asList(KEY),
+ CUMULATIVE);
+ SumDataDouble sumData = SumDataDouble.create(55.5);
+ Map<List<TagValue>, SumDataDouble> aggregationMap =
+ ImmutableMap.of(Arrays.asList(VALUE_1), sumData);
+ CumulativeData cumulativeData =
+ CumulativeData.create(Timestamp.fromMillis(1000), Timestamp.fromMillis(2000));
+ ViewData viewData = ViewData.create(view, aggregationMap, cumulativeData);
+ List<TimeSeries> timeSeriesList =
+ StackdriverExportUtils.createTimeSeriesList(viewData, resource, CUSTOM_OPENCENSUS_DOMAIN);
+ assertThat(timeSeriesList)
+ .containsExactly(
+ TimeSeries.newBuilder()
+ .setMetricKind(MetricKind.CUMULATIVE)
+ .setValueType(MetricDescriptor.ValueType.DOUBLE)
+ .setMetric(
+ StackdriverExportUtils.createMetric(
+ view, Arrays.asList(VALUE_1), CUSTOM_OPENCENSUS_DOMAIN))
+ .setResource(resource)
+ .addPoints(StackdriverExportUtils.createPoint(sumData, cumulativeData, SUM))
+ .build());
+ }
+}
diff --git a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorkerTest.java b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorkerTest.java
new file mode 100644
index 00000000..27593829
--- /dev/null
+++ b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverExporterWorkerTest.java
@@ -0,0 +1,310 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.stackdriver;
+
+import static com.google.common.truth.Truth.assertThat;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExporterWorker.CUSTOM_OPENCENSUS_DOMAIN;
+import static io.opencensus.exporter.stats.stackdriver.StackdriverExporterWorker.DEFAULT_DISPLAY_NAME_PREFIX;
+import static org.mockito.Matchers.any;
+import static org.mockito.Matchers.eq;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.doThrow;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+
+import com.google.api.MetricDescriptor;
+import com.google.api.MonitoredResource;
+import com.google.api.gax.rpc.UnaryCallable;
+import com.google.cloud.monitoring.v3.MetricServiceClient;
+import com.google.cloud.monitoring.v3.stub.MetricServiceStub;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.monitoring.v3.CreateMetricDescriptorRequest;
+import com.google.monitoring.v3.CreateTimeSeriesRequest;
+import com.google.monitoring.v3.TimeSeries;
+import com.google.protobuf.Empty;
+import io.opencensus.common.Duration;
+import io.opencensus.common.Timestamp;
+import io.opencensus.stats.Aggregation.Sum;
+import io.opencensus.stats.AggregationData;
+import io.opencensus.stats.AggregationData.SumDataLong;
+import io.opencensus.stats.Measure.MeasureLong;
+import io.opencensus.stats.View;
+import io.opencensus.stats.View.AggregationWindow.Cumulative;
+import io.opencensus.stats.View.AggregationWindow.Interval;
+import io.opencensus.stats.View.Name;
+import io.opencensus.stats.ViewData;
+import io.opencensus.stats.ViewData.AggregationWindowData.CumulativeData;
+import io.opencensus.stats.ViewManager;
+import io.opencensus.tags.TagKey;
+import io.opencensus.tags.TagValue;
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.List;
+import org.junit.Before;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+import org.mockito.Mock;
+import org.mockito.MockitoAnnotations;
+
+/** Unit tests for {@link StackdriverExporterWorker}. */
+@RunWith(JUnit4.class)
+public class StackdriverExporterWorkerTest {
+
+ private static final String PROJECT_ID = "projectId";
+ private static final Duration ONE_SECOND = Duration.create(1, 0);
+ private static final TagKey KEY = TagKey.create("KEY");
+ private static final TagValue VALUE = TagValue.create("VALUE");
+ private static final String MEASURE_NAME = "my measurement";
+ private static final String MEASURE_UNIT = "us";
+ private static final String MEASURE_DESCRIPTION = "measure description";
+ private static final MeasureLong MEASURE =
+ MeasureLong.create(MEASURE_NAME, MEASURE_DESCRIPTION, MEASURE_UNIT);
+ private static final Name VIEW_NAME = Name.create("my view");
+ private static final String VIEW_DESCRIPTION = "view description";
+ private static final Cumulative CUMULATIVE = Cumulative.create();
+ private static final Interval INTERVAL = Interval.create(ONE_SECOND);
+ private static final Sum SUM = Sum.create();
+ private static final MonitoredResource DEFAULT_RESOURCE =
+ MonitoredResource.newBuilder().setType("global").build();
+
+ @Mock private ViewManager mockViewManager;
+
+ @Mock private MetricServiceStub mockStub;
+
+ @Mock
+ private UnaryCallable<CreateMetricDescriptorRequest, MetricDescriptor>
+ mockCreateMetricDescriptorCallable;
+
+ @Mock private UnaryCallable<CreateTimeSeriesRequest, Empty> mockCreateTimeSeriesCallable;
+
+ @Before
+ public void setUp() {
+ MockitoAnnotations.initMocks(this);
+
+ doReturn(mockCreateMetricDescriptorCallable).when(mockStub).createMetricDescriptorCallable();
+ doReturn(mockCreateTimeSeriesCallable).when(mockStub).createTimeSeriesCallable();
+ doReturn(null)
+ .when(mockCreateMetricDescriptorCallable)
+ .call(any(CreateMetricDescriptorRequest.class));
+ doReturn(null).when(mockCreateTimeSeriesCallable).call(any(CreateTimeSeriesRequest.class));
+ }
+
+ @Test
+ public void testConstants() {
+ assertThat(StackdriverExporterWorker.MAX_BATCH_EXPORT_SIZE).isEqualTo(200);
+ assertThat(StackdriverExporterWorker.CUSTOM_METRIC_DOMAIN).isEqualTo("custom.googleapis.com/");
+ assertThat(StackdriverExporterWorker.CUSTOM_OPENCENSUS_DOMAIN)
+ .isEqualTo("custom.googleapis.com/opencensus/");
+ assertThat(StackdriverExporterWorker.DEFAULT_DISPLAY_NAME_PREFIX).isEqualTo("OpenCensus/");
+ }
+
+ @Test
+ public void export() throws IOException {
+ View view =
+ View.create(VIEW_NAME, VIEW_DESCRIPTION, MEASURE, SUM, Arrays.asList(KEY), CUMULATIVE);
+ ViewData viewData =
+ ViewData.create(
+ view,
+ ImmutableMap.of(Arrays.asList(VALUE), SumDataLong.create(1)),
+ CumulativeData.create(Timestamp.fromMillis(100), Timestamp.fromMillis(200)));
+ doReturn(ImmutableSet.of(view)).when(mockViewManager).getAllExportedViews();
+ doReturn(viewData).when(mockViewManager).getView(VIEW_NAME);
+
+ StackdriverExporterWorker worker =
+ new StackdriverExporterWorker(
+ PROJECT_ID,
+ new FakeMetricServiceClient(mockStub),
+ ONE_SECOND,
+ mockViewManager,
+ DEFAULT_RESOURCE,
+ null);
+ worker.export();
+
+ verify(mockStub, times(1)).createMetricDescriptorCallable();
+ verify(mockStub, times(1)).createTimeSeriesCallable();
+
+ MetricDescriptor descriptor =
+ StackdriverExportUtils.createMetricDescriptor(
+ view, PROJECT_ID, CUSTOM_OPENCENSUS_DOMAIN, DEFAULT_DISPLAY_NAME_PREFIX);
+ List<TimeSeries> timeSeries =
+ StackdriverExportUtils.createTimeSeriesList(
+ viewData, DEFAULT_RESOURCE, CUSTOM_OPENCENSUS_DOMAIN);
+ verify(mockCreateMetricDescriptorCallable, times(1))
+ .call(
+ eq(
+ CreateMetricDescriptorRequest.newBuilder()
+ .setName("projects/" + PROJECT_ID)
+ .setMetricDescriptor(descriptor)
+ .build()));
+ verify(mockCreateTimeSeriesCallable, times(1))
+ .call(
+ eq(
+ CreateTimeSeriesRequest.newBuilder()
+ .setName("projects/" + PROJECT_ID)
+ .addAllTimeSeries(timeSeries)
+ .build()));
+ }
+
+ @Test
+ public void doNotExportForEmptyViewData() {
+ View view =
+ View.create(VIEW_NAME, VIEW_DESCRIPTION, MEASURE, SUM, Arrays.asList(KEY), CUMULATIVE);
+ ViewData empty =
+ ViewData.create(
+ view,
+ Collections.<List<TagValue>, AggregationData>emptyMap(),
+ CumulativeData.create(Timestamp.fromMillis(100), Timestamp.fromMillis(200)));
+ doReturn(ImmutableSet.of(view)).when(mockViewManager).getAllExportedViews();
+ doReturn(empty).when(mockViewManager).getView(VIEW_NAME);
+
+ StackdriverExporterWorker worker =
+ new StackdriverExporterWorker(
+ PROJECT_ID,
+ new FakeMetricServiceClient(mockStub),
+ ONE_SECOND,
+ mockViewManager,
+ DEFAULT_RESOURCE,
+ null);
+
+ worker.export();
+ verify(mockStub, times(1)).createMetricDescriptorCallable();
+ verify(mockStub, times(0)).createTimeSeriesCallable();
+ }
+
+ @Test
+ public void doNotExportIfFailedToRegisterView() {
+ View view =
+ View.create(VIEW_NAME, VIEW_DESCRIPTION, MEASURE, SUM, Arrays.asList(KEY), CUMULATIVE);
+ doReturn(ImmutableSet.of(view)).when(mockViewManager).getAllExportedViews();
+ doThrow(new IllegalArgumentException()).when(mockStub).createMetricDescriptorCallable();
+ StackdriverExporterWorker worker =
+ new StackdriverExporterWorker(
+ PROJECT_ID,
+ new FakeMetricServiceClient(mockStub),
+ ONE_SECOND,
+ mockViewManager,
+ DEFAULT_RESOURCE,
+ null);
+
+ assertThat(worker.registerView(view)).isFalse();
+ worker.export();
+ verify(mockStub, times(1)).createMetricDescriptorCallable();
+ verify(mockStub, times(0)).createTimeSeriesCallable();
+ }
+
+ @Test
+ public void skipDifferentViewWithSameName() throws IOException {
+ StackdriverExporterWorker worker =
+ new StackdriverExporterWorker(
+ PROJECT_ID,
+ new FakeMetricServiceClient(mockStub),
+ ONE_SECOND,
+ mockViewManager,
+ DEFAULT_RESOURCE,
+ null);
+ View view1 =
+ View.create(VIEW_NAME, VIEW_DESCRIPTION, MEASURE, SUM, Arrays.asList(KEY), CUMULATIVE);
+ assertThat(worker.registerView(view1)).isTrue();
+ verify(mockStub, times(1)).createMetricDescriptorCallable();
+
+ View view2 =
+ View.create(
+ VIEW_NAME,
+ "This is a different description.",
+ MEASURE,
+ SUM,
+ Arrays.asList(KEY),
+ CUMULATIVE);
+ assertThat(worker.registerView(view2)).isFalse();
+ verify(mockStub, times(1)).createMetricDescriptorCallable();
+ }
+
+ @Test
+ public void doNotCreateMetricDescriptorForRegisteredView() {
+ StackdriverExporterWorker worker =
+ new StackdriverExporterWorker(
+ PROJECT_ID,
+ new FakeMetricServiceClient(mockStub),
+ ONE_SECOND,
+ mockViewManager,
+ DEFAULT_RESOURCE,
+ null);
+ View view =
+ View.create(VIEW_NAME, VIEW_DESCRIPTION, MEASURE, SUM, Arrays.asList(KEY), CUMULATIVE);
+ assertThat(worker.registerView(view)).isTrue();
+ verify(mockStub, times(1)).createMetricDescriptorCallable();
+
+ assertThat(worker.registerView(view)).isTrue();
+ verify(mockStub, times(1)).createMetricDescriptorCallable();
+ }
+
+ @Test
+ public void doNotCreateMetricDescriptorForIntervalView() {
+ StackdriverExporterWorker worker =
+ new StackdriverExporterWorker(
+ PROJECT_ID,
+ new FakeMetricServiceClient(mockStub),
+ ONE_SECOND,
+ mockViewManager,
+ DEFAULT_RESOURCE,
+ null);
+ View view =
+ View.create(VIEW_NAME, VIEW_DESCRIPTION, MEASURE, SUM, Arrays.asList(KEY), INTERVAL);
+ assertThat(worker.registerView(view)).isFalse();
+ verify(mockStub, times(0)).createMetricDescriptorCallable();
+ }
+
+ @Test
+ public void getDomain() {
+ assertThat(StackdriverExporterWorker.getDomain(null))
+ .isEqualTo("custom.googleapis.com/opencensus/");
+ assertThat(StackdriverExporterWorker.getDomain(""))
+ .isEqualTo("custom.googleapis.com/opencensus/");
+ assertThat(StackdriverExporterWorker.getDomain("custom.googleapis.com/myorg/"))
+ .isEqualTo("custom.googleapis.com/myorg/");
+ assertThat(StackdriverExporterWorker.getDomain("external.googleapis.com/prometheus/"))
+ .isEqualTo("external.googleapis.com/prometheus/");
+ assertThat(StackdriverExporterWorker.getDomain("myorg")).isEqualTo("myorg/");
+ }
+
+ @Test
+ public void getDisplayNamePrefix() {
+ assertThat(StackdriverExporterWorker.getDisplayNamePrefix(null)).isEqualTo("OpenCensus/");
+ assertThat(StackdriverExporterWorker.getDisplayNamePrefix("")).isEqualTo("");
+ assertThat(StackdriverExporterWorker.getDisplayNamePrefix("custom.googleapis.com/myorg/"))
+ .isEqualTo("custom.googleapis.com/myorg/");
+ assertThat(
+ StackdriverExporterWorker.getDisplayNamePrefix("external.googleapis.com/prometheus/"))
+ .isEqualTo("external.googleapis.com/prometheus/");
+ assertThat(StackdriverExporterWorker.getDisplayNamePrefix("myorg")).isEqualTo("myorg/");
+ }
+
+ /*
+ * MetricServiceClient.createMetricDescriptor() and MetricServiceClient.createTimeSeries() are
+ * final methods and cannot be mocked. We have to use a mock MetricServiceStub in order to verify
+ * the output.
+ */
+ private static final class FakeMetricServiceClient extends MetricServiceClient {
+
+ protected FakeMetricServiceClient(MetricServiceStub stub) {
+ super(stub);
+ }
+ }
+}
diff --git a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfigurationTest.java b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfigurationTest.java
new file mode 100644
index 00000000..2d5eba1b
--- /dev/null
+++ b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsConfigurationTest.java
@@ -0,0 +1,72 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.stackdriver;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import com.google.api.MonitoredResource;
+import com.google.auth.Credentials;
+import com.google.auth.oauth2.AccessToken;
+import com.google.auth.oauth2.GoogleCredentials;
+import io.opencensus.common.Duration;
+import java.util.Date;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link StackdriverStatsConfiguration}. */
+@RunWith(JUnit4.class)
+public class StackdriverStatsConfigurationTest {
+
+ private static final Credentials FAKE_CREDENTIALS =
+ GoogleCredentials.newBuilder().setAccessToken(new AccessToken("fake", new Date(100))).build();
+ private static final String PROJECT_ID = "project";
+ private static final Duration DURATION = Duration.create(10, 0);
+ private static final MonitoredResource RESOURCE =
+ MonitoredResource.newBuilder()
+ .setType("gce-instance")
+ .putLabels("instance-id", "instance")
+ .build();
+ private static final String CUSTOM_PREFIX = "myorg";
+
+ @Test
+ public void testBuild() {
+ StackdriverStatsConfiguration configuration =
+ StackdriverStatsConfiguration.builder()
+ .setCredentials(FAKE_CREDENTIALS)
+ .setProjectId(PROJECT_ID)
+ .setExportInterval(DURATION)
+ .setMonitoredResource(RESOURCE)
+ .setMetricNamePrefix(CUSTOM_PREFIX)
+ .build();
+ assertThat(configuration.getCredentials()).isEqualTo(FAKE_CREDENTIALS);
+ assertThat(configuration.getProjectId()).isEqualTo(PROJECT_ID);
+ assertThat(configuration.getExportInterval()).isEqualTo(DURATION);
+ assertThat(configuration.getMonitoredResource()).isEqualTo(RESOURCE);
+ assertThat(configuration.getMetricNamePrefix()).isEqualTo(CUSTOM_PREFIX);
+ }
+
+ @Test
+ public void testBuild_Default() {
+ StackdriverStatsConfiguration configuration = StackdriverStatsConfiguration.builder().build();
+ assertThat(configuration.getCredentials()).isNull();
+ assertThat(configuration.getProjectId()).isNull();
+ assertThat(configuration.getExportInterval()).isNull();
+ assertThat(configuration.getMonitoredResource()).isNull();
+ assertThat(configuration.getMetricNamePrefix()).isNull();
+ }
+}
diff --git a/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporterTest.java b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporterTest.java
new file mode 100644
index 00000000..f5e3edd5
--- /dev/null
+++ b/exporters/stats/stackdriver/src/test/java/io/opencensus/exporter/stats/stackdriver/StackdriverStatsExporterTest.java
@@ -0,0 +1,129 @@
+/*
+ * Copyright 2017, OpenCensus Authors
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package io.opencensus.exporter.stats.stackdriver;
+
+import static com.google.common.truth.Truth.assertThat;
+
+import com.google.auth.Credentials;
+import com.google.auth.oauth2.AccessToken;
+import com.google.auth.oauth2.GoogleCredentials;
+import io.opencensus.common.Duration;
+import java.io.IOException;
+import java.util.Date;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+/** Unit tests for {@link StackdriverStatsExporter}. */
+@RunWith(JUnit4.class)
+public class StackdriverStatsExporterTest {
+
+ private static final String PROJECT_ID = "projectId";
+ private static final Duration ONE_SECOND = Duration.create(1, 0);
+ private static final Duration NEG_ONE_SECOND = Duration.create(-1, 0);
+ private static final Credentials FAKE_CREDENTIALS =
+ GoogleCredentials.newBuilder().setAccessToken(new AccessToken("fake", new Date(100))).build();
+ private static final StackdriverStatsConfiguration CONFIGURATION =
+ StackdriverStatsConfiguration.builder()
+ .setCredentials(FAKE_CREDENTIALS)
+ .setProjectId("project")
+ .build();
+
+ @Rule public final ExpectedException thrown = ExpectedException.none();
+
+ @Test
+ public void testConstants() {
+ assertThat(StackdriverStatsExporter.DEFAULT_INTERVAL).isEqualTo(Duration.create(60, 0));
+ }
+
+ @Test
+ public void createWithNullStackdriverStatsConfiguration() throws IOException {
+ thrown.expect(NullPointerException.class);
+ thrown.expectMessage("configuration");
+ StackdriverStatsExporter.createAndRegister((StackdriverStatsConfiguration) null);
+ }
+
+ @Test
+ public void createWithNegativeDuration_WithConfiguration() throws IOException {
+ StackdriverStatsConfiguration configuration =
+ StackdriverStatsConfiguration.builder()
+ .setCredentials(FAKE_CREDENTIALS)
+ .setExportInterval(NEG_ONE_SECOND)
+ .build();
+ thrown.expect(IllegalArgumentException.class);
+ thrown.expectMessage("Duration must be positive");
+ StackdriverStatsExporter.createAndRegister(configuration);
+ }
+
+ @Test
+ @SuppressWarnings("deprecation")
+ public void createWithNullCredentials() throws IOException {
+ thrown.expect(NullPointerException.class);
+ thrown.expectMessage("credentials");
+ StackdriverStatsExporter.createAndRegisterWithCredentialsAndProjectId(
+ null, PROJECT_ID, ONE_SECOND);
+ }
+
+ @Test
+ @SuppressWarnings("deprecation")
+ public void createWithNullProjectId() throws IOException {
+ thrown.expect(NullPointerException.class);
+ thrown.expectMessage("projectId");
+ StackdriverStatsExporter.createAndRegisterWithCredentialsAndProjectId(
+ GoogleCredentials.newBuilder().build(), null, ONE_SECOND);
+ }
+
+ @Test
+ @SuppressWarnings("deprecation")
+ public void createWithNullDuration() throws IOException {
+ thrown.expect(NullPointerException.class);
+ thrown.expectMessage("exportInterval");
+ StackdriverStatsExporter.createAndRegisterWithCredentialsAndProjectId(
+ GoogleCredentials.newBuilder().build(), PROJECT_ID, null);
+ }
+
+ @Test
+ @SuppressWarnings("deprecation")
+ public void createWithNegativeDuration() throws IOException {
+ thrown.expect(IllegalArgumentException.class);
+ thrown.expectMessage("Duration must be positive");
+ StackdriverStatsExporter.createAndRegisterWithCredentialsAndProjectId(
+ GoogleCredentials.newBuilder().build(), PROJECT_ID, NEG_ONE_SECOND);
+ }
+
+ @Test
+ public void createExporterTwice() throws IOException {
+ StackdriverStatsExporter.createAndRegister(CONFIGURATION);
+ try {
+ thrown.expect(IllegalStateException.class);
+ thrown.expectMessage("Stackdriver stats exporter is already created.");
+ StackdriverStatsExporter.createAndRegister(CONFIGURATION);
+ } finally {
+ StackdriverStatsExporter.unsafeResetExporter();
+ }
+ }
+
+ @Test
+ @SuppressWarnings("deprecation")
+ public void createWithNullMonitoredResource() throws IOException {
+ thrown.expect(NullPointerException.class);
+ thrown.expectMessage("monitoredResource");
+ StackdriverStatsExporter.createAndRegisterWithMonitoredResource(ONE_SECOND, null);
+ }
+}