diff --git a/.github/workflows/elixir.yml b/.github/workflows/elixir.yml
index e498252b..97595912 100644
--- a/.github/workflows/elixir.yml
+++ b/.github/workflows/elixir.yml
@@ -10,15 +10,15 @@ on:
jobs:
format:
- runs-on: ubuntu-20.04
+ runs-on: ubuntu-22.04
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
- otp-version: '25.0'
- elixir-version: '1.13.4'
+ otp-version: '26.1.2'
+ elixir-version: '1.15.7'
version-type: 'strict'
- - uses: actions/cache@v2
+ - uses: actions/cache@v4
name: Cache
with:
path: |
@@ -34,22 +34,24 @@ jobs:
name: Test SDK on Elixir ${{ matrix.elixir_version }} (OTP ${{ matrix.otp_version }}) and ${{ matrix.os }}
strategy:
matrix:
- otp_version: ['25.0.3', '24.1.2']
- elixir_version: ['1.14', '1.13.4']
- rebar3_version: ['3.20.0']
+ otp_version: ['26.1.2', '24.3.4.14']
+ elixir_version: ['1.15.7', '1.12.3']
+ rebar3_version: ['3.22.1']
os: [ubuntu-20.04]
+ exclude:
+ - elixir_version: "1.12.3"
+          otp_version: "26.1.2"
env:
OTP_VERSION: ${{ matrix.otp_version }}
ELIXIR_VERSION: ${{ matrix.elixir_version }}
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ matrix.otp_version }}
elixir-version: ${{ matrix.elixir_version }}
rebar3-version: ${{ matrix.rebar3_version }}
version-type: 'strict'
- - run: mix local.rebar --force rebar3 /home/runner/work/_temp/.setup-beam/rebar3/bin/rebar3
- name: Compile
run: rebar3 as test compile
- name: ExUnit
@@ -60,10 +62,13 @@ jobs:
name: Test API on Elixir ${{ matrix.elixir_version }} (OTP ${{ matrix.otp_version }}) and ${{ matrix.os }}
strategy:
matrix:
- otp_version: ['25.0.3', '24.1.2']
- elixir_version: ['1.14', '1.13.4']
- rebar3_version: ['3.20.0']
+ otp_version: ['26.1.2', '24.3.4.14']
+ elixir_version: ['1.15.7', '1.12.3']
+ rebar3_version: ['3.22.1']
os: [ubuntu-20.04]
+ exclude:
+ - elixir_version: "1.12.3"
+          otp_version: "26.1.2"
env:
OTP_VERSION: ${{ matrix.otp_version }}
ELIXIR_VERSION: ${{ matrix.elixir_version }}
@@ -71,14 +76,14 @@ jobs:
run:
working-directory: apps/opentelemetry_api/
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ matrix.otp_version }}
elixir-version: ${{ matrix.elixir_version }}
rebar3-version: ${{ matrix.rebar3_version }}
version-type: 'strict'
- - uses: actions/cache@v2
+ - uses: actions/cache@v4
name: Cache
with:
path: |
@@ -87,12 +92,11 @@ jobs:
key: ${{ runner.os }}-build-${{ matrix.otp_version }}-${{ matrix.elixir_version }}-v4-${{ hashFiles(format('{0}{1}', github.workspace, '/apps/opentelemetry_api/mix.lock')) }}
restore-keys: |
${{ runner.os }}-build-${{ matrix.otp_version }}-${{ matrix.elixir_version }}-
- - run: mix local.rebar --force rebar3 /home/runner/work/_temp/.setup-beam/rebar3/bin/rebar3
- run: mix deps.get
name: Deps
- run: mix test --cover
name: ExUnit
- - uses: codecov/codecov-action@v3
+ - uses: codecov/codecov-action@v4
if: ${{ always() }}
with:
file: apps/opentelemetry_api/coverage.xml
@@ -104,8 +108,9 @@ jobs:
name: Dialyze on Elixir ${{ matrix.elixir_version }} (OTP ${{ matrix.otp_version }}) and ${{ matrix.os }}
strategy:
matrix:
- otp_version: ['25.0']
- elixir_version: ['1.14']
+ otp_version: ['26.1.2']
+ elixir_version: ['1.15.7']
+        rebar3_version: ['3.22.1']
os: [ubuntu-20.04]
env:
OTP_VERSION: ${{ matrix.otp_version }}
@@ -114,13 +119,13 @@ jobs:
run:
working-directory: apps/opentelemetry_api/
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ matrix.otp_version }}
elixir-version: ${{ matrix.elixir_version }}
version-type: 'strict'
- - uses: actions/cache@v2
+ - uses: actions/cache@v4
name: Cache
with:
path: |
diff --git a/.github/workflows/erlang.yml b/.github/workflows/erlang.yml
index ae2e6bce..b4b3faa0 100644
--- a/.github/workflows/erlang.yml
+++ b/.github/workflows/erlang.yml
@@ -18,17 +18,17 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- otp_version: ['26.0', '25.0.3', '24.1.2']
+ otp_version: ['26.1.2', '25.3.2.7', '24.3.4.14']
rebar3_version: ['3.22.1']
os: [ubuntu-20.04]
include:
- - otp_version: '23.3'
+ - otp_version: '23.3.4.19'
rebar3_version: '3.17.0'
os: ubuntu-20.04
env:
OTP_VERSION: ${{ matrix.otp_version }}
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- name: Run Collector
run: docker-compose up -d
- uses: erlef/setup-beam@v1
@@ -36,7 +36,7 @@ jobs:
otp-version: ${{ matrix.otp_version }}
rebar3-version: ${{ matrix.rebar3_version }}
version-type: 'strict'
- - uses: actions/cache@v2
+ - uses: actions/cache@v4
name: Cache
with:
path: |
@@ -52,7 +52,7 @@ jobs:
run: rebar3 ct --cover
- name: Publish Test Report
- uses: mikepenz/action-junit-report@v3
+ uses: mikepenz/action-junit-report@v4
if: success() || failure() # always run even if the previous step fails
with:
report_paths: '**/_build/test/logs/*/junit_report.xml'
@@ -62,25 +62,25 @@ jobs:
- name: Covertool
if: ${{ always() }}
run: rebar3 covertool generate
- - uses: codecov/codecov-action@v3
+ - uses: codecov/codecov-action@v4
if: ${{ always() }}
with:
file: _build/test/covertool/opentelemetry.covertool.xml
env_vars: OTP_VERSION
flags: sdk,erlang
- - uses: codecov/codecov-action@v3
+ - uses: codecov/codecov-action@v4
if: ${{ always() }}
with:
file: _build/test/covertool/opentelemetry_api.covertool.xml
env_vars: OTP_VERSION
flags: api,erlang
- - uses: codecov/codecov-action@v3
+ - uses: codecov/codecov-action@v4
if: ${{ always() }}
with:
file: _build/test/covertool/opentelemetry_exporter.covertool.xml
env_vars: OTP_VERSION
flags: exporter,erlang
- - uses: codecov/codecov-action@v3
+ - uses: codecov/codecov-action@v4
if: ${{ always() }}
with:
file: _build/test/covertool/opentelemetry_zipkin.covertool.xml
@@ -91,17 +91,17 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- otp_version: ['26.0']
+ otp_version: ['26.1.2']
rebar3_version: ['3.22.1']
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ matrix.otp_version }}
rebar3-version: ${{ matrix.rebar3_version }}
version-type: 'strict'
- - uses: actions/cache@v2
+ - uses: actions/cache@v4
name: Cache
with:
path: |
@@ -119,17 +119,17 @@ jobs:
runs-on: ${{ matrix.os }}
strategy:
matrix:
- otp_version: ['26.0']
+ otp_version: ['26.1.2']
rebar3_version: ['3.22.1']
os: [ubuntu-20.04]
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
otp-version: ${{ matrix.otp_version }}
rebar3-version: ${{ matrix.rebar3_version }}
version-type: 'strict'
- - uses: actions/cache@v2
+ - uses: actions/cache@v4
name: Cache
with:
path: |
diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml
index e0137549..6e2f46ab 100644
--- a/.github/workflows/labeler.yml
+++ b/.github/workflows/labeler.yml
@@ -5,6 +5,6 @@ jobs:
triage:
runs-on: ubuntu-latest
steps:
- - uses: actions/labeler@main
+ - uses: actions/labeler@v4
with:
repo-token: "${{ secrets.GITHUB_TOKEN }}"
diff --git a/.github/workflows/w3c_interop.yml b/.github/workflows/w3c_interop.yml
index 85dd5955..9fc53352 100644
--- a/.github/workflows/w3c_interop.yml
+++ b/.github/workflows/w3c_interop.yml
@@ -11,15 +11,15 @@ on:
jobs:
interop_tests:
name: Run W3C Trace Context Interop Tests
- runs-on: ubuntu-20.04
- env:
- OTP_VERSION:
+ runs-on: ubuntu-22.04
steps:
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
- uses: erlef/setup-beam@v1
with:
- otp-version: '26.0'
- - uses: actions/cache@v2
+ otp-version: '26.1.2'
+          version-type: 'strict'
+ rebar3-version: '3.22.1'
+ - uses: actions/cache@v4
name: Cache
with:
path: |
@@ -31,14 +31,14 @@ jobs:
- name: Compile
run: rebar3 as interop compile
- - uses: actions/checkout@v2
+ - uses: actions/checkout@v4
with:
repository: 'w3c/trace-context'
path: 'trace-context'
- name: Setup python
- uses: actions/setup-python@v2
+ uses: actions/setup-python@v5
with:
- python-version: 3.6
+          python-version: '3.12'
architecture: x64
- run: pip install aiohttp
- run: |
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d0cd8b70..d756ea81 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -25,11 +25,18 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- [Attributes module `otel_attributes` moved to
API](https://github.com/open-telemetry/opentelemetry-erlang/pull/618)
+### Fixes
+
+- [Fix leak of atoms/persistent terms by creating unique processor name in `otel_tracer_server`](https://github.com/open-telemetry/opentelemetry-erlang/pull/646)
+
## Experimental API
### Changes
-- [Allow to create observable instruments without passing callback arguments](https://github.com/open-telemetry/opentelemetry-erlang/pull/604)
+- [Allow to create observable instruments without passing callback
+ arguments](https://github.com/open-telemetry/opentelemetry-erlang/pull/604)
+- [Allow to give `advisory_params` to instrument creation functions](https://github.com/open-telemetry/opentelemetry-erlang/pull/628)
+- [Attributes are optional in Counter.add(), UpDownCounter.add() and Histo.record()](https://github.com/open-telemetry/opentelemetry-erlang/pull/632)
## Experimental SDK
@@ -37,6 +44,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- [Add `instrument_unit` to view criteria](https://github.com/open-telemetry/opentelemetry-erlang/pull/604)
- [Validate instrument name](https://github.com/open-telemetry/opentelemetry-erlang/pull/604)
+- [Handle `explicit_bucket_boundaries` advisory parameter](https://github.com/open-telemetry/opentelemetry-erlang/pull/628)
+- [Rename `boundaries` to `explicit_bucket_boundaries` in histogram explicit aggregation options](https://github.com/open-telemetry/opentelemetry-erlang/pull/628)
+- [Allow creating wildcard views](https://github.com/open-telemetry/opentelemetry-erlang/pull/624)
### Changes
@@ -46,6 +56,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
- [Correctly record histogram values greater than last boundary](https://github.com/open-telemetry/opentelemetry-erlang/pull/614)
- [Readers should use a default cumulative temporality if not specified](https://github.com/open-telemetry/opentelemetry-erlang/pull/613)
+ - [Check for positive data values in counters and histograms](https://github.com/open-telemetry/opentelemetry-erlang/pull/632)
## SDK 1.3.1 - 2023-08-15
diff --git a/apps/opentelemetry/src/otel_batch_processor.erl b/apps/opentelemetry/src/otel_batch_processor.erl
index d3ad675f..1686fe84 100644
--- a/apps/opentelemetry/src/otel_batch_processor.erl
+++ b/apps/opentelemetry/src/otel_batch_processor.erl
@@ -12,14 +12,19 @@
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
-%% @doc The Batch Span Processor implements the `otel_span_processor'
-%% behaviour. It stores finished Spans in a ETS table buffer and exports
+%% @doc The Batch Span Processor implements the {@link otel_span_processor}
+%% behaviour.
+%%
+%% It stores finished Spans in a ETS table buffer and exports
%% them on an interval or when the table reaches a maximum size.
%%
-%% Timeouts:
-%% exporting_timeout_ms: How long to let the exports run before killing.
-%% check_table_size_ms: Timeout to check the size of the export table.
-%% scheduled_delay_ms: How often to trigger running the exporters.
+%% You can configure these timeouts:
+%%
+%%
+%% - `exporting_timeout_ms': how long to let the exports run before killing.
+%% - `check_table_size_ms': timeout to check the size of the export table.
+%% - `scheduled_delay_ms': how often to trigger running the exporters.
+%%
%%
%% The size limit of the current table where finished spans are stored can
%% be configured with the `max_queue_size' option.
@@ -76,6 +81,7 @@ start_link(#{name := Name} = Config) ->
Err
end.
+%% @private
-spec on_start(otel_ctx:t(), opentelemetry:span(), otel_span_processor:processor_config())
-> opentelemetry:span().
on_start(_Ctx, Span, _) ->
diff --git a/apps/opentelemetry/src/otel_sampler_trace_id_ratio_based.erl b/apps/opentelemetry/src/otel_sampler_trace_id_ratio_based.erl
index 73f40b88..dc23f124 100644
--- a/apps/opentelemetry/src/otel_sampler_trace_id_ratio_based.erl
+++ b/apps/opentelemetry/src/otel_sampler_trace_id_ratio_based.erl
@@ -39,7 +39,7 @@
setup(Probability) ->
IdUpperBound =
case Probability of
- P when P =:= 0.0 ->
+ P when P == 0.0 ->
0;
P when P =:= 1.0 ->
?MAX_VALUE;
diff --git a/apps/opentelemetry/src/otel_simple_processor.erl b/apps/opentelemetry/src/otel_simple_processor.erl
index 0f896e00..92ddc45a 100644
--- a/apps/opentelemetry/src/otel_simple_processor.erl
+++ b/apps/opentelemetry/src/otel_simple_processor.erl
@@ -65,17 +65,13 @@
-define(DEFAULT_EXPORTER_TIMEOUT_MS, timer:minutes(5)).
-define(NAME_TO_ATOM(Name, Unique), list_to_atom(lists:concat([Name, "_", Unique]))).
-start_link(Config) ->
- Name = case maps:find(name, Config) of
- {ok, N} ->
- N;
- error ->
- %% use a unique reference to distiguish multiple batch processors while
- %% still having a single name, instead of a possibly changing pid, to
- %% communicate with the processor
- erlang:ref_to_list(erlang:make_ref())
- end,
-
+%% require a unique name to distinguish multiple simple processors while
+%% still having a single name, instead of a possibly changing pid, to
+%% communicate with the processor
+%% @doc Starts a Simple Span Processor.
+%% @end
+-spec start_link(#{name := atom() | list()}) -> {ok, pid(), map()}.
+start_link(Config=#{name := Name}) ->
RegisterName = ?NAME_TO_ATOM(?MODULE, Name),
Config1 = Config#{reg_name => RegisterName},
{ok, Pid} = gen_statem:start_link({local, RegisterName}, ?MODULE, [Config1], []),
@@ -88,20 +84,20 @@ set_exporter(Exporter) ->
%% @deprecated Please use {@link otel_tracer_provider}
-spec set_exporter(module(), term()) -> ok.
set_exporter(Exporter, Options) ->
- %% eqwalizer:ignore doesn't like gen_`statem:call' returns `term()'
gen_statem:call(?REG_NAME(global), {set_exporter, {Exporter, Options}}).
%% @deprecated Please use {@link otel_tracer_provider}
-spec set_exporter(atom(), module(), term()) -> ok.
set_exporter(Name, Exporter, Options) ->
- %% eqwalizer:ignore doesn't like `gen_statem:call' returns `term()'
gen_statem:call(?REG_NAME(Name), {set_exporter, {Exporter, Options}}).
+%% @private
-spec on_start(otel_ctx:t(), opentelemetry:span(), otel_span_processor:processor_config())
-> opentelemetry:span().
on_start(_Ctx, Span, _) ->
Span.
+%% @private
-spec on_end(opentelemetry:span(), otel_span_processor:processor_config())
-> true | dropped | {error, invalid_span} | {error, no_export_buffer}.
on_end(#span{trace_flags=TraceFlags}, _) when not(?IS_SAMPLED(TraceFlags)) ->
@@ -111,6 +107,7 @@ on_end(Span=#span{}, #{reg_name := RegName}) ->
on_end(_Span, _) ->
{error, invalid_span}.
+%% @private
-spec force_flush(#{reg_name := gen_statem:server_ref()}) -> ok.
force_flush(#{reg_name := RegName}) ->
gen_statem:cast(RegName, force_flush).
@@ -137,9 +134,11 @@ init([#{reg_name := RegName}=Args]) ->
reg_name=RegName},
[{next_event, internal, init_exporter}]}.
+%% @private
callback_mode() ->
state_functions.
+%% @private
idle({call, From}, {export, _Span}, #data{exporter=undefined}) ->
{keep_state_and_data, [{reply, From, dropped}]};
idle({call, From}, {export, Span}, Data) ->
@@ -147,6 +146,7 @@ idle({call, From}, {export, Span}, Data) ->
idle(EventType, Event, Data) ->
handle_event_(idle, EventType, Event, Data).
+%% @private
exporting({call, _From}, {export, _}, _) ->
{keep_state_and_data, [postpone]};
exporting(internal, {export, From, Span}, Data=#data{exporting_timeout_ms=ExportingTimeout}) ->
@@ -203,7 +203,7 @@ kill_runner(Data=#data{runner_pid=RunnerPid}) when RunnerPid =/= undefined ->
Mon = erlang:monitor(process, RunnerPid),
erlang:unlink(RunnerPid),
erlang:exit(RunnerPid, kill),
- %% Wait for the runner process terminatation to be sure that
+ %% Wait for the runner process termination to be sure that
%% the export table is destroyed and can be safely recreated
receive
{'DOWN', Mon, process, RunnerPid, _} ->
@@ -261,6 +261,7 @@ export({ExporterModule, Config}, Resource, SpansTid) ->
end.
%% logger format functions
+%% @private
report_cb(#{source := exporter,
during := export,
kind := Kind,
diff --git a/apps/opentelemetry/src/otel_span_processor.erl b/apps/opentelemetry/src/otel_span_processor.erl
index 1b4280d1..54213612 100644
--- a/apps/opentelemetry/src/otel_span_processor.erl
+++ b/apps/opentelemetry/src/otel_span_processor.erl
@@ -26,6 +26,7 @@
-callback processor_init(pid(), processor_config()) -> processor_config().
-callback on_start(otel_ctx:t(), opentelemetry:span(), processor_config()) -> opentelemetry:span().
+
-callback on_end(opentelemetry:span(), processor_config()) -> true |
dropped |
{error, term()}.
@@ -34,6 +35,13 @@
-optional_callbacks([processor_init/2]).
+%% @doc Starts a span processor.
+%%
+%% `Module' must implement the `otel_span_processor' behaviour. This function
+%% calls `Module:start_link/1' with `Config' as the argument.
+%% @end
+-spec start_link(module(), Config) -> {ok, pid(), Config} | {error, term()} when
+ Config :: processor_config().
start_link(Module, Config) ->
case Module:start_link(Config) of
{ok, Pid} ->
diff --git a/apps/opentelemetry/src/otel_span_processor_sup.erl b/apps/opentelemetry/src/otel_span_processor_sup.erl
index 93e9cc1b..dcf78ba5 100644
--- a/apps/opentelemetry/src/otel_span_processor_sup.erl
+++ b/apps/opentelemetry/src/otel_span_processor_sup.erl
@@ -12,7 +12,7 @@
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
-%% @doc
+%% @private
%% @end
%%%-------------------------------------------------------------------------
-module(otel_span_processor_sup).
diff --git a/apps/opentelemetry/src/otel_tracer_server.erl b/apps/opentelemetry/src/otel_tracer_server.erl
index 4b536906..00e367e5 100644
--- a/apps/opentelemetry/src/otel_tracer_server.erl
+++ b/apps/opentelemetry/src/otel_tracer_server.erl
@@ -159,7 +159,14 @@ init_processor(SpanProcessorSup, ProcessorModule, Config) ->
%% start_link is an optional callback for processors
case lists:member({start_link, 1}, ProcessorModule:module_info(exports)) of
true ->
- try supervisor:start_child(SpanProcessorSup, [ProcessorModule, Config]) of
+ try supervisor:start_child(SpanProcessorSup,
+ [ProcessorModule,
+                                 %% use a unique reference to distinguish multiple processors of the same type while
+ %% still having a single name, instead of a possibly changing pid, to
+ %% communicate with the processor
+ maps:merge(#{name => erlang:ref_to_list(erlang:make_ref())},
+ Config)])
+ of
{ok, _Pid, Config1} ->
{true, {ProcessorModule, Config1}};
{error, Reason} ->
diff --git a/apps/opentelemetry/test/otel_batch_processor_SUITE.erl b/apps/opentelemetry/test/otel_batch_processor_SUITE.erl
index 95e2d4a7..29873a7a 100644
--- a/apps/opentelemetry/test/otel_batch_processor_SUITE.erl
+++ b/apps/opentelemetry/test/otel_batch_processor_SUITE.erl
@@ -19,10 +19,10 @@ exporting_timeout_test(_Config) ->
process_flag(trap_exit, true),
{ok, Pid, _} = otel_batch_processor:start_link(#{name => test_processor,
- resource => otel_resource:create([]),
- exporter => ?MODULE,
- exporting_timeout_ms => 1,
- scheduled_delay_ms => 1}),
+ resource => otel_resource:create([]),
+ exporter => ?MODULE,
+ exporting_timeout_ms => 1,
+ scheduled_delay_ms => 1}),
receive
{'EXIT', Pid, _} ->
diff --git a/apps/opentelemetry/test/otel_configuration_SUITE.erl b/apps/opentelemetry/test/otel_configuration_SUITE.erl
index 4f292c12..e021c45b 100644
--- a/apps/opentelemetry/test/otel_configuration_SUITE.erl
+++ b/apps/opentelemetry/test/otel_configuration_SUITE.erl
@@ -239,7 +239,7 @@ sampler_parent_based_one(_Config) ->
ok.
sampler_parent_based_zero(_Config) ->
- ?assertMatch({parent_based, #{root := {trace_id_ratio_based, 0.0}}},
+ ?assertMatch({parent_based, #{root := {trace_id_ratio_based, +0.0}}},
maps:get(sampler, otel_configuration:merge_with_os([]))),
ok.
diff --git a/apps/opentelemetry_api/mix.exs b/apps/opentelemetry_api/mix.exs
index b1f7af1d..fe282710 100644
--- a/apps/opentelemetry_api/mix.exs
+++ b/apps/opentelemetry_api/mix.exs
@@ -50,7 +50,7 @@ defmodule OpenTelemetry.MixProject do
end
defp load_app do
- {:ok, [{:application, name, desc}]} = :file.consult('src/opentelemetry_api.app.src')
+ {:ok, [{:application, name, desc}]} = :file.consult(~c"src/opentelemetry_api.app.src")
{name, desc}
end
diff --git a/apps/opentelemetry_api/mix.lock b/apps/opentelemetry_api/mix.lock
index badaab35..34024f8e 100644
--- a/apps/opentelemetry_api/mix.lock
+++ b/apps/opentelemetry_api/mix.lock
@@ -1,7 +1,7 @@
%{
"cmark": {:hex, :cmark, "0.10.0", "51217dc90fec459f34a30ea73345e6a7c1d2f3d618cb4a1738a2ebd0697a57a0", [:make, :mix], [{:elixir_make, "~> 0.6", [hex: :elixir_make, repo: "hexpm", optional: false]}], "hexpm", "00abeadc6f3176e3941853122413082de95d57787777dd0400d64e568acf7c32"},
- "covertool": {:hex, :covertool, "2.0.3", "5d1ca6958482b9b7e718daf61f398e382426ed0f4689d5c8698a60ae3b5ba521", [:rebar3], [], "hexpm", "5c13170a55dbd6bd9efc722bc7fa32caff6f3c9cde9c692bd4a88bfc9ac4f029"},
- "dialyxir": {:hex, :dialyxir, "1.3.0", "fd1672f0922b7648ff9ce7b1b26fcf0ef56dda964a459892ad15f6b4410b5284", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "00b2a4bcd6aa8db9dcb0b38c1225b7277dca9bc370b6438715667071a304696f"},
+ "covertool": {:hex, :covertool, "2.0.6", "4a291b4e3449025b0595d8f44c8d7635d4f48f033be2ce88d22a329f36f94a91", [:rebar3], [], "hexpm", "5db3fcd82180d8ea4ad857d4d1ab21a8d31b5aee0d60d2f6c0f9e25a411d1e21"},
+ "dialyxir": {:hex, :dialyxir, "1.4.2", "764a6e8e7a354f0ba95d58418178d486065ead1f69ad89782817c296d0d746a5", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "516603d8067b2fd585319e4b13d3674ad4f314a5902ba8130cd97dc902ce6bbd"},
"earmark": {:hex, :earmark, "1.4.14", "d04572cef64dd92726a97d92d714e38d6e130b024ea1b3f8a56e7de66ec04e50", [:mix], [{:earmark_parser, ">= 1.4.12", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "df338b8b1852ee425180b276c56c6941cb12220e04fe8718fe4acbdd35fd699f"},
"earmark_parser": {:hex, :earmark_parser, "1.4.19", "de0d033d5ff9fc396a24eadc2fcf2afa3d120841eb3f1004d138cbf9273210e8", [:mix], [], "hexpm", "527ab6630b5c75c3a3960b75844c314ec305c76d9899bb30f71cb85952a9dc45"},
"elixir_make": {:hex, :elixir_make, "0.6.2", "7dffacd77dec4c37b39af867cedaabb0b59f6a871f89722c25b28fcd4bd70530", [:mix], [], "hexpm", "03e49eadda22526a7e5279d53321d1cced6552f344ba4e03e619063de75348d9"},
diff --git a/apps/opentelemetry_api/src/opentelemetry.erl b/apps/opentelemetry_api/src/opentelemetry.erl
index 854c944d..653ddd15 100644
--- a/apps/opentelemetry_api/src/opentelemetry.erl
+++ b/apps/opentelemetry_api/src/opentelemetry.erl
@@ -248,6 +248,11 @@ module_to_application(Name, Version, SchemaUrl) ->
Acc#{M => {Name, Version, SchemaUrl}}
end, #{}, Modules).
+%% @doc Returns the default global tracer.
+%%
+%% Without the `opentelemetry' application loaded (as a dependency) and
+%% started (or another SDK), this function returns the default value
+%% `{otel_tracer_noop, []}'.
-spec get_tracer() -> tracer().
get_tracer() ->
get_tracer_(?GLOBAL_TRACER_PROVIDER_NAME).
@@ -256,11 +261,16 @@ get_tracer() ->
get_tracer_(TracerProvider) ->
persistent_term:get(?DEFAULT_TRACER_KEY(TracerProvider), {otel_tracer_noop, []}).
+%% @doc Returns the tracer for the given name.
+%%
+%% Passing `{Name, Vsn, SchemaUrl}' is the same as calling
+%% `get_tracer(Name, Vsn, SchemaUrl)'.
+%% @see get_tracer/3
-spec get_tracer(Name) -> Tracer when
Name :: atom() | {atom(), Vsn, SchemaUrl},
Vsn :: unicode:chardata() | undefined,
SchemaUrl :: uri_string:uri_string() | undefined,
- Tracer:: opentelemetry:tracer().
+ Tracer :: opentelemetry:tracer().
get_tracer('$__default_tracer') ->
get_tracer();
get_tracer({Name, Vsn, SchemaUrl}) ->
@@ -268,11 +278,12 @@ get_tracer({Name, Vsn, SchemaUrl}) ->
get_tracer(Name) ->
get_tracer(Name, undefined, undefined).
+%% @equiv get_tracer({Name, Vsn, SchemaUrl})
-spec get_tracer(Name, Vsn, SchemaUrl) -> Tracer when
Name :: atom(),
Vsn :: unicode:chardata() | undefined,
SchemaUrl :: uri_string:uri_string() | undefined,
- Tracer:: opentelemetry:tracer().
+ Tracer :: opentelemetry:tracer().
get_tracer(Name, Vsn, SchemaUrl) ->
get_tracer(?GLOBAL_TRACER_PROVIDER_NAME, Name, Vsn, SchemaUrl).
@@ -281,7 +292,7 @@ get_tracer(Name, Vsn, SchemaUrl) ->
Name :: atom(),
Vsn :: unicode:chardata() | undefined,
SchemaUrl :: uri_string:uri_string() | undefined,
- Tracer:: opentelemetry:tracer().
+ Tracer :: opentelemetry:tracer().
get_tracer(TracerProvider, Name, Vsn, SchemaUrl) ->
%% check cache and then use provider to get the tracer if it isn't cached yet
case persistent_term:get(?TRACER_KEY(TracerProvider, {Name, Vsn, SchemaUrl}), undefined) of
diff --git a/apps/opentelemetry_api/src/otel_attributes.erl b/apps/opentelemetry_api/src/otel_attributes.erl
index c5dd21d2..ff941e4b 100644
--- a/apps/opentelemetry_api/src/otel_attributes.erl
+++ b/apps/opentelemetry_api/src/otel_attributes.erl
@@ -12,7 +12,10 @@
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
-%% @doc
+%% @doc Functions to work with Attributes.
+%%
+%% An Attribute is a key-value pair with string or atom keys.
+%% See the specification.
%% @end
%%%-------------------------------------------------------------------------
-module(otel_attributes).
@@ -42,8 +45,17 @@
-export_type([t/0]).
-new(List, CountLimit, ValueLengthLimit) when is_list(List) ->
- new(maps:from_list(List), CountLimit, ValueLengthLimit);
+%% @doc Creates a new `Attributes' from `Pairs' with the given count and value length limits.
+%%
+%% `Pairs' can be a list of key-value pairs or a map. If `Pairs' is not a list or map, the
+%% function returns an empty `Attributes'.
+-spec new(
+ [opentelemetry:attribute()] | opentelemetry:attributes_map() | term(),
+ integer(),
+ integer() | infinity
+) -> t().
+new(Pairs, CountLimit, ValueLengthLimit) when is_list(Pairs) ->
+ new(maps:from_list(Pairs), CountLimit, ValueLengthLimit);
new(Map, CountLimit, ValueLengthLimit) when is_map(Map) ->
update_attributes(Map, #attributes{count_limit=CountLimit,
value_length_limit=ValueLengthLimit,
@@ -55,19 +67,41 @@ new(_, CountLimit, ValueLengthLimit) ->
dropped=0,
map=#{}}.
-set(NewList, Attributes) when is_list(NewList) ->
- set(maps:from_list(NewList), Attributes);
+%% @doc Sets the given key-value pairs in the given `Attributes'. Overrides
+%% the existing value for a given key if it already exists in `Attributes'.
+%%
+%% `NewListOrMap' can be a list of key-value pairs or a map. If `NewListOrMap' is not a list
+%% or map, the function returns `Attributes' as is. Returns the updated `Attributes'.
+-spec set([opentelemetry:attribute()] | opentelemetry:attributes_map() | term(), t()) -> t().
+set(NewListOrMap, Attributes) when is_list(NewListOrMap) ->
+ set(maps:from_list(NewListOrMap), Attributes);
set(NewMap, Attributes) when is_map(NewMap) ->
update_attributes(NewMap, Attributes);
set(_, Attributes) ->
Attributes.
+%% @doc Sets the given key-value pair in the given `Attributes'.
+%%
+%% Overrides the existing value under `Key' if `Key' already exists.
+%% Returns the updated `Attributes'.
+-spec set(opentelemetry:attribute_key(), opentelemetry:attribute_value(), t()) -> t().
set(Key, Value, Attributes) ->
update_attribute(Key, Value, Attributes).
+%% @doc Returns the count of dropped attributes in the given `Attributes'.
dropped(#attributes{dropped=Dropped}) ->
Dropped.
+%% @doc Returns the Attributes in the form of a map.
+%%
+%% For example:
+%% ```
+%% otel_attributes:new([], 10, 10),
+%% otel_attributes:set(<<"key">>, <<"value">>, Attributes),
+%% otel_attributes:map(Attributes).
+%% %=> #{<<"key">> => <<"value">>}
+%% '''
+-spec map(t()) -> map().
map(#attributes{map=Map}) ->
Map.
@@ -115,6 +149,19 @@ maybe_truncate_binary(Value, ValueLengthLimit) ->
Value
end.
+%% @doc Checks whether the given key-value pair makes for a valid attribute.
+%%
+%% For example:
+%% ```
+%% otel_attributes:is_valid_attribute(<<"key">>, <<"value">>).
+%% %=> true
+%%
+%% otel_attributes:is_valid_attribute(atom_key, <<"value">>).
+%% %=> true
+%%
+%% otel_attributes:is_valid_attribute(123, <<"value">>).
+%% %=> false
+%% '''
-spec is_valid_attribute(opentelemetry:attribute_key(), opentelemetry:attribute_value()) -> boolean().
is_valid_attribute(Key, Value) when is_tuple(Value) , ?is_allowed_key(Key) ->
is_valid_attribute(Key, tuple_to_list(Value));
@@ -145,6 +192,7 @@ is_valid_atom_value(nil) ->
is_valid_atom_value(Value) ->
is_atom(Value) andalso (is_boolean(Value) == false).
+%% @private
-spec process_attributes(eqwalizer:dynamic()) -> opentelemetry:attributes_map().
process_attributes(Attributes) when is_map(Attributes) ->
maps:fold(fun process_attribute/3, #{}, Attributes);
diff --git a/apps/opentelemetry_api/src/otel_baggage.erl b/apps/opentelemetry_api/src/otel_baggage.erl
index 1b38ac66..9ce3640c 100644
--- a/apps/opentelemetry_api/src/otel_baggage.erl
+++ b/apps/opentelemetry_api/src/otel_baggage.erl
@@ -15,6 +15,9 @@
%% @doc Baggage is used to annotate telemetry, adding context and
%% information to metrics, traces, and logs. It is represented by a set
%% of name/value pairs describing user-defined properties.
+%%
+%% The baggage can be stored either in the current context (with {@link set/1} or
+%% {@link set/3}, for example) or in an explicit Context (see {@link otel_ctx}).
%% @end
%%%-------------------------------------------------------------------------
-module(otel_baggage).
@@ -31,14 +34,26 @@
clear/0,
clear/1]).
-%% keys and values are UTF-8 binaries
+%% Keys and values are UTF-8 binaries
+
-type key() :: unicode:unicode_binary().
+%% The type for the baggage key, which is a UTF-8 binary.
+
-type value() :: unicode:unicode_binary().
+%% The type for the baggage value, which is a UTF-8 binary.
+
-type input_key() :: key() | unicode:charlist().
+%% An input key, that is, a key that is then converted to a UTF-8 binary.
+
-type input_value() :: value() | unicode:charlist() | atom().
+%% An input value, that is, a value that is then converted to a UTF-8 binary.
+
-type metadata() :: [unicode:unicode_binary() | {unicode:unicode_binary(), unicode:unicode_binary()}].
+%% The type for the baggage metadata, which is a list of UTF-8 binaries or a list of
+%% tuples of UTF-8 binaries (as key-value pairs).
-type t() :: #{key() => {value(), metadata()}}.
+%% The type for the baggage.
-export_type([t/0,
key/0,
@@ -48,6 +63,9 @@
-include("gradualizer.hrl").
+%% @doc Sets the given key-value pairs in the current baggage.
+%%
+%% If you need to set metadata for the key-value pair, use {@link set/3} instead.
-spec set(#{key() => value()} | [{key(), value()}]) -> ok.
set(KeyValues) when is_list(KeyValues) ->
set(maps:from_list(KeyValues));
@@ -57,6 +75,12 @@ set(KeyValues) when is_map(KeyValues) ->
set(_) ->
ok.
+%% @doc Sets the given key-value pair in the current baggage, or sets the
+%% given key-value pairs in the baggage for the given context.
+%%
+%% Returns `ok' when using the `set(Key, Value)' form, or the updated context when
+%% using the `set(Ctx, KeyValues)' form.
+%% @end
%% Ctx will never be a list or binary so we can tell if a context is passed by checking that
-spec set(otel_ctx:t() | input_key(), #{input_key() => input_value()} | [{input_key(), input_value()}] | input_value()) -> otel_ctx:t() | ok.
set(Key, Value) when (is_list(Key) orelse is_binary(Key)) andalso is_binary(Value) ->
@@ -71,6 +95,9 @@ set(Ctx, KeyValues) when is_map(KeyValues) andalso (is_map(Ctx) orelse Ctx =:= u
Baggage = otel_ctx:get_value(Ctx, ?BAGGAGE_KEY, #{}),
otel_ctx:set_value(Ctx, ?BAGGAGE_KEY, maps:merge(Baggage, verify_baggage(KeyValues))).
+%% @doc Sets the given key-value pairs in the baggage for the given context.
+%%
+%% Returns the updated context.
-spec set_to(otel_ctx:t(), #{input_key() => input_value()} | [{input_key(), input_value()}]) -> otel_ctx:t().
set_to(Ctx, KeyValues) when is_list(KeyValues) ->
set_to(Ctx, maps:from_list(KeyValues));
@@ -78,6 +105,12 @@ set_to(Ctx, KeyValues) when is_map(KeyValues) ->
Baggage = otel_ctx:get_value(Ctx, ?BAGGAGE_KEY, #{}),
otel_ctx:set_value(Ctx, ?BAGGAGE_KEY, maps:merge(Baggage, verify_baggage(KeyValues))).
+%% @doc Sets the given key-value pair in the current baggage (with the
+%% associated metadata), or sets the given key-value pair in the baggage for the
+%% given context.
+%%
+%% Returns `ok' when using the `set(Key, Value, Metadata)' form, or the updated
+%% context when using the `set(Ctx, Key, Value)' form.
-spec set(otel_ctx:t() | input_key(), input_key() | input_value(), input_value() | metadata()) -> otel_ctx:t() | ok.
set(Key, Value, Metadata) when (is_list(Key) orelse is_binary(Key)) andalso is_binary(Value) ->
Baggage = otel_ctx:get_value(?BAGGAGE_KEY, #{}),
@@ -91,10 +124,17 @@ set(Ctx, Key, Value) ->
?assert_type(Value, input_value()),
[]).
+%% @doc Sets the given key-value pair in the baggage for the given context.
+%%
+%% Returns the updated context.
-spec set_to(otel_ctx:t(), input_key(), input_value()) -> otel_ctx:t().
set_to(Ctx, Key, Value) ->
set_to(Ctx, Key, Value, []).
+%% @doc Sets the given key-value pair in the baggage for the given context, with the
+%% associated metadata.
+%%
+%% Returns the updated context.
-spec set(otel_ctx:t(), input_key(), input_value(), metadata()) -> otel_ctx:t().
set(Ctx, Key, Value, Metadata) when is_binary(Value) ->
Baggage = otel_ctx:get_value(Ctx, ?BAGGAGE_KEY, #{}),
@@ -103,6 +143,10 @@ set(Ctx, Key, Value, Metadata) when is_binary(Value) ->
set(Ctx, _, _, _) ->
Ctx.
+%% @doc Sets the given key-value pair in the baggage for the given context, with the
+%% associated metadata.
+%%
+%% Returns the updated context.
-spec set_to(otel_ctx:t(), input_key(), input_value(), metadata()) -> otel_ctx:t().
set_to(Ctx, Key, Value, Metadata) when is_binary(Value) ->
Baggage = otel_ctx:get_value(Ctx, ?BAGGAGE_KEY, #{}),
@@ -111,18 +155,22 @@ set_to(Ctx, Key, Value, Metadata) when is_binary(Value) ->
set_to(Ctx, _, _, _) ->
Ctx.
+%% @doc Returns the baggage from the process dictionary.
-spec get_all() -> t().
get_all() ->
otel_ctx:get_value(?BAGGAGE_KEY, #{}).
+%% @doc Returns the baggage for the given context.
-spec get_all(otel_ctx:t()) -> t().
get_all(Ctx) ->
otel_ctx:get_value(Ctx, ?BAGGAGE_KEY, #{}).
+%% @doc Clears the baggage, removing all the current key-value pairs.
-spec clear() -> ok.
clear() ->
otel_ctx:set_value(?BAGGAGE_KEY, #{}).
+%% @doc Clears the baggage for the given context, removing all the current key-value pairs.
-spec clear(otel_ctx:t()) -> otel_ctx:t().
clear(Ctx) ->
otel_ctx:set_value(Ctx, ?BAGGAGE_KEY, #{}).
diff --git a/apps/opentelemetry_api/src/otel_ctx.erl b/apps/opentelemetry_api/src/otel_ctx.erl
index 86288b71..ea8019ed 100644
--- a/apps/opentelemetry_api/src/otel_ctx.erl
+++ b/apps/opentelemetry_api/src/otel_ctx.erl
@@ -16,6 +16,10 @@
%% are associated with a particular Trace or set of Baggage.
%% `OpenTelemetry.Tracer' and `OpenTelemetry.Baggage' handle updating
%% the Context.
+%%
+%% Functions in this module include variants that explicitly take a `Ctx'
+%% argument and variants that implicitly use the current context, which is
+%% the context stored in the process dictionary.
%% @end
%%%-------------------------------------------------------------------------
-module(otel_ctx).
@@ -41,11 +45,17 @@
text_map_injector_fun/3]).
-type t() :: map() | undefined.
+%% A context map.
+
-type key() :: term().
+%% A context key.
+
-type value() :: term().
+%% A context value.
%% at this time the "token" is actually the context map itself
-opaque token() :: t().
+%% An opaque token that represents a context.
-export_type([t/0,
token/0,
@@ -55,29 +65,39 @@
-define(CURRENT_CTX, '$__current_otel_ctx').
+%% @doc Creates a new context.
-spec new() -> t().
new() ->
#{}.
+%% @doc Sets a value in the current context under the given key.
-spec set_value(term(), term()) -> ok.
set_value(Key, Value) ->
erlang:put(?CURRENT_CTX, set_value(erlang:get(?CURRENT_CTX), Key, Value)),
ok.
+%% @doc Sets a value in the given context under the given key.
+%%
+%% Returns the updated context.
-spec set_value(t(), term(), term()) -> t().
set_value(Ctx, Key, Value) when is_map(Ctx) ->
Ctx#{Key => Value};
set_value(_, Key, Value) ->
#{Key => Value}.
+%% @doc Gets a value from the current context under the given key.
-spec get_value(term()) -> eqwalizer:dynamic().
get_value(Key) ->
get_value(erlang:get(?CURRENT_CTX), Key, undefined).
+%% @doc Gets a value from the current context under the given key, or returns the default value
+%% if the key is not present in the current context.
-spec get_value(term(), term()) -> eqwalizer:dynamic().
get_value(Key, Default) ->
get_value(erlang:get(?CURRENT_CTX), Key, Default).
+%% @doc Gets a value from the given context under the given key, or returns the default value
+%% if the key is not present in the given context or if `Ctx' is `undefined'.
-spec get_value(t(), term(), term()) -> eqwalizer:dynamic().
get_value(undefined, _Key, Default) ->
Default;
@@ -86,15 +106,20 @@ get_value(Ctx, Key, Default) when is_map(Ctx) ->
get_value(_, _, Default) ->
Default.
+%% @doc Removes all key-value pairs from the current context.
-spec clear() -> ok.
clear() ->
erlang:erase(?CURRENT_CTX),
ok.
+%% @doc Removes all key-value pairs from the given context.
+%%
+%% Returns an empty context.
-spec clear(t()) -> t().
-clear(_) ->
+clear(_Ctx) ->
new().
+%% @doc Removes the value under the given key from the current context.
-spec remove(term()) -> ok.
remove(Key) ->
case erlang:get(?CURRENT_CTX) of
@@ -105,12 +130,16 @@ remove(Key) ->
ok
end.
+%% @doc Removes the value under the given key from the given context.
+%%
+%% Returns the updated context.
-spec remove(t(), term()) -> t().
remove(Ctx, Key) when is_map(Ctx) ->
maps:remove(Key, Ctx);
remove(_, _) ->
new().
+%% @doc Returns the current context.
-spec get_current() -> map().
get_current() ->
case erlang:get(?CURRENT_CTX) of
@@ -120,11 +149,16 @@ get_current() ->
#{}
end.
+%% @doc Attaches the given context to the current process.
+%%
+%% Essentially, this sets `Ctx' as the current context
+%% .
-spec attach(t()) -> token().
attach(Ctx) ->
update_logger_process_metadata(Ctx),
erlang:put(?CURRENT_CTX, Ctx).
+%% @doc Detaches the given context from the current process.
-spec detach(token()) -> ok.
detach(Token) ->
%% at this time `Token' is a context
@@ -134,16 +168,20 @@ detach(Token) ->
%% Extractor and Injector setup functions
+%% @private
text_map_extractor(Key, FromText) ->
{fun ?MODULE:text_map_extractor_fun/3, {Key, FromText}}.
+%% @private
text_map_extractor_fun(TextMap, Key, FromText) ->
New = FromText(TextMap, ?MODULE:get_value(Key, #{})),
?MODULE:set_value(Key, New).
+%% @private
text_map_injector(Key, ToText) ->
{fun ?MODULE:text_map_injector_fun/3, {Key, ToText}}.
+%% @private
text_map_injector_fun(TextMap, Key, ToText) ->
TextMap ++ ToText(?MODULE:get_value(Key, undefined)).
diff --git a/apps/opentelemetry_api/src/otel_span.erl b/apps/opentelemetry_api/src/otel_span.erl
index 8a2d0d1f..1d7f7e01 100644
--- a/apps/opentelemetry_api/src/otel_span.erl
+++ b/apps/opentelemetry_api/src/otel_span.erl
@@ -57,6 +57,7 @@
-export_type([start_opts/0]).
+%% @doc Validates the start options for a span and fills in defaults.
-spec validate_start_opts(start_opts()) -> start_opts().
validate_start_opts(Opts) when is_map(Opts) ->
Attributes = maps:get(attributes, Opts, #{}),
@@ -72,11 +73,13 @@ validate_start_opts(Opts) when is_map(Opts) ->
is_recording => IsRecording
}.
+%% @doc Returns whether the span is recording.
-spec is_recording(SpanCtx) -> boolean() when
SpanCtx :: opentelemetry:span_ctx().
is_recording(SpanCtx) ->
?is_recording(SpanCtx).
+%% @doc Returns whether the span context is valid.
-spec is_valid(SpanCtx) -> boolean() when
SpanCtx :: opentelemetry:span_ctx().
is_valid(#span_ctx{trace_id=TraceId,
@@ -86,6 +89,7 @@ is_valid(#span_ctx{trace_id=TraceId,
is_valid(_) ->
false.
+%% @private
-spec is_valid_name(any()) -> boolean().
is_valid_name(undefined) ->
false;
@@ -95,10 +99,13 @@ is_valid_name(_) ->
false.
%% accessors
+
+%% @doc Returns the trace ID of the given span context.
-spec trace_id(opentelemetry:span_ctx()) -> opentelemetry:trace_id().
trace_id(#span_ctx{trace_id=TraceId}) ->
TraceId.
+%% @doc Returns the span ID of the given span context.
-spec span_id(opentelemetry:span_ctx()) -> opentelemetry:span_id().
span_id(#span_ctx{span_id=SpanId}) ->
SpanId.
@@ -175,6 +182,10 @@ set_attributes(SpanCtx=#span_ctx{span_sdk={Module, _}}, Attributes) when ?is_rec
set_attributes(_, _) ->
false.
+%% @doc Adds an event to the given span context.
+%%
+%% Returns `false' if the given span context is not recording, or if the event `Name' is
+%% not valid.
-spec add_event(SpanCtx, Name, Attributes) -> boolean() when
Name :: opentelemetry:event_name(),
Attributes :: opentelemetry:attributes_map(),
@@ -191,7 +202,9 @@ add_event(SpanCtx=#span_ctx{span_sdk={Module, _}}, Name, Attributes)
add_event(_, _, _) ->
false.
-%% todo - validate
+%% @doc Same as {@link add_event/3}, but takes a list of events.
+%%
+%% Returns `false' if the given span context is not recording.
-spec add_events(SpanCtx, Events) -> boolean() when
Events :: [opentelemetry:event()],
SpanCtx :: opentelemetry:span_ctx().
@@ -258,6 +271,9 @@ set_status(_, _) ->
set_status(SpanCtx, Code, Message) ->
set_status(SpanCtx, opentelemetry:status(Code, Message)).
+%% @doc Updates the name of the given span context to `Name'.
+%%
+%% Returns `false' if the given span context is not recording, or if the name `Name' is not valid.
-spec update_name(SpanCtx, Name) -> boolean() when
Name :: opentelemetry:span_name(),
SpanCtx :: opentelemetry:span_ctx().
@@ -271,6 +287,10 @@ update_name(SpanCtx=#span_ctx{span_sdk={Module, _}}, SpanName) when ?is_recordin
update_name(_, _) ->
false.
+%% @doc Ends the given span context.
+%%
+%% If `SpanCtx' is not recording, this function doesn't do anything.
+%% Returns the updated span context.
-spec end_span(SpanCtx) -> SpanCtx when
SpanCtx :: opentelemetry:span_ctx().
end_span(SpanCtx=#span_ctx{span_sdk={Module, _}}) when ?is_recording(SpanCtx) ->
@@ -279,6 +299,11 @@ end_span(SpanCtx=#span_ctx{span_sdk={Module, _}}) when ?is_recording(SpanCtx) ->
end_span(SpanCtx) ->
SpanCtx.
+%% @doc Ends the given span context with the given timestamp.
+%%
+%% If `SpanCtx' is not recording, this function doesn't do anything.
+%% If `Timestamp' is `undefined', this is equivalent to {@link end_span/1}.
+%% Returns the updated span context.
-spec end_span(SpanCtx, Timestamp) -> SpanCtx when
SpanCtx :: opentelemetry:span_ctx(),
Timestamp :: integer() | undefined.
diff --git a/apps/opentelemetry_api/test/opentelemetry_api_SUITE.erl b/apps/opentelemetry_api/test/opentelemetry_api_SUITE.erl
index 5a6fb9ed..2ed3be6c 100644
--- a/apps/opentelemetry_api/test/opentelemetry_api_SUITE.erl
+++ b/apps/opentelemetry_api/test/opentelemetry_api_SUITE.erl
@@ -36,7 +36,6 @@ can_create_link_from_span(_Config) ->
TraceId = otel_span:trace_id(SpanCtx),
SpanId = otel_span:span_id(SpanCtx),
Tracestate = otel_span:tracestate(SpanCtx),
- ct:pal("STATE ~p", [Tracestate]),
%% end span, so there's no current span set
?end_span(opentelemetry:timestamp()),
diff --git a/apps/opentelemetry_api_experimental/include/match_spec.hrl b/apps/opentelemetry_api_experimental/include/match_spec.hrl
new file mode 100644
index 00000000..a1ce5e82
--- /dev/null
+++ b/apps/opentelemetry_api_experimental/include/match_spec.hrl
@@ -0,0 +1,8 @@
+-ifndef(MATCH_SPEC_TYPES_DEFINED).
+
+-type match_var() :: '_' | '$1' | '$2' | '$3' | '$4' | '$5' | '$6' | '$7' | '$8' | '$9'.
+-type match_spec(A) :: A | match_var() | {const, A}.
+
+-define(MATCH_SPEC_TYPES_DEFINED, true).
+
+-endif.
diff --git a/apps/opentelemetry_api_experimental/include/otel_meter.hrl b/apps/opentelemetry_api_experimental/include/otel_meter.hrl
index f25417d8..e983f75e 100644
--- a/apps/opentelemetry_api_experimental/include/otel_meter.hrl
+++ b/apps/opentelemetry_api_experimental/include/otel_meter.hrl
@@ -25,12 +25,21 @@
-define(create_observable_updowncounter(Name, Callback, CallbackArgs, Opts),
otel_meter:create_observable_updowncounter(?current_meter, Name, Callback, CallbackArgs, Opts)).
+-define(counter_add(Name, Number),
+ otel_counter:add(?current_meter, Name, Number)).
+
-define(counter_add(Name, Number, Attributes),
otel_counter:add(?current_meter, Name, Number, Attributes)).
+-define(updown_counter_add(Name, Number),
+ otel_updown_counter:add(?current_meter, Name, Number)).
+
-define(updown_counter_add(Name, Number, Attributes),
otel_updown_counter:add(?current_meter, Name, Number, Attributes)).
+-define(histogram_record(Name, Number),
+ otel_histogram:record(?current_meter, Name, Number)).
+
-define(histogram_record(Name, Number, Attributes),
otel_histogram:record(?current_meter, Name, Number, Attributes)).
diff --git a/apps/opentelemetry_api_experimental/include/otel_metrics.hrl b/apps/opentelemetry_api_experimental/include/otel_metrics.hrl
index 5446cf2e..1216df8d 100644
--- a/apps/opentelemetry_api_experimental/include/otel_metrics.hrl
+++ b/apps/opentelemetry_api_experimental/include/otel_metrics.hrl
@@ -1,12 +1,15 @@
--record(instrument, {module :: module(),
- meter :: otel_meter:t(),
- name :: otel_instrument:name(),
- description :: otel_instrument:description() | undefined,
- kind :: otel_instrument:kind(),
- unit :: otel_instrument:unit() | undefined,
- temporality :: otel_instrument:temporality(),
- callback :: otel_instrument:callback() | undefined,
- callback_args :: otel_instrument:callback_args() | undefined}).
+-include_lib("match_spec.hrl").
+
+-record(instrument, {module :: module(),
+ meter :: otel_meter:t(),
+ name :: otel_instrument:name(),
+ description :: otel_instrument:description() | undefined,
+ kind :: otel_instrument:kind(),
+ unit :: otel_instrument:unit() | undefined,
+ temporality :: otel_instrument:temporality(),
+ callback :: otel_instrument:callback() | undefined,
+ callback_args :: otel_instrument:callback_args() | undefined,
+ advisory_params :: otel_instrument:advisory_params() | undefined}).
-define(TEMPORALITY_DELTA, temporality_delta).
-define(TEMPORALITY_CUMULATIVE, temporality_cumulative).
diff --git a/apps/opentelemetry_api_experimental/lib/open_telemetry/counter.ex b/apps/opentelemetry_api_experimental/lib/open_telemetry/counter.ex
index 9955ecf9..aae952dd 100644
--- a/apps/opentelemetry_api_experimental/lib/open_telemetry/counter.ex
+++ b/apps/opentelemetry_api_experimental/lib/open_telemetry/counter.ex
@@ -13,6 +13,16 @@ defmodule OpenTelemetryAPIExperimental.Counter do
end
end
+ defmacro add(name, number) do
+ quote bind_quoted: [name: name, number: number] do
+ :otel_counter.add(
+ :opentelemetry_experimental.get_meter(:opentelemetry.get_application_scope(__MODULE__)),
+ name,
+ number
+ )
+ end
+ end
+
defmacro add(name, number, attributes) do
quote bind_quoted: [name: name, number: number, attributes: attributes] do
:otel_counter.add(
diff --git a/apps/opentelemetry_api_experimental/lib/open_telemetry/histogram.ex b/apps/opentelemetry_api_experimental/lib/open_telemetry/histogram.ex
index e371150e..929798d7 100644
--- a/apps/opentelemetry_api_experimental/lib/open_telemetry/histogram.ex
+++ b/apps/opentelemetry_api_experimental/lib/open_telemetry/histogram.ex
@@ -13,6 +13,16 @@ defmodule OpenTelemetryAPIExperimental.Histogram do
end
end
+ defmacro record(name, number) do
+ quote bind_quoted: [name: name, number: number] do
+ :otel_histogram.record(
+ :opentelemetry_experimental.get_meter(:opentelemetry.get_application_scope(__MODULE__)),
+ name,
+ number
+ )
+ end
+ end
+
defmacro record(name, number, attributes) do
quote bind_quoted: [name: name, number: number, attributes: attributes] do
:otel_histogram.record(
diff --git a/apps/opentelemetry_api_experimental/lib/open_telemetry/updown_counter.ex b/apps/opentelemetry_api_experimental/lib/open_telemetry/updown_counter.ex
index 6b424668..4e10ee3a 100644
--- a/apps/opentelemetry_api_experimental/lib/open_telemetry/updown_counter.ex
+++ b/apps/opentelemetry_api_experimental/lib/open_telemetry/updown_counter.ex
@@ -13,6 +13,16 @@ defmodule OpenTelemetryAPIExperimental.UpDownCounter do
end
end
+ defmacro add(name, number) do
+ quote bind_quoted: [name: name, number: number] do
+ :otel_updown_counter.add(
+ :opentelemetry_experimental.get_meter(:opentelemetry.get_application_scope(__MODULE__)),
+ name,
+ number
+ )
+ end
+ end
+
defmacro add(name, number, attributes) do
quote bind_quoted: [name: name, number: number, attributes: attributes] do
:otel_updown_counter.add(
diff --git a/apps/opentelemetry_api_experimental/mix.exs b/apps/opentelemetry_api_experimental/mix.exs
index df92b836..6742c025 100644
--- a/apps/opentelemetry_api_experimental/mix.exs
+++ b/apps/opentelemetry_api_experimental/mix.exs
@@ -52,8 +52,8 @@ defmodule OpenTelemetryExperimental.MixProject do
dep when is_atom(dep) -> {dep, ">= 0.0.0"}
end)
|> Enum.concat([
- {:cmark, "~> 0.7", only: :dev, runtime: false},
- {:ex_doc, "0.21.0", only: :dev, runtime: false},
+ {:cmark, "~> 0.10", only: :dev, runtime: false},
+ {:ex_doc, "~> 0.31", only: :dev, runtime: false},
{:dialyxir, "~> 1.0", only: [:dev], runtime: false},
{:covertool, ">= 0.0.0", only: :test}
])
diff --git a/apps/opentelemetry_api_experimental/mix.lock b/apps/opentelemetry_api_experimental/mix.lock
index a1cc64e6..342af5eb 100644
--- a/apps/opentelemetry_api_experimental/mix.lock
+++ b/apps/opentelemetry_api_experimental/mix.lock
@@ -3,13 +3,14 @@
"covertool": {:hex, :covertool, "2.0.6", "4a291b4e3449025b0595d8f44c8d7635d4f48f033be2ce88d22a329f36f94a91", [:rebar3], [], "hexpm", "5db3fcd82180d8ea4ad857d4d1ab21a8d31b5aee0d60d2f6c0f9e25a411d1e21"},
"dialyxir": {:hex, :dialyxir, "1.3.0", "fd1672f0922b7648ff9ce7b1b26fcf0ef56dda964a459892ad15f6b4410b5284", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "00b2a4bcd6aa8db9dcb0b38c1225b7277dca9bc370b6438715667071a304696f"},
"earmark": {:hex, :earmark, "1.4.39", "acdb2f02c536471029dbcc509fbd6b94b89f40ad7729fb3f68f4b6944843f01d", [:mix], [{:earmark_parser, "~> 1.4.33", [hex: :earmark_parser, repo: "hexpm", optional: false]}], "hexpm", "156c9d8ec3cbeccdbf26216d8247bdeeacc8c76b4d9eee7554be2f1b623ea440"},
- "earmark_parser": {:hex, :earmark_parser, "1.4.33", "3c3fd9673bb5dcc9edc28dd90f50c87ce506d1f71b70e3de69aa8154bc695d44", [:mix], [], "hexpm", "2d526833729b59b9fdb85785078697c72ac5e5066350663e5be6a1182da61b8f"},
+ "earmark_parser": {:hex, :earmark_parser, "1.4.39", "424642f8335b05bb9eb611aa1564c148a8ee35c9c8a8bba6e129d51a3e3c6769", [:mix], [], "hexpm", "06553a88d1f1846da9ef066b87b57c6f605552cfbe40d20bd8d59cc6bde41944"},
"elixir_make": {:hex, :elixir_make, "0.7.7", "7128c60c2476019ed978210c245badf08b03dbec4f24d05790ef791da11aa17c", [:mix], [{:castore, "~> 0.1 or ~> 1.0", [hex: :castore, repo: "hexpm", optional: true]}], "hexpm", "5bc19fff950fad52bbe5f211b12db9ec82c6b34a9647da0c2224b8b8464c7e6c"},
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
- "ex_doc": {:hex, :ex_doc, "0.21.0", "7af8cd3e3df2fe355e99dabd2d4dcecc6e76eb417200e3b7a3da362d52730e3c", [:mix], [{:earmark, "~> 1.3", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}], "hexpm", "ef679a81de63385c7e72597e81ca1276187505eeacb38281a672d2822254ff1a"},
- "makeup": {:hex, :makeup, "1.1.0", "6b67c8bc2882a6b6a445859952a602afc1a41c2e08379ca057c0f525366fc3ca", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "0a45ed501f4a8897f580eabf99a2e5234ea3e75a4373c8a52824f6e873be57a6"},
+ "ex_doc": {:hex, :ex_doc, "0.31.0", "06eb1dfd787445d9cab9a45088405593dd3bb7fe99e097eaa71f37ba80c7a676", [:mix], [{:earmark_parser, "~> 1.4.39", [hex: :earmark_parser, repo: "hexpm", optional: false]}, {:makeup_c, ">= 0.1.1", [hex: :makeup_c, repo: "hexpm", optional: true]}, {:makeup_elixir, "~> 0.14", [hex: :makeup_elixir, repo: "hexpm", optional: false]}, {:makeup_erlang, "~> 0.1", [hex: :makeup_erlang, repo: "hexpm", optional: false]}], "hexpm", "5350cafa6b7f77bdd107aa2199fe277acf29d739aba5aee7e865fc680c62a110"},
+ "makeup": {:hex, :makeup, "1.1.1", "fa0bc768698053b2b3869fa8a62616501ff9d11a562f3ce39580d60860c3a55e", [:mix], [{:nimble_parsec, "~> 1.2.2 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "5dc62fbdd0de44de194898b6710692490be74baa02d9d108bc29f007783b0b48"},
"makeup_elixir": {:hex, :makeup_elixir, "0.16.1", "cc9e3ca312f1cfeccc572b37a09980287e243648108384b97ff2b76e505c3555", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 1.2.3 or ~> 1.3", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm", "e127a341ad1b209bd80f7bd1620a15693a9908ed780c3b763bccf7d200c767c6"},
- "nimble_parsec": {:hex, :nimble_parsec, "1.3.1", "2c54013ecf170e249e9291ed0a62e5832f70a476c61da16f6aac6dca0189f2af", [:mix], [], "hexpm", "2682e3c0b2eb58d90c6375fc0cc30bc7be06f365bf72608804fb9cffa5e1b167"},
+ "makeup_erlang": {:hex, :makeup_erlang, "0.1.3", "d684f4bac8690e70b06eb52dad65d26de2eefa44cd19d64a8095e1417df7c8fd", [:mix], [{:makeup, "~> 1.0", [hex: :makeup, repo: "hexpm", optional: false]}], "hexpm", "b78dc853d2e670ff6390b605d807263bf606da3c82be37f9d7f68635bd886fc9"},
+ "nimble_parsec": {:hex, :nimble_parsec, "1.4.0", "51f9b613ea62cfa97b25ccc2c1b4216e81df970acd8e16e8d1bdc58fef21370d", [:mix], [], "hexpm", "9c565862810fb383e9838c1dd2d7d2c437b3d13b267414ba6af33e50d2d1cf28"},
"opentelemetry_api": {:hex, :opentelemetry_api, "1.2.2", "693f47b0d8c76da2095fe858204cfd6350c27fe85d00e4b763deecc9588cf27a", [:mix, :rebar3], [{:opentelemetry_semantic_conventions, "~> 0.2", [hex: :opentelemetry_semantic_conventions, repo: "hexpm", optional: false]}], "hexpm", "dc77b9a00f137a858e60a852f14007bb66eda1ffbeb6c05d5fe6c9e678b05e9d"},
"opentelemetry_semantic_conventions": {:hex, :opentelemetry_semantic_conventions, "0.2.0", "b67fe459c2938fcab341cb0951c44860c62347c005ace1b50f8402576f241435", [:mix, :rebar3], [], "hexpm", "d61fa1f5639ee8668d74b527e6806e0503efc55a42db7b5f39939d84c07d6895"},
}
diff --git a/apps/opentelemetry_api_experimental/src/otel_counter.erl b/apps/opentelemetry_api_experimental/src/otel_counter.erl
index 3c0e9728..88777995 100644
--- a/apps/opentelemetry_api_experimental/src/otel_counter.erl
+++ b/apps/opentelemetry_api_experimental/src/otel_counter.erl
@@ -19,6 +19,7 @@
-module(otel_counter).
-export([create/3,
+ add/2,
add/3,
add/4]).
@@ -28,14 +29,24 @@
-spec create(Meter, Name, Opts) -> otel_instrument:t() when
Meter :: otel_meter:t(),
Name :: otel_instrument:name(),
- Opts :: otel_meter:opts().
+ Opts :: otel_instrument:opts().
create(Meter, Name, Opts) ->
otel_meter:create_counter(Meter, Name, Opts).
--spec add(otel_meter:t(), otel_instrument:name(), pos_integer() |float(), opentelemetry:attributes_map()) -> ok.
-add(Meter, Name, Number, Attributes) ->
- otel_meter:record(Meter, Name, Number, Attributes).
+-spec add(otel_instrument:t(), pos_integer() | float()) -> ok.
+add(Instrument=#instrument{module=Module}, Number) ->
+ Module:record(Instrument, Number).
--spec add(otel_instrument:t(), pos_integer() |float(), opentelemetry:attributes_map()) -> ok.
+-spec add(
+ otel_meter:t() | otel_instrument:t(),
+ otel_instrument:name() | pos_integer() | float(),
+ pos_integer() | float() | opentelemetry:attributes_map()) -> ok.
add(Instrument=#instrument{module=Module}, Number, Attributes) ->
- Module:record(Instrument, Number, Attributes).
+ Module:record(Instrument, Number, Attributes);
+
+add(Meter, Name, Number) ->
+ otel_meter:record(Meter, Name, Number).
+
+-spec add(otel_meter:t(), otel_instrument:name(), pos_integer() |float(), opentelemetry:attributes_map()) -> ok.
+add(Meter, Name, Number, Attributes) ->
+ otel_meter:record(Meter, Name, Number, Attributes).
\ No newline at end of file
diff --git a/apps/opentelemetry_api_experimental/src/otel_histogram.erl b/apps/opentelemetry_api_experimental/src/otel_histogram.erl
index f82a55ea..4dc51de1 100644
--- a/apps/opentelemetry_api_experimental/src/otel_histogram.erl
+++ b/apps/opentelemetry_api_experimental/src/otel_histogram.erl
@@ -20,6 +20,7 @@
-module(otel_histogram).
-export([create/3,
+ record/2,
record/3,
record/4]).
@@ -29,14 +30,24 @@
-spec create(Meter, Name, Opts) -> otel_instrument:t() when
Meter :: otel_meter:t(),
Name :: otel_instrument:name(),
- Opts :: otel_meter:opts().
+ Opts :: otel_instrument:opts().
create(Meter, Name, Opts) ->
otel_meter:create_histogram(Meter, Name, Opts).
--spec record(otel_meter:t(), otel_instrument:name(), pos_integer() | float(), opentelemetry:attributes_map()) -> ok.
-record(Meter, Name, Number, Attributes) ->
- otel_meter:record(Meter, Name, Number, Attributes).
+-spec record(otel_instrument:t(), pos_integer() | float()) -> ok.
+record(Instrument=#instrument{module=Module}, Number) ->
+ Module:record(Instrument, Number).
--spec record(otel_instrument:t(), pos_integer() | float(), opentelemetry:attributes_map()) -> ok.
+-spec record(
+ otel_meter:t() | otel_instrument:t(),
+ otel_instrument:name() | pos_integer() | float(),
+ pos_integer() | float() | opentelemetry:attributes_map()) -> ok.
record(Instrument=#instrument{module=Module}, Number, Attributes) ->
- Module:record(Instrument, Number, Attributes).
+ Module:record(Instrument, Number, Attributes);
+
+record(Meter, Name, Number) ->
+ otel_meter:record(Meter, Name, Number).
+
+-spec record(otel_meter:t(), otel_instrument:name(), pos_integer() | float(), opentelemetry:attributes_map()) -> ok.
+record(Meter, Name, Number, Attributes) ->
+ otel_meter:record(Meter, Name, Number, Attributes).
\ No newline at end of file
diff --git a/apps/opentelemetry_api_experimental/src/otel_instrument.erl b/apps/opentelemetry_api_experimental/src/otel_instrument.erl
index 3d37b4f7..d1ff3994 100644
--- a/apps/opentelemetry_api_experimental/src/otel_instrument.erl
+++ b/apps/opentelemetry_api_experimental/src/otel_instrument.erl
@@ -17,8 +17,8 @@
%%%-------------------------------------------------------------------------
-module(otel_instrument).
--export([new/6,
- new/8,
+-export([new/5,
+ new/7,
is_monotonic/1,
temporality/1]).
@@ -38,6 +38,12 @@
-type temporality() :: ?TEMPORALITY_DELTA | ?TEMPORALITY_CUMULATIVE.
+-type advisory_params() :: #{explicit_bucket_boundaries => [number(), ...]}.
+
+-type opts() :: #{description => description(),
+ unit => unit(),
+ advisory_params => advisory_params()}.
+
-type t() :: #instrument{}.
-export_type([t/0,
@@ -46,31 +52,43 @@
kind/0,
unit/0,
temporality/0,
+ observation/0,
+ named_observations/0,
callback/0,
callback_args/0,
- callback_result/0]).
+ callback_result/0,
+ advisory_params/0,
+ opts/0]).
--spec new(module(), otel_meter:t(), kind(), name(), description() | undefined, unit() | undefined) -> t().
-new(Module, Meter, Kind, Name, Description, Unit) ->
- #instrument{module = Module,
- meter = Meter,
- name = Name,
- description = Description,
- temporality = ?TEMPORALITY_DELTA,
- kind = Kind,
- unit = Unit}.
+-spec new(module(), otel_meter:t(), kind(), name(), opts()) -> t().
+new(Module, Meter, Kind, Name, Opts) ->
+ Description = maps:get(description, Opts, undefined),
+ Unit = maps:get(unit, Opts, undefined),
+ AdvisoryParams = maps:get(advisory_params, Opts, undefined),
+ #instrument{module = Module,
+ meter = Meter,
+ name = Name,
+ description = Description,
+ temporality = ?TEMPORALITY_DELTA,
+ kind = Kind,
+ unit = Unit,
+ advisory_params = AdvisoryParams}.
--spec new(module(), otel_meter:t(), kind(), name(), description() | undefined, unit() | undefined, callback(), callback_args()) -> t().
-new(Module, Meter, Kind, Name, Description, Unit, Callback, CallbackArgs) ->
- #instrument{module = Module,
- meter = Meter,
- name = Name,
- description = Description,
- kind = Kind,
- unit = Unit,
- temporality = ?TEMPORALITY_CUMULATIVE,
- callback = Callback,
- callback_args = CallbackArgs}.
+-spec new(module(), otel_meter:t(), kind(), name(), callback(), callback_args(), opts()) -> t().
+new(Module, Meter, Kind, Name, Callback, CallbackArgs, Opts) ->
+ Description = maps:get(description, Opts, undefined),
+ Unit = maps:get(unit, Opts, undefined),
+ AdvisoryParams = maps:get(advisory_params, Opts, undefined),
+ #instrument{module = Module,
+ meter = Meter,
+ name = Name,
+ description = Description,
+ kind = Kind,
+ unit = Unit,
+ temporality = ?TEMPORALITY_CUMULATIVE,
+ callback = Callback,
+ callback_args = CallbackArgs,
+ advisory_params = AdvisoryParams}.
is_monotonic(#instrument{kind=?KIND_COUNTER}) ->
true;
@@ -92,4 +110,4 @@ temporality(#instrument{kind=?KIND_OBSERVABLE_UPDOWNCOUNTER}) ->
temporality(#instrument{kind=?KIND_HISTOGRAM}) ->
?TEMPORALITY_DELTA;
temporality(#instrument{kind=?KIND_OBSERVABLE_GAUGE}) ->
- ?TEMPORALITY_CUMULATIVE.
\ No newline at end of file
+ ?TEMPORALITY_CUMULATIVE.
diff --git a/apps/opentelemetry_api_experimental/src/otel_meter.erl b/apps/opentelemetry_api_experimental/src/otel_meter.erl
index fbda6063..f82fcfbe 100644
--- a/apps/opentelemetry_api_experimental/src/otel_meter.erl
+++ b/apps/opentelemetry_api_experimental/src/otel_meter.erl
@@ -34,6 +34,7 @@
lookup_instrument/2,
+ record/3,
record/4]).
-include("otel_metrics.hrl").
@@ -42,7 +43,7 @@
Meter :: t(),
Name :: otel_instrument:name(),
Kind :: otel_instrument:kind(),
- Opts :: opts().
+ Opts :: otel_instrument:opts().
-callback create_instrument(Meter, Name, Kind, Callback, CallbackArgs, Opts) -> otel_instrument:t() when
Meter :: t(),
@@ -50,7 +51,7 @@
Kind :: otel_instrument:kind(),
Callback :: otel_instrument:callback(),
CallbackArgs :: otel_instrument:callback_args(),
- Opts :: opts().
+ Opts :: otel_instrument:opts().
-callback register_callback(Meter, Instruments, Callback, CallbackArgs) -> ok when
Meter :: t(),
@@ -58,39 +59,35 @@
Callback :: otel_instrument:callback(),
CallbackArgs :: otel_instrument:callback_args().
--type opts() :: #{description => otel_instrument:description(),
- unit => otel_instrument:unit()}.
-
-type t() :: {module(), term()}.
--export_type([t/0,
- opts/0]).
+-export_type([t/0]).
-spec create_counter(Meter, Name, Opts) -> otel_instrument:t() when
Meter :: t(),
Name :: otel_instrument:name(),
- Opts :: opts().
+ Opts :: otel_instrument:opts().
create_counter(Meter, Name, Opts) ->
create_instrument(Meter, Name, ?KIND_COUNTER, Opts).
-spec create_updown_counter(Meter, Name, Opts) -> otel_instrument:t() when
Meter :: t(),
Name :: otel_instrument:name(),
- Opts :: opts().
+ Opts :: otel_instrument:opts().
create_updown_counter(Meter, Name, Opts) ->
create_instrument(Meter, Name, ?KIND_UPDOWN_COUNTER, Opts).
-spec create_histogram(Meter, Name, Opts) -> otel_instrument:t() when
Meter :: t(),
Name :: otel_instrument:name(),
- Opts :: opts().
+ Opts :: otel_instrument:opts().
create_histogram(Meter, Name, Opts) ->
create_instrument(Meter, Name, ?KIND_HISTOGRAM, Opts).
-spec create_observable_counter(Meter, Name, Opts) -> otel_instrument:t() when
Meter :: t(),
Name :: otel_instrument:name(),
- Opts :: opts().
+ Opts :: otel_instrument:opts().
create_observable_counter(Meter, Name, Opts) ->
create_instrument(Meter, Name, ?KIND_OBSERVABLE_COUNTER, Opts).
@@ -99,14 +96,14 @@ create_observable_counter(Meter, Name, Opts) ->
Name :: otel_instrument:name(),
Callback :: otel_instrument:callback(),
CallbackArgs :: otel_instrument:callback_args(),
- Opts :: opts().
+ Opts :: otel_instrument:opts().
create_observable_counter(Meter, Name, Callback, CallbackArgs, Opts) ->
create_instrument(Meter, Name, ?KIND_OBSERVABLE_COUNTER, Callback, CallbackArgs, Opts).
-spec create_observable_gauge(Meter, Name, Opts) -> otel_instrument:t() when
Meter :: t(),
Name :: otel_instrument:name(),
- Opts :: opts().
+ Opts :: otel_instrument:opts().
create_observable_gauge(Meter, Name, Opts) ->
create_instrument(Meter, Name, ?KIND_OBSERVABLE_GAUGE, Opts).
@@ -115,14 +112,14 @@ create_observable_gauge(Meter, Name, Opts) ->
Name :: otel_instrument:name(),
Callback :: otel_instrument:callback(),
CallbackArgs :: otel_instrument:callback_args(),
- Opts :: opts().
+ Opts :: otel_instrument:opts().
create_observable_gauge(Meter, Name, Callback, CallbackArgs, Opts) ->
create_instrument(Meter, Name, ?KIND_OBSERVABLE_GAUGE, Callback, CallbackArgs, Opts).
-spec create_observable_updowncounter(Meter, Name, Opts) -> otel_instrument:t() when
Meter :: t(),
Name :: otel_instrument:name(),
- Opts :: opts().
+ Opts :: otel_instrument:opts().
create_observable_updowncounter(Meter, Name, Opts) ->
create_instrument(Meter, Name, ?KIND_OBSERVABLE_UPDOWNCOUNTER, Opts).
@@ -131,7 +128,7 @@ create_observable_updowncounter(Meter, Name, Opts) ->
Name :: otel_instrument:name(),
Callback :: otel_instrument:callback(),
CallbackArgs :: otel_instrument:callback_args(),
- Opts :: opts().
+ Opts :: otel_instrument:opts().
create_observable_updowncounter(Meter, Name, Callback, CallbackArgs, Opts) ->
create_instrument(Meter, Name, ?KIND_OBSERVABLE_UPDOWNCOUNTER, Callback, CallbackArgs, Opts).
@@ -145,7 +142,7 @@ scope(Meter={Module, _}) ->
Meter :: t(),
Name :: otel_instrument:name(),
Kind :: otel_instrument:kind(),
- Opts :: opts().
+ Opts :: otel_instrument:opts().
create_instrument(Meter={Module, _}, Name, Kind, Opts) ->
Module:create_instrument(Meter, Name, Kind, Opts).
@@ -155,7 +152,7 @@ create_instrument(Meter={Module, _}, Name, Kind, Opts) ->
Kind :: otel_instrument:kind(),
Callback :: otel_instrument:callback(),
CallbackArgs :: otel_instrument:callback_args(),
- Opts :: opts().
+ Opts :: otel_instrument:opts().
create_instrument(Meter={Module, _}, Name, Kind, Callback, CallbackArgs, Opts) ->
Module:create_instrument(Meter, Name, Kind, Callback, CallbackArgs, Opts).
@@ -173,5 +170,8 @@ lookup_instrument(Meter={Module, _}, Name) ->
register_callback(Meter={Module, _}, Instruments, Callback, CallbackArgs) ->
Module:register_callback(Meter, Instruments, Callback, CallbackArgs).
+record(Meter={Module, _}, Name, Number) ->
+ Module:record(Meter, Name, Number).
+
record(Meter={Module, _}, Name, Number, Attributes) ->
Module:record(Meter, Name, Number, Attributes).
\ No newline at end of file
diff --git a/apps/opentelemetry_api_experimental/src/otel_meter_noop.erl b/apps/opentelemetry_api_experimental/src/otel_meter_noop.erl
index e9b01048..775822f9 100644
--- a/apps/opentelemetry_api_experimental/src/otel_meter_noop.erl
+++ b/apps/opentelemetry_api_experimental/src/otel_meter_noop.erl
@@ -41,9 +41,7 @@ register_callback(_Meter, _Instruments, _Callback, _CallbackArgs) ->
ok.
create_instrument(Meter, Name, Kind, Opts) ->
- otel_instrument:new(?MODULE, Meter, Kind, Name, maps:get(description, Opts, undefined),
- maps:get(unit, Opts, undefined)).
+ otel_instrument:new(?MODULE, Meter, Kind, Name, Opts).
create_instrument(Meter, Name, Kind, Callback, CallbackArgs, Opts) ->
- otel_instrument:new(?MODULE, Meter, Kind, Name, maps:get(description, Opts, undefined),
- maps:get(unit, Opts, undefined), Callback, CallbackArgs).
+ otel_instrument:new(?MODULE, Meter, Kind, Name, Callback, CallbackArgs, Opts).
diff --git a/apps/opentelemetry_api_experimental/src/otel_updown_counter.erl b/apps/opentelemetry_api_experimental/src/otel_updown_counter.erl
index f143a929..72063057 100644
--- a/apps/opentelemetry_api_experimental/src/otel_updown_counter.erl
+++ b/apps/opentelemetry_api_experimental/src/otel_updown_counter.erl
@@ -19,6 +19,7 @@
-module(otel_updown_counter).
-export([create/3,
+ add/2,
add/3,
add/4]).
@@ -28,14 +29,24 @@
-spec create(Meter, Name, Opts) -> otel_instrument:t() when
Meter :: otel_meter:t(),
Name :: otel_instrument:name(),
- Opts :: otel_meter:opts().
+ Opts :: otel_instrument:opts().
create(Meter, Name, Opts) ->
otel_meter:create_updown_counter(Meter, Name, Opts).
--spec add(otel_meter:t(), otel_instrument:name(), number(), opentelemetry:attributes_map()) -> ok.
-add(Meter, Name, Number, Attributes) ->
- otel_meter:record(Meter, Name, Number, Attributes).
+-spec add(otel_instrument:t(), number()) -> ok.
+add(Instrument=#instrument{module=Module}, Number) ->
+ Module:record(Instrument, Number).
--spec add(otel_instrument:t(), number(), opentelemetry:attributes_map()) -> ok.
+-spec add(
+ otel_meter:t() | otel_instrument:t(),
+ otel_instrument:name() | number(),
+ number() | opentelemetry:attributes_map()) -> ok.
add(Instrument=#instrument{module=Module}, Number, Attributes) ->
- Module:record(Instrument, Number, Attributes).
+ Module:record(Instrument, Number, Attributes);
+
+add(Meter, Name, Number) ->
+ otel_meter:record(Meter, Name, Number).
+
+-spec add(otel_meter:t(), otel_instrument:name(), number(), opentelemetry:attributes_map()) -> ok.
+add(Meter, Name, Number, Attributes) ->
+ otel_meter:record(Meter, Name, Number, Attributes).
\ No newline at end of file
diff --git a/apps/opentelemetry_experimental/include/otel_metrics.hrl b/apps/opentelemetry_experimental/include/otel_metrics.hrl
index 55e4e95d..fb7cb0b0 100644
--- a/apps/opentelemetry_experimental/include/otel_metrics.hrl
+++ b/apps/opentelemetry_experimental/include/otel_metrics.hrl
@@ -1,8 +1,6 @@
--define(DEFAULT_METER_PROVIDER, otel_meter_provider_default).
+-include_lib("opentelemetry_api_experimental/include/match_spec.hrl").
--type match_var() :: '_' | '$1' | '$2' | '$3' | '$4' | '$5' | '$6' | '$7' | '$8' | '$9'.
--type match_expr(A) :: undefined | match_var() | {const, A}.
--type match_spec(A) :: match_expr(A).
+-define(DEFAULT_METER_PROVIDER, otel_meter_provider_default).
-record(meter,
{
@@ -10,7 +8,7 @@
instrumentation_scope :: opentelemetry:instrumentation_scope() | undefined,
provider :: atom() | '_',
instruments_tab :: ets:table() | '_',
- view_aggregations_tab :: ets:table() | '_',
+ streams_tab :: ets:table() | '_',
metrics_tab :: ets:table() | '_'
}).
@@ -26,49 +24,47 @@
-record(sum_aggregation,
{
%% TODO: attributes should be a tuple of just the values, sorted by attribute name
- key :: otel_aggregation:key() | match_spec(otel_aggregation:key()) | {element, 2, '$_'},
- start_time :: integer() | match_spec(integer()),
- last_start_time :: integer() | match_spec(integer()),
- checkpoint :: number() | match_spec(number()) | {'+', '$2', '$3'} | {'+', '$3', '$4'},
- previous_checkpoint :: number() | match_spec(number()) | {'+', '$5', '$6'},
- int_value :: number() | match_spec(number()) | {'+', '$3', {const, number()}},
- float_value :: number() | match_spec(number()) | {'+', '$4', {const, number()}}
+ key :: otel_aggregation:key() | undefined,
+ start_time :: integer(), %% | undefined,
+ checkpoint :: number(), %% | undefined | {'+', '$2', '$3'} | {'+', '$3', '$4'},
+ previous_checkpoint :: number(), %% | undefined | {'+', '$5', '$6'},
+ int_value :: number(), %% | undefined | {'+', '$3', {const, number()}},
+ float_value :: number() %% | undefined | {'+', '$4', {const, number()}}
}).
-record(last_value_aggregation,
{
%% TODO: attributes should be a tuple of just the values, sorted by attribute name
- key :: otel_aggregation:key() | match_spec(otel_aggregation:key()),
- checkpoint :: number() | match_spec(number()),
- value :: number() | match_spec(number()),
- start_time :: integer() | match_spec(integer()),
- last_start_time :: integer() | match_spec(integer())
+ key :: otel_aggregation:key() | undefined,
+ checkpoint :: number(),
+ value :: number() | undefined,
+ start_time :: integer()
}).
-record(explicit_histogram_checkpoint,
{
- bucket_counts :: counters:counters_ref() | match_spec(counters:counters_ref()),
- min :: number() | match_spec(number()),
- max :: number() | match_spec(number()),
- sum :: number() | match_spec(number()),
- start_time :: integer() | match_spec(number())
+ bucket_counts :: match_spec(counters:counters_ref()) | undefined,
+ min :: number() | undefined,
+ max :: number() | undefined,
+ sum :: number() | undefined,
+ start_time :: number() | undefined
}).
-record(explicit_histogram_aggregation,
{
%% TODO: attributes should be a tuple of just the values, sorted by attribute name
- key :: otel_aggregation:key() | match_spec(otel_aggregation:key()),
- start_time :: integer() | {const, eqwalizer:dynamic()} | '$9' | '$2' | undefined,
+ key :: otel_aggregation:key() | undefined,
+ start_time :: integer() | undefined,
%% instrument_temporality :: otel_aggregation:temporality(),
%% default: [0.0, 5.0, 10.0, 25.0, 50.0, 75.0, 100.0, 250.0, 500.0, 1000.0]
- boundaries :: [float()] | match_spec([float()]),
- record_min_max :: boolean() | match_spec(boolean()),
- checkpoint :: #explicit_histogram_checkpoint{} | match_spec(#explicit_histogram_checkpoint{}) | {#explicit_histogram_checkpoint{}},
+ explicit_bucket_boundaries :: match_spec([float()]) | undefined,
+ record_min_max :: match_spec(boolean()) | undefined,
+ checkpoint :: match_spec(#explicit_histogram_checkpoint{}) | {#explicit_histogram_checkpoint{}} | undefined,
bucket_counts :: counters:counters_ref() | match_spec(undefined),
- min :: number() | infinity | match_spec(number()),
- max :: number() | match_spec(number()),
- sum :: number() | match_spec(number())
+ min :: infinity | number() | undefined,
+ max :: number() | undefined,
+ sum :: number() | undefined
}).
-record(datapoint,
@@ -96,16 +92,16 @@
-record(histogram_datapoint,
{
attributes :: opentelemetry:attributes_map(),
- start_time :: integer() | match_spec(integer()) | {const, eqwalizer:dynamic()},
+ start_time :: integer() | {const, eqwalizer:dynamic()} | undefined,
time :: integer(),
count :: number(),
- sum :: float() | match_spec(integer()),
+ sum :: float() | integer() | undefined,
bucket_counts :: list(),
- explicit_bounds :: [float()] | match_spec([float()]),
+ explicit_bounds :: match_spec([float()]) | undefined,
exemplars :: list(),
flags :: integer(),
- min :: integer() | infinity | match_spec(integer()),
- max :: integer() | match_spec(integer())
+ min :: infinity | integer() | undefined,
+ max :: integer() | undefined
}).
-record(histogram,
diff --git a/apps/opentelemetry_experimental/src/otel_aggregation.erl b/apps/opentelemetry_experimental/src/otel_aggregation.erl
index 451e7c9f..ffb17e34 100644
--- a/apps/opentelemetry_experimental/src/otel_aggregation.erl
+++ b/apps/opentelemetry_experimental/src/otel_aggregation.erl
@@ -1,7 +1,8 @@
-module(otel_aggregation).
-export([maybe_init_aggregate/4,
- default_mapping/0]).
+ default_mapping/0,
+ ets_lookup_element/4]).
-include_lib("opentelemetry_api_experimental/include/otel_metrics.hrl").
-include("otel_metrics.hrl").
@@ -11,7 +12,7 @@
-type t() :: otel_aggregation_drop:t() | otel_aggregation_sum:t() |
otel_aggregation_last_value:t() | otel_aggregation_histogram_explicit:t().
--type key() :: {atom(), opentelemetry:attributes_map(), reference()}.
+-type key() :: {atom(), opentelemetry:attributes_map(), reference() | undefined, number()}.
-type options() :: map().
@@ -21,40 +22,35 @@
%% Returns the aggregation's record as it is seen and updated by
%% the aggregation module in the metrics table.
--callback init(ViewAggregation, Attributes) -> Aggregation when
- ViewAggregation :: #view_aggregation{},
+-callback init(Stream, Attributes) -> Aggregation when
+ Stream :: #stream{},
Attributes :: opentelemetry:attributes_map(),
Aggregation :: t().
--callback aggregate(Table, ViewAggregation, Value, Attributes) -> boolean() when
+-callback aggregate(Table, Stream, Value, Attributes) -> boolean() when
Table :: ets:table(),
- ViewAggregation :: #view_aggregation{},
+ Stream :: #stream{},
Value :: number(),
Attributes :: opentelemetry:attributes_map().
--callback checkpoint(Table, ViewAggregation, CollectionStartTime) -> ok when
+-callback collect(Table, Stream, Generation) -> tuple() when
Table :: ets:table(),
- ViewAggregation :: #view_aggregation{},
- CollectionStartTime :: integer().
+ Stream :: #stream{},
+ Generation :: integer().
--callback collect(Table, ViewAggregation, CollectionStartTime) -> tuple() when
- Table :: ets:table(),
- ViewAggregation :: #view_aggregation{},
- CollectionStartTime :: integer().
-
-maybe_init_aggregate(MetricsTab, ViewAggregation=#view_aggregation{aggregation_module=AggregationModule,
+maybe_init_aggregate(MetricsTab, Stream=#stream{aggregation_module=AggregationModule,
attribute_keys=AttributeKeys},
Value, Attributes) ->
FilteredAttributes = filter_attributes(AttributeKeys, Attributes),
- case AggregationModule:aggregate(MetricsTab, ViewAggregation, Value, FilteredAttributes) of
+ case AggregationModule:aggregate(MetricsTab, Stream, Value, FilteredAttributes) of
true ->
ok;
false ->
%% entry doesn't exist, create it and rerun the aggregate function
- Metric = AggregationModule:init(ViewAggregation, FilteredAttributes),
+ Metric = AggregationModule:init(Stream, FilteredAttributes),
%% don't overwrite a possible concurrent measurement doing the same
_ = ets:insert_new(MetricsTab, Metric),
- AggregationModule:aggregate(MetricsTab, ViewAggregation, Value, FilteredAttributes)
+ AggregationModule:aggregate(MetricsTab, Stream, Value, FilteredAttributes)
end.
filter_attributes(undefined, Attributes) ->
@@ -69,4 +65,18 @@ default_mapping() ->
?KIND_HISTOGRAM => otel_aggregation_histogram_explicit,
?KIND_OBSERVABLE_GAUGE => otel_aggregation_last_value,
?KIND_UPDOWN_COUNTER => otel_aggregation_sum,
- ?KIND_OBSERVABLE_UPDOWNCOUNTER => otel_aggregation_sum}.
\ No newline at end of file
+ ?KIND_OBSERVABLE_UPDOWNCOUNTER => otel_aggregation_sum}.
+
+
+-if(?OTP_RELEASE >= 26).
+ets_lookup_element(Tab, Key, Pos, Default) ->
+ ets:lookup_element(Tab, Key, Pos, Default).
+-else.
+ets_lookup_element(Tab, Key, Pos, Default) ->
+ try
+ ets:lookup_element(Tab, Key, Pos)
+ catch
+ error:badarg ->
+ Default
+ end.
+-endif.
diff --git a/apps/opentelemetry_experimental/src/otel_aggregation_drop.erl b/apps/opentelemetry_experimental/src/otel_aggregation_drop.erl
index 9565ab12..c6a70415 100644
--- a/apps/opentelemetry_experimental/src/otel_aggregation_drop.erl
+++ b/apps/opentelemetry_experimental/src/otel_aggregation_drop.erl
@@ -4,7 +4,6 @@
-export([init/2,
aggregate/4,
- checkpoint/3,
collect/3]).
-include("otel_metrics.hrl").
@@ -19,8 +18,5 @@ init(_, _) ->
aggregate(_, _, _, _) ->
true.
-checkpoint(_, _, _) ->
- ok.
-
collect(_, _, _) ->
{}.
diff --git a/apps/opentelemetry_experimental/src/otel_aggregation_histogram_explicit.erl b/apps/opentelemetry_experimental/src/otel_aggregation_histogram_explicit.erl
index 0279db23..e5376e4c 100644
--- a/apps/opentelemetry_experimental/src/otel_aggregation_histogram_explicit.erl
+++ b/apps/opentelemetry_experimental/src/otel_aggregation_histogram_explicit.erl
@@ -21,7 +21,6 @@
-export([init/2,
aggregate/4,
- checkpoint/3,
collect/3]).
-include("otel_metrics.hrl").
@@ -42,106 +41,124 @@
%% can't use `ets:fun2ms' as it will shadow `Key' in the `fun' head
-if(?OTP_RELEASE >= 26).
-define(AGGREATE_MATCH_SPEC(Key, Value, BucketCounts),
- [
- {
- {explicit_histogram_aggregation,Key,'_','_','_','_','_','$1','$2','$3'},
- [],
- [{{
- explicit_histogram_aggregation,
- {element,2,'$_'},
- {element,3,'$_'},
- {element,4,'$_'},
- {element,5,'$_'},
- {element,6,'$_'},
- {const,BucketCounts},
- {min,'$1',{const,Value}},
- {max,'$2',{const,Value}},
- {'+','$3',{const,Value}}
+ [
+ {
+ {explicit_histogram_aggregation,Key,'_','_','_','_','_','$1','$2','$3'},
+ [],
+ [{{
+ explicit_histogram_aggregation,
+ {element,2,'$_'},
+ {element,3,'$_'},
+ {element,4,'$_'},
+ {element,5,'$_'},
+ {element,6,'$_'},
+ {const,BucketCounts},
+ {min,'$1',{const,Value}},
+ {max,'$2',{const,Value}},
+ {'+','$3',{const,Value}}
}}]
- }
- ]
-).
+ }
+ ]
+ ).
-else.
-define(AGGREATE_MATCH_SPEC(Key, Value, BucketCounts),
- [
- {
- {explicit_histogram_aggregation,Key,'_','_','_','_','_','$1','$2','$3'},
- [{'<','$2',{const,Value}},{'>','$1',{const,Value}}],
- [{{
- explicit_histogram_aggregation,
- {element,2,'$_'},
- {element,3,'$_'},
- {element,4,'$_'},
- {element,5,'$_'},
- {element,6,'$_'},
- {const,BucketCounts},
- {const,Value},
- {const,Value},
- {'+','$3',{const,Value}}
+ [
+ {
+ {explicit_histogram_aggregation,Key,'_','_','_','_','_','$1','$2','$3'},
+ [{'<','$2',{const,Value}},{'>','$1',{const,Value}}],
+ [{{
+ explicit_histogram_aggregation,
+ {element,2,'$_'},
+ {element,3,'$_'},
+ {element,4,'$_'},
+ {element,5,'$_'},
+ {element,6,'$_'},
+ {const,BucketCounts},
+ {const,Value},
+ {const,Value},
+ {'+','$3',{const,Value}}
}}]
- },
- {
- {explicit_histogram_aggregation,Key,'_','_','_','_','_','_','$1','$2'},
- [{'<','$1',{const,Value}}],
- [{{
- explicit_histogram_aggregation,
- {element,2,'$_'},
- {element,3,'$_'},
- {element,4,'$_'},
- {element,5,'$_'},
- {element,6,'$_'},
- {const,BucketCounts},
- {element,8,'$_'},
- {const,Value},
- {'+','$2',{const,Value}}
+ },
+ {
+ {explicit_histogram_aggregation,Key,'_','_','_','_','_','_','$1','$2'},
+ [{'<','$1',{const,Value}}],
+ [{{
+ explicit_histogram_aggregation,
+ {element,2,'$_'},
+ {element,3,'$_'},
+ {element,4,'$_'},
+ {element,5,'$_'},
+ {element,6,'$_'},
+ {const,BucketCounts},
+ {element,8,'$_'},
+ {const,Value},
+ {'+','$2',{const,Value}}
}}]
- },
- {
- {explicit_histogram_aggregation,Key,'_','_','_','_','_','$1','_','$2'},
- [{'>','$1',{const,Value}}],
- [{{
- explicit_histogram_aggregation,
- {element,2,'$_'},
- {element,3,'$_'},
- {element,4,'$_'},
- {element,5,'$_'},
- {element,6,'$_'},
- {const,BucketCounts},
- {const,Value},
- {element,9,'$_'},
- {'+','$2',{const,Value}}
+ },
+ {
+ {explicit_histogram_aggregation,Key,'_','_','_','_','_','$1','_','$2'},
+ [{'>','$1',{const,Value}}],
+ [{{
+ explicit_histogram_aggregation,
+ {element,2,'$_'},
+ {element,3,'$_'},
+ {element,4,'$_'},
+ {element,5,'$_'},
+ {element,6,'$_'},
+ {const,BucketCounts},
+ {const,Value},
+ {element,9,'$_'},
+ {'+','$2',{const,Value}}
}}]
- },
- {
- {explicit_histogram_aggregation,Key,'_','_','_','_','_','_','_','$1'},
- [],
- [{{
- explicit_histogram_aggregation,
- {element,2,'$_'},
- {element,3,'$_'},
- {element,4,'$_'},
- {element,5,'$_'},
- {element,6,'$_'},
- {const,BucketCounts},
- {element,8,'$_'},
- {element,9,'$_'},
- {'+','$1',{const,Value}}
+ },
+ {
+ {explicit_histogram_aggregation,Key,'_','_','_','_','_','_','_','$1'},
+ [],
+ [{{
+ explicit_histogram_aggregation,
+ {element,2,'$_'},
+ {element,3,'$_'},
+ {element,4,'$_'},
+ {element,5,'$_'},
+ {element,6,'$_'},
+ {const,BucketCounts},
+ {element,8,'$_'},
+ {element,9,'$_'},
+ {'+','$1',{const,Value}}
}}]
- }
- ]
-).
+ }
+ ]
+ ).
-endif.
-init(#view_aggregation{name=Name,
- reader=ReaderId,
- aggregation_options=Options}, Attributes) ->
- Key = {Name, Attributes, ReaderId},
- Boundaries = maps:get(boundaries, Options, ?DEFAULT_BOUNDARIES),
+%% ignore eqwalizer errors in functions using a lot of matchspecs
+-eqwalizer({nowarn_function, checkpoint/3}).
+-eqwalizer({nowarn_function, collect/3}).
+-dialyzer({nowarn_function, checkpoint/3}).
+-dialyzer({nowarn_function, aggregate/4}).
+-dialyzer({nowarn_function, collect/3}).
+-dialyzer({nowarn_function, maybe_delete_old_generation/4}).
+-dialyzer({nowarn_function, datapoint/2}).
+-dialyzer({nowarn_function, get_buckets/2}).
+-dialyzer({nowarn_function, counters_get/2}).
+
+init(#stream{name=Name,
+ reader=ReaderId,
+ aggregation_options=Options,
+ forget=Forget}, Attributes) ->
+ Generation = case Forget of
+ true ->
+ otel_metric_reader:checkpoint_generation(ReaderId);
+ _ ->
+ 0
+ end,
+ Key = {Name, Attributes, ReaderId, Generation},
+ ExplicitBucketBoundaries = maps:get(explicit_bucket_boundaries, Options, ?DEFAULT_BOUNDARIES),
RecordMinMax = maps:get(record_min_max, Options, true),
#explicit_histogram_aggregation{key=Key,
start_time=opentelemetry:timestamp(),
- boundaries=Boundaries,
- bucket_counts=new_bucket_counts(Boundaries),
+ explicit_bucket_boundaries=ExplicitBucketBoundaries,
+ bucket_counts=new_bucket_counts(ExplicitBucketBoundaries),
checkpoint=undefined,
record_min_max=RecordMinMax,
min=infinity, %% works because any atom is > any integer
@@ -149,39 +166,44 @@ init(#view_aggregation{name=Name,
sum=0
}.
-aggregate(Table, #view_aggregation{name=Name,
- reader=ReaderId,
- aggregation_options=Options}, Value, Attributes) ->
- Key = {Name, Attributes, ReaderId},
- Boundaries = maps:get(boundaries, Options, ?DEFAULT_BOUNDARIES),
- try ets:lookup_element(Table, Key, #explicit_histogram_aggregation.bucket_counts) of
+aggregate(Table, #stream{name=Name,
+ reader=ReaderId,
+ aggregation_options=Options,
+ forget=Forget}, Value, Attributes) ->
+ Generation = case Forget of
+ true ->
+ otel_metric_reader:checkpoint_generation(ReaderId);
+ _ ->
+ 0
+ end,
+ Key = {Name, Attributes, ReaderId, Generation},
+ ExplicitBucketBoundaries = maps:get(explicit_bucket_boundaries, Options, ?DEFAULT_BOUNDARIES),
+ case otel_aggregation:ets_lookup_element(Table, Key, #explicit_histogram_aggregation.bucket_counts, false) of
+ false ->
+ %% since we need the options to initialize a histogram `false' is
+ %% returned and `otel_metric_server' will initialize the histogram
+ false;
BucketCounts0 ->
BucketCounts = case BucketCounts0 of
undefined ->
- new_bucket_counts(Boundaries);
+ new_bucket_counts(ExplicitBucketBoundaries);
_ ->
BucketCounts0
end,
- BucketIdx = find_bucket(Boundaries, Value),
+ BucketIdx = find_bucket(ExplicitBucketBoundaries, Value),
counters:add(BucketCounts, BucketIdx, 1),
MS = ?AGGREATE_MATCH_SPEC(Key, Value, BucketCounts),
1 =:= ets:select_replace(Table, MS)
- catch
- error:badarg->
- %% since we need the options to initialize a histogram `false' is
- %% returned and `otel_metric_server' will initialize the histogram
- false
end.
--dialyzer({nowarn_function, checkpoint/3}).
-checkpoint(Tab, #view_aggregation{name=Name,
- reader=ReaderId,
- temporality=?TEMPORALITY_DELTA}, CollectionStartTime) ->
- MS = [{#explicit_histogram_aggregation{key='$1',
+checkpoint(Tab, #stream{name=Name,
+ reader=ReaderId,
+ temporality=?TEMPORALITY_DELTA}, Generation) ->
+ MS = [{#explicit_histogram_aggregation{key={Name, '$1', ReaderId, Generation},
start_time='$9',
- boundaries='$2',
+ explicit_bucket_boundaries='$2',
record_min_max='$3',
checkpoint='_',
bucket_counts='$5',
@@ -189,47 +211,80 @@ checkpoint(Tab, #view_aggregation{name=Name,
max='$7',
sum='$8'
},
- [{'=:=', {element, 1, '$1'}, {const, Name}},
- {'=:=', {element, 3, '$1'}, {const, ReaderId}}],
- [{#explicit_histogram_aggregation{key='$1',
- start_time={const, CollectionStartTime},
- boundaries='$2',
+ [],
+ [{#explicit_histogram_aggregation{key={{{const, Name}, '$1', {const, ReaderId}, {const, Generation}}},
+ start_time='$9',
+ explicit_bucket_boundaries='$2',
record_min_max='$3',
checkpoint={#explicit_histogram_checkpoint{bucket_counts='$5',
min='$6',
max='$7',
sum='$8',
start_time='$9'}},
- bucket_counts={const, undefined},
- min=infinity,
- max=?MIN_DOUBLE,
- sum=0}}]}],
+ bucket_counts='$5',
+ min='$6',
+ max='$7',
+ sum='$8'}}]}],
_ = ets:select_replace(Tab, MS),
ok;
-checkpoint(_Tab, _, _CollectionStartTime) ->
+checkpoint(_Tab, _, _) ->
%% no good way to checkpoint the `counters' without being out of sync with
%% min/max/sum, so may as well just collect them in `collect', which will
%% also be out of sync, but best we can do right now
-
+
ok.
-collect(Tab, #view_aggregation{name=Name,
- reader=ReaderPid,
- temporality=Temporality}, CollectionStartTime) ->
- Select = [{'$1',
- [{'==', Name, {element, 1, {element, 2, '$1'}}},
- {'==', ReaderPid, {element, 3, {element, 2, '$1'}}}],
- ['$1']}],
+collect(Tab, Stream=#stream{name=Name,
+ reader=ReaderId,
+ temporality=Temporality,
+ forget=Forget}, Generation0) ->
+ CollectionStartTime = opentelemetry:timestamp(),
+ Generation = case Forget of
+ true ->
+ Generation0;
+ _ ->
+ 0
+ end,
+
+ checkpoint(Tab, Stream, Generation),
+
+ Select = [{#explicit_histogram_aggregation{key={Name, '$1', ReaderId, Generation},
+ start_time='$2',
+ explicit_bucket_boundaries='$3',
+ record_min_max='$4',
+ checkpoint='$5',
+ bucket_counts='$6',
+ min='$7',
+ max='$8',
+ sum='$9'}, [], ['$_']}],
AttributesAggregation = ets:select(Tab, Select),
- #histogram{datapoints=[datapoint(CollectionStartTime, SumAgg) || SumAgg <- AttributesAggregation],
- aggregation_temporality=Temporality}.
+ Result = #histogram{datapoints=[datapoint(CollectionStartTime, SumAgg) || SumAgg <- AttributesAggregation],
+ aggregation_temporality=Temporality},
+
+ %% would be nice to do this in the reader so it's not duplicated in each aggregator
+ maybe_delete_old_generation(Tab, Name, ReaderId, Generation),
+
+ Result.
%%
+%% 0 means it is either cumulative or the first generation with nothing older to delete
+maybe_delete_old_generation(_Tab, _Name, _ReaderId, 0) ->
+ ok;
+maybe_delete_old_generation(Tab, Name, ReaderId, Generation) ->
+ %% delete all older than the Generation instead of just the previous in case
+ %% a crash had happened between incrementing the Generation counter and doing
+ %% the delete in a previous collection cycle
+ %% eqwalizer:ignore matchspecs mess with the typing
+ Select = [{#explicit_histogram_aggregation{key={Name, '_', ReaderId, '$1'}, _='_'},
+ [{'<', '$1', {const, Generation}}],
+ [true]}],
+ ets:select_delete(Tab, Select).
+
datapoint(CollectionStartTime, #explicit_histogram_aggregation{
- key={_, Attributes, _},
- boundaries=Boundaries,
+ key={_, Attributes, _, _},
+ explicit_bucket_boundaries=Boundaries,
start_time=StartTime,
checkpoint=undefined,
bucket_counts=BucketCounts,
@@ -252,8 +307,8 @@ datapoint(CollectionStartTime, #explicit_histogram_aggregation{
max=Max
};
datapoint(CollectionStartTime, #explicit_histogram_aggregation{
- key={_, Attributes, _},
- boundaries=Boundaries,
+ key={_, Attributes, _, _},
+ explicit_bucket_boundaries=Boundaries,
checkpoint=#explicit_histogram_checkpoint{bucket_counts=BucketCounts,
min=Min,
max=Max,
diff --git a/apps/opentelemetry_experimental/src/otel_aggregation_last_value.erl b/apps/opentelemetry_experimental/src/otel_aggregation_last_value.erl
index d52741f7..ab236662 100644
--- a/apps/opentelemetry_experimental/src/otel_aggregation_last_value.erl
+++ b/apps/opentelemetry_experimental/src/otel_aggregation_last_value.erl
@@ -21,7 +21,6 @@
-export([init/2,
aggregate/4,
- checkpoint/3,
collect/3]).
-include_lib("opentelemetry_api_experimental/include/otel_metrics.hrl").
@@ -31,59 +30,77 @@
-export_type([t/0]).
+%% ignore eqwalizer errors in functions using a lot of matchspecs
+-eqwalizer({nowarn_function, checkpoint/3}).
+-eqwalizer({nowarn_function, collect/3}).
+-dialyzer({nowarn_function, checkpoint/3}).
+-dialyzer({nowarn_function, aggregate/4}).
+-dialyzer({nowarn_function, collect/3}).
+-dialyzer({nowarn_function, maybe_delete_old_generation/4}).
+-dialyzer({nowarn_function, datapoint/2}).
+
-include_lib("opentelemetry_api/include/gradualizer.hrl").
-include("otel_view.hrl").
-init(#view_aggregation{name=Name,
- reader=ReaderId,
- aggregation_options=_Options}, Attributes) ->
- Key = {Name, Attributes, ReaderId},
+init(#stream{name=Name,
+ reader=ReaderId,
+ aggregation_options=_Options,
+ forget=Forget}, Attributes) ->
+ Generation = case Forget of
+ true ->
+ otel_metric_reader:checkpoint_generation(ReaderId);
+ _ ->
+ 0
+ end,
+ Key = {Name, Attributes, ReaderId, Generation},
#last_value_aggregation{key=Key,
start_time=opentelemetry:timestamp(),
- value=undefined}.
+ value=undefined,
+ %% not needed or used, but makes eqwalizer happy
+ checkpoint=0}.
-aggregate(Tab, ViewAggregation=#view_aggregation{name=Name,
- reader=ReaderId}, Value, Attributes) ->
- Key = {Name, Attributes, ReaderId},
+aggregate(Tab, Stream=#stream{name=Name,
+ reader=ReaderId,
+ forget=Forget}, Value, Attributes) ->
+ Generation = case Forget of
+ true ->
+ otel_metric_reader:checkpoint_generation(ReaderId);
+ _ ->
+ 0
+ end,
+ Key = {Name, Attributes, ReaderId, Generation},
case ets:update_element(Tab, Key, {#last_value_aggregation.value, Value}) of
true ->
true;
false ->
- Metric = init(ViewAggregation, Attributes),
+ Metric = init(Stream, Attributes),
ets:insert(Tab, ?assert_type((?assert_type(Metric, #last_value_aggregation{}))#last_value_aggregation{value=Value}, tuple()))
end.
--dialyzer({nowarn_function, checkpoint/3}).
-checkpoint(Tab, #view_aggregation{name=Name,
- reader=ReaderId,
- temporality=?TEMPORALITY_DELTA}, CollectionStartTime) ->
- MS = [{#last_value_aggregation{key='$1',
+checkpoint(Tab, #stream{name=Name,
+ reader=ReaderId,
+ temporality=?TEMPORALITY_DELTA}, Generation) ->
+ MS = [{#last_value_aggregation{key={Name, '$1', ReaderId, Generation},
start_time='$3',
- last_start_time='_',
checkpoint='_',
value='$2'},
- [{'=:=', {element, 1, '$1'}, {const, Name}},
- {'=:=', {element, 3, '$1'}, {const, ReaderId}}],
- [{#last_value_aggregation{key='$1',
- start_time={const, CollectionStartTime},
- last_start_time='$3',
+ [],
+ [{#last_value_aggregation{key={{Name, '$1', {const, ReaderId}, {const, Generation}}},
+ start_time='$3',
checkpoint='$2',
value='$2'}}]}],
_ = ets:select_replace(Tab, MS),
ok;
-checkpoint(Tab, #view_aggregation{name=Name,
- reader=ReaderId}, _CollectionStartTime) ->
- MS = [{#last_value_aggregation{key='$1',
+checkpoint(Tab, #stream{name=Name,
+ reader=ReaderId}, Generation) ->
+ MS = [{#last_value_aggregation{key={Name, '$1', ReaderId, Generation},
start_time='$3',
- last_start_time='_',
checkpoint='_',
value='$2'},
- [{'=:=', {element, 1, '$1'}, {const, Name}},
- {'=:=', {element, 3, '$1'}, {const, ReaderId}}],
- [{#last_value_aggregation{key='$1',
+ [],
+ [{#last_value_aggregation{key={{{const, Name}, '$1', {const, ReaderId}, {const, Generation}}},
start_time='$3',
- last_start_time='$3',
checkpoint='$2',
value='$2'}}]}],
_ = ets:select_replace(Tab, MS),
@@ -91,28 +108,51 @@ checkpoint(Tab, #view_aggregation{name=Name,
ok.
-collect(Tab, #view_aggregation{name=Name,
- reader=ReaderPid}, CollectionStartTime) ->
- Select = [{'$1',
- [{'=:=', Name, {element, 1, {element, 2, '$1'}}},
- {'=:=', ReaderPid, {element, 3, {element, 2, '$1'}}}],
- ['$1']}],
+collect(Tab, Stream=#stream{name=Name,
+ reader=ReaderId,
+ forget=Forget}, Generation0) ->
+ CollectionStartTime = opentelemetry:timestamp(),
+ Generation = case Forget of
+ true ->
+ Generation0;
+ _ ->
+ 0
+ end,
+
+ checkpoint(Tab, Stream, Generation),
+
+ Select = [{#last_value_aggregation{key={Name, '_', ReaderId, Generation},
+ _='_'}, [], ['$_']}],
AttributesAggregation = ets:select(Tab, Select),
- #gauge{datapoints=[datapoint(CollectionStartTime, LastValueAgg) ||
- LastValueAgg <- AttributesAggregation]}.
+ Result = #gauge{datapoints=[datapoint(CollectionStartTime, LastValueAgg) ||
+ LastValueAgg <- AttributesAggregation]},
+
+ %% would be nice to do this in the reader so it's not duplicated in each aggregator
+ maybe_delete_old_generation(Tab, Name, ReaderId, Generation),
+
+ Result.
%%
-datapoint(CollectionStartTime, #last_value_aggregation{key={_, Attributes, _},
- last_start_time=StartTime,
+%% 0 means it is either cumulative or the first generation with nothing older to delete
+maybe_delete_old_generation(_Tab, _Name, _ReaderId, 0) ->
+ ok;
+maybe_delete_old_generation(Tab, Name, ReaderId, Generation) ->
+ %% delete all older than the Generation instead of just the previous in case
+ %% a crash had happened between incrementing the Generation counter and doing
+ %% the delete in a previous collection cycle
+ %% eqwalizer:ignore matchspecs mess with the typing
+ Select = [{#last_value_aggregation{key={Name, '_', ReaderId, '$1'}, _='_'},
+ [{'<', '$1', {const, Generation}}],
+ [true]}],
+ ets:select_delete(Tab, Select).
+
+datapoint(CollectionStartTime, #last_value_aggregation{key={_, Attributes, _, _},
+ start_time=StartTime,
checkpoint=Checkpoint}) ->
#datapoint{attributes=Attributes,
- %% `start_time' being set to `last_start_time' causes complaints
- %% because `last_start_time' has matchspec values in its typespec
- %% eqwalizer:ignore see above
start_time=StartTime,
time=CollectionStartTime,
- %% eqwalizer:ignore more matchspec fun
value=Checkpoint,
exemplars=[],
flags=0}.
diff --git a/apps/opentelemetry_experimental/src/otel_aggregation_sum.erl b/apps/opentelemetry_experimental/src/otel_aggregation_sum.erl
index aed912cb..2787a874 100644
--- a/apps/opentelemetry_experimental/src/otel_aggregation_sum.erl
+++ b/apps/opentelemetry_experimental/src/otel_aggregation_sum.erl
@@ -21,7 +21,6 @@
-export([init/2,
aggregate/4,
- checkpoint/3,
collect/3]).
-include("otel_metrics.hrl").
@@ -32,23 +31,45 @@
-export_type([t/0]).
-init(#view_aggregation{name=Name,
- reader=ReaderId}, Attributes) ->
- Key = {Name, Attributes, ReaderId},
+%% ignore eqwalizer errors in functions using a lot of matchspecs
+-eqwalizer({nowarn_function, checkpoint/3}).
+-eqwalizer({nowarn_function, aggregate/4}).
+-dialyzer({nowarn_function, checkpoint/3}).
+-dialyzer({nowarn_function, aggregate/4}).
+-dialyzer({nowarn_function, collect/3}).
+-dialyzer({nowarn_function, maybe_delete_old_generation/4}).
+-dialyzer({nowarn_function, datapoint/5}).
+
+init(#stream{name=Name,
+ reader=ReaderId,
+ forget=Forget}, Attributes) ->
+ Generation = case Forget of
+ true ->
+ otel_metric_reader:checkpoint_generation(ReaderId);
+ _ ->
+ 0
+ end,
+ StartTime = opentelemetry:timestamp(),
+ Key = {Name, Attributes, ReaderId, Generation},
#sum_aggregation{key=Key,
- start_time=opentelemetry:timestamp(),
+ start_time=StartTime,
checkpoint=0, %% 0 value is never reported but gets copied to previous_checkpoint
- %% which is used to add/subtract for conversion of temporality
+ %% which is used to add/subtract for conversion of temporality
previous_checkpoint=0,
int_value=0,
float_value=0.0}.
-aggregate(Tab, #view_aggregation{name=Name,
- reader=ReaderId,
- is_monotonic=IsMonotonic}, Value, Attributes)
- when is_integer(Value) andalso
- ((IsMonotonic andalso Value >= 0) orelse not IsMonotonic) ->
- Key = {Name, Attributes, ReaderId},
+aggregate(Tab, #stream{name=Name,
+ reader=ReaderId,
+ forget=Forget}, Value, Attributes)
+ when is_integer(Value) ->
+ Generation = case Forget of
+ true ->
+ otel_metric_reader:checkpoint_generation(ReaderId);
+ _ ->
+ 0
+ end,
+ Key = {Name, Attributes, ReaderId, Generation},
try
_ = ets:update_counter(Tab, Key, {#sum_aggregation.int_value, Value}),
true
@@ -64,14 +85,18 @@ aggregate(Tab, #view_aggregation{name=Name,
%% true
false
end;
-aggregate(Tab, #view_aggregation{name=Name,
- reader=ReaderId,
- is_monotonic=IsMonotonic}, Value, Attributes)
- when (IsMonotonic andalso Value >= 0.0) orelse not IsMonotonic ->
- Key = {Name, Attributes, ReaderId},
+aggregate(Tab, #stream{name=Name,
+ reader=ReaderId,
+ forget=Forget}, Value, Attributes) ->
+ Generation = case Forget of
+ true ->
+ otel_metric_reader:checkpoint_generation(ReaderId);
+ _ ->
+ 0
+ end,
+ Key = {Name, Attributes, ReaderId, Generation},
MS = [{#sum_aggregation{key=Key,
start_time='$1',
- last_start_time='$5',
checkpoint='$2',
previous_checkpoint='$6',
int_value='$3',
@@ -79,87 +104,75 @@ aggregate(Tab, #view_aggregation{name=Name,
[],
[{#sum_aggregation{key={element, 2, '$_'},
start_time='$1',
- last_start_time='$5',
checkpoint='$2',
previous_checkpoint='$6',
int_value='$3',
float_value={'+', '$4', {const, Value}}}}]}],
- 1 =:= ets:select_replace(Tab, MS);
-aggregate(_Tab, #view_aggregation{name=_Name,
- is_monotonic=_IsMonotonic}, _Value, _) ->
- false.
+ 1 =:= ets:select_replace(Tab, MS).
--dialyzer({nowarn_function, checkpoint/3}).
-checkpoint(Tab, #view_aggregation{name=Name,
- reader=ReaderPid,
- temporality=?TEMPORALITY_DELTA}, CollectionStartTime) ->
- MS = [{#sum_aggregation{key='$1',
+checkpoint(Tab, #stream{name=Name,
+ reader=ReaderId,
+ temporality=?TEMPORALITY_DELTA}, Generation) ->
+ MS = [{#sum_aggregation{key={Name, '$1', ReaderId, Generation},
start_time='$4',
- last_start_time='_',
checkpoint='$5',
previous_checkpoint='_',
int_value='$2',
float_value='$3'},
- [{'=:=', {element, 1, '$1'}, {const, Name}},
- {'=:=', {element, 3, '$1'}, {const, ReaderPid}},
- {'=:=', '$3', {const, 0.0}}],
- [{#sum_aggregation{key='$1',
- start_time={const, CollectionStartTime},
- last_start_time='$4',
+ [{'=:=', '$3', {const, 0.0}}],
+ [{#sum_aggregation{key={{Name, '$1', {const, ReaderId}, {const, Generation}}},
+ start_time='$4',
checkpoint='$2',
previous_checkpoint='$5',
int_value=0,
float_value=0.0}}]},
- {#sum_aggregation{key='$1',
+ {#sum_aggregation{key={Name, '$1', ReaderId, Generation},
start_time='$4',
- last_start_time='_',
checkpoint='$5',
previous_checkpoint='_',
int_value='$2',
float_value='$3'},
- [{'=:=', {element, 1, '$1'}, {const, Name}},
- {'=:=', {element, 3, '$1'}, {const, ReaderPid}}],
- [{#sum_aggregation{key='$1',
- start_time={const, CollectionStartTime},
- last_start_time='$4',
+ [],
+ [{#sum_aggregation{key={{Name, '$1', {const, ReaderId}, {const, Generation}}},
+ start_time='$4',
checkpoint={'+', '$2', '$3'},
previous_checkpoint='$5',
int_value=0,
float_value=0.0}}]}],
_ = ets:select_replace(Tab, MS),
ok;
-checkpoint(Tab, #view_aggregation{name=Name,
- reader=ReaderPid,
- temporality=?TEMPORALITY_CUMULATIVE}, _CollectionStartTime) ->
- MS = [{#sum_aggregation{key='$1',
+checkpoint(Tab, #stream{name=Name,
+ reader=ReaderId,
+ forget=Forget,
+ temporality=?TEMPORALITY_CUMULATIVE}, Generation0) ->
+ Generation = case Forget of
+ true ->
+ Generation0;
+ _ ->
+ 0
+ end,
+ MS = [{#sum_aggregation{key={Name, '$1', ReaderId, Generation},
start_time='$2',
- last_start_time='_',
checkpoint='$5',
previous_checkpoint='$6',
int_value='$3',
float_value='$4'},
- [{'=:=', {element, 1, '$1'}, {const, Name}},
- {'=:=', {element, 3, '$1'}, {const, ReaderPid}},
- {'=:=', '$4', {const, 0.0}}],
- [{#sum_aggregation{key='$1',
+ [{'=:=', '$4', {const, 0.0}}],
+ [{#sum_aggregation{key={{Name, '$1', {const, ReaderId}, {const, Generation}}},
start_time='$2',
- last_start_time='$2',
checkpoint='$3',
previous_checkpoint={'+', '$5', '$6'},
int_value=0,
float_value=0.0}}]},
- {#sum_aggregation{key='$1',
+ {#sum_aggregation{key={Name, '$1', ReaderId, Generation},
start_time='$2',
- last_start_time='_',
checkpoint='$5',
previous_checkpoint='$6',
int_value='$3',
float_value='$4'},
- [{'=:=', {element, 1, '$1'}, {const, Name}},
- {'=:=', {element, 3, '$1'}, {const, ReaderPid}}],
- [{#sum_aggregation{key='$1',
+ [],
+ [{#sum_aggregation{key={{Name, '$1', {const, ReaderId}, {const, Generation}}},
start_time='$2',
- last_start_time='$2',
checkpoint={'+', '$3', '$4'},
previous_checkpoint={'+', '$5', '$6'},
int_value=0,
@@ -167,57 +180,92 @@ checkpoint(Tab, #view_aggregation{name=Name,
_ = ets:select_replace(Tab, MS),
ok.
-collect(Tab, #view_aggregation{name=Name,
- reader=ReaderId,
- instrument=#instrument{temporality=InstrumentTemporality},
- temporality=Temporality,
- is_monotonic=IsMonotonic}, CollectionStartTime) ->
- Select = [{'$1',
- [{'=:=', Name, {element, 1, {element, 2, '$1'}}},
- {'=:=', ReaderId, {element, 3, {element, 2, '$1'}}}],
- ['$1']}],
+collect(Tab, Stream=#stream{name=Name,
+ reader=ReaderId,
+ instrument=#instrument{temporality=InstrumentTemporality},
+ temporality=Temporality,
+ is_monotonic=IsMonotonic,
+ forget=Forget}, Generation0) ->
+ CollectionStartTime = opentelemetry:timestamp(),
+ Generation = case Forget of
+ true ->
+ Generation0;
+ _ ->
+ 0
+ end,
+
+ checkpoint(Tab, Stream, Generation),
+
+ %% eqwalizer:ignore matchspecs mess with the typing
+ Select = [{#sum_aggregation{key={Name, '_', ReaderId, Generation}, _='_'}, [], ['$_']}],
AttributesAggregation = ets:select(Tab, Select),
- #sum{aggregation_temporality=Temporality,
- is_monotonic=IsMonotonic,
- datapoints=[datapoint(CollectionStartTime, InstrumentTemporality, Temporality, SumAgg) || SumAgg <- AttributesAggregation]}.
+ Result = #sum{aggregation_temporality=Temporality,
+ is_monotonic=IsMonotonic,
+ datapoints=[datapoint(Tab, CollectionStartTime, InstrumentTemporality, Temporality, SumAgg) || SumAgg <- AttributesAggregation]},
-datapoint(CollectionStartTime, Temporality, Temporality, #sum_aggregation{key={_, Attributes, _},
- last_start_time=StartTime,
- checkpoint=Value}) ->
+ %% would be nice to do this in the reader so it's not duplicated in each aggregator
+ maybe_delete_old_generation(Tab, Name, ReaderId, Generation),
+
+ Result.
+
+%% 0 means it is either cumulative or the first generation with nothing older to delete
+maybe_delete_old_generation(_Tab, _Name, _ReaderId, 0) ->
+ ok;
+maybe_delete_old_generation(Tab, Name, ReaderId, Generation) ->
+ %% delete all older than the Generation instead of just the previous in case
+ %% a crash had happened between incrementing the Generation counter and doing
+ %% the delete in a previous collection cycle
+ %% eqwalizer:ignore matchspecs mess with the typing
+ Select = [{#sum_aggregation{key={Name, '_', ReaderId, '$1'}, _='_'},
+ [{'<', '$1', {const, Generation}}],
+ [true]}],
+ ets:select_delete(Tab, Select).
+
+%% nothing special to do if the instrument temporality and view temporality are the same
+datapoint(_Tab, CollectionStartTime, Temporality, Temporality, #sum_aggregation{key={_, Attributes, _, _},
+ start_time=StartTime,
+ checkpoint=Value}) ->
#datapoint{
- %% eqwalizer:ignore something
attributes=Attributes,
- %% eqwalizer:ignore something
start_time=StartTime,
time=CollectionStartTime,
- %% eqwalizer:ignore something
value=Value,
exemplars=[],
flags=0
};
-datapoint(CollectionStartTime, _, ?TEMPORALITY_CUMULATIVE, #sum_aggregation{key={_, Attributes, _},
- last_start_time=StartTime,
- previous_checkpoint=PreviousCheckpoint,
- checkpoint=Value}) ->
+%% converting an instrument of delta temporality to cumulative means we need to add the
+%% previous value to the current because the actual value is only a delta
+datapoint(_Tab, Time, _, ?TEMPORALITY_CUMULATIVE, #sum_aggregation{key={_Name, Attributes, _ReaderId, _Generation},
+ start_time=StartTime,
+ previous_checkpoint=PreviousCheckpoint,
+ checkpoint=Value}) ->
#datapoint{
- %% eqwalizer:ignore something
attributes=Attributes,
- %% eqwalizer:ignore something
start_time=StartTime,
- time=CollectionStartTime,
+ time=Time,
value=Value + PreviousCheckpoint,
exemplars=[],
flags=0
};
-datapoint(CollectionStartTime, _, ?TEMPORALITY_DELTA, #sum_aggregation{key={_, Attributes, _},
- last_start_time=StartTime,
- previous_checkpoint=PreviousCheckpoint,
- checkpoint=Value}) ->
+%% converting an instrument of cumulative temporality to delta means subtracting the
+%% value of the previous collection, if one exists.
+%% because we use a generation counter to reset delta aggregates the previous value
+%% has to be looked up with an ets lookup of the previous generation
+datapoint(Tab, Time, _, ?TEMPORALITY_DELTA, #sum_aggregation{key={Name, Attributes, ReaderId, Generation},
+ start_time=StartTime,
+ checkpoint=Value}) ->
+ %% converting from cumulative to delta by grabbing the last generation and subtracting it
+ %% can't use `previous_checkpoint' because with delta metrics have their generation changed
+ %% at each collection
+ PreviousCheckpoint =
+ otel_aggregation:ets_lookup_element(Tab, {Name, Attributes, ReaderId, Generation-1},
+ #sum_aggregation.checkpoint, 0),
#datapoint{
attributes=Attributes,
start_time=StartTime,
- time=CollectionStartTime,
+ time=Time,
value=Value - PreviousCheckpoint,
exemplars=[],
flags=0
}.
+
diff --git a/apps/opentelemetry_experimental/src/otel_meter_default.erl b/apps/opentelemetry_experimental/src/otel_meter_default.erl
index 83d3772c..8edb1c32 100644
--- a/apps/opentelemetry_experimental/src/otel_meter_default.erl
+++ b/apps/opentelemetry_experimental/src/otel_meter_default.erl
@@ -25,19 +25,22 @@
register_callback/4,
scope/1]).
--export([record/3,
+-export([record/2,
+ record/3,
record/4]).
-include_lib("kernel/include/logger.hrl").
-include_lib("opentelemetry_api_experimental/include/otel_metrics.hrl").
-include("otel_metrics.hrl").
--spec create_instrument(otel_meter:t(), otel_instrument:name(), otel_instrument:kind(), otel_meter:opts()) -> otel_instrument:t().
+-define(INSTRUMENT_NAME_REGEX, "^[A-Za-z]+[A-Za-z0-9/_.\-]{0,254}$").
+
+-spec create_instrument(otel_meter:t(), otel_instrument:name(), otel_instrument:kind(), otel_instrument:opts()) -> otel_instrument:t().
create_instrument(Meter, Name, Kind, Opts) ->
validate_name(Name),
+ ValidatedOpts = validate_opts(Name, Kind, Opts),
Instrument=#instrument{meter={_, #meter{provider=Provider}}} =
- otel_instrument:new(?MODULE, Meter, Kind, Name, maps:get(description, Opts, undefined),
- maps:get(unit, Opts, undefined)),
+ otel_instrument:new(?MODULE, Meter, Kind, Name, ValidatedOpts),
_ = otel_meter_server:add_instrument(Provider, Instrument),
Instrument.
@@ -50,12 +53,12 @@ lookup_instrument(Meter={_, #meter{instruments_tab=Tab}}, Name) ->
undefined
end.
--spec create_instrument(otel_meter:t(), otel_instrument:name(), otel_instrument:kind(), otel_instrument:callback(), otel_instrument:callback_args(), otel_meter:opts()) -> otel_instrument:t().
+-spec create_instrument(otel_meter:t(), otel_instrument:name(), otel_instrument:kind(), otel_instrument:callback(), otel_instrument:callback_args(), otel_instrument:opts()) -> otel_instrument:t().
create_instrument(Meter, Name, Kind, Callback, CallbackArgs, Opts) ->
validate_name(Name),
+ ValidatedOpts = validate_opts(Name, Kind, Opts),
Instrument=#instrument{meter={_, #meter{provider=Provider}}} =
- otel_instrument:new(?MODULE, Meter, Kind, Name, maps:get(description, Opts, undefined),
- maps:get(unit, Opts, undefined), Callback, CallbackArgs),
+ otel_instrument:new(?MODULE, Meter, Kind, Name, Callback, CallbackArgs, ValidatedOpts),
_ = otel_meter_server:add_instrument(Provider, Instrument),
Instrument.
@@ -68,24 +71,58 @@ scope({_, #meter{instrumentation_scope=Scope}}) ->
Scope.
validate_name(Name) when is_atom(Name) ->
- Re = "^[A-Za-z]+[A-Za-z0-9_.\-]{0,62}$",
NameString = atom_to_list(Name),
- case re:run(NameString, Re, [{capture, none}]) of
+ case re:run(NameString, ?INSTRUMENT_NAME_REGEX, [{capture, none}]) of
match ->
ok;
nomatch ->
- ?LOG_ERROR("Invalid instrument name, should be an atom matching '~s', but got '~s'", [NameString]),
+ ?LOG_ERROR("Invalid instrument name, should be an atom matching '~s', but got '~s'", [?INSTRUMENT_NAME_REGEX, NameString]),
ok
end;
validate_name(Name) ->
- ?LOG_ERROR("Invalid instrument name, should be an atom matching '~s', but got ~p", [Name]),
+ ?LOG_ERROR("Invalid instrument name, should be an atom matching '~s', but got ~p", [?INSTRUMENT_NAME_REGEX, Name]),
ok.
+
+validate_opts(Name, Kind, #{advisory_params := AdvisoryParams} = Opts) ->
+ % switch to maps:filtermap when we support only 24 onwards
+ ValidatedAdvisoryParams = maps:from_list(lists:filtermap(fun({Key, Value}) -> validate_advisory_param(Name, Kind, Key, Value) end, maps:to_list(AdvisoryParams))),
+ maps:put(advisory_params, ValidatedAdvisoryParams, Opts);
+validate_opts(_Name, _Kind, Opts) ->
+ Opts.
+
+validate_advisory_param(Name, ?KIND_HISTOGRAM, explicit_bucket_boundaries, Value) ->
+ validate_explicit_bucket_boundaries(Name, Value);
+validate_advisory_param(Name, _Kind, explicit_bucket_boundaries, _Value) ->
+ ?LOG_WARNING("[instrument '~s'] 'explicit_bucket_boundaries' advisory parameter is allowed only for histograms, ignoring", [Name]),
+ false;
+validate_advisory_param(Name, _Kind, Opt, _Value) ->
+ ?LOG_WARNING("[instrument '~s'] '~s' advisory parameter is not supported, ignoring", [Name, Opt]),
+ false.
+
+validate_explicit_bucket_boundaries(Name, [_ | _] = Value) ->
+ case lists:all(fun is_number/1, Value) and (lists:sort(Value) == Value) of
+ true ->
+ {true, {explicit_bucket_boundaries, Value}};
+ false ->
+ ?LOG_WARNING("[instrument '~s'] 'explicit_bucket_boundaries' advisory parameter should be a not empty ordered list of numbers, got ~p", [Name, Value]),
+ false
+ end;
+validate_explicit_bucket_boundaries(Name, Value) ->
+ ?LOG_WARNING("[instrument '~s'] 'explicit_bucket_boundaries' advisory parameter should be a not empty ordered list of numbers, got ~p", [Name, Value]),
+ false.
%%
-record(Instrument=#instrument{meter={_, #meter{view_aggregations_tab=ViewAggregationTab,
+
+record(Instrument=#instrument{}, Number) ->
+ record(Instrument, Number, #{}).
+
+record(Meter={_,#meter{}}, Name, Number) ->
+ record(Meter, Name, Number, #{});
+
+record(Instrument=#instrument{meter={_, #meter{streams_tab=StreamTab,
metrics_tab=MetricsTab}}}, Number, Attributes) ->
- otel_meter_server:record(ViewAggregationTab, MetricsTab, Instrument, Number, Attributes).
+ otel_meter_server:record(StreamTab, MetricsTab, Instrument, Number, Attributes).
-record(Meter={_, #meter{view_aggregations_tab=ViewAggregationTab,
+record(Meter={_, #meter{streams_tab=StreamTab,
metrics_tab=MetricsTab}}, Name, Number, Attributes) ->
- otel_meter_server:record(Meter, ViewAggregationTab, MetricsTab, Name, Number, Attributes).
+ otel_meter_server:record(Meter, StreamTab, MetricsTab, Name, Number, Attributes).
diff --git a/apps/opentelemetry_experimental/src/otel_meter_server.erl b/apps/opentelemetry_experimental/src/otel_meter_server.erl
index e99cd82b..90cc9789 100644
--- a/apps/opentelemetry_experimental/src/otel_meter_server.erl
+++ b/apps/opentelemetry_experimental/src/otel_meter_server.erl
@@ -56,9 +56,6 @@
handle_info/2,
code_change/1]).
--include_lib("opentelemetry_api/include/opentelemetry.hrl").
-%% need to move shared records out of otel_span.hrl
--include_lib("opentelemetry/include/otel_span.hrl").
-include_lib("opentelemetry_api_experimental/include/otel_metrics.hrl").
-include_lib("opentelemetry_api_experimental/include/otel_meter.hrl").
-include_lib("kernel/include/logger.hrl").
@@ -72,8 +69,6 @@
id :: reference(),
pid :: pid(),
monitor_ref :: reference(),
- module :: module(),
- config :: term(),
default_aggregation_mapping :: map(),
default_temporality_mapping :: map()
}).
@@ -96,7 +91,7 @@
instruments_tab :: ets:table(),
callbacks_tab :: ets:table(),
- view_aggregations_tab :: ets:table(),
+ streams_tab :: ets:table(),
metrics_tab :: ets:table(),
views :: [otel_view:t()],
@@ -105,6 +100,11 @@
resource :: otel_resource:t()
}).
+%% I think these have warnings because the new view function is ignored
+%% which is because it calls functions that use matchspecs in record defs
+-dialyzer({nowarn_function, add_view_/9}).
+-dialyzer({nowarn_function, new_view/1}).
+
-spec start_link(atom(), atom(), otel_resource:t(), otel_configuration:t()) -> {ok, pid()} | ignore | {error, term()}.
start_link(Name, RegName, Resource, Config) ->
gen_server:start_link({local, RegName}, ?MODULE, [Name, RegName, Resource, Config], []).
@@ -145,14 +145,14 @@ add_view(Provider, Name, Criteria, Config) ->
gen_server:call(Provider, {add_view, Name, Criteria, Config}).
-spec record(ets:table(), ets:table(), otel_instrument:t(), number(), opentelemetry:attributes_map()) -> ok.
-record(ViewAggregationsTab, MetricsTab, Instrument, Number, Attributes) ->
+record(StreamsTab, MetricsTab, Instrument, Number, Attributes) ->
handle_measurement(#measurement{instrument=Instrument,
value=Number,
- attributes=Attributes}, ViewAggregationsTab, MetricsTab).
+ attributes=Attributes}, StreamsTab, MetricsTab).
-spec record(otel_meter:t(), ets:table(), ets:table(), otel_instrument:t() | otel_instrument:name(), number(), opentelemetry:attributes_map()) -> ok.
-record(Meter, ViewAggregationTab, MetricsTab, Name, Number, Attributes) ->
- handle_measurement(Meter, Name, Number, Attributes, ViewAggregationTab, MetricsTab).
+record(Meter, StreamTab, MetricsTab, Name, Number, Attributes) ->
+ handle_measurement(Meter, Name, Number, Attributes, StreamTab, MetricsTab).
-spec force_flush() -> ok.
force_flush() ->
@@ -165,26 +165,24 @@ force_flush(Provider) ->
init([Name, RegName, Resource, Config]) ->
InstrumentsTab = instruments_tab(RegName),
CallbacksTab = callbacks_tab(RegName),
- ViewAggregationsTab = view_aggregations_tab(RegName),
+ StreamsTab = streams_tab(RegName),
MetricsTab = metrics_tab(RegName),
Meter = #meter{module=otel_meter_default,
instruments_tab=InstrumentsTab,
provider=RegName,
- view_aggregations_tab=ViewAggregationsTab,
+ streams_tab=StreamsTab,
metrics_tab=MetricsTab},
%% TODO: don't do this if its already set?
opentelemetry_experimental:set_default_meter(Name, {otel_meter_default, Meter}),
- %% TODO: drop View if Criteria is a wildcard instrument name and View
- %% name is not undefined
- Views = [new_view(V) || V <- maps:get(views, Config, [])],
+ Views = lists:filtermap(fun new_view/1, maps:get(views, Config, [])),
{ok, #state{shared_meter=Meter,
instruments_tab=InstrumentsTab,
callbacks_tab=CallbacksTab,
- view_aggregations_tab=ViewAggregationsTab,
+ streams_tab=StreamsTab,
metrics_tab=MetricsTab,
views=Views,
readers=[],
@@ -195,7 +193,7 @@ handle_call({add_metric_reader, ReaderId, ReaderPid, DefaultAggregationMapping,
views=Views,
instruments_tab=InstrumentsTab,
callbacks_tab=CallbacksTab,
- view_aggregations_tab=ViewAggregationsTab,
+ streams_tab=StreamsTab,
metrics_tab=MetricsTab,
resource=Resource}) ->
Reader = metric_reader(ReaderId,
@@ -204,19 +202,19 @@ handle_call({add_metric_reader, ReaderId, ReaderPid, DefaultAggregationMapping,
Temporality),
Readers1 = [Reader | Readers],
- %% create ViewAggregations entries for existing View/Instrument
+ %% create Streams entries for existing View/Instrument
%% matches for the new Reader
- _ = update_view_aggregations(InstrumentsTab, CallbacksTab, ViewAggregationsTab, Views, Readers1),
+ _ = update_streams(InstrumentsTab, CallbacksTab, StreamsTab, Views, Readers1),
- {reply, {CallbacksTab, ViewAggregationsTab, MetricsTab, Resource}, State#state{readers=Readers1}};
+ {reply, {CallbacksTab, StreamsTab, MetricsTab, Resource}, State#state{readers=Readers1}};
handle_call(resource, _From, State=#state{resource=Resource}) ->
{reply, Resource, State};
handle_call({add_instrument, Instrument}, _From, State=#state{readers=Readers,
views=Views,
instruments_tab=InstrumentsTab,
callbacks_tab=CallbacksTab,
- view_aggregations_tab=ViewAggregationsTab}) ->
- _ = add_instrument_(InstrumentsTab, CallbacksTab, ViewAggregationsTab, Instrument, Views, Readers),
+ streams_tab=StreamsTab}) ->
+ _ = add_instrument_(InstrumentsTab, CallbacksTab, StreamsTab, Instrument, Views, Readers),
{reply, ok, State};
handle_call({register_callback, Instruments, Callback, CallbackArgs}, _From, State=#state{readers=Readers,
callbacks_tab=CallbacksTab}) ->
@@ -233,12 +231,9 @@ handle_call({get_meter, Scope}, _From, State=#state{shared_meter=Meter}) ->
handle_call({add_view, Name, Criteria, Config}, _From, State=#state{views=Views,
instruments_tab=InstrumentsTab,
callbacks_tab=CallbacksTab,
- view_aggregations_tab=ViewAggregationsTab,
+ streams_tab=StreamsTab,
readers=Readers}) ->
- %% TODO: drop View if Criteria is a wildcard instrument name and View name is not undefined
- NewView = otel_view:new(Name, Criteria, Config),
- _ = update_view_aggregations(InstrumentsTab, CallbacksTab, ViewAggregationsTab, [NewView], Readers),
- {reply, true, State#state{views=[NewView | Views]}};
+ add_view_(Name, Criteria, Config, InstrumentsTab, CallbacksTab, StreamsTab, Readers, Views, State);
handle_call(force_flush, _From, State=#state{readers=Readers}) ->
[otel_metric_reader:collect(Pid) || #reader{pid=Pid} <- Readers],
{reply, ok, State}.
@@ -258,6 +253,15 @@ code_change(State) ->
%%
+add_view_(Name, Criteria, Config, InstrumentsTab, CallbacksTab, StreamsTab, Readers, Views, State) ->
+ case otel_view:new(Name, Criteria, Config) of
+ {ok, NewView} ->
+ _ = update_streams(InstrumentsTab, CallbacksTab, StreamsTab, [NewView], Readers),
+ {reply, true, State#state{views=[NewView | Views]}};
+ {error, named_wildcard_view} ->
+ {reply, false, State}
+ end.
+
instruments_tab(Name) ->
ets:new(list_to_atom(lists:concat([instruments, "_", Name])), [set,
named_table,
@@ -270,8 +274,8 @@ callbacks_tab(Name) ->
{keypos, 1},
protected]).
-view_aggregations_tab(Name) ->
- ets:new(list_to_atom(lists:concat([view_aggregations, "_", Name])), [bag,
+streams_tab(Name) ->
+ ets:new(list_to_atom(lists:concat([streams, "_", Name])), [bag,
named_table,
{keypos, 1},
public]).
@@ -282,7 +286,6 @@ metrics_tab(Name) ->
{keypos, 2},
public]).
--dialyzer({nowarn_function,new_view/1}).
new_view(ViewConfig) ->
Name = maps:get(name, ViewConfig, undefined),
Description = maps:get(description, ViewConfig, undefined),
@@ -290,37 +293,40 @@ new_view(ViewConfig) ->
AttributeKeys = maps:get(attribute_keys, ViewConfig, undefined),
AggregationModule = maps:get(aggregation_module, ViewConfig, undefined),
AggregationOptions = maps:get(aggregation_options, ViewConfig, #{}),
- otel_view:new(Name, Selector, #{description => Description,
- attribute_keys => AttributeKeys,
- aggregation_module => AggregationModule,
- aggregation_options => AggregationOptions
- }).
+ case otel_view:new(Name, Selector, #{description => Description,
+ attribute_keys => AttributeKeys,
+ aggregation_module => AggregationModule,
+ aggregation_options => AggregationOptions
+ }) of
+ {ok, View} -> {true, View};
+ {error, named_wildcard_view} -> false
+ end.
%% Match the Instrument to views and then store a per-Reader aggregation for the View
-add_instrument_(InstrumentsTab, CallbacksTab, ViewAggregationsTab, Instrument=#instrument{meter=Meter,
+add_instrument_(InstrumentsTab, CallbacksTab, StreamsTab, Instrument=#instrument{meter=Meter,
name=Name}, Views, Readers) ->
case ets:insert_new(InstrumentsTab, {{Meter, Name}, Instrument}) of
true ->
- update_view_aggregations_(Instrument, CallbacksTab, ViewAggregationsTab, Views, Readers);
+ update_streams_(Instrument, CallbacksTab, StreamsTab, Views, Readers);
false ->
?LOG_INFO("Instrument ~p already created. Ignoring attempt to create Instrument with the same name in the same Meter.", [Name]),
ok
end.
%% used when a new View is added and the Views must be re-matched with each Instrument
-update_view_aggregations(InstrumentsTab, CallbacksTab, ViewAggregationsTab, Views, Readers) ->
+update_streams(InstrumentsTab, CallbacksTab, StreamsTab, Views, Readers) ->
ets:foldl(fun({_, Instrument}, Acc) ->
- update_view_aggregations_(Instrument, CallbacksTab, ViewAggregationsTab, Views, Readers),
+ update_streams_(Instrument, CallbacksTab, StreamsTab, Views, Readers),
Acc
end, ok, InstrumentsTab).
-update_view_aggregations_(Instrument=#instrument{meter=Meter,
- name=Name}, CallbacksTab, ViewAggregationsTab, Views, Readers) ->
+update_streams_(Instrument=#instrument{meter=Meter,
+ name=Name}, CallbacksTab, StreamsTab, Views, Readers) ->
Key = {Meter, Name},
ViewMatches = otel_view:match_instrument_to_views(Instrument, Views),
lists:foreach(fun(Reader=#reader{id=ReaderId}) ->
Matches = per_reader_aggregations(Reader, Instrument, ViewMatches),
- [true = ets:insert(ViewAggregationsTab, {Key, M}) || M <- Matches],
+ [true = ets:insert(StreamsTab, {Key, M}) || M <- Matches],
case {Instrument#instrument.callback, Instrument#instrument.callback_args} of
{undefined, _} ->
ok;
@@ -351,60 +357,69 @@ metric_reader(ReaderId, ReaderPid, DefaultAggregationMapping, Temporality) ->
%% a Measurement's Instrument is matched against Views
-%% each matched View+Reader becomes a ViewAggregation
-%% for each ViewAggregation a Measurement updates a Metric (`#metric')
-%% active metrics are indexed by the ViewAggregation name + the Measurement's Attributes
+%% each matched View+Reader becomes a Stream
+%% for each Stream a Measurement updates a Metric (`#metric')
+%% active metrics are indexed by the Stream name + the Measurement's Attributes
handle_measurement(#measurement{instrument=#instrument{meter=Meter,
name=Name},
value=Value,
attributes=Attributes},
- ViewAggregationsTab, MetricsTab) ->
- Matches = ets:match(ViewAggregationsTab, {{Meter, Name}, '$1'}),
+ StreamsTab, MetricsTab) ->
+ Matches = ets:match(StreamsTab, {{Meter, Name}, '$1'}),
update_aggregations(Value, Attributes, Matches, MetricsTab).
-handle_measurement(Meter, Name, Number, Attributes, ViewAggregationsTab, MetricsTab) ->
- Matches = ets:match(ViewAggregationsTab, {{Meter, Name}, '$1'}),
+handle_measurement(Meter, Name, Number, Attributes, StreamsTab, MetricsTab) ->
+ Matches = ets:match(StreamsTab, {{Meter, Name}, '$1'}),
update_aggregations(Number, Attributes, Matches, MetricsTab).
-update_aggregations(Value, Attributes, ViewAggregations, MetricsTab) ->
- lists:foreach(fun([ViewAggregation=#view_aggregation{}]) ->
- otel_aggregation:maybe_init_aggregate(MetricsTab,
- ViewAggregation,
- Value,
- Attributes);
+update_aggregations(Value, Attributes, Streams, MetricsTab) ->
+ lists:foreach(fun([Stream=#stream{instrument=Instrument}]) ->
+ maybe_init_aggregate(Value, Instrument, MetricsTab, Stream, Attributes);
(_) ->
ok
- end, ViewAggregations).
+ end, Streams).
+
+maybe_init_aggregate(Value, #instrument{kind=Kind} = Instrument, _MetricsTab, _Stream, _Attributes)
+ when Value < 0, Kind == ?KIND_COUNTER orelse Kind == ?KIND_HISTOGRAM ->
+ ?LOG_INFO("Discarding negative value for instrument ~s of type ~s", [Instrument#instrument.name, Kind]),
+ ok;
+
+maybe_init_aggregate(Value, _Instrument, MetricsTab, Stream, Attributes) ->
+ otel_aggregation:maybe_init_aggregate(MetricsTab, Stream, Value, Attributes).
%% create an aggregation for each Reader and its possibly unique aggregation/temporality
-per_reader_aggregations(Reader, Instrument, ViewAggregations) ->
- [view_aggregation_for_reader(Instrument, ViewAggregation, View, Reader)
- || {View, ViewAggregation} <- ViewAggregations].
+per_reader_aggregations(Reader, Instrument, Streams) ->
+ [stream_for_reader(Instrument, Stream, View, Reader)
+ || {View, Stream} <- Streams].
-view_aggregation_for_reader(Instrument=#instrument{kind=Kind}, ViewAggregation, View=#view{attribute_keys=AttributeKeys},
+stream_for_reader(Instrument=#instrument{kind=Kind}, Stream, View=#view{attribute_keys=AttributeKeys},
Reader=#reader{id=Id,
default_temporality_mapping=ReaderTemporalityMapping}) ->
AggregationModule = aggregation_module(Instrument, View, Reader),
Temporality = maps:get(Kind, ReaderTemporalityMapping, ?TEMPORALITY_CUMULATIVE),
- ViewAggregation#view_aggregation{
+ Forget = do_forget(Kind, Temporality),
+
+ Stream#stream{
reader=Id,
attribute_keys=AttributeKeys,
aggregation_module=AggregationModule,
- aggregation_options=#{},
+ forget=Forget,
temporality=Temporality};
-view_aggregation_for_reader(Instrument=#instrument{kind=Kind}, ViewAggregation, View,
+stream_for_reader(Instrument=#instrument{kind=Kind}, Stream, View,
Reader=#reader{id=Id,
default_temporality_mapping=ReaderTemporalityMapping}) ->
AggregationModule = aggregation_module(Instrument, View, Reader),
Temporality = maps:get(Kind, ReaderTemporalityMapping, ?TEMPORALITY_CUMULATIVE),
- ViewAggregation#view_aggregation{
+ Forget = do_forget(Kind, Temporality),
+
+ Stream#stream{
reader=Id,
attribute_keys=undefined,
aggregation_module=AggregationModule,
- aggregation_options=#{},
+ forget=Forget,
temporality=Temporality}.
@@ -434,3 +449,15 @@ report_cb(#{view_name := Name,
stacktrace := StackTrace}) ->
{"failed to create view: name=~ts exception=~ts",
[Name, otel_utils:format_exception(Class, Exception, StackTrace)]}.
+
+
+do_forget(_, ?TEMPORALITY_DELTA) ->
+ true;
+do_forget(?KIND_OBSERVABLE_COUNTER, _) ->
+ true;
+do_forget(?KIND_OBSERVABLE_GAUGE, _) ->
+ true;
+do_forget(?KIND_OBSERVABLE_UPDOWNCOUNTER, _) ->
+ true;
+do_forget(_, _) ->
+ false.
diff --git a/apps/opentelemetry_experimental/src/otel_meter_server_sup.erl b/apps/opentelemetry_experimental/src/otel_meter_server_sup.erl
index dcba7606..265646b7 100644
--- a/apps/opentelemetry_experimental/src/otel_meter_server_sup.erl
+++ b/apps/opentelemetry_experimental/src/otel_meter_server_sup.erl
@@ -29,7 +29,7 @@
start_link(Name, Resource, Opts) ->
supervisor:start_link(?MODULE, [Name, Resource, Opts]).
--dialyzer({nowarn_function, provider_pid/1}).
+%% eqwalizer:ignore waiting on sup_ref to be exported https://github.com/erlang/otp/pull/7205
-spec provider_pid(supervisor:sup_ref()) -> pid() | restarting | undefined.
provider_pid(SupPid) ->
Children = supervisor:which_children(SupPid),
diff --git a/apps/opentelemetry_experimental/src/otel_metric_reader.erl b/apps/opentelemetry_experimental/src/otel_metric_reader.erl
index 429b490d..c5316504 100644
--- a/apps/opentelemetry_experimental/src/otel_metric_reader.erl
+++ b/apps/opentelemetry_experimental/src/otel_metric_reader.erl
@@ -26,6 +26,7 @@
-export([start_link/3,
collect/1,
+ checkpoint_generation/1,
shutdown/1]).
-export([init/1,
@@ -36,15 +37,14 @@
terminate/2,
code_change/1]).
--include_lib("opentelemetry_api/include/opentelemetry.hrl").
-include_lib("opentelemetry_api_experimental/include/otel_metrics.hrl").
--include_lib("kernel/include/logger.hrl").
-include("otel_view.hrl").
-include("otel_metrics.hrl").
-record(state,
{
exporter,
+ %% eqwalizer:ignore waiting on sup_ref to be exported https://github.com/erlang/otp/pull/7205
provider_sup :: supervisor:sup_ref(),
id :: atom(),
default_aggregation_mapping :: #{otel_instrument:kind() => module()},
@@ -52,15 +52,16 @@
export_interval_ms :: integer() | undefined,
tref :: reference() | undefined,
callbacks_tab :: ets:table(),
- view_aggregation_tab :: ets:table(),
+ streams_tab :: ets:table(),
metrics_tab :: ets:table(),
config :: #{},
- resource :: otel_resource:t() | undefined
+ resource :: otel_resource:t() | undefined,
+ generation_ref :: atomics:atomics_ref()
}).
%% -spec start_link(atom(), map()) -> {ok, pid()} | ignore | {error, term()}.
-%% start_link(ChildId, CallbacksTable, ViewAggregationTable, MetricsTable, Config) ->
-%% gen_server:start_link({local, ChildId}, ?MODULE, [ChildId, CallbacksTable, ViewAggregationTable, MetricsTable, Config], []).
+%% start_link(ChildId, CallbacksTable, StreamsTable, MetricsTable, Config) ->
+%% gen_server:start_link({local, ChildId}, ?MODULE, [ChildId, CallbacksTable, StreamsTable, MetricsTable, Config], []).
start_link(ReaderId, ProviderSup, Config) ->
gen_server:start_link(?MODULE, [ReaderId, ProviderSup, Config], []).
@@ -70,6 +71,16 @@ collect(ReaderPid) ->
shutdown(ReaderPid) ->
gen_server:call(ReaderPid, shutdown).
+%% Get the current checkpoint generation.
+checkpoint_generation(ReaderId) ->
+ GenerationRef = persistent_term:get({?MODULE, ReaderId}),
+ atomics:get(GenerationRef, 1).
+
+%% Increment and return the previous checkpoint generation.
+inc_checkpoint_generation(ReaderId) ->
+ GenerationRef = persistent_term:get({?MODULE, ReaderId}),
+ atomics:add_get(GenerationRef, 1, 1) - 1.
+
init([ReaderId, ProviderSup, Config]) ->
erlang:process_flag(trap_exit, true),
ExporterModuleConfig = maps:get(exporter, Config, undefined),
@@ -79,7 +90,7 @@ init([ReaderId, ProviderSup, Config]) ->
Temporality = maps:get(default_temporality_mapping, Config, #{}),
%% if a periodic reader is needed then this value is set
- %% somehow need to do a default of 10000 millis, but only if this is a periodic reader
+ %% somehow need to do a default of 10000 MILLIS, but only if this is a periodic reader
ExporterIntervalMs = maps:get(export_interval_ms, Config, undefined),
TRef = case ExporterIntervalMs of
@@ -88,6 +99,17 @@ init([ReaderId, ProviderSup, Config]) ->
_ ->
erlang:send_after(ExporterIntervalMs, self(), collect)
end,
+
+ GenerationRef =
+ try persistent_term:get({?MODULE, ReaderId})
+ catch
+ error:badarg ->
+ GenerationRef0 = atomics:new(1, []),
+ persistent_term:put({?MODULE, ReaderId}, GenerationRef0),
+ GenerationRef0
+ end,
+
+ %% eqwalizer:fixme get an unbound record error until the fixme for state record is resolved
{ok, #state{exporter=Exporter,
provider_sup=ProviderSup,
id=ReaderId,
@@ -95,19 +117,21 @@ init([ReaderId, ProviderSup, Config]) ->
temporality_mapping=Temporality,
export_interval_ms=ExporterIntervalMs,
tref=TRef,
+ generation_ref=GenerationRef,
config=Config}, {continue, register_with_server}}.
+%% eqwalizer:fixme get an unbound record error until the fixme for state record is resolved
handle_continue(register_with_server, State=#state{provider_sup=ProviderSup,
id=ReaderId,
default_aggregation_mapping=DefaultAggregationMapping,
temporality_mapping=Temporality}) ->
ServerPid = otel_meter_server_sup:provider_pid(ProviderSup),
- {CallbacksTab, ViewAggregationTab, MetricsTab, Resource} =
+ {CallbacksTab, StreamsTab, MetricsTab, Resource} =
otel_meter_server:add_metric_reader(ServerPid, ReaderId, self(),
DefaultAggregationMapping,
Temporality),
{noreply, State#state{callbacks_tab=CallbacksTab,
- view_aggregation_tab=ViewAggregationTab,
+ streams_tab=StreamsTab,
metrics_tab=MetricsTab,
resource=Resource}}.
@@ -119,6 +143,7 @@ handle_call(_, _From, State) ->
handle_cast(_, State) ->
{noreply, State}.
+%% eqwalizer:fixme get an unbound record error until the fixme for state record is resolved
handle_info(collect, State=#state{exporter=undefined,
export_interval_ms=ExporterIntervalMs,
tref=TRef}) when TRef =/= undefined andalso
@@ -131,12 +156,12 @@ handle_info(collect, State=#state{id=ReaderId,
export_interval_ms=undefined,
tref=undefined,
callbacks_tab=CallbacksTab,
- view_aggregation_tab=ViewAggregationTab,
+ streams_tab=StreamsTab,
metrics_tab=MetricsTab,
resource=Resource
}) ->
%% collect from view aggregations table and then export
- Metrics = collect_(CallbacksTab, ViewAggregationTab, MetricsTab, ReaderId),
+ Metrics = collect_(CallbacksTab, StreamsTab, MetricsTab, ReaderId),
otel_exporter:export_metrics(ExporterModule, Metrics, Resource, Config),
@@ -146,7 +171,7 @@ handle_info(collect, State=#state{id=ReaderId,
export_interval_ms=ExporterIntervalMs,
tref=TRef,
callbacks_tab=CallbacksTab,
- view_aggregation_tab=ViewAggregationTab,
+ streams_tab=StreamsTab,
metrics_tab=MetricsTab,
resource=Resource
}) when TRef =/= undefined andalso
@@ -155,8 +180,7 @@ handle_info(collect, State=#state{id=ReaderId,
NewTRef = erlang:send_after(ExporterIntervalMs, self(), collect),
%% collect from view aggregations table and then export
- Metrics = collect_(CallbacksTab, ViewAggregationTab, MetricsTab, ReaderId),
-
+ Metrics = collect_(CallbacksTab, StreamsTab, MetricsTab, ReaderId),
otel_exporter:export_metrics(ExporterModule, Metrics, Resource, Config),
@@ -175,8 +199,8 @@ code_change(State) ->
%%
-spec collect_(any(), ets:table(), any(), atom()) -> [any()].
-collect_(CallbacksTab, ViewAggregationTab, MetricsTab, ReaderId) ->
- _ = run_callbacks(ReaderId, CallbacksTab, ViewAggregationTab, MetricsTab),
+collect_(CallbacksTab, StreamsTab, MetricsTab, ReaderId) ->
+ _ = run_callbacks(ReaderId, CallbacksTab, StreamsTab, MetricsTab),
%% Need to be able to efficiently get all from VIEW_AGGREGATIONS_TAB that apply to this reader
@@ -187,55 +211,50 @@ collect_(CallbacksTab, ViewAggregationTab, MetricsTab, ReaderId) ->
%% use the information (temporality) from the VIEW_AGGREGATIONS_TAB entry to reset the
%% METRICS_TAB entry value (like setting value back to 0 for DELTA)
- %% ViewAggregationTab is a `bag' so to iterate over every ViewAggregation for
- %% each Instrument we use `first'/`next' and lookup the list of ViewAggregations
+ %% StreamsTab is a `bag' so to iterate over every Stream for
+ %% each Instrument we use `first'/`next' and lookup the list of Streams
%% by the key (Instrument)
- Key = ets:first(ViewAggregationTab),
+ Key = ets:first(StreamsTab),
- %% get the collection start time after running callbacks so any initialized
- %% metrics have a start time before the collection start time.
- CollectionStartTime = opentelemetry:timestamp(),
- collect_(CallbacksTab, ViewAggregationTab, MetricsTab, CollectionStartTime, ReaderId, [], Key).
+ Generation = inc_checkpoint_generation(ReaderId),
+ collect_(CallbacksTab, StreamsTab, MetricsTab, Generation, ReaderId, [], Key).
-run_callbacks(ReaderId, CallbacksTab, ViewAggregationTab, MetricsTab) ->
+run_callbacks(ReaderId, CallbacksTab, StreamsTab, MetricsTab) ->
try ets:lookup_element(CallbacksTab, ReaderId, 2) of
Callbacks ->
- otel_observables:run_callbacks(Callbacks, ReaderId, ViewAggregationTab, MetricsTab)
+ otel_observables:run_callbacks(Callbacks, ReaderId, StreamsTab, MetricsTab)
catch
error:badarg ->
[]
end.
-collect_(_CallbacksTab, _ViewAggregationTab, _MetricsTab, _CollectionStartTime, _ReaderId, MetricsAcc, '$end_of_table') ->
+collect_(_CallbacksTab, _StreamsTab, _MetricsTab, _Generation, _ReaderId, MetricsAcc, '$end_of_table') ->
MetricsAcc;
-collect_(CallbacksTab, ViewAggregationTab, MetricsTab, CollectionStartTime, ReaderId, MetricsAcc, Key) ->
- ViewAggregations = ets:lookup_element(ViewAggregationTab, Key, 2),
- collect_(CallbacksTab, ViewAggregationTab, MetricsTab, CollectionStartTime, ReaderId,
+collect_(CallbacksTab, StreamsTab, MetricsTab, Generation, ReaderId, MetricsAcc, Key) ->
+ Streams = ets:lookup_element(StreamsTab, Key, 2),
+ collect_(CallbacksTab, StreamsTab, MetricsTab, Generation, ReaderId,
checkpoint_metrics(MetricsTab,
- CollectionStartTime,
+ Generation,
ReaderId,
- ViewAggregations) ++ MetricsAcc,
- ets:next(ViewAggregationTab, Key)).
+ Streams) ++ MetricsAcc,
+ ets:next(StreamsTab, Key)).
-checkpoint_metrics(MetricsTab, CollectionStartTime, Id, ViewAggregations) ->
- lists:foldl(fun(#view_aggregation{aggregation_module=otel_aggregation_drop}, Acc) ->
+checkpoint_metrics(MetricsTab, Generation, Id, Streams) ->
+ lists:foldl(fun(#stream{aggregation_module=otel_aggregation_drop}, Acc) ->
Acc;
- (ViewAggregation=#view_aggregation{name=Name,
+ (Stream=#stream{name=Name,
reader=ReaderId,
instrument=Instrument=#instrument{unit=Unit},
aggregation_module=AggregationModule,
description=Description
}, Acc) when Id =:= ReaderId ->
- AggregationModule:checkpoint(MetricsTab,
- ViewAggregation,
- CollectionStartTime),
Data = AggregationModule:collect(MetricsTab,
- ViewAggregation,
- CollectionStartTime),
+ Stream,
+ Generation),
[metric(Instrument, Name, Description, Unit, Data) | Acc];
(_, Acc) ->
Acc
- end, [], ViewAggregations).
+ end, [], Streams).
metric(#instrument{meter=Meter}, Name, Description, Unit, Data) ->
#metric{scope=otel_meter_default:scope(Meter),
diff --git a/apps/opentelemetry_experimental/src/otel_metric_reader_sup.erl b/apps/opentelemetry_experimental/src/otel_metric_reader_sup.erl
index f5490643..4e73d58e 100644
--- a/apps/opentelemetry_experimental/src/otel_metric_reader_sup.erl
+++ b/apps/opentelemetry_experimental/src/otel_metric_reader_sup.erl
@@ -34,15 +34,14 @@ init([ProviderSup, Opts]) ->
SupFlags = #{strategy => one_for_one,
intensity => 5,
period => 10},
- ChildSpecs = [begin
- #{id => ReaderId,
- start => {Module, start_link, [ReaderId, ProviderSup, ReaderConfig]},
- type => worker,
- restart => permanent,
- shutdown => 1000}
- end || #{id := ReaderId,
- module := Module,
- config := ReaderConfig} <- Readers
- ],
-
+ ChildSpecs = lists:map(
+ fun(#{id := ReaderId,module := Module, config := ReaderConfig}) ->
+ #{id => ReaderId,
+ start => {Module, start_link, [ReaderId, ProviderSup, ReaderConfig]},
+ type => worker,
+ restart => permanent,
+ shutdown => 1000}
+ end,
+ Readers
+ ),
{ok, {SupFlags, ChildSpecs}}.
diff --git a/apps/opentelemetry_experimental/src/otel_observables.erl b/apps/opentelemetry_experimental/src/otel_observables.erl
index 74b9d04a..fd8d4aaa 100644
--- a/apps/opentelemetry_experimental/src/otel_observables.erl
+++ b/apps/opentelemetry_experimental/src/otel_observables.erl
@@ -21,42 +21,44 @@
-include_lib("kernel/include/logger.hrl").
-include_lib("opentelemetry_api_experimental/include/otel_metrics.hrl").
--include("otel_metrics.hrl").
-include("otel_view.hrl").
-type callbacks() :: [{otel_instrument:callback(), otel_instrument:callback_args(), otel_instrument:t()}].
%% call each callback and associate the result with the Instruments it observes
-spec run_callbacks(callbacks(), atom(), ets:table(), ets:table()) -> ok.
-run_callbacks(Callbacks, ReaderId, ViewAggregationTab, MetricsTab) ->
+run_callbacks(Callbacks, ReaderId, StreamTab, MetricsTab) ->
lists:foreach(fun({Callback, CallbackArgs, Instruments})
when is_list(Instruments) ->
Results = Callback(CallbackArgs),
handle_instruments_observations(Results,
Instruments,
- ViewAggregationTab,
+ StreamTab,
MetricsTab,
ReaderId);
({Callback, CallbackArgs, Instrument}) ->
Results = Callback(CallbackArgs),
- %% eqwalizer:ignore we know this is [otel_instrument:observation()] but eqwalizer doesn't
+ %% when not a list of instruments it isn't expecting named observation
+ %% results so we use handle_instrument instead of handle_instruments
+ %% but we can't type that correctly so have to use a `fixme'
+ %% eqwalizer:fixme can maybe do better typing to not have to ignore this
handle_instrument_observations(Results,
Instrument,
- ViewAggregationTab,
+ StreamTab,
MetricsTab,
ReaderId)
end, Callbacks).
-%% lookup ViewAggregations for Instrument and aggregate each observation
+%% lookup Streams for Instrument and aggregate each observation
-spec handle_instrument_observations([otel_instrument:observation()], otel_instrument:t(),
ets:table(), ets:table(), atom()) -> ok.
handle_instrument_observations(Results, #instrument{meter=Meter,
name=Name},
- ViewAggregationTab, MetricsTab, ReaderId) ->
- try ets:lookup_element(ViewAggregationTab, {Meter, Name}, 2) of
- ViewAggregations ->
- [handle_observations(MetricsTab, ViewAggregation, Results)
- || #view_aggregation{reader=Id}=ViewAggregation <- ViewAggregations,
+ StreamTab, MetricsTab, ReaderId) ->
+ try ets:lookup_element(StreamTab, {Meter, Name}, 2) of
+ Streams ->
+ [handle_observations(MetricsTab, Stream, Results)
+ || #stream{reader=Id}=Stream <- Streams,
Id =:= ReaderId],
ok
catch
@@ -68,39 +70,39 @@ handle_instrument_observations(Results, #instrument{meter=Meter,
%% handle results for a multi-instrument callback
-spec handle_instruments_observations([otel_instrument:named_observations()], [otel_instrument:t()],
ets:table(), ets:table(), atom()) -> ok.
-handle_instruments_observations([], _Instruments, _ViewAggregationTab, _MetricsTab, _ReaderId) ->
+handle_instruments_observations([], _Instruments, _StreamTab, _MetricsTab, _ReaderId) ->
ok;
handle_instruments_observations([{InstrumentName, Results} | Rest], Instruments,
- ViewAggregationTab, MetricsTab, ReaderId) ->
+ StreamTab, MetricsTab, ReaderId) ->
case lists:keyfind(InstrumentName, #instrument.name, Instruments) of
false ->
?LOG_DEBUG("Unknown Instrument ~p used in metric callback", [InstrumentName]);
Instrument ->
- handle_instrument_observations(Results, Instrument, ViewAggregationTab, MetricsTab, ReaderId)
+ handle_instrument_observations(Results, Instrument, StreamTab, MetricsTab, ReaderId)
end,
- handle_instruments_observations(Rest, Instruments, ViewAggregationTab, MetricsTab, ReaderId);
-handle_instruments_observations([Result | Rest], Instruments, ViewAggregationTab, MetricsTab, ReaderId) ->
+ handle_instruments_observations(Rest, Instruments, StreamTab, MetricsTab, ReaderId);
+handle_instruments_observations([Result | Rest], Instruments, StreamTab, MetricsTab, ReaderId) ->
?LOG_DEBUG("Each multi-instrument callback result must be a tuple of "
"type {atom(), [{number(), map()}]} but got ~p", [Result]),
- handle_instruments_observations(Rest, Instruments, ViewAggregationTab, MetricsTab, ReaderId);
-handle_instruments_observations(Results, _Instruments, _ViewAggregationTab, _MetricsTab, _ReaderId) ->
+ handle_instruments_observations(Rest, Instruments, StreamTab, MetricsTab, ReaderId);
+handle_instruments_observations(Results, _Instruments, _StreamTab, _MetricsTab, _ReaderId) ->
?LOG_DEBUG("Multi-instrument callback result must be a list of type "
"[{atom(), [{number(), map()}]}] but got ~p", [Results]),
ok.
%% update aggregation for each observation
-handle_observations(_MetricsTab, _ViewAggregation, []) ->
+handle_observations(_MetricsTab, _Stream, []) ->
ok;
-handle_observations(MetricsTab, ViewAggregation, [{Number, Attributes} | Rest])
+handle_observations(MetricsTab, Stream, [{Number, Attributes} | Rest])
when is_number(Number),
is_map(Attributes) ->
- _ = otel_aggregation:maybe_init_aggregate(MetricsTab, ViewAggregation, Number, Attributes),
- handle_observations(MetricsTab, ViewAggregation, Rest);
-handle_observations(MetricsTab, ViewAggregation, [Result | Rest]) ->
+ _ = otel_aggregation:maybe_init_aggregate(MetricsTab, Stream, Number, Attributes),
+ handle_observations(MetricsTab, Stream, Rest);
+handle_observations(MetricsTab, Stream, [Result | Rest]) ->
?LOG_DEBUG("Each metric callback result must be of type {number(), map()} but got ~p", [Result]),
- handle_observations(MetricsTab, ViewAggregation, Rest);
-handle_observations(_MetricsTab, _ViewAggregation, Result) ->
+ handle_observations(MetricsTab, Stream, Rest);
+handle_observations(_MetricsTab, _Stream, Result) ->
?LOG_DEBUG("Metric callback return must be a list of type [{number(), map()}] or "
"[{atom(), [{number(), map()}]}] but got", [Result]),
ok.
diff --git a/apps/opentelemetry_experimental/src/otel_otlp_metrics.erl b/apps/opentelemetry_experimental/src/otel_otlp_metrics.erl
index 1b6f1c33..e15f81f1 100644
--- a/apps/opentelemetry_experimental/src/otel_otlp_metrics.erl
+++ b/apps/opentelemetry_experimental/src/otel_otlp_metrics.erl
@@ -108,6 +108,7 @@ to_histogram_data_points(#histogram_datapoint{
max=Max
}) ->
#{attributes => otel_otlp_common:to_attributes(Attributes),
+ %% eqwalizer:ignore start_time in histogram_datapoint has to support matchspec entries
start_time_unix_nano => opentelemetry:timestamp_to_nano(StartTime),
time_unix_nano => opentelemetry:timestamp_to_nano(CollectionStartTime),
count => Count,
diff --git a/apps/opentelemetry_experimental/src/otel_view.erl b/apps/opentelemetry_experimental/src/otel_view.erl
index c2e9e960..c4df76bf 100644
--- a/apps/opentelemetry_experimental/src/otel_view.erl
+++ b/apps/opentelemetry_experimental/src/otel_view.erl
@@ -21,6 +21,7 @@
new/3,
match_instrument_to_views/2]).
+-include_lib("kernel/include/logger.hrl").
-include_lib("opentelemetry_api_experimental/include/otel_metrics.hrl").
-include_lib("opentelemetry_api/include/opentelemetry.hrl").
-include("otel_metrics.hrl").
@@ -50,11 +51,31 @@
-include_lib("opentelemetry_api/include/gradualizer.hrl").
+%% ignore dialyzer warnings in functions using matchspecs or related to those that do
+-dialyzer({nowarn_function, do_new/2}).
+-dialyzer({nowarn_function, criteria_to_instrument_matchspec/1}).
+-dialyzer({nowarn_function, maybe_init_meter/1}).
+-dialyzer({nowarn_function, update_meter_name/2}).
+-dialyzer({nowarn_function, update_meter_version/2}).
+-dialyzer({nowarn_function, update_meter_schema_url/2}).
+
+-spec new(criteria() | undefined, config()) -> {ok, t()} | {error, named_wildcard_view}.
+new(Criteria, Config) ->
+ new(undefined, Criteria, Config).
+
+-spec new(name(), criteria() | undefined, config()) -> {ok, t()} | {error, named_wildcard_view}.
+new(undefined, Criteria, Config) ->
+ {ok, do_new(Criteria, Config)};
+new(Name, #{instrument_name := '*'}, _Config) ->
+ ?LOG_INFO("Wildcard Views cannot have a name, discarding view ~s", [Name]),
+ {error, named_wildcard_view};
+new(Name, Criteria, Config) ->
+ View = do_new(Criteria, Config),
+ {ok, View#view{name=Name}}.
+
%% no name means Instrument name is used
%% must reject wildcard Criteria in this case
--dialyzer({nowarn_function,new/2}).
--spec new(criteria() | undefined, config()) -> t().
-new(Criteria, Config) ->
+do_new(Criteria, Config) ->
CriteriaInstrumentName = view_name_from_criteria(Criteria),
Matchspec = criteria_to_instrument_matchspec(Criteria),
%% no name given so use the name of the instrument in the selection
@@ -66,19 +87,11 @@ new(Criteria, Config) ->
aggregation_module=maps:get(aggregation_module, Config, undefined),
aggregation_options=maps:get(aggregation_options, Config, #{})}.
--dialyzer({nowarn_function,new/3}).
--spec new(name(), criteria() | undefined, config()) -> t().
-new(undefined, Criteria, Config) ->
- new(Criteria, Config);
-new(Name, Criteria, Config) ->
- View = new(Criteria, Config),
- View#view{name=Name}.
-
--dialyzer({nowarn_function,match_instrument_to_views/2}).
--spec match_instrument_to_views(otel_instrument:t(), [t()]) -> [{t() | undefined, #view_aggregation{}}].
+-spec match_instrument_to_views(otel_instrument:t(), [t()]) -> [{t() | undefined, #stream{}}].
match_instrument_to_views(Instrument=#instrument{name=InstrumentName,
meter=Meter,
- description=Description}, Views) ->
+ description=Description,
+ advisory_params=AdvisoryParams}, Views) ->
IsMonotonic = otel_instrument:is_monotonic(Instrument),
Temporality = otel_instrument:temporality(Instrument),
Scope = otel_meter:scope(Meter),
@@ -91,89 +104,95 @@ match_instrument_to_views(Instrument=#instrument{name=InstrumentName,
[] ->
false;
_ ->
- %% `reader' needs to be undefined and is set
- %% for each in `otel_meter_server'
- %% eqwalizer:ignore see above
- {true, {View, #view_aggregation{name=value_or(ViewName,
- InstrumentName),
- scope=Scope,
- instrument=Instrument,
- temporality=Temporality,
- is_monotonic=IsMonotonic,
- attribute_keys=AttributeKeys,
- aggregation_options=AggregationOptions,
- description=value_or(ViewDescription,
- Description)
- }}}
+ AggregationOptions1 = aggragation_options(AggregationOptions, AdvisoryParams),
+ {true, {View, #stream{name=value_or(ViewName,
+ InstrumentName),
+ scope=Scope,
+ instrument=Instrument,
+ temporality=Temporality,
+ is_monotonic=IsMonotonic,
+ attribute_keys=AttributeKeys,
+ aggregation_options=AggregationOptions1,
+ description=value_or(ViewDescription,
+ Description)
+ }}}
end
end, Views) of
[] ->
- [{undefined, #view_aggregation{name=InstrumentName,
- scope=Scope,
- instrument=Instrument,
- temporality=Temporality,
- is_monotonic=IsMonotonic,
- attribute_keys=undefined,
- aggregation_options=#{},
- description=Description}}];
+ AggregationOptions1 = aggragation_options(#{}, AdvisoryParams),
+ [{undefined, #stream{name=InstrumentName,
+ scope=Scope,
+ instrument=Instrument,
+ temporality=Temporality,
+ is_monotonic=IsMonotonic,
+ attribute_keys=undefined,
+ aggregation_options=AggregationOptions1,
+ description=Description}}];
Aggs ->
Aggs
end.
%%
+aggragation_options(#{explicit_bucket_boundaries := _} = AggregationOptions, _AdvisoryParams) ->
+ AggregationOptions;
+aggragation_options(AggregationOptions, #{explicit_bucket_boundaries := Boundaries}) ->
+ maps:put(explicit_bucket_boundaries, Boundaries, AggregationOptions);
+aggragation_options(AggregationOptions, _AdvisoryParams) ->
+ AggregationOptions.
+
value_or(undefined, Other) ->
Other;
value_or(Value, _Other) ->
Value.
--dialyzer({nowarn_function,criteria_to_instrument_matchspec/1}).
--spec criteria_to_instrument_matchspec(map() | undefined) -> ets:compiled_match_spec().
+-spec criteria_to_instrument_matchspec(map() | undefined) -> ets:comp_match_spec().
criteria_to_instrument_matchspec(Criteria) when is_map(Criteria) ->
Instrument =
- maps:fold(fun(instrument_name, InstrumentName, InstrumentAcc) ->
- InstrumentAcc#instrument{name=InstrumentName};
- (instrument_kind, Kind, InstrumentAcc) ->
- InstrumentAcc#instrument{kind=Kind};
- (instrument_unit, Unit, InstrumentAcc) ->
- InstrumentAcc#instrument{unit=Unit};
- (meter_name, MeterName, InstrumentAcc) ->
- Meter = maybe_init_meter(InstrumentAcc),
- Meter1 = update_meter_name(MeterName, Meter),
- InstrumentAcc#instrument{meter=Meter1};
- (meter_version, MeterVersion, InstrumentAcc) ->
- Meter = maybe_init_meter(InstrumentAcc),
- Meter1 = update_meter_version(MeterVersion, Meter),
- InstrumentAcc#instrument{meter=Meter1};
- (meter_schema_url, SchemaUrl, InstrumentAcc) ->
- Meter = maybe_init_meter(InstrumentAcc),
- Meter1 = update_meter_schema_url(SchemaUrl, Meter),
- InstrumentAcc#instrument{meter=Meter1}
- %% eqwalizer:ignore building a matchspec and don't want '_' polluting the type
- end, #instrument{_='_'}, Criteria),
+ maps:fold(fun(instrument_name, '*', InstrumentAcc) ->
+ InstrumentAcc;
+ (instrument_name, InstrumentName, InstrumentAcc) ->
+ InstrumentAcc#instrument{name=InstrumentName};
+ (instrument_kind, Kind, InstrumentAcc) ->
+ InstrumentAcc#instrument{kind=Kind};
+ (instrument_unit, Unit, InstrumentAcc) ->
+ InstrumentAcc#instrument{unit=Unit};
+ (meter_name, MeterName, InstrumentAcc) ->
+ Meter = maybe_init_meter(InstrumentAcc),
+ Meter1 = update_meter_name(MeterName, Meter),
+ InstrumentAcc#instrument{meter=Meter1};
+ (meter_version, MeterVersion, InstrumentAcc) ->
+ Meter = maybe_init_meter(InstrumentAcc),
+ Meter1 = update_meter_version(MeterVersion, Meter),
+ InstrumentAcc#instrument{meter=Meter1};
+ (meter_schema_url, SchemaUrl, InstrumentAcc) ->
+ Meter = maybe_init_meter(InstrumentAcc),
+ Meter1 = update_meter_schema_url(SchemaUrl, Meter),
+ InstrumentAcc#instrument{meter=Meter1}
+ %% eqwalizer:ignore using ignore as an ets matchspec workaround
+ end, #instrument{_='_'}, Criteria),
ets:match_spec_compile([{Instrument, [], [true]}]);
criteria_to_instrument_matchspec(_) ->
- %% eqwalizer:ignore building a matchspec and don't want '_' polluting the type
+ %% eqwalizer:ignore using ignore as an ets matchspec workaround
ets:match_spec_compile([{#instrument{_='_'}, [], [true]}]).
--dialyzer({nowarn_function,maybe_init_meter/1}).
maybe_init_meter(#instrument{meter='_'}) ->
{'_', #meter{instrumentation_scope=#instrumentation_scope{_='_'},
_='_'}}.
--dialyzer({nowarn_function,update_meter_name/2}).
update_meter_name(MeterName, {_, Meter=#meter{instrumentation_scope=Scope}}) ->
{'_', Meter#meter{instrumentation_scope=Scope#instrumentation_scope{name=MeterName}}}.
--dialyzer({nowarn_function,update_meter_version/2}).
update_meter_version(MeterVersion, {_, Meter=#meter{instrumentation_scope=Scope}}) ->
{'_', Meter#meter{instrumentation_scope=Scope#instrumentation_scope{version=MeterVersion}}}.
--dialyzer({nowarn_function,update_meter_schema_url/2}).
update_meter_schema_url(SchemaUrl, {_, Meter=#meter{instrumentation_scope=Scope}}) ->
{'_', Meter#meter{instrumentation_scope=Scope#instrumentation_scope{schema_url=SchemaUrl}}}.
view_name_from_criteria(Criteria) when is_map(Criteria) ->
- maps:get(instrument_name, Criteria, undefined);
+ case maps:get(instrument_name, Criteria, undefined) of
+ '*' -> undefined;
+ Name -> Name
+ end;
view_name_from_criteria(_) ->
undefined.
diff --git a/apps/opentelemetry_experimental/src/otel_view.hrl b/apps/opentelemetry_experimental/src/otel_view.hrl
index 84800f63..8e117bf3 100644
--- a/apps/opentelemetry_experimental/src/otel_view.hrl
+++ b/apps/opentelemetry_experimental/src/otel_view.hrl
@@ -6,12 +6,12 @@
aggregation_module :: module(),
aggregation_options=#{} :: map()}).
--record(view_aggregation,
+-record(stream,
{%% name of the view or instrument if the view has no name
name :: atom(),
scope :: opentelemetry:instrumentation_scope(),
instrument :: otel_instrument:t(),
- reader :: reference(),
+ reader :: reference() | undefined,
attribute_keys :: [opentelemetry:attribute_key()] | undefined,
@@ -22,5 +22,10 @@
is_monotonic :: boolean(),
%% description from the view or the instrument if the view has no name
- description :: unicode:unicode_binary() | undefined
+ description :: unicode:unicode_binary() | undefined,
+
+ %% whether to forget metrics if they aren't recorded to during a
+ %% collection cycle. This is the case for Observables and Delta
+ %% temporality metrics
+ forget :: boolean() | undefined
}).
diff --git a/apps/opentelemetry_experimental/test/otel_metrics_SUITE.erl b/apps/opentelemetry_experimental/test/otel_metrics_SUITE.erl
index ab9d4355..fe0296c7 100644
--- a/apps/opentelemetry_experimental/test/otel_metrics_SUITE.erl
+++ b/apps/opentelemetry_experimental/test/otel_metrics_SUITE.erl
@@ -29,7 +29,7 @@
attributes=MetricAttributes,
start_time=StartTime,
time=Time} <- MetricDatapoints, StartTime =< Time]),
- ?assertMatch([], lists:sort(Datapoints) -- SortedDatapoints, SortedDatapoints)
+ ?assert(is_subset(Datapoints, SortedDatapoints), SortedDatapoints)
after
5000 ->
ct:fail({metric_receive_timeout, ?LINE})
@@ -54,7 +54,7 @@
attributes=MetricAttributes,
start_time=StartTime,
time=Time} <- MetricDatapoints, StartTime =< Time]),
- ?assertMatch([], lists:sort(Datapoints) -- SortedDatapoints, SortedDatapoints)
+ ?assert(is_subset(Datapoints, SortedDatapoints), SortedDatapoints)
after
5000 ->
ct:fail({metric_receive_timeout, ?LINE})
@@ -64,26 +64,28 @@
-define(assertNotReceive(Name, Description, Unit),
(fun() ->
receive
- M=#metric{name=MetricName,
- description=MetricDescription,
- unit=MetricUnit}
+ {otel_metric, M=#metric{name=MetricName,
+ description=MetricDescription,
+ unit=MetricUnit}}
when MetricName =:= Name,
MetricDescription =:= Description,
MetricUnit =:= Unit ->
ct:fail({metric_received, M})
after
- 0 ->
+ 50 ->
ok
end
end)()).
all() ->
- [default_view, provider_test, view_creation_test, counter_add, multiple_readers,
+ [default_view, provider_test, view_creation_test, wildcard_view, counter_add, multiple_readers,
explicit_histograms, delta_explicit_histograms, delta_counter, cumulative_counter,
kill_reader, kill_server, observable_counter, observable_updown_counter, observable_gauge,
multi_instrument_callback, using_macros, float_counter, float_updown_counter, float_histogram,
sync_filtered_attributes, async_filtered_attributes, delta_observable_counter,
- bad_observable_return, default_resource
+ bad_observable_return, default_resource, histogram_aggregation_options, advisory_params,
+ sync_delta_histogram, async_cumulative_page_faults, async_delta_page_faults,
+ async_attribute_removal, sync_cumulative_histogram
].
init_per_suite(Config) ->
@@ -168,6 +170,51 @@ init_per_testcase(delta_explicit_histograms, Config) ->
{ok, _} = application:ensure_all_started(opentelemetry_experimental),
+ Config;
+init_per_testcase(sync_delta_histogram, Config) ->
+ DeltaHistogramTemporality = maps:put(?KIND_HISTOGRAM, ?TEMPORALITY_DELTA, default_temporality_mapping()),
+ application:load(opentelemetry_experimental),
+ ok = application:set_env(opentelemetry_experimental, readers, [#{id => otel_test_reader,
+ module => otel_metric_reader,
+ config => #{exporter => {otel_metric_exporter_pid, self()},
+ default_temporality_mapping =>
+ DeltaHistogramTemporality}}]),
+
+ {ok, _} = application:ensure_all_started(opentelemetry_experimental),
+
+ Config;
+init_per_testcase(sync_cumulative_histogram, Config) ->
+ DeltaHistogramTemporality = maps:put(?KIND_HISTOGRAM, ?TEMPORALITY_CUMULATIVE, default_temporality_mapping()),
+ application:load(opentelemetry_experimental),
+ ok = application:set_env(opentelemetry_experimental, readers, [#{id => otel_test_reader,
+ module => otel_metric_reader,
+ config => #{exporter => {otel_metric_exporter_pid, self()},
+ default_temporality_mapping =>
+ DeltaHistogramTemporality}}]),
+
+ {ok, _} = application:ensure_all_started(opentelemetry_experimental),
+
+ Config;
+init_per_testcase(async_cumulative_page_faults, Config) ->
+ application:load(opentelemetry_experimental),
+ ok = application:set_env(opentelemetry_experimental, readers, [#{id => otel_test_reader,
+ module => otel_metric_reader,
+ config => #{exporter => {otel_metric_exporter_pid, self()}}}]),
+
+ {ok, _} = application:ensure_all_started(opentelemetry_experimental),
+
+ Config;
+init_per_testcase(async_delta_page_faults, Config) ->
+ DeltaCounterTemporality = maps:put(?KIND_OBSERVABLE_COUNTER, ?TEMPORALITY_DELTA, default_temporality_mapping()),
+ application:load(opentelemetry_experimental),
+ ok = application:set_env(opentelemetry_experimental, readers, [#{id => otel_test_reader,
+ module => otel_metric_reader,
+ config => #{exporter => {otel_metric_exporter_pid, self()},
+ default_temporality_mapping =>
+ DeltaCounterTemporality}}]),
+
+ {ok, _} = application:ensure_all_started(opentelemetry_experimental),
+
Config;
init_per_testcase(delta_observable_counter, Config) ->
DeltaObservableCounterTemporality = maps:put(?KIND_OBSERVABLE_COUNTER, ?TEMPORALITY_DELTA, default_temporality_mapping()),
@@ -271,10 +318,18 @@ float_counter(_Config) ->
?assertEqual(ok, ?counter_add(CounterName, 5.5, #{<<"c">> => <<"b">>})),
?assertEqual(ok, ?counter_add(CounterName, 5, #{<<"c">> => <<"b">>})),
+ %% without attributes
+ ?assertEqual(ok, ?counter_add(CounterName, 1.2)),
+ ?assertEqual(ok, otel_counter:add(Counter, 2.1)),
+
+ %% negative values are discarded
+ ?assertEqual(ok, ?counter_add(CounterName, -2, #{<<"c">> => <<"b">>})),
+ ?assertEqual(ok, otel_counter:add(Counter, -2)),
+
otel_meter_server:force_flush(),
?assertSumReceive(f_counter, <<"macro made counter description">>, kb,
- [{20.8, #{<<"c">> => <<"b">>}}]),
+ [{3.3, #{}}, {20.8, #{<<"c">> => <<"b">>}}]),
ok.
@@ -295,10 +350,15 @@ float_updown_counter(_Config) ->
?assertEqual(ok, ?updown_counter_add(CounterName, -5.5, #{<<"c">> => <<"b">>})),
?assertEqual(ok, ?updown_counter_add(CounterName, 5, #{<<"c">> => <<"b">>})),
+ %% without attributes
+ ?assertEqual(ok, ?updown_counter_add(CounterName, 1.2)),
+ ?assertEqual(ok, otel_updown_counter:add(Counter, 2.1)),
+
+
otel_meter_server:force_flush(),
?assertSumReceive(f_counter, <<"macro made updown counter description">>, kb,
- [{10.0, #{<<"c">> => <<"b">>}}]),
+ [{3.3, #{}}, {10.0, #{<<"c">> => <<"b">>}}]),
ok.
@@ -319,6 +379,14 @@ float_histogram(_Config) ->
?assertEqual(ok, otel_histogram:record(Counter, 10.3, #{<<"c">> => <<"b">>})),
?assertEqual(ok, ?histogram_record(CounterName, 5.5, #{<<"c">> => <<"b">>})),
+ %% without attributes
+ ?assertEqual(ok, ?histogram_record(CounterName, 1.2)),
+ ?assertEqual(ok, otel_histogram:record(Counter, 2.1)),
+
+ %% negative values are discarded
+ ?assertEqual(ok, ?histogram_record(CounterName, -2, #{<<"c">> => <<"b">>})),
+ ?assertEqual(ok, otel_histogram:record(Counter, -2)),
+
%% float type accepts integers
?assertEqual(ok, ?histogram_record(CounterName, 5, #{<<"c">> => <<"b">>})),
@@ -334,7 +402,9 @@ float_histogram(_Config) ->
min=Min,
max=Max,
sum=Sum} <- Datapoints],
- ?assertEqual([], [{#{<<"c">> => <<"b">>}, [0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0], 5, 10.3, 31.1}]
+ ?assertEqual([], [
+ {#{<<"c">> => <<"b">>}, [0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0], 5, 10.3, 31.1},
+ {#{}, [0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0], 1.2, 2.1, 3.3}]
-- AttributeBuckets, AttributeBuckets)
after
5000 ->
@@ -414,8 +484,7 @@ provider_test(_Config) ->
%% sum agg is default delta temporality so counter will reset
?assertEqual(ok, otel_counter:add(Counter, 7, #{<<"c">> => <<"b">>})),
otel_meter_server:force_flush(),
- ?assertSumReceive(a_counter, <<"counter description">>, kb, [{7, #{<<"c">> => <<"b">>}},
- {0, #{<<"a">> => <<"b">>, <<"d">> => <<"e">>}}]),
+ ?assertSumReceive(a_counter, <<"counter description">>, kb, [{7, #{<<"c">> => <<"b">>}}]),
ok.
@@ -441,17 +510,17 @@ view_creation_test(_Config) ->
?assert(otel_meter_server:add_view(view_a, #{instrument_name => a_counter}, #{aggregation_module => otel_aggregation_sum})),
- View = otel_view:new(#{instrument_name => a_counter}, #{aggregation_module => otel_aggregation_sum}),
+ {ok, View} = otel_view:new(#{instrument_name => a_counter}, #{aggregation_module => otel_aggregation_sum}),
%% view name becomes the instrument name
?assertEqual(a_counter, View#view.name),
Matches = otel_view:match_instrument_to_views(Counter, [View]),
?assertMatch([_], Matches),
- ViewUnitMatch = otel_view:new(#{instrument_name => CounterName, instrument_unit => CounterUnit}, #{aggregation_module => otel_aggregation_sum}),
+ {ok, ViewUnitMatch} = otel_view:new(#{instrument_name => CounterName, instrument_unit => CounterUnit}, #{aggregation_module => otel_aggregation_sum}),
?assertMatch([{#view{}, _}], otel_view:match_instrument_to_views(Counter, [ViewUnitMatch])),
- ViewUnitNotMatch = otel_view:new(#{instrument_name => CounterName, instrument_unit => not_matching}, #{aggregation_module => otel_aggregation_sum}),
+ {ok, ViewUnitNotMatch} = otel_view:new(#{instrument_name => CounterName, instrument_unit => not_matching}, #{aggregation_module => otel_aggregation_sum}),
?assertMatch([{undefined, _}], otel_view:match_instrument_to_views(Counter, [ViewUnitNotMatch])),
%% views require a unique name
@@ -467,6 +536,36 @@ view_creation_test(_Config) ->
ok.
+wildcard_view(_Config) ->
+ Meter = opentelemetry_experimental:get_meter(),
+
+ ViewCriteria = #{instrument_name => '*'},
+ ViewConfig = #{aggregation_module => otel_aggregation_drop},
+
+ ?assert(otel_meter_server:add_view(ViewCriteria, ViewConfig)),
+
+ CounterName = a_counter,
+ CounterDesc = <<"counter description">>,
+ CounterUnit = kb,
+
+ Counter = otel_counter:create(Meter, CounterName,
+ #{description => CounterDesc,
+ unit => CounterUnit}),
+
+ ?assertEqual(ok, otel_counter:add(Counter, 1, #{})),
+
+ otel_meter_server:force_flush(),
+
+ ?assertNotReceive(CounterName, CounterDesc, CounterUnit),
+
+ {ok, View} = otel_view:new(ViewCriteria, ViewConfig),
+ ?assertMatch([{#view{}, _}], otel_view:match_instrument_to_views(Counter, [View])),
+
+ %% not possible to create wildcard views with a name
+ {error, named_wildcard_view} = otel_view:new(view_name, ViewCriteria, ViewConfig),
+
+ ok.
+
counter_add(_Config) ->
Meter = opentelemetry_experimental:get_meter(),
@@ -624,12 +723,7 @@ delta_explicit_histograms(_Config) ->
min=Min,
max=Max,
sum=Sum} <- Datapoints1],
- ?assertEqual([], [{#{<<"c">> => <<"b">>}, [0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0], 88, 88, 88},
- {#{<<"a">> => <<"b">>,<<"d">> => <<"e">>},
- [0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0],
- infinity,-9.223372036854776e18,0}
- ]
- -- AttributeBuckets1, AttributeBuckets1)
+ ?assertEqual([], [{#{<<"c">> => <<"b">>}, [0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0], 88, 88, 88}] -- AttributeBuckets1, AttributeBuckets1)
after
5000 ->
ct:fail(histogram_receive_timeout)
@@ -833,7 +927,7 @@ kill_server(_Config) ->
otel_meter_server:force_flush(),
%% at this time a crashed meter server will mean losing the recorded metrics up to that point
- ?assertNotReceive(a_counter, <<"counter description">>, kb),
+ ?assertSumReceive(a_counter, <<"counter description">>, kb, []),
?assertSumReceive(z_counter, <<"counter description">>, kb, [{9, #{<<"c">> => <<"b">>}}]),
ok.
@@ -1138,3 +1232,596 @@ bad_observable_return(_Config) ->
?assertSumReceive(CounterName2, <<"observable counter 2 description">>, kb, [{8, #{}}]),
ok.
+
+advisory_params(_Config) ->
+ DefaultMeter = otel_meter_default,
+
+ Meter = opentelemetry_experimental:get_meter(),
+ ?assertMatch({DefaultMeter, _}, Meter),
+
+ % explicit_bucket_boundaries allowed only for histograms
+ Counter = otel_counter:create(Meter, invalid_1,
+ #{advisory_params => #{explicit_bucket_boundaries => [1, 2, 3]}}),
+ ?assertEqual(Counter#instrument.advisory_params, #{}),
+
+ % advisory parameters different from explicit_bucket_boundaries are not allowed
+ Counter1 = otel_counter:create(Meter, invalid_2, #{advisory_params => #{invalid => invalid}}),
+ ?assertEqual(Counter1#instrument.advisory_params, #{}),
+
+ % explicit_bucket_boundaries should be an ordered list of numbers
+ Histo1 = otel_histogram:create(Meter, invalid_3,
+ #{advisory_params => #{explicit_bucket_boundaries => invalid}}),
+ ?assertEqual(Histo1#instrument.advisory_params, #{}),
+
+ Histo2 = otel_histogram:create(Meter, invalid_4,
+ #{advisory_params => #{explicit_bucket_boundaries => [2,1,4]}}),
+ ?assertEqual(Histo2#instrument.advisory_params, #{}),
+
+ % when valid use the explicit_bucket_boundaries from advisory_params if not set in a view
+ Histogram = otel_histogram:create(Meter, a_histogram,
+ #{advisory_params => #{explicit_bucket_boundaries => [10, 20, 30]}}),
+ ?assertEqual(Histogram#instrument.advisory_params, #{explicit_bucket_boundaries => [10, 20, 30]}),
+
+ ?assertEqual(ok, otel_histogram:record(Histogram, 15, #{<<"a">> => <<"1">>})),
+ ?assertEqual(ok, otel_histogram:record(Histogram, 50, #{<<"a">> => <<"1">>})),
+ ?assertEqual(ok, otel_histogram:record(Histogram, 26, #{<<"a">> => <<"2">>})),
+
+ otel_meter_server:force_flush(),
+
+ receive
+ {otel_metric, #metric{name=a_histogram,
+ data=#histogram{datapoints=Datapoints}}} ->
+ AttributeBuckets =
+ lists:sort([{Attributes, Buckets, Min, Max, Sum} || #histogram_datapoint{bucket_counts=Buckets,
+ attributes=Attributes,
+ min=Min,
+ max=Max,
+ sum=Sum} <- Datapoints]),
+ ?assertEqual([], [{#{<<"a">> => <<"1">>}, [0,1,0,1], 15, 50, 65},
+ {#{<<"a">> => <<"2">>}, [0,0,1,0], 26, 26, 26}]
+ -- AttributeBuckets, AttributeBuckets)
+ after
+ 5000 ->
+ ct:fail(histogram_receive_timeout)
+ end,
+
+ % explicit_bucket_boundaries from view have precedence
+ ?assert(otel_meter_server:add_view(view, #{instrument_name => b_histogram}, #{
+ aggregation_module => otel_aggregation_histogram_explicit,
+ aggregation_options => #{explicit_bucket_boundaries => [10, 100]}})),
+
+ HistogramB = otel_histogram:create(Meter, b_histogram,
+ #{advisory_params => #{explicit_bucket_boundaries => [10, 20, 30]}}),
+ ?assertEqual(HistogramB#instrument.advisory_params, #{explicit_bucket_boundaries => [10, 20, 30]}),
+
+ ?assertEqual(ok, otel_histogram:record(HistogramB, 15, #{<<"a">> => <<"1">>})),
+ ?assertEqual(ok, otel_histogram:record(HistogramB, 50, #{<<"a">> => <<"1">>})),
+ ?assertEqual(ok, otel_histogram:record(HistogramB, 26, #{<<"a">> => <<"2">>})),
+
+ otel_meter_server:force_flush(),
+
+ receive
+ {otel_metric, #metric{name=view,
+ data=#histogram{datapoints=DatapointsB}}} ->
+ AttributeBucketsB =
+ lists:sort([{Attributes, Buckets, Min, Max, Sum} || #histogram_datapoint{bucket_counts=Buckets,
+ attributes=Attributes,
+ min=Min,
+ max=Max,
+ sum=Sum} <- DatapointsB]),
+ ?assertEqual([], [{#{<<"a">> => <<"1">>}, [0,2,0], 15, 50, 65},
+ {#{<<"a">> => <<"2">>}, [0,1,0], 26, 26, 26}]
+ -- AttributeBucketsB, AttributeBucketsB)
+ after
+ 1000 ->
+ ct:fail(histogram_receive_timeout)
+ end.
+
+histogram_aggregation_options(_Config) ->
+ DefaultMeter = otel_meter_default,
+
+ Meter = opentelemetry_experimental:get_meter(),
+ ?assertMatch({DefaultMeter, _}, Meter),
+
+ ?assert(otel_meter_server:add_view(view, #{instrument_name => histogram}, #{
+ aggregation_module => otel_aggregation_histogram_explicit,
+ aggregation_options => #{explicit_bucket_boundaries => [10, 100]}})),
+
+ Histogram = otel_histogram:create(Meter, histogram, #{}),
+
+ ?assertEqual(ok, otel_histogram:record(Histogram, 15, #{<<"a">> => <<"1">>})),
+ ?assertEqual(ok, otel_histogram:record(Histogram, 50, #{<<"a">> => <<"1">>})),
+ ?assertEqual(ok, otel_histogram:record(Histogram, 26, #{<<"a">> => <<"2">>})),
+
+ otel_meter_server:force_flush(),
+
+ receive
+ {otel_metric, #metric{name=view,
+ data=#histogram{datapoints=DatapointsB}}} ->
+ AttributeBucketsB =
+ lists:sort([{Attributes, Buckets, Min, Max, Sum} || #histogram_datapoint{bucket_counts=Buckets,
+ attributes=Attributes,
+ min=Min,
+ max=Max,
+ sum=Sum} <- DatapointsB]),
+ ?assertEqual([], [{#{<<"a">> => <<"1">>}, [0,2,0], 15, 50, 65},
+ {#{<<"a">> => <<"2">>}, [0,1,0], 26, 26, 26}]
+ -- AttributeBucketsB, AttributeBucketsB)
+ after
+ 1000 ->
+ ct:fail(histogram_receive_timeout)
+ end.
+
+sync_delta_histogram(_Config) ->
+ DefaultMeter = otel_meter_default,
+
+ Meter = opentelemetry_experimental:get_meter(),
+ ?assertMatch({DefaultMeter, _}, Meter),
+
+ ?assert(otel_meter_server:add_view(http_req_view, #{instrument_name => http_requests}, #{
+ aggregation_module => otel_aggregation_histogram_explicit,
+ aggregation_options => #{explicit_bucket_boundaries => []}})),
+
+ HttpReqHistogram = otel_histogram:create(Meter, http_requests, #{}),
+
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 50, #{verb => <<"GET">>,
+ status => 200})),
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 100, #{verb => <<"GET">>,
+ status => 200})),
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 1, #{verb => <<"GET">>,
+ status => 500})),
+
+
+ otel_meter_server:force_flush(),
+
+ receive
+ {otel_metric, #metric{name=http_req_view,
+ data=#histogram{datapoints=Datapoints}}} ->
+ AttributeBuckets =
+ lists:sort([{Attributes, Buckets, Min, Max, Sum} || #histogram_datapoint{bucket_counts=Buckets,
+ attributes=Attributes,
+ min=Min,
+ max=Max,
+ sum=Sum} <- Datapoints]),
+ ?assertEqual([], [{#{status => 200,verb => <<"GET">>},[2],50,100,150},
+ {#{status => 500,verb => <<"GET">>},[1],1,1,1}]
+ -- AttributeBuckets, AttributeBuckets)
+ after
+ 1000 ->
+ ct:fail(histogram_receive_timeout)
+ end,
+
+ otel_meter_server:force_flush(),
+
+ %% TODO: check for nothing
+
+ receive
+ {otel_metric, #metric{name=http_req_view,
+ data=#histogram{datapoints=[]}}} ->
+ ok
+ end,
+
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 5, #{verb => <<"GET">>,
+ status => 500})),
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 2, #{verb => <<"GET">>,
+ status => 500})),
+
+ otel_meter_server:force_flush(),
+
+ receive
+ {otel_metric, #metric{name=http_req_view,
+ data=#histogram{datapoints=Datapoints1}}} ->
+ AttributeBuckets1 =
+ lists:sort([{Attributes, Buckets, Min, Max, Sum} || #histogram_datapoint{bucket_counts=Buckets,
+ attributes=Attributes,
+ min=Min,
+ max=Max,
+ sum=Sum} <- Datapoints1]),
+ ?assertEqual([], [{#{status => 500,verb => <<"GET">>},[2],2,5,7}]
+ -- AttributeBuckets1, AttributeBuckets1)
+ after
+ 1000 ->
+ ct:fail(histogram_receive_timeout)
+ end,
+
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 100, #{verb => <<"GET">>,
+ status => 200})),
+
+ otel_meter_server:force_flush(),
+
+ receive
+ {otel_metric, #metric{name=http_req_view,
+ data=#histogram{datapoints=Datapoints2}}} ->
+ AttributeBuckets2 =
+ lists:sort([{Attributes, Buckets, Min, Max, Sum} || #histogram_datapoint{bucket_counts=Buckets,
+ attributes=Attributes,
+ min=Min,
+ max=Max,
+ sum=Sum} <- Datapoints2]),
+ ?assertEqual([], [{#{status => 200,verb => <<"GET">>},[1],100,100,100}]
+ -- AttributeBuckets2, AttributeBuckets2)
+ after
+ 1000 ->
+ ct:fail(histogram_receive_timeout)
+ end,
+
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 200, #{verb => <<"GET">>,
+ status => 200})),
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 30, #{verb => <<"GET">>,
+ status => 200})),
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 50, #{verb => <<"GET">>,
+ status => 200})),
+ otel_meter_server:force_flush(),
+
+ receive
+ {otel_metric, #metric{name=http_req_view,
+ data=#histogram{datapoints=Datapoints3}}} ->
+ AttributeBuckets3 =
+ lists:sort([{Attributes, Buckets, Min, Max, Sum} || #histogram_datapoint{bucket_counts=Buckets,
+ attributes=Attributes,
+ min=Min,
+ max=Max,
+ sum=Sum} <- Datapoints3]),
+ ?assertEqual([], [{#{status => 200,verb => <<"GET">>},[3],30,200,280}]
+ -- AttributeBuckets3, AttributeBuckets3)
+ after
+ 1000 ->
+ ct:fail(histogram_receive_timeout)
+ end,
+ ok.
+
+sync_cumulative_histogram(_Config) ->
+ DefaultMeter = otel_meter_default,
+
+ Meter = opentelemetry_experimental:get_meter(),
+ ?assertMatch({DefaultMeter, _}, Meter),
+
+ ?assert(otel_meter_server:add_view(http_req_view, #{instrument_name => http_requests}, #{
+ aggregation_module => otel_aggregation_histogram_explicit,
+ aggregation_options => #{explicit_bucket_boundaries => []}})),
+
+ HttpReqHistogram = otel_histogram:create(Meter, http_requests, #{}),
+
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 50, #{verb => <<"GET">>,
+ status => 200})),
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 100, #{verb => <<"GET">>,
+ status => 200})),
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 1, #{verb => <<"GET">>,
+ status => 500})),
+
+
+ otel_meter_server:force_flush(),
+
+ receive
+ {otel_metric, #metric{name=http_req_view,
+ data=#histogram{datapoints=Datapoints}}} ->
+ AttributeBuckets =
+ lists:sort([{Attributes, Buckets, Min, Max, Sum} || #histogram_datapoint{bucket_counts=Buckets,
+ attributes=Attributes,
+ min=Min,
+ max=Max,
+ sum=Sum} <- Datapoints]),
+ ?assertEqual([], [{#{status => 200,verb => <<"GET">>},[2],50,100,150},
+ {#{status => 500,verb => <<"GET">>},[1],1,1,1}]
+ -- AttributeBuckets, AttributeBuckets)
+ after
+ 1000 ->
+ ct:fail(histogram_receive_timeout)
+ end,
+
+ otel_meter_server:force_flush(),
+
+ receive
+ {otel_metric, #metric{name=http_req_view,
+ data=#histogram{datapoints=Datapoints0}}} ->
+ AttributeBuckets0 =
+ lists:sort([{Attributes, Buckets, Min, Max, Sum} || #histogram_datapoint{bucket_counts=Buckets,
+ attributes=Attributes,
+ min=Min,
+ max=Max,
+ sum=Sum} <- Datapoints0]),
+ ?assertEqual([], [{#{status => 200,verb => <<"GET">>},[2],50,100,150},
+ {#{status => 500,verb => <<"GET">>},[1],1,1,1}]
+ -- AttributeBuckets0, AttributeBuckets0)
+ after
+ 1000 ->
+ ct:fail(histogram_receive_timeout)
+ end,
+
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 5, #{verb => <<"GET">>,
+ status => 500})),
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 2, #{verb => <<"GET">>,
+ status => 500})),
+
+ otel_meter_server:force_flush(),
+
+ receive
+ {otel_metric, #metric{name=http_req_view,
+ data=#histogram{datapoints=Datapoints1}}} ->
+ AttributeBuckets1 =
+ lists:sort([{Attributes, Buckets, Min, Max, Sum} || #histogram_datapoint{bucket_counts=Buckets,
+ attributes=Attributes,
+ min=Min,
+ max=Max,
+ sum=Sum} <- Datapoints1]),
+ ?assertEqual([], [{#{status => 200,verb => <<"GET">>},[2],50,100,150},
+ {#{status => 500,verb => <<"GET">>},[3],1,5,8}]
+ -- AttributeBuckets1, AttributeBuckets1)
+ after
+ 1000 ->
+ ct:fail(histogram_receive_timeout)
+ end,
+
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 100, #{verb => <<"GET">>,
+ status => 200})),
+
+ otel_meter_server:force_flush(),
+
+ receive
+ {otel_metric, #metric{name=http_req_view,
+ data=#histogram{datapoints=Datapoints2}}} ->
+ AttributeBuckets2 =
+ lists:sort([{Attributes, Buckets, Min, Max, Sum} || #histogram_datapoint{bucket_counts=Buckets,
+ attributes=Attributes,
+ min=Min,
+ max=Max,
+ sum=Sum} <- Datapoints2]),
+ ?assertEqual([], [{#{status => 200,verb => <<"GET">>},[3],50,100,250},
+ {#{status => 500,verb => <<"GET">>},[3],1,5,8}]
+ -- AttributeBuckets2, AttributeBuckets2)
+ after
+ 1000 ->
+ ct:fail(histogram_receive_timeout)
+ end,
+
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 100, #{verb => <<"GET">>,
+ status => 200})),
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 30, #{verb => <<"GET">>,
+ status => 200})),
+ ?assertEqual(ok, otel_histogram:record(HttpReqHistogram, 50, #{verb => <<"GET">>,
+ status => 200})),
+ otel_meter_server:force_flush(),
+
+ receive
+ {otel_metric, #metric{name=http_req_view,
+ data=#histogram{datapoints=Datapoints3}}} ->
+ AttributeBuckets3 =
+ lists:sort([{Attributes, Buckets, Min, Max, Sum} || #histogram_datapoint{bucket_counts=Buckets,
+ attributes=Attributes,
+ min=Min,
+ max=Max,
+ sum=Sum} <- Datapoints3]),
+ ?assertEqual([], [{#{status => 200,verb => <<"GET">>},[6],30,100,430},
+ {#{status => 500,verb => <<"GET">>},[3],1,5,8}]
+ -- AttributeBuckets3, AttributeBuckets3)
+ after
+ 1000 ->
+ ct:fail(histogram_receive_timeout)
+ end,
+ ok.
+
+async_cumulative_page_faults(_Config) ->
+ DefaultMeter = otel_meter_default,
+
+ Meter = opentelemetry_experimental:get_meter(),
+ ?assertMatch({DefaultMeter, _}, Meter),
+
+ CounterName = page_faults,
+ CounterDesc = <<"number of page faults">>,
+ CounterUnit = 1,
+
+ ?assert(otel_meter_server:add_view(#{instrument_name => CounterName},
+ #{aggregation_module => otel_aggregation_sum})),
+
+ %% use an atomic to change the returned value of the observable callback on each call
+ IntervalCounter = atomics:new(1, []),
+ Pid1001 = #{pid => 1001},
+ Pid1002 = #{pid => 1002},
+ Pid1003 = #{pid => 1003},
+
+ %% tuple of the measurements to return from the observable callback for each time interval
+ %% and the corresponding expected metrics to get from the exporter.
+ MeasurementsAndExpected = {{[{50, Pid1001}, {30, Pid1002}],
+ [{50, Pid1001}, {30, Pid1002}]},
+ {[{53, Pid1001}, {38, Pid1002}],
+ [{53, Pid1001}, {38, Pid1002}]},
+ {[{56, Pid1001}, {42, Pid1002}],
+ [{56, Pid1001}, {42, Pid1002}]},
+ {[{60, Pid1001}, {47, Pid1002}],
+ [{60, Pid1001}, {47, Pid1002}]},
+ {[{53, Pid1002}, {5, Pid1003}],
+ [{53, Pid1002}, {5, Pid1003}]},
+ {[{10, Pid1001}, {57, Pid1002}, {8, Pid1003}],
+ [{10, Pid1001}, {57, Pid1002}, {8, Pid1003}]}},
+
+ Counter = otel_meter:create_observable_counter(Meter, CounterName,
+ fun(_Args) ->
+ Interval = atomics:add_get(IntervalCounter, 1, 1),
+ element(1, element(Interval, MeasurementsAndExpected))
+ end,
+ [],
+ #{description => CounterDesc,
+ unit => CounterUnit}),
+
+ ?assertMatch(#instrument{meter = {DefaultMeter,_},
+ module = DefaultMeter,
+ name = CounterName,
+ description = CounterDesc,
+ kind = observable_counter,
+ unit = CounterUnit,
+ callback=_}, Counter),
+
+ lists:foreach(fun({_, Expected}) ->
+ otel_meter_server:force_flush(),
+
+ %% verify the delta metrics
+ check_observer_results(CounterName, Expected)
+ end, tuple_to_list(MeasurementsAndExpected)),
+
+ ok.
+
+async_delta_page_faults(_Config) ->
+ DefaultMeter = otel_meter_default,
+
+ Meter = opentelemetry_experimental:get_meter(),
+ ?assertMatch({DefaultMeter, _}, Meter),
+
+ CounterName = page_faults,
+ CounterDesc = <<"number of page faults">>,
+ CounterUnit = 1,
+
+ ?assert(otel_meter_server:add_view(#{instrument_name => CounterName},
+ #{aggregation_module => otel_aggregation_sum})),
+
+ %% use an atomic to change the returned value of the observable callback on each call
+ IntervalCounter = atomics:new(1, []),
+ Pid1001 = #{pid => 1001},
+ Pid1002 = #{pid => 1002},
+ Pid1003 = #{pid => 1003},
+
+ %% tuple of the measurements to return from the observable callback for each time interval
+ %% and the corresponding expected metrics to get from the exporter.
+ MeasurementsAndExpected = {{[{50, Pid1001}, {30, Pid1002}],
+ [{50, Pid1001}, {30, Pid1002}]},
+ {[{53, Pid1001}, {38, Pid1002}],
+ [{3, Pid1001}, {8, Pid1002}]},
+ {[{56, Pid1001}, {42, Pid1002}],
+ [{3, Pid1001}, {4, Pid1002}]},
+ {[{60, Pid1001}, {47, Pid1002}],
+ [{4, Pid1001}, {5, Pid1002}]},
+ {[{53, Pid1002}, {5, Pid1003}],
+ [{6, Pid1002}, {5, Pid1003}]},
+ {[{10, Pid1001}, {57, Pid1002}, {8, Pid1003}],
+ [{10, Pid1001}, {4, Pid1002}, {3, Pid1003}]}},
+
+ Counter = otel_meter:create_observable_counter(Meter, CounterName,
+ fun(_Args) ->
+ Interval = atomics:add_get(IntervalCounter, 1, 1),
+ element(1, element(Interval, MeasurementsAndExpected))
+ end,
+ [],
+ #{description => CounterDesc,
+ unit => CounterUnit}),
+
+ ?assertMatch(#instrument{meter = {DefaultMeter,_},
+ module = DefaultMeter,
+ name = CounterName,
+ description = CounterDesc,
+ kind = observable_counter,
+ unit = CounterUnit,
+ callback=_}, Counter),
+
+ lists:foldl(fun({_, Expected}, {LastPid1001StartTime, LastPid1002StartTime}) ->
+ otel_meter_server:force_flush(),
+
+ %% verify the delta metrics
+ Results = check_observer_results(CounterName, Expected),
+
+ %% check that the start times change on each collection
+ Pid1001StartTime1 =
+ case lists:keyfind(#{pid => 1001}, 2, Results) of
+ false ->
+ false;
+ {_, _, Pid1001StartTime, _} ->
+ ?assertNotEqual(Pid1001StartTime, LastPid1001StartTime),
+ Pid1001StartTime
+ end,
+
+                          Pid1002StartTime1 =
+                              case lists:keyfind(#{pid => 1002}, 2, Results) of
+                                  false ->
+                                      false;
+                                  {_, _, Pid1002StartTime, _} ->
+                                      ?assertNotEqual(Pid1002StartTime, LastPid1002StartTime),
+                                      Pid1002StartTime
+                              end,
+
+ {Pid1001StartTime1, Pid1002StartTime1}
+ end, {0, 0}, tuple_to_list(MeasurementsAndExpected)),
+
+ ok.
+
+async_attribute_removal(_Config) ->
+ DefaultMeter = otel_meter_default,
+
+ Meter = opentelemetry_experimental:get_meter(),
+ ?assertMatch({DefaultMeter, _}, Meter),
+
+ CounterName = page_faults,
+ CounterDesc = <<"number of page faults">>,
+ CounterUnit = 1,
+
+ ?assert(otel_meter_server:add_view(#{instrument_name => CounterName},
+ #{aggregation_module => otel_aggregation_sum,
+ attribute_keys => []})),
+
+ %% use an atomic to change the returned value of the observable callback on each call
+ IntervalCounter = atomics:new(1, []),
+ Pid1001 = #{pid => 1001},
+ Pid1002 = #{pid => 1002},
+ Pid1003 = #{pid => 1003},
+
+ %% tuple of the measurements to return from the observable callback for each time interval
+ %% and the corresponding expected metrics to get from the exporter.
+ MeasurementsAndExpected = {{[{50, Pid1001}, {30, Pid1002}],
+ [{80, #{}}]},
+ {[{53, Pid1001}, {38, Pid1002}],
+ [{91, #{}}]},
+ {[{56, Pid1001}, {42, Pid1002}],
+ [{98, #{}}]},
+ {[{60, Pid1001}, {47, Pid1002}],
+ [{107, #{}}]},
+ {[{53, Pid1002}, {5, Pid1003}],
+ [{58, #{}}]},
+ {[{10, Pid1001}, {57, Pid1002}, {8, Pid1003}],
+ [{75, #{}}]}},
+
+ Counter = otel_meter:create_observable_counter(Meter, CounterName,
+ fun(_Args) ->
+ Interval = atomics:add_get(IntervalCounter, 1, 1),
+ element(1, element(Interval, MeasurementsAndExpected))
+ end,
+ [],
+ #{description => CounterDesc,
+ unit => CounterUnit}),
+
+ ?assertMatch(#instrument{meter = {DefaultMeter,_},
+ module = DefaultMeter,
+ name = CounterName,
+ description = CounterDesc,
+ kind = observable_counter,
+ unit = CounterUnit,
+ callback=_}, Counter),
+
+ lists:foreach(fun({_, Expected}) ->
+ otel_meter_server:force_flush(),
+ check_observer_results(CounterName, Expected)
+ end, tuple_to_list(MeasurementsAndExpected)),
+
+ ok.
+%%
+
+check_observer_results(MetricName, Expected) ->
+ receive
+ {otel_metric, #metric{name=Name,
+ data=#sum{datapoints=MetricDatapoints}}}
+ when MetricName =:= Name ->
+ Datapoints =
+ [{MetricValue, MetricAttributes, StartTime, Time} ||
+ #datapoint{value=MetricValue,
+ attributes=MetricAttributes,
+ start_time=StartTime,
+ time=Time
+ } <- MetricDatapoints, StartTime =< Time
+ ],
+
+ DatapointsWithoutTime = [{V, A} || {V, A, _, _} <- Datapoints],
+ ?assert(is_subset(Expected, DatapointsWithoutTime), {Expected, MetricDatapoints}),
+ Datapoints
+ after
+ 5000 ->
+ ct:fail({metric_receive_timeout, ?LINE})
+ end.
+
+is_subset(List1, List2) ->
+ sets:is_subset(sets:from_list(List1), sets:from_list(List2)).
diff --git a/apps/opentelemetry_exporter/README.md b/apps/opentelemetry_exporter/README.md
index ea831b5b..d75320e6 100644
--- a/apps/opentelemetry_exporter/README.md
+++ b/apps/opentelemetry_exporter/README.md
@@ -156,24 +156,28 @@ The second element of the configuration tuple is a configuration map. It can con
- `compression` - an atom. Setting it to `gzip` enables gzip compression.
- `ssl_options` - a list of SSL options. See Erlang's [SSL docs](https://www.erlang.org/doc/man/ssl.html#TLS/DTLS%20OPTION%20DESCRIPTIONS%20-%20CLIENT) for what options are available.
-## Contributing
+### Upgrading OpenTelemetry Protos
-This project uses a submodule during development, it is not needed if the application is being used as a dependency, so be sure to clone with the option `recurse-submodules`:
+The protos are in a separate repository,
+[opentelemetry-proto](https://github.com/open-telemetry/opentelemetry-proto/),
+and used as a submodule in this repo. To update the Erlang protobuf modules and
+GRPC client first update the submodule and then use the [rebar3 grpcbox
+plugin](https://github.com/tsloughter/grpcbox_plugin/) to generate the client:
```shell
-$ git clone --recurse-submodules https://github.com/opentelemetry-beam/opentelemetry_exporter
-```
+$ pushd apps/opentelemetry_exporter/opentelemetry-proto
+$ git fetch origin
+$ git checkout <commit-or-tag>
+$ popd
-### Upgrading OpenTelemetry Protos
+# bug in grpcbox plugin means we need to delete _pb files first to regenerate them
+$ rm ./apps/opentelemetry_exporter/src/opentelemetry_exporter_trace_service_pb.erl ./apps/opentelemetry_exporter/src/opentelemetry_exporter_metrics_service_pb.erl ./apps/opentelemetry_exporter/src/opentelemetry_exporter_logs_service_pb.erl
-The protos are in a separate repository, [opentelemetry-proto](https://github.com/open-telemetry/opentelemetry-proto/), and used as a submodule in this repo. To update the Erlang protobuf modules and GRPC client first update the submodule and then use the [rebar3 grpcbox plugin](https://github.com/tsloughter/grpcbox_plugin/) to generate the client:
-
-```shell
-$ git submodule update --remote opentelemetry-proto
$ rebar3 grpc gen -t client
-===> Writing src/trace_service_pb.erl
-===> Writing src/opentelemetry_proto_collector_trace_v_1_trace_service_client.erl (forcibly overwriting)
-$ mv src/opentelemetry_proto_collector_trace_v_1_trace_service_client.erl src/opentelemetry_trace_service.erl
+...
+$ mv apps/opentelemetry_exporter/src/opentelemetry_proto_collector_trace_v_1_trace_service_client.erl apps/opentelemetry_exporter/src/opentelemetry_trace_service.erl
+$ mv apps/opentelemetry_exporter/src/opentelemetry_proto_collector_logs_v_1_logs_service_client.erl apps/opentelemetry_exporter/src/opentelemetry_logs_service.erl
+$ mv apps/opentelemetry_exporter/src/opentelemetry_proto_collector_metrics_v_1_metrics_service_client.erl apps/opentelemetry_exporter/src/opentelemetry_metrics_service.erl
```
-Then open `src/opentelemetry_trace_service.erl` and fix the module name.
+Then open each moved module and fix the module name.
diff --git a/apps/opentelemetry_exporter/src/opentelemetry_exporter.erl b/apps/opentelemetry_exporter/src/opentelemetry_exporter.erl
index e1412349..18494596 100644
--- a/apps/opentelemetry_exporter/src/opentelemetry_exporter.erl
+++ b/apps/opentelemetry_exporter/src/opentelemetry_exporter.erl
@@ -438,9 +438,20 @@ headers_to_grpc_metadata(Headers) ->
%% make all headers into list strings
headers(List) when is_list(List) ->
- [{unicode:characters_to_list(X), unicode:characters_to_list(Y)} || {X, Y} <- List];
+ Headers = [{unicode:characters_to_list(X), unicode:characters_to_list(Y)} || {X, Y} <- List],
+ add_user_agent(Headers);
headers(_) ->
- [].
+ add_user_agent([]).
+
+add_user_agent(Headers) ->
+ case lists:search(fun({Header, _}) -> string:to_lower(Header) == "user-agent" end, Headers) of
+ {value, _} -> Headers;
+ false -> [{"user-agent", user_agent()} | Headers]
+ end.
+
+user_agent() ->
+ {ok, ExporterVsn} = application:get_key(opentelemetry_exporter, vsn),
+ lists:flatten(io_lib:format("OTel-OTLP-Exporter-erlang/~s", [ExporterVsn])).
recompose_endpoint(Endpoint) ->
case parse_endpoint(Endpoint) of
diff --git a/apps/opentelemetry_exporter/test/opentelemetry_exporter_SUITE.erl b/apps/opentelemetry_exporter/test/opentelemetry_exporter_SUITE.erl
index af52cf50..3eacb3c4 100644
--- a/apps/opentelemetry_exporter/test/opentelemetry_exporter_SUITE.erl
+++ b/apps/opentelemetry_exporter/test/opentelemetry_exporter_SUITE.erl
@@ -18,7 +18,7 @@ groups() ->
[{functional, [], [configuration, span_round_trip, ets_instrumentation_info]},
{grpc, [], [verify_export, verify_metrics_export]},
{grpc_gzip, [], [verify_export]},
- {http_protobuf, [], [verify_export]},
+ {http_protobuf, [], [verify_export, user_agent]},
{http_protobuf_gzip, [], [verify_export]}].
init_per_suite(Config) ->
@@ -168,18 +168,24 @@ verify_metrics_export(Config) ->
configuration(_Config) ->
try
%% Default conf
- ?assertEqual(#{endpoint => #{host => "localhost", path => "/v1/traces", port => 4318, scheme => "http"},
- headers => [], protocol => http_protobuf, ssl_options => [], compression => undefined,
- timeout_ms => 30000},
+ ?assertMatch(#{endpoint := #{host := "localhost", path := "/v1/traces", port := 4318, scheme := "http"},
+ headers := [{"user-agent", _} | _],
+ protocol := http_protobuf,
+ ssl_options := [],
+ compression := undefined,
+ timeout_ms := 30000},
opentelemetry_exporter:init_conf(traces, #{})),
%% Only grpc is implemented for logs and metrics, expecting it to be used by default,
%% no default path must be set for grpc
- MetricsAndLogsDefaultExpected =
- #{endpoint => #{host => "localhost", path => [], port => 4317, scheme => "http"},
- headers => [], protocol => grpc, ssl_options => [], compression => undefined,
- timeout_ms => 30000},
- ?assertEqual(MetricsAndLogsDefaultExpected, opentelemetry_exporter:init_conf(logs, #{})),
- ?assertEqual(MetricsAndLogsDefaultExpected, opentelemetry_exporter:init_conf(metrics, #{})),
+ [?assertMatch(
+ #{endpoint := #{host := "localhost", path := [], port := 4317, scheme := "http"},
+ headers := [{"user-agent", _} | _],
+ protocol := grpc,
+ ssl_options := [],
+ compression := undefined,
+ timeout_ms := 30000},
+ opentelemetry_exporter:init_conf(OtelSignal, #{})
+ ) || OtelSignal <- [logs, metrics]],
?assertMatch(#{endpoint := #{scheme := "http", host := "localhost", port := 9090, path := []},
protocol := http_protobuf,
@@ -267,7 +273,7 @@ configuration(_Config) ->
?assertMatch(#{endpoint :=
#{host := "localhost", path := "/v1/traces", port := 4343,
scheme := "http"},
- headers := [{"key1", "value1"}],
+ headers := [{"user-agent", _}, {"key1", "value1"}],
protocol := http_protobuf
},
opentelemetry_exporter:init_conf(traces, #{})),
@@ -277,7 +283,7 @@ configuration(_Config) ->
?assertMatch(#{endpoint :=
#{host := "localhost", path := "/internal/v1/traces", port := 4343,
scheme := "http"},
- headers := [{"key1", "value1"}],
+ headers := [{"user-agent", _}, {"key1", "value1"}],
compression := undefined,
protocol := http_protobuf},
opentelemetry_exporter:init_conf(traces, #{})),
@@ -308,7 +314,7 @@ configuration(_Config) ->
?assertMatch(#{endpoint :=
#{host := "localhost", path := "/traces/path", port := 5353,
scheme := "http"},
- headers := [{"key2", "value2"}],
+ headers := [{"user-agent", _}, {"key2", "value2"}],
compression := undefined,
protocol := http_protobuf},
opentelemetry_exporter:init_conf(traces, #{})),
@@ -529,3 +535,48 @@ verify_export(Config) ->
?assertMatch(ok, opentelemetry_exporter:export(traces, Tid, Resource, State)),
ok.
+
+user_agent(Config) ->
+ Protocol = ?config(protocol, Config),
+ Compression = ?config(compression, Config),
+ Port = 4318,
+
+ {ok, State} = opentelemetry_exporter:init(
+ traces,
+ test_exporter,
+ #{protocol => Protocol,
+ compression => Compression,
+ endpoints => [{http, "localhost", Port, []}]}),
+
+ Tid = ets:new(span_tab, [duplicate_bag, {keypos, #span.instrumentation_scope}]),
+
+ TraceId = otel_id_generator:generate_trace_id(),
+ SpanId = otel_id_generator:generate_span_id(),
+
+ ParentSpan =
+ #span{name = <<"span-1">>,
+ trace_id = TraceId,
+ span_id = SpanId,
+ kind = ?SPAN_KIND_CLIENT,
+ start_time = opentelemetry:timestamp(),
+ end_time = opentelemetry:timestamp(),
+ status = #status{code=?OTEL_STATUS_UNSET, message = <<"hello I'm unset">>},
+ links = otel_links:new([], 128, 128, 128),
+ events = otel_events:new(128, 128, 128),
+ instrumentation_scope = #instrumentation_scope{name = <<"tracer-1">>,
+ version = <<"0.0.1">>},
+ attributes = otel_attributes:new([{<<"attr-2">>, <<"value-2">>}], 128, 128)},
+ true = ets:insert(Tid, ParentSpan),
+ Resource = otel_resource_env_var:get_resource([]),
+
+ meck:new(httpc),
+ meck:expect(httpc, request, fun(post, {_, Headers, "application/x-protobuf", _}, _, _, _) ->
+ {_, UserAgent} = lists:keyfind("user-agent", 1, Headers),
+ {ok, ExporterVsn} = application:get_key(opentelemetry_exporter, vsn),
+ ExpectedUserAgent = lists:flatten(io_lib:format("OTel-OTLP-Exporter-erlang/~s", [ExporterVsn])),
+ ?assertEqual(ExpectedUserAgent, UserAgent),
+ {ok, {{"1.1", 200, ""}, [], <<>>}}
+ end),
+ ?assertMatch(ok, opentelemetry_exporter:export(traces, Tid, Resource, State)),
+ ?assert(meck:validate(httpc)),
+ meck:unload(httpc).
diff --git a/config/otel-collector-config.yaml b/config/otel-collector-config.yaml
index f82a594a..26729c04 100644
--- a/config/otel-collector-config.yaml
+++ b/config/otel-collector-config.yaml
@@ -14,8 +14,8 @@ exporters:
zipkin:
endpoint: "http://zipkin:9411/api/v2/spans"
- jaeger:
- endpoint: jaeger-all-in-one:14250
+ otlp/jaeger:
+ endpoint: jaeger-all-in-one:4317
tls:
insecure: true
@@ -31,7 +31,7 @@ service:
traces:
receivers: [otlp]
processors: [batch]
- exporters: [logging, zipkin, jaeger]
+ exporters: [logging, zipkin, otlp/jaeger]
metrics:
receivers: [otlp]
processors: [batch]
diff --git a/docker-compose.yml b/docker-compose.yml
index 28cfa01b..d50fbaf2 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,7 +1,7 @@
version: "3"
services:
otel:
- image: otel/opentelemetry-collector-contrib:0.79.0
+ image: otel/opentelemetry-collector-contrib:0.94.0
command: ["--config=/conf/otel-collector-config.yaml"]
privileged: true
ports:
@@ -20,8 +20,5 @@ services:
jaeger-all-in-one:
image: jaegertracing/all-in-one:latest
- restart: always
ports:
- "16686:16686"
- - "14268"
- - "14250"
diff --git a/rebar.config b/rebar.config
index 9a445f3c..d59ca64f 100644
--- a/rebar.config
+++ b/rebar.config
@@ -30,7 +30,8 @@
{profiles,
[{test, [{erl_opts, [nowarn_export_all]},
- {ct_opts, [{ct_hooks, [cth_surefire]}]}]},
+ {ct_opts, [{ct_hooks, [cth_surefire]}]},
+ {deps, [{meck, ">= 0.0.0"}]}]},
{interop, [{deps, [jsone]},
{extra_src_dirs, ["interop"]}]},
diff --git a/renovate.json b/renovate.json
new file mode 100644
index 00000000..a08e452e
--- /dev/null
+++ b/renovate.json
@@ -0,0 +1,13 @@
+{
+ "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+ "extends": [
+ "config:recommended"
+ ],
+ "packageRules": [
+ {
+ "matchDatasources": ["hex"],
+ "matchUpdateTypes": ["patch", "pin", "digest"],
+ "addLabels": ["skip-changelog"]
+ }
+ ]
+}
diff --git a/test/otel_metric_tests.exs b/test/otel_metric_tests.exs
index e377f30b..e5e20b18 100644
--- a/test/otel_metric_tests.exs
+++ b/test/otel_metric_tests.exs
@@ -1,5 +1,5 @@
defmodule OtelMetricTests do
- use ExUnit.Case, async: true
+ use ExUnit.Case, async: false
require OpenTelemetryAPIExperimental.Counter, as: Counter
require OpenTelemetryAPIExperimental.UpDownCounter, as: UpDownCounter
diff --git a/test/otel_tests.exs b/test/otel_tests.exs
index 6da0f584..786e0f65 100644
--- a/test/otel_tests.exs
+++ b/test/otel_tests.exs
@@ -1,5 +1,5 @@
defmodule OtelTests do
- use ExUnit.Case, async: true
+ use ExUnit.Case, async: false
require OpenTelemetry.Tracer, as: Tracer
require OpenTelemetry.Span, as: Span