Provide metrics on root URL

This commit is contained in:
2022-02-20 22:47:48 +01:00
parent 79e83afafb
commit c8a043528b
23 changed files with 869 additions and 127 deletions

View File

@@ -0,0 +1,52 @@
defmodule KafkaexLagExporter.Application do
  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  @moduledoc false

  use Application

  @impl true
  def start(_type, _args) do
    # Options for the KafkaEx consumer group; :commit_interval is
    # forwarded down to the GenConsumer workers.
    group_opts = [
      heartbeat_interval: 1_000,
      commit_interval: 1_000
    ]

    offsets_consumer_spec = %{
      id: KafkaEx.ConsumerGroup,
      start:
        {KafkaEx.ConsumerGroup, :start_link,
         [
           KafkaexLagExporter.ConsumerOffsetsGenConsumer,
           "offsets_group",
           ["__consumer_offsets"],
           group_opts
         ]}
    }

    children = [
      # PromEx first so no telemetry events are missed
      KafkaexLagExporter.PromEx,
      # Telemetry supervisor
      KafkaexLagExporterWeb.Telemetry,
      # PubSub system
      {Phoenix.PubSub, name: KafkaexLagExporter.PubSub},
      # HTTP/HTTPS endpoint
      KafkaexLagExporterWeb.Endpoint,
      # Consumer group reading the internal __consumer_offsets topic
      offsets_consumer_spec
    ]

    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    Supervisor.start_link(children,
      strategy: :one_for_one,
      name: KafkaexLagExporter.Supervisor
    )
  end

  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  @impl true
  def config_change(changed, _new, removed) do
    KafkaexLagExporterWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end

View File

@@ -0,0 +1,52 @@
defmodule KafkaexLagExporter.ConsumerOffsetsGenConsumer do
  @moduledoc """
  GenConsumer implementation that consumes messages from the internal
  Kafka topic `__consumer_offsets` and tracks the latest offset seen
  per consumer group in its state map.
  """
  use KafkaEx.GenConsumer

  alias KafkaEx.Protocol.Fetch.Message

  require Logger

  @impl true
  def init(_topic, _partition, _extra_args) do
    # State is a map of consumer_group (binary) => last seen offset.
    {:ok, %{}}
  end

  # Returns the current state map.
  #
  # Fixed: this must be a synchronous `call` (the original used `cast`,
  # which never returns a reply and has no matching `handle_cast` clause,
  # so it would crash the server).
  #
  # NOTE(review): the GenConsumer worker is started by
  # KafkaEx.ConsumerGroup and is not registered under __MODULE__ —
  # confirm how callers are expected to reach this process.
  def get do
    GenServer.call(__MODULE__, {:get})
  end

  @impl true
  def handle_call({:get}, _from, state) do
    # Fixed: GenServer call handlers must return {:reply, reply, new_state}
    # (the original two-tuple {:reply, state} is an invalid return value).
    {:reply, state, state}
  end

  def handle_call({:push, topic, offset}, _from, state) do
    new_state = Map.put(state, topic, offset)
    {:reply, new_state, new_state}
  end

  @impl true
  def handle_message_set(message_set, state) do
    for %Message{key: key, offset: offset} <- message_set do
      consumer_group = get_consumer_group(key)
      # Fixed: removed stray trailing "}" from the log message.
      Logger.info("consumer_group '#{consumer_group}' has offset '#{offset}'")
      # GenServer.call(__MODULE__, {:push, consumer_group, offset})
    end

    {:async_commit, state}
  end

  # Extracts the consumer group name from a __consumer_offsets message key.
  # The key layout assumed here is: 1-byte prefix, 1-byte version, 2 bytes
  # of padding, then the consumer group name — TODO confirm against the
  # Kafka __consumer_offsets key schema.
  defp get_consumer_group(<<prefix, version, postfix::binary-size(2), consumer_group::binary>>) do
    Logger.debug(fn -> "prefix: " <> inspect(prefix) end)
    Logger.debug(fn -> "version: " <> inspect(version) end)
    Logger.debug(fn -> "postfix: " <> inspect(postfix) end)
    consumer_group
  end
end

View File

@@ -0,0 +1,76 @@
defmodule KafkaexLagExporter.PromEx do
  @moduledoc """
  PromEx integration for this application.

  Setup checklist (see `PromEx.Config` for details):

  1. Configure PromEx in your config files (config.exs, dev.exs, prod.exs,
     releases.exs, etc):

     ```
     config :kafkaex_lag_exporter, KafkaexLagExporter.PromEx,
       disabled: false,
       manual_metrics_start_delay: :no_delay,
       drop_metrics_groups: [],
       grafana: :disabled,
       metrics_server: :disabled
     ```

  2. Start this module early in the application supervision tree so no
     Telemetry events are missed (e.g. before your Repo, or Ecto's init
     events will not be captured and dashboards will miss data points):

     ```
     def start(_type, _args) do
       children = [
         KafkaexLagExporter.PromEx,
         ...
       ]
       ...
     end
     ```

  3. Expose the metrics in `endpoint.ex` (or run a standalone server via
     the `:metrics_server` option). Place this plug before `Plug.Telemetry`
     so scrapes of `/metrics` do not generate their own noisy metrics and
     logs on every Prometheus scrape interval:

     ```
     defmodule KafkaexLagExporterWeb.Endpoint do
       use Phoenix.Endpoint, otp_app: :kafkaex_lag_exporter
       ...
       plug PromEx.Plug, prom_ex_module: KafkaexLagExporter.PromEx
       ...
     end
     ```

  4. Keep `plugins/0` in sync with the application's dependencies, and the
     Grafana dashboard list in `dashboards/0`.
  """

  use PromEx, otp_app: :kafkaex_lag_exporter

  alias PromEx.Plugins

  @impl true
  def plugins do
    # Built-in PromEx plugins currently enabled.
    enabled = [
      Plugins.Application,
      Plugins.Beam
    ]

    # Candidates to enable as the app grows:
    # {Plugins.Phoenix, router: KafkaexLagExporterWeb.Router, endpoint: KafkaexLagExporterWeb.Endpoint},
    # Plugins.Ecto,
    # Plugins.Oban,
    # Plugins.PhoenixLiveView,
    # Plugins.Absinthe,
    # Plugins.Broadway,
    # Custom plugins, e.g.:
    # KafkaexLagExporter.Users.PromExPlugin
    enabled
  end
end