Provide metrics on root URL

This commit is contained in:
2022-02-20 22:47:48 +01:00
parent 79e83afafb
commit c8a043528b
23 changed files with 869 additions and 127 deletions

View File

@@ -1,4 +1,4 @@
# Used by "mix format"
[ [
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"] import_deps: [:phoenix],
inputs: ["*.{ex,exs}", "{config,lib,test}/**/*.{ex,exs}"]
] ]

View File

@@ -1,21 +1,18 @@
# KafkaexLagExporter # KafkaexLagExporter
**TODO: Add description** To start your Phoenix server:
## Installation * Install dependencies with `mix deps.get`
* Start Phoenix endpoint with `mix phx.server` or inside IEx with `iex -S mix phx.server`
If [available in Hex](https://hex.pm/docs/publish), the package can be installed Now you can visit [`localhost:4000`](http://localhost:4000) from your browser.
by adding `kafkaex_lag_exporter` to your list of dependencies in `mix.exs`:
```elixir Ready to run in production? Please [check our deployment guides](https://hexdocs.pm/phoenix/deployment.html).
def deps do
[
{:kafkaex_lag_exporter, "~> 0.1.0"}
]
end
```
Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc) ## Learn more
and published on [HexDocs](https://hexdocs.pm). Once published, the docs can
be found at <https://hexdocs.pm/kafkaex_lag_exporter>.
* Official website: https://www.phoenixframework.org/
* Guides: https://hexdocs.pm/phoenix/overview.html
* Docs: https://hexdocs.pm/phoenix
* Forum: https://elixirforum.com/c/phoenix-forum
* Source: https://github.com/phoenixframework/phoenix

View File

@@ -1,82 +1,99 @@
# This file is responsible for configuring your application
# and its dependencies with the aid of the Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
# General application configuration
import Config import Config
# Configures the endpoint
config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
url: [host: "localhost"],
render_errors: [view: KafkaexLagExporterWeb.ErrorView, accepts: ~w(json), layout: false],
pubsub_server: KafkaexLagExporter.PubSub,
live_view: [signing_salt: "sSgJfjNf"]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
level: :info,
metadata: [:request_id]
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
config :kafka_ex, config :kafka_ex,
# A list of brokers to connect to. This can be in either of the following formats # A list of brokers to connect to. This can be in either of the following formats
# #
# * [{"HOST", port}...] # * [{"HOST", port}...]
# * CSV - `"HOST:PORT,HOST:PORT[,...]"` # * CSV - `"HOST:PORT,HOST:PORT[,...]"`
# * {mod, fun, args} # * {mod, fun, args}
# * &arity_zero_fun/0 # * &arity_zero_fun/0
# * fn -> ... end # * fn -> ... end
# #
# If you receive :leader_not_available # If you receive :leader_not_available
# errors when producing messages, it may be necessary to modify "advertised.host.name" in the # errors when producing messages, it may be necessary to modify "advertised.host.name" in the
# server.properties file. # server.properties file.
# In the case below you would set "advertised.host.name=localhost" # In the case below you would set "advertised.host.name=localhost"
# brokers: [ # brokers: [
# {"kafka1", 9092}, # {"kafka1", 9092},
# {"kafka2", 9092}, # {"kafka2", 9092},
# {"kafka3", 9092} # {"kafka3", 9092}
# ], # ],
brokers: "localhost:9093,localhost:9094,localhost:9095", brokers: "localhost:9093,localhost:9094,localhost:9095",
# #
# OR: # OR:
# brokers: "localhost:9092,localhost:9093,localhost:9094" # brokers: "localhost:9092,localhost:9093,localhost:9094"
# #
# It may be useful to configure your brokers at runtime, for example if you use # It may be useful to configure your brokers at runtime, for example if you use
# service discovery instead of storing your broker hostnames in a config file. # service discovery instead of storing your broker hostnames in a config file.
# To do this, you can use `{mod, fun, args}` or a zero-arity function, and `KafkaEx` # To do this, you can use `{mod, fun, args}` or a zero-arity function, and `KafkaEx`
# will invoke your callback when fetching the `:brokers` configuration. # will invoke your callback when fetching the `:brokers` configuration.
# Note that when using this approach you must return a list of host/port pairs. # Note that when using this approach you must return a list of host/port pairs.
# #
# the default consumer group for worker processes, must be a binary (string) # the default consumer group for worker processes, must be a binary (string)
# NOTE if you are on Kafka < 0.8.2 or if you want to disable the use of # NOTE if you are on Kafka < 0.8.2 or if you want to disable the use of
# consumer groups, set this to :no_consumer_group (this is the # consumer groups, set this to :no_consumer_group (this is the
# only exception to the requirement that this value be a binary) # only exception to the requirement that this value be a binary)
consumer_group: "kafka_ex", consumer_group: "kafka_ex",
# The client_id is the logical grouping of a set of kafka clients. # The client_id is the logical grouping of a set of kafka clients.
client_id: "kafka_ex", client_id: "kafka_ex",
# Set this value to true if you do not want the default # Set this value to true if you do not want the default
# `KafkaEx.Server` worker to start during application start-up - # `KafkaEx.Server` worker to start during application start-up -
# i.e., if you want to start your own set of named workers # i.e., if you want to start your own set of named workers
disable_default_worker: false, disable_default_worker: false,
# Timeout value, in msec, for synchronous operations (e.g., network calls). # Timeout value, in msec, for synchronous operations (e.g., network calls).
# If this value is greater than GenServer's default timeout of 5000, it will also # If this value is greater than GenServer's default timeout of 5000, it will also
# be used as the timeout for work dispatched via KafkaEx.Server.call (e.g., KafkaEx.metadata). # be used as the timeout for work dispatched via KafkaEx.Server.call (e.g., KafkaEx.metadata).
# In those cases, it should be considered a 'total timeout', encompassing both network calls and # In those cases, it should be considered a 'total timeout', encompassing both network calls and
# wait time for the genservers. # wait time for the genservers.
sync_timeout: 3000, sync_timeout: 3000,
# Supervision max_restarts - the maximum amount of restarts allowed in a time frame # Supervision max_restarts - the maximum amount of restarts allowed in a time frame
max_restarts: 10, max_restarts: 10,
# Supervision max_seconds - the time frame in which :max_restarts applies # Supervision max_seconds - the time frame in which :max_restarts applies
max_seconds: 60, max_seconds: 60,
# Interval in milliseconds that GenConsumer waits to commit offsets. # Interval in milliseconds that GenConsumer waits to commit offsets.
commit_interval: 5_000, commit_interval: 5_000,
# Threshold number of messages consumed for GenConsumer to commit offsets # Threshold number of messages consumed for GenConsumer to commit offsets
# to the broker. # to the broker.
commit_threshold: 100, commit_threshold: 100,
# Interval in milliseconds to wait before reconnect to kafka # Interval in milliseconds to wait before reconnect to kafka
sleep_for_reconnect: 400, sleep_for_reconnect: 400,
# This is the flag that enables use of ssl # This is the flag that enables use of ssl
use_ssl: false, use_ssl: false,
# see SSL OPTION DESCRIPTIONS - CLIENT SIDE at http://erlang.org/doc/man/ssl.html # see SSL OPTION DESCRIPTIONS - CLIENT SIDE at http://erlang.org/doc/man/ssl.html
# for supported options # for supported options
ssl_options: [ ssl_options: [
# cacertfile: File.cwd!() <> "/ssl/ca-cert", # cacertfile: File.cwd!() <> "/ssl/ca-cert",
# certfile: File.cwd!() <> "/ssl/cert.pem", # certfile: File.cwd!() <> "/ssl/cert.pem",
# keyfile: File.cwd!() <> "/ssl/key.pem" # keyfile: File.cwd!() <> "/ssl/key.pem"
], ],
# set this to the version of the kafka broker that you are using # set this to the version of the kafka broker that you are using
# include only major.minor.patch versions. must be at least 0.8.0 # include only major.minor.patch versions. must be at least 0.8.0
# use "kayrock" for the new client # use "kayrock" for the new client
kafka_version: "3.1.0" kafka_version: "3.1.0"
config :logger, # Import environment specific config. This must remain at the bottom
level: :info, # of this file so it overrides the configuration defined above.
truncate: 4096 import_config "#{config_env()}.exs"
env_config = Path.expand("#{Mix.env()}.exs", __DIR__)
if File.exists?(env_config) do
import_config(env_config)
end

51
config/dev.exs Normal file
View File

@@ -0,0 +1,51 @@
import Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with esbuild to bundle .js and .css sources.
config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
# Binding to loopback ipv4 address prevents access from other machines.
# Change to `ip: {0, 0, 0, 0}` to allow access from other machines.
http: [ip: {127, 0, 0, 1}, port: 4000],
check_origin: false,
code_reloader: true,
debug_errors: true,
# NOTE(review): dev-only signing secret — never reuse this value outside
# local development (prod reads SECRET_KEY_BASE in config/runtime.exs).
secret_key_base: "lttLR5uTgFy2WzfJLo+uXLWnogim+X/ZoJ9aqOWlJew3TsFm8dYXvsk1OpYUy2F8",
# No external watchers (e.g. esbuild) are configured for this app.
watchers: []
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
# mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
# https: [
# port: 4001,
# cipher_suite: :strong,
# keyfile: "priv/cert/selfsigned_key.pem",
# certfile: "priv/cert/selfsigned.pem"
# ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime

50
config/prod.exs Normal file
View File

@@ -0,0 +1,50 @@
import Config
# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
#
# NOTE(review): the actual host/port and secret_key_base for prod are
# supplied at runtime in config/runtime.exs from environment variables.
config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
# ...,
# url: [host: "example.com", port: 443],
# https: [
# ...,
# port: 443,
# cipher_suite: :strong,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
# ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
# config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.

52
config/runtime.exs Normal file
View File

@@ -0,0 +1,52 @@
import Config
# config/runtime.exs is executed for all environments, including
# during releases. It is executed after compilation and before the
# system starts, so it is typically used to load production configuration
# and secrets from environment variables or elsewhere. Do not define
# any compile-time configuration in here, as it won't be applied.
# The block below contains prod specific runtime configuration.
# Start the phoenix server if environment is set and running in a release
# (both conditions must hold: PHX_SERVER opts in, RELEASE_NAME signals
# that we are running inside an OTP release).
if System.get_env("PHX_SERVER") && System.get_env("RELEASE_NAME") do
config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint, server: true
end
if config_env() == :prod do
# The secret key base is used to sign/encrypt cookies and other secrets.
# A default value is used in config/dev.exs and config/test.exs but you
# want to use a different value for prod and you most likely don't want
# to check this value into version control, so we use an environment
# variable instead.
secret_key_base =
System.get_env("SECRET_KEY_BASE") ||
raise """
environment variable SECRET_KEY_BASE is missing.
You can generate one by calling: mix phx.gen.secret
"""
# External host/port: PHX_HOST and PORT env vars, with defaults.
host = System.get_env("PHX_HOST") || "example.com"
port = String.to_integer(System.get_env("PORT") || "4000")
config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
url: [host: host, port: 443],
http: [
# Enable IPv6 and bind on all interfaces.
# Set it to {0, 0, 0, 0, 0, 0, 0, 1} for local network only access.
# See the documentation on https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html
# for details about using IPv6 vs IPv4 and loopback vs public addresses.
ip: {0, 0, 0, 0, 0, 0, 0, 0},
port: port
],
secret_key_base: secret_key_base
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
# config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
end

14
config/test.exs Normal file
View File

@@ -0,0 +1,14 @@
import Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
http: [ip: {127, 0, 0, 1}, port: 4002],
secret_key_base: "+3V7k0WsFksjqGwm5O54NJQX4Sz9LLr8CSJp+4X6UOXBX6IUwzMOqrRQOsziQ6mv",
server: false
# Print only warnings and errors during test
# NOTE(review): newer Elixir/Logger versions deprecate :warn in favour of
# :warning — update when the Elixir baseline is raised.
config :logger, level: :warn
# Initialize plugs at runtime for faster test compilation
config :phoenix, :plug_init_mode, :runtime

View File

@@ -25,6 +25,59 @@
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/credo" /> <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/credo" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/erlex" /> <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/erlex" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/dialyxir" /> <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/dialyxir" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/hpax" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/mint" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/telemetry_metrics" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/castore" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/nimble_options" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/telemetry_poller" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/telemetry_metrics_prometheus_core" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/ranch" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/nimble_pool" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/telemetry" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/cowlib" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/finch" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/mime" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/plug_crypto" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/cowboy" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/prom_ex" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/cowboy_telemetry" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/plug_cowboy" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/plug" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/phoenix_view" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/phoenix_pubsub" />
<excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/phoenix" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/bunt" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/hpax" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/mint" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/finch" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/jason" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/ranch" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/varint" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/castore" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/kayrock" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/crc32cer" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/telemetry" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/connection" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/file_system" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/nimble_pool" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/phoenix_view" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/nimble_options" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/telemetry_poller" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/telemetry_metrics" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/telemetry_metrics_prometheus_core" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/credo" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/phoenix_pubsub" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/kafka_ex" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/cowlib" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/mime" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/plug_crypto" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/cowboy" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/cowboy_telemetry" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/plug" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/plug_cowboy" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/phoenix" />
<excludeFolder url="file://$MODULE_DIR$/_build/test/lib/prom_ex" />
</content> </content>
<orderEntry type="sourceFolder" forTests="false" /> <orderEntry type="sourceFolder" forTests="false" />
<orderEntry type="library" name="kafka_ex" level="project" /> <orderEntry type="library" name="kafka_ex" level="project" />
@@ -37,5 +90,44 @@
<orderEntry type="library" name="inch_ex" level="project" /> <orderEntry type="library" name="inch_ex" level="project" />
<orderEntry type="library" name="dialyxir" level="project" /> <orderEntry type="library" name="dialyxir" level="project" />
<orderEntry type="library" name="erlex" level="project" /> <orderEntry type="library" name="erlex" level="project" />
<orderEntry type="library" name="prom_ex" level="project" />
<orderEntry type="library" name="finch" level="project" />
<orderEntry type="library" name="telemetry" level="project" />
<orderEntry type="library" name="telemetry_poller" level="project" />
<orderEntry type="library" name="telemetry_metrics" level="project" />
<orderEntry type="library" name="telemetry_metrics_prometheus_core" level="project" />
<orderEntry type="library" name="plug_cowboy" level="project" />
<orderEntry type="library" name="phoenix" level="project" />
<orderEntry type="library" name="phoenix_live_view" level="project" />
<orderEntry type="library" name="plug" level="project" />
<orderEntry type="library" name="ecto" level="project" />
<orderEntry type="library" name="oban" level="project" />
<orderEntry type="library" name="absinthe" level="project" />
<orderEntry type="library" name="broadway" level="project" />
<orderEntry type="library" name="bypass" level="project" />
<orderEntry type="library" name="doctor" level="project" />
<orderEntry type="library" name="git_hooks" level="project" />
<orderEntry type="library" name="mint" level="project" />
<orderEntry type="library" name="castore" level="project" />
<orderEntry type="library" name="nimble_pool" level="project" />
<orderEntry type="library" name="nimble_options" level="project" />
<orderEntry type="library" name="cowboy" level="project" />
<orderEntry type="library" name="x509" level="project" />
<orderEntry type="library" name="cowboy_telemetry" level="project" />
<orderEntry type="library" name="hackney" level="project" />
<orderEntry type="library" name="kadabra" level="project" />
<orderEntry type="library" name="mime" level="project" />
<orderEntry type="library" name="plug_crypto" level="project" />
<orderEntry type="library" name="hpax" level="project" />
<orderEntry type="library" name="stream_data" level="project" />
<orderEntry type="library" name="hpack" level="project" />
<orderEntry type="library" name="phoenix_pubsub" level="project" />
<orderEntry type="library" name="phoenix_view" level="project" />
<orderEntry type="library" name="ecto_sql" level="project" />
<orderEntry type="library" name="gettext" level="project" />
<orderEntry type="library" name="phoenix_html" level="project" />
<orderEntry type="library" name="phx_new" level="project" />
<orderEntry type="library" name="websocket_client" level="project" />
<orderEntry type="library" name="esbuild" level="project" />
</component> </component>
</module> </module>

View File

@@ -1,32 +1,13 @@
defmodule KafkaexLagExporter do defmodule KafkaexLagExporter do
@moduledoc """ @moduledoc """
Supervisor to start the '__consumer__offsets' watcher child KafkaexLagExporter keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
""" """
use Application def hello() do
:world
def start(_type, _args) do
import Supervisor.Spec
consumer_group_opts = [
# setting for the ConsumerGroup
heartbeat_interval: 1_000,
# this setting will be forwarded to the GenConsumer
commit_interval: 1_000
]
gen_consumer_impl = ConsumerOffsetsGenConsumer
consumer_group_name = "offsets_group"
topic_names = ["__consumer_offsets"]
children = [
supervisor(
KafkaEx.ConsumerGroup,
[gen_consumer_impl, consumer_group_name, topic_names, consumer_group_opts]
)
]
Supervisor.start_link(children, strategy: :one_for_one)
end end
end end

View File

@@ -0,0 +1,52 @@
defmodule KafkaexLagExporter.Application do
  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  @moduledoc false

  use Application

  @impl true
  def start(_type, _args) do
    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    Supervisor.start_link(children(), strategy: :one_for_one, name: KafkaexLagExporter.Supervisor)
  end

  # The ordered child list for the top-level supervisor. PromEx comes first
  # so no telemetry events are missed; the KafkaEx consumer group goes last.
  defp children do
    [
      KafkaexLagExporter.PromEx,
      # Start the Telemetry supervisor
      KafkaexLagExporterWeb.Telemetry,
      # Start the PubSub system
      {Phoenix.PubSub, name: KafkaexLagExporter.PubSub},
      # Start the Endpoint (http/https)
      KafkaexLagExporterWeb.Endpoint,
      # Start a worker by calling: KafkaexLagExporter.Worker.start_link(arg)
      # {KafkaexLagExporter.Worker, arg}
      consumer_group_spec()
    ]
  end

  # Child spec for the KafkaEx consumer group that watches '__consumer_offsets'.
  defp consumer_group_spec do
    group_opts = [
      # setting for the ConsumerGroup
      heartbeat_interval: 1_000,
      # this setting will be forwarded to the GenConsumer
      commit_interval: 1_000
    ]

    %{
      id: KafkaEx.ConsumerGroup,
      start:
        {KafkaEx.ConsumerGroup, :start_link,
         [
           KafkaexLagExporter.ConsumerOffsetsGenConsumer,
           "offsets_group",
           ["__consumer_offsets"],
           group_opts
         ]}
    }
  end

  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  @impl true
  def config_change(changed, _new, removed) do
    KafkaexLagExporterWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end

View File

@@ -1,5 +1,4 @@
defmodule ConsumerOffsetsGenConsumer do defmodule KafkaexLagExporter.ConsumerOffsetsGenConsumer do
@moduledoc """ @moduledoc """
Genserver implementation to consume new messages on topic '__consumer_offsets' Genserver implementation to consume new messages on topic '__consumer_offsets'
""" """
@@ -10,11 +9,34 @@ defmodule ConsumerOffsetsGenConsumer do
require Logger require Logger
# Consumer callback invoked per topic/partition worker; starts with an
# empty map as state. NOTE(review): signature matches KafkaEx.GenConsumer's
# init/3 (topic, partition, extra args) — confirm against the `use` line.
def init(_topic, _partition, _extra_args) do
{:ok, %{}}
end
# Synchronously fetch the accumulated topic -> offset map.
#
# Fixed: the original used GenServer.cast/2, but a cast is dispatched to
# handle_cast/2 and never returns a result — the {:get} request is served
# by handle_call/3 below, so it must be sent with GenServer.call/2.
# NOTE(review): this assumes the consumer process is registered under
# __MODULE__ — confirm the worker is started with `name: __MODULE__`.
def get() do
GenServer.call(__MODULE__, {:get})
end
def handle_call({:get}, _from, state) do
# A call handler must return {:reply, reply, new_state}; the original
# two-tuple {:reply, state} is not a valid return and crashes the server.
{:reply, state, state}
end
def handle_call({:push, topic, offset}, _from, state) do
# Record the latest offset seen for this topic and reply with the
# updated map (original also returned a malformed {:reply, new_state}).
new_state = Map.put(state, topic, offset)
# IO.puts "new state"
# IO.inspect new_state
{:reply, new_state, new_state}
end
def handle_message_set(message_set, state) do def handle_message_set(message_set, state) do
for %Message{key: key, offset: offset} <- message_set do for %Message{key: key, offset: offset} <- message_set do
consumer_group = get_consumer_group(key) consumer_group = get_consumer_group(key)
Logger.info("consumer_group '#{consumer_group}' has offset '#{offset}'}") Logger.info("consumer_group '#{consumer_group}' has offset '#{offset}'}")
# GenServer.call(__MODULE__, {:push, consumer_group, offset})
end end
{:async_commit, state} {:async_commit, state}
@@ -27,5 +49,4 @@ defmodule ConsumerOffsetsGenConsumer do
consumer_group consumer_group
end end
end end

View File

@@ -0,0 +1,76 @@
defmodule KafkaexLagExporter.PromEx do
@moduledoc """
Be sure to add the following to finish setting up PromEx:
1. Update your configuration (config.exs, dev.exs, prod.exs, releases.exs, etc) to
configure the necessary bit of PromEx. Be sure to check out `PromEx.Config` for
more details regarding configuring PromEx:
```
config :kafkaex_lag_exporter, KafkaexLagExporter.PromEx,
disabled: false,
manual_metrics_start_delay: :no_delay,
drop_metrics_groups: [],
grafana: :disabled,
metrics_server: :disabled
```
2. Add this module to your application supervision tree. It should be one of the first
things that is started so that no Telemetry events are missed. For example, if PromEx
is started after your Repo module, you will miss Ecto's init events and the dashboards
will be missing some data points:
```
def start(_type, _args) do
children = [
KafkaexLagExporter.PromEx,
...
]
...
end
```
3. Update your `endpoint.ex` file to expose your metrics (or configure a standalone
server using the `:metrics_server` config options). Be sure to put this plug before
your `Plug.Telemetry` entry so that you can avoid having calls to your `/metrics`
endpoint create their own metrics and logs which can pollute your logs/metrics given
that Prometheus will scrape at a regular interval and that can get noisy:
```
defmodule KafkaexLagExporterWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :kafkaex_lag_exporter
...
plug PromEx.Plug, prom_ex_module: KafkaexLagExporter.PromEx
...
end
```
4. Update the list of plugins in the `plugins/0` function return list to reflect your
application's dependencies. Also update the list of dashboards that are to be uploaded
to Grafana in the `dashboards/0` function.
"""
use PromEx, otp_app: :kafkaex_lag_exporter
alias PromEx.Plugins
# PromEx callback: only the built-in Application and Beam plugins are
# enabled; the rest are left as commented-out examples from the generator.
@impl true
def plugins do
[
# PromEx built in plugins
Plugins.Application,
Plugins.Beam
# {Plugins.Phoenix, router: KafkaexLagExporterWeb.Router, endpoint: KafkaexLagExporterWeb.Endpoint},
# Plugins.Ecto,
# Plugins.Oban,
# Plugins.PhoenixLiveView,
# Plugins.Absinthe,
# Plugins.Broadway,
# Add your own PromEx metrics plugins
# KafkaexLagExporter.Users.PromExPlugin
]
end
end

View File

@@ -0,0 +1,75 @@
defmodule KafkaexLagExporterWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use KafkaexLagExporterWeb, :controller
use KafkaexLagExporterWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
# Quoted boilerplate injected into controllers: Plug.Conn helpers plus
# the generated route helpers aliased as Routes.
def controller do
quote do
use Phoenix.Controller, namespace: KafkaexLagExporterWeb
import Plug.Conn
alias KafkaexLagExporterWeb.Router.Helpers, as: Routes
end
end
# Quoted boilerplate injected into views, rooted at the templates directory.
def view do
quote do
use Phoenix.View,
root: "lib/kafkaex_lag_exporter_web/templates",
namespace: KafkaexLagExporterWeb
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
# Quoted boilerplate injected into the router module.
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
# Quoted boilerplate injected into channel modules.
def channel do
quote do
use Phoenix.Channel
end
end
# Shared imports/aliases included in every view via view/0 above.
defp view_helpers do
quote do
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import KafkaexLagExporterWeb.ErrorHelpers
alias KafkaexLagExporterWeb.Router.Helpers, as: Routes
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end

View File

@@ -0,0 +1,45 @@
defmodule KafkaexLagExporterWeb.Endpoint do
@moduledoc """
HTTP endpoint for the exporter. Exposes PromEx (Prometheus) metrics on
the root URL ("/") via `PromEx.Plug` before the rest of the plug pipeline.
"""
use Phoenix.Endpoint, otp_app: :kafkaex_lag_exporter
# Serve metrics at "/". Placed before Plug.Telemetry so metric scrapes do
# not generate their own telemetry/log noise (see the setup notes in the
# KafkaexLagExporter.PromEx moduledoc).
plug PromEx.Plug, path: "/", prom_ex_module: KafkaexLagExporter.PromEx
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
@session_options [
store: :cookie,
key: "_kafkaex_lag_exporter_key",
signing_salt: "f/R6/xEO"
]
# socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :kafkaex_lag_exporter,
gzip: false,
only: ~w(assets fonts images favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session, @session_options
plug KafkaexLagExporterWeb.Router
end

View File

@@ -0,0 +1,11 @@
defmodule KafkaexLagExporterWeb.Router do
@moduledoc """
Application router. No routes are defined yet under "/api"; the metrics
response on "/" is served earlier by `PromEx.Plug` in the endpoint.
"""
use KafkaexLagExporterWeb, :router
pipeline :api do
plug :accepts, ["json"]
end
scope "/api", KafkaexLagExporterWeb do
pipe_through :api
end
end

View File

@@ -0,0 +1,50 @@
defmodule KafkaexLagExporterWeb.Telemetry do
@moduledoc false
use Supervisor
import Telemetry.Metrics
# Start the telemetry supervision tree (registered under this module name).
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
end
@impl true
def init(_arg) do
children = [
# Telemetry poller will execute the given period measurements
# every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
{:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
# Add reporters as children of your supervision tree.
# {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
]
Supervisor.init(children, strategy: :one_for_one)
end
# Metric definitions for reporters. NOTE(review): no reporter child is
# started in init/1 above, so these are unused unless a reporter consumes
# metrics/0 elsewhere — confirm.
def metrics do
[
# Phoenix Metrics
summary("phoenix.endpoint.stop.duration",
unit: {:native, :millisecond}
),
summary("phoenix.router_dispatch.stop.duration",
tags: [:route],
unit: {:native, :millisecond}
),
# VM Metrics
summary("vm.memory.total", unit: {:byte, :kilobyte}),
summary("vm.total_run_queue_lengths.total"),
summary("vm.total_run_queue_lengths.cpu"),
summary("vm.total_run_queue_lengths.io")
]
end
# Measurements executed by :telemetry_poller every period; currently empty.
defp periodic_measurements do
[
# A module, function and arguments to be invoked periodically.
# This function must call :telemetry.execute/3 and a metric must be added above.
# {KafkaexLagExporterWeb, :count_users, []}
]
end
end

View File

@@ -0,0 +1,16 @@
defmodule KafkaexLagExporterWeb.ErrorHelpers do
  @moduledoc """
  Conveniences for translating and building error messages.
  """

  @doc """
  Translates an error message.

  Interpolates each `{key, value}` pair from `opts` into `msg` by
  replacing every `%{key}` placeholder with the string form of the
  value. Error messages produced by Ecto carry their interpolation
  values this way, so they must be expanded dynamically.
  """
  def translate_error({msg, opts}) do
    for {key, value} <- opts, reduce: msg do
      message ->
        String.replace(message, "%{#{key}}", fn _ -> to_string(value) end)
    end
  end
end

View File

@@ -0,0 +1,16 @@
defmodule KafkaexLagExporterWeb.ErrorView do
use KafkaexLagExporterWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.json", _assigns) do
# %{errors: %{detail: "Internal Server Error"}}
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.json" becomes
# "Not Found".
# Fallback for templates without an explicit render clause: wraps the
# HTTP status message derived from the template name in the JSON error
# envelope `%{errors: %{detail: ...}}`.
def template_not_found(template, _assigns) do
%{errors: %{detail: Phoenix.Controller.status_message_from_template(template)}}
end
end

24
mix.exs
View File

@@ -6,25 +6,41 @@ defmodule KafkaexLagExporter.MixProject do
app: :kafkaex_lag_exporter, app: :kafkaex_lag_exporter,
version: "0.1.0", version: "0.1.0",
elixir: "~> 1.13", elixir: "~> 1.13",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: Mix.compilers(),
start_permanent: Mix.env() == :prod, start_permanent: Mix.env() == :prod,
deps: deps() deps: deps()
] ]
end end
# Run "mix help compile.app" to learn about applications. # Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do def application do
[ [
extra_applications: [:logger], mod: {KafkaexLagExporter.Application, []},
mod: { KafkaexLagExporter, [] }, extra_applications: [:logger, :runtime_tools]
] ]
end end
# Run "mix help deps" to learn about dependencies. # Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do defp deps do
[ [
{:phoenix, "~> 1.6.6"},
{:telemetry_metrics, "~> 0.6"},
{:telemetry_poller, "~> 1.0"},
{:jason, "~> 1.2"},
{:plug_cowboy, "~> 2.5"},
{:credo, "~> 1.6", only: [:dev, :test], runtime: false}, {:credo, "~> 1.6", only: [:dev, :test], runtime: false},
{:dialyxir, "~> 1.0", only: [:dev], runtime: false}, {:dialyxir, "~> 1.0", only: [:dev], runtime: false},
{:kafka_ex, "~> 0.12.1"}, {:kafka_ex, "~> 0.12.1"},
{:prom_ex, "~> 1.6.0"}
] ]
end end
end end

View File

@@ -1,13 +1,35 @@
%{ %{
"bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"}, "bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"},
"castore": {:hex, :castore, "0.1.15", "dbb300827d5a3ec48f396ca0b77ad47058578927e9ebe792abd99fcbc3324326", [:mix], [], "hexpm", "c69379b907673c7e6eb229f09a0a09b60bb27cfb9625bcb82ea4c04ba82a8442"},
"connection": {:hex, :connection, "1.1.0", "ff2a49c4b75b6fb3e674bfc5536451607270aac754ffd1bdfe175abe4a6d7a68", [:mix], [], "hexpm", "722c1eb0a418fbe91ba7bd59a47e28008a189d47e37e0e7bb85585a016b2869c"}, "connection": {:hex, :connection, "1.1.0", "ff2a49c4b75b6fb3e674bfc5536451607270aac754ffd1bdfe175abe4a6d7a68", [:mix], [], "hexpm", "722c1eb0a418fbe91ba7bd59a47e28008a189d47e37e0e7bb85585a016b2869c"},
"cowboy": {:hex, :cowboy, "2.9.0", "865dd8b6607e14cf03282e10e934023a1bd8be6f6bacf921a7e2a96d800cd452", [:make, :rebar3], [{:cowlib, "2.11.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "2c729f934b4e1aa149aff882f57c6372c15399a20d54f65c8d67bef583021bde"},
"cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"},
"cowlib": {:hex, :cowlib, "2.11.0", "0b9ff9c346629256c42ebe1eeb769a83c6cb771a6ee5960bd110ab0b9b872063", [:make, :rebar3], [], "hexpm", "2b3e9da0b21c4565751a6d4901c20d1b4cc25cbb7fd50d91d2ab6dd287bc86a9"},
"crc32cer": {:hex, :crc32cer, "0.1.10", "fb87abbf34b72f180f8c3a908cd1826c6cb9a59787d156a29e05de9e98be385e", [:rebar3], [], "hexpm", "5b1f47efd0a1b4b7411f1f35e14d3c8c6da6e6a2a725ec8f2cf1ab13703e5f38"}, "crc32cer": {:hex, :crc32cer, "0.1.10", "fb87abbf34b72f180f8c3a908cd1826c6cb9a59787d156a29e05de9e98be385e", [:rebar3], [], "hexpm", "5b1f47efd0a1b4b7411f1f35e14d3c8c6da6e6a2a725ec8f2cf1ab13703e5f38"},
"credo": {:hex, :credo, "1.6.3", "0a9f8925dbc8f940031b789f4623fc9a0eea99d3eed600fe831e403eb96c6a83", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "1167cde00e6661d740fc54da2ee268e35d3982f027399b64d3e2e83af57a1180"}, "credo": {:hex, :credo, "1.6.3", "0a9f8925dbc8f940031b789f4623fc9a0eea99d3eed600fe831e403eb96c6a83", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "1167cde00e6661d740fc54da2ee268e35d3982f027399b64d3e2e83af57a1180"},
"dialyxir": {:hex, :dialyxir, "1.1.0", "c5aab0d6e71e5522e77beff7ba9e08f8e02bad90dfbeffae60eaf0cb47e29488", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "07ea8e49c45f15264ebe6d5b93799d4dd56a44036cf42d0ad9c960bc266c0b9a"}, "dialyxir": {:hex, :dialyxir, "1.1.0", "c5aab0d6e71e5522e77beff7ba9e08f8e02bad90dfbeffae60eaf0cb47e29488", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "07ea8e49c45f15264ebe6d5b93799d4dd56a44036cf42d0ad9c960bc266c0b9a"},
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"}, "erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
"file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"}, "file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
"finch": {:hex, :finch, "0.9.1", "ab2b0151ba88543e221cb50bf0734860db55e8748816ee16e4997fe205f7b315", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6d6b898a59d19f84958eaffec40580f5a9ff88a31e93156707fa8b1d552aa425"},
"hpax": {:hex, :hpax, "0.1.1", "2396c313683ada39e98c20a75a82911592b47e5c24391363343bde74f82396ca", [:mix], [], "hexpm", "0ae7d5a0b04a8a60caf7a39fcf3ec476f35cc2cc16c05abea730d3ce6ac6c826"},
"jason": {:hex, :jason, "1.3.0", "fa6b82a934feb176263ad2df0dbd91bf633d4a46ebfdffea0c8ae82953714946", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "53fc1f51255390e0ec7e50f9cb41e751c260d065dcba2bf0d08dc51a4002c2ac"}, "jason": {:hex, :jason, "1.3.0", "fa6b82a934feb176263ad2df0dbd91bf633d4a46ebfdffea0c8ae82953714946", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "53fc1f51255390e0ec7e50f9cb41e751c260d065dcba2bf0d08dc51a4002c2ac"},
"kafka_ex": {:hex, :kafka_ex, "0.12.1", "83f93a0b04d392b7e0c35234f4c444990f03b616ce4e7121119b89772d28facc", [:mix], [{:kayrock, "~> 0.1.12", [hex: :kayrock, repo: "hexpm", optional: false]}], "hexpm", "a395791c0528a248b0dac5d40d1eef8dd0706530a83cfa6ad7007eab9576fee8"}, "kafka_ex": {:hex, :kafka_ex, "0.12.1", "83f93a0b04d392b7e0c35234f4c444990f03b616ce4e7121119b89772d28facc", [:mix], [{:kayrock, "~> 0.1.12", [hex: :kayrock, repo: "hexpm", optional: false]}], "hexpm", "a395791c0528a248b0dac5d40d1eef8dd0706530a83cfa6ad7007eab9576fee8"},
"kayrock": {:hex, :kayrock, "0.1.14", "49aa3d6ff987c6ccf9c7cfe31d669161dfa16c5f83257b98f48a02246c461711", [:mix], [{:connection, "~>1.1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:crc32cer, "~>0.1.8", [hex: :crc32cer, repo: "hexpm", optional: false]}, {:varint, "~>1.2.0", [hex: :varint, repo: "hexpm", optional: false]}], "hexpm", "7ea2b3613a59fdff9f2e22ebd00bd7eac14290a41b6ec7d4385d9489d9bb6d89"}, "kayrock": {:hex, :kayrock, "0.1.14", "49aa3d6ff987c6ccf9c7cfe31d669161dfa16c5f83257b98f48a02246c461711", [:mix], [{:connection, "~>1.1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:crc32cer, "~>0.1.8", [hex: :crc32cer, repo: "hexpm", optional: false]}, {:varint, "~>1.2.0", [hex: :varint, repo: "hexpm", optional: false]}], "hexpm", "7ea2b3613a59fdff9f2e22ebd00bd7eac14290a41b6ec7d4385d9489d9bb6d89"},
"mime": {:hex, :mime, "2.0.2", "0b9e1a4c840eafb68d820b0e2158ef5c49385d17fb36855ac6e7e087d4b1dcc5", [:mix], [], "hexpm", "e6a3f76b4c277739e36c2e21a2c640778ba4c3846189d5ab19f97f126df5f9b7"},
"mint": {:hex, :mint, "1.4.1", "49b3b6ea35a9a38836d2ad745251b01ca9ec062f7cb66f546bf22e6699137126", [:mix], [{:castore, "~> 0.1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "cd261766e61011a9079cccf8fa9d826e7a397c24fbedf0e11b49312bea629b58"},
"nimble_options": {:hex, :nimble_options, "0.4.0", "c89babbab52221a24b8d1ff9e7d838be70f0d871be823165c94dd3418eea728f", [:mix], [], "hexpm", "e6701c1af326a11eea9634a3b1c62b475339ace9456c1a23ec3bc9a847bca02d"},
"nimble_pool": {:hex, :nimble_pool, "0.2.6", "91f2f4c357da4c4a0a548286c84a3a28004f68f05609b4534526871a22053cde", [:mix], [], "hexpm", "1c715055095d3f2705c4e236c18b618420a35490da94149ff8b580a2144f653f"},
"phoenix": {:hex, :phoenix, "1.6.6", "281c8ce8dccc9f60607346b72cdfc597c3dde134dd9df28dff08282f0b751754", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 1.0", [hex: :phoenix_view, repo: "hexpm", optional: false]}, {:plug, "~> 1.10", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.2", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "807bd646e64cd9dc83db016199715faba72758e6db1de0707eef0a2da4924364"},
"phoenix_pubsub": {:hex, :phoenix_pubsub, "2.0.0", "a1ae76717bb168cdeb10ec9d92d1480fec99e3080f011402c0a2d68d47395ffb", [:mix], [], "hexpm", "c52d948c4f261577b9c6fa804be91884b381a7f8f18450c5045975435350f771"},
"phoenix_view": {:hex, :phoenix_view, "1.1.2", "1b82764a065fb41051637872c7bd07ed2fdb6f5c3bd89684d4dca6e10115c95a", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "7ae90ad27b09091266f6adbb61e1d2516a7c3d7062c6789d46a7554ec40f3a56"},
"plug": {:hex, :plug, "1.13.3", "93b299039c21a8b82cc904d13812bce4ced45cf69153e8d35ca16ffb3e8c5d98", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "98c8003e4faf7b74a9ac41bee99e328b08f069bf932747d4a7532e97ae837a17"},
"plug_cowboy": {:hex, :plug_cowboy, "2.5.2", "62894ccd601cf9597e2c23911ff12798a8a18d237e9739f58a6b04e4988899fe", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "ea6e87f774c8608d60c8d34022a7d073bd7680a0a013f049fc62bf35efea1044"},
"plug_crypto": {:hex, :plug_crypto, "1.2.2", "05654514ac717ff3a1843204b424477d9e60c143406aa94daf2274fdd280794d", [:mix], [], "hexpm", "87631c7ad914a5a445f0a3809f99b079113ae4ed4b867348dd9eec288cecb6db"},
"prom_ex": {:hex, :prom_ex, "1.6.0", "a243cf27e71a2f53abfa9428680bcb89983923bb65149309e945c5f1f1ea0c2d", [:mix], [{:absinthe, ">= 1.6.0", [hex: :absinthe, repo: "hexpm", optional: true]}, {:broadway, ">= 1.0.0", [hex: :broadway, repo: "hexpm", optional: true]}, {:ecto, ">= 3.5.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:finch, "~> 0.9.0", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: false]}, {:oban, ">= 2.4.0", [hex: :oban, repo: "hexpm", optional: true]}, {:phoenix, ">= 1.5.0", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_live_view, ">= 0.14.0", [hex: :phoenix_live_view, repo: "hexpm", optional: true]}, {:plug, ">= 1.12.1", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, "~> 2.5.1", [hex: :plug_cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6.1", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}, {:telemetry_metrics_prometheus_core, "~> 1.0.2", [hex: :telemetry_metrics_prometheus_core, repo: "hexpm", optional: false]}, {:telemetry_poller, "~> 1.0.0", [hex: :telemetry_poller, repo: "hexpm", optional: false]}], "hexpm", "1c0242b8ade3f8343394645076c9d7c0a814f7876c2100fc719cb387922397db"},
"ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"},
"telemetry": {:hex, :telemetry, "1.0.0", "0f453a102cdf13d506b7c0ab158324c337c41f1cc7548f0bc0e130bbf0ae9452", [:rebar3], [], "hexpm", "73bc09fa59b4a0284efb4624335583c528e07ec9ae76aca96ea0673850aec57a"},
"telemetry_metrics": {:hex, :telemetry_metrics, "0.6.1", "315d9163a1d4660aedc3fee73f33f1d355dcc76c5c3ab3d59e76e3edf80eef1f", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7be9e0871c41732c233be71e4be11b96e56177bf15dde64a8ac9ce72ac9834c6"},
"telemetry_metrics_prometheus_core": {:hex, :telemetry_metrics_prometheus_core, "1.0.2", "c98b1c580de637bfeac00db41b9fb91fb4c3548ee3d512a8ed7299172312eaf3", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "48351a0d56f80e38c997b44232b1043e0a081670d16766eee920e6254175b730"},
"telemetry_poller": {:hex, :telemetry_poller, "1.0.0", "db91bb424e07f2bb6e73926fcafbfcbcb295f0193e0a00e825e589a0a47e8453", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "b3a24eafd66c3f42da30fc3ca7dda1e9d546c12250a2d60d7b81d264fbec4f6e"},
"varint": {:hex, :varint, "1.2.0", "61bffd9dcc2d5242d59f75694506b4d4013bb103f6a23e34b94f89cebb0c1ab3", [:mix], [], "hexpm", "d94941ed8b9d1a5fdede9103a5e52035bd0aaf35081d44e67713a36799927e47"}, "varint": {:hex, :varint, "1.2.0", "61bffd9dcc2d5242d59f75694506b4d4013bb103f6a23e34b94f89cebb0c1ab3", [:mix], [], "hexpm", "d94941ed8b9d1a5fdede9103a5e52035bd0aaf35081d44e67713a36799927e47"},
} }

View File

@@ -0,0 +1,17 @@
defmodule KafkaexLagExporterWeb.ErrorViewTest do
  use KafkaexLagExporterWeb.ConnCase, async: true

  # Bring render/3 and render_to_string/3 for testing custom views
  import Phoenix.View

  test "renders 404.json" do
    rendered = render(KafkaexLagExporterWeb.ErrorView, "404.json", [])

    assert rendered == %{errors: %{detail: "Not Found"}}
  end

  test "renders 500.json" do
    rendered = render(KafkaexLagExporterWeb.ErrorView, "500.json", [])

    assert rendered == %{errors: %{detail: "Internal Server Error"}}
  end
end

View File

@@ -0,0 +1,34 @@
defmodule KafkaexLagExporterWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.

Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures.

Note: this application defines no Ecto repo, so no SQL sandbox is
configured here and `setup` performs no per-test work.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
import Phoenix.ChannelTest
import KafkaexLagExporterWeb.ChannelCase
# The default endpoint for testing
@endpoint KafkaexLagExporterWeb.Endpoint
end
end
# Nothing to prepare per test (no database/sandbox in this project).
setup _tags do
:ok
end
end

37
test/support/conn_case.ex Normal file
View File

@@ -0,0 +1,37 @@
defmodule KafkaexLagExporterWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.

Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures.

Note: this application defines no Ecto repo, so no SQL sandbox is
configured here; `setup` only builds a fresh test connection.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
import Plug.Conn
import Phoenix.ConnTest
import KafkaexLagExporterWeb.ConnCase
alias KafkaexLagExporterWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint KafkaexLagExporterWeb.Endpoint
end
end
# Provide each test with a fresh Plug.Conn via the :conn context key.
setup _tags do
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end