Provide metrics on root URL
@@ -1,4 +1,4 @@
# Used by "mix format"
[
  inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"]
  import_deps: [:phoenix],
  inputs: ["*.{ex,exs}", "{config,lib,test}/**/*.{ex,exs}"]
]

README.md (25 lines changed)
@@ -1,21 +1,18 @@
# KafkaexLagExporter

**TODO: Add description**
To start your Phoenix server:

## Installation
  * Install dependencies with `mix deps.get`
  * Start Phoenix endpoint with `mix phx.server` or inside IEx with `iex -S mix phx.server`

If [available in Hex](https://hex.pm/docs/publish), the package can be installed
by adding `kafkaex_lag_exporter` to your list of dependencies in `mix.exs`:
Now you can visit [`localhost:4000`](http://localhost:4000) from your browser.

```elixir
def deps do
  [
    {:kafkaex_lag_exporter, "~> 0.1.0"}
  ]
end
```
Ready to run in production? Please [check our deployment guides](https://hexdocs.pm/phoenix/deployment.html).

Documentation can be generated with [ExDoc](https://github.com/elixir-lang/ex_doc)
and published on [HexDocs](https://hexdocs.pm). Once published, the docs can
be found at <https://hexdocs.pm/kafkaex_lag_exporter>.
## Learn more

  * Official website: https://www.phoenixframework.org/
  * Guides: https://hexdocs.pm/phoenix/overview.html
  * Docs: https://hexdocs.pm/phoenix
  * Forum: https://elixirforum.com/c/phoenix-forum
  * Source: https://github.com/phoenixframework/phoenix

@@ -1,5 +1,28 @@
# This file is responsible for configuring your application
# and its dependencies with the aid of the Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.

# General application configuration
import Config

# Configures the endpoint
config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
  url: [host: "localhost"],
  render_errors: [view: KafkaexLagExporterWeb.ErrorView, accepts: ~w(json), layout: false],
  pubsub_server: KafkaexLagExporter.PubSub,
  live_view: [signing_salt: "sSgJfjNf"]

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  level: :info,
  metadata: [:request_id]

# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason

config :kafka_ex,
  # A list of brokers to connect to. This can be in either of the following formats
  #
@@ -13,11 +36,11 @@ config :kafka_ex,
  # errors when producing messages, it may be necessary to modify "advertised.host.name" in the
  # server.properties file.
  # In the case below you would set "advertised.host.name=localhost"
  # brokers: [
  #   {"kafka1", 9092},
  #   {"kafka2", 9092},
  #   {"kafka3", 9092}
  # ],
  brokers: "localhost:9093,localhost:9094,localhost:9095",
  #
  # OR:
@@ -62,21 +85,15 @@ config :kafka_ex,
  # see SSL OPTION DESCRIPTIONS - CLIENT SIDE at http://erlang.org/doc/man/ssl.html
  # for supported options
  ssl_options: [
    # cacertfile: File.cwd!() <> "/ssl/ca-cert",
    # certfile: File.cwd!() <> "/ssl/cert.pem",
    # keyfile: File.cwd!() <> "/ssl/key.pem"
  ],
  # set this to the version of the kafka broker that you are using
  # include only major.minor.patch versions. must be at least 0.8.0
  # use "kayrock" for the new client
  kafka_version: "3.1.0"

config :logger,
  level: :info,
  truncate: 4096

env_config = Path.expand("#{Mix.env()}.exs", __DIR__)

if File.exists?(env_config) do
  import_config(env_config)
end
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{config_env()}.exs"

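Because the config file now ends with `import_config "#{config_env()}.exs"`, any of the values above can be overridden per environment. As a sketch only (not part of this commit; the broker address is an assumption), a test environment could point kafka_ex at a single local broker:

```elixir
# e.g. in config/test.exs (hypothetical override, not in this commit)
import Config

config :kafka_ex,
  brokers: "localhost:9092"
```
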
config/dev.exs (new file, 51 lines)
@@ -0,0 +1,51 @@
import Config

# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with esbuild to bundle .js and .css sources.
config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
  # Binding to loopback ipv4 address prevents access from other machines.
  # Change to `ip: {0, 0, 0, 0}` to allow access from other machines.
  http: [ip: {127, 0, 0, 1}, port: 4000],
  check_origin: false,
  code_reloader: true,
  debug_errors: true,
  secret_key_base: "lttLR5uTgFy2WzfJLo+uXLWnogim+X/ZoJ9aqOWlJew3TsFm8dYXvsk1OpYUy2F8",
  watchers: []

# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# Mix task:
#
#     mix phx.gen.cert
#
# Note that this task requires Erlang/OTP 20 or later.
# Run `mix help phx.gen.cert` for more information.
#
# The `http:` config above can be replaced with:
#
#     https: [
#       port: 4001,
#       cipher_suite: :strong,
#       keyfile: "priv/cert/selfsigned_key.pem",
#       certfile: "priv/cert/selfsigned.pem"
#     ],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.

# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"

# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20

# Initialize plugs at runtime for faster development compilation
config :phoenix, :plug_init_mode, :runtime

config/prod.exs (new file, 50 lines)
@@ -0,0 +1,50 @@
import Config

# For production, don't forget to configure the url host
# to something meaningful, Phoenix uses this information
# when generating URLs.
#
# Note we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the `mix phx.digest` task,
# which you should run after static files are built and
# before starting your production server.
config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
  cache_static_manifest: "priv/static/cache_manifest.json"

# Do not print debug messages in production
config :logger, level: :info

# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
#     config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
#       ...,
#       url: [host: "example.com", port: 443],
#       https: [
#         ...,
#         port: 443,
#         cipher_suite: :strong,
#         keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
#         certfile: System.get_env("SOME_APP_SSL_CERT_PATH")
#       ]
#
# The `cipher_suite` is set to `:strong` to support only the
# latest and more secure SSL ciphers. This means old browsers
# and clients may not be supported. You can set it to
# `:compatible` for wider support.
#
# `:keyfile` and `:certfile` expect an absolute path to the key
# and cert in disk or a relative path inside priv, for example
# "priv/ssl/server.key". For all supported SSL configuration
# options, see https://hexdocs.pm/plug/Plug.SSL.html#configure/1
#
# We also recommend setting `force_ssl` in your endpoint, ensuring
# no data is ever sent via http, always redirecting to https:
#
#     config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
#       force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.

config/runtime.exs (new file, 52 lines)
@@ -0,0 +1,52 @@
import Config

# config/runtime.exs is executed for all environments, including
# during releases. It is executed after compilation and before the
# system starts, so it is typically used to load production configuration
# and secrets from environment variables or elsewhere. Do not define
# any compile-time configuration in here, as it won't be applied.
# The block below contains prod specific runtime configuration.

# Start the phoenix server if environment is set and running in a release
if System.get_env("PHX_SERVER") && System.get_env("RELEASE_NAME") do
  config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint, server: true
end

if config_env() == :prod do
  # The secret key base is used to sign/encrypt cookies and other secrets.
  # A default value is used in config/dev.exs and config/test.exs but you
  # want to use a different value for prod and you most likely don't want
  # to check this value into version control, so we use an environment
  # variable instead.
  secret_key_base =
    System.get_env("SECRET_KEY_BASE") ||
      raise """
      environment variable SECRET_KEY_BASE is missing.
      You can generate one by calling: mix phx.gen.secret
      """

  host = System.get_env("PHX_HOST") || "example.com"
  port = String.to_integer(System.get_env("PORT") || "4000")

  config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
    url: [host: host, port: 443],
    http: [
      # Enable IPv6 and bind on all interfaces.
      # Set it to {0, 0, 0, 0, 0, 0, 0, 1} for local network only access.
      # See the documentation on https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html
      # for details about using IPv6 vs IPv4 and loopback vs public addresses.
      ip: {0, 0, 0, 0, 0, 0, 0, 0},
      port: port
    ],
    secret_key_base: secret_key_base

  # ## Using releases
  #
  # If you are doing OTP releases, you need to instruct Phoenix
  # to start each relevant endpoint:
  #
  #     config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint, server: true
  #
  # Then you can assemble a release by calling `mix release`.
  # See `mix help release` for more information.
end

config/test.exs (new file, 14 lines)
@@ -0,0 +1,14 @@
import Config

# We don't run a server during test. If one is required,
# you can enable the server option below.
config :kafkaex_lag_exporter, KafkaexLagExporterWeb.Endpoint,
  http: [ip: {127, 0, 0, 1}, port: 4002],
  secret_key_base: "+3V7k0WsFksjqGwm5O54NJQX4Sz9LLr8CSJp+4X6UOXBX6IUwzMOqrRQOsziQ6mv",
  server: false

# Print only warnings and errors during test
config :logger, level: :warn

# Initialize plugs at runtime for faster test compilation
config :phoenix, :plug_init_mode, :runtime

@@ -25,6 +25,59 @@
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/credo" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/erlex" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/dialyxir" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/hpax" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/mint" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/telemetry_metrics" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/castore" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/nimble_options" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/telemetry_poller" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/telemetry_metrics_prometheus_core" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/ranch" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/nimble_pool" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/telemetry" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/cowlib" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/finch" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/mime" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/plug_crypto" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/cowboy" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/prom_ex" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/cowboy_telemetry" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/plug_cowboy" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/plug" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/phoenix_view" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/phoenix_pubsub" />
      <excludeFolder url="file://$MODULE_DIR$/_build/dev/lib/phoenix" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/bunt" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/hpax" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/mint" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/finch" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/jason" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/ranch" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/varint" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/castore" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/kayrock" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/crc32cer" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/telemetry" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/connection" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/file_system" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/nimble_pool" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/phoenix_view" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/nimble_options" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/telemetry_poller" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/telemetry_metrics" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/telemetry_metrics_prometheus_core" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/credo" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/phoenix_pubsub" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/kafka_ex" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/cowlib" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/mime" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/plug_crypto" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/cowboy" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/cowboy_telemetry" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/plug" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/plug_cowboy" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/phoenix" />
      <excludeFolder url="file://$MODULE_DIR$/_build/test/lib/prom_ex" />
    </content>
    <orderEntry type="sourceFolder" forTests="false" />
    <orderEntry type="library" name="kafka_ex" level="project" />
@@ -37,5 +90,44 @@
    <orderEntry type="library" name="inch_ex" level="project" />
    <orderEntry type="library" name="dialyxir" level="project" />
    <orderEntry type="library" name="erlex" level="project" />
    <orderEntry type="library" name="prom_ex" level="project" />
    <orderEntry type="library" name="finch" level="project" />
    <orderEntry type="library" name="telemetry" level="project" />
    <orderEntry type="library" name="telemetry_poller" level="project" />
    <orderEntry type="library" name="telemetry_metrics" level="project" />
    <orderEntry type="library" name="telemetry_metrics_prometheus_core" level="project" />
    <orderEntry type="library" name="plug_cowboy" level="project" />
    <orderEntry type="library" name="phoenix" level="project" />
    <orderEntry type="library" name="phoenix_live_view" level="project" />
    <orderEntry type="library" name="plug" level="project" />
    <orderEntry type="library" name="ecto" level="project" />
    <orderEntry type="library" name="oban" level="project" />
    <orderEntry type="library" name="absinthe" level="project" />
    <orderEntry type="library" name="broadway" level="project" />
    <orderEntry type="library" name="bypass" level="project" />
    <orderEntry type="library" name="doctor" level="project" />
    <orderEntry type="library" name="git_hooks" level="project" />
    <orderEntry type="library" name="mint" level="project" />
    <orderEntry type="library" name="castore" level="project" />
    <orderEntry type="library" name="nimble_pool" level="project" />
    <orderEntry type="library" name="nimble_options" level="project" />
    <orderEntry type="library" name="cowboy" level="project" />
    <orderEntry type="library" name="x509" level="project" />
    <orderEntry type="library" name="cowboy_telemetry" level="project" />
    <orderEntry type="library" name="hackney" level="project" />
    <orderEntry type="library" name="kadabra" level="project" />
    <orderEntry type="library" name="mime" level="project" />
    <orderEntry type="library" name="plug_crypto" level="project" />
    <orderEntry type="library" name="hpax" level="project" />
    <orderEntry type="library" name="stream_data" level="project" />
    <orderEntry type="library" name="hpack" level="project" />
    <orderEntry type="library" name="phoenix_pubsub" level="project" />
    <orderEntry type="library" name="phoenix_view" level="project" />
    <orderEntry type="library" name="ecto_sql" level="project" />
    <orderEntry type="library" name="gettext" level="project" />
    <orderEntry type="library" name="phoenix_html" level="project" />
    <orderEntry type="library" name="phx_new" level="project" />
    <orderEntry type="library" name="websocket_client" level="project" />
    <orderEntry type="library" name="esbuild" level="project" />
  </component>
</module>

@@ -1,32 +1,13 @@
defmodule KafkaexLagExporter do
  @moduledoc """
  Supervisor to start the '__consumer__offsets' watcher child
  KafkaexLagExporter keeps the contexts that define your domain
  and business logic.

  Contexts are also responsible for managing your data, regardless
  if it comes from the database, an external API or others.
  """

  use Application

  def start(_type, _args) do
    import Supervisor.Spec

    consumer_group_opts = [
      # setting for the ConsumerGroup
      heartbeat_interval: 1_000,
      # this setting will be forwarded to the GenConsumer
      commit_interval: 1_000
    ]

    gen_consumer_impl = ConsumerOffsetsGenConsumer
    consumer_group_name = "offsets_group"
    topic_names = ["__consumer_offsets"]

    children = [
      supervisor(
        KafkaEx.ConsumerGroup,
        [gen_consumer_impl, consumer_group_name, topic_names, consumer_group_opts]
      )
    ]

    Supervisor.start_link(children, strategy: :one_for_one)
  def hello() do
    :world
  end
end

lib/kafkaex_lag_exporter/application.ex (new file, 52 lines)
@@ -0,0 +1,52 @@
defmodule KafkaexLagExporter.Application do
  # See https://hexdocs.pm/elixir/Application.html
  # for more information on OTP Applications
  @moduledoc false

  use Application

  @impl true
  def start(_type, _args) do
    consumer_group_opts = [
      # setting for the ConsumerGroup
      heartbeat_interval: 1_000,
      # this setting will be forwarded to the GenConsumer
      commit_interval: 1_000
    ]

    gen_consumer_impl = KafkaexLagExporter.ConsumerOffsetsGenConsumer
    consumer_group_name = "offsets_group"
    topic_names = ["__consumer_offsets"]

    children = [
      KafkaexLagExporter.PromEx,
      # Start the Telemetry supervisor
      KafkaexLagExporterWeb.Telemetry,
      # Start the PubSub system
      {Phoenix.PubSub, name: KafkaexLagExporter.PubSub},
      # Start the Endpoint (http/https)
      KafkaexLagExporterWeb.Endpoint,
      # Start a worker by calling: KafkaexLagExporter.Worker.start_link(arg)
      # {KafkaexLagExporter.Worker, arg}
      %{
        id: KafkaEx.ConsumerGroup,
        start:
          {KafkaEx.ConsumerGroup, :start_link,
           [gen_consumer_impl, consumer_group_name, topic_names, consumer_group_opts]}
      }
    ]

    # See https://hexdocs.pm/elixir/Supervisor.html
    # for other strategies and supported options
    opts = [strategy: :one_for_one, name: KafkaexLagExporter.Supervisor]
    Supervisor.start_link(children, opts)
  end

  # Tell Phoenix to update the endpoint configuration
  # whenever the application is updated.
  @impl true
  def config_change(changed, _new, removed) do
    KafkaexLagExporterWeb.Endpoint.config_change(changed, removed)
    :ok
  end
end

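The `KafkaEx.ConsumerGroup` child above is written as an explicit child-spec map because `KafkaEx.ConsumerGroup.start_link/4` takes several positional arguments, which the usual `{Module, arg}` tuple shorthand cannot express. A quick sanity check of the resulting tree, as a sketch only (it assumes an `iex -S mix` session with the configured brokers reachable):

```elixir
# List the children running under the supervisor named in start/2 above.
Supervisor.which_children(KafkaexLagExporter.Supervisor)
# Expected entries: KafkaexLagExporter.PromEx, KafkaexLagExporterWeb.Telemetry,
# Phoenix.PubSub, KafkaexLagExporterWeb.Endpoint and KafkaEx.ConsumerGroup.
```
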
@@ -1,5 +1,4 @@
defmodule ConsumerOffsetsGenConsumer do
defmodule KafkaexLagExporter.ConsumerOffsetsGenConsumer do
  @moduledoc """
  Genserver implementation to consume new messages on topic '__consumer_offsets'
  """
@@ -10,11 +9,34 @@ defmodule ConsumerOffsetsGenConsumer do

  require Logger

  def init(_topic, _partition, _extra_args) do
    {:ok, %{}}
  end

  def get() do
    GenServer.call(__MODULE__, {:get})
  end

  def handle_call({:get}, _from, state) do
    {:reply, state, state}
  end

  def handle_call({:push, topic, offset}, _from, state) do
    new_state = Map.put(state, topic, offset)

    # IO.puts "new state"
    # IO.inspect new_state

    {:reply, new_state, new_state}
  end

  def handle_message_set(message_set, state) do
    for %Message{key: key, offset: offset} <- message_set do
      consumer_group = get_consumer_group(key)

      Logger.info("consumer_group '#{consumer_group}' has offset '#{offset}'")

      # GenServer.call(__MODULE__, {:push, consumer_group, offset})
    end

    {:async_commit, state}
@@ -27,5 +49,4 @@ defmodule ConsumerOffsetsGenConsumer do

    consumer_group
  end

end

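`handle_message_set/2` above only logs each committed offset. A possible follow-up, shown purely as a sketch (the event name and metric are assumptions, not part of this commit), would be to emit a `:telemetry` event per message and declare a matching metric, so a reporter or a custom PromEx plugin could expose the value instead of just logging it:

```elixir
# Inside handle_message_set/2, alongside (or instead of) the Logger call:
:telemetry.execute(
  [:kafkaex_lag_exporter, :consumer_group],
  %{offset: offset},
  %{consumer_group: consumer_group}
)

# A matching Telemetry.Metrics definition, e.g. added to the metrics/0 list in
# KafkaexLagExporterWeb.Telemetry (it still needs a reporter or a PromEx plugin
# before it shows up in the scraped output):
last_value("kafkaex_lag_exporter.consumer_group.offset",
  tags: [:consumer_group]
)
```
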
lib/kafkaex_lag_exporter/prom_ex.ex (new file, 76 lines)
@@ -0,0 +1,76 @@
defmodule KafkaexLagExporter.PromEx do
  @moduledoc """
  Be sure to add the following to finish setting up PromEx:

  1. Update your configuration (config.exs, dev.exs, prod.exs, releases.exs, etc) to
     configure the necessary bit of PromEx. Be sure to check out `PromEx.Config` for
     more details regarding configuring PromEx:
     ```
     config :kafkaex_lag_exporter, KafkaexLagExporter.PromEx,
       disabled: false,
       manual_metrics_start_delay: :no_delay,
       drop_metrics_groups: [],
       grafana: :disabled,
       metrics_server: :disabled
     ```

  2. Add this module to your application supervision tree. It should be one of the first
     things that is started so that no Telemetry events are missed. For example, if PromEx
     is started after your Repo module, you will miss Ecto's init events and the dashboards
     will be missing some data points:
     ```
     def start(_type, _args) do
       children = [
         KafkaexLagExporter.PromEx,

         ...
       ]

       ...
     end
     ```

  3. Update your `endpoint.ex` file to expose your metrics (or configure a standalone
     server using the `:metrics_server` config options). Be sure to put this plug before
     your `Plug.Telemetry` entry so that you can avoid having calls to your `/metrics`
     endpoint create their own metrics and logs which can pollute your logs/metrics given
     that Prometheus will scrape at a regular interval and that can get noisy:
     ```
     defmodule KafkaexLagExporterWeb.Endpoint do
       use Phoenix.Endpoint, otp_app: :kafkaex_lag_exporter

       ...

       plug PromEx.Plug, prom_ex_module: KafkaexLagExporter.PromEx

       ...
     end
     ```

  4. Update the list of plugins in the `plugins/0` function return list to reflect your
     application's dependencies. Also update the list of dashboards that are to be uploaded
     to Grafana in the `dashboards/0` function.
  """

  use PromEx, otp_app: :kafkaex_lag_exporter

  alias PromEx.Plugins

  @impl true
  def plugins do
    [
      # PromEx built in plugins
      Plugins.Application,
      Plugins.Beam
      # {Plugins.Phoenix, router: KafkaexLagExporterWeb.Router, endpoint: KafkaexLagExporterWeb.Endpoint},
      # Plugins.Ecto,
      # Plugins.Oban,
      # Plugins.PhoenixLiveView,
      # Plugins.Absinthe,
      # Plugins.Broadway,

      # Add your own PromEx metrics plugins
      # KafkaexLagExporter.Users.PromExPlugin
    ]
  end
end

lib/kafkaex_lag_exporter_web.ex (new file, 75 lines)
@@ -0,0 +1,75 @@
defmodule KafkaexLagExporterWeb do
  @moduledoc """
  The entrypoint for defining your web interface, such
  as controllers, views, channels and so on.

  This can be used in your application as:

      use KafkaexLagExporterWeb, :controller
      use KafkaexLagExporterWeb, :view

  The definitions below will be executed for every view,
  controller, etc, so keep them short and clean, focused
  on imports, uses and aliases.

  Do NOT define functions inside the quoted expressions
  below. Instead, define any helper function in modules
  and import those modules here.
  """

  def controller do
    quote do
      use Phoenix.Controller, namespace: KafkaexLagExporterWeb

      import Plug.Conn
      alias KafkaexLagExporterWeb.Router.Helpers, as: Routes
    end
  end

  def view do
    quote do
      use Phoenix.View,
        root: "lib/kafkaex_lag_exporter_web/templates",
        namespace: KafkaexLagExporterWeb

      # Import convenience functions from controllers
      import Phoenix.Controller,
        only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]

      # Include shared imports and aliases for views
      unquote(view_helpers())
    end
  end

  def router do
    quote do
      use Phoenix.Router

      import Plug.Conn
      import Phoenix.Controller
    end
  end

  def channel do
    quote do
      use Phoenix.Channel
    end
  end

  defp view_helpers do
    quote do
      # Import basic rendering functionality (render, render_layout, etc)
      import Phoenix.View

      import KafkaexLagExporterWeb.ErrorHelpers
      alias KafkaexLagExporterWeb.Router.Helpers, as: Routes
    end
  end

  @doc """
  When used, dispatch to the appropriate controller/view/etc.
  """
  defmacro __using__(which) when is_atom(which) do
    apply(__MODULE__, which, [])
  end
end

lib/kafkaex_lag_exporter_web/endpoint.ex (new file, 45 lines)
@@ -0,0 +1,45 @@
defmodule KafkaexLagExporterWeb.Endpoint do
  use Phoenix.Endpoint, otp_app: :kafkaex_lag_exporter

  plug PromEx.Plug, path: "/", prom_ex_module: KafkaexLagExporter.PromEx

  # The session will be stored in the cookie and signed,
  # this means its contents can be read but not tampered with.
  # Set :encryption_salt if you would also like to encrypt it.
  @session_options [
    store: :cookie,
    key: "_kafkaex_lag_exporter_key",
    signing_salt: "f/R6/xEO"
  ]

  # socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]

  # Serve at "/" the static files from "priv/static" directory.
  #
  # You should set gzip to true if you are running phx.digest
  # when deploying your static files in production.
  plug Plug.Static,
    at: "/",
    from: :kafkaex_lag_exporter,
    gzip: false,
    only: ~w(assets fonts images favicon.ico robots.txt)

  # Code reloading can be explicitly enabled under the
  # :code_reloader configuration of your endpoint.
  if code_reloading? do
    plug Phoenix.CodeReloader
  end

  plug Plug.RequestId
  plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]

  plug Plug.Parsers,
    parsers: [:urlencoded, :multipart, :json],
    pass: ["*/*"],
    json_decoder: Phoenix.json_library()

  plug Plug.MethodOverride
  plug Plug.Head
  plug Plug.Session, @session_options
  plug KafkaexLagExporterWeb.Router
end

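Mounting `PromEx.Plug` at `path: "/"` is what puts the metrics on the root URL this commit is about. A minimal check could look like the test below; this is a sketch, not part of the commit, and it relies on the `ConnCase` added further down in this diff:

```elixir
defmodule KafkaexLagExporterWeb.MetricsEndpointTest do
  use KafkaexLagExporterWeb.ConnCase, async: true

  test "GET / is answered by PromEx.Plug", %{conn: conn} do
    conn = get(conn, "/")

    # The plug sits in front of the router, so "/" should respond even though
    # the router defines no route for it.
    assert conn.status == 200
  end
end
```
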
lib/kafkaex_lag_exporter_web/router.ex (new file, 11 lines)
@@ -0,0 +1,11 @@
defmodule KafkaexLagExporterWeb.Router do
  use KafkaexLagExporterWeb, :router

  pipeline :api do
    plug :accepts, ["json"]
  end

  scope "/api", KafkaexLagExporterWeb do
    pipe_through :api
  end
end

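The `/api` scope is empty for now; any JSON endpoints added later would run through the `:api` pipeline. For illustration only (hypothetical, `HealthController` does not exist in this commit):

```elixir
scope "/api", KafkaexLagExporterWeb do
  pipe_through :api

  # Hypothetical route - HealthController is not part of this commit.
  get "/health", HealthController, :index
end
```
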
lib/kafkaex_lag_exporter_web/telemetry.ex (new file, 50 lines)
@@ -0,0 +1,50 @@
defmodule KafkaexLagExporterWeb.Telemetry do
  @moduledoc false

  use Supervisor
  import Telemetry.Metrics

  def start_link(arg) do
    Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
  end

  @impl true
  def init(_arg) do
    children = [
      # Telemetry poller will execute the given period measurements
      # every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
      {:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
      # Add reporters as children of your supervision tree.
      # {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
    ]

    Supervisor.init(children, strategy: :one_for_one)
  end

  def metrics do
    [
      # Phoenix Metrics
      summary("phoenix.endpoint.stop.duration",
        unit: {:native, :millisecond}
      ),
      summary("phoenix.router_dispatch.stop.duration",
        tags: [:route],
        unit: {:native, :millisecond}
      ),

      # VM Metrics
      summary("vm.memory.total", unit: {:byte, :kilobyte}),
      summary("vm.total_run_queue_lengths.total"),
      summary("vm.total_run_queue_lengths.cpu"),
      summary("vm.total_run_queue_lengths.io")
    ]
  end

  defp periodic_measurements do
    [
      # A module, function and arguments to be invoked periodically.
      # This function must call :telemetry.execute/3 and a metric must be added above.
      # {KafkaexLagExporterWeb, :count_users, []}
    ]
  end
end

lib/kafkaex_lag_exporter_web/views/error_helpers.ex (new file, 16 lines)
@@ -0,0 +1,16 @@
defmodule KafkaexLagExporterWeb.ErrorHelpers do
  @moduledoc """
  Conveniences for translating and building error messages.
  """

  @doc """
  Translates an error message.
  """
  def translate_error({msg, opts}) do
    # Because the error messages we show in our forms and APIs
    # are defined inside Ecto, we need to translate them dynamically.
    Enum.reduce(opts, msg, fn {key, value}, acc ->
      String.replace(acc, "%{#{key}}", fn _ -> to_string(value) end)
    end)
  end
end

lib/kafkaex_lag_exporter_web/views/error_view.ex (new file, 16 lines)
@@ -0,0 +1,16 @@
defmodule KafkaexLagExporterWeb.ErrorView do
  use KafkaexLagExporterWeb, :view

  # If you want to customize a particular status code
  # for a certain format, you may uncomment below.
  # def render("500.json", _assigns) do
  #   %{errors: %{detail: "Internal Server Error"}}
  # end

  # By default, Phoenix returns the status message from
  # the template name. For example, "404.json" becomes
  # "Not Found".
  def template_not_found(template, _assigns) do
    %{errors: %{detail: Phoenix.Controller.status_message_from_template(template)}}
  end
end

mix.exs (24 lines changed)
@@ -6,25 +6,41 @@ defmodule KafkaexLagExporter.MixProject do
      app: :kafkaex_lag_exporter,
      version: "0.1.0",
      elixir: "~> 1.13",
      elixirc_paths: elixirc_paths(Mix.env()),
      compilers: Mix.compilers(),
      start_permanent: Mix.env() == :prod,
      deps: deps()
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  # Configuration for the OTP application.
  #
  # Type `mix help compile.app` for more information.
  def application do
    [
      extra_applications: [:logger],
      mod: { KafkaexLagExporter, [] },
      mod: {KafkaexLagExporter.Application, []},
      extra_applications: [:logger, :runtime_tools]
    ]
  end

  # Run "mix help deps" to learn about dependencies.
  # Specifies which paths to compile per environment.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Specifies your project dependencies.
  #
  # Type `mix help deps` for examples and options.
  defp deps do
    [
      {:phoenix, "~> 1.6.6"},
      {:telemetry_metrics, "~> 0.6"},
      {:telemetry_poller, "~> 1.0"},
      {:jason, "~> 1.2"},
      {:plug_cowboy, "~> 2.5"},
      {:credo, "~> 1.6", only: [:dev, :test], runtime: false},
      {:dialyxir, "~> 1.0", only: [:dev], runtime: false},
      {:kafka_ex, "~> 0.12.1"},
      {:prom_ex, "~> 1.6.0"}
    ]
  end
end

mix.lock (22 lines changed)
@@ -1,13 +1,35 @@
%{
"bunt": {:hex, :bunt, "0.2.0", "951c6e801e8b1d2cbe58ebbd3e616a869061ddadcc4863d0a2182541acae9a38", [:mix], [], "hexpm", "7af5c7e09fe1d40f76c8e4f9dd2be7cebd83909f31fee7cd0e9eadc567da8353"},
"castore": {:hex, :castore, "0.1.15", "dbb300827d5a3ec48f396ca0b77ad47058578927e9ebe792abd99fcbc3324326", [:mix], [], "hexpm", "c69379b907673c7e6eb229f09a0a09b60bb27cfb9625bcb82ea4c04ba82a8442"},
"connection": {:hex, :connection, "1.1.0", "ff2a49c4b75b6fb3e674bfc5536451607270aac754ffd1bdfe175abe4a6d7a68", [:mix], [], "hexpm", "722c1eb0a418fbe91ba7bd59a47e28008a189d47e37e0e7bb85585a016b2869c"},
"cowboy": {:hex, :cowboy, "2.9.0", "865dd8b6607e14cf03282e10e934023a1bd8be6f6bacf921a7e2a96d800cd452", [:make, :rebar3], [{:cowlib, "2.11.0", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "1.8.0", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm", "2c729f934b4e1aa149aff882f57c6372c15399a20d54f65c8d67bef583021bde"},
"cowboy_telemetry": {:hex, :cowboy_telemetry, "0.4.0", "f239f68b588efa7707abce16a84d0d2acf3a0f50571f8bb7f56a15865aae820c", [:rebar3], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7d98bac1ee4565d31b62d59f8823dfd8356a169e7fcbb83831b8a5397404c9de"},
"cowlib": {:hex, :cowlib, "2.11.0", "0b9ff9c346629256c42ebe1eeb769a83c6cb771a6ee5960bd110ab0b9b872063", [:make, :rebar3], [], "hexpm", "2b3e9da0b21c4565751a6d4901c20d1b4cc25cbb7fd50d91d2ab6dd287bc86a9"},
"crc32cer": {:hex, :crc32cer, "0.1.10", "fb87abbf34b72f180f8c3a908cd1826c6cb9a59787d156a29e05de9e98be385e", [:rebar3], [], "hexpm", "5b1f47efd0a1b4b7411f1f35e14d3c8c6da6e6a2a725ec8f2cf1ab13703e5f38"},
"credo": {:hex, :credo, "1.6.3", "0a9f8925dbc8f940031b789f4623fc9a0eea99d3eed600fe831e403eb96c6a83", [:mix], [{:bunt, "~> 0.2.0", [hex: :bunt, repo: "hexpm", optional: false]}, {:file_system, "~> 0.2.8", [hex: :file_system, repo: "hexpm", optional: false]}, {:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: false]}], "hexpm", "1167cde00e6661d740fc54da2ee268e35d3982f027399b64d3e2e83af57a1180"},
"dialyxir": {:hex, :dialyxir, "1.1.0", "c5aab0d6e71e5522e77beff7ba9e08f8e02bad90dfbeffae60eaf0cb47e29488", [:mix], [{:erlex, ">= 0.2.6", [hex: :erlex, repo: "hexpm", optional: false]}], "hexpm", "07ea8e49c45f15264ebe6d5b93799d4dd56a44036cf42d0ad9c960bc266c0b9a"},
"erlex": {:hex, :erlex, "0.2.6", "c7987d15e899c7a2f34f5420d2a2ea0d659682c06ac607572df55a43753aa12e", [:mix], [], "hexpm", "2ed2e25711feb44d52b17d2780eabf998452f6efda104877a3881c2f8c0c0c75"},
"file_system": {:hex, :file_system, "0.2.10", "fb082005a9cd1711c05b5248710f8826b02d7d1784e7c3451f9c1231d4fc162d", [:mix], [], "hexpm", "41195edbfb562a593726eda3b3e8b103a309b733ad25f3d642ba49696bf715dc"},
"finch": {:hex, :finch, "0.9.1", "ab2b0151ba88543e221cb50bf0734860db55e8748816ee16e4997fe205f7b315", [:mix], [{:castore, "~> 0.1", [hex: :castore, repo: "hexpm", optional: false]}, {:mint, "~> 1.3", [hex: :mint, repo: "hexpm", optional: false]}, {:nimble_options, "~> 0.4.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_pool, "~> 0.2", [hex: :nimble_pool, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "6d6b898a59d19f84958eaffec40580f5a9ff88a31e93156707fa8b1d552aa425"},
"hpax": {:hex, :hpax, "0.1.1", "2396c313683ada39e98c20a75a82911592b47e5c24391363343bde74f82396ca", [:mix], [], "hexpm", "0ae7d5a0b04a8a60caf7a39fcf3ec476f35cc2cc16c05abea730d3ce6ac6c826"},
"jason": {:hex, :jason, "1.3.0", "fa6b82a934feb176263ad2df0dbd91bf633d4a46ebfdffea0c8ae82953714946", [:mix], [{:decimal, "~> 1.0 or ~> 2.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm", "53fc1f51255390e0ec7e50f9cb41e751c260d065dcba2bf0d08dc51a4002c2ac"},
"kafka_ex": {:hex, :kafka_ex, "0.12.1", "83f93a0b04d392b7e0c35234f4c444990f03b616ce4e7121119b89772d28facc", [:mix], [{:kayrock, "~> 0.1.12", [hex: :kayrock, repo: "hexpm", optional: false]}], "hexpm", "a395791c0528a248b0dac5d40d1eef8dd0706530a83cfa6ad7007eab9576fee8"},
"kayrock": {:hex, :kayrock, "0.1.14", "49aa3d6ff987c6ccf9c7cfe31d669161dfa16c5f83257b98f48a02246c461711", [:mix], [{:connection, "~>1.1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:crc32cer, "~>0.1.8", [hex: :crc32cer, repo: "hexpm", optional: false]}, {:varint, "~>1.2.0", [hex: :varint, repo: "hexpm", optional: false]}], "hexpm", "7ea2b3613a59fdff9f2e22ebd00bd7eac14290a41b6ec7d4385d9489d9bb6d89"},
"mime": {:hex, :mime, "2.0.2", "0b9e1a4c840eafb68d820b0e2158ef5c49385d17fb36855ac6e7e087d4b1dcc5", [:mix], [], "hexpm", "e6a3f76b4c277739e36c2e21a2c640778ba4c3846189d5ab19f97f126df5f9b7"},
"mint": {:hex, :mint, "1.4.1", "49b3b6ea35a9a38836d2ad745251b01ca9ec062f7cb66f546bf22e6699137126", [:mix], [{:castore, "~> 0.1.0", [hex: :castore, repo: "hexpm", optional: true]}, {:hpax, "~> 0.1.1", [hex: :hpax, repo: "hexpm", optional: false]}], "hexpm", "cd261766e61011a9079cccf8fa9d826e7a397c24fbedf0e11b49312bea629b58"},
"nimble_options": {:hex, :nimble_options, "0.4.0", "c89babbab52221a24b8d1ff9e7d838be70f0d871be823165c94dd3418eea728f", [:mix], [], "hexpm", "e6701c1af326a11eea9634a3b1c62b475339ace9456c1a23ec3bc9a847bca02d"},
"nimble_pool": {:hex, :nimble_pool, "0.2.6", "91f2f4c357da4c4a0a548286c84a3a28004f68f05609b4534526871a22053cde", [:mix], [], "hexpm", "1c715055095d3f2705c4e236c18b618420a35490da94149ff8b580a2144f653f"},
"phoenix": {:hex, :phoenix, "1.6.6", "281c8ce8dccc9f60607346b72cdfc597c3dde134dd9df28dff08282f0b751754", [:mix], [{:jason, "~> 1.0", [hex: :jason, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 2.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:phoenix_view, "~> 1.0", [hex: :phoenix_view, repo: "hexpm", optional: false]}, {:plug, "~> 1.10", [hex: :plug, repo: "hexpm", optional: false]}, {:plug_cowboy, "~> 2.2", [hex: :plug_cowboy, repo: "hexpm", optional: true]}, {:plug_crypto, "~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "807bd646e64cd9dc83db016199715faba72758e6db1de0707eef0a2da4924364"},
"phoenix_pubsub": {:hex, :phoenix_pubsub, "2.0.0", "a1ae76717bb168cdeb10ec9d92d1480fec99e3080f011402c0a2d68d47395ffb", [:mix], [], "hexpm", "c52d948c4f261577b9c6fa804be91884b381a7f8f18450c5045975435350f771"},
"phoenix_view": {:hex, :phoenix_view, "1.1.2", "1b82764a065fb41051637872c7bd07ed2fdb6f5c3bd89684d4dca6e10115c95a", [:mix], [{:phoenix_html, "~> 2.14.2 or ~> 3.0", [hex: :phoenix_html, repo: "hexpm", optional: true]}], "hexpm", "7ae90ad27b09091266f6adbb61e1d2516a7c3d7062c6789d46a7554ec40f3a56"},
"plug": {:hex, :plug, "1.13.3", "93b299039c21a8b82cc904d13812bce4ced45cf69153e8d35ca16ffb3e8c5d98", [:mix], [{:mime, "~> 1.0 or ~> 2.0", [hex: :mime, repo: "hexpm", optional: false]}, {:plug_crypto, "~> 1.1.1 or ~> 1.2", [hex: :plug_crypto, repo: "hexpm", optional: false]}, {:telemetry, "~> 0.4.3 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "98c8003e4faf7b74a9ac41bee99e328b08f069bf932747d4a7532e97ae837a17"},
"plug_cowboy": {:hex, :plug_cowboy, "2.5.2", "62894ccd601cf9597e2c23911ff12798a8a18d237e9739f58a6b04e4988899fe", [:mix], [{:cowboy, "~> 2.7", [hex: :cowboy, repo: "hexpm", optional: false]}, {:cowboy_telemetry, "~> 0.3", [hex: :cowboy_telemetry, repo: "hexpm", optional: false]}, {:plug, "~> 1.7", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm", "ea6e87f774c8608d60c8d34022a7d073bd7680a0a013f049fc62bf35efea1044"},
"plug_crypto": {:hex, :plug_crypto, "1.2.2", "05654514ac717ff3a1843204b424477d9e60c143406aa94daf2274fdd280794d", [:mix], [], "hexpm", "87631c7ad914a5a445f0a3809f99b079113ae4ed4b867348dd9eec288cecb6db"},
"prom_ex": {:hex, :prom_ex, "1.6.0", "a243cf27e71a2f53abfa9428680bcb89983923bb65149309e945c5f1f1ea0c2d", [:mix], [{:absinthe, ">= 1.6.0", [hex: :absinthe, repo: "hexpm", optional: true]}, {:broadway, ">= 1.0.0", [hex: :broadway, repo: "hexpm", optional: true]}, {:ecto, ">= 3.5.0", [hex: :ecto, repo: "hexpm", optional: true]}, {:finch, "~> 0.9.0", [hex: :finch, repo: "hexpm", optional: false]}, {:jason, "~> 1.2", [hex: :jason, repo: "hexpm", optional: false]}, {:oban, ">= 2.4.0", [hex: :oban, repo: "hexpm", optional: true]}, {:phoenix, ">= 1.5.0", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_live_view, ">= 0.14.0", [hex: :phoenix_live_view, repo: "hexpm", optional: true]}, {:plug, ">= 1.12.1", [hex: :plug, repo: "hexpm", optional: true]}, {:plug_cowboy, "~> 2.5.1", [hex: :plug_cowboy, repo: "hexpm", optional: false]}, {:telemetry, "~> 1.0.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6.1", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}, {:telemetry_metrics_prometheus_core, "~> 1.0.2", [hex: :telemetry_metrics_prometheus_core, repo: "hexpm", optional: false]}, {:telemetry_poller, "~> 1.0.0", [hex: :telemetry_poller, repo: "hexpm", optional: false]}], "hexpm", "1c0242b8ade3f8343394645076c9d7c0a814f7876c2100fc719cb387922397db"},
"ranch": {:hex, :ranch, "1.8.0", "8c7a100a139fd57f17327b6413e4167ac559fbc04ca7448e9be9057311597a1d", [:make, :rebar3], [], "hexpm", "49fbcfd3682fab1f5d109351b61257676da1a2fdbe295904176d5e521a2ddfe5"},
"telemetry": {:hex, :telemetry, "1.0.0", "0f453a102cdf13d506b7c0ab158324c337c41f1cc7548f0bc0e130bbf0ae9452", [:rebar3], [], "hexpm", "73bc09fa59b4a0284efb4624335583c528e07ec9ae76aca96ea0673850aec57a"},
"telemetry_metrics": {:hex, :telemetry_metrics, "0.6.1", "315d9163a1d4660aedc3fee73f33f1d355dcc76c5c3ab3d59e76e3edf80eef1f", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "7be9e0871c41732c233be71e4be11b96e56177bf15dde64a8ac9ce72ac9834c6"},
"telemetry_metrics_prometheus_core": {:hex, :telemetry_metrics_prometheus_core, "1.0.2", "c98b1c580de637bfeac00db41b9fb91fb4c3548ee3d512a8ed7299172312eaf3", [:mix], [{:telemetry, "~> 0.4 or ~> 1.0.0", [hex: :telemetry, repo: "hexpm", optional: false]}, {:telemetry_metrics, "~> 0.6", [hex: :telemetry_metrics, repo: "hexpm", optional: false]}], "hexpm", "48351a0d56f80e38c997b44232b1043e0a081670d16766eee920e6254175b730"},
"telemetry_poller": {:hex, :telemetry_poller, "1.0.0", "db91bb424e07f2bb6e73926fcafbfcbcb295f0193e0a00e825e589a0a47e8453", [:rebar3], [{:telemetry, "~> 1.0", [hex: :telemetry, repo: "hexpm", optional: false]}], "hexpm", "b3a24eafd66c3f42da30fc3ca7dda1e9d546c12250a2d60d7b81d264fbec4f6e"},
"varint": {:hex, :varint, "1.2.0", "61bffd9dcc2d5242d59f75694506b4d4013bb103f6a23e34b94f89cebb0c1ab3", [:mix], [], "hexpm", "d94941ed8b9d1a5fdede9103a5e52035bd0aaf35081d44e67713a36799927e47"},
}
test/kafkaex_lag_exporter_web/views/error_view_test.exs (new file, 17 lines)
@@ -0,0 +1,17 @@
defmodule KafkaexLagExporterWeb.ErrorViewTest do
  use KafkaexLagExporterWeb.ConnCase, async: true

  # Bring render/3 and render_to_string/3 for testing custom views
  import Phoenix.View

  test "renders 404.json" do
    assert render(KafkaexLagExporterWeb.ErrorView, "404.json", []) == %{
             errors: %{detail: "Not Found"}
           }
  end

  test "renders 500.json" do
    assert render(KafkaexLagExporterWeb.ErrorView, "500.json", []) ==
             %{errors: %{detail: "Internal Server Error"}}
  end
end

test/support/channel_case.ex (new file, 34 lines)
@@ -0,0 +1,34 @@
defmodule KafkaexLagExporterWeb.ChannelCase do
  @moduledoc """
  This module defines the test case to be used by
  channel tests.

  Such tests rely on `Phoenix.ChannelTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.

  Finally, if the test case interacts with the database,
  we enable the SQL sandbox, so changes done to the database
  are reverted at the end of every test. If you are using
  PostgreSQL, you can even run database tests asynchronously
  by setting `use KafkaexLagExporterWeb.ChannelCase, async: true`, although
  this option is not recommended for other databases.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      # Import conveniences for testing with channels
      import Phoenix.ChannelTest
      import KafkaexLagExporterWeb.ChannelCase

      # The default endpoint for testing
      @endpoint KafkaexLagExporterWeb.Endpoint
    end
  end

  setup _tags do
    :ok
  end
end

test/support/conn_case.ex (new file, 37 lines)
@@ -0,0 +1,37 @@
defmodule KafkaexLagExporterWeb.ConnCase do
  @moduledoc """
  This module defines the test case to be used by
  tests that require setting up a connection.

  Such tests rely on `Phoenix.ConnTest` and also
  import other functionality to make it easier
  to build common data structures and query the data layer.

  Finally, if the test case interacts with the database,
  we enable the SQL sandbox, so changes done to the database
  are reverted at the end of every test. If you are using
  PostgreSQL, you can even run database tests asynchronously
  by setting `use KafkaexLagExporterWeb.ConnCase, async: true`, although
  this option is not recommended for other databases.
  """

  use ExUnit.CaseTemplate

  using do
    quote do
      # Import conveniences for testing with connections
      import Plug.Conn
      import Phoenix.ConnTest
      import KafkaexLagExporterWeb.ConnCase

      alias KafkaexLagExporterWeb.Router.Helpers, as: Routes

      # The default endpoint for testing
      @endpoint KafkaexLagExporterWeb.Endpoint
    end
  end

  setup _tags do
    {:ok, conn: Phoenix.ConnTest.build_conn()}
  end
end