diff --git a/README.md b/README.md index 04a2627..bbc2f4a 100644 --- a/README.md +++ b/README.md @@ -32,7 +32,7 @@ Add the backend to `config.exs`: utc_log: true ``` -Configure the following example to suit your needs: +The following is a full example with the default values: ```elixir config :logger, CloudWatch, @@ -46,10 +46,22 @@ Configure the following example to suit your needs: max_timeout: 60_000 ``` -The `endpoint` may be omitted from the configuration and will default to -`amazonaws.com`. The `max_buffer_size` controls when `cloud_watch` will flush -the buffer in bytes. You may specify anything up to a maximum of 1,048,576 -bytes. If omitted, it will default to 10,485 bytes. +CloudWatch flushes the buffer when it has collected `max_buffer_size` bytes of +messages or `max_timeout` milliseconds have elapsed. `max_buffer_size` can be +anything up to a maximum of 1,048,576 bytes. If omitted, it will default to +10,485 bytes. + +CloudWatch supports getting AWS credentials and other defaults from +[EC2 instance metadata](https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html). + +In that case, a minimal configuration is: + + ```elixir + config :logger, CloudWatch, + log_group_name: "api" + ``` + +`log_stream_name` defaults to the instance id. 
## Alternative AWS client library: ExAws diff --git a/lib/cloud_watch.ex b/lib/cloud_watch.ex index 8c012b9..9e8192b 100644 --- a/lib/cloud_watch.ex +++ b/lib/cloud_watch.ex @@ -9,13 +9,32 @@ defmodule CloudWatch do alias CloudWatch.InputLogEvent alias CloudWatch.AwsProxy - def init(_) do - state = configure(Application.get_env(:logger, CloudWatch, [])) + @spec init(module()) :: {:ok, term()} | {:error, term()} + def init(__MODULE__) do + # Because this is a plugin to Logger, we can't rely on application + # dependencies to be started before this is called + {:ok, _} = Application.ensure_all_started(:hackney) + + env = Application.get_env(:logger, __MODULE__, []) + state = configure(env) + + # If AWS keys are not defined statically, get them from the instance metadata. + # This may fail while the instance is starting up, so retry quickly. + # Otherwise refresh every 10 minutes, as the keys expire periodically. + unless state.access_key_id do + if state.client do + Process.send_after(self(), :refresh_creds, 300_000) + else + Process.send_after(self(), :refresh_creds, 200) + end + end + Process.send_after(self(), :flush, state.max_timeout) {:ok, state} end def handle_call({:configure, opts}, _) do + Application.put_env(:logger, __MODULE__, opts) {:ok, :ok, configure(opts)} end @@ -25,21 +44,23 @@ defmodule CloudWatch do def handle_event({level, _gl, {Logger, msg, ts, md}}, state) do case Logger.compare_levels(level, state.level) do - :lt -> {:ok, state} + :lt -> + {:ok, state} _ -> %{buffer: buffer, buffer_size: buffer_size} = state + message = state.format |> Logger.Formatter.format(level, msg, ts, md) |> IO.chardata_to_string + buffer = List.insert_at(buffer, -1, %InputLogEvent{message: message, timestamp: ts}) - state - |> Map.merge(%{buffer: buffer, buffer_size: buffer_size + byte_size(message) + 26}) - |> flush() + + flush(%{state | buffer: buffer, buffer_size: buffer_size + byte_size(message) + 26}) end end def handle_event(:flush, state) do - {:ok, 
Map.merge(state, %{buffer: [], buffer_size: 0})} + {:ok, %{state | buffer: [], buffer_size: 0}} end def handle_info(:flush, state) do @@ -48,6 +69,16 @@ defmodule CloudWatch do {:ok, flushed_state} end + def handle_info(:refresh_creds, state) do + state = configure_aws(state) + if state.client do + Process.send_after(self(), :refresh_creds, 300_000) + else + Process.send_after(self(), :refresh_creds, 200) + end + {:ok, state} + end + def handle_info(_msg, state) do {:ok, state} end @@ -60,60 +91,148 @@ defmodule CloudWatch do :ok end - defp configure(opts) do - opts = Keyword.merge(Application.get_env(:logger, CloudWatch, []), opts) - format = Logger.Formatter.compile(Keyword.get(opts, :format, @default_format)) - level = Keyword.get(opts, :level, @default_level) - log_group_name = Keyword.get(opts, :log_group_name) - log_stream_name = Keyword.get(opts, :log_stream_name) - max_buffer_size = Keyword.get(opts, :max_buffer_size, @default_max_buffer_size) - max_timeout = Keyword.get(opts, :max_timeout, @default_max_timeout) + @spec configure(Keyword.t) :: Map.t + def configure(opts) do + state = %{ + access_key_id: opts[:access_key_id], + secret_access_key: opts[:secret_access_key], + region: opts[:region], + endpoint: opts[:endpoint], + client: nil, + buffer: [], buffer_size: 0, + level: opts[:level] || @default_level, + format: Logger.Formatter.compile(opts[:format] || @default_format), + log_group_name: opts[:log_group_name], + log_stream_name: opts[:log_stream_name], + max_buffer_size: opts[:max_buffer_size] || @default_max_buffer_size, + max_timeout: opts[:max_timeout] || @default_max_timeout, + sequence_token: nil, flushed_at: nil + } - # AWS configuration, only if needed by the AWS library - region = Keyword.get(opts, :region) - access_key_id = Keyword.get(opts, :access_key_id) - endpoint = Keyword.get(opts, :endpoint, @default_endpoint) - secret_access_key = Keyword.get(opts, :secret_access_key) - client = AwsProxy.client(access_key_id, secret_access_key, 
region, endpoint) - %{buffer: [], buffer_size: 0, client: client, format: format, level: level, log_group_name: log_group_name, - log_stream_name: log_stream_name, max_buffer_size: max_buffer_size, max_timeout: max_timeout, - sequence_token: nil, flushed_at: nil} + if state.access_key_id do + # Static AWS config + %{state | client: AwsProxy.client(state.access_key_id, state.secret_access_key, state.region, state.endpoint)} + else + configure_aws(state) + end end - defp flush(_state, _opts \\ [force: false]) + def configure_aws(state) do + case System.get_env("AWS_CONTAINER_CREDENTIALS_RELATIVE_URI") do + nil -> + # https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html + case get_metadata("http://169.254.169.254/latest/meta-data/iam/security-credentials/") do + {:ok, ""} -> + state + {:ok, role} -> + {:ok, json} = get_metadata("http://169.254.169.254/latest/meta-data/iam/security-credentials/" <> role) + {:ok, creds} = Poison.decode(json) + access_key_id = Map.get(creds, "AccessKeyId") + secret_access_key = Map.get(creds, "SecretAccessKey") + region = state.region || metadata_region() + endpoint = state.endpoint || metadata_endpoint() || @default_endpoint + client = AwsProxy.client(access_key_id, secret_access_key, region, endpoint) + + log_stream_name = state.log_stream_name || metadata_instance_id() + + %{state | client: client, log_stream_name: log_stream_name} + _ -> + state + end + uri -> + # https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html + # This is untested + case get_metadata("http://169.254.170.2" <> uri) do + {:ok, json} -> + {:ok, creds} = Poison.decode(json) + access_key_id = Map.get(creds, "AccessKeyId") + secret_access_key = Map.get(creds, "SecretAccessKey") + region = state.region + endpoint = state.endpoint || @default_endpoint + client = AwsProxy.client(access_key_id, secret_access_key, region, endpoint) + %{state | client: client} + _ -> + state + end + end + end + + defp flush(state, opts 
\\ [force: false]) defp flush(%{buffer: buffer, buffer_size: buffer_size, max_buffer_size: max_buffer_size} = state, [force: false]) when buffer_size < max_buffer_size and length(buffer) < 10_000 do {:ok, state} end - - defp flush(%{buffer: []} = state, _opts), do: {:ok, state} + + defp flush(%{buffer: []} = state, _opts), do: {:ok, state} + + # Client not configured yet + defp flush(%{client: nil} = state, _opts), do: {:ok, state} defp flush(state, opts) do - case AwsProxy.put_log_events(state.client, %{logEvents: Enum.sort_by(state.buffer, &(&1.timestamp)), - logGroupName: state.log_group_name, logStreamName: state.log_stream_name, sequenceToken: state.sequence_token}) do - {:ok, %{"nextSequenceToken" => next_sequence_token}, _} -> - {:ok, Map.merge(state, %{buffer: [], buffer_size: 0, sequence_token: next_sequence_token})} - {:error, {"DataAlreadyAcceptedException", "The given batch of log events has already been accepted. The next batch can be sent with sequenceToken: " <> next_sequence_token}} -> - state - |> Map.put(:sequence_token, next_sequence_token) - |> flush(opts) - {:error, {"InvalidSequenceTokenException", "The given sequenceToken is invalid. 
The next expected sequenceToken is: " <> next_sequence_token}} -> - state - |> Map.put(:sequence_token, next_sequence_token) - |> flush(opts) - {:error, {"ResourceNotFoundException", "The specified log group does not exist."}} -> - AwsProxy.create_log_group(state.client, %{logGroupName: state.log_group_name}) - AwsProxy.create_log_stream(state.client, %{logGroupName: state.log_group_name, - logStreamName: state.log_stream_name}) - flush(state, opts) - {:error, {"ResourceNotFoundException", "The specified log stream does not exist."}} -> - AwsProxy.create_log_stream(state.client, %{logGroupName: state.log_group_name, - logStreamName: state.log_stream_name}) - flush(state, opts) - {:error, %HTTPoison.Error{id: nil, reason: reason}} when reason in [:closed, :connect_timeout, :timeout] -> - state - |> flush(opts) + events = %{logEvents: Enum.sort_by(state.buffer, &(&1.timestamp)), + logGroupName: state.log_group_name, logStreamName: state.log_stream_name, + sequenceToken: state.sequence_token} + + case AwsProxy.put_log_events(state.client, events) do + {:ok, %{"nextSequenceToken" => next_sequence_token}, _} -> + {:ok, %{state | buffer: [], buffer_size: 0, sequence_token: next_sequence_token}} + {:error, {"DataAlreadyAcceptedException", + "The given batch of log events has already been accepted. The next batch can be sent with sequenceToken: " <> next_sequence_token}} -> + flush(%{state | sequence_token: next_sequence_token}, opts) + {:error, {"InvalidSequenceTokenException", + "The given sequenceToken is invalid. 
The next expected sequenceToken is: " <> next_sequence_token}} -> + flush(%{state | sequence_token: next_sequence_token}, opts) + {:error, {"ResourceNotFoundException", "The specified log group does not exist."}} -> + {:ok, _, _} = AwsProxy.create_log_group(state.client, %{logGroupName: state.log_group_name}) + {:ok, _, _} = AwsProxy.create_log_stream(state.client, %{logGroupName: state.log_group_name, + logStreamName: state.log_stream_name}) + flush(state, opts) + {:error, {"ResourceNotFoundException", "The specified log stream does not exist."}} -> + {:ok, _, _} = AwsProxy.create_log_stream(state.client, %{logGroupName: state.log_group_name, + logStreamName: state.log_stream_name}) + flush(state, opts) + {:error, %HTTPoison.Error{id: nil, reason: reason}} when reason in [:closed, :connect_timeout, :timeout] -> + flush(state, opts) + end + end + + def get_metadata(url) do + case :hackney.request(:get, url, [], "", []) do + {:ok, 200, _resp_headers, client_ref} -> + :hackney.body(client_ref) + _ -> + nil + end + end + + def get_metadata!(url) do + case :hackney.request(:get, url, [], "", []) do + {:ok, 200, _resp_headers, client_ref} -> + {:ok, body} = :hackney.body(client_ref) + body + _ -> + nil end end + + def metadata_endpoint do + get_metadata!("http://169.254.169.254/latest/meta-data/services/domain") + end + + def metadata_instance_id do + get_metadata!("http://169.254.169.254/latest/meta-data/instance-id") + end + + def metadata_region do + url = "http://169.254.169.254/latest/meta-data/placement/availability-zone" + case :hackney.request(:get, url, [], "", []) do + {:ok, 200, _resp_headers, client_ref} -> + {:ok, body} = :hackney.body(client_ref) + String.slice(body, Range.new(0, -2)) + _ -> + nil + end + end + end diff --git a/lib/cloud_watch/aws_proxy.ex b/lib/cloud_watch/aws_proxy.ex index a80482f..12b72a6 100644 --- a/lib/cloud_watch/aws_proxy.ex +++ b/lib/cloud_watch/aws_proxy.ex @@ -8,7 +8,7 @@ defmodule CloudWatch.AwsProxy do cond do 
Code.ensure_loaded?(AWS) -> # AWS CloudWatch Logs implemented using aws-elixir - # See https://github.com/jkakar/aws-elixir + # https://github.com/aws-beam/aws-elixir # # AWS credentials are configured in CloudWatch def client(access_key_id, secret_access_key, region, endpoint) do diff --git a/lib/cloud_watch/input_log_event.ex b/lib/cloud_watch/input_log_event.ex index 877db69..71bc79c 100644 --- a/lib/cloud_watch/input_log_event.ex +++ b/lib/cloud_watch/input_log_event.ex @@ -10,6 +10,7 @@ defmodule CloudWatch.InputLogEvent do |> Kernel.-(@epoch) |> Kernel.*(1000) |> Kernel.+(milliseconds) + %{message: message, timestamp: timestamp} |> Poison.Encoder.encode(options) |> IO.chardata_to_string diff --git a/mix.exs b/mix.exs index bc3e785..1cb37e1 100644 --- a/mix.exs +++ b/mix.exs @@ -17,7 +17,7 @@ defmodule CloudWatch.Mixfile do # # Type "mix help compile.app" for more information def application do - [applications: [:logger]] + [extra_applications: [:logger]] end # This makes sure your factory and any other modules in test/support are compiled @@ -35,8 +35,11 @@ defmodule CloudWatch.Mixfile do # # Type "mix help deps" for more examples and options defp deps do - [{:aws, "~> 0.5.0", optional: true}, - {:httpoison, "~> 0.11.1"}, + [ + {:aws, "~> 0.5.0", optional: true}, + {:httpoison, "~> 0.11"}, + {:poison, "~> 3.1"}, + {:hackney, "~> 1.8"}, {:credo, "~> 0.4.13", only: :dev}, {:mock, "~> 0.2.0", only: :test}, {:ex_doc, ">= 0.0.0", only: :dev}] diff --git a/mix.lock b/mix.lock index d6ce20b..7bbeb31 100644 --- a/mix.lock +++ b/mix.lock @@ -1,20 +1,25 @@ -%{"aws": {:hex, :aws, "0.5.0", "e3916abd9d1fb4c182cbeac8a964ddeb32de9b20d441b66fba838ade06a4a278", [:mix], [{:httpoison, "~> 0.11.1", [hex: :httpoison, repo: "hexpm", optional: false]}, {:poison, "~> 3.1", [hex: :poison, repo: "hexpm", optional: false]}, {:timex, "~> 3.1", [hex: :timex, repo: "hexpm", optional: false]}], "hexpm"}, +%{ + "aws": {:hex, :aws, "0.5.0", 
"e3916abd9d1fb4c182cbeac8a964ddeb32de9b20d441b66fba838ade06a4a278", [:mix], [{:httpoison, "~> 0.11.1", [hex: :httpoison, repo: "hexpm", optional: false]}, {:poison, "~> 3.1", [hex: :poison, repo: "hexpm", optional: false]}, {:timex, "~> 3.1", [hex: :timex, repo: "hexpm", optional: false]}], "hexpm"}, "bunt": {:hex, :bunt, "0.1.6", "5d95a6882f73f3b9969fdfd1953798046664e6f77ec4e486e6fafc7caad97c6f", [:mix], [], "hexpm"}, "certifi": {:hex, :certifi, "1.2.1", "c3904f192bd5284e5b13f20db3ceac9626e14eeacfbb492e19583cf0e37b22be", [:rebar3], [], "hexpm"}, - "combine": {:hex, :combine, "0.9.6", "8d1034a127d4cbf6924c8a5010d3534d958085575fa4d9b878f200d79ac78335", [:mix], [], "hexpm"}, - "credo": {:hex, :credo, "0.4.13", "0e9d0479c7e0591e36b093bf17fbdde012e2a69b5571e2df8d0b75b4fc8adc74", [:mix], [{:bunt, "~> 0.1.6", [hex: :bunt, repo: "hexpm", optional: false]}], "hexpm"}, - "earmark": {:hex, :earmark, "1.2.2", "f718159d6b65068e8daeef709ccddae5f7fdc770707d82e7d126f584cd925b74", [:mix], [], "hexpm"}, - "ex_doc": {:hex, :ex_doc, "0.16.2", "3b3e210ebcd85a7c76b4e73f85c5640c011d2a0b2f06dcdf5acdb2ae904e5084", [:mix], [{:earmark, "~> 1.1", [hex: :earmark, repo: "hexpm", optional: false]}], "hexpm"}, - "gettext": {:hex, :gettext, "0.13.1", "5e0daf4e7636d771c4c71ad5f3f53ba09a9ae5c250e1ab9c42ba9edccc476263", [:mix], [], "hexpm"}, + "combine": {:hex, :combine, "0.10.0", "eff8224eeb56498a2af13011d142c5e7997a80c8f5b97c499f84c841032e429f", [:mix], [], "hexpm"}, + "credo": {:hex, :credo, "0.4.14", "594a965ae2224997fae9e705e881983911a052c701b05bd3bcf89af44b5f6b45", [:mix], [{:bunt, "~> 0.1.6", [hex: :bunt, repo: "hexpm", optional: false]}], "hexpm"}, + "earmark": {:hex, :earmark, "1.2.5", "4d21980d5d2862a2e13ec3c49ad9ad783ffc7ca5769cf6ff891a4553fbaae761", [:mix], [], "hexpm"}, + "ex_doc": {:hex, :ex_doc, "0.19.1", "519bb9c19526ca51d326c060cb1778d4a9056b190086a8c6c115828eaccea6cf", [:mix], [{:earmark, "~> 1.1", [hex: :earmark, repo: "hexpm", optional: false]}, {:makeup_elixir, "~> 0.7", [hex: 
:makeup_elixir, repo: "hexpm", optional: false]}], "hexpm"}, + "gettext": {:hex, :gettext, "0.15.0", "40a2b8ce33a80ced7727e36768499fc9286881c43ebafccae6bab731e2b2b8ce", [:mix], [], "hexpm"}, "hackney": {:hex, :hackney, "1.8.6", "21a725db3569b3fb11a6af17d5c5f654052ce9624219f1317e8639183de4a423", [:rebar3], [{:certifi, "1.2.1", [hex: :certifi, repo: "hexpm", optional: false]}, {:idna, "5.0.2", [hex: :idna, repo: "hexpm", optional: false]}, {:metrics, "1.0.1", [hex: :metrics, repo: "hexpm", optional: false]}, {:mimerl, "1.0.2", [hex: :mimerl, repo: "hexpm", optional: false]}, {:ssl_verify_fun, "1.1.1", [hex: :ssl_verify_fun, repo: "hexpm", optional: false]}], "hexpm"}, "httpoison": {:hex, :httpoison, "0.11.2", "9e59f17a473ef6948f63c51db07320477bad8ba88cf1df60a3eee01150306665", [:mix], [{:hackney, "~> 1.8.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"}, "idna": {:hex, :idna, "5.0.2", "ac203208ada855d95dc591a764b6e87259cb0e2a364218f215ad662daa8cd6b4", [:rebar3], [{:unicode_util_compat, "0.2.0", [hex: :unicode_util_compat, repo: "hexpm", optional: false]}], "hexpm"}, - "meck": {:hex, :meck, "0.8.4", "59ca1cd971372aa223138efcf9b29475bde299e1953046a0c727184790ab1520", [:make, :rebar], [], "hexpm"}, + "makeup": {:hex, :makeup, "0.5.1", "966c5c2296da272d42f1de178c1d135e432662eca795d6dc12e5e8787514edf7", [:mix], [{:nimble_parsec, "~> 0.2.2", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"}, + "makeup_elixir": {:hex, :makeup_elixir, "0.8.0", "1204a2f5b4f181775a0e456154830524cf2207cf4f9112215c05e0b76e4eca8b", [:mix], [{:makeup, "~> 0.5.0", [hex: :makeup, repo: "hexpm", optional: false]}, {:nimble_parsec, "~> 0.2.2", [hex: :nimble_parsec, repo: "hexpm", optional: false]}], "hexpm"}, + "meck": {:hex, :meck, "0.8.12", "1f7b1a9f5d12c511848fec26bbefd09a21e1432eadb8982d9a8aceb9891a3cf2", [:rebar3], [], "hexpm"}, "metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], [], "hexpm"}, "mimerl": 
{:hex, :mimerl, "1.0.2", "993f9b0e084083405ed8252b99460c4f0563e41729ab42d9074fd5e52439be88", [:rebar3], [], "hexpm"}, - "mock": {:hex, :mock, "0.2.0", "5991877be6bb514b647dbd6f4869bc12bd7f2829df16e86c98d6108f966d34d7", [:mix], [{:meck, "~> 0.8.2", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm"}, + "mock": {:hex, :mock, "0.2.1", "bfdba786903e77f9c18772dee472d020ceb8ef000783e737725a4c8f54ad28ec", [:mix], [{:meck, "~> 0.8.2", [hex: :meck, repo: "hexpm", optional: false]}], "hexpm"}, + "nimble_parsec": {:hex, :nimble_parsec, "0.2.2", "d526b23bdceb04c7ad15b33c57c4526bf5f50aaa70c7c141b4b4624555c68259", [:mix], [], "hexpm"}, "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"}, "ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.1", "28a4d65b7f59893bc2c7de786dec1e1555bd742d336043fe644ae956c3497fbe", [:make, :rebar], [], "hexpm"}, - "timex": {:hex, :timex, "3.1.21", "7d1ec0f73c4668bea71f38af6357d75992f356a7e032c69620c5e87ca4b95c66", [:mix], [{:combine, "~> 0.7", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 0.1.8 or ~> 0.5", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm"}, - "tzdata": {:hex, :tzdata, "0.5.12", "1c17b68692c6ba5b6ab15db3d64cc8baa0f182043d5ae9d4b6d35d70af76f67b", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"}, - "unicode_util_compat": {:hex, :unicode_util_compat, "0.2.0", "dbbccf6781821b1c0701845eaf966c9b6d83d7c3bfc65ca2b78b88b8678bfa35", [:rebar3], [], "hexpm"}} + "timex": {:hex, :timex, "3.3.0", "e0695aa0ddb37d460d93a2db34d332c2c95a40c27edf22fbfea22eb8910a9c8d", [:mix], [{:combine, "~> 0.10", [hex: :combine, repo: "hexpm", optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, repo: "hexpm", optional: false]}, {:tzdata, "~> 0.1.8 or ~> 0.5", [hex: :tzdata, repo: "hexpm", optional: false]}], "hexpm"}, + "tzdata": {:hex, :tzdata, "0.5.17", 
"50793e3d85af49736701da1a040c415c97dc1caf6464112fd9bd18f425d3053b", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, repo: "hexpm", optional: false]}], "hexpm"}, + "unicode_util_compat": {:hex, :unicode_util_compat, "0.2.0", "dbbccf6781821b1c0701845eaf966c9b6d83d7c3bfc65ca2b78b88b8678bfa35", [:rebar3], [], "hexpm"}, +} diff --git a/test/cloud_watch_test.exs b/test/cloud_watch_test.exs index 17ad69a..fcaafa9 100644 --- a/test/cloud_watch_test.exs +++ b/test/cloud_watch_test.exs @@ -12,7 +12,10 @@ defmodule CloudWatchTest do setup_all do {:ok, _} = Cycler.start_link Logger.add_backend(@backend) - :ok = Logger.configure_backend(@backend, [format: "$message", level: :info, log_group_name: "testLogGroup", log_stream_name: "testLogStream", max_buffer_size: 39]) + config = [format: "$message", level: :info, log_group_name: "testLogGroup", + log_stream_name: "testLogStream", max_buffer_size: 39, + access_key_id: "fake"] + :ok = Logger.configure_backend(@backend, config) end setup do