diff --git a/.credo.exs b/.credo.exs
index c929de1a3..80db9ca01 100644
--- a/.credo.exs
+++ b/.credo.exs
@@ -70,7 +70,6 @@
#
{Credo.Check.Consistency.ExceptionNames, []},
{Credo.Check.Consistency.LineEndings, []},
- {Credo.Check.Consistency.ParameterPatternMatching, []},
{Credo.Check.Consistency.SpaceAroundOperators, []},
{Credo.Check.Consistency.SpaceInParentheses, []},
{Credo.Check.Consistency.TabsOrSpaces, []},
@@ -81,7 +80,6 @@
# You can customize the priority of any check
# Priority values are: `low, normal, high, higher`
#
- {Credo.Check.Design.AliasUsage, false},
# You can also customize the exit_status of each check.
# If you don't want TODO comments to cause `mix credo` to fail, just
# set this value to 0 (zero).
@@ -92,40 +90,26 @@
#
## Readability Checks
#
- {Credo.Check.Readability.AliasOrder, []},
{Credo.Check.Readability.FunctionNames, []},
- {Credo.Check.Readability.LargeNumbers, []},
- {Credo.Check.Readability.MaxLineLength, [priority: :low, max_length: 120]},
{Credo.Check.Readability.ModuleAttributeNames, []},
- {Credo.Check.Readability.ModuleDoc, []},
{Credo.Check.Readability.ModuleNames, []},
{Credo.Check.Readability.ParenthesesInCondition, []},
- {Credo.Check.Readability.ParenthesesOnZeroArityDefs, []},
{Credo.Check.Readability.PredicateFunctionNames, []},
- {Credo.Check.Readability.PreferImplicitTry, []},
{Credo.Check.Readability.RedundantBlankLines, []},
{Credo.Check.Readability.Semicolons, []},
{Credo.Check.Readability.SpaceAfterCommas, []},
- {Credo.Check.Readability.StringSigils, []},
{Credo.Check.Readability.TrailingBlankLine, []},
{Credo.Check.Readability.TrailingWhiteSpace, []},
- {Credo.Check.Readability.UnnecessaryAliasExpansion, []},
{Credo.Check.Readability.VariableNames, []},
#
## Refactoring Opportunities
#
- {Credo.Check.Refactor.CondStatements, []},
{Credo.Check.Refactor.CyclomaticComplexity, []},
{Credo.Check.Refactor.FunctionArity, []},
{Credo.Check.Refactor.LongQuoteBlocks, []},
- # {Credo.Check.Refactor.MapInto, []},
{Credo.Check.Refactor.MatchInCondition, []},
- {Credo.Check.Refactor.NegatedConditionsInUnless, []},
- {Credo.Check.Refactor.NegatedConditionsWithElse, []},
{Credo.Check.Refactor.Nesting, []},
- {Credo.Check.Refactor.UnlessWithElse, []},
- {Credo.Check.Refactor.WithClauses, []},
#
## Warnings
@@ -156,32 +140,57 @@
#
# Controversial and experimental checks (opt-in, just replace `false` with `[]`)
#
- {Credo.Check.Consistency.MultiAliasImportRequireUse, false},
{Credo.Check.Consistency.UnusedVariableNames, false},
{Credo.Check.Design.DuplicatedCode, false},
{Credo.Check.Readability.AliasAs, false},
- {Credo.Check.Readability.BlockPipe, false},
{Credo.Check.Readability.ImplTrue, false},
- {Credo.Check.Readability.MultiAlias, false},
{Credo.Check.Readability.SeparateAliasRequire, false},
- {Credo.Check.Readability.SinglePipe, false},
{Credo.Check.Readability.Specs, false},
- {Credo.Check.Readability.StrictModuleLayout, false},
{Credo.Check.Readability.WithCustomTaggedTuple, false},
{Credo.Check.Refactor.ABCSize, false},
{Credo.Check.Refactor.AppendSingleItem, false},
{Credo.Check.Refactor.DoubleBooleanNegation, false},
{Credo.Check.Refactor.ModuleDependencies, false},
{Credo.Check.Refactor.NegatedIsNil, false},
- {Credo.Check.Refactor.PipeChainStart, false},
{Credo.Check.Refactor.VariableRebinding, false},
{Credo.Check.Warning.LeakyEnvironment, false},
{Credo.Check.Warning.MapGetUnsafePass, false},
- {Credo.Check.Warning.UnsafeToAtom, false}
+ {Credo.Check.Warning.UnsafeToAtom, false},
#
# Custom checks can be created using `mix credo.gen.check`.
#
+
+ # Disable rules enforced by Styler
+ {Credo.Check.Consistency.MultiAliasImportRequireUse, false},
+ {Credo.Check.Consistency.ParameterPatternMatching, false},
+ {Credo.Check.Design.AliasUsage, false},
+ {Credo.Check.Readability.AliasOrder, false},
+ {Credo.Check.Readability.BlockPipe, false},
+ {Credo.Check.Readability.LargeNumbers, false},
+ {Credo.Check.Readability.ModuleDoc, false},
+ {Credo.Check.Readability.MultiAlias, false},
+ {Credo.Check.Readability.OneArityFunctionInPipe, false},
+ {Credo.Check.Readability.ParenthesesOnZeroArityDefs, false},
+ {Credo.Check.Readability.PipeIntoAnonymousFunctions, false},
+ {Credo.Check.Readability.PreferImplicitTry, false},
+ {Credo.Check.Readability.SinglePipe, false},
+ {Credo.Check.Readability.StrictModuleLayout, false},
+ {Credo.Check.Readability.StringSigils, false},
+ {Credo.Check.Readability.UnnecessaryAliasExpansion, false},
+ {Credo.Check.Readability.WithSingleClause, false},
+ {Credo.Check.Refactor.CaseTrivialMatches, false},
+ {Credo.Check.Refactor.CondStatements, false},
+ {Credo.Check.Refactor.FilterCount, false},
+ {Credo.Check.Refactor.MapInto, false},
+ {Credo.Check.Refactor.MapJoin, false},
+ {Credo.Check.Refactor.NegatedConditionsInUnless, false},
+ {Credo.Check.Refactor.NegatedConditionsWithElse, false},
+ {Credo.Check.Refactor.PipeChainStart, false},
+ {Credo.Check.Refactor.RedundantWithClauseResult, false},
+ {Credo.Check.Refactor.UnlessWithElse, false},
+ {Credo.Check.Refactor.WithClauses, false},
+ {Credo.Check.Readability.MaxLineLength, false}
]
}
]
diff --git a/.formatter.exs b/.formatter.exs
index 6a9bacda1..f41ab4b42 100644
--- a/.formatter.exs
+++ b/.formatter.exs
@@ -1,6 +1,6 @@
[
import_deps: [:ecto, :phoenix],
- plugins: [Phoenix.LiveView.HTMLFormatter],
+ plugins: [Phoenix.LiveView.HTMLFormatter, Styler],
inputs: ["*.{heex,ex,exs}", "priv/*/seeds.exs", "{config,lib,test}/**/*.{heex,ex,exs}"],
subdirectories: ["priv/*/migrations"]
]
diff --git a/config/config.exs b/config/config.exs
index 2cbc764b4..96b3dbf6c 100644
--- a/config/config.exs
+++ b/config/config.exs
@@ -7,6 +7,38 @@
# General application configuration
import Config
+# 12 hours in seconds
+max_session_time = 12 * 60 * 60
+
+# Addresses an issue with Oban
+# https://github.com/oban-bg/oban/issues/493#issuecomment-1187001822
+config :arrow, Arrow.Repo,
+ parameters: [
+ tcp_keepalives_idle: "60",
+ tcp_keepalives_interval: "5",
+ tcp_keepalives_count: "3"
+ ],
+ socket_options: [keepalive: true]
+
+config :arrow, ArrowWeb.AuthManager,
+ issuer: "arrow",
+ max_session_time: max_session_time,
+ # 30 minutes
+ idle_time: 30 * 60
+
+# Configures the endpoint
+config :arrow, ArrowWeb.Endpoint,
+ url: [host: "localhost"],
+ render_errors: [view: ArrowWeb.ErrorView, accepts: ~w(html json)],
+ pubsub_server: Arrow.PubSub,
+ live_view: [signing_salt: "35DDvOCJ"]
+
+# Configures Oban, the job processing library
+config :arrow, Oban,
+ engine: Oban.Engines.Basic,
+ queues: [default: 10, gtfs_import: 1],
+ repo: Arrow.Repo
+
config :arrow,
ecto_repos: [Arrow.Repo],
aws_rds_mod: ExAws.RDS,
@@ -44,28 +76,7 @@ config :arrow,
hastus_export_storage_request_fn: {ExAws, :request},
use_username_prefix?: false
-# Addresses an issue with Oban
-# https://github.com/oban-bg/oban/issues/493#issuecomment-1187001822
-config :arrow, Arrow.Repo,
- parameters: [
- tcp_keepalives_idle: "60",
- tcp_keepalives_interval: "5",
- tcp_keepalives_count: "3"
- ],
- socket_options: [keepalive: true]
-
-# Configures the endpoint
-config :arrow, ArrowWeb.Endpoint,
- url: [host: "localhost"],
- render_errors: [view: ArrowWeb.ErrorView, accepts: ~w(html json)],
- pubsub_server: Arrow.PubSub,
- live_view: [signing_salt: "35DDvOCJ"]
-
-# Configures Oban, the job processing library
-config :arrow, Oban,
- engine: Oban.Engines.Basic,
- queues: [default: 10, gtfs_import: 1],
- repo: Arrow.Repo
+config :elixir, :time_zone_database, Tzdata.TimeZoneDatabase
config :esbuild,
version: "0.17.11",
@@ -91,6 +102,25 @@ config :esbuild,
}
]
+config :ex_aws, json_codec: Jason
+
+config :ja_serializer,
+ key_format: :underscored
+
+# Configures Elixir's Logger
+config :logger, :console,
+ format: "$time $metadata[$level] $message\n",
+ metadata: [:request_id]
+
+config :mime, :types, %{
+ "application/vnd.api+json" => ["json-api"]
+}
+
+config :phoenix, :format_encoders, "json-api": Jason
+
+# Use Jason for JSON parsing in Phoenix
+config :phoenix, :json_library, Jason
+
# Configure tailwind (the version is required)
config :tailwind,
version: "3.4.0",
@@ -103,15 +133,6 @@ config :tailwind,
cd: Path.expand("../assets", __DIR__)
]
-# 12 hours in seconds
-max_session_time = 12 * 60 * 60
-
-config :arrow, ArrowWeb.AuthManager,
- issuer: "arrow",
- max_session_time: max_session_time,
- # 30 minutes
- idle_time: 30 * 60
-
config :ueberauth, Ueberauth,
providers: [
keycloak:
@@ -124,27 +145,6 @@ config :ueberauth, Ueberauth,
authorization_params_passthrough: ~w"prompt login_hint"}
]
-# Configures Elixir's Logger
-config :logger, :console,
- format: "$time $metadata[$level] $message\n",
- metadata: [:request_id]
-
-# Use Jason for JSON parsing in Phoenix
-config :phoenix, :json_library, Jason
-
-config :phoenix, :format_encoders, "json-api": Jason
-
-config :mime, :types, %{
- "application/vnd.api+json" => ["json-api"]
-}
-
-config :ex_aws, json_codec: Jason
-
-config :ja_serializer,
- key_format: :underscored
-
-config :elixir, :time_zone_database, Tzdata.TimeZoneDatabase
-
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{config_env()}.exs"
diff --git a/config/dev.exs b/config/dev.exs
index 4737c45d5..5ea6d9717 100644
--- a/config/dev.exs
+++ b/config/dev.exs
@@ -9,26 +9,14 @@ config :arrow, Arrow.Repo,
show_sensitive_data_on_connection_error: true,
pool_size: 10
+config :arrow, ArrowWeb.AuthManager, secret_key: "test key"
+
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with webpack to recompile .js and .css sources.
-config :arrow, ArrowWeb.Endpoint,
- http: [port: 4000],
- debug_errors: true,
- code_reloader: true,
- check_origin: false,
- secret_key_base: "local_secret_key_base_at_least_64_bytes_________________________________",
- watchers: [
- esbuild: {Esbuild, :install_and_run, [:default, ~w(--sourcemap=inline --watch)]},
- node:
- ~w(assets/node_modules/.bin/tsc --project assets --noEmit --watch --preserveWatchOutput),
- tailwind: {Tailwind, :install_and_run, [:default, ~w(--watch)]}
- ]
-
-config :arrow, ArrowWeb.AuthManager, secret_key: "test key"
# ## SSL Support
#
@@ -53,6 +41,17 @@ config :arrow, ArrowWeb.AuthManager, secret_key: "test key"
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
+config :arrow, ArrowWeb.Endpoint,
+ http: [port: 4000],
+ debug_errors: true,
+ code_reloader: true,
+ check_origin: false,
+ secret_key_base: "local_secret_key_base_at_least_64_bytes_________________________________",
+ watchers: [
+ esbuild: {Esbuild, :install_and_run, [:default, ~w(--sourcemap=inline --watch)]},
+ node: ~w(assets/node_modules/.bin/tsc --project assets --noEmit --watch --preserveWatchOutput),
+ tailwind: {Tailwind, :install_and_run, [:default, ~w(--watch)]}
+ ]
# Watch static and templates for browser reloading.
config :arrow, ArrowWeb.Endpoint,
@@ -64,16 +63,9 @@ config :arrow, ArrowWeb.Endpoint,
]
]
-config :ueberauth, Ueberauth,
- providers: [
- keycloak: {Arrow.Ueberauth.Strategy.Fake, [groups: ["admin"]]}
- ]
-
config :arrow, :redirect_http?, false
-
# Enable dev routes for dashboard and mailbox
config :arrow, dev_routes: true
-
# Set prefix env for s3 uploads
config :arrow,
shape_storage_enabled?: true,
@@ -87,16 +79,21 @@ config :arrow,
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
+# Initialize plugs at runtime for faster development compilation
+config :phoenix, :plug_init_mode, :runtime
+
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
-# Initialize plugs at runtime for faster development compilation
-config :phoenix, :plug_init_mode, :runtime
-
config :phoenix_live_view,
# Include HEEx debug annotations as HTML comments in rendered markup
debug_heex_annotations: true
+config :ueberauth, Ueberauth,
+ providers: [
+ keycloak: {Arrow.Ueberauth.Strategy.Fake, [groups: ["admin"]]}
+ ]
+
# Enable helpful, but potentially expensive runtime checks
# enable_expensive_runtime_checks: true
diff --git a/config/prod.exs b/config/prod.exs
index 7a96250cc..03044458a 100644
--- a/config/prod.exs
+++ b/config/prod.exs
@@ -10,24 +10,23 @@ import Config
# which you should run after static files are built and
# before starting your production server.
+config :arrow, Arrow.Repo, ssl: true
+config :arrow, ArrowWeb.AuthManager, secret_key: {System, :get_env, ["ARROW_AUTH_SECRET"]}
+
+config :arrow,
+ run_migrations_at_startup?: true,
+ ex_aws_requester: {ExAws, :request}
+
config :arrow,
shape_storage_enabled?: true,
gtfs_archive_storage_enabled?: true,
hastus_export_storage_enabled?: true
-config :arrow, ArrowWeb.AuthManager, secret_key: {System, :get_env, ["ARROW_AUTH_SECRET"]}
-
# Do not print debug messages in production
config :logger,
backends: [:console, Sentry.LoggerBackend],
level: :info
-config :arrow,
- run_migrations_at_startup?: true,
- ex_aws_requester: {ExAws, :request}
-
-config :arrow, Arrow.Repo, ssl: true
-
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
diff --git a/config/runtime.exs b/config/runtime.exs
index b3311b5f1..baa7ad445 100644
--- a/config/runtime.exs
+++ b/config/runtime.exs
@@ -20,15 +20,15 @@ config :arrow, Arrow.OpenRouteServiceAPI,
client: Arrow.OpenRouteServiceAPI.Client
if is_binary(keycloak_issuer) and not is_test? do
- config :arrow,
- keycloak_client_uuid: System.fetch_env!("KEYCLOAK_CLIENT_UUID"),
- keycloak_api_base: System.fetch_env!("KEYCLOAK_API_BASE")
-
keycloak_opts = [
client_id: System.fetch_env!("KEYCLOAK_CLIENT_ID"),
client_secret: System.fetch_env!("KEYCLOAK_CLIENT_SECRET")
]
+ config :arrow,
+ keycloak_client_uuid: System.fetch_env!("KEYCLOAK_CLIENT_UUID"),
+ keycloak_api_base: System.fetch_env!("KEYCLOAK_API_BASE")
+
config :ueberauth_oidcc,
issuers: [
%{
@@ -45,6 +45,10 @@ if config_env() == :prod do
sentry_env = System.get_env("SENTRY_ENV")
if not is_nil(sentry_env) do
+ config :logger, Sentry.LoggerBackend,
+ level: :warning,
+ capture_log_messages: true
+
config :sentry,
dsn: System.fetch_env!("SENTRY_DSN"),
environment_name: sentry_env,
@@ -53,26 +57,15 @@ if config_env() == :prod do
tags: %{
env: sentry_env
}
-
- config :logger, Sentry.LoggerBackend,
- level: :warning,
- capture_log_messages: true
end
- config :arrow, ArrowWeb.Endpoint,
- http: [:inet6, port: System.get_env("PORT", "4000")],
- url: [host: System.get_env("HOST"), port: 443, scheme: "https"],
- cache_static_manifest: "priv/static/cache_manifest.json",
- server: true,
- secret_key_base: System.fetch_env!("SECRET_KEY_BASE")
-
pool_size =
case System.get_env("DATABASE_POOL_SIZE") do
nil -> 10
val -> String.to_integer(val)
end
- port = System.get_env("DATABASE_PORT") |> String.to_integer()
+ port = "DATABASE_PORT" |> System.get_env() |> String.to_integer()
config :arrow, Arrow.Repo,
username: System.get_env("DATABASE_USER"),
@@ -85,6 +78,13 @@ if config_env() == :prod do
queue_target: 30_000,
queue_interval: 120_000
+ config :arrow, ArrowWeb.Endpoint,
+ http: [:inet6, port: System.get_env("PORT", "4000")],
+ url: [host: System.get_env("HOST"), port: 443, scheme: "https"],
+ cache_static_manifest: "priv/static/cache_manifest.json",
+ server: true,
+ secret_key_base: System.fetch_env!("SECRET_KEY_BASE")
+
config :arrow,
shape_storage_prefix_env: System.get_env("S3_PREFIX"),
gtfs_archive_storage_prefix_env: System.get_env("S3_PREFIX"),
@@ -94,11 +94,6 @@ end
sync_enabled = System.get_env("ARROW_SYNC_ENABLED") == "true"
if sync_enabled && config_env() != :test do
- config :arrow,
- sync_enabled: true,
- sync_domain: System.fetch_env!("ARROW_DOMAIN"),
- sync_api_key: System.fetch_env!("ARROW_API_KEY")
-
config :arrow, Oban,
plugins: [
{Oban.Plugins.Cron,
@@ -107,6 +102,11 @@ if sync_enabled && config_env() != :test do
{"0 * * * *", Arrow.SyncWorker}
]}
]
+
+ config :arrow,
+ sync_enabled: true,
+ sync_domain: System.fetch_env!("ARROW_DOMAIN"),
+ sync_api_key: System.fetch_env!("ARROW_API_KEY")
else
config :arrow,
sync_enabled: false
diff --git a/config/test.exs b/config/test.exs
index 96f1fc346..139bb4194 100644
--- a/config/test.exs
+++ b/config/test.exs
@@ -1,12 +1,6 @@
import Config
-config :arrow,
- shape_storage_enabled?: false,
- shape_storage_request_fn: {Arrow.Mock.ExAws.Request, :request},
- gtfs_archive_storage_enabled?: false,
- gtfs_archive_storage_request_fn: {Arrow.Mock.ExAws.Request, :request},
- hastus_export_storage_enabled?: false,
- hastus_export_storage_request_fn: {Arrow.Mock.ExAws.Request, :request}
+alias Arrow.Mock.ExAws.Request
# Configure your database
config :arrow, Arrow.Repo,
@@ -16,26 +10,52 @@ config :arrow, Arrow.Repo,
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
+config :arrow, ArrowWeb.AuthManager, secret_key: "test key"
+
config :arrow, ArrowWeb.Endpoint,
http: [port: 4002],
secret_key_base: "local_secret_key_base_at_least_64_bytes_________________________________",
server: true
-config :arrow, ArrowWeb.AuthManager, secret_key: "test key"
-
# Prevent Oban from running jobs and plugins during test runs
config :arrow, Oban, testing: :inline
+config :arrow, env: :test
-config :ueberauth, Ueberauth,
- providers: [
- keycloak: {Arrow.Ueberauth.Strategy.Fake, [groups: ["admin"]]}
- ]
+config :arrow,
+ fetch_adjustments?: false,
+ http_client: Arrow.HTTPMock
# Configure Keycloak
config :arrow,
keycloak_api_base: "https://keycloak.example/auth/realm/",
keycloak_client_uuid: "UUID"
+config :arrow,
+ shape_storage_enabled?: false,
+ shape_storage_request_fn: {Request, :request},
+ gtfs_archive_storage_enabled?: false,
+ gtfs_archive_storage_request_fn: {Request, :request},
+ hastus_export_storage_enabled?: false,
+ hastus_export_storage_request_fn: {Request, :request}
+
+config :arrow,
+ sync_enabled: false,
+ sync_api_key: "test-key",
+ sync_domain: "https://test.example.com"
+
+config :ex_aws,
+ access_key_id: "test_access_key_id",
+ secret_access_key: "test_secret_access_key",
+ region: "us-east-1"
+
+# Print only warnings and errors during test
+config :logger, level: :warning
+
+config :ueberauth, Ueberauth,
+ providers: [
+ keycloak: {Arrow.Ueberauth.Strategy.Fake, [groups: ["admin"]]}
+ ]
+
config :ueberauth_oidcc,
providers: [
keycloak: [
@@ -46,28 +66,9 @@ config :ueberauth_oidcc,
]
]
-config :arrow,
- fetch_adjustments?: false,
- http_client: Arrow.HTTPMock
-
-# Print only warnings and errors during test
-config :logger, level: :warning
-
-config :arrow, env: :test
-
config :wallaby,
driver: Wallaby.Chrome,
otp_app: :arrow,
screenshot_dir: "test/integration/screenshots",
screenshot_on_failure: true,
- max_wait_time: 10000
-
-config :ex_aws,
- access_key_id: "test_access_key_id",
- secret_access_key: "test_secret_access_key",
- region: "us-east-1"
-
-config :arrow,
- sync_enabled: false,
- sync_api_key: "test-key",
- sync_domain: "https://test.example.com"
+ max_wait_time: 10_000
diff --git a/lib/arrow/adjustment.ex b/lib/arrow/adjustment.ex
index 72287f9b7..fa27f9fc4 100644
--- a/lib/arrow/adjustment.ex
+++ b/lib/arrow/adjustment.ex
@@ -6,8 +6,10 @@ defmodule Arrow.Adjustment do
"""
use Ecto.Schema
+
import Ecto.Changeset
import Ecto.Query
+
alias Arrow.Repo
@type t :: %__MODULE__{
diff --git a/lib/arrow/auth_token.ex b/lib/arrow/auth_token.ex
index 0dde202ef..898c17ca5 100644
--- a/lib/arrow/auth_token.ex
+++ b/lib/arrow/auth_token.ex
@@ -5,6 +5,7 @@ defmodule Arrow.AuthToken do
Primarily used by gtfs_creator to fetch disruption information.
"""
use Ecto.Schema
+
import Ecto.Changeset
@type t :: %__MODULE__{
@@ -39,7 +40,7 @@ defmodule Arrow.AuthToken do
token
nil ->
- token = :crypto.strong_rand_bytes(16) |> Base.encode16() |> String.downcase()
+ token = 16 |> :crypto.strong_rand_bytes() |> Base.encode16() |> String.downcase()
auth_token = %__MODULE__{
username: username,
diff --git a/lib/arrow/db_structure.ex b/lib/arrow/db_structure.ex
index a4b3dd254..e39f796da 100644
--- a/lib/arrow/db_structure.ex
+++ b/lib/arrow/db_structure.ex
@@ -17,7 +17,9 @@ defmodule Arrow.DBStructure do
"""
import Ecto.Query
+
alias Arrow.DBStructure.Table
+ alias Ecto.Adapters.SQL
@temp_table "temp_join_table"
@type db_structure :: [Table.t()]
@@ -129,8 +131,7 @@ defmodule Arrow.DBStructure do
end
defp nullify_optional_fkeys(repo, structure) do
- structure
- |> Enum.each(fn table ->
+ Enum.each(structure, fn table ->
Enum.each(table.optional_fkeys, fn column ->
set = Keyword.new([{column, nil}])
table.name |> from(update: [set: ^set]) |> repo.update_all([])
@@ -159,7 +160,7 @@ defmodule Arrow.DBStructure do
defp add_optional_fkeys(repo, structure, data) do
%{num_rows: _, rows: _} =
- Ecto.Adapters.SQL.query!(
+ SQL.query!(
repo,
"CREATE TEMP TABLE " <> @temp_table <> " (table_id INT, fkey_value INT)",
[]
@@ -177,14 +178,14 @@ defmodule Arrow.DBStructure do
{_num_inserted, _return} = repo.insert_all(@temp_table, temp_rows)
- from(t in table.name,
- join: j in @temp_table,
- on: t.id == j.table_id,
- update: [
- set: [{^fkey_column, field(j, :fkey_value)}]
- ]
+ repo.update_all(
+ from(t in table.name,
+ join: j in @temp_table,
+ on: t.id == j.table_id,
+ update: [set: [{^fkey_column, field(j, :fkey_value)}]]
+ ),
+ []
)
- |> repo.update_all([])
@temp_table |> from() |> repo.delete_all()
end)
@@ -194,7 +195,7 @@ defmodule Arrow.DBStructure do
defp reset_sequences(repo, structure) do
Enum.each(structure, fn table ->
Enum.each(table.sequences, fn {seq_col, seq_name} ->
- max_id = from(t in table.name, select: max(field(t, ^seq_col))) |> repo.one()
+ max_id = repo.one(from(t in table.name, select: max(field(t, ^seq_col))))
reset_sequence(repo, seq_name, max_id)
end)
end)
@@ -205,7 +206,7 @@ defmodule Arrow.DBStructure do
end
defp reset_sequence(repo, seq_name, max_id) do
- Ecto.Adapters.SQL.query!(
+ SQL.query!(
repo,
"ALTER SEQUENCE #{seq_name} RESTART WITH #{max_id + 1}",
[]
diff --git a/lib/arrow/disruption.ex b/lib/arrow/disruption.ex
index 722c5dcfc..1a49feb98 100644
--- a/lib/arrow/disruption.ex
+++ b/lib/arrow/disruption.ex
@@ -7,18 +7,21 @@ defmodule Arrow.Disruption do
- Trip short names (Commuter Rail only)
"""
use Ecto.Schema
+
import Ecto.Query
alias Arrow.Disruption.Note
- alias Arrow.{DisruptionRevision, Repo}
+ alias Arrow.DisruptionRevision
+ alias Arrow.Repo
+ alias Ecto.Association.NotLoaded
alias Ecto.Changeset
alias Ecto.Multi
@type id :: integer
@type t :: %__MODULE__{
id: id,
- published_revision: DisruptionRevision.t() | Ecto.Association.NotLoaded.t(),
- notes: [Note.t()] | Ecto.Association.NotLoaded.t(),
+ published_revision: DisruptionRevision.t() | NotLoaded.t(),
+ notes: [Note.t()] | NotLoaded.t(),
last_published_at: DateTime.t() | nil,
inserted_at: DateTime.t(),
updated_at: DateTime.t()
@@ -64,7 +67,8 @@ defmodule Arrow.Disruption do
Multi.new()
|> Multi.insert(:disruption, %__MODULE__{})
|> Multi.insert(:revision, fn %{disruption: %{id: id}} ->
- DisruptionRevision.new(disruption_id: id)
+ [disruption_id: id]
+ |> DisruptionRevision.new()
|> DisruptionRevision.changeset(attrs)
end)
|> maybe_add_note(author_id, note_params(attrs))
@@ -119,12 +123,7 @@ defmodule Arrow.Disruption do
@spec latest_revision_id(id) :: DisruptionRevision.id()
def latest_revision_id(id) do
- from(r in DisruptionRevision,
- where: r.disruption_id == ^id,
- select: max(r.id),
- group_by: r.disruption_id
- )
- |> Repo.one!()
+ Repo.one!(from(r in DisruptionRevision, where: r.disruption_id == ^id, select: max(r.id), group_by: r.disruption_id))
end
@doc """
@@ -166,14 +165,15 @@ defmodule Arrow.Disruption do
"""
@spec latest_vs_published() :: {[t()], [t()]}
def latest_vs_published do
- from([disruptions: d, latest_ids: l] in with_latest_revision_ids(),
- join: r in assoc(d, :revisions),
- on: r.disruption_id == d.id,
- where: is_nil(d.published_revision_id) or d.published_revision_id != l.latest_revision_id,
- where: r.id == d.published_revision_id or r.id == l.latest_revision_id,
- preload: [revisions: {r, ^DisruptionRevision.associations()}]
+ Arrow.Repo.all(
+ from([disruptions: d, latest_ids: l] in with_latest_revision_ids(),
+ join: r in assoc(d, :revisions),
+ on: r.disruption_id == d.id,
+ where: is_nil(d.published_revision_id) or d.published_revision_id != l.latest_revision_id,
+ where: r.id == d.published_revision_id or r.id == l.latest_revision_id,
+ preload: [revisions: {r, ^DisruptionRevision.associations()}]
+ )
)
- |> Arrow.Repo.all()
end
@doc """
diff --git a/lib/arrow/disruption/day_of_week.ex b/lib/arrow/disruption/day_of_week.ex
index 04e961295..60b96438f 100644
--- a/lib/arrow/disruption/day_of_week.ex
+++ b/lib/arrow/disruption/day_of_week.ex
@@ -4,6 +4,7 @@ defmodule Arrow.Disruption.DayOfWeek do
"""
use Ecto.Schema
+
import Ecto.Changeset
@type t :: %__MODULE__{
@@ -51,7 +52,7 @@ defmodule Arrow.Disruption.DayOfWeek do
is_nil(start_time) or is_nil(end_time) ->
changeset
- not (Time.compare(start_time, end_time) == :lt) ->
+ not Time.before?(start_time, end_time) ->
add_error(changeset, :days_of_week, "start time should be before end time")
true ->
diff --git a/lib/arrow/disruption/exception.ex b/lib/arrow/disruption/exception.ex
index d14b52f91..009af6cdc 100644
--- a/lib/arrow/disruption/exception.ex
+++ b/lib/arrow/disruption/exception.ex
@@ -4,6 +4,7 @@ defmodule Arrow.Disruption.Exception do
"""
use Ecto.Schema
+
import Ecto.Changeset
@type t :: %__MODULE__{
diff --git a/lib/arrow/disruption/note.ex b/lib/arrow/disruption/note.ex
index 82d3828f8..cf87051cc 100644
--- a/lib/arrow/disruption/note.ex
+++ b/lib/arrow/disruption/note.ex
@@ -4,6 +4,7 @@ defmodule Arrow.Disruption.Note do
"""
use Ecto.Schema
+
import Ecto.Changeset
alias Arrow.Disruption
diff --git a/lib/arrow/disruption/trip_short_name.ex b/lib/arrow/disruption/trip_short_name.ex
index 2804d40b1..8637050f5 100644
--- a/lib/arrow/disruption/trip_short_name.ex
+++ b/lib/arrow/disruption/trip_short_name.ex
@@ -5,6 +5,7 @@ defmodule Arrow.Disruption.TripShortName do
"""
use Ecto.Schema
+
import Ecto.Changeset
@type t :: %__MODULE__{
diff --git a/lib/arrow/disruption_revision.ex b/lib/arrow/disruption_revision.ex
index 3316ce0da..7f83a28a7 100644
--- a/lib/arrow/disruption_revision.ex
+++ b/lib/arrow/disruption_revision.ex
@@ -4,10 +4,16 @@ defmodule Arrow.DisruptionRevision do
"""
use Ecto.Schema
+
import Ecto.Query
- alias Arrow.{Adjustment, Disruption, Repo}
- alias Arrow.Disruption.{DayOfWeek, Exception, TripShortName}
+ alias Arrow.Adjustment
+ alias Arrow.Disruption
+ alias Arrow.Disruption.DayOfWeek
+ alias Arrow.Disruption.Exception
+ alias Arrow.Disruption.TripShortName
+ alias Arrow.Repo
+ alias Ecto.Association.NotLoaded
alias Ecto.Changeset
@type id :: integer
@@ -20,11 +26,11 @@ defmodule Arrow.DisruptionRevision do
description: String.t(),
adjustment_kind: atom() | nil,
note_body: String.t() | nil,
- disruption: Disruption.t() | Ecto.Association.NotLoaded.t(),
- days_of_week: [DayOfWeek.t()] | Ecto.Association.NotLoaded.t(),
- exceptions: [Exception.t()] | Ecto.Association.NotLoaded.t(),
- trip_short_names: [TripShortName.t()] | Ecto.Association.NotLoaded.t(),
- adjustments: [Adjustment.t()] | Ecto.Association.NotLoaded.t(),
+ disruption: Disruption.t() | NotLoaded.t(),
+ days_of_week: [DayOfWeek.t()] | NotLoaded.t(),
+ exceptions: [Exception.t()] | NotLoaded.t(),
+ trip_short_names: [TripShortName.t()] | NotLoaded.t(),
+ adjustments: [Adjustment.t()] | NotLoaded.t(),
inserted_at: DateTime.t() | nil,
updated_at: DateTime.t() | nil,
title: String.t()
@@ -106,11 +112,11 @@ defmodule Arrow.DisruptionRevision do
%__MODULE__{},
revision
|> clone_fields()
- |> Map.merge(%{adjustments: adjustments})
+ |> Map.put(:adjustments, adjustments)
|> Map.merge(
- ~w(days_of_week exceptions trip_short_names)a
- |> Enum.map(fn assoc -> {assoc, Enum.map(Map.get(revision, assoc), &clone_fields/1)} end)
- |> Enum.into(%{})
+ Map.new(~w(days_of_week exceptions trip_short_names)a, fn assoc ->
+ {assoc, Enum.map(Map.get(revision, assoc), &clone_fields/1)}
+ end)
)
)
end
@@ -122,21 +128,22 @@ defmodule Arrow.DisruptionRevision do
"""
@spec new(Enum.t()) :: t()
def new(attrs \\ %{}) do
- %__MODULE__{adjustments: [], days_of_week: [], exceptions: [], trip_short_names: []}
- |> struct!(attrs)
+ struct!(%__MODULE__{adjustments: [], days_of_week: [], exceptions: [], trip_short_names: []}, attrs)
end
@spec publish!([integer()]) :: :ok
def publish!(ids) do
Repo.transaction(fn ->
# Update disruptions only where the published revision is changing
- from(d in Disruption,
- join: dr in assoc(d, :revisions),
- where: dr.id in ^ids,
- where: dr.id != d.published_revision_id or is_nil(d.published_revision_id),
- update: [set: [published_revision_id: dr.id, last_published_at: fragment("now()")]]
+ Repo.update_all(
+ from(d in Disruption,
+ join: dr in assoc(d, :revisions),
+ where: dr.id in ^ids,
+ where: dr.id != d.published_revision_id or is_nil(d.published_revision_id),
+ update: [set: [published_revision_id: dr.id, last_published_at: fragment("now()")]]
+ ),
+ []
)
- |> Repo.update_all([])
# since GTFS creator doesn't know about deleted disruptions, consider any currently
# deleted disruptions part of this publishing notice.
@@ -166,13 +173,14 @@ defmodule Arrow.DisruptionRevision do
@spec publish_deleted!() :: :ok
defp publish_deleted! do
- from(
- [disruptions: d, revisions: r] in Disruption.with_latest_revisions(),
- where: r.is_active == false,
- where: is_nil(d.published_revision_id) or d.published_revision_id != r.id,
- update: [set: [published_revision_id: r.id, last_published_at: fragment("now()")]]
+ Repo.update_all(
+ from([disruptions: d, revisions: r] in Disruption.with_latest_revisions(),
+ where: r.is_active == false,
+ where: is_nil(d.published_revision_id) or d.published_revision_id != r.id,
+ update: [set: [published_revision_id: r.id, last_published_at: fragment("now()")]]
+ ),
+ []
)
- |> Repo.update_all([])
:ok
end
@@ -183,9 +191,7 @@ defmodule Arrow.DisruptionRevision do
cond do
adjustments == [] ->
- Changeset.validate_required(changeset, :adjustment_kind,
- message: "is required without adjustments"
- )
+ Changeset.validate_required(changeset, :adjustment_kind, message: "is required without adjustments")
adjustments != [] and kind not in [nil, ""] ->
Changeset.add_error(changeset, :adjustment_kind, "cannot be set with adjustments")
@@ -278,7 +284,7 @@ defmodule Arrow.DisruptionRevision do
is_nil(start_date) or is_nil(end_date) ->
changeset
- Date.compare(start_date, end_date) == :gt ->
+ Date.after?(start_date, end_date) ->
Changeset.add_error(changeset, :start_date, "can't be after end date")
true ->
@@ -287,7 +293,7 @@ defmodule Arrow.DisruptionRevision do
end
defp date_range(start_date, end_date) do
- if Date.compare(start_date, end_date) == :gt do
+ if Date.after?(start_date, end_date) do
Date.range(start_date, end_date, -1)
else
Date.range(start_date, end_date)
diff --git a/lib/arrow/disruptions.ex b/lib/arrow/disruptions.ex
index dc3f645da..7e34047ae 100644
--- a/lib/arrow/disruptions.ex
+++ b/lib/arrow/disruptions.ex
@@ -4,11 +4,11 @@ defmodule Arrow.Disruptions do
"""
import Ecto.Query, warn: false
- alias Arrow.Repo
alias Arrow.Disruptions.DisruptionV2
alias Arrow.Disruptions.Limit
alias Arrow.Disruptions.ReplacementService
+ alias Arrow.Repo
alias Arrow.Shuttles
@preloads [
@@ -65,7 +65,7 @@ defmodule Arrow.Disruptions do
|> Repo.insert()
case disruption_v2 do
- {:ok, disruption_v2} -> {:ok, disruption_v2 |> Repo.preload(@preloads)}
+ {:ok, disruption_v2} -> {:ok, Repo.preload(disruption_v2, @preloads)}
err -> err
end
end
@@ -89,7 +89,7 @@ defmodule Arrow.Disruptions do
|> Repo.update()
case update_disruption_v2 do
- {:ok, disruption_v2} -> {:ok, disruption_v2 |> Repo.preload(@preloads)}
+ {:ok, disruption_v2} -> {:ok, Repo.preload(disruption_v2, @preloads)}
err -> err
end
end
@@ -138,7 +138,7 @@ defmodule Arrow.Disruptions do
"""
def get_replacement_service!(id),
- do: Repo.get!(ReplacementService, id) |> Repo.preload(@preloads[:replacement_services])
+ do: ReplacementService |> Repo.get!(id) |> Repo.preload(@preloads[:replacement_services])
@doc """
Creates a replacement_service.
@@ -159,7 +159,7 @@ defmodule Arrow.Disruptions do
|> Repo.insert()
case create_replacement_service do
- {:ok, rs} -> {:ok, rs |> Repo.preload(@preloads[:replacement_services])}
+ {:ok, rs} -> {:ok, Repo.preload(rs, @preloads[:replacement_services])}
err -> err
end
end
@@ -183,7 +183,7 @@ defmodule Arrow.Disruptions do
|> Repo.update()
case update_replacement_service do
- {:ok, rs} -> {:ok, rs |> Repo.preload(@preloads[:replacement_services])}
+ {:ok, rs} -> {:ok, Repo.preload(rs, @preloads[:replacement_services])}
err -> err
end
end
@@ -227,19 +227,14 @@ defmodule Arrow.Disruptions do
"""
def get_limits_in_date_range(start_date, end_date) do
- from(l in Limit,
- join: d in assoc(l, :disruption),
- where: d.is_active == true,
- where: l.start_date <= ^end_date and l.end_date >= ^start_date,
- preload: [
- :disruption,
- :route,
- :start_stop,
- :end_stop,
- limit_day_of_weeks: :limit
- ]
+ Repo.all(
+ from(l in Limit,
+ join: d in assoc(l, :disruption),
+ where: d.is_active == true,
+ where: l.start_date <= ^end_date and l.end_date >= ^start_date,
+ preload: [:disruption, :route, :start_stop, :end_stop, limit_day_of_weeks: :limit]
+ )
)
- |> Repo.all()
end
@spec replacement_service_trips_with_times(ReplacementService.t(), String.t()) :: map()
@@ -250,8 +245,7 @@ defmodule Arrow.Disruptions do
day_of_week_data = source_workbook_data["#{day_of_week} headways and runtimes"]
{first_trips, last_trips, headway_periods} =
- Enum.reduce(day_of_week_data, {%{}, %{}, %{}}, fn data,
- {first_trips, last_trips, headway_periods} ->
+ Enum.reduce(day_of_week_data, {%{}, %{}, %{}}, fn data, {first_trips, last_trips, headway_periods} ->
case data do
%{"first_trip_0" => first_trip_0, "first_trip_1" => first_trip_1} ->
{%{0 => first_trip_0, 1 => first_trip_1}, last_trips, headway_periods}
@@ -307,8 +301,7 @@ defmodule Arrow.Disruptions do
end)
{_, stop_times} =
- Enum.reduce(shuttle_route.route_stops, {start_time, []}, fn route_stop,
- {current_stop_time, stop_times} ->
+ Enum.reduce(shuttle_route.route_stops, {start_time, []}, fn route_stop, {current_stop_time, stop_times} ->
{if is_nil(route_stop.time_to_next_stop) do
current_stop_time
else
@@ -333,21 +326,11 @@ defmodule Arrow.Disruptions do
}
end
- defp do_make_trip_start_times(
- first_trip_start_time,
- last_trip_start_time,
- trip_start_times,
- _headway_periods
- )
+ defp do_make_trip_start_times(first_trip_start_time, last_trip_start_time, trip_start_times, _headway_periods)
when first_trip_start_time > last_trip_start_time,
do: trip_start_times
- defp do_make_trip_start_times(
- first_trip_start_time,
- last_trip_start_time,
- trip_start_times,
- headway_periods
- ) do
+ defp do_make_trip_start_times(first_trip_start_time, last_trip_start_time, trip_start_times, headway_periods) do
headway =
headway_periods |> Map.get(start_of_hour(first_trip_start_time)) |> Map.get("headway")
@@ -383,10 +366,7 @@ defmodule Arrow.Disruptions do
{nil, nil}
end
- def start_end_dates(%DisruptionV2{
- limits: limits,
- replacement_services: replacement_services
- }) do
+ def start_end_dates(%DisruptionV2{limits: limits, replacement_services: replacement_services}) do
min_date =
(limits ++ replacement_services)
|> Enum.map(& &1.start_date)
diff --git a/lib/arrow/disruptions/disruption_v2.ex b/lib/arrow/disruptions/disruption_v2.ex
index 930b1c2a6..81c5e708c 100644
--- a/lib/arrow/disruptions/disruption_v2.ex
+++ b/lib/arrow/disruptions/disruption_v2.ex
@@ -5,10 +5,13 @@ defmodule Arrow.Disruptions.DisruptionV2 do
See: https://github.com/mbta/gtfs_creator/blob/ab5aac52561027aa13888e4c4067a8de177659f6/gtfs_creator2/disruptions/disruption.py
"""
use Ecto.Schema
+
import Ecto.Changeset
alias Arrow.Disruptions
+ alias Arrow.Disruptions.Limit
alias Arrow.Hastus.Export
+ alias Ecto.Association.NotLoaded
@type t :: %__MODULE__{
title: String.t() | nil,
@@ -17,10 +20,9 @@ defmodule Arrow.Disruptions.DisruptionV2 do
description: String.t() | nil,
inserted_at: DateTime.t() | nil,
updated_at: DateTime.t() | nil,
- limits: [Arrow.Disruptions.Limit.t()] | Ecto.Association.NotLoaded.t(),
- replacement_services:
- [Disruptions.ReplacementService.t()] | Ecto.Association.NotLoaded.t(),
- hastus_exports: [Arrow.Hastus.Export.t()] | Ecto.Association.NotLoaded.t()
+ limits: [Limit.t()] | NotLoaded.t(),
+ replacement_services: [Disruptions.ReplacementService.t()] | NotLoaded.t(),
+ hastus_exports: [Arrow.Hastus.Export.t()] | NotLoaded.t()
}
schema "disruptionsv2" do
@@ -29,7 +31,7 @@ defmodule Arrow.Disruptions.DisruptionV2 do
field :is_active, :boolean
field :description, :string
- has_many :limits, Arrow.Disruptions.Limit,
+ has_many :limits, Limit,
foreign_key: :disruption_id,
on_replace: :delete
@@ -49,14 +51,13 @@ defmodule Arrow.Disruptions.DisruptionV2 do
disruption_v2
|> cast(attrs, [:title, :is_active, :description])
|> cast(attrs, [:mode], force_changes: true)
- |> cast_assoc(:limits, with: &Arrow.Disruptions.Limit.changeset/2)
+ |> cast_assoc(:limits, with: &Limit.changeset/2)
|> cast_assoc(:replacement_services, with: &Disruptions.ReplacementService.changeset/2)
|> validate_required([:title, :mode, :is_active])
end
def new(attrs \\ %{}) do
- %__MODULE__{limits: [], replacement_services: [], mode: :subway}
- |> struct!(attrs)
+ struct!(%__MODULE__{limits: [], replacement_services: [], mode: :subway}, attrs)
end
@doc """
diff --git a/lib/arrow/disruptions/limit.ex b/lib/arrow/disruptions/limit.ex
index 4956d4d1a..5fd354c2c 100644
--- a/lib/arrow/disruptions/limit.ex
+++ b/lib/arrow/disruptions/limit.ex
@@ -2,11 +2,14 @@ defmodule Arrow.Disruptions.Limit do
@moduledoc "schema for a limit for the db"
use Ecto.Schema
+
import Ecto.Changeset
alias Arrow.Disruptions.DisruptionV2
- alias Arrow.Gtfs.{Route, Stop}
+ alias Arrow.Gtfs.Route
+ alias Arrow.Gtfs.Stop
alias Arrow.Limits.LimitDayOfWeek
+ alias Ecto.Association.NotLoaded
@default_day_of_weeks_list [
%LimitDayOfWeek{day_name: :monday},
@@ -21,11 +24,11 @@ defmodule Arrow.Disruptions.Limit do
@type t :: %__MODULE__{
start_date: Date.t() | nil,
end_date: Date.t() | nil,
- disruption: DisruptionV2.t() | Ecto.Association.NotLoaded.t(),
- route: Route.t() | Ecto.Association.NotLoaded.t(),
- start_stop: Stop.t() | Ecto.Association.NotLoaded.t(),
- end_stop: Stop.t() | Ecto.Association.NotLoaded.t(),
- limit_day_of_weeks: [LimitDayOfWeek.t()] | Ecto.Association.NotLoaded.t()
+ disruption: DisruptionV2.t() | NotLoaded.t(),
+ route: Route.t() | NotLoaded.t(),
+ start_stop: Stop.t() | NotLoaded.t(),
+ end_stop: Stop.t() | NotLoaded.t(),
+ limit_day_of_weeks: [LimitDayOfWeek.t()] | NotLoaded.t()
}
schema "limits" do
@@ -33,10 +36,10 @@ defmodule Arrow.Disruptions.Limit do
field :end_date, :date
field :check_for_overlap, :boolean, default: true
field :editing?, :boolean, virtual: true, default: false
- belongs_to :disruption, Arrow.Disruptions.DisruptionV2
- belongs_to :route, Arrow.Gtfs.Route, type: :string
- belongs_to :start_stop, Arrow.Gtfs.Stop, type: :string
- belongs_to :end_stop, Arrow.Gtfs.Stop, type: :string
+ belongs_to :disruption, DisruptionV2
+ belongs_to :route, Route, type: :string
+ belongs_to :start_stop, Stop, type: :string
+ belongs_to :end_stop, Stop, type: :string
has_many :limit_day_of_weeks, LimitDayOfWeek, on_replace: :delete, preload_order: [:day_name]
timestamps(type: :utc_datetime)
@@ -81,8 +84,7 @@ defmodule Arrow.Disruptions.Limit do
"""
@spec new(Enum.t()) :: t()
def new(attrs \\ %{}) do
- %__MODULE__{limit_day_of_weeks: @default_day_of_weeks_list}
- |> struct!(attrs)
+ struct!(%__MODULE__{limit_day_of_weeks: @default_day_of_weeks_list}, attrs)
end
@spec validate_start_date_before_end_date(Ecto.Changeset.t(t())) :: Ecto.Changeset.t(t())
@@ -94,7 +96,7 @@ defmodule Arrow.Disruptions.Limit do
is_nil(start_date) or is_nil(end_date) ->
changeset
- Date.compare(start_date, end_date) == :gt ->
+ Date.after?(start_date, end_date) ->
add_error(changeset, :start_date, "start date should not be after end date")
true ->
@@ -114,9 +116,7 @@ defmodule Arrow.Disruptions.Limit do
end
@spec dow_in_date_range(Date.t() | nil, Date.t() | nil) :: MapSet.t(LimitDayOfWeek.day_name())
- defp dow_in_date_range(start_date, end_date)
- when is_nil(start_date)
- when is_nil(end_date) do
+ defp dow_in_date_range(start_date, end_date) when is_nil(start_date) when is_nil(end_date) do
MapSet.new(~w[monday tuesday wednesday thursday friday saturday sunday]a)
end
diff --git a/lib/arrow/disruptions/replacement_service.ex b/lib/arrow/disruptions/replacement_service.ex
index 652f59536..2c48e3ae8 100644
--- a/lib/arrow/disruptions/replacement_service.ex
+++ b/lib/arrow/disruptions/replacement_service.ex
@@ -5,6 +5,7 @@ defmodule Arrow.Disruptions.ReplacementService do
See related: https://github.com/mbta/gtfs_creator/blob/ab5aac52561027aa13888e4c4067a8de177659f6/gtfs_creator2/disruptions/activated_shuttles.py
"""
use Ecto.Schema
+
import Ecto.Changeset
import Ecto.Query, only: [from: 2]
@@ -13,6 +14,7 @@ defmodule Arrow.Disruptions.ReplacementService do
alias Arrow.Repo.MapForForm
alias Arrow.Shuttles
alias Arrow.Shuttles.Shuttle
+ alias Ecto.Association.NotLoaded
@type stop_time :: %{stop_id: String.t(), stop_time: String.t()}
@type direction_id :: String.t()
@@ -25,8 +27,8 @@ defmodule Arrow.Disruptions.ReplacementService do
end_date: Date.t() | nil,
source_workbook_data: map(),
source_workbook_filename: String.t(),
- disruption: DisruptionV2.t() | Ecto.Association.NotLoaded.t(),
- shuttle: Shuttle.t() | Ecto.Association.NotLoaded.t(),
+ disruption: DisruptionV2.t() | NotLoaded.t(),
+ shuttle: Shuttle.t() | NotLoaded.t(),
timetable: %{weekday: timetable(), saturday: timetable(), sunday: timetable()} | nil
}
@@ -83,7 +85,7 @@ defmodule Arrow.Disruptions.ReplacementService do
is_nil(start_date) or is_nil(end_date) ->
changeset
- Date.compare(start_date, end_date) == :gt ->
+ Date.after?(start_date, end_date) ->
add_error(changeset, :start_date, "start date should not be after end date")
true ->
@@ -93,13 +95,11 @@ defmodule Arrow.Disruptions.ReplacementService do
def add_timetable(%__MODULE__{} = replacement_service) do
timetable =
- schedule_service_types()
- |> Enum.map(fn service_type ->
+ Map.new(schedule_service_types(), fn service_type ->
{service_type, trips_with_times(replacement_service, service_type)}
end)
- |> Enum.into(%{})
- %__MODULE__{replacement_service | timetable: timetable}
+ %{replacement_service | timetable: timetable}
end
@spec get_replacement_services_with_timetables(Date.t(), Date.t()) ::
@@ -133,10 +133,7 @@ defmodule Arrow.Disruptions.ReplacementService do
last_trips: %{0 => String.t(), 1 => String.t()}
}
}
- def first_last_trip_times(
- %__MODULE__{} = replacement_service,
- schedule_service_types \\ schedule_service_types()
- ) do
+ def first_last_trip_times(%__MODULE__{} = replacement_service, schedule_service_types \\ schedule_service_types()) do
schedule_service_types
|> Enum.map(fn service_type ->
service_type_abbreviation = Map.get(@service_type_to_workbook_abbreviation, service_type)
@@ -158,17 +155,12 @@ defmodule Arrow.Disruptions.ReplacementService do
end)
end
- defp trips_with_times(
- %__MODULE__{source_workbook_data: workbook_data} = replacement_service,
- service_type_atom
- ) do
+ defp trips_with_times(%__MODULE__{source_workbook_data: workbook_data} = replacement_service, service_type_atom) do
service_type_abbreviation = Map.get(@service_type_to_workbook_abbreviation, service_type_atom)
if day_of_week_data =
Map.get(workbook_data, workbook_column_from_day_of_week(service_type_abbreviation)) do
do_trips_with_times(replacement_service, day_of_week_data)
- else
- nil
end
end
@@ -186,17 +178,11 @@ defmodule Arrow.Disruptions.ReplacementService do
{first_trips, %{0 => last_trip_0, 1 => last_trip_1}, headway_periods}
end
- defp reduce_workbook_data(
- %{"start_time" => start_time} = headway_period,
- {first_trips, last_trips, headway_periods}
- ) do
+ defp reduce_workbook_data(%{"start_time" => start_time} = headway_period, {first_trips, last_trips, headway_periods}) do
{first_trips, last_trips, Map.put(headway_periods, start_time, headway_period)}
end
- defp do_trips_with_times(
- %__MODULE__{shuttle: shuttle},
- day_of_week_data
- ) do
+ defp do_trips_with_times(%__MODULE__{shuttle: shuttle}, day_of_week_data) do
# to do: find a way to ensure that display_stop_id is always populate on every shuttle route stop
# regardless of from where the shuttle comes
# (e.g. if a shuttle comes from a join, it should still have display_stop_id populated)
@@ -251,8 +237,7 @@ defmodule Arrow.Disruptions.ReplacementService do
end)
{_, stop_times} =
- Enum.reduce(shuttle_route.route_stops, {start_time, []}, fn route_stop,
- {current_stop_time, stop_times} ->
+ Enum.reduce(shuttle_route.route_stops, {start_time, []}, fn route_stop, {current_stop_time, stop_times} ->
{if is_nil(route_stop.time_to_next_stop) do
current_stop_time
else
@@ -277,8 +262,7 @@ defmodule Arrow.Disruptions.ReplacementService do
defp next_trip_start_time(last_trip_start, last_trip_start, _headway_periods), do: :done
- defp next_trip_start_time(trip_start, last_trip_start, headway_periods)
- when trip_start < last_trip_start do
+ defp next_trip_start_time(trip_start, last_trip_start, headway_periods) when trip_start < last_trip_start do
headway =
headway_periods
|> Map.get_lazy(start_of_hour(trip_start), fn ->
diff --git a/lib/arrow/disruptions/replacement_service_upload.ex b/lib/arrow/disruptions/replacement_service_upload.ex
index 91178249d..fd1d2e6a2 100644
--- a/lib/arrow/disruptions/replacement_service_upload.ex
+++ b/lib/arrow/disruptions/replacement_service_upload.ex
@@ -1,10 +1,8 @@
defmodule Arrow.Disruptions.ReplacementServiceUpload do
@moduledoc "functions for extracting shuttle replacement services from xlsx uploads"
- alias Arrow.Disruptions.ReplacementServiceUpload.{
- FirstTrip,
- LastTrip,
- Runtimes
- }
+ alias Arrow.Disruptions.ReplacementServiceUpload.FirstTrip
+ alias Arrow.Disruptions.ReplacementServiceUpload.LastTrip
+ alias Arrow.Disruptions.ReplacementServiceUpload.Runtimes
require Logger
@@ -58,7 +56,7 @@ defmodule Arrow.Disruptions.ReplacementServiceUpload do
{:ok, {:ok, versioned_data}}
else
{:error, error} ->
- {:ok, {:error, [{format_warning(), []} | error |> Enum.map(&error_to_error_message/1)]}}
+ {:ok, {:error, [{format_warning(), []} | Enum.map(error, &error_to_error_message/1)]}}
end
rescue
e ->
@@ -72,11 +70,11 @@ defmodule Arrow.Disruptions.ReplacementServiceUpload do
@spec error_to_error_message(error_tab()) :: tuple()
def error_to_error_message({tab_name, errors}) when is_binary(tab_name) and is_list(errors) do
- {"#{tab_name}", errors |> Enum.map(&error_to_error_message/1)}
+ {"#{tab_name}", Enum.map(errors, &error_to_error_message/1)}
end
def error_to_error_message({idx, {:error, row_data}}) when is_list(row_data) do
- row_errors = Enum.into(row_data, %{}) |> Enum.map(fn {k, v} -> "#{error_type(k)}: #{v}" end)
+ row_errors = row_data |> Map.new() |> Enum.map(fn {k, v} -> "#{error_type(k)}: #{v}" end)
"Row #{idx}, #{row_errors}"
end
@@ -105,7 +103,7 @@ defmodule Arrow.Disruptions.ReplacementServiceUpload do
@spec add_version(list(valid_tab())) :: {:ok, versioned_data()}
def add_version(data) do
- {:ok, data |> Enum.into(%{"version" => @version})}
+ {:ok, Enum.into(data, %{"version" => @version})}
end
@spec get_xlsx_tab_tids(any()) :: {:error, list(String.t())} | {:ok, map()}
@@ -153,8 +151,8 @@ defmodule Arrow.Disruptions.ReplacementServiceUpload do
|> Enum.map(&parse_tab/1)
|> Enum.split_with(&(elem(&1, 0) == :ok))
|> case do
- {rows, []} -> {:ok, rows |> Enum.map(&elem(&1, 1))}
- {_, errors} -> {:error, errors |> Enum.map(&elem(&1, 1))}
+ {rows, []} -> {:ok, Enum.map(rows, &elem(&1, 1))}
+ {_, errors} -> {:error, Enum.map(errors, &elem(&1, 1))}
end
end
@@ -240,7 +238,7 @@ defmodule Arrow.Disruptions.ReplacementServiceUpload do
end
defp headers_as_string do
- @headers_regex |> Enum.map_join(", ", &header_to_string/1)
+ Enum.map_join(@headers_regex, ", ", &header_to_string/1)
end
@spec validate_headers(list(xlsxir_types())) ::
@@ -262,7 +260,7 @@ defmodule Arrow.Disruptions.ReplacementServiceUpload do
|> Enum.split_with(&elem(&1, 1))
|> case do
{headers, []} ->
- {:ok, headers |> Enum.map(fn {key, _val} -> elem(key, 0) end)}
+ {:ok, Enum.map(headers, fn {key, _val} -> elem(key, 0) end)}
{_, missing} ->
missing_header =
@@ -308,9 +306,9 @@ defmodule Arrow.Disruptions.ReplacementServiceUpload do
{:error, ["Duplicate row(s) for #{Enum.join(dups, " and ")} trip times"]}
is_nil(first) or is_nil(last) ->
- values = [{first, "First"}, {last, "Last"}] |> Enum.reject(&elem(&1, 0))
+ values = Enum.reject([{first, "First"}, {last, "Last"}], &elem(&1, 0))
- {:error, ["Missing row for #{values |> Enum.map_join(" and ", &elem(&1, 1))} trip times"]}
+ {:error, ["Missing row for #{Enum.map_join(values, " and ", &elem(&1, 1))} trip times"]}
first_trips_after_last?(first, last) ->
{:error, ["First trip times must be after Last trip times"]}
@@ -320,10 +318,10 @@ defmodule Arrow.Disruptions.ReplacementServiceUpload do
end
end
- defp first_trips_after_last?(
- %{first_trip_0: first_trip_0, first_trip_1: first_trip_1},
- %{last_trip_0: last_trip_0, last_trip_1: last_trip_1}
- ) do
+ defp first_trips_after_last?(%{first_trip_0: first_trip_0, first_trip_1: first_trip_1}, %{
+ last_trip_0: last_trip_0,
+ last_trip_1: last_trip_1
+ }) do
first_trip_0 > last_trip_0 or first_trip_1 > last_trip_1
end
@@ -349,7 +347,7 @@ defmodule Arrow.Disruptions.ReplacementServiceUpload do
end)
|> Enum.split_with(fn {_i, r} -> elem(r, 0) == :ok end)
|> case do
- {rows, []} -> {:ok, rows |> Enum.map(fn {_idx, {:ok, data}} -> Map.new(data) end)}
+ {rows, []} -> {:ok, Enum.map(rows, fn {_idx, {:ok, data}} -> Map.new(data) end)}
{_, errors} -> {:error, errors}
end
end
@@ -454,7 +452,7 @@ defmodule Arrow.Disruptions.ReplacementServiceUpload do
def truncate_seconds(time_string) when is_binary(time_string) do
case String.split(time_string, ":") do
- [_hr, _min, _sec] -> {:ok, String.split(time_string, ":") |> Enum.take(2) |> Enum.join(":")}
+ [_hr, _min, _sec] -> {:ok, time_string |> String.split(":") |> Enum.take(2) |> Enum.join(":")}
[_hr, _min] -> {:ok, time_string}
_ -> {:error, time_string}
end
@@ -476,7 +474,7 @@ defmodule Arrow.Disruptions.ReplacementServiceUpload do
end
def to_time_int_list(time_string) when is_binary(time_string) do
- {:ok, String.split(time_string, ":") |> Enum.map(&String.to_integer/1) |> Enum.take(2)}
+ {:ok, time_string |> String.split(":") |> Enum.map(&String.to_integer/1) |> Enum.take(2)}
end
def to_time_int_list(time_string) do
diff --git a/lib/arrow/gtfs.ex b/lib/arrow/gtfs.ex
index 7b6f17879..1833df563 100644
--- a/lib/arrow/gtfs.ex
+++ b/lib/arrow/gtfs.ex
@@ -2,15 +2,17 @@ defmodule Arrow.Gtfs do
@moduledoc """
GTFS import logic.
"""
+ import Ecto.Query
+
+ alias Arrow.Gtfs.FeedInfo
alias Arrow.Gtfs.Importable
alias Arrow.Gtfs.JobHelper
alias Arrow.Repo
alias Arrow.Repo.ForeignKeyConstraint
- import Ecto.Query
require Logger
- @import_timeout_ms :timer.minutes(10)
+ @import_timeout_ms to_timeout(minute: 10)
@doc """
Loads a GTFS archive into Arrow's gtfs_* DB tables,
@@ -40,9 +42,7 @@ defmodule Arrow.Gtfs do
if validate_only? do
"doesn't matter for validation"
else
- Arrow.Repo.one(
- from info in Arrow.Gtfs.FeedInfo, where: info.id == "mbta-ma-us", select: info.version
- )
+ Arrow.Repo.one(from info in FeedInfo, where: info.id == "mbta-ma-us", select: info.version)
end
with :ok <- validate_required_files(unzip),
@@ -92,9 +92,7 @@ defmodule Arrow.Gtfs do
end
end
- {elapsed_ms, result} =
- fn -> Repo.transaction(transaction, timeout: @import_timeout_ms) end
- |> :timer.tc(:millisecond)
+ {elapsed_ms, result} = :timer.tc(fn -> Repo.transaction(transaction, timeout: @import_timeout_ms) end, :millisecond)
action = if validate_only?, do: "validation", else: "import"
Logger.info("GTFS archive #{action} transaction completed elapsed_ms=#{elapsed_ms}")
@@ -123,7 +121,8 @@ defmodule Arrow.Gtfs do
:ok
else
missing =
- MapSet.difference(required_files(), files)
+ required_files()
+ |> MapSet.difference(files)
|> Enum.sort()
|> Enum.join(",")
@@ -140,7 +139,7 @@ defmodule Arrow.Gtfs do
defp importable_schemas do
# Listed in the order in which they should be imported.
[
- Arrow.Gtfs.FeedInfo,
+ FeedInfo,
Arrow.Gtfs.Agency,
Arrow.Gtfs.Checkpoint,
Arrow.Gtfs.Level,
@@ -178,9 +177,7 @@ defmodule Arrow.Gtfs do
# from non-GTFS tables.
fkey_names = Enum.map_join(external_fkeys, ",", & &1.name)
- Logger.info(
- "temporarily dropping external foreign keys referencing GTFS tables fkey_names=#{fkey_names}"
- )
+ Logger.info("temporarily dropping external foreign keys referencing GTFS tables fkey_names=#{fkey_names}")
Enum.each(external_fkeys, &ForeignKeyConstraint.drop/1)
@@ -193,9 +190,7 @@ defmodule Arrow.Gtfs do
defp add_external_fkeys(external_fkeys) do
fkey_names = Enum.map_join(external_fkeys, ",", & &1.name)
- Logger.info(
- "re-adding external foreign keys referencing GTFS tables fkey_names=#{fkey_names}"
- )
+ Logger.info("re-adding external foreign keys referencing GTFS tables fkey_names=#{fkey_names}")
Enum.each(external_fkeys, fn fkey ->
Logger.info("re-adding foreign key fkey_name=#{fkey.name}")
diff --git a/lib/arrow/gtfs/agency.ex b/lib/arrow/gtfs/agency.ex
index 2764bb07a..7f14442b4 100644
--- a/lib/arrow/gtfs/agency.ex
+++ b/lib/arrow/gtfs/agency.ex
@@ -6,6 +6,7 @@ defmodule Arrow.Gtfs.Agency do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
@type t :: %__MODULE__{
diff --git a/lib/arrow/gtfs/calendar.ex b/lib/arrow/gtfs/calendar.ex
index 0079a7a96..c1a0c99d6 100644
--- a/lib/arrow/gtfs/calendar.ex
+++ b/lib/arrow/gtfs/calendar.ex
@@ -6,12 +6,15 @@ defmodule Arrow.Gtfs.Calendar do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
+ alias Arrow.Gtfs.Service
+
@primary_key false
@type t :: %__MODULE__{
- service: Arrow.Gtfs.Service.t() | Ecto.Association.NotLoaded.t(),
+ service: Service.t() | Ecto.Association.NotLoaded.t(),
monday: boolean,
tuesday: boolean,
wednesday: boolean,
@@ -24,7 +27,7 @@ defmodule Arrow.Gtfs.Calendar do
}
schema "gtfs_calendars" do
- belongs_to :service, Arrow.Gtfs.Service, primary_key: true
+ belongs_to :service, Service, primary_key: true
for day <- ~w[monday tuesday wednesday thursday friday saturday sunday]a do
field day, :boolean
@@ -42,9 +45,7 @@ defmodule Arrow.Gtfs.Calendar do
attrs,
~w[service_id monday tuesday wednesday thursday friday saturday sunday start_date end_date]a
)
- |> validate_required(
- ~w[service_id monday tuesday wednesday thursday friday saturday sunday start_date end_date]a
- )
+ |> validate_required(~w[service_id monday tuesday wednesday thursday friday saturday sunday start_date end_date]a)
|> assoc_constraint(:service)
|> validate_start_date_not_after_end_date()
end
diff --git a/lib/arrow/gtfs/calendar_date.ex b/lib/arrow/gtfs/calendar_date.ex
index cd8aa6f13..15310ad17 100644
--- a/lib/arrow/gtfs/calendar_date.ex
+++ b/lib/arrow/gtfs/calendar_date.ex
@@ -6,10 +6,13 @@ defmodule Arrow.Gtfs.CalendarDate do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
+ alias Arrow.Gtfs.Service
+
@type t :: %__MODULE__{
- service: Arrow.Gtfs.Service.t() | Ecto.Association.NotLoaded.t(),
+ service: Service.t() | Ecto.Association.NotLoaded.t(),
date: Date.t(),
exception_type: atom,
holiday_name: String.t() | nil
@@ -18,7 +21,7 @@ defmodule Arrow.Gtfs.CalendarDate do
@primary_key false
schema "gtfs_calendar_dates" do
- belongs_to :service, Arrow.Gtfs.Service, primary_key: true
+ belongs_to :service, Service, primary_key: true
field :date, :date, primary_key: true
field :exception_type, Ecto.Enum, values: [added: 1, removed: 2]
field :holiday_name, :string
diff --git a/lib/arrow/gtfs/checkpoint.ex b/lib/arrow/gtfs/checkpoint.ex
index 76e126515..f344a1493 100644
--- a/lib/arrow/gtfs/checkpoint.ex
+++ b/lib/arrow/gtfs/checkpoint.ex
@@ -6,6 +6,7 @@ defmodule Arrow.Gtfs.Checkpoint do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
@type t :: %__MODULE__{
diff --git a/lib/arrow/gtfs/direction.ex b/lib/arrow/gtfs/direction.ex
index d9513619d..c883d4e04 100644
--- a/lib/arrow/gtfs/direction.ex
+++ b/lib/arrow/gtfs/direction.ex
@@ -6,10 +6,13 @@ defmodule Arrow.Gtfs.Direction do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
+ alias Arrow.Gtfs.Route
+
@type t :: %__MODULE__{
- route: Arrow.Gtfs.Route.t() | Ecto.Association.NotLoaded.t(),
+ route: Route.t() | Ecto.Association.NotLoaded.t(),
direction_id: 0 | 1,
desc: String.t(),
destination: String.t()
@@ -18,7 +21,7 @@ defmodule Arrow.Gtfs.Direction do
@primary_key false
schema "gtfs_directions" do
- belongs_to :route, Arrow.Gtfs.Route, primary_key: true
+ belongs_to :route, Route, primary_key: true
field :direction_id, :integer, primary_key: true
field :desc, :string
field :destination, :string
diff --git a/lib/arrow/gtfs/feed_info.ex b/lib/arrow/gtfs/feed_info.ex
index 477cbbf96..a96653609 100644
--- a/lib/arrow/gtfs/feed_info.ex
+++ b/lib/arrow/gtfs/feed_info.ex
@@ -6,6 +6,7 @@ defmodule Arrow.Gtfs.FeedInfo do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
@type t :: %__MODULE__{
@@ -40,9 +41,7 @@ defmodule Arrow.Gtfs.FeedInfo do
attrs,
~w[id publisher_name publisher_url lang start_date end_date version contact_email]a
)
- |> validate_required(
- ~w[id publisher_name publisher_url lang start_date end_date version contact_email]a
- )
+ |> validate_required(~w[id publisher_name publisher_url lang start_date end_date version contact_email]a)
|> validate_start_date_before_end_date()
end
@@ -50,7 +49,7 @@ defmodule Arrow.Gtfs.FeedInfo do
start_date = fetch_field!(changeset, :start_date)
end_date = fetch_field!(changeset, :end_date)
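+    # Date.before?/2 reads the same as Date.compare(start_date, end_date) == :lt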
- if Date.compare(start_date, end_date) == :lt do
+ if Date.before?(start_date, end_date) do
changeset
else
add_error(changeset, :dates, "start date should be before end date")
diff --git a/lib/arrow/gtfs/import_worker.ex b/lib/arrow/gtfs/import_worker.ex
index cac604089..2f0eb56e1 100644
--- a/lib/arrow/gtfs/import_worker.ex
+++ b/lib/arrow/gtfs/import_worker.ex
@@ -19,6 +19,8 @@ defmodule Arrow.Gtfs.ImportWorker do
states: Oban.Job.states() -- [:completed, :discarded, :cancelled]
]
+ alias Arrow.Gtfs.JobHelper
+
@impl Oban.Worker
def perform(%Oban.Job{args: %{"s3_uri" => s3_uri, "archive_version" => new_version}} = job) do
with {:ok, unzip} <- Arrow.Gtfs.Archive.to_unzip_struct(s3_uri) do
@@ -32,10 +34,10 @@ defmodule Arrow.Gtfs.ImportWorker do
# unresponsive for even longer.
# Import jobs generally take around 5 minutes.
@impl Oban.Worker
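+  # to_timeout/1 (Kernel) yields the same 10-minute cap as the :timer.minutes/1 call it replaces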
- def timeout(_job), do: :timer.minutes(10)
+ def timeout(_job), do: to_timeout(minute: 10)
- @spec check_jobs(Arrow.Gtfs.JobHelper.status_filter()) :: list(map)
+ @spec check_jobs(JobHelper.status_filter()) :: list(map)
def check_jobs(status_filter) do
- Arrow.Gtfs.JobHelper.check_jobs(__MODULE__, status_filter)
+ JobHelper.check_jobs(__MODULE__, status_filter)
end
end
diff --git a/lib/arrow/gtfs/importable.ex b/lib/arrow/gtfs/importable.ex
index ceca8b45d..258e452d7 100644
--- a/lib/arrow/gtfs/importable.ex
+++ b/lib/arrow/gtfs/importable.ex
@@ -120,14 +120,15 @@ defmodule Arrow.Gtfs.Importable do
# then converts it back to a plain map compatible with `Repo.insert_all`.
@spec cast_to_insertable(csv_row(), module) :: %{atom => term}
defp cast_to_insertable(row, schema) do
- struct(schema)
+ schema
+ |> struct()
|> schema.changeset(row)
|> Changeset.apply_action!(:insert)
|> ImportHelper.schema_struct_to_map()
end
defp replace_headers(csv_stream, mappings) do
- blob_with_headers = Enum.at(csv_stream, 0) |> to_string()
+ blob_with_headers = csv_stream |> Enum.at(0) |> to_string()
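+    # Swap each header token for its mapped name; unmapped headers pass through unchanged (the f flag limits matching to the first line)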
adjusted = Regex.replace(~r/[^,\n]+/f, blob_with_headers, &Map.get(mappings, &1, &1))
Stream.concat([adjusted], Stream.drop(csv_stream, 1))
diff --git a/lib/arrow/gtfs/job_helper.ex b/lib/arrow/gtfs/job_helper.ex
index 8db0772e8..1ea8435fe 100644
--- a/lib/arrow/gtfs/job_helper.ex
+++ b/lib/arrow/gtfs/job_helper.ex
@@ -27,7 +27,8 @@ defmodule Arrow.Gtfs.JobHelper do
worker = inspect(worker_mod)
states = Map.fetch!(job_filters(), status_filter)
- Arrow.Repo.all(from j in Oban.Job, where: [worker: ^worker], where: j.state in ^states)
+ from(j in Oban.Job, where: [worker: ^worker], where: j.state in ^states)
+ |> Arrow.Repo.all()
|> Enum.map(
&Map.take(
&1,
diff --git a/lib/arrow/gtfs/level.ex b/lib/arrow/gtfs/level.ex
index b2ac47647..85d52c318 100644
--- a/lib/arrow/gtfs/level.ex
+++ b/lib/arrow/gtfs/level.ex
@@ -6,6 +6,7 @@ defmodule Arrow.Gtfs.Level do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
@type t :: %__MODULE__{
diff --git a/lib/arrow/gtfs/line.ex b/lib/arrow/gtfs/line.ex
index d9d78fc21..3a703c0f1 100644
--- a/lib/arrow/gtfs/line.ex
+++ b/lib/arrow/gtfs/line.ex
@@ -6,6 +6,7 @@ defmodule Arrow.Gtfs.Line do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
@type t :: %__MODULE__{
diff --git a/lib/arrow/gtfs/route.ex b/lib/arrow/gtfs/route.ex
index 97d30ea62..9a88ad956 100644
--- a/lib/arrow/gtfs/route.ex
+++ b/lib/arrow/gtfs/route.ex
@@ -6,11 +6,17 @@ defmodule Arrow.Gtfs.Route do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
+ alias Arrow.Gtfs.Agency
+ alias Arrow.Gtfs.Line
+ alias Arrow.Gtfs.RoutePattern
+ alias Ecto.Association.NotLoaded
+
@type t :: %__MODULE__{
id: String.t(),
- agency: Arrow.Gtfs.Agency.t() | Ecto.Association.NotLoaded.t(),
+ agency: Agency.t() | NotLoaded.t(),
short_name: String.t() | nil,
long_name: String.t() | nil,
desc: String.t(),
@@ -20,16 +26,16 @@ defmodule Arrow.Gtfs.Route do
text_color: String.t() | nil,
sort_order: integer,
fare_class: String.t(),
- line: Arrow.Gtfs.Line.t() | Ecto.Association.NotLoaded.t(),
+ line: Line.t() | NotLoaded.t(),
listed_route: atom,
network_id: String.t(),
- route_patterns: list(Arrow.Gtfs.RoutePattern.t()) | Ecto.Association.NotLoaded.t()
+ route_patterns: list(RoutePattern.t()) | NotLoaded.t()
}
@route_type_values Enum.with_index(~w[light_rail heavy_rail commuter_rail bus ferry]a)
schema "gtfs_routes" do
- belongs_to :agency, Arrow.Gtfs.Agency
+ belongs_to :agency, Agency
field :short_name, :string
field :long_name, :string
field :desc, :string
@@ -41,13 +47,13 @@ defmodule Arrow.Gtfs.Route do
field :text_color, :string
field :sort_order, :integer
field :fare_class, :string
- belongs_to :line, Arrow.Gtfs.Line
+ belongs_to :line, Line
field :listed_route, Ecto.Enum, values: Enum.with_index(~w[Included Excluded]a)
field :network_id, :string
has_many :directions, Arrow.Gtfs.Direction
has_many :trips, Arrow.Gtfs.Trip
- has_many :route_patterns, Arrow.Gtfs.RoutePattern
+ has_many :route_patterns, RoutePattern
end
def changeset(route, attrs) do
diff --git a/lib/arrow/gtfs/route_pattern.ex b/lib/arrow/gtfs/route_pattern.ex
index c44ae4934..383c9335d 100644
--- a/lib/arrow/gtfs/route_pattern.ex
+++ b/lib/arrow/gtfs/route_pattern.ex
@@ -6,34 +6,35 @@ defmodule Arrow.Gtfs.RoutePattern do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
+ alias Arrow.Gtfs.Route
+ alias Arrow.Gtfs.Trip
+ alias Ecto.Association.NotLoaded
+
@type t :: %__MODULE__{
id: String.t(),
- route: Arrow.Gtfs.Route.t() | Ecto.Association.NotLoaded.t(),
+ route: Route.t() | NotLoaded.t(),
direction_id: 0 | 1,
- directions: list(Arrow.Gtfs.Direction.t()) | Ecto.Association.NotLoaded.t(),
+ directions: list(Arrow.Gtfs.Direction.t()) | NotLoaded.t(),
name: String.t(),
time_desc: String.t() | nil,
typicality: atom,
sort_order: integer,
# The Trip that exemplifies this RoutePattern.
- representative_trip: Arrow.Gtfs.Trip.t() | Ecto.Association.NotLoaded.t(),
+ representative_trip: Trip.t() | NotLoaded.t(),
# All the Trips that use this RoutePattern.
- trips: list(Arrow.Gtfs.Trip.t()) | Ecto.Association.NotLoaded.t(),
+ trips: list(Trip.t()) | NotLoaded.t(),
canonical: atom
}
- @typicality_values Enum.with_index(
- ~w[not_defined typical deviation atypical diversion typical_but_unscheduled]a
- )
+ @typicality_values Enum.with_index(~w[not_defined typical deviation atypical diversion typical_but_unscheduled]a)
- @canonicality_values Enum.with_index(
- ~w[no_canonical_patterns_defined_for_route canonical not_canonical]a
- )
+ @canonicality_values Enum.with_index(~w[no_canonical_patterns_defined_for_route canonical not_canonical]a)
schema "gtfs_route_patterns" do
- belongs_to :route, Arrow.Gtfs.Route, type: :string
+ belongs_to :route, Route, type: :string
field :direction_id, :integer
# I couldn't find a way to directly associate the specific Direction
# here--composite FK relations aren't supported.
@@ -45,8 +46,8 @@ defmodule Arrow.Gtfs.RoutePattern do
field :time_desc, :string
field :typicality, Ecto.Enum, values: @typicality_values
field :sort_order, :integer
- belongs_to :representative_trip, Arrow.Gtfs.Trip
- has_many :trips, Arrow.Gtfs.Trip
+ belongs_to :representative_trip, Trip
+ has_many :trips, Trip
field :canonical, Ecto.Enum, values: @canonicality_values
end
@@ -62,9 +63,7 @@ defmodule Arrow.Gtfs.RoutePattern do
attrs,
~w[id route_id direction_id name time_desc typicality sort_order representative_trip_id canonical]a
)
- |> validate_required(
- ~w[id route_id direction_id name typicality sort_order representative_trip_id canonical]a
- )
+ |> validate_required(~w[id route_id direction_id name typicality sort_order representative_trip_id canonical]a)
|> assoc_constraint(:route)
|> assoc_constraint(:representative_trip)
end
diff --git a/lib/arrow/gtfs/schema.ex b/lib/arrow/gtfs/schema.ex
index e8b28b956..65527f066 100644
--- a/lib/arrow/gtfs/schema.ex
+++ b/lib/arrow/gtfs/schema.ex
@@ -11,9 +11,10 @@ defmodule Arrow.Gtfs.Schema do
defmacro __using__(_) do
quote do
- use Ecto.Schema
@behaviour Arrow.Gtfs.Importable
+ use Ecto.Schema
+
import Arrow.Gtfs.ImportHelper
@primary_key {:id, :string, []}
diff --git a/lib/arrow/gtfs/service.ex b/lib/arrow/gtfs/service.ex
index e50f7c45f..9539496da 100644
--- a/lib/arrow/gtfs/service.ex
+++ b/lib/arrow/gtfs/service.ex
@@ -7,16 +7,22 @@ defmodule Arrow.Gtfs.Service do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
+ alias Arrow.Gtfs.Calendar
+ alias Arrow.Gtfs.CalendarDate
+ alias Arrow.Gtfs.Importable
+ alias Ecto.Association.NotLoaded
+
@type t :: %__MODULE__{
- calendar: Arrow.Gtfs.Calendar.t() | Ecto.Association.NotLoaded.t(),
- calendar_dates: list(Arrow.Gtfs.CalendarDate.t()) | Ecto.Association.NotLoaded.t()
+ calendar: Calendar.t() | NotLoaded.t(),
+ calendar_dates: list(CalendarDate.t()) | NotLoaded.t()
}
schema "gtfs_services" do
- has_one :calendar, Arrow.Gtfs.Calendar
- has_many :calendar_dates, Arrow.Gtfs.CalendarDate
+ has_one :calendar, Calendar
+ has_many :calendar_dates, CalendarDate
has_many :trips, Arrow.Gtfs.Trip
end
@@ -27,10 +33,10 @@ defmodule Arrow.Gtfs.Service do
|> validate_required(~w[id]a)
end
- @impl Arrow.Gtfs.Importable
+ @impl Importable
def filenames, do: ["calendar.txt", "calendar_dates.txt"]
- @impl Arrow.Gtfs.Importable
+ @impl Importable
def import(unzip) do
# This table's IDs are the union of those found in
# calendar.txt and calendar_dates.txt.
@@ -41,6 +47,6 @@ defmodule Arrow.Gtfs.Service do
|> Stream.uniq_by(& &1["service_id"])
|> Stream.map(&%{"id" => Map.fetch!(&1, "service_id")})
- Arrow.Gtfs.Importable.cast_and_insert(service_rows, __MODULE__)
+ Importable.cast_and_insert(service_rows, __MODULE__)
end
end
diff --git a/lib/arrow/gtfs/shape.ex b/lib/arrow/gtfs/shape.ex
index 74ef8795d..5a8d20a70 100644
--- a/lib/arrow/gtfs/shape.ex
+++ b/lib/arrow/gtfs/shape.ex
@@ -8,17 +8,23 @@ defmodule Arrow.Gtfs.Shape do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
+ alias Arrow.Gtfs.Importable
+ alias Arrow.Gtfs.ShapePoint
+ alias Arrow.Gtfs.Trip
+ alias Ecto.Association.NotLoaded
+
@type t :: %__MODULE__{
id: String.t(),
- points: list(Arrow.Gtfs.ShapePoint.t()) | Ecto.Association.NotLoaded.t(),
- trips: list(Arrow.Gtfs.Trip.t()) | Ecto.Association.NotLoaded.t()
+ points: list(ShapePoint.t()) | NotLoaded.t(),
+ trips: list(Trip.t()) | NotLoaded.t()
}
schema "gtfs_shapes" do
- has_many :points, Arrow.Gtfs.ShapePoint
- has_many :trips, Arrow.Gtfs.Trip
+ has_many :points, ShapePoint
+ has_many :trips, Trip
end
# This shape's points should be put in a separate list and imported
@@ -31,16 +37,16 @@ defmodule Arrow.Gtfs.Shape do
|> validate_required(~w[id]a)
end
- @impl Arrow.Gtfs.Importable
+ @impl Importable
def filenames, do: ["shapes.txt"]
- @impl Arrow.Gtfs.Importable
+ @impl Importable
def import(unzip) do
[filename] = filenames()
unzip
|> Arrow.Gtfs.ImportHelper.stream_csv_rows(filename)
|> Stream.uniq_by(& &1["shape_id"])
- |> Arrow.Gtfs.Importable.cast_and_insert(__MODULE__)
+ |> Importable.cast_and_insert(__MODULE__)
end
end
diff --git a/lib/arrow/gtfs/shape_point.ex b/lib/arrow/gtfs/shape_point.ex
index d88815cf0..16be2a271 100644
--- a/lib/arrow/gtfs/shape_point.ex
+++ b/lib/arrow/gtfs/shape_point.ex
@@ -8,10 +8,14 @@ defmodule Arrow.Gtfs.ShapePoint do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
+ alias Arrow.Gtfs.Importable
+ alias Arrow.Gtfs.Shape
+
@type t :: %__MODULE__{
- shape: Arrow.Gtfs.Shape.t() | Ecto.Association.NotLoaded.t(),
+ shape: Shape.t() | Ecto.Association.NotLoaded.t(),
sequence: integer,
lat: float,
lon: float,
@@ -21,7 +25,7 @@ defmodule Arrow.Gtfs.ShapePoint do
@primary_key false
schema "gtfs_shape_points" do
- belongs_to :shape, Arrow.Gtfs.Shape, primary_key: true
+ belongs_to :shape, Shape, primary_key: true
field :lat, :float
field :lon, :float
field :sequence, :integer, primary_key: true
@@ -40,12 +44,12 @@ defmodule Arrow.Gtfs.ShapePoint do
|> assoc_constraint(:shape)
end
- @impl Arrow.Gtfs.Importable
+ @impl Importable
def filenames, do: ["shapes.txt"]
- @impl Arrow.Gtfs.Importable
+ @impl Importable
def import(unzip) do
- Arrow.Gtfs.Importable.import_using_copy(
+ Importable.import_using_copy(
__MODULE__,
unzip,
header_mappings: %{
diff --git a/lib/arrow/gtfs/stop.ex b/lib/arrow/gtfs/stop.ex
index e8cea9435..fdda5e635 100644
--- a/lib/arrow/gtfs/stop.ex
+++ b/lib/arrow/gtfs/stop.ex
@@ -6,9 +6,15 @@ defmodule Arrow.Gtfs.Stop do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
import Ecto.Query
+
+ alias Arrow.Gtfs.Level
+ alias Arrow.Gtfs.Stop
+ alias Arrow.Gtfs.StopTime
alias Arrow.Repo
+ alias Ecto.Association.NotLoaded
@derive {Jason.Encoder, only: [:name, :desc, :lat, :lon, :id]}
@@ -24,24 +30,20 @@ defmodule Arrow.Gtfs.Stop do
zone_id: String.t() | nil,
address: String.t() | nil,
url: String.t() | nil,
- level: Arrow.Gtfs.Level.t() | Ecto.Association.NotLoaded.t() | nil,
+ level: Level.t() | NotLoaded.t() | nil,
location_type: atom,
- parent_station: t() | Ecto.Association.NotLoaded.t() | nil,
+ parent_station: t() | NotLoaded.t() | nil,
wheelchair_boarding: atom,
municipality: String.t() | nil,
on_street: String.t() | nil,
at_street: String.t() | nil,
vehicle_type: atom,
- times: list(Arrow.Gtfs.StopTime.t()) | Ecto.Association.NotLoaded.t()
+ times: list(StopTime.t()) | NotLoaded.t()
}
- @location_type_values Enum.with_index(
- ~w[stop_platform parent_station entrance_exit generic_node boarding_area]a
- )
+ @location_type_values Enum.with_index(~w[stop_platform parent_station entrance_exit generic_node boarding_area]a)
- @wheelchair_boarding_values Enum.with_index(
- ~w[no_info_inherit_from_parent accessible not_accessible]a
- )
+ @wheelchair_boarding_values Enum.with_index(~w[no_info_inherit_from_parent accessible not_accessible]a)
@vehicle_type_values Enum.with_index(~w[light_rail heavy_rail commuter_rail bus ferry]a)
@@ -56,15 +58,15 @@ defmodule Arrow.Gtfs.Stop do
field :zone_id, :string
field :address, :string
field :url, :string
- belongs_to :level, Arrow.Gtfs.Level
+ belongs_to :level, Level
field :location_type, Ecto.Enum, values: @location_type_values
- belongs_to :parent_station, Arrow.Gtfs.Stop
+ belongs_to :parent_station, Stop
field :wheelchair_boarding, Ecto.Enum, values: @wheelchair_boarding_values
field :municipality, :string
field :on_street, :string
field :at_street, :string
field :vehicle_type, Ecto.Enum, values: @vehicle_type_values
- has_many :times, Arrow.Gtfs.StopTime
+ has_many :times, StopTime
end
def changeset(stop, attrs) do
@@ -99,7 +101,7 @@ defmodule Arrow.Gtfs.Stop do
iex> Arrow.Gtfs.Stop.get_stops_within_mile(nil, {42.3774, -72.1189})
[%Arrow.Gtfs.Stop, ...]
"""
- @spec get_stops_within_mile(String.t() | nil, {float(), float()}) :: list(Arrow.Gtfs.Stop.t())
+ @spec get_stops_within_mile(String.t() | nil, {float(), float()}) :: list(Stop.t())
def get_stops_within_mile(arrow_stop_id, {lat, lon}) do
conditions =
dynamic(
@@ -119,7 +121,7 @@ defmodule Arrow.Gtfs.Stop do
end
query =
- from(s in Arrow.Gtfs.Stop,
+ from(s in Stop,
where: ^conditions
)
diff --git a/lib/arrow/gtfs/stop_time.ex b/lib/arrow/gtfs/stop_time.ex
index a994173d4..539aa7a2b 100644
--- a/lib/arrow/gtfs/stop_time.ex
+++ b/lib/arrow/gtfs/stop_time.ex
@@ -6,19 +6,26 @@ defmodule Arrow.Gtfs.StopTime do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
+ alias Arrow.Gtfs.Checkpoint
+ alias Arrow.Gtfs.Importable
+ alias Arrow.Gtfs.Stop
+ alias Arrow.Gtfs.Trip
+ alias Ecto.Association.NotLoaded
+
@type t :: %__MODULE__{
- trip: Arrow.Gtfs.Trip.t() | Ecto.Association.NotLoaded.t(),
+ trip: Trip.t() | NotLoaded.t(),
stop_sequence: integer,
arrival_time: String.t(),
departure_time: String.t(),
- stop: Arrow.Gtfs.Stop.t() | Ecto.Association.NotLoaded.t(),
+ stop: Stop.t() | NotLoaded.t(),
stop_headsign: String.t() | nil,
pickup_type: atom,
drop_off_type: atom,
timepoint: atom | nil,
- checkpoint: Arrow.Gtfs.Checkpoint.t() | Ecto.Association.NotLoaded.t() | nil,
+ checkpoint: Checkpoint.t() | NotLoaded.t() | nil,
continuous_pickup: atom | nil,
continuous_drop_off: atom | nil
}
@@ -40,7 +47,7 @@ defmodule Arrow.Gtfs.StopTime do
@primary_key false
schema "gtfs_stop_times" do
- belongs_to :trip, Arrow.Gtfs.Trip, primary_key: true
+ belongs_to :trip, Trip, primary_key: true
field :stop_sequence, :integer, primary_key: true
    # arrival_time and departure_time are kept as strings, to preserve after-midnight times like 24:15:00.
@@ -48,12 +55,12 @@ defmodule Arrow.Gtfs.StopTime do
field :arrival_time, :string
field :departure_time, :string
- belongs_to :stop, Arrow.Gtfs.Stop
+ belongs_to :stop, Stop
field :stop_headsign, :string
field :pickup_type, Ecto.Enum, values: @pickup_drop_off_types
field :drop_off_type, Ecto.Enum, values: @pickup_drop_off_types
field :timepoint, Ecto.Enum, values: Enum.with_index(~w[approximate exact]a)
- belongs_to :checkpoint, Arrow.Gtfs.Checkpoint
+ belongs_to :checkpoint, Checkpoint
field :continuous_pickup, Ecto.Enum, values: @continuous_pickup_drop_off_types
field :continuous_drop_off, Ecto.Enum, values: @continuous_pickup_drop_off_types
end
@@ -70,17 +77,15 @@ defmodule Arrow.Gtfs.StopTime do
attrs,
~w[trip_id stop_sequence arrival_time departure_time stop_id stop_headsign pickup_type drop_off_type timepoint checkpoint_id continuous_pickup continuous_drop_off]a
)
- |> validate_required(
- ~w[trip_id stop_sequence arrival_time departure_time stop_id pickup_type drop_off_type]a
- )
+ |> validate_required(~w[trip_id stop_sequence arrival_time departure_time stop_id pickup_type drop_off_type]a)
|> assoc_constraint(:trip)
|> assoc_constraint(:stop)
|> assoc_constraint(:checkpoint)
end
- @impl Arrow.Gtfs.Importable
+ @impl Importable
def filenames, do: ["stop_times.txt"]
- @impl Arrow.Gtfs.Importable
- def import(unzip), do: Arrow.Gtfs.Importable.import_using_copy(__MODULE__, unzip)
+ @impl Importable
+ def import(unzip), do: Importable.import_using_copy(__MODULE__, unzip)
end
diff --git a/lib/arrow/gtfs/trip.ex b/lib/arrow/gtfs/trip.ex
index 2a47d3b7b..bcd8f12c8 100644
--- a/lib/arrow/gtfs/trip.ex
+++ b/lib/arrow/gtfs/trip.ex
@@ -6,39 +6,45 @@ defmodule Arrow.Gtfs.Trip do
table contents should be considered read-only otherwise.
"""
use Arrow.Gtfs.Schema
+
import Ecto.Changeset
+ alias Arrow.Gtfs.Importable
+ alias Arrow.Gtfs.Route
+ alias Arrow.Gtfs.RoutePattern
+ alias Arrow.Gtfs.Service
+ alias Arrow.Gtfs.Shape
+ alias Arrow.Gtfs.StopTime
+ alias Ecto.Association.NotLoaded
+
@type t :: %__MODULE__{
id: String.t(),
- route: Arrow.Gtfs.Route.t() | Ecto.Association.NotLoaded.t(),
- service: Arrow.Gtfs.Service.t() | Ecto.Association.NotLoaded.t(),
+ route: Route.t() | NotLoaded.t(),
+ service: Service.t() | NotLoaded.t(),
headsign: String.t(),
short_name: String.t() | nil,
direction_id: 0 | 1,
- directions: list(Arrow.Gtfs.Direction.t()) | Ecto.Association.NotLoaded.t(),
+ directions: list(Arrow.Gtfs.Direction.t()) | NotLoaded.t(),
block_id: String.t() | nil,
- shape: Arrow.Gtfs.Shape.t() | Ecto.Association.NotLoaded.t() | nil,
- shape_points: list(Arrow.Gtfs.ShapePoint.t()) | Ecto.Association.NotLoaded.t() | nil,
+ shape: Shape.t() | NotLoaded.t() | nil,
+ shape_points: list(Arrow.Gtfs.ShapePoint.t()) | NotLoaded.t() | nil,
wheelchair_accessible: atom,
route_type: atom | nil,
# The RoutePattern that this Trip follows.
- route_pattern: Arrow.Gtfs.RoutePattern.t() | Ecto.Association.NotLoaded.t(),
+ route_pattern: RoutePattern.t() | NotLoaded.t(),
# The RoutePattern, if any, for which this is the *representative* Trip.
- representing_route_pattern:
- Arrow.Gtfs.RoutePattern.t() | Ecto.Association.NotLoaded.t() | nil,
+ representing_route_pattern: RoutePattern.t() | NotLoaded.t() | nil,
bikes_allowed: atom,
- stop_times: list(Arrow.Gtfs.StopTime.t()) | Ecto.Association.NotLoaded.t()
+ stop_times: list(StopTime.t()) | NotLoaded.t()
}
- @wheelchair_accessibility_values Enum.with_index(
- ~w[no_information_inherit_from_parent accessible not_accessible]a
- )
+ @wheelchair_accessibility_values Enum.with_index(~w[no_information_inherit_from_parent accessible not_accessible]a)
@route_type_values Enum.with_index(~w[light_rail heavy_rail commuter_rail bus ferry]a)
@bike_boarding_values Enum.with_index(~w[no_information bikes_allowed bikes_not_allowed]a)
schema "gtfs_trips" do
- belongs_to :route, Arrow.Gtfs.Route
- belongs_to :service, Arrow.Gtfs.Service
+ belongs_to :route, Route
+ belongs_to :service, Service
field :headsign, :string
field :short_name, :string
field :direction_id, :integer
@@ -49,17 +55,16 @@ defmodule Arrow.Gtfs.Trip do
# manually look up the relevant Direction from `directions`.
has_many :directions, through: [:route, :directions]
field :block_id, :string
- belongs_to :shape, Arrow.Gtfs.Shape
+ belongs_to :shape, Shape
has_many :shape_points, through: [:shape, :points]
field :wheelchair_accessible, Ecto.Enum, values: @wheelchair_accessibility_values
field :route_type, Ecto.Enum, values: @route_type_values
- belongs_to :route_pattern, Arrow.Gtfs.RoutePattern
+ belongs_to :route_pattern, RoutePattern
- has_one :representing_route_pattern, Arrow.Gtfs.RoutePattern,
- foreign_key: :representative_trip_id
+ has_one :representing_route_pattern, RoutePattern, foreign_key: :representative_trip_id
field :bikes_allowed, Ecto.Enum, values: @bike_boarding_values
- has_many :stop_times, Arrow.Gtfs.StopTime, preload_order: [:stop_sequence]
+ has_many :stop_times, StopTime, preload_order: [:stop_sequence]
end
def changeset(trip, attrs) do
@@ -83,12 +88,12 @@ defmodule Arrow.Gtfs.Trip do
|> assoc_constraint(:route_pattern)
end
- @impl Arrow.Gtfs.Importable
+ @impl Importable
def filenames, do: ["trips.txt"]
- @impl Arrow.Gtfs.Importable
+ @impl Importable
def import(unzip) do
- Arrow.Gtfs.Importable.import_using_copy(
+ Importable.import_using_copy(
__MODULE__,
unzip,
header_mappings: %{
diff --git a/lib/arrow/gtfs/validation_worker.ex b/lib/arrow/gtfs/validation_worker.ex
index 73d5a9144..013d085ac 100644
--- a/lib/arrow/gtfs/validation_worker.ex
+++ b/lib/arrow/gtfs/validation_worker.ex
@@ -19,6 +19,8 @@ defmodule Arrow.Gtfs.ValidationWorker do
states: Oban.Job.states() -- [:completed, :discarded, :cancelled]
]
+ alias Arrow.Gtfs.JobHelper
+
@impl Oban.Worker
def perform(%Oban.Job{args: %{"s3_uri" => s3_uri, "archive_version" => new_version}} = job) do
with {:ok, unzip} <- Arrow.Gtfs.Archive.to_unzip_struct(s3_uri) do
@@ -32,10 +34,10 @@ defmodule Arrow.Gtfs.ValidationWorker do
# unresponsive for even longer.
# Validation jobs generally take around 2-3 minutes.
@impl Oban.Worker
- def timeout(_job), do: :timer.minutes(10)
+ def timeout(_job), do: to_timeout(minute: 10)
- @spec check_jobs(Arrow.Gtfs.JobHelper.status_filter()) :: list(map)
+ @spec check_jobs(JobHelper.status_filter()) :: list(map)
def check_jobs(status_filter) do
- Arrow.Gtfs.JobHelper.check_jobs(__MODULE__, status_filter)
+ JobHelper.check_jobs(__MODULE__, status_filter)
end
end
diff --git a/lib/arrow/hastus.ex b/lib/arrow/hastus.ex
index d382150b1..b1536f9f6 100644
--- a/lib/arrow/hastus.ex
+++ b/lib/arrow/hastus.ex
@@ -5,6 +5,9 @@ defmodule Arrow.Hastus do
import Ecto.Query, warn: false
+ alias Arrow.Hastus.Export
+ alias Arrow.Hastus.Service
+ alias Arrow.Hastus.ServiceDate
alias Arrow.Repo
@preloads [
@@ -14,8 +17,6 @@ defmodule Arrow.Hastus do
services: [:service_dates, derived_limits: [:start_stop, :end_stop]]
]
- alias Arrow.Hastus.Export
-
@doc """
Returns the list of exports.
@@ -110,8 +111,6 @@ defmodule Arrow.Hastus do
Export.changeset(export, attrs)
end
- alias Arrow.Hastus.Service
-
@doc """
Returns the list of hastus_services.
@@ -206,8 +205,6 @@ defmodule Arrow.Hastus do
Service.changeset(service, attrs)
end
- alias Arrow.Hastus.ServiceDate
-
@doc """
Returns the list of hastus_service_dates.
@@ -337,7 +334,8 @@ defmodule Arrow.Hastus do
def export_download_url(%Export{s3_path: "s3://" <> s3_path}) do
[bucket, path] = String.split(s3_path, "/", parts: 2)
- ExAws.Config.new(:s3)
+ :s3
+ |> ExAws.Config.new()
|> ExAws.S3.presigned_url(:get, bucket, path)
end
end
diff --git a/lib/arrow/hastus/derived_limit.ex b/lib/arrow/hastus/derived_limit.ex
index 437fae3c7..a27aeb340 100644
--- a/lib/arrow/hastus/derived_limit.ex
+++ b/lib/arrow/hastus/derived_limit.ex
@@ -2,18 +2,20 @@ defmodule Arrow.Hastus.DerivedLimit do
@moduledoc "schema for a disruption limit derived from a HASTUS export"
use Ecto.Schema
+
import Ecto.Changeset
alias Arrow.Gtfs.Stop
alias Arrow.Hastus.Service
+ alias Ecto.Association.NotLoaded
@type t :: %__MODULE__{
id: integer,
- service: Service.t() | Ecto.Association.NotLoaded.t(),
+ service: Service.t() | NotLoaded.t(),
service_id: integer,
- start_stop: Stop.t() | Ecto.Association.NotLoaded.t(),
+ start_stop: Stop.t() | NotLoaded.t(),
start_stop_id: String.t(),
- end_stop: Stop.t() | Ecto.Association.NotLoaded.t(),
+ end_stop: Stop.t() | NotLoaded.t(),
end_stop_id: String.t()
}
diff --git a/lib/arrow/hastus/export.ex b/lib/arrow/hastus/export.ex
index 4cfb2043c..560e822ec 100644
--- a/lib/arrow/hastus/export.ex
+++ b/lib/arrow/hastus/export.ex
@@ -9,13 +9,14 @@ defmodule Arrow.Hastus.Export do
alias Arrow.Gtfs.Line
alias Arrow.Hastus.Service
alias Arrow.Hastus.TripRouteDirection
+ alias Ecto.Association.NotLoaded
@type t :: %__MODULE__{
s3_path: String.t(),
- services: list(Service.t()) | Ecto.Association.NotLoaded.t(),
- trip_route_directions: list(TripRouteDirection.t()) | Ecto.Association.NotLoaded.t(),
- line: Line.t() | Ecto.Association.NotLoaded.t(),
- disruption: DisruptionV2.t() | Ecto.Association.NotLoaded.t()
+ services: list(Service.t()) | NotLoaded.t(),
+ trip_route_directions: list(TripRouteDirection.t()) | NotLoaded.t(),
+ line: Line.t() | NotLoaded.t(),
+ disruption: DisruptionV2.t() | NotLoaded.t()
}
schema "hastus_exports" do
diff --git a/lib/arrow/hastus/export_upload.ex b/lib/arrow/hastus/export_upload.ex
index 7a0b82c09..0dd19b8e0 100644
--- a/lib/arrow/hastus/export_upload.ex
+++ b/lib/arrow/hastus/export_upload.ex
@@ -5,10 +5,13 @@ defmodule Arrow.Hastus.ExportUpload do
import Ecto.Query, only: [from: 2]
- require Logger
-
+ alias Arrow.Gtfs.Route
+ alias Arrow.Gtfs.StopTime
+ alias Arrow.Gtfs.Trip
alias Arrow.Hastus.TripRouteDirection
+ require Logger
+
@type t :: %__MODULE__{
services: list(map()),
line_id: String.t(),
@@ -48,10 +51,10 @@ defmodule Arrow.Hastus.ExportUpload do
with {:ok, zip_bin, file_map} <- read_zip(zip_path, tmp_dir),
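+      # plain `=` binds results that cannot fail to match; `<-` stays on the clauses that can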
{:ok, zip_bin, file_map, amended?} <- amend_service_ids(zip_bin, file_map, tmp_dir),
- revenue_trips <- Stream.filter(file_map["all_trips.txt"], &revenue_trip?/1),
+ revenue_trips = Stream.filter(file_map["all_trips.txt"], &revenue_trip?/1),
:ok <- validate_trip_shapes(revenue_trips, file_map["all_shapes.txt"]),
:ok <- validate_trip_blocks(revenue_trips),
- public_stop_times <-
+ public_stop_times =
filter_out_private_stop_times(
file_map["all_stop_times.txt"],
file_map["all_stops.txt"]
@@ -82,9 +85,7 @@ defmodule Arrow.Hastus.ExportUpload do
end
rescue
e ->
- Logger.warning(
- "Hastus.ExportUpload failed to parse zip, message=#{Exception.format(:error, e, __STACKTRACE__)}"
- )
+ Logger.warning("Hastus.ExportUpload failed to parse zip, message=#{Exception.format(:error, e, __STACKTRACE__)}")
# Must be wrapped in an ok tuple for caller, consume_uploaded_entry/3
{:ok, {:error, "Could not parse zip."}}
@@ -236,12 +237,10 @@ defmodule Arrow.Hastus.ExportUpload do
missing_files = Enum.filter(@filenames, &(get_unzipped_file_path(&1, tmp_dir) not in unzip))
if Enum.any?(missing_files) do
- {:error,
- "The following files are missing from the export: #{Enum.join(missing_files, ", ")}"}
+ {:error, "The following files are missing from the export: #{Enum.join(missing_files, ", ")}"}
else
map =
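+        # Map.new/2 builds the filename => contents map in one pass, replacing Enum.map |> Map.new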
- @filenames
- |> Enum.map(fn filename ->
+ Map.new(@filenames, fn filename ->
data =
filename
|> get_unzipped_file_path(tmp_dir)
@@ -250,7 +249,6 @@ defmodule Arrow.Hastus.ExportUpload do
{to_string(filename), data}
end)
- |> Map.new()
{:ok, map}
end
@@ -260,9 +258,7 @@ defmodule Arrow.Hastus.ExportUpload do
shape_ids = MapSet.new(shapes, & &1["shape_id"])
case revenue_trips
- |> Stream.filter(
- &(&1["shape_id"] in [nil, ""] or not MapSet.member?(shape_ids, &1["shape_id"]))
- )
+ |> Stream.filter(&(&1["shape_id"] in [nil, ""] or not MapSet.member?(shape_ids, &1["shape_id"])))
|> Enum.map(& &1["trip_id"]) do
[] ->
:ok
@@ -295,11 +291,11 @@ defmodule Arrow.Hastus.ExportUpload do
lines =
Arrow.Repo.all(
- from st in Arrow.Gtfs.StopTime,
+ from st in StopTime,
where: st.stop_id in ^exported_stop_ids,
- join: t in Arrow.Gtfs.Trip,
+ join: t in Trip,
on: t.id == st.trip_id,
- join: r in Arrow.Gtfs.Route,
+ join: r in Route,
on: r.id == t.route_id,
select: r.line_id,
distinct: r.line_id
@@ -320,11 +316,11 @@ defmodule Arrow.Hastus.ExportUpload do
canonical_stops_by_branch =
Enum.group_by(
Arrow.Repo.all(
- from t in Arrow.Gtfs.Trip,
+ from t in Trip,
where:
t.route_id in ["Green-B", "Green-C", "Green-D", "Green-E"] and
t.service_id == "canonical",
- join: st in Arrow.Gtfs.StopTime,
+ join: st in StopTime,
on: t.id == st.trip_id,
select: %{route_id: t.route_id, stop_id: st.stop_id}
),
@@ -336,9 +332,7 @@ defmodule Arrow.Hastus.ExportUpload do
|> Enum.group_by(&{&1["route_id"], &1["via_variant"], &1["avi_code"]})
|> Map.values()
|> Enum.map(&List.first/1)
- |> Enum.map(
- &infer_green_line_branch_for_trip(&1, canonical_stops_by_branch, stop_times_by_trip_id)
- )
+ |> Enum.map(&infer_green_line_branch_for_trip(&1, canonical_stops_by_branch, stop_times_by_trip_id))
|> Enum.group_by(&elem(&1, 0))
case result do
@@ -353,7 +347,7 @@ defmodule Arrow.Hastus.ExportUpload do
{:error, message}
%{ok: trip_route_directions} ->
- {:ok, trip_route_directions |> Enum.map(&elem(&1, 1))}
+ {:ok, Enum.map(trip_route_directions, &elem(&1, 1))}
end
end
@@ -522,13 +516,14 @@ defmodule Arrow.Hastus.ExportUpload do
Enum.reduce(stop_id_sets, &MapSet.union/2)
end)
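+    # Naming the comprehension result avoids piping Enum.uniq/1 straight off the `for` block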
- derived_limits =
+ for_result =
for visited_stops <- visited_stops_per_time_window,
seq <- canonical_stop_sequences,
{start_stop_id, end_stop_id} <- limits_from_sequence(seq, visited_stops) do
%{start_stop_id: start_stop_id, end_stop_id: end_stop_id}
end
- |> Enum.uniq()
+
+ derived_limits = Enum.uniq(for_result)
Map.put(service, :derived_limits, derived_limits)
end
@@ -538,7 +533,7 @@ defmodule Arrow.Hastus.ExportUpload do
@spec line_id_to_route_ids(String.t()) :: [String.t()]
defp line_id_to_route_ids(line_id) do
Arrow.Repo.all(
- from r in Arrow.Gtfs.Route,
+ from r in Route,
where: r.line_id == ^line_id,
where: r.network_id in ["rapid_transit", "commuter_rail"],
select: r.id
@@ -548,16 +543,16 @@ defmodule Arrow.Hastus.ExportUpload do
# Returns a list of lists with the direction_id=0 canonical stop sequence(s) for the given routes.
@spec stop_sequences_for_routes([String.t()]) :: [[stop_id :: String.t()]]
defp stop_sequences_for_routes(route_ids) do
- Arrow.Repo.all(
- from t in Arrow.Gtfs.Trip,
- where: t.direction_id == 0,
- where: t.service_id == "canonical",
- where: t.route_id in ^route_ids,
- join: st in Arrow.Gtfs.StopTime,
- on: t.id == st.trip_id,
- order_by: [t.id, st.stop_sequence],
- select: %{trip_id: t.id, stop_id: st.stop_id}
+ from(t in Trip,
+ where: t.direction_id == 0,
+ where: t.service_id == "canonical",
+ where: t.route_id in ^route_ids,
+ join: st in StopTime,
+ on: t.id == st.trip_id,
+ order_by: [t.id, st.stop_sequence],
+ select: %{trip_id: t.id, stop_id: st.stop_id}
)
+ |> Arrow.Repo.all()
|> Stream.chunk_by(& &1.trip_id)
|> Enum.map(fn stops -> Enum.map(stops, & &1.stop_id) end)
end
@@ -572,11 +567,11 @@ defmodule Arrow.Hastus.ExportUpload do
{route_id :: String.t(), trp_direction :: String.t()} => 0 | 1
}
defp trp_direction_to_direction_id([route_id | _]) do
- Arrow.Repo.all(
- from d in Arrow.Gtfs.Direction,
- where: d.route_id == ^route_id,
- select: {d.desc, d.direction_id}
+ from(d in Arrow.Gtfs.Direction,
+ where: d.route_id == ^route_id,
+ select: {d.desc, d.direction_id}
)
+ |> Arrow.Repo.all()
|> Map.new()
end
@@ -694,16 +689,13 @@ defmodule Arrow.Hastus.ExportUpload do
end
# Make sure dates are sorted before we start
- defp merge_adjacent_service_dates([], merged_dates),
- do: Enum.sort_by(merged_dates, & &1.start_date, Date)
+ defp merge_adjacent_service_dates([], merged_dates), do: Enum.sort_by(merged_dates, & &1.start_date, Date)
# Only one date for the current service, just add it as-is
- defp merge_adjacent_service_dates([date], []),
- do: merge_adjacent_service_dates([], [date])
+ defp merge_adjacent_service_dates([date], []), do: merge_adjacent_service_dates([], [date])
# Last date in the list, prepend it to list
- defp merge_adjacent_service_dates([date], merged_dates),
- do: merge_adjacent_service_dates([], [date | merged_dates])
+ defp merge_adjacent_service_dates([date], merged_dates), do: merge_adjacent_service_dates([], [date | merged_dates])
defp merge_adjacent_service_dates(
[
@@ -722,11 +714,9 @@ defmodule Arrow.Hastus.ExportUpload do
end
end
- defp extract_date_parts(date_string),
- do: Regex.run(~r/^(\d{4})(\d{2})(\d{2})/, date_string, capture: :all_but_first)
+ defp extract_date_parts(date_string), do: Regex.run(~r/^(\d{4})(\d{2})(\d{2})/, date_string, capture: :all_but_first)
- defp revenue_trip?(%{"route_id" => route_id, "trp_is_in_service" => "X"}),
- do: Regex.match?(~r/^\d+_*-.+$/, route_id)
+ defp revenue_trip?(%{"route_id" => route_id, "trp_is_in_service" => "X"}), do: Regex.match?(~r/^\d+_*-.+$/, route_id)
defp revenue_trip?(_), do: false
@@ -734,8 +724,7 @@ defmodule Arrow.Hastus.ExportUpload do
private_stop_ids =
stops
|> Stream.filter(&(&1["stp_is_public"] != "X"))
- |> Stream.map(& &1["stop_id"])
- |> MapSet.new()
+ |> MapSet.new(& &1["stop_id"])
Stream.filter(stop_times, &(&1["stop_id"] not in private_stop_ids))
end
diff --git a/lib/arrow/hastus/service.ex b/lib/arrow/hastus/service.ex
index 884eef520..076d240e3 100644
--- a/lib/arrow/hastus/service.ex
+++ b/lib/arrow/hastus/service.ex
@@ -2,16 +2,20 @@ defmodule Arrow.Hastus.Service do
@moduledoc "schema for a HASTUS service for the db"
use Ecto.Schema
+
import Ecto.Changeset
- alias Arrow.Hastus.{DerivedLimit, Export, ServiceDate}
+ alias Arrow.Hastus.DerivedLimit
+ alias Arrow.Hastus.Export
+ alias Arrow.Hastus.ServiceDate
+ alias Ecto.Association.NotLoaded
@type t :: %__MODULE__{
name: String.t(),
- service_dates: list(ServiceDate) | Ecto.Association.NotLoaded.t(),
- derived_limits: list(DerivedLimit.t()) | Ecto.Association.NotLoaded.t(),
+ service_dates: list(ServiceDate) | NotLoaded.t(),
+ derived_limits: list(DerivedLimit.t()) | NotLoaded.t(),
import?: boolean(),
- export: Export.t() | Ecto.Association.NotLoaded.t()
+ export: Export.t() | NotLoaded.t()
}
schema "hastus_services" do
@@ -24,7 +28,7 @@ defmodule Arrow.Hastus.Service do
has_many :derived_limits, DerivedLimit, on_replace: :delete, foreign_key: :service_id
- belongs_to :export, Arrow.Hastus.Export
+ belongs_to :export, Export
timestamps(type: :utc_datetime)
end
diff --git a/lib/arrow/hastus/service_date.ex b/lib/arrow/hastus/service_date.ex
index bbd51dc3d..b4790a24d 100644
--- a/lib/arrow/hastus/service_date.ex
+++ b/lib/arrow/hastus/service_date.ex
@@ -2,6 +2,7 @@ defmodule Arrow.Hastus.ServiceDate do
@moduledoc "schema for a HASTUS service date for the db"
use Ecto.Schema
+
import Ecto.Changeset
alias Arrow.Hastus.Service
@@ -15,7 +16,7 @@ defmodule Arrow.Hastus.ServiceDate do
schema "hastus_service_dates" do
field :start_date, :date
field :end_date, :date
- belongs_to :service, Arrow.Hastus.Service, on_replace: :delete
+ belongs_to :service, Service, on_replace: :delete
timestamps(type: :utc_datetime)
end
@@ -49,7 +50,7 @@ defmodule Arrow.Hastus.ServiceDate do
is_nil(start_date) or is_nil(end_date) ->
changeset
- Date.compare(start_date, end_date) == :gt ->
+ Date.after?(start_date, end_date) ->
add_error(changeset, :start_date, "start date must be less than or equal to end date")
true ->
diff --git a/lib/arrow/limits.ex b/lib/arrow/limits.ex
index c24b94086..8a86aa2b6 100644
--- a/lib/arrow/limits.ex
+++ b/lib/arrow/limits.ex
@@ -4,6 +4,7 @@ defmodule Arrow.Limits do
"""
alias Arrow.Disruptions.Limit
+ alias Arrow.Limits.LimitDayOfWeek
alias Arrow.Repo
@preloads [:route, :start_stop, :end_stop, :limit_day_of_weeks]
@@ -102,8 +103,6 @@ defmodule Arrow.Limits do
Limit.changeset(limit, attrs)
end
- alias Arrow.Limits.LimitDayOfWeek
-
@doc """
Returns the list of limit_day_of_weeks.
diff --git a/lib/arrow/limits/limit_day_of_week.ex b/lib/arrow/limits/limit_day_of_week.ex
index 0abc26576..151a185ed 100644
--- a/lib/arrow/limits/limit_day_of_week.ex
+++ b/lib/arrow/limits/limit_day_of_week.ex
@@ -2,6 +2,7 @@ defmodule Arrow.Limits.LimitDayOfWeek do
@moduledoc "schema for a limit day of week for the db"
use Ecto.Schema
+
import Ecto.Changeset
alias Arrow.Disruptions.Limit
@@ -28,7 +29,7 @@ defmodule Arrow.Limits.LimitDayOfWeek do
field :end_time, :string
field :active?, :boolean, source: :is_active, default: false
field :all_day?, :boolean, virtual: true
- belongs_to :limit, Arrow.Disruptions.Limit
+ belongs_to :limit, Limit
timestamps(type: :utc_datetime)
end
@@ -41,12 +42,8 @@ defmodule Arrow.Limits.LimitDayOfWeek do
|> cast(attrs, [:active?, :day_name, :start_time, :end_time, :limit_id, :all_day?])
|> validate_required([:day_name])
|> validate_required_times()
- |> validate_format(:start_time, time_regex,
- message: "must be a valid GTFS time in format HH:MM"
- )
- |> validate_format(:end_time, time_regex,
- message: "must be a valid GTFS time in format HH:MM"
- )
+ |> validate_format(:start_time, time_regex, message: "must be a valid GTFS time in format HH:MM")
+ |> validate_format(:end_time, time_regex, message: "must be a valid GTFS time in format HH:MM")
|> validate_start_time_before_end_time()
|> validate_in_date_range(opts[:date_range_day_of_weeks])
|> assoc_constraint(:limit)
diff --git a/lib/arrow/mock/exaws/request.ex b/lib/arrow/mock/exaws/request.ex
index 55b0f8e0f..b191c81bd 100644
--- a/lib/arrow/mock/exaws/request.ex
+++ b/lib/arrow/mock/exaws/request.ex
@@ -7,7 +7,7 @@ defmodule Arrow.Mock.ExAws.Request do
{:ok,
%{
       body:
-          "<kml xmlns=\"http://www.opengis.net/kml/2.2\"><Folder><Placemark><name>AlewifeHarvardViaBrattle</name><LineString><coordinates>-71.1,42.1 -71.2,42.2 -71.3,42.3</coordinates></LineString></Placemark></Folder></kml>",
+          ~s(<kml xmlns="http://www.opengis.net/kml/2.2"><Folder><Placemark><name>AlewifeHarvardViaBrattle</name><LineString><coordinates>-71.1,42.1 -71.2,42.2 -71.3,42.3</coordinates></LineString></Placemark></Folder></kml>),
headers: [],
status_code: 200
}}
diff --git a/lib/arrow/open_route_service_api.ex b/lib/arrow/open_route_service_api.ex
index db45d5952..716aa20eb 100644
--- a/lib/arrow/open_route_service_api.ex
+++ b/lib/arrow/open_route_service_api.ex
@@ -63,14 +63,7 @@ defmodule Arrow.OpenRouteServiceAPI do
{:ok,
%DirectionsResponse{
coordinates: Enum.map(coordinates, fn [lon, lat] -> %{"lat" => lat, "lon" => lon} end),
- segments:
- segments
- |> Enum.map(
- &%{
- distance: &1["distance"],
- duration: &1["duration"]
- }
- ),
+ segments: Enum.map(segments, &%{distance: &1["distance"], duration: &1["duration"]}),
summary:
summary
|> List.wrap()
diff --git a/lib/arrow/open_route_service_api/client.ex b/lib/arrow/open_route_service_api/client.ex
index d227c24ad..8aa2519f1 100644
--- a/lib/arrow/open_route_service_api/client.ex
+++ b/lib/arrow/open_route_service_api/client.ex
@@ -1,10 +1,11 @@
defmodule Arrow.OpenRouteServiceAPI.Client do
- @behaviour Arrow.OpenRouteServiceAPI.Client
@moduledoc """
An HTTP Client that reaches out to Open Route Service
Based on mbta/skate's implementation
"""
+ @behaviour Arrow.OpenRouteServiceAPI.Client
+
alias Arrow.OpenRouteServiceAPI.DirectionsRequest
@callback get_directions(DirectionsRequest.t()) :: {:ok, map()} | {:error, any()}
@@ -90,8 +91,7 @@ defmodule Arrow.OpenRouteServiceAPI.Client do
defp api_base_url, do: Application.get_env(:arrow, Arrow.OpenRouteServiceAPI)[:api_base_url]
- defp directions_path,
- do: "v2/directions/driving-hgv/geojson"
+ defp directions_path, do: "v2/directions/driving-hgv/geojson"
defp headers(nil) do
headers()
diff --git a/lib/arrow/open_route_service_api/directions_request.ex b/lib/arrow/open_route_service_api/directions_request.ex
index 808168314..313b9a40e 100644
--- a/lib/arrow/open_route_service_api/directions_request.ex
+++ b/lib/arrow/open_route_service_api/directions_request.ex
@@ -9,17 +9,12 @@ defmodule Arrow.OpenRouteServiceAPI.DirectionsRequest do
defmodule ProfileParams do
@moduledoc false
defmodule HgvRestrictions do
- @derive Jason.Encoder
@moduledoc false
+ @derive Jason.Encoder
@type t :: %__MODULE__{length: float(), width: float(), height: float()}
defstruct [:length, :width, :height]
- def bus_40ft,
- do: %HgvRestrictions{
- length: 12.192,
- width: 3.2004,
- height: 3.5052
- }
+ def bus_40ft, do: %HgvRestrictions{length: 12.192, width: 3.2004, height: 3.5052}
end
@type t :: %{restrictions: HgvRestrictions.t()}
@@ -42,8 +37,7 @@ defmodule Arrow.OpenRouteServiceAPI.DirectionsRequest do
options: %{
vehicle_type: "bus",
profile_params: %{
- restrictions:
- Arrow.OpenRouteServiceAPI.DirectionsRequest.Options.ProfileParams.HgvRestrictions.bus_40ft()
+ restrictions: Arrow.OpenRouteServiceAPI.DirectionsRequest.Options.ProfileParams.HgvRestrictions.bus_40ft()
}
}
end
diff --git a/lib/arrow/repo.ex b/lib/arrow/repo.ex
index 63adc5d17..c0f006b16 100644
--- a/lib/arrow/repo.ex
+++ b/lib/arrow/repo.ex
@@ -26,8 +26,7 @@ defmodule Arrow.Repo do
cacertfile: Path.join(:code.priv_dir(:arrow), "aws-cert-bundle.pem"),
verify: :verify_peer,
server_name_indication: String.to_charlist(hostname),
- verify_fun:
- {&:ssl_verify_hostname.verify_fun/3, [check_hostname: String.to_charlist(hostname)]}
+ verify_fun: {&:ssl_verify_hostname.verify_fun/3, [check_hostname: String.to_charlist(hostname)]}
]
)
end
diff --git a/lib/arrow/repo/foreign_key_constraint.ex b/lib/arrow/repo/foreign_key_constraint.ex
index 130e172fe..14dbe437c 100644
--- a/lib/arrow/repo/foreign_key_constraint.ex
+++ b/lib/arrow/repo/foreign_key_constraint.ex
@@ -3,7 +3,9 @@ defmodule Arrow.Repo.ForeignKeyConstraint do
Schema allowing Arrow to introspect its DB's foreign key constraints.
"""
use Ecto.Schema
+
import Ecto.Query
+
alias Arrow.Repo
@type t :: %__MODULE__{
@@ -45,11 +47,7 @@ defmodule Arrow.Repo.ForeignKeyConstraint do
"""
@spec external_constraints_referencing_tables(list(String.t() | atom)) :: list(t())
def external_constraints_referencing_tables(tables) when is_list(tables) do
- from(fk in __MODULE__,
- where: fk.referenced_table in ^tables,
- where: fk.origin_table not in ^tables
- )
- |> Repo.all()
+ Repo.all(from(fk in __MODULE__, where: fk.referenced_table in ^tables, where: fk.origin_table not in ^tables))
end
@doc """
diff --git a/lib/arrow/repo/migrator.ex b/lib/arrow/repo/migrator.ex
index 5f30bcb40..45ee1be46 100644
--- a/lib/arrow/repo/migrator.ex
+++ b/lib/arrow/repo/migrator.ex
@@ -3,6 +3,7 @@ defmodule Arrow.Repo.Migrator do
GenServer which runs on startup to run Ecto migrations, then terminates.
"""
use GenServer, restart: :transient
+
require Logger
@opts [module: Ecto.Migrator, migrate_synchronously?: false]
diff --git a/lib/arrow/shuttles.ex b/lib/arrow/shuttles.ex
index 932d7a818..bde1281aa 100644
--- a/lib/arrow/shuttles.ex
+++ b/lib/arrow/shuttles.ex
@@ -5,20 +5,20 @@ defmodule Arrow.Shuttles do
import Ecto.Query, warn: false
+ alias Arrow.Gtfs.Route, as: GtfsRoute
+ alias Arrow.Gtfs.Stop, as: GtfsStop
alias Arrow.OpenRouteServiceAPI
alias Arrow.OpenRouteServiceAPI.DirectionsResponse
alias Arrow.OpenRouteServiceAPI.ErrorResponse
alias Arrow.Repo
- alias ArrowWeb.ErrorHelpers
-
- alias Arrow.Gtfs.Route, as: GtfsRoute
- alias Arrow.Gtfs.Stop, as: GtfsStop
alias Arrow.Shuttles.KML
alias Arrow.Shuttles.RouteStop
alias Arrow.Shuttles.Shape
alias Arrow.Shuttles.ShapesUpload
alias Arrow.Shuttles.ShapeUpload
+ alias Arrow.Shuttles.Shuttle
alias Arrow.Shuttles.Stop
+ alias ArrowWeb.ErrorHelpers
@preloads [routes: [:shape, route_stops: [:stop, :gtfs_stop]]]
@@ -115,17 +115,15 @@ defmodule Arrow.Shuttles do
def create_shapes(shapes) do
changesets = Enum.map(shapes, fn shape -> create_shape(shape) end)
- case Enum.all?(changesets, fn changeset -> Kernel.match?({:ok, _shape}, changeset) end) do
- true ->
- {:ok, changesets}
-
- _ ->
- errors =
- changesets
- |> Enum.filter(fn changeset -> Kernel.match?({:error, _}, changeset) end)
- |> Enum.map(&handle_create_error/1)
+ if Enum.all?(changesets, fn changeset -> Kernel.match?({:ok, _shape}, changeset) end) do
+ {:ok, changesets}
+ else
+ errors =
+ changesets
+ |> Enum.filter(fn changeset -> Kernel.match?({:error, _}, changeset) end)
+ |> Enum.map(&handle_create_error/1)
- {:error, {"Failed to upload some shapes", errors}}
+ {:error, {"Failed to upload some shapes", errors}}
end
end
@@ -160,8 +158,7 @@ defmodule Arrow.Shuttles do
{:error, "Shape #{name} already exists, delete the shape to save a new one"}
{:error, :already_exists} ->
- {:error,
- "File for shape #{attrs.name} already exists, delete the shape to save a new one"}
+ {:error, "File for shape #{attrs.name} already exists, delete the shape to save a new one"}
{:error, e} ->
{:error, e}
@@ -197,7 +194,7 @@ defmodule Arrow.Shuttles do
path = get_shape_upload_path(filename)
case do_upload_shape(content, bucket, path, request_fn) do
- error = {:error, _} -> error
+ {:error, _} = error -> error
{:ok, _} -> {:ok, %{bucket: bucket, prefix: prefix, path: path}}
end
else
@@ -278,13 +275,14 @@ defmodule Arrow.Shuttles do
Returns a list of shuttles using the given shape
"""
def shuttles_using_shape(%Shape{} = shape) do
- from(s in Arrow.Shuttles.Shuttle,
- join: r in assoc(s, :routes),
- where: r.shape_id == ^shape.id,
- distinct: s,
- select: s
+ Repo.all(
+ from(s in Arrow.Shuttles.Shuttle,
+ join: r in assoc(s, :routes),
+ where: r.shape_id == ^shape.id,
+ distinct: s,
+ select: s
+ )
)
- |> Repo.all()
end
@doc """
@@ -300,8 +298,6 @@ defmodule Arrow.Shuttles do
Shape.changeset(shape, attrs)
end
- alias Arrow.Shuttles.Shuttle
-
@doc """
Returns the list of shuttles.
@@ -312,7 +308,7 @@ defmodule Arrow.Shuttles do
"""
def list_shuttles do
- Repo.all(Shuttle) |> Repo.preload(@preloads)
+ Shuttle |> Repo.all() |> Repo.preload(@preloads)
end
@doc """
@@ -330,7 +326,7 @@ defmodule Arrow.Shuttles do
"""
def get_shuttle!(id) do
- Repo.get!(Shuttle, id) |> Repo.preload(@preloads) |> populate_display_stop_ids()
+ Shuttle |> Repo.get!(id) |> Repo.preload(@preloads) |> populate_display_stop_ids()
end
@doc """
@@ -424,17 +420,11 @@ defmodule Arrow.Shuttles do
else: {:error, "Missing id for stop"}
end
- def get_stop_coordinates(%RouteStop{
- display_stop: %Stop{} = display_stop,
- display_stop_id: _display_stop_id
- }) do
+ def get_stop_coordinates(%RouteStop{display_stop: %Stop{} = display_stop, display_stop_id: _display_stop_id}) do
get_stop_coordinates(display_stop)
end
- def get_stop_coordinates(%RouteStop{
- display_stop: %GtfsStop{} = display_stop,
- display_stop_id: _display_stop_id
- }) do
+ def get_stop_coordinates(%RouteStop{display_stop: %GtfsStop{} = display_stop, display_stop_id: _display_stop_id}) do
get_stop_coordinates(display_stop)
end
@@ -471,11 +461,11 @@ defmodule Arrow.Shuttles do
@spec get_travel_times(list(%{lat: number(), lon: number()})) ::
{:ok, list(number())} | {:error, any()}
def get_travel_times(coordinates) do
- coordinates = coordinates |> Enum.map(&Map.new(&1, fn {k, v} -> {to_string(k), v} end))
+ coordinates = Enum.map(coordinates, &Map.new(&1, fn {k, v} -> {to_string(k), v} end))
case OpenRouteServiceAPI.directions(coordinates) do
{:ok, %DirectionsResponse{segments: segments}} ->
- {:ok, segments |> Enum.map(&round(&1.duration))}
+ {:ok, Enum.map(segments, &round(&1.duration))}
{:error, %ErrorResponse{type: :no_route}} ->
{:error, "Unable to retrieve estimates: no route between stops found"}
@@ -511,7 +501,7 @@ defmodule Arrow.Shuttles do
"""
@spec stop_display_name(Stop.t() | GtfsStop.t()) :: String.t()
def stop_display_name(%Stop{stop_desc: stop_desc, stop_name: stop_name}),
- do: if(stop_desc != "", do: stop_desc, else: stop_name)
+ do: if(stop_desc == "", do: stop_name, else: stop_desc)
def stop_display_name(%GtfsStop{desc: desc, name: name}), do: desc || name
@@ -536,7 +526,7 @@ defmodule Arrow.Shuttles do
matching_stops = Repo.all(stops_query)
- arrow_stop_ids = matching_stops |> Enum.map(& &1.stop_id) |> MapSet.new()
+ arrow_stop_ids = MapSet.new(matching_stops, & &1.stop_id)
matching_gtfs_stops =
gtfs_stops_query |> Repo.all() |> Enum.filter(&(!MapSet.member?(arrow_stop_ids, &1.id)))
diff --git a/lib/arrow/shuttles/definition_upload.ex b/lib/arrow/shuttles/definition_upload.ex
index 5f178d011..dfd61949d 100644
--- a/lib/arrow/shuttles/definition_upload.ex
+++ b/lib/arrow/shuttles/definition_upload.ex
@@ -43,10 +43,7 @@ defmodule Arrow.Shuttles.DefinitionUpload do
end
end
- defp parse_direction_tabs(%{
- @direction_0_tab_name => direction_0_tab_tid,
- @direction_1_tab_name => direction_1_tab_tid
- }) do
+ defp parse_direction_tabs(%{@direction_0_tab_name => direction_0_tab_tid, @direction_1_tab_name => direction_1_tab_tid}) do
case {parse_direction_tab(direction_0_tab_tid, @direction_0_tab_name),
parse_direction_tab(direction_1_tab_tid, @direction_1_tab_name)} do
{{:errors, errors0}, {:errors, errors1}} -> {:errors, errors0 ++ errors1}
@@ -82,7 +79,7 @@ defmodule Arrow.Shuttles.DefinitionUpload do
|> Enum.reverse()
if Enum.empty?(errors) do
- {:ok, stop_ids |> Enum.map(&Integer.to_string(&1))}
+ {:ok, Enum.map(stop_ids, &Integer.to_string(&1))}
else
{:errors, errors}
end
diff --git a/lib/arrow/shuttles/kml.ex b/lib/arrow/shuttles/kml.ex
index 1faeb31b9..c96dfa6ce 100644
--- a/lib/arrow/shuttles/kml.ex
+++ b/lib/arrow/shuttles/kml.ex
@@ -2,11 +2,10 @@ defmodule Arrow.Shuttles.KML do
@moduledoc """
A struct for the full KML representation of a shape to be used with Saxy.Builder
"""
- @derive {Saxy.Builder,
- name: "kml", attributes: [:xmlns], children: [Folder: &__MODULE__.build_shape/1]}
-
import Saxy.XML
+ @derive {Saxy.Builder, name: "kml", attributes: [:xmlns], children: [Folder: &__MODULE__.build_shape/1]}
+
defstruct [:xmlns, :Folder]
def build_shape(%{name: name, coordinates: coordinates}) do
diff --git a/lib/arrow/shuttles/route.ex b/lib/arrow/shuttles/route.ex
index ad3b256fa..efd201ff5 100644
--- a/lib/arrow/shuttles/route.ex
+++ b/lib/arrow/shuttles/route.ex
@@ -1,31 +1,32 @@
defmodule Arrow.Shuttles.Route do
@moduledoc "schema for a shuttle route for the db"
use Ecto.Schema
+
import Ecto.Changeset
alias Arrow.Repo
alias Arrow.Shuttles
+ alias Arrow.Shuttles.RouteStop
alias Arrow.Shuttles.ShapesUpload
+ alias Ecto.Association.NotLoaded
@direction_0_desc_values [:Outbound, :South, :West]
@direction_1_desc_values [:Inbound, :North, :East]
def direction_desc_values, do: @direction_0_desc_values ++ @direction_1_desc_values
- def direction_desc_values(direction_id) when direction_id in [:"0", "0"],
- do: @direction_0_desc_values
+ def direction_desc_values(direction_id) when direction_id in [:"0", "0"], do: @direction_0_desc_values
- def direction_desc_values(direction_id) when direction_id in [:"1", "1"],
- do: @direction_1_desc_values
+ def direction_desc_values(direction_id) when direction_id in [:"1", "1"], do: @direction_1_desc_values
@type t :: %__MODULE__{
destination: String.t(),
direction_id: :"0" | :"1",
direction_desc: :Inbound | :Outbound | :North | :South | :East | :West,
waypoint: String.t(),
- shuttle: Shuttles.Shuttle.t() | Ecto.Association.NotLoaded.t() | nil,
- shape: Shuttles.Shape.t() | Ecto.Association.NotLoaded.t() | nil,
- route_stops: [Shuttles.RouteStop.t()] | Ecto.Association.NotLoaded.t() | nil
+ shuttle: Shuttles.Shuttle.t() | NotLoaded.t() | nil,
+ shape: Shuttles.Shape.t() | NotLoaded.t() | nil,
+ route_stops: [Shuttles.RouteStop.t()] | NotLoaded.t() | nil
}
schema "shuttle_routes" do
@@ -36,7 +37,7 @@ defmodule Arrow.Shuttles.Route do
belongs_to :shuttle, Arrow.Shuttles.Shuttle
belongs_to :shape, Arrow.Shuttles.Shape
- has_many :route_stops, Arrow.Shuttles.RouteStop,
+ has_many :route_stops, RouteStop,
foreign_key: :shuttle_route_id,
preload_order: [asc: :stop_sequence],
on_replace: :delete
@@ -60,7 +61,7 @@ defmodule Arrow.Shuttles.Route do
changeset
|> then(&if error, do: add_error(&1, :shape_id, error), else: &1)
|> cast_assoc(:route_stops,
- with: &Arrow.Shuttles.RouteStop.changeset(&1, &2, coordinates),
+ with: &RouteStop.changeset(&1, &2, coordinates),
sort_param: :route_stops_sort,
drop_param: :route_stops_drop
)
@@ -88,7 +89,8 @@ defmodule Arrow.Shuttles.Route do
case Shuttles.get_shapes_upload(shape) do
{:ok, %Ecto.Changeset{} = changeset} ->
coordinates =
- ShapesUpload.shapes_map_view(changeset)
+ changeset
+ |> ShapesUpload.shapes_map_view()
|> Map.get(:shapes)
|> List.first()
|> Map.get(:coordinates)
diff --git a/lib/arrow/shuttles/route_stop.ex b/lib/arrow/shuttles/route_stop.ex
index 9d4c75d18..be49957bc 100644
--- a/lib/arrow/shuttles/route_stop.ex
+++ b/lib/arrow/shuttles/route_stop.ex
@@ -1,11 +1,13 @@
defmodule Arrow.Shuttles.RouteStop do
@moduledoc "schema for a shuttle route stop for the db"
use Ecto.Schema
+
import Ecto.Changeset
alias Arrow.Gtfs.Stop, as: GtfsStop
alias Arrow.Shuttles
alias Arrow.Shuttles.Stop
+ alias Ecto.Association.NotLoaded
@type t :: %__MODULE__{
direction_id: :"0" | :"1",
@@ -15,9 +17,9 @@ defmodule Arrow.Shuttles.RouteStop do
display_stop: Arrow.Shuttles.Stop.t() | Arrow.Gtfs.Stop.t() | nil,
inserted_at: DateTime.t() | nil,
updated_at: DateTime.t() | nil,
- shuttle_route: Arrow.Gtfs.Level.t() | Ecto.Association.NotLoaded.t() | nil,
- stop: Arrow.Shuttles.Stop.t() | Ecto.Association.NotLoaded.t() | nil,
- gtfs_stop: Arrow.Gtfs.Stop.t() | Ecto.Association.NotLoaded.t() | nil,
+ shuttle_route: Arrow.Gtfs.Level.t() | NotLoaded.t() | nil,
+ stop: Arrow.Shuttles.Stop.t() | NotLoaded.t() | nil,
+ gtfs_stop: Arrow.Gtfs.Stop.t() | NotLoaded.t() | nil,
gtfs_stop_id: String.t() | nil
}
@@ -39,8 +41,7 @@ defmodule Arrow.Shuttles.RouteStop do
@doc false
def changeset(route_stop, attrs, coordinates \\ nil) do
change =
- route_stop
- |> cast(attrs, [
+ cast(route_stop, attrs, [
:direction_id,
:stop_id,
:gtfs_stop_id,
@@ -89,9 +90,8 @@ defmodule Arrow.Shuttles.RouteStop do
@spec maybe_validate_stop_distance(Ecto.Changeset.t(), [[float()]] | nil) :: Ecto.Changeset.t()
defp maybe_validate_stop_distance(changeset, nil), do: changeset
- defp maybe_validate_stop_distance(changeset, _shape_coordinates)
- when changeset.action in [:replace, :delete],
- do: changeset
+ defp maybe_validate_stop_distance(changeset, _shape_coordinates) when changeset.action in [:replace, :delete],
+ do: changeset
defp maybe_validate_stop_distance(changeset, shape_coordinates) do
stop =
diff --git a/lib/arrow/shuttles/shape.ex b/lib/arrow/shuttles/shape.ex
index cb952798a..abf396448 100644
--- a/lib/arrow/shuttles/shape.ex
+++ b/lib/arrow/shuttles/shape.ex
@@ -1,11 +1,12 @@
defmodule Arrow.Shuttles.Shape do
@moduledoc "schema for shuttle shapes for the db"
- @derive {Phoenix.Param, key: :name}
-
use Ecto.Schema
+
import Ecto.Changeset
+ @derive {Phoenix.Param, key: :name}
+
@derive {Jason.Encoder, only: [:id, :name, :inserted_at, :updated_at]}
@type id :: integer
@@ -52,8 +53,7 @@ defmodule Arrow.Shuttles.Shape do
changeset,
:name,
~r/^[A-Z][A-Za-z0-9]*To[A-Z][A-Za-z0-9]*(?:Via[A-Z][A-Za-z0-9]*)?(?:-S)?$/,
- message:
- "should be PascalCase using only letters and numbers and include start and end location"
+ message: "should be PascalCase using only letters and numbers and include start and end location"
)
else
changeset
diff --git a/lib/arrow/shuttles/shape_upload.ex b/lib/arrow/shuttles/shape_upload.ex
index aa7a5ecbf..29afe3070 100644
--- a/lib/arrow/shuttles/shape_upload.ex
+++ b/lib/arrow/shuttles/shape_upload.ex
@@ -1,6 +1,7 @@
defmodule Arrow.Shuttles.ShapeUpload do
@moduledoc "schema for shuttle shapes as an embedded schema"
use Ecto.Schema
+
import Ecto.Changeset
@type t :: %__MODULE__{
diff --git a/lib/arrow/shuttles/shapes_upload.ex b/lib/arrow/shuttles/shapes_upload.ex
index 9a2613d3c..c2e5766bf 100644
--- a/lib/arrow/shuttles/shapes_upload.ex
+++ b/lib/arrow/shuttles/shapes_upload.ex
@@ -1,16 +1,19 @@
defmodule Arrow.Shuttles.ShapesUpload do
@moduledoc "schema for shapes upload"
use Ecto.Schema
+
import Ecto.Changeset
+ alias Arrow.Shuttles.ShapeUpload
+
@type t :: %__MODULE__{
filename: String.t(),
- shapes: list(Arrow.Shuttles.ShapeUpload.t())
+ shapes: list(ShapeUpload.t())
}
embedded_schema do
field :filename, :string
- embeds_many :shapes, Arrow.Shuttles.ShapeUpload
+ embeds_many :shapes, ShapeUpload
end
@doc false
@@ -29,9 +32,7 @@ defmodule Arrow.Shuttles.ShapesUpload do
{:ok, shapes}
else
{:error, reason} ->
- {:error,
- {"Failed to upload shapes from #{filename} because the provided xml was invalid",
- [reason]}}
+ {:error, {"Failed to upload shapes from #{filename} because the provided xml was invalid", [reason]}}
end
end
@@ -66,7 +67,7 @@ defmodule Arrow.Shuttles.ShapesUpload do
@doc """
Parses one or many Shapes from a map of the KML/XML
"""
- @spec shapes_from_kml(map) :: {:ok, list(Arrow.Shuttles.ShapeUpload.t())} | {:error, any}
+ @spec shapes_from_kml(map) :: {:ok, list(ShapeUpload.t())} | {:error, any}
def shapes_from_kml(saxy_shapes) do
placemarks = saxy_shapes["kml"]["Folder"]["Placemark"]
@@ -77,8 +78,7 @@ defmodule Arrow.Shuttles.ShapesUpload do
%{"LineString" => %{"coordinates" => nil}, "name" => _name} ->
error =
- {"Failed to parse shape from kml, no coordinates were found. Check your whitespace.",
- [inspect(placemarks)]}
+ {"Failed to parse shape from kml, no coordinates were found. Check your whitespace.", [inspect(placemarks)]}
{:error, error}
diff --git a/lib/arrow/shuttles/shuttle.ex b/lib/arrow/shuttles/shuttle.ex
index 8a086c360..46aba3369 100644
--- a/lib/arrow/shuttles/shuttle.ex
+++ b/lib/arrow/shuttles/shuttle.ex
@@ -1,11 +1,13 @@
defmodule Arrow.Shuttles.Shuttle do
@moduledoc "schema for a shuttle for the db"
use Ecto.Schema
+
import Ecto.Changeset
import Ecto.Query
alias Arrow.Disruptions.ReplacementService
alias Arrow.Repo
+ alias Arrow.Shuttles.Route
@type id :: integer
@type t :: %__MODULE__{
@@ -22,7 +24,7 @@ defmodule Arrow.Shuttles.Shuttle do
field :disrupted_route_id, :string
field :suffix, :string
- has_many :routes, Arrow.Shuttles.Route, preload_order: [asc: :direction_id]
+ has_many :routes, Route, preload_order: [asc: :direction_id]
timestamps(type: :utc_datetime)
end
@@ -32,9 +34,7 @@ defmodule Arrow.Shuttles.Shuttle do
shuttle
|> cast(attrs, [:shuttle_name, :disrupted_route_id, :status, :suffix])
|> then(fn changeset ->
- cast_assoc(changeset, :routes,
- with: &Arrow.Shuttles.Route.changeset(&1, &2, get_field(changeset, :status) == :active)
- )
+ cast_assoc(changeset, :routes, with: &Route.changeset(&1, &2, get_field(changeset, :status) == :active))
end)
|> validate_required([:shuttle_name, :status])
|> validate_required_for(:status)
@@ -64,8 +64,7 @@ defmodule Arrow.Shuttles.Shuttle do
"all stops except the last in each direction must have a time to next stop"
)
- routes
- |> Enum.any?(fn route -> is_nil(route.data.shape) end) ->
+ Enum.any?(routes, fn route -> is_nil(route.data.shape) end) ->
add_error(
changeset,
:status,
diff --git a/lib/arrow/shuttles/stop.ex b/lib/arrow/shuttles/stop.ex
index 052330dc1..8237683b1 100644
--- a/lib/arrow/shuttles/stop.ex
+++ b/lib/arrow/shuttles/stop.ex
@@ -1,11 +1,12 @@
defmodule Arrow.Shuttles.Stop do
@moduledoc false
- @derive {Phoenix.Param, key: :stop_id}
-
use Ecto.Schema
+
import Ecto.Changeset
+ @derive {Phoenix.Param, key: :stop_id}
+
@derive {Jason.Encoder, only: [:stop_name, :stop_desc, :stop_lat, :stop_lon, :stop_id]}
@type id :: integer
diff --git a/lib/arrow/stops.ex b/lib/arrow/stops.ex
index 5bd4edc49..03cf68527 100644
--- a/lib/arrow/stops.ex
+++ b/lib/arrow/stops.ex
@@ -4,8 +4,8 @@ defmodule Arrow.Stops do
"""
import Ecto.Query, warn: false
- alias Arrow.Repo
+ alias Arrow.Repo
alias Arrow.Shuttles.Stop
@doc """
@@ -18,11 +18,7 @@ defmodule Arrow.Stops do
"""
def list_stops(params \\ %{}) do
- from(
- s in Stop,
- order_by: ^order_by(params["order_by"])
- )
- |> Repo.all()
+ Repo.all(from(s in Stop, order_by: ^order_by(params["order_by"])))
end
@doc """
@@ -123,7 +119,7 @@ defmodule Arrow.Stops do
{:error, %Ecto.Changeset{}}
"""
- def delete_stop(%Arrow.Shuttles.Stop{} = stop) do
+ def delete_stop(%Stop{} = stop) do
Repo.delete(stop)
end
@@ -155,7 +151,7 @@ defmodule Arrow.Stops do
[%Arrow.Shuttles.Stop{}, ...]
"""
@spec get_stops_within_mile(String.t() | nil, {float(), float()}) ::
- list(Arrow.Shuttles.Stop.t())
+ list(Stop.t())
def get_stops_within_mile(stop_id, {lat, lon}) do
conditions =
dynamic(
@@ -174,7 +170,7 @@ defmodule Arrow.Stops do
end
query =
- from(s in Arrow.Shuttles.Stop,
+ from(s in Stop,
where: ^conditions
)
diff --git a/lib/arrow/sync_worker.ex b/lib/arrow/sync_worker.ex
index 973178fe0..124c9ff0b 100644
--- a/lib/arrow/sync_worker.ex
+++ b/lib/arrow/sync_worker.ex
@@ -14,11 +14,16 @@ defmodule Arrow.SyncWorker do
# Prevent duplicate jobs within an hour
unique: [period: 3600]
- alias Arrow.{Repo, Shuttles, Stops}
- alias Arrow.Shuttles.{Shape, ShapesUpload, Stop}
+ import Ecto.Query
+
+ alias Arrow.Repo
+ alias Arrow.Shuttles
+ alias Arrow.Shuttles.Shape
+ alias Arrow.Shuttles.ShapesUpload
+ alias Arrow.Shuttles.Stop
+ alias Arrow.Stops
require Logger
- import Ecto.Query
@impl Oban.Worker
def perform(%Oban.Job{}) do
@@ -36,7 +41,7 @@ defmodule Arrow.SyncWorker do
end
@impl Oban.Worker
- def timeout(_job), do: :timer.minutes(10)
+ def timeout(_job), do: to_timeout(minute: 10)
defp sync_stops do
Logger.info("Starting stops sync")
@@ -44,12 +49,12 @@ defmodule Arrow.SyncWorker do
case fetch_prod_data("/api/shuttle-stops") do
{:ok, %{"data" => stops_data}} ->
existing_stop_ids =
- Repo.all(from(s in Stop, select: s.stop_id))
+ from(s in Stop, select: s.stop_id)
+ |> Repo.all()
|> MapSet.new()
new_stops =
- stops_data
- |> Enum.reject(fn stop -> stop["attributes"]["stop_id"] in existing_stop_ids end)
+ Enum.reject(stops_data, fn stop -> stop["attributes"]["stop_id"] in existing_stop_ids end)
sync_results = new_stops |> Enum.map(&create_stop_from_api_data/1) |> Enum.frequencies()
@@ -68,12 +73,12 @@ defmodule Arrow.SyncWorker do
case fetch_prod_data("/api/shapes") do
{:ok, %{"data" => shapes_data}} ->
existing_shape_names =
- Repo.all(from(s in Shape, select: s.name))
+ from(s in Shape, select: s.name)
+ |> Repo.all()
|> MapSet.new()
new_shapes =
- shapes_data
- |> Enum.reject(fn shape -> shape["attributes"]["name"] in existing_shape_names end)
+ Enum.reject(shapes_data, fn shape -> shape["attributes"]["name"] in existing_shape_names end)
sync_results = new_shapes |> Enum.map(&create_shape_from_api_data/1) |> Enum.frequencies()
@@ -108,7 +113,7 @@ defmodule Arrow.SyncWorker do
defp create_stop_from_api_data(%{"attributes" => attributes}) do
stop_params =
- %{
+ Map.new(%{
stop_id: attributes["stop_id"],
stop_name: attributes["stop_name"],
stop_desc: attributes["stop_desc"],
@@ -123,8 +128,7 @@ defmodule Arrow.SyncWorker do
municipality: attributes["municipality"],
on_street: attributes["on_street"],
at_street: attributes["at_street"]
- }
- |> Map.new()
+ })
case Stops.create_stop(stop_params) do
{:ok, stop} ->
diff --git a/lib/arrow_web.ex b/lib/arrow_web.ex
index 323bea1e9..d34f1dc8f 100644
--- a/lib/arrow_web.ex
+++ b/lib/arrow_web.ex
@@ -23,9 +23,10 @@ defmodule ArrowWeb do
def router do
quote do
use Phoenix.Router
+
+ import Phoenix.Controller
import Phoenix.LiveView.Router
import Plug.Conn
- import Phoenix.Controller
end
end
@@ -39,9 +40,10 @@ defmodule ArrowWeb do
def controller do
quote do
use Phoenix.Controller, namespace: ArrowWeb
+ use Gettext, backend: ArrowWeb.Gettext
import Plug.Conn
- use Gettext, backend: ArrowWeb.Gettext
+
alias ArrowWeb.Router.Helpers, as: Routes
unquote(verified_routes())
@@ -51,37 +53,40 @@ defmodule ArrowWeb do
def html do
quote do
use Phoenix.Component
+ use PhoenixHTMLHelpers
+ use Gettext, backend: ArrowWeb.Gettext
+
+ import ArrowWeb.ErrorHelpers
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_csrf_token: 0, view_module: 1, view_template: 1]
+ import Phoenix.HTML
+ import Phoenix.HTML.Form
+ import ReactPhoenix.ClientSide
+
+ alias ArrowWeb.Router.Helpers, as: Routes
+
# Include general helpers for rendering HTML
unquote(html_helpers())
# https://hexdocs.pm/phoenix_html/changelog.html#v4-0-0-2023-12-19
# Use all HTML functionality (forms, tags, etc)
# Still needed for old style Phoenix HTML like ,
- import Phoenix.HTML
- import Phoenix.HTML.Form
- use PhoenixHTMLHelpers
-
- import ArrowWeb.ErrorHelpers
- use Gettext, backend: ArrowWeb.Gettext
- alias ArrowWeb.Router.Helpers, as: Routes
# Import the `react_component` helper
- import ReactPhoenix.ClientSide
end
end
defp html_helpers do
quote do
+ use Gettext, backend: ArrowWeb.Gettext
+
+ import ArrowWeb.CoreComponents
+ import ArrowWeb.Helpers
# HTML escaping functionality
import Phoenix.HTML
# Core UI components and translation
- import ArrowWeb.CoreComponents
- import ArrowWeb.Helpers
- use Gettext, backend: ArrowWeb.Gettext
# Shortcut for generating JS commands
alias Phoenix.LiveView.JS
@@ -96,10 +101,11 @@ defmodule ArrowWeb do
use Phoenix.LiveView,
layout: {ArrowWeb.LayoutView, :live}
+ import PhoenixLiveReact
+
unquote(html_helpers())
# Import the `live_react_component` helper
- import PhoenixLiveReact
end
end
@@ -107,10 +113,11 @@ defmodule ArrowWeb do
quote do
use Phoenix.LiveComponent
+ import PhoenixLiveReact
+
unquote(html_helpers())
# Import the `live_react_component` helper
- import PhoenixLiveReact
end
end
diff --git a/lib/arrow_web/auth_manager/error_handler.ex b/lib/arrow_web/auth_manager/error_handler.ex
index 7879cd127..f0c9bdbad 100644
--- a/lib/arrow_web/auth_manager/error_handler.ex
+++ b/lib/arrow_web/auth_manager/error_handler.ex
@@ -3,11 +3,11 @@ defmodule ArrowWeb.AuthManager.ErrorHandler do
Plug to handle if user is not authenticated.
"""
+ @behaviour Guardian.Plug.ErrorHandler
+
alias ArrowWeb.Router.Helpers, as: Routes
alias Phoenix.Controller
- @behaviour Guardian.Plug.ErrorHandler
-
@impl Guardian.Plug.ErrorHandler
def auth_error(conn, error, _opts) do
provider = Application.get_env(:arrow, :ueberauth_provider)
diff --git a/lib/arrow_web/components/core_components.ex b/lib/arrow_web/components/core_components.ex
index fb566e75e..e6588fefa 100644
--- a/lib/arrow_web/components/core_components.ex
+++ b/lib/arrow_web/components/core_components.ex
@@ -21,9 +21,13 @@ defmodule ArrowWeb.CoreComponents do
endpoint: ArrowWeb.Endpoint,
statics: ArrowWeb.static_paths()
- alias Phoenix.LiveView.JS
use Gettext, backend: ArrowWeb.Gettext
+ alias Phoenix.HTML.Form
+ alias Phoenix.HTML.FormField
+ alias Phoenix.LiveView.JS
+ alias Phoenix.LiveView.Socket
+
@doc """
Renders a modal.
@@ -286,8 +290,7 @@ defmodule ArrowWeb.CoreComponents do
values: ~w(checkbox color date datetime-local email file month number password
range search select tel text textarea time url week hidden)
- attr :field, Phoenix.HTML.FormField,
- doc: "a form field struct retrieved from the form, for example: @form[:email]"
+ attr :field, FormField, doc: "a form field struct retrieved from the form, for example: @form[:email]"
attr :errors, :list, default: []
attr :checked, :boolean, doc: "the checked flag for checkbox inputs"
@@ -296,13 +299,12 @@ defmodule ArrowWeb.CoreComponents do
attr :multiple, :boolean, default: false, doc: "the multiple flag for select inputs"
attr :class, :string, default: nil
- attr :rest, :global,
- include: ~w(accept autocomplete capture cols disabled form list max maxlength min minlength
+ attr :rest, :global, include: ~w(accept autocomplete capture cols disabled form list max maxlength min minlength
multiple pattern placeholder readonly required rows size step)
slot :inner_block
- def input(%{field: %Phoenix.HTML.FormField{} = field} = assigns) do
+ def input(%{field: %FormField{} = field} = assigns) do
errors = if Phoenix.Component.used_input?(field), do: field.errors, else: []
assigns
@@ -316,7 +318,7 @@ defmodule ArrowWeb.CoreComponents do
def input(%{type: "checkbox"} = assigns) do
assigns =
assign_new(assigns, :checked, fn ->
- Phoenix.HTML.Form.normalize_value("checkbox", assigns[:value])
+ Form.normalize_value("checkbox", assigns[:value])
end)
~H"""
@@ -408,18 +410,17 @@ defmodule ArrowWeb.CoreComponents do
def custom_normalize_value("text", value) when is_map(value) do
iodata = Jason.encode_to_iodata!(value)
- Phoenix.HTML.Form.normalize_value("text", iodata)
+ Form.normalize_value("text", iodata)
end
def custom_normalize_value(type, value) do
- Phoenix.HTML.Form.normalize_value(type, value)
+ Form.normalize_value(type, value)
end
@doc """
LiveSelect with styling
"""
- attr :field, Phoenix.HTML.FormField,
- doc: "a form field struct retrieved from the form, for example: @form[:email]"
+ attr :field, FormField, doc: "a form field struct retrieved from the form, for example: @form[:email]"
attr :class, :string, default: nil
attr :label, :string, default: nil
@@ -431,7 +432,7 @@ defmodule ArrowWeb.CoreComponents do
attr :target, :any, default: nil
attr :update_min_len, :integer
- def live_select(%{field: %Phoenix.HTML.FormField{} = field} = assigns) do
+ def live_select(%{field: %FormField{} = field} = assigns) do
assigns =
assigns
|> assign(:errors, Enum.map(field.errors, &translate_error(&1)))
@@ -818,8 +819,7 @@ defmodule ArrowWeb.CoreComponents do
JS.show(js,
to: selector,
transition:
- {"transition-all transform ease-out duration-300",
- "opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95",
+ {"transition-all transform ease-out duration-300", "opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95",
"opacity-100 translate-y-0 sm:scale-100"}
)
end
@@ -829,8 +829,7 @@ defmodule ArrowWeb.CoreComponents do
to: selector,
time: 200,
transition:
- {"transition-all transform ease-in duration-200",
- "opacity-100 translate-y-0 sm:scale-100",
+ {"transition-all transform ease-in duration-200", "opacity-100 translate-y-0 sm:scale-100",
"opacity-0 translate-y-4 sm:translate-y-0 sm:scale-95"}
)
end
@@ -902,16 +901,16 @@ defmodule ArrowWeb.CoreComponents do
See the `phx:download-file` event in `app.tsx` for the client-side implementation.
"""
@spec send_download(
- Phoenix.LiveView.Socket.t(),
+ Socket.t(),
filename :: String.t(),
data :: String.t() | {:binary, binary()}
- ) :: Phoenix.LiveView.Socket.t()
+ ) :: Socket.t()
@spec send_download(
- Phoenix.LiveView.Socket.t(),
+ Socket.t(),
filename :: String.t(),
data :: String.t() | {:binary, binary()},
opts :: Keyword.t()
- ) :: Phoenix.LiveView.Socket.t()
+ ) :: Socket.t()
def send_download(socket, filename, data, opts \\ []) do
content_type =
case {Keyword.fetch(opts, :content_type), data} do
diff --git a/lib/arrow_web/components/disruption_components.ex b/lib/arrow_web/components/disruption_components.ex
index 2fd8f9fe8..aeef9ba39 100644
--- a/lib/arrow_web/components/disruption_components.ex
+++ b/lib/arrow_web/components/disruption_components.ex
@@ -1,11 +1,11 @@
defmodule ArrowWeb.DisruptionComponents do
@moduledoc false
- alias Arrow.Disruptions.ReplacementService
use ArrowWeb, :live_component
alias Arrow.Adjustment
alias Arrow.Disruptions.DisruptionV2
alias Arrow.Disruptions.Limit
+ alias Arrow.Disruptions.ReplacementService
alias Arrow.Hastus.Export
alias Arrow.Hastus.Service
alias Arrow.Limits.LimitDayOfWeek
diff --git a/lib/arrow_web/components/edit_disruption_form.ex b/lib/arrow_web/components/edit_disruption_form.ex
index 7bfa0e5d8..c3eadbdff 100644
--- a/lib/arrow_web/components/edit_disruption_form.ex
+++ b/lib/arrow_web/components/edit_disruption_form.ex
@@ -2,11 +2,11 @@ defmodule ArrowWeb.EditDisruptionForm do
@moduledoc false
use ArrowWeb, :live_component
+ import Phoenix.HTML.Form
+
alias Arrow.Disruptions
alias Arrow.Disruptions.DisruptionV2
- import Phoenix.HTML.Form
-
attr :disruption, DisruptionV2, required: true
attr :icon_paths, :map, required: true
@@ -148,11 +148,7 @@ defmodule ArrowWeb.EditDisruptionForm do
{:ok, socket}
end
- def handle_event(
- "validate",
- %{"disruption_v2" => disruption_v2_params},
- socket
- ) do
+ def handle_event("validate", %{"disruption_v2" => disruption_v2_params}, socket) do
form =
socket.assigns.disruption
|> Disruptions.change_disruption_v2(disruption_v2_params)
diff --git a/lib/arrow_web/components/edit_hastus_export_form.ex b/lib/arrow_web/components/edit_hastus_export_form.ex
index 5f71e43e4..0f7f32f0f 100644
--- a/lib/arrow_web/components/edit_hastus_export_form.ex
+++ b/lib/arrow_web/components/edit_hastus_export_form.ex
@@ -366,11 +366,7 @@ defmodule ArrowWeb.EditHastusExportForm do
{:noreply, socket}
end
- def handle_event(
- "delete_service_date",
- %{"service_index" => service_index, "date_index" => date_index},
- socket
- ) do
+ def handle_event("delete_service_date", %{"service_index" => service_index, "date_index" => date_index}, socket) do
{service_index, _} = Integer.parse(service_index)
{date_index, _} = Integer.parse(date_index)
@@ -490,11 +486,7 @@ defmodule ArrowWeb.EditHastusExportForm do
{:noreply, socket}
end
- defp handle_progress(
- :hastus_export,
- %UploadEntry{client_name: client_name} = entry,
- socket
- ) do
+ defp handle_progress(:hastus_export, %UploadEntry{client_name: client_name} = entry, socket) do
socket = socket |> clear_flash() |> assign(error: nil)
case consume_uploaded_entry(
@@ -557,8 +549,7 @@ defmodule ArrowWeb.EditHastusExportForm do
not MapSet.subset?(active_day_of_weeks, relevant_day_of_weeks)
end
- defp get_service_date_warning(_service_id, start_date, end_date)
- when start_date == "" or end_date == "" do
+ defp get_service_date_warning(_service_id, start_date, end_date) when start_date == "" or end_date == "" do
""
end
@@ -587,14 +578,11 @@ defmodule ArrowWeb.EditHastusExportForm do
defp error_to_string(:not_accepted), do: "You have selected an unacceptable file type"
defp error_to_string(_), do: "Upload failed. Please try again or contact an engineer"
- defp get_invalid_trips_file_name(:trips_with_invalid_shapes),
- do: "trips_with_invalid_shapes.txt"
+ defp get_invalid_trips_file_name(:trips_with_invalid_shapes), do: "trips_with_invalid_shapes.txt"
- defp get_invalid_trips_file_name(:trips_with_invalid_blocks),
- do: "trips_with_invalid_blocks.txt"
+ defp get_invalid_trips_file_name(:trips_with_invalid_blocks), do: "trips_with_invalid_blocks.txt"
- defp get_invalid_trips_error_message({:trips_with_invalid_shapes, _}),
- do: "Some trips have invalid or missing shapes."
+ defp get_invalid_trips_error_message({:trips_with_invalid_shapes, _}), do: "Some trips have invalid or missing shapes."
defp get_invalid_trips_error_message({:trips_with_invalid_blocks, _}),
do: "Some trips have invalid or missing block IDs."
diff --git a/lib/arrow_web/components/edit_limit_form.ex b/lib/arrow_web/components/edit_limit_form.ex
index 4a356ef08..e775018bd 100644
--- a/lib/arrow_web/components/edit_limit_form.ex
+++ b/lib/arrow_web/components/edit_limit_form.ex
@@ -2,13 +2,13 @@ defmodule ArrowWeb.EditLimitForm do
@moduledoc false
use ArrowWeb, :live_component
+ import Ecto.Query, only: [from: 2]
+ import Phoenix.HTML.Form
+
alias Arrow.Disruptions.Limit
alias Arrow.Limits
alias Arrow.Limits.LimitDayOfWeek
- import Phoenix.HTML.Form
- import Ecto.Query, only: [from: 2]
-
attr :limit, Limit, required: true
attr :icon_paths, :map, required: true
@@ -237,17 +237,17 @@ defmodule ArrowWeb.EditLimitForm do
defp get_stops_for_route(nil), do: []
defp get_stops_for_route(route_id) do
- Arrow.Repo.all(
- from t in Arrow.Gtfs.Trip,
- where: t.route_id == ^route_id and t.direction_id == 0 and t.service_id == "canonical",
- join: st in Arrow.Gtfs.StopTime,
- on: t.id == st.trip_id,
- join: s in Arrow.Gtfs.Stop,
- on: s.id == st.stop_id,
- where: s.location_type == :stop_platform,
- select: s,
- order_by: st.stop_sequence
+ from(t in Arrow.Gtfs.Trip,
+ where: t.route_id == ^route_id and t.direction_id == 0 and t.service_id == "canonical",
+ join: st in Arrow.Gtfs.StopTime,
+ on: t.id == st.trip_id,
+ join: s in Arrow.Gtfs.Stop,
+ on: s.id == st.stop_id,
+ where: s.location_type == :stop_platform,
+ select: s,
+ order_by: st.stop_sequence
)
+ |> Arrow.Repo.all()
|> Enum.uniq_by(& &1.parent_station_id)
|> Enum.map(&{&1.name, &1.parent_station_id})
end
@@ -257,8 +257,7 @@ defmodule ArrowWeb.EditLimitForm do
# doesn't work for these subforms because the hidden inputs mark the
# subform as used. So instead we check the user controlled values of each of
# these subforms.
- form[:limit_day_of_weeks].value
- |> Enum.any?(fn dow ->
+ Enum.any?(form[:limit_day_of_weeks].value, fn dow ->
dow_form =
case dow do
%Ecto.Changeset{} = dow_changeset ->
@@ -282,8 +281,7 @@ defmodule ArrowWeb.EditLimitForm do
end)
end
- defp get_limit_date_range_warning(end_date)
- when end_date in ["", nil] do
+ defp get_limit_date_range_warning(end_date) when end_date in ["", nil] do
""
end
diff --git a/lib/arrow_web/components/edit_replacement_service_form.ex b/lib/arrow_web/components/edit_replacement_service_form.ex
index f5b815701..fcecafcc1 100644
--- a/lib/arrow_web/components/edit_replacement_service_form.ex
+++ b/lib/arrow_web/components/edit_replacement_service_form.ex
@@ -214,7 +214,7 @@ defmodule ArrowWeb.EditReplacementServiceForm do
end
defp get_shuttle_map_props(shuttle_id) do
- %{layers: Shuttles.get_shuttle!(shuttle_id).routes |> ShapeView.routes_to_layers()}
+ %{layers: ShapeView.routes_to_layers(Shuttles.get_shuttle!(shuttle_id).routes)}
end
defp empty_input_value?("") do
diff --git a/lib/arrow_web/components/stop_input.ex b/lib/arrow_web/components/stop_input.ex
index 9dd96771f..303402bc0 100644
--- a/lib/arrow_web/components/stop_input.ex
+++ b/lib/arrow_web/components/stop_input.ex
@@ -78,9 +78,7 @@ defmodule ArrowWeb.StopInput do
end
@spec option_for_stop(Stop.t() | GtfsStop.t()) :: {String.t(), String.t()}
- defp option_for_stop(%Stop{stop_id: stop_id} = stop),
- do: {"#{stop_id} - #{Shuttles.stop_display_name(stop)}", stop_id}
+ defp option_for_stop(%Stop{stop_id: stop_id} = stop), do: {"#{stop_id} - #{Shuttles.stop_display_name(stop)}", stop_id}
- defp option_for_stop(%GtfsStop{id: id} = gtfs_stop),
- do: {"#{id} - #{Shuttles.stop_display_name(gtfs_stop)}", id}
+ defp option_for_stop(%GtfsStop{id: id} = gtfs_stop), do: {"#{id} - #{Shuttles.stop_display_name(gtfs_stop)}", id}
end
diff --git a/lib/arrow_web/controllers/api/adjustment_controller.ex b/lib/arrow_web/controllers/api/adjustment_controller.ex
index 5e3f33aeb..b534fb5ec 100644
--- a/lib/arrow_web/controllers/api/adjustment_controller.ex
+++ b/lib/arrow_web/controllers/api/adjustment_controller.ex
@@ -1,13 +1,16 @@
defmodule ArrowWeb.API.AdjustmentController do
use ArrowWeb, :controller
- alias Arrow.{Adjustment, Repo}
+
import Ecto.Query
+ alias Arrow.Adjustment
+ alias Arrow.Repo
+
@filters ~w{route_id source}
@spec index(Plug.Conn.t(), map()) :: Plug.Conn.t()
def index(conn, params) do
- query = params |> take_filters |> format_filters |> build_query
+ query = params |> take_filters() |> format_filters() |> build_query()
render(conn, "index.json-api", data: Repo.all(query))
end
@@ -18,11 +21,9 @@ defmodule ArrowWeb.API.AdjustmentController do
end
@spec compose_query({String.t(), String.t()}, Ecto.Query.t()) :: Ecto.Query.t()
- defp compose_query({"route_id", route_id}, query),
- do: from(d in query, where: d.route_id == ^route_id)
+ defp compose_query({"route_id", route_id}, query), do: from(d in query, where: d.route_id == ^route_id)
- defp compose_query({"source", source}, query),
- do: from(d in query, where: d.source == ^source)
+ defp compose_query({"source", source}, query), do: from(d in query, where: d.source == ^source)
@spec take_filters(map()) :: map()
defp take_filters(params) do
@@ -35,8 +36,7 @@ defmodule ArrowWeb.API.AdjustmentController do
end
@spec do_format_filter({String.t(), String.t()}) :: [{String.t(), String.t()}]
- defp do_format_filter({filter, value})
- when filter in @filters do
+ defp do_format_filter({filter, value}) when filter in @filters do
[{filter, value}]
end
diff --git a/lib/arrow_web/controllers/api/db_dump_controller.ex b/lib/arrow_web/controllers/api/db_dump_controller.ex
index eace289f2..f679ee4a0 100644
--- a/lib/arrow_web/controllers/api/db_dump_controller.ex
+++ b/lib/arrow_web/controllers/api/db_dump_controller.ex
@@ -1,7 +1,8 @@
defmodule ArrowWeb.API.DBDumpController do
- alias ArrowWeb.Plug.Authorize
use ArrowWeb, :controller
+ alias ArrowWeb.Plug.Authorize
+
plug(Authorize, :db_dump)
@spec show(Plug.Conn.t(), map()) :: Plug.Conn.t()
diff --git a/lib/arrow_web/controllers/api/disruption_controller.ex b/lib/arrow_web/controllers/api/disruption_controller.ex
index ed6959191..83eb7eed8 100644
--- a/lib/arrow_web/controllers/api/disruption_controller.ex
+++ b/lib/arrow_web/controllers/api/disruption_controller.ex
@@ -1,7 +1,11 @@
defmodule ArrowWeb.API.DisruptionController do
use ArrowWeb, :controller
+
import Ecto.Query
- alias Arrow.{Disruption, DisruptionRevision, Repo}
+
+ alias Arrow.Disruption
+ alias Arrow.DisruptionRevision
+ alias Arrow.Repo
alias ArrowWeb.API.Util
@spec index(Plug.Conn.t(), map()) :: Plug.Conn.t()
@@ -10,15 +14,16 @@ defmodule ArrowWeb.API.DisruptionController do
{:ok, end_date} <- parse_date_param(params, "end_date"),
:ok <- Util.validate_date_order(start_date, end_date) do
data =
- from(d in Disruption,
- join: dr in assoc(d, :revisions),
- order_by: [d.id, dr.id],
- where: dr.id >= d.published_revision_id or is_nil(d.published_revision_id),
- where: dr.is_active,
- where: dr.start_date <= ^end_date and dr.end_date >= ^start_date,
- preload: [revisions: {dr, ^DisruptionRevision.associations()}]
+ Repo.all(
+ from(d in Disruption,
+ join: dr in assoc(d, :revisions),
+ order_by: [d.id, dr.id],
+ where: dr.id >= d.published_revision_id or is_nil(d.published_revision_id),
+ where: dr.is_active,
+ where: dr.start_date <= ^end_date and dr.end_date >= ^start_date,
+ preload: [revisions: {dr, ^DisruptionRevision.associations()}]
+ )
)
- |> Repo.all()
render(conn, "index.json-api", data: data)
else
diff --git a/lib/arrow_web/controllers/api/gtfs_import_controller.ex b/lib/arrow_web/controllers/api/gtfs_import_controller.ex
index cd0d9fcdf..605f0e34e 100644
--- a/lib/arrow_web/controllers/api/gtfs_import_controller.ex
+++ b/lib/arrow_web/controllers/api/gtfs_import_controller.ex
@@ -1,11 +1,15 @@
defmodule ArrowWeb.API.GtfsImportController do
use ArrowWeb, :controller
-
use Plug.ErrorHandler
- require Logger
import Ecto.Query
+ alias Arrow.Gtfs.Archive
+ alias Arrow.Gtfs.ImportWorker
+ alias Arrow.Gtfs.ValidationWorker
+
+ require Logger
+
@type error_tuple :: {:error, term} | {:error, status :: atom, term}
@doc """
@@ -14,7 +18,7 @@ defmodule ArrowWeb.API.GtfsImportController do
When unsuccessful, responds with non-200 status and an error message in plaintext.
"""
def enqueue_import(conn, _) do
- enqueue_job(conn, Arrow.Gtfs.ImportWorker)
+ enqueue_job(conn, ImportWorker)
end
@doc """
@@ -31,7 +35,7 @@ defmodule ArrowWeb.API.GtfsImportController do
"""
def import_status(conn, params) do
case Map.fetch(params, "id") do
- {:ok, id} -> check_status(conn, id, Arrow.Gtfs.ImportWorker, "import")
+ {:ok, id} -> check_status(conn, id, ImportWorker, "import")
:error -> send_resp(conn, :bad_request, "missing `id` query parameter")
end
end
@@ -42,7 +46,7 @@ defmodule ArrowWeb.API.GtfsImportController do
When unsuccessful, responds with non-200 status and an error message in plaintext.
"""
def enqueue_validation(conn, _) do
- enqueue_job(conn, Arrow.Gtfs.ValidationWorker)
+ enqueue_job(conn, ValidationWorker)
end
@doc """
@@ -59,7 +63,7 @@ defmodule ArrowWeb.API.GtfsImportController do
"""
def validation_status(conn, params) do
case Map.fetch(params, "id") do
- {:ok, id} -> check_status(conn, id, Arrow.Gtfs.ValidationWorker, "validation")
+ {:ok, id} -> check_status(conn, id, ValidationWorker, "validation")
:error -> send_resp(conn, :bad_request, "missing `id` query parameter")
end
end
@@ -71,14 +75,13 @@ defmodule ArrowWeb.API.GtfsImportController do
See Arrow.Gtfs.JobHelper.status_filter for available filters.
"""
- def check_jobs(conn, %{"status_filter" => status_filter})
- when status_filter in @status_filters do
+ def check_jobs(conn, %{"status_filter" => status_filter}) when status_filter in @status_filters do
status_filter = String.to_existing_atom(status_filter)
info = %{
queue_state: Oban.check_queue(queue: :gtfs_import),
- import_jobs: Arrow.Gtfs.ImportWorker.check_jobs(status_filter),
- validate_jobs: Arrow.Gtfs.ValidationWorker.check_jobs(status_filter)
+ import_jobs: ImportWorker.check_jobs(status_filter),
+ validate_jobs: ValidationWorker.check_jobs(status_filter)
}
json(conn, info)
@@ -134,22 +137,24 @@ defmodule ArrowWeb.API.GtfsImportController do
{:ok, s3_uri} <- upload_zip(zip_iodata) do
changeset = worker_mod.new(%{s3_uri: s3_uri, archive_version: version})
- case Oban.insert(changeset) do
- {:ok, %Oban.Job{conflict?: true} = job} ->
- {:error,
- "tried to insert a duplicate GTFS import or validation job existing_job_id=#{job.id} archive_version=\"#{version}\" worker=#{inspect(worker_mod)}"}
+ case_result =
+ case Oban.insert(changeset) do
+ {:ok, %Oban.Job{conflict?: true} = job} ->
+ {:error,
+ "tried to insert a duplicate GTFS import or validation job existing_job_id=#{job.id} archive_version=\"#{version}\" worker=#{inspect(worker_mod)}"}
- {:ok, job} ->
- Logger.info(
- "job enqueued for GTFS archive job_id=#{job.id} archive_version=\"#{version}\" worker=#{inspect(worker_mod)}"
- )
+ {:ok, job} ->
+ Logger.info(
+ "job enqueued for GTFS archive job_id=#{job.id} archive_version=\"#{version}\" worker=#{inspect(worker_mod)}"
+ )
- {:ok, %{id: job.id}}
+ {:ok, %{id: job.id}}
- {:error, reason} ->
- {:error, :internal_server_error, "failed to enqueue job, reason: #{reason}"}
- end
- |> to_resp(conn)
+ {:error, reason} ->
+ {:error, :internal_server_error, "failed to enqueue job, reason: #{reason}"}
+ end
+
+ to_resp(case_result, conn)
else
# Returned when `read_whole_body` fails
{:error, reason, %Plug.Conn{} = conn} -> to_resp({:error, reason}, conn)
@@ -161,18 +166,20 @@ defmodule ArrowWeb.API.GtfsImportController do
defp check_status(conn, id, worker_mod, job_description) do
worker_name = inspect(worker_mod)
- with {:ok, id} <- parse_job_id(id) do
- job_status =
- Arrow.Repo.one(
- from job in Oban.Job,
- where: job.id == ^id,
- where: job.worker == ^worker_name,
- select: job.state
- )
+ with_result =
+ with {:ok, id} <- parse_job_id(id) do
+ job_status =
+ Arrow.Repo.one(
+ from job in Oban.Job,
+ where: job.id == ^id,
+ where: job.worker == ^worker_name,
+ select: job.state
+ )
+
+ report_job_status(job_status, "could not find #{job_description} job with id #{id}")
+ end
- report_job_status(job_status, "could not find #{job_description} job with id #{id}")
- end
- |> to_resp(conn)
+ to_resp(with_result, conn)
end
@spec report_job_status(String.t() | nil, String.t()) :: {:ok, term} | error_tuple
@@ -221,7 +228,7 @@ defmodule ArrowWeb.API.GtfsImportController do
@spec get_unzip(iodata) :: {:ok, Unzip.t()} | error_tuple
defp get_unzip(zip_iodata) do
zip_iodata
- |> Arrow.Gtfs.Archive.from_iodata()
+ |> Archive.from_iodata()
|> Unzip.new()
|> case do
{:ok, _} = success -> success
@@ -246,13 +253,12 @@ defmodule ArrowWeb.API.GtfsImportController do
@spec upload_zip(iodata) :: {:ok, String.t()} | error_tuple
defp upload_zip(zip_iodata) do
- case Arrow.Gtfs.Archive.upload_to_s3(zip_iodata) do
+ case Archive.upload_to_s3(zip_iodata) do
{:ok, _s3_uri} = success ->
success
{:error, reason} ->
- {:error, :internal_server_error,
- "failed to upload archive to S3, reason: #{inspect(reason)}"}
+ {:error, :internal_server_error, "failed to upload archive to S3, reason: #{inspect(reason)}"}
end
end
diff --git a/lib/arrow_web/controllers/api/notice_controller.ex b/lib/arrow_web/controllers/api/notice_controller.ex
index 1103069fb..382d13123 100644
--- a/lib/arrow_web/controllers/api/notice_controller.ex
+++ b/lib/arrow_web/controllers/api/notice_controller.ex
@@ -1,6 +1,8 @@
defmodule ArrowWeb.API.NoticeController do
- alias ArrowWeb.Plug.Authorize
use ArrowWeb, :controller
+
+ alias ArrowWeb.Plug.Authorize
+
require Logger
plug(Authorize, :publish_notice)
diff --git a/lib/arrow_web/controllers/api/service_schedule_controller.ex b/lib/arrow_web/controllers/api/service_schedule_controller.ex
index 5c5de79b1..3d41db550 100644
--- a/lib/arrow_web/controllers/api/service_schedule_controller.ex
+++ b/lib/arrow_web/controllers/api/service_schedule_controller.ex
@@ -51,8 +51,7 @@ defmodule ArrowWeb.API.ServiceScheduleController do
}
end
- conn
- |> json(response_body)
+ json(conn, response_body)
end
end
diff --git a/lib/arrow_web/controllers/api/shapes_controller.ex b/lib/arrow_web/controllers/api/shapes_controller.ex
index 194489fc7..2ec5a5cc6 100644
--- a/lib/arrow_web/controllers/api/shapes_controller.ex
+++ b/lib/arrow_web/controllers/api/shapes_controller.ex
@@ -1,5 +1,6 @@
defmodule ArrowWeb.API.ShapesController do
use ArrowWeb, :controller
+
alias Arrow.Shuttles
@spec index(Plug.Conn.t(), map()) :: Plug.Conn.t()
diff --git a/lib/arrow_web/controllers/api/shuttle_controller.ex b/lib/arrow_web/controllers/api/shuttle_controller.ex
index 88359dd3e..00ac7e7b5 100644
--- a/lib/arrow_web/controllers/api/shuttle_controller.ex
+++ b/lib/arrow_web/controllers/api/shuttle_controller.ex
@@ -1,5 +1,6 @@
defmodule ArrowWeb.API.ShuttleController do
use ArrowWeb, :controller
+
import Ecto.Query, only: [from: 2]
alias Arrow.Repo
@@ -9,16 +10,17 @@ defmodule ArrowWeb.API.ShuttleController do
@spec index(Conn.t(), map()) :: Conn.t()
def index(conn, _params) do
data =
- from(s in Shuttle,
- where: s.status == :active,
- join: r in assoc(s, :routes),
- join: rs in assoc(r, :route_stops),
- join: sh in assoc(r, :shape),
- left_join: gs in assoc(rs, :gtfs_stop),
- left_join: st in assoc(rs, :stop),
- preload: [routes: {r, route_stops: {rs, [:gtfs_stop, :stop]}, shape: sh}]
+ Repo.all(
+ from(s in Shuttle,
+ where: s.status == :active,
+ join: r in assoc(s, :routes),
+ join: rs in assoc(r, :route_stops),
+ join: sh in assoc(r, :shape),
+ left_join: gs in assoc(rs, :gtfs_stop),
+ left_join: st in assoc(rs, :stop),
+ preload: [routes: {r, route_stops: {rs, [:gtfs_stop, :stop]}, shape: sh}]
+ )
)
- |> Repo.all()
render(conn, "index.json-api", data: data)
end
diff --git a/lib/arrow_web/controllers/api/stops_controller.ex b/lib/arrow_web/controllers/api/stops_controller.ex
index 1f57de7cf..296c2e3fb 100644
--- a/lib/arrow_web/controllers/api/stops_controller.ex
+++ b/lib/arrow_web/controllers/api/stops_controller.ex
@@ -1,5 +1,6 @@
defmodule ArrowWeb.API.StopsController do
use ArrowWeb, :controller
+
alias Arrow.Stops
@spec index(Plug.Conn.t(), map()) :: Plug.Conn.t()
diff --git a/lib/arrow_web/controllers/api_json/disruption_view.ex b/lib/arrow_web/controllers/api_json/disruption_view.ex
index eb742df94..38b973e8c 100644
--- a/lib/arrow_web/controllers/api_json/disruption_view.ex
+++ b/lib/arrow_web/controllers/api_json/disruption_view.ex
@@ -2,14 +2,16 @@ defmodule ArrowWeb.API.DisruptionView do
use ArrowWeb, :html
use JaSerializer.PhoenixView
+ alias ArrowWeb.API.DisruptionRevisionView
+
attributes([:last_published_at])
has_many :revisions,
- serializer: ArrowWeb.API.DisruptionRevisionView,
+ serializer: DisruptionRevisionView,
include: true
has_one :published_revision,
- serializer: ArrowWeb.API.DisruptionRevisionView,
+ serializer: DisruptionRevisionView,
include: false
def published_revision(disruption, _conn) do
diff --git a/lib/arrow_web/controllers/api_json/stops_view.ex b/lib/arrow_web/controllers/api_json/stops_view.ex
index fa7ec518c..4415fec77 100644
--- a/lib/arrow_web/controllers/api_json/stops_view.ex
+++ b/lib/arrow_web/controllers/api_json/stops_view.ex
@@ -25,7 +25,7 @@ defmodule ArrowWeb.API.StopsView do
stop
|> Map.from_struct()
|> Enum.reject(fn {_, v} -> is_nil(v) end)
- |> Enum.into(%{})
+ |> Map.new()
|> Map.take(@fields)
end
diff --git a/lib/arrow_web/controllers/auth_controller.ex b/lib/arrow_web/controllers/auth_controller.ex
index 3b2082b51..d1e46884b 100644
--- a/lib/arrow_web/controllers/auth_controller.ex
+++ b/lib/arrow_web/controllers/auth_controller.ex
@@ -1,5 +1,6 @@
defmodule ArrowWeb.AuthController do
use ArrowWeb, :controller
+
require Logger
plug(Ueberauth)
@@ -59,10 +60,7 @@ defmodule ArrowWeb.AuthController do
|> redirect(to: redirect_path)
end
- def callback(
- %{assigns: %{ueberauth_failure: %Ueberauth.Failure{errors: errors}}} = conn,
- _params
- ) do
+ def callback(%{assigns: %{ueberauth_failure: %Ueberauth.Failure{errors: errors}}} = conn, _params) do
Logger.warning("failed to authenticate errors=#{inspect(errors)}")
send_resp(conn, 401, "unauthenticated")
diff --git a/lib/arrow_web/controllers/disruption_controller.ex b/lib/arrow_web/controllers/disruption_controller.ex
index f8b53eca0..3fe54a1df 100644
--- a/lib/arrow_web/controllers/disruption_controller.ex
+++ b/lib/arrow_web/controllers/disruption_controller.ex
@@ -1,8 +1,11 @@
defmodule ArrowWeb.DisruptionController do
use ArrowWeb, :controller
- alias __MODULE__.{Filters, Index}
- alias Arrow.{Adjustment, Disruption, DisruptionRevision}
+ alias __MODULE__.Filters
+ alias __MODULE__.Index
+ alias Arrow.Adjustment
+ alias Arrow.Disruption
+ alias Arrow.DisruptionRevision
alias ArrowWeb.ErrorHelpers
alias ArrowWeb.Plug.Authorize
alias Ecto.Changeset
@@ -14,10 +17,7 @@ defmodule ArrowWeb.DisruptionController do
plug(Authorize, :delete_disruption when action in [:delete])
@spec update_row_status(Conn.t(), Conn.params()) :: Conn.t()
- def update_row_status(%{assigns: %{current_user: user}} = conn, %{
- "id" => id,
- "revision" => attrs
- }) do
+ def update_row_status(%{assigns: %{current_user: user}} = conn, %{"id" => id, "revision" => attrs}) do
{:ok, _} = Disruption.update(id, user.id, attrs)
conn
@@ -40,7 +40,7 @@ defmodule ArrowWeb.DisruptionController do
@spec new(Conn.t(), Conn.params()) :: Conn.t()
def new(conn, _params) do
- changeset = DisruptionRevision.new() |> Changeset.change()
+ changeset = Changeset.change(DisruptionRevision.new())
render(conn, "new.html", adjustments: Adjustment.all(), changeset: changeset, note_body: "")
end
diff --git a/lib/arrow_web/controllers/disruption_controller/filters.ex b/lib/arrow_web/controllers/disruption_controller/filters.ex
index e16680ee4..ca459143b 100644
--- a/lib/arrow_web/controllers/disruption_controller/filters.ex
+++ b/lib/arrow_web/controllers/disruption_controller/filters.ex
@@ -5,20 +5,14 @@ defmodule ArrowWeb.DisruptionController.Filters do
index (including e.g. sorting, in the table view) are considered filters.
"""
- alias __MODULE__.{Calendar, Table}
+ @behaviour ArrowWeb.DisruptionController.Filters.Behaviour
+
import __MODULE__.Helpers
- @empty_set MapSet.new()
+ alias __MODULE__.Calendar
+ alias __MODULE__.Table
- defmodule Behaviour do
- @moduledoc "Required behaviour for `Filters` sub-modules."
- @callback from_params(Plug.Conn.params()) :: struct
- @callback resettable?(struct) :: boolean
- @callback reset(struct) :: struct
- @callback to_params(struct) :: Plug.Conn.params()
- end
-
- @behaviour Behaviour
+ @empty_set MapSet.new()
@type t :: %__MODULE__{
kinds: MapSet.t(atom()),
@@ -43,8 +37,7 @@ defmodule ArrowWeb.DisruptionController.Filters do
view_mod = if(params["view"] == "calendar", do: Calendar, else: Table)
%__MODULE__{
- kinds:
- params |> Map.get("kinds", []) |> Enum.map(&String.to_existing_atom/1) |> MapSet.new(),
+ kinds: params |> Map.get("kinds", []) |> MapSet.new(&String.to_existing_atom/1),
only_approved?: not is_nil(params["only_approved"]),
search: if(params["search"] in [nil, ""], do: nil, else: params["search"]),
view: view_mod.from_params(params)
@@ -69,7 +62,7 @@ defmodule ArrowWeb.DisruptionController.Filters do
@spec toggle_only_approved(t()) :: t()
def toggle_only_approved(%__MODULE__{only_approved?: only_approved} = filters) do
- %__MODULE__{filters | only_approved?: !only_approved}
+ %{filters | only_approved?: !only_approved}
end
@spec toggle_view(%__MODULE__{}) :: %__MODULE__{}
diff --git a/lib/arrow_web/controllers/disruption_controller/filters/behaviour.ex b/lib/arrow_web/controllers/disruption_controller/filters/behaviour.ex
new file mode 100644
index 000000000..61134819b
--- /dev/null
+++ b/lib/arrow_web/controllers/disruption_controller/filters/behaviour.ex
@@ -0,0 +1,7 @@
+defmodule ArrowWeb.DisruptionController.Filters.Behaviour do
+ @moduledoc "Required behaviour for `Filters` sub-modules."
+ @callback from_params(Plug.Conn.params()) :: struct
+ @callback resettable?(struct) :: boolean
+ @callback reset(struct) :: struct
+ @callback to_params(struct) :: Plug.Conn.params()
+end
diff --git a/lib/arrow_web/controllers/disruption_controller/index.ex b/lib/arrow_web/controllers/disruption_controller/index.ex
index 3ed749937..8c145ee5f 100644
--- a/lib/arrow_web/controllers/disruption_controller/index.ex
+++ b/lib/arrow_web/controllers/disruption_controller/index.ex
@@ -3,15 +3,18 @@ defmodule ArrowWeb.DisruptionController.Index do
Builds and executes the database queries for the disruptions index.
"""
- alias Arrow.{Adjustment, Disruption, Repo}
- alias ArrowWeb.DisruptionController.Filters
import Ecto.Query
+ alias Arrow.Adjustment
+ alias Arrow.Disruption
+ alias Arrow.Repo
+ alias ArrowWeb.DisruptionController.Filters
+
@spec all(Filters.t() | nil) :: [Disruption.t()]
def all(filters \\ nil), do: base_query() |> apply_filters(filters) |> Repo.all()
defp apply_filter({:include_past?, false}, query) do
- cutoff = Date.utc_today() |> Date.add(-7)
+ cutoff = Date.add(Date.utc_today(), -7)
from [revisions: r] in query, where: is_nil(r.end_date) or r.end_date > ^cutoff
end
diff --git a/lib/arrow_web/controllers/disruption_html.ex b/lib/arrow_web/controllers/disruption_html.ex
index 870f9e90e..f6191f152 100644
--- a/lib/arrow_web/controllers/disruption_html.ex
+++ b/lib/arrow_web/controllers/disruption_html.ex
@@ -1,9 +1,12 @@
defmodule ArrowWeb.DisruptionView do
use ArrowWeb, :html
- alias Arrow.{Adjustment, DisruptionRevision, Permissions}
- alias __MODULE__.{DaysOfWeek, Form}
alias __MODULE__.Calendar, as: DCalendar
+ alias __MODULE__.DaysOfWeek
+ alias __MODULE__.Form
+ alias Arrow.Adjustment
+ alias Arrow.DisruptionRevision
+ alias Arrow.Permissions
alias ArrowWeb.DisruptionController.Filters
alias Phoenix.Controller
@@ -119,6 +122,6 @@ defmodule ArrowWeb.DisruptionView do
end
def date(dt) do
- DateTime.shift_zone!(dt, "America/New_York") |> Calendar.strftime("%m/%d/%y")
+ dt |> DateTime.shift_zone!("America/New_York") |> Calendar.strftime("%m/%d/%y")
end
end
diff --git a/lib/arrow_web/controllers/disruption_html/calendar.ex b/lib/arrow_web/controllers/disruption_html/calendar.ex
index 16f4f75b1..4a433f18f 100644
--- a/lib/arrow_web/controllers/disruption_html/calendar.ex
+++ b/lib/arrow_web/controllers/disruption_html/calendar.ex
@@ -1,8 +1,10 @@
defmodule ArrowWeb.DisruptionView.Calendar do
@moduledoc "An interface between Ecto structs and the `DisruptionCalendar` React component."
- alias Arrow.{Adjustment, Disruption, DisruptionRevision}
+ alias Arrow.Adjustment
+ alias Arrow.Disruption
alias Arrow.Disruption.DayOfWeek
+ alias Arrow.DisruptionRevision
alias ArrowWeb.Endpoint
alias ArrowWeb.Router.Helpers, as: Routes
@@ -17,10 +19,7 @@ defmodule ArrowWeb.DisruptionView.Calendar do
Enum.flat_map(disruptions, &events/1)
end
- defp events(%Disruption{
- id: id,
- revisions: [%{adjustments: [], adjustment_kind: kind} = revision]
- }) do
+ defp events(%Disruption{id: id, revisions: [%{adjustments: [], adjustment_kind: kind} = revision]}) do
events(id, revision, kind, "(disruption #{id})")
end
@@ -45,7 +44,8 @@ defmodule ArrowWeb.DisruptionView.Calendar do
day_numbers = MapSet.new(days_of_week, &DayOfWeek.day_number/1)
excluded_dates = MapSet.new(exceptions, & &1.excluded_date)
- Date.range(start_date, end_date)
+ start_date
+ |> Date.range(end_date)
|> Enum.filter(&(Date.day_of_week(&1) in day_numbers))
|> Enum.reject(&(&1 in excluded_dates))
|> Enum.chunk_while([], &chunk_dates/2, &chunk_dates/1)
diff --git a/lib/arrow_web/controllers/disruption_html/days_of_week.ex b/lib/arrow_web/controllers/disruption_html/days_of_week.ex
index e0f149ef5..912edddd2 100644
--- a/lib/arrow_web/controllers/disruption_html/days_of_week.ex
+++ b/lib/arrow_web/controllers/disruption_html/days_of_week.ex
@@ -65,19 +65,18 @@ defmodule ArrowWeb.DisruptionView.DaysOfWeek do
{format_day(day_name, format), describe_times(start_time, end_time)}
end
- defp describe_days_with_contiguous_times(
- %{day_name: first_day, start_time: start_time},
- %{day_name: last_day, end_time: end_time}
- ) do
+ defp describe_days_with_contiguous_times(%{day_name: first_day, start_time: start_time}, %{
+ day_name: last_day,
+ end_time: end_time
+ }) do
from = format_day(first_day, :short) <> " " <> describe_start_time(start_time)
to = format_day(last_day, :short) <> " " <> describe_end_time(end_time)
[from <> " – " <> to]
end
- defp describe_days_with_same_times(
- %{day_name: first_day, start_time: start_time, end_time: end_time},
- %{day_name: last_day}
- ) do
+ defp describe_days_with_same_times(%{day_name: first_day, start_time: start_time, end_time: end_time}, %{
+ day_name: last_day
+ }) do
[
format_day(first_day, :short) <> " – " <> format_day(last_day, :short),
describe_times(start_time, end_time)
diff --git a/lib/arrow_web/controllers/disruption_html/form.ex b/lib/arrow_web/controllers/disruption_html/form.ex
index 7cae4781f..a29fbace7 100644
--- a/lib/arrow_web/controllers/disruption_html/form.ex
+++ b/lib/arrow_web/controllers/disruption_html/form.ex
@@ -1,8 +1,9 @@
defmodule ArrowWeb.DisruptionView.Form do
@moduledoc "An interface between Ecto structs and the `DisruptionForm` React component."
- alias Arrow.{Adjustment, DisruptionRevision}
+ alias Arrow.Adjustment
alias Arrow.Disruption.DayOfWeek
+ alias Arrow.DisruptionRevision
alias ArrowWeb.DisruptionView
alias ArrowWeb.Router.Helpers, as: Routes
alias Ecto.Changeset
@@ -35,9 +36,9 @@ defmodule ArrowWeb.DisruptionView.Form do
"rowApproved" => row_approved,
"adjustmentKind" => adjustment_kind,
"adjustments" => Enum.map(adjustments, &encode_adjustment/1),
- "daysOfWeek" => days_of_week |> Enum.map(&encode_day_of_week/1) |> Enum.into(%{}),
+ "daysOfWeek" => Map.new(days_of_week, &encode_day_of_week/1),
"exceptions" => Enum.map(exceptions, & &1.excluded_date),
- "tripShortNames" => trip_short_names |> Enum.map_join(",", & &1.trip_short_name),
+ "tripShortNames" => Enum.map_join(trip_short_names, ",", & &1.trip_short_name),
"title" => title
},
"iconPaths" => icon_paths(conn),
@@ -53,18 +54,13 @@ defmodule ArrowWeb.DisruptionView.Form do
}
end
- defp encode_day_of_week(%DayOfWeek{
- day_name: day_name,
- start_time: start_time,
- end_time: end_time
- }) do
+ defp encode_day_of_week(%DayOfWeek{day_name: day_name, start_time: start_time, end_time: end_time}) do
{day_name, %{"start" => start_time, "end" => end_time}}
end
defp icon_paths(conn) do
Adjustment.kinds()
- |> Enum.map(&{&1, DisruptionView.adjustment_kind_icon_path(conn, &1)})
- |> Enum.into(%{})
+ |> Map.new(&{&1, DisruptionView.adjustment_kind_icon_path(conn, &1)})
|> Map.put(:subway, Routes.static_path(conn, "/images/icon-mode-subway-small.svg"))
end
end
diff --git a/lib/arrow_web/controllers/disruption_v2_controller.ex b/lib/arrow_web/controllers/disruption_v2_controller.ex
index 431ceeebc..f47385ff1 100644
--- a/lib/arrow_web/controllers/disruption_v2_controller.ex
+++ b/lib/arrow_web/controllers/disruption_v2_controller.ex
@@ -1,7 +1,8 @@
defmodule ArrowWeb.DisruptionV2Controller do
use ArrowWeb, :controller
- alias ArrowWeb.DisruptionV2Controller.{Filters, Index}
+ alias ArrowWeb.DisruptionV2Controller.Filters
+ alias ArrowWeb.DisruptionV2Controller.Index
alias ArrowWeb.Plug.Authorize
alias Plug.Conn
diff --git a/lib/arrow_web/controllers/disruption_v2_controller/filters.ex b/lib/arrow_web/controllers/disruption_v2_controller/filters.ex
index d9c64b96f..ef9eb0f32 100644
--- a/lib/arrow_web/controllers/disruption_v2_controller/filters.ex
+++ b/lib/arrow_web/controllers/disruption_v2_controller/filters.ex
@@ -5,20 +5,14 @@ defmodule ArrowWeb.DisruptionV2Controller.Filters do
index (including e.g. sorting, in the table view) are considered filters.
"""
- alias __MODULE__.{Calendar, Table}
+ @behaviour ArrowWeb.DisruptionV2Controller.Filters.Behaviour
+
import __MODULE__.Helpers
- @empty_set MapSet.new()
+ alias __MODULE__.Calendar
+ alias __MODULE__.Table
- defmodule Behaviour do
- @moduledoc "Required behaviour for `Filters` sub-modules."
- @callback from_params(Plug.Conn.params()) :: struct
- @callback resettable?(struct) :: boolean
- @callback reset(struct) :: struct
- @callback to_params(struct) :: Plug.Conn.params()
- end
-
- @behaviour Behaviour
+ @empty_set MapSet.new()
@type t :: %__MODULE__{
kinds: MapSet.t(atom()),
@@ -47,8 +41,7 @@ defmodule ArrowWeb.DisruptionV2Controller.Filters do
else: params["search"]
%__MODULE__{
- kinds:
- params |> Map.get("kinds", []) |> Enum.map(&String.to_existing_atom/1) |> MapSet.new(),
+ kinds: params |> Map.get("kinds", []) |> MapSet.new(&String.to_existing_atom/1),
only_approved?: not is_nil(params["only_approved"]),
search: search,
view: view_mod.from_params(params)
@@ -73,7 +66,7 @@ defmodule ArrowWeb.DisruptionV2Controller.Filters do
@spec toggle_only_approved(t()) :: t()
def toggle_only_approved(%__MODULE__{only_approved?: only_approved} = filters) do
- %__MODULE__{filters | only_approved?: !only_approved}
+ %{filters | only_approved?: !only_approved}
end
@spec toggle_view(%__MODULE__{}) :: %__MODULE__{}
diff --git a/lib/arrow_web/controllers/disruption_v2_controller/filters/behaviour.ex b/lib/arrow_web/controllers/disruption_v2_controller/filters/behaviour.ex
new file mode 100644
index 000000000..27af1a6d0
--- /dev/null
+++ b/lib/arrow_web/controllers/disruption_v2_controller/filters/behaviour.ex
@@ -0,0 +1,7 @@
+defmodule ArrowWeb.DisruptionV2Controller.Filters.Behaviour do
+ @moduledoc "Required behaviour for `Filters` sub-modules."
+ @callback from_params(Plug.Conn.params()) :: struct
+ @callback resettable?(struct) :: boolean
+ @callback reset(struct) :: struct
+ @callback to_params(struct) :: Plug.Conn.params()
+end
diff --git a/lib/arrow_web/controllers/disruption_v2_controller/index.ex b/lib/arrow_web/controllers/disruption_v2_controller/index.ex
index fc1902d0c..e60892aeb 100644
--- a/lib/arrow_web/controllers/disruption_v2_controller/index.ex
+++ b/lib/arrow_web/controllers/disruption_v2_controller/index.ex
@@ -24,8 +24,7 @@ defmodule ArrowWeb.DisruptionV2Controller.Index do
@empty_set MapSet.new()
@spec all(Filters.t() | nil) :: [DisruptionV2.t()]
- def all(filters),
- do: apply_to_disruptions(Disruptions.list_disruptionsv2(), filters)
+ def all(filters), do: apply_to_disruptions(Disruptions.list_disruptionsv2(), filters)
@spec apply_to_disruptions([DisruptionV2.t()], Filters.t()) :: [DisruptionV2.t()]
def apply_to_disruptions(disruptions, filters) do
@@ -67,8 +66,7 @@ defmodule ArrowWeb.DisruptionV2Controller.Index do
)
end
- defp apply_kinds_filter(_disruption, %Filters{kinds: kinds}) when kinds == @empty_set,
- do: true
+ defp apply_kinds_filter(_disruption, %Filters{kinds: kinds}) when kinds == @empty_set, do: true
defp apply_kinds_filter(disruption, %Filters{kinds: kinds}) do
kind_routes = kinds |> Enum.map(&@disruption_kind_routes[&1]) |> List.flatten()
@@ -76,14 +74,12 @@ defmodule ArrowWeb.DisruptionV2Controller.Index do
Enum.any?(disruption.limits, fn limit -> limit.route.id in kind_routes end)
end
- defp apply_only_approved_filter(disruption, %Filters{only_approved?: true}),
- do: disruption.is_active
+ defp apply_only_approved_filter(disruption, %Filters{only_approved?: true}), do: disruption.is_active
- defp apply_only_approved_filter(_disruption, %Filters{only_approved?: false}),
- do: true
+ defp apply_only_approved_filter(_disruption, %Filters{only_approved?: false}), do: true
defp apply_past_filter(disruption, %Filters{view: %Table{include_past?: false}}) do
- cutoff = Date.utc_today() |> Date.add(-7)
+ cutoff = Date.add(Date.utc_today(), -7)
{_start_date, end_date} = Disruptions.start_end_dates(disruption)
@@ -111,10 +107,7 @@ defmodule ArrowWeb.DisruptionV2Controller.Index do
end)
end
- defp replacement_services_contains?(
- %DisruptionV2{replacement_services: replacement_services},
- search
- ) do
+ defp replacement_services_contains?(%DisruptionV2{replacement_services: replacement_services}, search) do
Enum.any?(replacement_services, fn replacement_service ->
string_contains?(replacement_service.shuttle.shuttle_name, search)
end)
diff --git a/lib/arrow_web/controllers/disruption_v2_html.ex b/lib/arrow_web/controllers/disruption_v2_html.ex
index 8af8484f8..32c957ee9 100644
--- a/lib/arrow_web/controllers/disruption_v2_html.ex
+++ b/lib/arrow_web/controllers/disruption_v2_html.ex
@@ -116,7 +116,7 @@ defmodule ArrowWeb.DisruptionV2View do
defp limits(disruption) do
limits = Enum.map(disruption.limits, &Map.put(&1, :derived?, false))
- derived_limits =
+ for_result =
for export <- disruption.hastus_exports,
%{import?: true} = service <- export.services,
derived_limit <- service.derived_limits do
@@ -127,12 +127,13 @@ defmodule ArrowWeb.DisruptionV2View do
end_stop: derived_limit.end_stop
}
end
- # Because incomplete details are shown for derived limits in this view,
- # ones that are actually different (e.g. are derived from different services with different service dates)
- # can appear as duplicates.
- # Deduplicate them on the info shown, to avoid confusion.
- |> Enum.uniq_by(&{&1.line_id, &1.start_stop.name, &1.end_stop.name})
+    # Because incomplete details are shown for derived limits in this view,
+    # ones that are actually different (e.g. are derived from different services with different service dates)
+    # can appear as duplicates.
+    # Deduplicate them on the info shown, to avoid confusion.
+    derived_limits = Enum.uniq_by(for_result, &{&1.line_id, &1.start_stop.name, &1.end_stop.name})
+
limits ++ derived_limits
end
diff --git a/lib/arrow_web/controllers/disruption_v2_html/calendar.ex b/lib/arrow_web/controllers/disruption_v2_html/calendar.ex
index 97afbe576..98af83875 100644
--- a/lib/arrow_web/controllers/disruption_v2_html/calendar.ex
+++ b/lib/arrow_web/controllers/disruption_v2_html/calendar.ex
@@ -1,7 +1,9 @@
defmodule ArrowWeb.DisruptionV2View.Calendar do
@moduledoc "An interface between Ecto structs and the `DisruptionCalendar` React component."
- alias Arrow.Disruptions.{DisruptionV2, Limit, ReplacementService}
+ alias Arrow.Disruptions.DisruptionV2
+ alias Arrow.Disruptions.Limit
+ alias Arrow.Disruptions.ReplacementService
alias Arrow.Limits.LimitDayOfWeek
alias Arrow.Shuttles.Shuttle
alias ArrowWeb.Endpoint
@@ -33,12 +35,7 @@ defmodule ArrowWeb.DisruptionV2View.Calendar do
defp events(
disruption_id,
- %Limit{
- start_date: start_date,
- end_date: end_date,
- limit_day_of_weeks: day_of_weeks,
- route_id: route_id
- },
+ %Limit{start_date: start_date, end_date: end_date, limit_day_of_weeks: day_of_weeks, route_id: route_id},
event_title,
is_active
) do
@@ -47,7 +44,8 @@ defmodule ArrowWeb.DisruptionV2View.Calendar do
|> Enum.filter(& &1.active?)
|> MapSet.new(&LimitDayOfWeek.day_number/1)
- Date.range(start_date, end_date)
+ start_date
+ |> Date.range(end_date)
|> Enum.filter(&(Date.day_of_week(&1) in day_numbers))
|> Enum.chunk_while([], &chunk_dates/2, &chunk_dates/1)
|> Enum.map(&{List.last(&1), List.first(&1)})
@@ -72,15 +70,12 @@ defmodule ArrowWeb.DisruptionV2View.Calendar do
defp events(
disruption_id,
- %ReplacementService{
- start_date: start_date,
- end_date: end_date,
- shuttle: %Shuttle{disrupted_route_id: route_id}
- },
+ %ReplacementService{start_date: start_date, end_date: end_date, shuttle: %Shuttle{disrupted_route_id: route_id}},
event_title,
is_active
) do
- Date.range(start_date, end_date)
+ start_date
+ |> Date.range(end_date)
|> Enum.chunk_while([], &chunk_dates/2, &chunk_dates/1)
|> Enum.map(&{List.last(&1), List.first(&1)})
|> Enum.map(fn
@@ -116,8 +111,7 @@ defmodule ArrowWeb.DisruptionV2View.Calendar do
defp route_class(nil), do: "none"
- defp route_class(route_id),
- do: route_id |> DisruptionV2.route() |> to_string() |> String.replace("_", "-")
+ defp route_class(route_id), do: route_id |> DisruptionV2.route() |> to_string() |> String.replace("_", "-")
defp status_class(true), do: "approved"
defp status_class(false), do: "pending"
diff --git a/lib/arrow_web/controllers/error_html.ex b/lib/arrow_web/controllers/error_html.ex
index 58a880147..4123e5707 100644
--- a/lib/arrow_web/controllers/error_html.ex
+++ b/lib/arrow_web/controllers/error_html.ex
@@ -1,5 +1,6 @@
defmodule ArrowWeb.ErrorView do
use ArrowWeb, :html
+
alias JaSerializer.ErrorSerializer
# If you want to customize your error pages,
diff --git a/lib/arrow_web/controllers/my_token_controller.ex b/lib/arrow_web/controllers/my_token_controller.ex
index 68e3b621a..d9156d92f 100644
--- a/lib/arrow_web/controllers/my_token_controller.ex
+++ b/lib/arrow_web/controllers/my_token_controller.ex
@@ -1,5 +1,6 @@
defmodule ArrowWeb.MyTokenController do
use ArrowWeb, :controller
+
alias Arrow.AuthToken
@spec show(Plug.Conn.t(), Plug.Conn.params()) :: Plug.Conn.t()
diff --git a/lib/arrow_web/controllers/note_controller.ex b/lib/arrow_web/controllers/note_controller.ex
index cd6f639fa..d756a906b 100644
--- a/lib/arrow_web/controllers/note_controller.ex
+++ b/lib/arrow_web/controllers/note_controller.ex
@@ -7,10 +7,7 @@ defmodule ArrowWeb.NoteController do
plug(Authorize, :create_note when action in [:create])
- def create(%{assigns: %{current_user: user}} = conn, %{
- "id" => disruption_id,
- "note" => note_attrs
- }) do
+ def create(%{assigns: %{current_user: user}} = conn, %{"id" => disruption_id, "note" => note_attrs}) do
case Disruption.add_note(String.to_integer(disruption_id), user.id, note_attrs) do
{:ok, _} ->
redirect(conn, to: Routes.disruption_path(conn, :show, disruption_id))
diff --git a/lib/arrow_web/controllers/shape_controller.ex b/lib/arrow_web/controllers/shape_controller.ex
index 2a1d778dd..a03d2987f 100644
--- a/lib/arrow_web/controllers/shape_controller.ex
+++ b/lib/arrow_web/controllers/shape_controller.ex
@@ -1,12 +1,13 @@
defmodule ArrowWeb.ShapeController do
- require Logger
- alias Arrow.Shuttles.ShapesUpload
- alias ArrowWeb.ErrorHelpers
- alias Ecto.Changeset
use ArrowWeb, :controller
alias Arrow.Shuttles
+ alias Arrow.Shuttles.ShapesUpload
+ alias ArrowWeb.ErrorHelpers
alias ArrowWeb.Plug.Authorize
+ alias Ecto.Changeset
+
+ require Logger
plug(Authorize, :view_disruption when action in [:index, :show, :download])
plug(Authorize, :create_disruption when action in [:new, :create])
@@ -36,7 +37,7 @@ defmodule ArrowWeb.ShapeController do
:info,
"Successfully parsed shapes from file"
)
- |> render(:select, form: changeset |> Phoenix.Component.to_form())
+ |> render(:select, form: Phoenix.Component.to_form(changeset))
else
conn
|> put_flash(
@@ -86,8 +87,7 @@ defmodule ArrowWeb.ShapeController do
{:ok, changesets} ->
saved_shape_names =
- changesets
- |> Enum.map(fn {:ok, changeset} -> changeset.name end)
+ Enum.map(changesets, fn {:ok, changeset} -> changeset.name end)
conn
|> put_flash(
diff --git a/lib/arrow_web/controllers/shape_html.ex b/lib/arrow_web/controllers/shape_html.ex
index cc7650eb2..946519621 100644
--- a/lib/arrow_web/controllers/shape_html.ex
+++ b/lib/arrow_web/controllers/shape_html.ex
@@ -1,8 +1,12 @@
defmodule ArrowWeb.ShapeView do
use ArrowWeb, :html
+
alias Arrow.Gtfs.Stop, as: GtfsStop
alias Arrow.Shuttles
- alias Arrow.Shuttles.{Route, RouteStop, Shape, ShapesUpload}
+ alias Arrow.Shuttles.Route
+ alias Arrow.Shuttles.RouteStop
+ alias Arrow.Shuttles.Shape
+ alias Arrow.Shuttles.ShapesUpload
alias Arrow.Shuttles.Stop, as: ArrowStop
alias Phoenix.Controller
@@ -77,8 +81,7 @@ defmodule ArrowWeb.ShapeView do
end
end
- defp render_route_stop(%RouteStop{gtfs_stop_id: gtfs_stop_id} = route_stop)
- when not is_nil(gtfs_stop_id) do
+ defp render_route_stop(%RouteStop{gtfs_stop_id: gtfs_stop_id} = route_stop) when not is_nil(gtfs_stop_id) do
gtfs_stop = Arrow.Repo.get(GtfsStop, gtfs_stop_id)
if gtfs_stop do
diff --git a/lib/arrow_web/controllers/stop_controller.ex b/lib/arrow_web/controllers/stop_controller.ex
index dac2e193b..220c228c0 100644
--- a/lib/arrow_web/controllers/stop_controller.ex
+++ b/lib/arrow_web/controllers/stop_controller.ex
@@ -23,8 +23,7 @@ defmodule ArrowWeb.StopController do
conn
|> put_flash(
:errors,
- {"Error creating stop, please try again",
- ErrorHelpers.changeset_error_messages(changeset)}
+ {"Error creating stop, please try again", ErrorHelpers.changeset_error_messages(changeset)}
)
|> redirect(to: ~p"/stops/new")
end
@@ -44,8 +43,7 @@ defmodule ArrowWeb.StopController do
conn
|> put_flash(
:errors,
- {"Error updating stop, please try again",
- ErrorHelpers.changeset_error_messages(changeset)}
+ {"Error updating stop, please try again", ErrorHelpers.changeset_error_messages(changeset)}
)
|> redirect(to: ~p"/stops/#{stop}/edit")
end
diff --git a/lib/arrow_web/controllers/timetable_controller.ex b/lib/arrow_web/controllers/timetable_controller.ex
index 87f87f371..11cfe0125 100644
--- a/lib/arrow_web/controllers/timetable_controller.ex
+++ b/lib/arrow_web/controllers/timetable_controller.ex
@@ -12,8 +12,7 @@ defmodule ArrowWeb.TimetableController do
|> ReplacementService.add_timetable()
available_days_of_week =
- ReplacementService.schedule_service_types()
- |> Enum.reject(&is_nil(replacement_service.timetable[&1]))
+ Enum.reject(ReplacementService.schedule_service_types(), &is_nil(replacement_service.timetable[&1]))
day_of_week =
if day_of_week = Map.get(params, "day_of_week") do
@@ -59,8 +58,7 @@ defmodule ArrowWeb.TimetableController do
end)
[first_stop, last_stop] =
- [List.first(sample_trip), List.last(sample_trip)]
- |> Enum.map(fn stop ->
+ Enum.map([List.first(sample_trip), List.last(sample_trip)], fn stop ->
stop
|> Map.get(:stop_id)
|> Shuttles.stop_or_gtfs_stop_for_stop_id()
diff --git a/lib/arrow_web/helpers.ex b/lib/arrow_web/helpers.ex
index 40e5713ab..2d63b31b4 100644
--- a/lib/arrow_web/helpers.ex
+++ b/lib/arrow_web/helpers.ex
@@ -33,10 +33,5 @@ defmodule ArrowWeb.Helpers do
end
def mode_labels,
- do: [
- subway: "Subway/Light Rail",
- commuter_rail: "Commuter Rail",
- bus: "Bus",
- silver_line: "Silver Line"
- ]
+ do: [subway: "Subway/Light Rail", commuter_rail: "Commuter Rail", bus: "Bus", silver_line: "Silver Line"]
end
diff --git a/lib/arrow_web/live/disruption_v2_live/disruption_v2_view_live.ex b/lib/arrow_web/live/disruption_v2_live/disruption_v2_view_live.ex
index a77f303c0..ec2538f57 100644
--- a/lib/arrow_web/live/disruption_v2_live/disruption_v2_view_live.ex
+++ b/lib/arrow_web/live/disruption_v2_live/disruption_v2_view_live.ex
@@ -1,10 +1,15 @@
defmodule ArrowWeb.DisruptionV2ViewLive do
+ @moduledoc false
use ArrowWeb, :live_view
- alias Arrow.{Adjustment, Disruptions, Limits}
- alias Arrow.Disruptions.{DisruptionV2, Limit, ReplacementService}
+ alias Arrow.Adjustment
+ alias Arrow.Disruptions
+ alias Arrow.Disruptions.DisruptionV2
+ alias Arrow.Disruptions.Limit
+ alias Arrow.Disruptions.ReplacementService
alias Arrow.Hastus
alias Arrow.Hastus.Export
+ alias Arrow.Limits
alias ArrowWeb.DisruptionComponents
@impl true
@@ -92,7 +97,7 @@ defmodule ArrowWeb.DisruptionV2ViewLive do
|> put_flash(:info, "Limit deleted successfully")}
{:error, %Ecto.Changeset{} = _changeset} ->
- {:noreply, socket |> put_flash(:error, "Error when deleting limit!")}
+ {:noreply, put_flash(socket, :error, "Error when deleting limit!")}
end
end
@@ -104,8 +109,7 @@ defmodule ArrowWeb.DisruptionV2ViewLive do
{:ok, _} ->
disruption = %{
socket.assigns.disruption
- | hastus_exports:
- Enum.reject(socket.assigns.disruption.hastus_exports, &(&1.id == parsed_id))
+ | hastus_exports: Enum.reject(socket.assigns.disruption.hastus_exports, &(&1.id == parsed_id))
}
{:noreply,
@@ -118,11 +122,7 @@ defmodule ArrowWeb.DisruptionV2ViewLive do
end
end
- def handle_event(
- "delete_replacement_service",
- %{"replacement_service" => replacement_service_id},
- socket
- ) do
+ def handle_event("delete_replacement_service", %{"replacement_service" => replacement_service_id}, socket) do
{parsed_id, _} = Integer.parse(replacement_service_id)
replacement_service = Disruptions.get_replacement_service!(parsed_id)
@@ -130,8 +130,7 @@ defmodule ArrowWeb.DisruptionV2ViewLive do
{:ok, _} ->
disruption = %{
socket.assigns.disruption
- | replacement_services:
- Enum.reject(socket.assigns.disruption.replacement_services, &(&1.id == parsed_id))
+ | replacement_services: Enum.reject(socket.assigns.disruption.replacement_services, &(&1.id == parsed_id))
}
{:noreply,
@@ -168,8 +167,7 @@ defmodule ArrowWeb.DisruptionV2ViewLive do
defp icon_paths(socket) do
Adjustment.kinds()
- |> Enum.map(&{&1, adjustment_kind_icon_path(socket, &1)})
- |> Enum.into(%{})
+ |> Map.new(&{&1, adjustment_kind_icon_path(socket, &1)})
|> Map.put(
:subway,
Phoenix.VerifiedRoutes.static_path(socket, "/images/icon-mode-subway-small.svg")
@@ -231,7 +229,7 @@ defmodule ArrowWeb.DisruptionV2ViewLive do
defp apply_action(socket, :duplicate_limit, %{"id" => id, "limit_id" => limit_id}) do
disruption = Disruptions.get_disruption_v2!(id)
- limit = Limits.get_limit!(limit_id) |> Map.put(:id, nil)
+ limit = limit_id |> Limits.get_limit!() |> Map.put(:id, nil)
socket
|> assign(:title, "edit disruption")
@@ -273,10 +271,7 @@ defmodule ArrowWeb.DisruptionV2ViewLive do
|> assign(:editing, replacement_service)
end
- defp apply_action(socket, :edit_replacement_service, %{
- "id" => id,
- "replacement_service_id" => replacement_service_id
- }) do
+ defp apply_action(socket, :edit_replacement_service, %{"id" => id, "replacement_service_id" => replacement_service_id}) do
disruption = Disruptions.get_disruption_v2!(id)
replacement_service = Disruptions.get_replacement_service!(replacement_service_id)
diff --git a/lib/arrow_web/live/shuttle_live/shuttle_live.ex b/lib/arrow_web/live/shuttle_live/shuttle_live.ex
index e2549b060..34397a4ad 100644
--- a/lib/arrow_web/live/shuttle_live/shuttle_live.ex
+++ b/lib/arrow_web/live/shuttle_live/shuttle_live.ex
@@ -1,9 +1,13 @@
defmodule ArrowWeb.ShuttleViewLive do
+ @moduledoc false
use ArrowWeb, :live_view
+
import Phoenix.HTML.Form
alias Arrow.Shuttles
- alias Arrow.Shuttles.{DefinitionUpload, Shuttle}
+ alias Arrow.Shuttles.DefinitionUpload
+ alias Arrow.Shuttles.RouteStop
+ alias Arrow.Shuttles.Shuttle
alias ArrowWeb.ShapeView
embed_templates "shuttle_live/*"
@@ -376,7 +380,7 @@ defmodule ArrowWeb.ShuttleViewLive do
def handle_event("live_select_change", %{"text" => text, "id" => live_select_id}, socket) do
shapes =
Shuttles.list_shapes()
- |> Enum.filter(&(String.downcase(&1.name) |> String.contains?(String.downcase(text))))
+ |> Enum.filter(&(&1.name |> String.downcase() |> String.contains?(String.downcase(text))))
|> Enum.map(&shape_option_mapper/1)
send_update(LiveSelect.Component, id: live_select_id, options: shapes)
@@ -404,15 +408,7 @@ defmodule ArrowWeb.ShuttleViewLive do
{:noreply, socket}
end
- def handle_event(
- "reorder_stops",
- %{
- "direction_id" => direction_id,
- "old" => old,
- "new" => new
- },
- socket
- ) do
+ def handle_event("reorder_stops", %{"direction_id" => direction_id, "old" => old, "new" => new}, socket) do
direction_id = String.to_existing_atom(direction_id)
changeset = socket.assigns.form.source
@@ -466,8 +462,7 @@ defmodule ArrowWeb.ShuttleViewLive do
{:error, error} ->
{:noreply,
- socket
- |> update(:errors, fn errors ->
+ update(socket, :errors, fn errors ->
put_in(errors, [:route_stops, Access.key(direction_id_string)], error)
end)}
end
@@ -476,10 +471,10 @@ defmodule ArrowWeb.ShuttleViewLive do
@spec get_stop_travel_times(list({:ok, any()})) ::
{:ok, list(number())} | {:error, any()}
defp get_stop_travel_times(stop_coordinates) do
- stop_coordinates = stop_coordinates |> Enum.map(fn {:ok, c} -> c end)
+ stop_coordinates = Enum.map(stop_coordinates, fn {:ok, c} -> c end)
if length(stop_coordinates) > 1 do
- stop_coordinates |> Shuttles.get_travel_times()
+ Shuttles.get_travel_times(stop_coordinates)
else
{:error, "Incomplete stop data, please provide more than one stop"}
end
@@ -527,7 +522,7 @@ defmodule ArrowWeb.ShuttleViewLive do
max_stop_sequence =
existing_stops |> Enum.map(& &1.stop_sequence) |> Enum.max(fn -> 0 end)
- new_route_stop = %Arrow.Shuttles.RouteStop{
+ new_route_stop = %RouteStop{
direction_id: direction_id,
stop_sequence: max_stop_sequence + 1
}
@@ -554,8 +549,7 @@ defmodule ArrowWeb.ShuttleViewLive do
|> List.insert_at(new, moved_route_stop)
|> Enum.reduce({[], 1}, fn route_stop, {route_stop_changes, stop_sequence} ->
{route_stop_changes ++
- [Arrow.Shuttles.RouteStop.changeset(route_stop, %{stop_sequence: stop_sequence})],
- stop_sequence + 1}
+ [RouteStop.changeset(route_stop, %{stop_sequence: stop_sequence})], stop_sequence + 1}
end)
Ecto.Changeset.put_assoc(
@@ -603,8 +597,8 @@ defmodule ArrowWeb.ShuttleViewLive do
stop_ids
|> Enum.with_index(1)
|> Enum.map(fn {stop_id, i} ->
- Arrow.Shuttles.RouteStop.changeset(
- %Arrow.Shuttles.RouteStop{},
+ RouteStop.changeset(
+ %RouteStop{},
%{
direction_id: direction_id,
stop_sequence: i,
@@ -642,10 +636,10 @@ defmodule ArrowWeb.ShuttleViewLive do
end)
|> case do
{valid_route_stops, []} ->
- {:ok, valid_route_stops |> Enum.flat_map(&elem(&1, 1))}
+ {:ok, Enum.flat_map(valid_route_stops, &elem(&1, 1))}
{_, errors} ->
- {:error, errors |> Enum.flat_map(&elem(&1, 1))}
+ {:error, Enum.flat_map(errors, &elem(&1, 1))}
end
case new_route_stops do
@@ -655,8 +649,7 @@ defmodule ArrowWeb.ShuttleViewLive do
direction_id = Ecto.Changeset.get_field(route_changeset, :direction_id)
direction_new_route_stops =
- route_stops
- |> Enum.filter(&(Ecto.Changeset.get_field(&1, :direction_id) == direction_id))
+ Enum.filter(route_stops, &(Ecto.Changeset.get_field(&1, :direction_id) == direction_id))
Ecto.Changeset.put_assoc(
route_changeset,
@@ -682,14 +675,7 @@ defmodule ArrowWeb.ShuttleViewLive do
end
{:error, errors} ->
- socket
- |> put_flash(
- :errors,
- {
- "Failed to upload definition: ",
- errors |> Enum.map(&translate_error/1)
- }
- )
+ put_flash(socket, :errors, {"Failed to upload definition: ", Enum.map(errors, &translate_error/1)})
end
end
end
diff --git a/lib/arrow_web/live/stop_live/stop_live.ex b/lib/arrow_web/live/stop_live/stop_live.ex
index 049d58e38..f91b95aaf 100644
--- a/lib/arrow_web/live/stop_live/stop_live.ex
+++ b/lib/arrow_web/live/stop_live/stop_live.ex
@@ -1,9 +1,11 @@
defmodule ArrowWeb.StopViewLive do
+ @moduledoc false
use ArrowWeb, :live_view
alias Arrow.Gtfs.Stop, as: GtfsStop
alias Arrow.Shuttles.Stop
alias Arrow.Stops
+
embed_templates "stop_live/*"
@doc """
@@ -128,7 +130,7 @@ defmodule ArrowWeb.StopViewLive do
end
def handle_event("validate", %{"stop" => stop_params}, socket) do
- form = Stops.change_stop(socket.assigns.stop, stop_params) |> to_form(action: :validate)
+ form = socket.assigns.stop |> Stops.change_stop(stop_params) |> to_form(action: :validate)
{existing_stops, existing_gtfs_stops} =
with %{"stop_lat" => lat, "stop_lon" => lon, "stop_id" => stop_id} <- stop_params,
@@ -141,8 +143,7 @@ defmodule ArrowWeb.StopViewLive do
end
{:noreply,
- socket
- |> assign(
+ assign(socket,
stop_map_props: stop_params,
form: form,
existing_stops: existing_stops,
diff --git a/lib/arrow_web/plug/assign_user.ex b/lib/arrow_web/plug/assign_user.ex
index e46c218b5..52c56d85a 100644
--- a/lib/arrow_web/plug/assign_user.ex
+++ b/lib/arrow_web/plug/assign_user.ex
@@ -3,6 +3,7 @@ defmodule ArrowWeb.Plug.AssignUser do
Associates a connection with an `Arrow.Account.User`.
"""
import Plug.Conn
+
alias Arrow.Accounts.User
@spec init(Plug.opts()) :: Plug.opts()
diff --git a/lib/arrow_web/plug/authorize.ex b/lib/arrow_web/plug/authorize.ex
index 077b60b44..d7fdb8d74 100644
--- a/lib/arrow_web/plug/authorize.ex
+++ b/lib/arrow_web/plug/authorize.ex
@@ -3,6 +3,7 @@ defmodule ArrowWeb.Plug.Authorize do
Checks a user's permissions and sends them a 403 if they are unauthorized.
"""
import Plug.Conn
+
alias Arrow.Accounts.User
alias Arrow.Permissions
alias ArrowWeb.Router.Helpers, as: Routes
diff --git a/lib/arrow_web/router.ex b/lib/arrow_web/router.ex
index a51e31cf8..c714080ca 100644
--- a/lib/arrow_web/router.ex
+++ b/lib/arrow_web/router.ex
@@ -1,8 +1,13 @@
defmodule ArrowWeb.Router do
- alias ArrowWeb.API.GtfsImportController
use ArrowWeb, :router
+
import Phoenix.LiveDashboard.Router
+ alias ArrowWeb.API.GtfsImportController
+ alias ArrowWeb.AuthManager.Pipeline
+ alias ArrowWeb.Plug.AssignUser
+ alias Guardian.Plug.EnsureAuthenticated
+
pipeline :browser do
plug(:accepts, ["html"])
plug(:fetch_session)
@@ -29,16 +34,16 @@ defmodule ArrowWeb.Router do
end
pipeline :authenticate do
- plug(ArrowWeb.AuthManager.Pipeline)
- plug(Guardian.Plug.EnsureAuthenticated)
- plug(ArrowWeb.Plug.AssignUser)
+ plug(Pipeline)
+ plug(EnsureAuthenticated)
+ plug(AssignUser)
end
pipeline :authenticate_api do
- plug(ArrowWeb.AuthManager.Pipeline)
+ plug(Pipeline)
plug(ArrowWeb.TryApiTokenAuth)
- plug(Guardian.Plug.EnsureAuthenticated)
- plug(ArrowWeb.Plug.AssignUser)
+ plug(EnsureAuthenticated)
+ plug(AssignUser)
end
scope "/", ArrowWeb do
diff --git a/lib/arrow_web/telemetry.ex b/lib/arrow_web/telemetry.ex
index ed2844475..421e1c9c3 100644
--- a/lib/arrow_web/telemetry.ex
+++ b/lib/arrow_web/telemetry.ex
@@ -4,6 +4,7 @@ defmodule ArrowWeb.Telemetry do
"""
use Supervisor
+
import Telemetry.Metrics
def start_link(arg) do
diff --git a/lib/arrow_web/try_api_token_auth.ex b/lib/arrow_web/try_api_token_auth.ex
index 31e1e38ea..97573f4ae 100644
--- a/lib/arrow_web/try_api_token_auth.ex
+++ b/lib/arrow_web/try_api_token_auth.ex
@@ -4,6 +4,9 @@ defmodule ArrowWeb.TryApiTokenAuth do
"""
import Plug.Conn
+
+ alias ArrowWeb.TryApiTokenAuth.Local
+
require Logger
def init(options), do: options
@@ -13,7 +16,7 @@ defmodule ArrowWeb.TryApiTokenAuth do
with [token | _] <- api_key_values,
token = String.downcase(token),
- auth_token = %Arrow.AuthToken{} <-
+ %Arrow.AuthToken{} = auth_token <-
Arrow.Repo.get_by(Arrow.AuthToken, token: token),
api_login_module = api_login_module_for_token(auth_token),
conn = api_login_module.sign_in(conn, auth_token),
@@ -25,9 +28,7 @@ defmodule ArrowWeb.TryApiTokenAuth do
conn
reason ->
- Logger.info(
- "unable to login in API client api_key=#{inspect(api_key_values)} reason=#{inspect(reason)}"
- )
+ Logger.info("unable to login in API client api_key=#{inspect(api_key_values)} reason=#{inspect(reason)}")
conn |> send_resp(401, "unauthenticated") |> halt()
end
@@ -36,11 +37,11 @@ defmodule ArrowWeb.TryApiTokenAuth do
defp api_login_module_for_token(auth_token)
defp api_login_module_for_token(%Arrow.AuthToken{username: "gtfs_creator_ci@mbta.com"}) do
- ArrowWeb.TryApiTokenAuth.Local
+ Local
end
defp api_login_module_for_token(%Arrow.AuthToken{username: "fake_uid"}) do
- ArrowWeb.TryApiTokenAuth.Local
+ Local
end
defp api_login_module_for_token(_token) do
diff --git a/lib/arrow_web/try_api_token_auth/keycloak.ex b/lib/arrow_web/try_api_token_auth/keycloak.ex
index 18bf8faee..748994584 100644
--- a/lib/arrow_web/try_api_token_auth/keycloak.ex
+++ b/lib/arrow_web/try_api_token_auth/keycloak.ex
@@ -8,18 +8,10 @@ defmodule ArrowWeb.TryApiTokenAuth.Keycloak do
def sign_in(conn, auth_token) do
with {:ok, user_id} <- lookup_user_id(auth_token.username),
{:ok, roles} <- lookup_user_roles(user_id) do
- conn
- |> Guardian.Plug.sign_in(
- ArrowWeb.AuthManager,
- auth_token.username,
- %{roles: roles},
- ttl: {0, :second}
- )
+ Guardian.Plug.sign_in(conn, ArrowWeb.AuthManager, auth_token.username, %{roles: roles}, ttl: {0, :second})
else
other ->
- Logger.warning(
- "unexpected response when logging #{auth_token.username} in via Keycloak API: #{inspect(other)}"
- )
+ Logger.warning("unexpected response when logging #{auth_token.username} in via Keycloak API: #{inspect(other)}")
conn
end
@@ -89,9 +81,10 @@ defmodule ArrowWeb.TryApiTokenAuth.Keycloak do
params: params,
hackney: [
ssl_options:
- Keyword.merge(
+ Keyword.put(
:httpc.ssl_verify_host_options(true),
- versions: [:"tlsv1.3", :"tlsv1.2"]
+ :versions,
+ [:"tlsv1.3", :"tlsv1.2"]
)
]
) do
diff --git a/lib/arrow_web/try_api_token_auth/local.ex b/lib/arrow_web/try_api_token_auth/local.ex
index 833688bdb..d16a8fc85 100644
--- a/lib/arrow_web/try_api_token_auth/local.ex
+++ b/lib/arrow_web/try_api_token_auth/local.ex
@@ -4,16 +4,11 @@ defmodule ArrowWeb.TryApiTokenAuth.Local do
"""
alias Plug.Conn
+
require Logger
@spec sign_in(Conn.t(), Arrow.AuthToken.t()) :: Conn.t()
def sign_in(%Conn{} = conn, %Arrow.AuthToken{} = auth_token) do
- conn
- |> Guardian.Plug.sign_in(
- ArrowWeb.AuthManager,
- auth_token.username,
- %{roles: ["read-only"]},
- ttl: {0, :second}
- )
+ Guardian.Plug.sign_in(conn, ArrowWeb.AuthManager, auth_token.username, %{roles: ["read-only"]}, ttl: {0, :second})
end
end
diff --git a/lib/mix/tasks/copy_db.ex b/lib/mix/tasks/copy_db.ex
index 38238c1ba..419cc7ce2 100644
--- a/lib/mix/tasks/copy_db.ex
+++ b/lib/mix/tasks/copy_db.ex
@@ -1,12 +1,13 @@
defmodule Mix.Tasks.CopyDb do
+ @shortdoc "Copies database"
@moduledoc """
Mix task to clone the Arrow database (dev or prod) in AWS locally.
"""
use Mix.Task
+
require Logger
- @shortdoc "Copies database"
@impl Mix.Task
def run(_args) do
# Load the DBStructure module now, so that relevant atoms like :route_id are
diff --git a/lib/mix/tasks/import_gtfs.ex b/lib/mix/tasks/import_gtfs.ex
index 4a26d9ddb..e90a83349 100644
--- a/lib/mix/tasks/import_gtfs.ex
+++ b/lib/mix/tasks/import_gtfs.ex
@@ -1,4 +1,6 @@
defmodule Mix.Tasks.ImportGtfs do
+ @shortdoc "Imports MBTA_GTFS.zip"
+
@moduledoc """
Mix task to import a GTFS-static feed into the local Arrow database.
"""
@@ -6,8 +8,6 @@ defmodule Mix.Tasks.ImportGtfs do
require Logger
- @shortdoc "Imports MBTA_GTFS.zip"
-
@impl Mix.Task
def run(args) do
with {:ok, gtfs_path} <- get_gtfs_path(args),
@@ -37,13 +37,13 @@ defmodule Mix.Tasks.ImportGtfs do
Mix.shell().info("No path to MBTA_GTFS.zip provided.")
use_tmp_dir_feed? = fn ->
- "Would you like to use the feed previously downloaded by this mix task? (timestamp: #{tmp_file_timestamp!()})"
- |> Mix.shell().yes?()
+ Mix.shell().yes?(
+ "Would you like to use the feed previously downloaded by this mix task? (timestamp: #{tmp_file_timestamp!()})"
+ )
end
use_downloaded_feed? = fn ->
- "Would you like to download and use the latest feed from #{feed_url()}?"
- |> Mix.shell().yes?()
+ Mix.shell().yes?("Would you like to download and use the latest feed from #{feed_url()}?")
end
cond do
diff --git a/mix.exs b/mix.exs
index f2129554c..d688a62a3 100644
--- a/mix.exs
+++ b/mix.exs
@@ -88,15 +88,11 @@ defmodule Arrow.MixProject do
{:wallaby, "~> 0.30", runtime: false, only: :test},
{:sentry, "~> 10.7"},
{:tailwind, "~> 0.2", runtime: Mix.env() == :dev},
- {:heroicons,
- github: "tailwindlabs/heroicons",
- tag: "v2.1.1",
- sparse: "optimized",
- app: false,
- compile: false},
+ {:heroicons, github: "tailwindlabs/heroicons", tag: "v2.1.1", sparse: "optimized", app: false, compile: false},
{:sax_map, "~> 1.2"},
{:unzip, "~> 0.12.0"},
- {:xlsxir, "~> 1.6"}
+ {:xlsxir, "~> 1.6"},
+ {:styler, "~> 1.4", only: [:dev, :test], runtime: false}
]
end
diff --git a/mix.lock b/mix.lock
index e369cd614..61716b05f 100644
--- a/mix.lock
+++ b/mix.lock
@@ -65,6 +65,7 @@
"saxy": {:hex, :saxy, "1.6.0", "02cb4e9bd045f25ac0c70fae8164754878327ee393c338a090288210b02317ee", [:mix], [], "hexpm", "ef42eb4ac983ca77d650fbdb68368b26570f6cc5895f0faa04d34a6f384abad3"},
"sentry": {:hex, :sentry, "10.10.0", "d058b635f3796947545c8057a42996f6dbefd12152da947209b56d16af41b161", [:mix], [{:hackney, "~> 1.8", [hex: :hackney, repo: "hexpm", optional: true]}, {:jason, "~> 1.1", [hex: :jason, repo: "hexpm", optional: true]}, {:nimble_options, "~> 1.0", [hex: :nimble_options, repo: "hexpm", optional: false]}, {:nimble_ownership, "~> 0.3.0 or ~> 1.0", [hex: :nimble_ownership, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.6", [hex: :phoenix, repo: "hexpm", optional: true]}, {:phoenix_live_view, "~> 0.20 or ~> 1.0", [hex: :phoenix_live_view, repo: "hexpm", optional: true]}, {:plug, "~> 1.6", [hex: :plug, repo: "hexpm", optional: true]}, {:telemetry, "~> 0.4 or ~> 1.0", [hex: :telemetry, repo: "hexpm", optional: true]}], "hexpm", "7c7ddd3cfdd63fcee53b1e28f9a653037e6927b2b1dbd300b7aeee9687c7a8f6"},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.7", "354c321cf377240c7b8716899e182ce4890c5938111a1296add3ec74cf1715df", [:make, :mix, :rebar3], [], "hexpm", "fe4c190e8f37401d30167c8c405eda19469f34577987c76dde613e838bbc67f8"},
+ "styler": {:hex, :styler, "1.4.2", "420da8a9d10324625b75690ca9f2468bc00ee6eb78dead827e562368f9feabbb", [:mix], [], "hexpm", "ca22538b203b2424eef99a227e081143b9a9a4b26da75f26d920537fcd778832"},
"sweet_xml": {:hex, :sweet_xml, "0.7.5", "803a563113981aaac202a1dbd39771562d0ad31004ddbfc9b5090bdcd5605277", [:mix], [], "hexpm", "193b28a9b12891cae351d81a0cead165ffe67df1b73fe5866d10629f4faefb12"},
"table_rex": {:hex, :table_rex, "4.1.0", "fbaa8b1ce154c9772012bf445bfb86b587430fb96f3b12022d3f35ee4a68c918", [:mix], [], "hexpm", "95932701df195d43bc2d1c6531178fc8338aa8f38c80f098504d529c43bc2601"},
"tailwind": {:hex, :tailwind, "0.3.1", "a89d2835c580748c7a975ad7dd3f2ea5e63216dc16d44f9df492fbd12c094bed", [:mix], [], "hexpm", "98a45febdf4a87bc26682e1171acdedd6317d0919953c353fcd1b4f9f4b676a2"},
diff --git a/priv/repo/seeds.exs b/priv/repo/seeds.exs
index 735e5e8ba..2825d6dbe 100644
--- a/priv/repo/seeds.exs
+++ b/priv/repo/seeds.exs
@@ -10,9 +10,13 @@
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
-alias Arrow.Repo
alias Arrow.Gtfs
-alias Arrow.Shuttles.{Shape, Stop, Shuttle, Route, RouteStop}
+alias Arrow.Repo
+alias Arrow.Shuttles.Route
+alias Arrow.Shuttles.RouteStop
+alias Arrow.Shuttles.Shape
+alias Arrow.Shuttles.Shuttle
+alias Arrow.Shuttles.Stop
# For testing locally with dependency on /import-gtfs
Repo.insert(%Gtfs.Route{
@@ -25,7 +29,7 @@ Repo.insert(%Gtfs.Route{
url: "https://www.mbta.com/schedules/Red",
color: "DA291C",
text_color: "FFFFFF",
- sort_order: 10010,
+ sort_order: 10_010,
fare_class: "Rapid Transit",
line: %Gtfs.Line{
id: "line-Red",
@@ -155,8 +159,7 @@ Repo.insert!(%Stop{
id: 1,
stop_id: "9070065",
stop_name: "Porter - Massachusetts Avenue @ Mount Vernon St",
- stop_desc:
- "Porter - Red Line Ashmont/Braintree Shuttle - Massachusetts Avenue @ Mount Vernon St",
+ stop_desc: "Porter - Red Line Ashmont/Braintree Shuttle - Massachusetts Avenue @ Mount Vernon St",
platform_code: nil,
platform_name: "Ashmont/Braintree Shuttle",
parent_station: nil,
diff --git a/priv/repo/structure.sql b/priv/repo/structure.sql
index 2f3c327b1..969df2688 100644
--- a/priv/repo/structure.sql
+++ b/priv/repo/structure.sql
@@ -30,20 +30,6 @@ CREATE EXTENSION IF NOT EXISTS btree_gist WITH SCHEMA public;
COMMENT ON EXTENSION btree_gist IS 'support for indexing common datatypes in GiST';
---
--- Name: postgis; Type: EXTENSION; Schema: -; Owner: -
---
-
-CREATE EXTENSION IF NOT EXISTS postgis WITH SCHEMA public;
-
-
---
--- Name: EXTENSION postgis; Type: COMMENT; Schema: -; Owner: -
---
-
-COMMENT ON EXTENSION postgis IS 'PostGIS geometry and geography spatial types and functions';
-
-
--
-- Name: day_name; Type: TYPE; Schema: public; Owner: -
--
@@ -741,6 +727,39 @@ CREATE TABLE public.gtfs_trips (
);
+--
+-- Name: hastus_derived_limits; Type: TABLE; Schema: public; Owner: -
+--
+
+CREATE TABLE public.hastus_derived_limits (
+ id bigint NOT NULL,
+ service_id bigint NOT NULL,
+ start_stop_id character varying(255) NOT NULL,
+ end_stop_id character varying(255) NOT NULL,
+ inserted_at timestamp with time zone NOT NULL,
+ updated_at timestamp with time zone NOT NULL
+);
+
+
+--
+-- Name: hastus_derived_limits_id_seq; Type: SEQUENCE; Schema: public; Owner: -
+--
+
+CREATE SEQUENCE public.hastus_derived_limits_id_seq
+ START WITH 1
+ INCREMENT BY 1
+ NO MINVALUE
+ NO MAXVALUE
+ CACHE 1;
+
+
+--
+-- Name: hastus_derived_limits_id_seq; Type: SEQUENCE OWNED BY; Schema: public; Owner: -
+--
+
+ALTER SEQUENCE public.hastus_derived_limits_id_seq OWNED BY public.hastus_derived_limits.id;
+
+
--
-- Name: hastus_exports; Type: TABLE; Schema: public; Owner: -
--
@@ -1317,6 +1336,13 @@ ALTER TABLE ONLY public.disruptions ALTER COLUMN id SET DEFAULT nextval('public.
ALTER TABLE ONLY public.disruptionsv2 ALTER COLUMN id SET DEFAULT nextval('public.disruptionsv2_id_seq'::regclass);
+--
+-- Name: hastus_derived_limits id; Type: DEFAULT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY public.hastus_derived_limits ALTER COLUMN id SET DEFAULT nextval('public.hastus_derived_limits_id_seq'::regclass);
+
+
--
-- Name: hastus_exports id; Type: DEFAULT; Schema: public; Owner: -
--
@@ -1616,6 +1642,14 @@ ALTER TABLE ONLY public.gtfs_trips
ADD CONSTRAINT gtfs_trips_pkey PRIMARY KEY (id);
+--
+-- Name: hastus_derived_limits hastus_derived_limits_pkey; Type: CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY public.hastus_derived_limits
+ ADD CONSTRAINT hastus_derived_limits_pkey PRIMARY KEY (id);
+
+
--
-- Name: hastus_exports hastus_exports_pkey; Type: CONSTRAINT; Schema: public; Owner: -
--
@@ -2220,6 +2254,30 @@ ALTER TABLE ONLY public.gtfs_trips
ADD CONSTRAINT gtfs_trips_shape_id_fkey FOREIGN KEY (shape_id) REFERENCES public.gtfs_shapes(id);
+--
+-- Name: hastus_derived_limits hastus_derived_limits_end_stop_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY public.hastus_derived_limits
+ ADD CONSTRAINT hastus_derived_limits_end_stop_id_fkey FOREIGN KEY (end_stop_id) REFERENCES public.gtfs_stops(id);
+
+
+--
+-- Name: hastus_derived_limits hastus_derived_limits_service_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY public.hastus_derived_limits
+ ADD CONSTRAINT hastus_derived_limits_service_id_fkey FOREIGN KEY (service_id) REFERENCES public.hastus_services(id) ON DELETE CASCADE;
+
+
+--
+-- Name: hastus_derived_limits hastus_derived_limits_start_stop_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
+--
+
+ALTER TABLE ONLY public.hastus_derived_limits
+ ADD CONSTRAINT hastus_derived_limits_start_stop_id_fkey FOREIGN KEY (start_stop_id) REFERENCES public.gtfs_stops(id);
+
+
--
-- Name: hastus_exports hastus_exports_disruption_id_fkey; Type: FK CONSTRAINT; Schema: public; Owner: -
--
@@ -2435,4 +2493,5 @@ INSERT INTO public."schema_migrations" (version) VALUES (20250402181804);
INSERT INTO public."schema_migrations" (version) VALUES (20250403191728);
INSERT INTO public."schema_migrations" (version) VALUES (20250410180228);
INSERT INTO public."schema_migrations" (version) VALUES (20250501125059);
+INSERT INTO public."schema_migrations" (version) VALUES (20250601120000);
INSERT INTO public."schema_migrations" (version) VALUES (20250602151911);
diff --git a/test/arrow/account/user_test.exs b/test/arrow/account/user_test.exs
index e12f1e03f..8c3efcdef 100644
--- a/test/arrow/account/user_test.exs
+++ b/test/arrow/account/user_test.exs
@@ -1,5 +1,6 @@
defmodule Arrow.Account.UserTest do
use ExUnit.Case
+
alias Arrow.Accounts.User
test "roles is a MapSet" do
diff --git a/test/arrow/adjustment_fetcher_test.exs b/test/arrow/adjustment_fetcher_test.exs
index 1f524811f..9ea2e0a41 100644
--- a/test/arrow/adjustment_fetcher_test.exs
+++ b/test/arrow/adjustment_fetcher_test.exs
@@ -1,13 +1,16 @@
defmodule Arrow.AdjustmentFetcherTest do
use Arrow.DataCase
- alias Arrow.{Adjustment, AdjustmentFetcher, HTTPMock, Repo}
-
import Arrow.Factory
import ExUnit.CaptureLog
import Mox
- @test_json [%{id: "foo", attributes: %{route_id: "bar"}}] |> Jason.encode!()
+ alias Arrow.Adjustment
+ alias Arrow.AdjustmentFetcher
+ alias Arrow.HTTPMock
+ alias Arrow.Repo
+
+ @test_json Jason.encode!([%{id: "foo", attributes: %{route_id: "bar"}}])
setup :verify_on_exit!
@@ -46,7 +49,7 @@ defmodule Arrow.AdjustmentFetcherTest do
describe "fetch/0" do
defp setup_successful_request do
- HTTPMock |> expect(:get, fn _url -> {:ok, %{status_code: 200, body: @test_json}} end)
+ expect(HTTPMock, :get, fn _url -> {:ok, %{status_code: 200, body: @test_json}} end)
end
test "inserts data" do
@@ -98,21 +101,18 @@ defmodule Arrow.AdjustmentFetcherTest do
end
test "handles a failure to fetch the adjustments" do
- HTTPMock |> expect(:get, fn _url -> {:ok, %{status_code: 403, body: "forbid"}} end)
-
+ expect(HTTPMock, :get, fn _url -> {:ok, %{status_code: 403, body: "forbid"}} end)
assert {:error, %{status_code: 403}} = AdjustmentFetcher.fetch()
end
test "handles a failure to decode the adjustments" do
- HTTPMock |> expect(:get, fn _url -> {:ok, %{status_code: 200, body: "not JSON"}} end)
-
+ expect(HTTPMock, :get, fn _url -> {:ok, %{status_code: 200, body: "not JSON"}} end)
assert {:error, %Jason.DecodeError{}} = AdjustmentFetcher.fetch()
end
test "leaves existing adjustments intact on failure" do
Repo.insert!(%Adjustment{source: "gtfs_creator", source_label: "foo", route_id: "bar"})
- HTTPMock |> expect(:get, fn _url -> {:error, "oops"} end)
-
+ expect(HTTPMock, :get, fn _url -> {:error, "oops"} end)
AdjustmentFetcher.fetch()
assert [%Adjustment{source_label: "foo", source: "gtfs_creator"}] = Repo.all(Adjustment)
diff --git a/test/arrow/adjustment_test.exs b/test/arrow/adjustment_test.exs
index 41dbf0f84..9acb930a4 100644
--- a/test/arrow/adjustment_test.exs
+++ b/test/arrow/adjustment_test.exs
@@ -1,6 +1,7 @@
defmodule Arrow.AdjustmentTest do
@moduledoc false
use Arrow.DataCase
+
alias Arrow.Adjustment
alias Arrow.Repo
diff --git a/test/arrow/application_test.exs b/test/arrow/application_test.exs
index 666b266bf..5732fa8b2 100644
--- a/test/arrow/application_test.exs
+++ b/test/arrow/application_test.exs
@@ -1,6 +1,7 @@
defmodule Arrow.ApplicationTest do
@moduledoc false
use ExUnit.Case, async: true
+
import Arrow.Application
describe "migrate_children/1" do
diff --git a/test/arrow/auth_token_test.exs b/test/arrow/auth_token_test.exs
index fe7da5992..1289cf949 100644
--- a/test/arrow/auth_token_test.exs
+++ b/test/arrow/auth_token_test.exs
@@ -1,5 +1,6 @@
defmodule Arrow.AuthTokenTest do
use Arrow.DataCase
+
alias Arrow.AuthToken
alias Arrow.Repo
diff --git a/test/arrow/disruption_revision_test.exs b/test/arrow/disruption_revision_test.exs
index 6d5bf5d3e..98d156969 100644
--- a/test/arrow/disruption_revision_test.exs
+++ b/test/arrow/disruption_revision_test.exs
@@ -1,6 +1,7 @@
defmodule Arrow.DisruptionRevisionTest do
@moduledoc false
use Arrow.DataCase
+
alias Arrow.DisruptionRevision
describe "adjustment_kinds/1" do
diff --git a/test/arrow/disruption_test.exs b/test/arrow/disruption_test.exs
index 468092b82..2415f3964 100644
--- a/test/arrow/disruption_test.exs
+++ b/test/arrow/disruption_test.exs
@@ -1,7 +1,9 @@
defmodule Arrow.DisruptionTest do
@moduledoc false
use Arrow.DataCase
+
import Ecto.Query
+
alias Arrow.Disruption
alias Arrow.Disruption.DayOfWeek
alias Arrow.Disruption.Note
diff --git a/test/arrow/disruptions/limit_test.exs b/test/arrow/disruptions/limit_test.exs
index cd7cd831d..09f6c9ad9 100644
--- a/test/arrow/disruptions/limit_test.exs
+++ b/test/arrow/disruptions/limit_test.exs
@@ -45,19 +45,21 @@ defmodule Arrow.Disruptions.LimitTest do
{:day_name, {"Dates specified above do not include a #{day}", []}}
end
- [
- tuesday: true,
- friday: false,
- sunday: false
- ]
- |> Enum.each(fn {day, valid?} ->
- changeset = Enum.find(dow_changesets, &(get_field(&1, :day_name) == day))
- assert changeset.valid? == valid?
-
- if not valid? do
- assert expected_error.(day) in changeset.errors
+ Enum.each(
+ [
+ tuesday: true,
+ friday: false,
+ sunday: false
+ ],
+ fn {day, valid?} ->
+ changeset = Enum.find(dow_changesets, &(get_field(&1, :day_name) == day))
+ assert changeset.valid? == valid?
+
+ if not valid? do
+ assert expected_error.(day) in changeset.errors
+ end
end
- end)
+ )
end
end
end
diff --git a/test/arrow/disruptions/replacement_service_test.exs b/test/arrow/disruptions/replacement_service_test.exs
index 4a88fd25c..c7ab0a07f 100644
--- a/test/arrow/disruptions/replacement_service_test.exs
+++ b/test/arrow/disruptions/replacement_service_test.exs
@@ -230,7 +230,8 @@ defmodule Arrow.Disruptions.ReplacementServiceTest do
end
defp first_stop_time_of_last_trip(replacement_service, schedule_service_type, direction_id) do
- get_in(replacement_service.timetable, [
+ replacement_service.timetable
+ |> get_in([
schedule_service_type,
direction_id,
Access.at(-1),
diff --git a/test/arrow/disruptions/replacement_service_upload_test.exs b/test/arrow/disruptions/replacement_service_upload_test.exs
index dc38ecf94..81249d4e0 100644
--- a/test/arrow/disruptions/replacement_service_upload_test.exs
+++ b/test/arrow/disruptions/replacement_service_upload_test.exs
@@ -2,8 +2,8 @@ defmodule Arrow.Disruptions.ReplacementServiceUploadTest do
@moduledoc false
use Arrow.DataCase
- import ExUnit.CaptureLog
import Arrow.Disruptions.ReplacementServiceUpload
+ import ExUnit.CaptureLog
@xlsx_dir "test/support/fixtures/xlsx/disruption_v2_live"
diff --git a/test/arrow/disruptions_test.exs b/test/arrow/disruptions_test.exs
index b3a1089bd..21cecafdf 100644
--- a/test/arrow/disruptions_test.exs
+++ b/test/arrow/disruptions_test.exs
@@ -1,15 +1,15 @@
defmodule Arrow.DisruptionsTest do
- alias Arrow.DisruptionsFixtures
- alias Arrow.ShuttlesFixtures
use Arrow.DataCase
alias Arrow.Disruptions
+ alias Arrow.DisruptionsFixtures
+ alias Arrow.ShuttlesFixtures
describe "disruptionsv2" do
- alias Arrow.Disruptions.DisruptionV2
-
import Arrow.DisruptionsFixtures
+ alias Arrow.Disruptions.DisruptionV2
+
@invalid_attrs %{title: "foobar", description: "barfoo", mode: nil, is_active: true}
test "list_disruptionsv2/0 returns all disruptionsv2" do
@@ -84,10 +84,10 @@ defmodule Arrow.DisruptionsTest do
end
describe "replacement_services" do
- alias Arrow.Disruptions.ReplacementService
-
import Arrow.DisruptionsFixtures
+ alias Arrow.Disruptions.ReplacementService
+
@invalid_attrs %{
reason: nil,
start_date: nil,
diff --git a/test/arrow/geo_test.exs b/test/arrow/geo_test.exs
index 728beed18..5110a3bb1 100644
--- a/test/arrow/geo_test.exs
+++ b/test/arrow/geo_test.exs
@@ -1,5 +1,6 @@
defmodule Arrow.GeoTest do
use ExUnit.Case
+
alias Arrow.Geo
describe "haversine_distance/2" do
diff --git a/test/arrow/gtfs/agency_test.exs b/test/arrow/gtfs/agency_test.exs
index 8f907e25f..b424d682d 100644
--- a/test/arrow/gtfs/agency_test.exs
+++ b/test/arrow/gtfs/agency_test.exs
@@ -1,5 +1,6 @@
defmodule Arrow.Gtfs.AgencyTest do
use Arrow.DataCase
+
alias Arrow.Gtfs.Agency
describe "database" do
diff --git a/test/arrow/gtfs/import_helper_test.exs b/test/arrow/gtfs/import_helper_test.exs
index f4b2947c3..100628316 100644
--- a/test/arrow/gtfs/import_helper_test.exs
+++ b/test/arrow/gtfs/import_helper_test.exs
@@ -1,4 +1,5 @@
defmodule Arrow.Gtfs.ImportHelperTest do
use ExUnit.Case, async: true
+
doctest Arrow.Gtfs.ImportHelper, import: true
end
diff --git a/test/arrow/gtfs/service_test.exs b/test/arrow/gtfs/service_test.exs
index 7fe3b4649..59158215e 100644
--- a/test/arrow/gtfs/service_test.exs
+++ b/test/arrow/gtfs/service_test.exs
@@ -1,5 +1,6 @@
defmodule Arrow.Gtfs.ServiceTest do
use Arrow.DataCase
+
alias Arrow.Gtfs.Calendar
alias Arrow.Gtfs.CalendarDate
alias Arrow.Gtfs.Service
diff --git a/test/arrow/gtfs/stop_test.exs b/test/arrow/gtfs/stop_test.exs
index 587107046..9a2f126f0 100644
--- a/test/arrow/gtfs/stop_test.exs
+++ b/test/arrow/gtfs/stop_test.exs
@@ -1,8 +1,10 @@
defmodule Arrow.Gtfs.StopTest do
use Arrow.DataCase
- alias Arrow.Gtfs.Stop
+
import Arrow.Factory
+ alias Arrow.Gtfs.Stop
+
test "get_stops_within_mile/2 returns stops roughly within one mile of a stop" do
harvard_lat = 42.3744
harvard_lon = -71.1182
diff --git a/test/arrow/gtfs/time_helper_test.exs b/test/arrow/gtfs/time_helper_test.exs
index 85639f69e..81482ba8f 100644
--- a/test/arrow/gtfs/time_helper_test.exs
+++ b/test/arrow/gtfs/time_helper_test.exs
@@ -1,4 +1,5 @@
defmodule Arrow.Gtfs.TimeHelperTest do
use ExUnit.Case, async: true
+
doctest Arrow.Gtfs.TimeHelper, import: true
end
diff --git a/test/arrow/hastus/export_upload_test.exs b/test/arrow/hastus/export_upload_test.exs
index b602128cc..0a7436d79 100644
--- a/test/arrow/hastus/export_upload_test.exs
+++ b/test/arrow/hastus/export_upload_test.exs
@@ -2,6 +2,7 @@ defmodule Arrow.Hastus.ExportUploadTest do
@moduledoc false
use Arrow.DataCase, async: true
+ alias Arrow.Gtfs.Stop
alias Arrow.Hastus.ExportUpload
@export_dir "test/support/fixtures/hastus"
@@ -57,8 +58,7 @@ defmodule Arrow.Hastus.ExportUploadTest do
test "gives validation errors for invalid exports", %{export: export} do
data = ExportUpload.extract_data_from_upload(%{path: "#{@export_dir}/#{export}"}, "uid")
- assert {:ok,
- {:error, {:trips_with_invalid_shapes, ["67307092-LRV42024-hlb44uf1-Weekday-01"]}}} =
+ assert {:ok, {:error, {:trips_with_invalid_shapes, ["67307092-LRV42024-hlb44uf1-Weekday-01"]}}} =
data
end
@@ -499,12 +499,7 @@ defmodule Arrow.Hastus.ExportUploadTest do
|> Enum.each(&insert_canonical(&1, line, service, context.direction_descs))
end
- defp insert_canonical(
- {route_id, {stop_sequence0, stop_sequence1}},
- line,
- service,
- {dir_desc0, dir_desc1}
- ) do
+ defp insert_canonical({route_id, {stop_sequence0, stop_sequence1}}, line, service, {dir_desc0, dir_desc1}) do
route = insert(:gtfs_route, id: route_id, line: line)
direction0 = insert(:gtfs_direction, direction_id: 0, route: route, desc: dir_desc0)
@@ -550,7 +545,7 @@ defmodule Arrow.Hastus.ExportUploadTest do
stop_sequence0
|> Enum.with_index(1)
|> Enum.each(fn {stop_id, stop_sequence} ->
- stop = maybe_insert(:gtfs_stop, [id: stop_id], Arrow.Gtfs.Stop)
+ stop = maybe_insert(:gtfs_stop, [id: stop_id], Stop)
insert(:gtfs_stop_time,
trip: trip0,
@@ -562,7 +557,7 @@ defmodule Arrow.Hastus.ExportUploadTest do
stop_sequence1
|> Enum.with_index(1)
|> Enum.each(fn {stop_id, stop_sequence} ->
- stop = maybe_insert(:gtfs_stop, [id: stop_id], Arrow.Gtfs.Stop)
+ stop = maybe_insert(:gtfs_stop, [id: stop_id], Stop)
insert(:gtfs_stop_time,
trip: trip1,
diff --git a/test/arrow/hastus_test.exs b/test/arrow/hastus_test.exs
index 1d0915d9f..5385e468f 100644
--- a/test/arrow/hastus_test.exs
+++ b/test/arrow/hastus_test.exs
@@ -4,10 +4,10 @@ defmodule Arrow.HastusTest do
alias Arrow.Hastus
describe "exports" do
- alias Arrow.Hastus.Export
-
import Arrow.HastusFixtures
+ alias Arrow.Hastus.Export
+
@invalid_attrs %{s3_path: nil}
test "list_exports/0 returns all exports" do
@@ -58,10 +58,10 @@ defmodule Arrow.HastusTest do
end
describe "hastus_services" do
- alias Arrow.Hastus.Service
-
import Arrow.HastusFixtures
+ alias Arrow.Hastus.Service
+
@invalid_attrs %{name: nil}
test "list_hastus_services/0 returns all hastus_services" do
@@ -112,10 +112,10 @@ defmodule Arrow.HastusTest do
end
describe "hastus_service_dates" do
- alias Arrow.Hastus.ServiceDate
-
import Arrow.HastusFixtures
+ alias Arrow.Hastus.ServiceDate
+
@invalid_attrs %{start_date: nil, end_date: nil}
test "list_hastus_service_dates/0 returns all hastus_service_dates" do
diff --git a/test/arrow/limits_test.exs b/test/arrow/limits_test.exs
index 986d90892..046908edb 100644
--- a/test/arrow/limits_test.exs
+++ b/test/arrow/limits_test.exs
@@ -2,13 +2,14 @@ defmodule Arrow.LimitsTest do
use Arrow.DataCase
import Arrow.Factory
+
alias Arrow.Limits
describe "limits" do
- alias Arrow.Disruptions.Limit
-
import Arrow.LimitsFixtures
+ alias Arrow.Disruptions.Limit
+
@invalid_attrs %{start_date: nil, end_date: nil}
test "list_limits/0 returns all limits" do
@@ -83,10 +84,10 @@ defmodule Arrow.LimitsTest do
end
describe "limit_day_of_weeks" do
- alias Arrow.Limits.LimitDayOfWeek
-
import Arrow.LimitsFixtures
+ alias Arrow.Limits.LimitDayOfWeek
+
@invalid_attrs %{day_name: nil, start_time: nil, end_time: nil}
test "list_limit_day_of_weeks/0 returns all limit_day_of_weeks" do
diff --git a/test/arrow/open_route_service_api/client_test.exs b/test/arrow/open_route_service_api/client_test.exs
index 4a062b7e2..65e9e3b96 100644
--- a/test/arrow/open_route_service_api/client_test.exs
+++ b/test/arrow/open_route_service_api/client_test.exs
@@ -1,4 +1,5 @@
defmodule Arrow.OpenRouteServiceAPI.ClientTest do
use ExUnit.Case, async: false
+
doctest Arrow.OpenRouteServiceAPI.Client
end
diff --git a/test/arrow/open_route_service_api_test.exs b/test/arrow/open_route_service_api_test.exs
index e4198d4ef..2a1edd55c 100644
--- a/test/arrow/open_route_service_api_test.exs
+++ b/test/arrow/open_route_service_api_test.exs
@@ -1,15 +1,16 @@
defmodule Arrow.OpenRouteServiceAPITest do
- alias Arrow.OpenRouteServiceAPI.DirectionsRequest
- alias Arrow.OpenRouteServiceAPI.DirectionsResponse
use ExUnit.Case, async: false
+ import Arrow.Factory
import Mox
- import Arrow.Factory
+ alias Arrow.OpenRouteServiceAPI.DirectionsRequest
+ alias Arrow.OpenRouteServiceAPI.DirectionsResponse
+ alias Arrow.OpenRouteServiceAPI.MockClient
setup do
stub(
- Arrow.OpenRouteServiceAPI.MockClient,
+ MockClient,
:get_directions,
fn
%DirectionsRequest{
@@ -67,7 +68,7 @@ defmodule Arrow.OpenRouteServiceAPITest do
test "parses directions" do
expect(
- Arrow.OpenRouteServiceAPI.MockClient,
+ MockClient,
:get_directions,
fn _ ->
{:ok,
@@ -107,7 +108,7 @@ defmodule Arrow.OpenRouteServiceAPITest do
test "unknown errors from ORS return `type: :unknown`" do
expect(
- Arrow.OpenRouteServiceAPI.MockClient,
+ MockClient,
:get_directions,
fn _ ->
{:error, %{"code" => -1}}
@@ -123,7 +124,7 @@ defmodule Arrow.OpenRouteServiceAPITest do
test "point not found errors from ORS return `type: :no_route`" do
expect(
- Arrow.OpenRouteServiceAPI.MockClient,
+ MockClient,
:get_directions,
fn _ ->
{:error, %{"code" => 2010}}
@@ -139,7 +140,7 @@ defmodule Arrow.OpenRouteServiceAPITest do
test "route not found errors from ORS return `type: :no_route`" do
expect(
- Arrow.OpenRouteServiceAPI.MockClient,
+ MockClient,
:get_directions,
fn _ ->
{:error, %{"code" => 2009}}
diff --git a/test/arrow/permissions_test.exs b/test/arrow/permissions_test.exs
index 2bdce4d63..8936ef33f 100644
--- a/test/arrow/permissions_test.exs
+++ b/test/arrow/permissions_test.exs
@@ -1,5 +1,6 @@
defmodule Arrow.PermissionsTest do
use ExUnit.Case
+
alias Arrow.Accounts.User
alias Arrow.Permissions
diff --git a/test/arrow/repo/foreign_key_constraint_test.exs b/test/arrow/repo/foreign_key_constraint_test.exs
index a4efa5b34..876d352b1 100644
--- a/test/arrow/repo/foreign_key_constraint_test.exs
+++ b/test/arrow/repo/foreign_key_constraint_test.exs
@@ -102,9 +102,7 @@ defmodule Arrow.Repo.ForeignKeyConstraintTest do
assert [] = ForeignKeyConstraint.external_constraints_referencing_tables(["a"])
- assert "c_a_id_fkey" not in Repo.all(
- from fk in Arrow.Repo.ForeignKeyConstraint, select: fk.name
- )
+ assert "c_a_id_fkey" not in Repo.all(from fk in Arrow.Repo.ForeignKeyConstraint, select: fk.name)
end
end
diff --git a/test/arrow/repo/migrator_test.exs b/test/arrow/repo/migrator_test.exs
index 77ed79809..f0dff6eee 100644
--- a/test/arrow/repo/migrator_test.exs
+++ b/test/arrow/repo/migrator_test.exs
@@ -13,6 +13,7 @@ end
defmodule Arrow.Repo.MigratorTest do
@moduledoc false
use ExUnit.Case
+
import ExUnit.CaptureLog
alias Arrow.Repo.Migrator
diff --git a/test/arrow/repo_test.exs b/test/arrow/repo_test.exs
index 86a11d56f..5f7e7110f 100644
--- a/test/arrow/repo_test.exs
+++ b/test/arrow/repo_test.exs
@@ -1,8 +1,10 @@
defmodule Arrow.RepoTest do
use ExUnit.Case, async: false
+
import Test.Support.Helpers
defmodule FakeAwsRds do
+ @moduledoc false
def generate_db_auth_token(_, _, _, _) do
"iam_token"
end
diff --git a/test/arrow/shuttle/kml_test.exs b/test/arrow/shuttle/kml_test.exs
index 0006270ed..c107b2ddd 100644
--- a/test/arrow/shuttle/kml_test.exs
+++ b/test/arrow/shuttle/kml_test.exs
@@ -18,8 +18,7 @@ defmodule Arrow.Shuttles.KMLTest do
{"name", [], ["some shape"]},
{"LineString", [],
[
- {"coordinates", [],
- ["-71.14163,42.39551 -71.14163,42.39551 -71.14163,42.39551"]}
+ {"coordinates", [], ["-71.14163,42.39551 -71.14163,42.39551 -71.14163,42.39551"]}
]}
]}
]}
diff --git a/test/arrow/shuttle/route_stop_test.exs b/test/arrow/shuttle/route_stop_test.exs
index 59ba3ab27..66008c3e1 100644
--- a/test/arrow/shuttle/route_stop_test.exs
+++ b/test/arrow/shuttle/route_stop_test.exs
@@ -1,10 +1,10 @@
defmodule Arrow.Shuttles.RouteStopTest do
use Arrow.DataCase
- alias Arrow.Shuttles.RouteStop
-
import Arrow.Factory
+ alias Arrow.Shuttles.RouteStop
+
describe "changeset/2" do
test "handles GTFS stop" do
gtfs_stop = insert(:gtfs_stop)
@@ -45,8 +45,7 @@ defmodule Arrow.Shuttles.RouteStopTest do
assert %Ecto.Changeset{
valid?: false,
errors: [
- display_stop_id:
- {"not a valid stop ID '%{display_stop_id}'", [display_stop_id: "invalid_id"]}
+ display_stop_id: {"not a valid stop ID '%{display_stop_id}'", [display_stop_id: "invalid_id"]}
]
} = changeset
end
diff --git a/test/arrow/shuttle/shape_kml_test.exs b/test/arrow/shuttle/shape_kml_test.exs
index 6fa0d51c8..b2d427554 100644
--- a/test/arrow/shuttle/shape_kml_test.exs
+++ b/test/arrow/shuttle/shape_kml_test.exs
@@ -14,8 +14,7 @@ defmodule Arrow.Shuttles.ShapeKMLTest do
{"name", [], ["some shape"]},
{"LineString", [],
[
- {"coordinates", [],
- ["-71.14163,42.39551 -71.14163,42.39551 -71.14163,42.39551"]}
+ {"coordinates", [], ["-71.14163,42.39551 -71.14163,42.39551 -71.14163,42.39551"]}
]}
]} = Saxy.Builder.build(@required_struct)
end
diff --git a/test/arrow/shuttle/shuttle_test.exs b/test/arrow/shuttle/shuttle_test.exs
index a8106e9c2..4105673da 100644
--- a/test/arrow/shuttle/shuttle_test.exs
+++ b/test/arrow/shuttle/shuttle_test.exs
@@ -1,12 +1,13 @@
defmodule Arrow.Shuttles.ShuttleTest do
use Arrow.DataCase
- alias Arrow.Shuttles.Shuttle
-
import Arrow.Factory
import Arrow.ShuttlesFixtures
import Test.Support.Helpers
+ alias Arrow.Shuttles.Route
+ alias Arrow.Shuttles.Shuttle
+
describe "changeset/2" do
test "cannot mark a shuttle as active without at least two shuttle_stops per shuttle_route" do
shuttle = shuttle_fixture()
@@ -26,7 +27,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
[stop1, stop2, stop3, stop4] = insert_list(4, :gtfs_stop)
route0
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "0",
@@ -45,7 +46,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
|> Arrow.Repo.update()
route1
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "1",
@@ -124,7 +125,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
[stop1, stop2, stop3, stop4] = insert_list(4, :gtfs_stop)
route0
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "0",
@@ -142,7 +143,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
|> Arrow.Repo.update()
route1
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "1",
@@ -166,9 +167,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
assert %Ecto.Changeset{
valid?: false,
errors: [
- status:
- {"all stops except the last in each direction must have a time to next stop",
- []}
+ status: {"all stops except the last in each direction must have a time to next stop", []}
]
} = changeset
end
@@ -180,7 +179,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
[stop1, stop2, stop3, stop4] = insert_list(4, :gtfs_stop)
route0
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "0",
@@ -198,7 +197,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
|> Arrow.Repo.update()
route1
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "1",
@@ -278,7 +277,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
[stop1, stop2, stop3, stop4] = insert_list(4, :gtfs_stop)
route0
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "0",
@@ -296,7 +295,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
|> Arrow.Repo.update()
route1
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "1",
@@ -326,8 +325,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
assert %Ecto.Changeset{
valid?: false,
errors: [
- status:
- {"cannot set to a non-active status while in use as a replacement service", []}
+ status: {"cannot set to a non-active status while in use as a replacement service", []}
]
} = changeset
end
@@ -339,7 +337,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
[stop1, stop2, stop3, stop4] = insert_list(4, :gtfs_stop)
route0
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "0",
@@ -357,7 +355,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
|> Arrow.Repo.update()
route1
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "1",
@@ -416,7 +414,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
{:ok, _} =
route0
- |> Arrow.Shuttles.Route.changeset(route0_attrs)
+ |> Route.changeset(route0_attrs)
|> Arrow.Repo.update()
route1_attrs = %{
@@ -438,7 +436,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
{:ok, _} =
route1
- |> Arrow.Shuttles.Route.changeset(route1_attrs)
+ |> Route.changeset(route1_attrs)
|> Arrow.Repo.update()
shuttle = Arrow.Shuttles.get_shuttle!(shuttle.id)
@@ -489,7 +487,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
{:ok, _} =
route0
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"shape_id" => shape.id,
"route_stops" => [
%{
@@ -509,7 +507,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
{:ok, _} =
route1
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"shape_id" => shape.id,
"route_stops" => [
%{
@@ -545,7 +543,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
{:ok, _} =
route0
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"shape_id" => shape.id,
"route_stops" => [
%{
@@ -565,7 +563,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
{:ok, _} =
route1
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"shape_id" => shape.id,
"route_stops" => [
%{
@@ -621,7 +619,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
{:ok, _} =
route0
- |> Arrow.Shuttles.Route.changeset(route0_attrs)
+ |> Route.changeset(route0_attrs)
|> Arrow.Repo.update()
route1_attrs = %{
@@ -643,7 +641,7 @@ defmodule Arrow.Shuttles.ShuttleTest do
{:ok, _} =
route1
- |> Arrow.Shuttles.Route.changeset(route1_attrs)
+ |> Route.changeset(route1_attrs)
|> Arrow.Repo.update()
shuttle = Arrow.Shuttles.get_shuttle!(shuttle.id)
diff --git a/test/arrow/shuttles_test.exs b/test/arrow/shuttles_test.exs
index f92904b73..c8be32558 100644
--- a/test/arrow/shuttles_test.exs
+++ b/test/arrow/shuttles_test.exs
@@ -2,12 +2,16 @@ defmodule Arrow.ShuttlesTest do
use Arrow.DataCase
import Arrow.Factory
- alias Arrow.Shuttles
- alias Arrow.Shuttles.Shape
import Arrow.ShuttlesFixtures
import Arrow.StopsFixtures
- import Test.Support.Helpers
import Mox
+ import Test.Support.Helpers
+
+ alias Arrow.OpenRouteServiceAPI.DirectionsRequest
+ alias Arrow.OpenRouteServiceAPI.MockClient
+ alias Arrow.Shuttles
+ alias Arrow.Shuttles.Shape
+ alias Arrow.Shuttles.Stop
setup :verify_on_exit!
@@ -124,13 +128,11 @@ defmodule Arrow.ShuttlesTest do
end
end
- alias Arrow.Shuttles
-
describe "shuttles" do
- alias Arrow.Shuttles.Shuttle
-
import Arrow.ShuttlesFixtures
+ alias Arrow.Shuttles.Shuttle
+
@invalid_attrs %{status: nil, shuttle_name: nil}
test "list_shuttles/0 returns all shuttles" do
@@ -168,10 +170,10 @@ defmodule Arrow.ShuttlesTest do
shuttle = shuttle_fixture()
[route1, route2] = shuttle.routes
destination = unique_shuttle_route_destination()
- updated_route1 = Map.merge(route1, %{destination: destination})
+ updated_route1 = Map.put(route1, :destination, destination)
update_attrs =
- Map.from_struct(%Shuttle{
+ Map.from_struct(%{
shuttle
| routes: [Map.from_struct(updated_route1), Map.from_struct(route2)]
})
@@ -187,11 +189,11 @@ defmodule Arrow.ShuttlesTest do
first_route = List.first(routes)
new_shape = shape_fixture()
# Updated shape is set by shape_id param
- updated_route1 = Map.merge(List.first(routes), %{shape_id: new_shape.id})
+ updated_route1 = Map.put(List.first(routes), :shape_id, new_shape.id)
existing_route2 = Enum.at(routes, 1)
update_attrs =
- Map.from_struct(%Shuttle{
+ Map.from_struct(%{
shuttle
| routes: [Map.from_struct(updated_route1), Map.from_struct(existing_route2)]
})
@@ -236,7 +238,7 @@ defmodule Arrow.ShuttlesTest do
stop = stop_fixture()
stop_id = stop.stop_id
- assert %Arrow.Shuttles.Stop{stop_id: ^stop_id} =
+ assert %Stop{stop_id: ^stop_id} =
Shuttles.stop_or_gtfs_stop_for_stop_id(stop_id)
end
@@ -252,7 +254,7 @@ defmodule Arrow.ShuttlesTest do
_gtfs_stop = insert(:gtfs_stop)
stop_id = stop.stop_id
- assert %Arrow.Shuttles.Stop{stop_id: ^stop_id} =
+ assert %Stop{stop_id: ^stop_id} =
Shuttles.stop_or_gtfs_stop_for_stop_id(stop_id)
end
@@ -291,7 +293,7 @@ defmodule Arrow.ShuttlesTest do
test "finds Arrow stop by stop ID" do
insert(:stop, %{stop_id: "12"})
- assert [%Arrow.Shuttles.Stop{stop_id: "12"}] =
+ assert [%Stop{stop_id: "12"}] =
Shuttles.stops_or_gtfs_stops_by_search_string("1")
end
@@ -299,7 +301,7 @@ defmodule Arrow.ShuttlesTest do
stop = insert(:stop, %{stop_desc: "Description"})
stop_id = stop.stop_id
- assert [%Arrow.Shuttles.Stop{stop_id: ^stop_id}] =
+ assert [%Stop{stop_id: ^stop_id}] =
Shuttles.stops_or_gtfs_stops_by_search_string("Des")
end
@@ -307,7 +309,7 @@ defmodule Arrow.ShuttlesTest do
stop = insert(:stop, %{stop_name: "Name"})
stop_id = stop.stop_id
- assert [%Arrow.Shuttles.Stop{stop_id: ^stop_id}] =
+ assert [%Stop{stop_id: ^stop_id}] =
Shuttles.stops_or_gtfs_stops_by_search_string("Na")
end
@@ -341,7 +343,7 @@ defmodule Arrow.ShuttlesTest do
gtfs_stop = insert(:gtfs_stop, %{desc: "Description B"})
gtfs_stop_id = gtfs_stop.id
- assert [%Arrow.Shuttles.Stop{stop_id: ^stop_id}, %Arrow.Gtfs.Stop{id: ^gtfs_stop_id}] =
+ assert [%Stop{stop_id: ^stop_id}, %Arrow.Gtfs.Stop{id: ^gtfs_stop_id}] =
Shuttles.stops_or_gtfs_stops_by_search_string("Des")
end
@@ -350,7 +352,7 @@ defmodule Arrow.ShuttlesTest do
insert(:gtfs_stop, %{id: "stop"})
- assert [%Arrow.Shuttles.Stop{stop_id: "stop"}] =
+ assert [%Stop{stop_id: "stop"}] =
Shuttles.stops_or_gtfs_stops_by_search_string("st")
end
end
@@ -358,9 +360,9 @@ defmodule Arrow.ShuttlesTest do
describe "get_travel_times/1" do
test "calculates travel time between coordinates" do
expect(
- Arrow.OpenRouteServiceAPI.MockClient,
+ MockClient,
:get_directions,
- fn %Arrow.OpenRouteServiceAPI.DirectionsRequest{
+ fn %DirectionsRequest{
coordinates: [[-71.11934, 42.38758], [-71.1202, 42.373396]] = coordinates
} ->
{:ok,
@@ -391,9 +393,9 @@ defmodule Arrow.ShuttlesTest do
test "handles atom keys for coordinates" do
expect(
- Arrow.OpenRouteServiceAPI.MockClient,
+ MockClient,
:get_directions,
- fn %Arrow.OpenRouteServiceAPI.DirectionsRequest{
+ fn %DirectionsRequest{
coordinates: [[-71.11934, 42.38758], [-71.1202, 42.373396]] = coordinates
} ->
{:ok,
@@ -424,9 +426,9 @@ defmodule Arrow.ShuttlesTest do
test "errors if it cannot determine a route between the coordinates" do
expect(
- Arrow.OpenRouteServiceAPI.MockClient,
+ MockClient,
:get_directions,
- fn %Arrow.OpenRouteServiceAPI.DirectionsRequest{} -> {:error, %{"code" => 2010}} end
+ fn %DirectionsRequest{} -> {:error, %{"code" => 2010}} end
)
coord1 = %{"lat" => 42.38758, "lon" => -71.11934}
@@ -438,9 +440,9 @@ defmodule Arrow.ShuttlesTest do
test "errors if OpenRouteService returns an unknown error" do
expect(
- Arrow.OpenRouteServiceAPI.MockClient,
+ MockClient,
:get_directions,
- fn %Arrow.OpenRouteServiceAPI.DirectionsRequest{} -> {:error, %{"code" => -1}} end
+ fn %DirectionsRequest{} -> {:error, %{"code" => -1}} end
)
coord1 = %{"lat" => 42.38758, "lon" => -71.11934}
@@ -519,7 +521,7 @@ defmodule Arrow.ShuttlesTest do
test "raises if a stop has missing lat/lon data" do
stop2 =
%{stop_lat: 42.373396, stop_lon: -70.1202}
- |> stop_fixture
+ |> stop_fixture()
|> Map.drop([:stop_lat, :stop_lon])
assert {:error, error_message} = Shuttles.get_stop_coordinates(stop2)
diff --git a/test/arrow/stops_test.exs b/test/arrow/stops_test.exs
index ea2a6018f..6fc9132ba 100644
--- a/test/arrow/stops_test.exs
+++ b/test/arrow/stops_test.exs
@@ -5,10 +5,10 @@ defmodule Arrow.StopsTest do
alias Arrow.Stops
describe "stops" do
- alias Arrow.Shuttles.Stop
-
import Arrow.StopsFixtures
+ alias Arrow.Shuttles.Stop
+
@invalid_attrs %{
stop_id: nil,
stop_name: nil,
diff --git a/test/arrow/sync_worker_test.exs b/test/arrow/sync_worker_test.exs
index 3df16dd93..32aac22ad 100644
--- a/test/arrow/sync_worker_test.exs
+++ b/test/arrow/sync_worker_test.exs
@@ -1,18 +1,20 @@
defmodule Arrow.SyncWorkerTest do
use Arrow.DataCase
+
import Arrow.ShuttlesFixtures
import Arrow.StopsFixtures
import Mox
- alias Arrow.{Shuttles, Stops, SyncWorker}
+ alias Arrow.Shuttles
+ alias Arrow.Stops
+ alias Arrow.SyncWorker
# Ensure mocks are verified when the test exits
setup :verify_on_exit!
describe "perform/1" do
test "runs sync successfully" do
- Arrow.HTTPMock
- |> expect(:get, 2, fn
+ expect(Arrow.HTTPMock, :get, 2, fn
"https://test.example.com/api/shuttle-stops", _headers ->
{:ok, %{status_code: 200, body: Jason.encode!(%{"data" => []})}}
@@ -58,8 +60,7 @@ defmodule Arrow.SyncWorkerTest do
]
}
- Arrow.HTTPMock
- |> expect(:get, 2, fn
+ expect(Arrow.HTTPMock, :get, 2, fn
"https://test.example.com/api/shuttle-stops", _headers ->
{:ok, %{status_code: 200, body: Jason.encode!(api_stops)}}
@@ -82,8 +83,7 @@ defmodule Arrow.SyncWorkerTest do
end
test "handles API errors gracefully" do
- Arrow.HTTPMock
- |> expect(:get, 1, fn
+ expect(Arrow.HTTPMock, :get, 1, fn
"https://test.example.com/api/shuttle-stops", _headers ->
{:ok, %{status_code: 500}}
end)
@@ -95,8 +95,7 @@ defmodule Arrow.SyncWorkerTest do
end
test "handles malformed JSON responses" do
- Arrow.HTTPMock
- |> expect(:get, 1, fn
+ expect(Arrow.HTTPMock, :get, 1, fn
"https://test.example.com/api/shuttle-stops", _headers ->
{:ok, %{status_code: 200, body: "invalid json"}}
end)
@@ -133,8 +132,7 @@ defmodule Arrow.SyncWorkerTest do
]
}
- Arrow.HTTPMock
- |> expect(:get, 2, fn
+ expect(Arrow.HTTPMock, :get, 2, fn
"https://test.example.com/api/shuttle-stops", _headers ->
{:ok, %{status_code: 200, body: Jason.encode!(api_stops)}}
@@ -169,8 +167,7 @@ defmodule Arrow.SyncWorkerTest do
"bucket" => "test-bucket",
"path" => "shapes/ExistingToPlace-S.kml",
"prefix" => "shapes/",
- "download_url" =>
- "https://test-bucket.s3.amazonaws.com/shapes/ExistingToPlace-S.kml"
+ "download_url" => "https://test-bucket.s3.amazonaws.com/shapes/ExistingToPlace-S.kml"
}
},
%{
diff --git a/test/arrow/telemetry_test.exs b/test/arrow/telemetry_test.exs
index be847d0fc..c7df1d959 100644
--- a/test/arrow/telemetry_test.exs
+++ b/test/arrow/telemetry_test.exs
@@ -14,6 +14,7 @@ defmodule Arrow.TelemetryTest do
@moduledoc false
use ExUnit.Case, async: true
use Oban.Testing, repo: Arrow.Repo
+
import ExUnit.CaptureLog
describe "oban.job.exception listener" do
diff --git a/test/arrow/ueberauth/strategy/fake_test.exs b/test/arrow/ueberauth/strategy/fake_test.exs
index 6d73a9e67..85eb70a93 100644
--- a/test/arrow/ueberauth/strategy/fake_test.exs
+++ b/test/arrow/ueberauth/strategy/fake_test.exs
@@ -1,12 +1,14 @@
defmodule Arrow.Ueberauth.Strategy.FakeTest do
use ExUnit.Case
- import Plug.Test
+
import Arrow.Ueberauth.Strategy.Fake
+ import Plug.Test
describe "implements all the callbacks" do
test "handle_request!/1" do
conn =
- conn(:get, "/auth/keycloak")
+ :get
+ |> conn("/auth/keycloak")
|> init_test_session(%{})
|> handle_request!()
diff --git a/test/arrow_web/auth_manager/error_handler_test.exs b/test/arrow_web/auth_manager/error_handler_test.exs
index e1738aa6e..4c9de676d 100644
--- a/test/arrow_web/auth_manager/error_handler_test.exs
+++ b/test/arrow_web/auth_manager/error_handler_test.exs
@@ -1,6 +1,8 @@
defmodule ArrowWeb.AuthManager.ErrorHandlerTest do
use ArrowWeb.ConnCase
+ alias ArrowWeb.AuthManager.ErrorHandler
+
describe "auth_error/3" do
test "redirects to login if there's no refresh key", %{conn: conn} do
provider = Application.get_env(:arrow, :ueberauth_provider)
@@ -8,7 +10,7 @@ defmodule ArrowWeb.AuthManager.ErrorHandlerTest do
conn =
conn
|> init_test_session(%{})
- |> ArrowWeb.AuthManager.ErrorHandler.auth_error({:some_type, :reason}, [])
+ |> ErrorHandler.auth_error({:some_type, :reason}, [])
assert html_response(conn, 302) =~ "\"/auth/#{provider}?prompt=login\""
end
@@ -17,9 +19,10 @@ defmodule ArrowWeb.AuthManager.ErrorHandlerTest do
provider = Application.get_env(:arrow, :ueberauth_provider)
conn =
- build_conn(:get, "/some/path")
+ :get
+ |> build_conn("/some/path")
|> init_test_session(%{})
- |> ArrowWeb.AuthManager.ErrorHandler.auth_error({:some_type, :reason}, [])
+ |> ErrorHandler.auth_error({:some_type, :reason}, [])
assert get_session(conn, :auth_orig_path) == "/some/path"
assert html_response(conn, 302) =~ "\"/auth/#{provider}?prompt=login\""
@@ -29,9 +32,10 @@ defmodule ArrowWeb.AuthManager.ErrorHandlerTest do
provider = Application.get_env(:arrow, :ueberauth_provider)
conn =
- build_conn(:post, "/some/path")
+ :post
+ |> build_conn("/some/path")
|> init_test_session(%{})
- |> ArrowWeb.AuthManager.ErrorHandler.auth_error({:some_type, :reason}, [])
+ |> ErrorHandler.auth_error({:some_type, :reason}, [])
assert is_nil(get_session(conn, :auth_orig_path))
assert html_response(conn, 302) =~ "\"/auth/#{provider}?prompt=login\""
diff --git a/test/arrow_web/controllers/api/adjustment_controller_test.exs b/test/arrow_web/controllers/api/adjustment_controller_test.exs
index d8b158ad6..b13aff626 100644
--- a/test/arrow_web/controllers/api/adjustment_controller_test.exs
+++ b/test/arrow_web/controllers/api/adjustment_controller_test.exs
@@ -1,6 +1,8 @@
defmodule ArrowWeb.API.AdjustmentControllerTest do
use ArrowWeb.ConnCase
- alias Arrow.{Adjustment, Repo}
+
+ alias Arrow.Adjustment
+ alias Arrow.Repo
describe "index/2" do
@tag :authenticated
diff --git a/test/arrow_web/controllers/api/disruption_controller_test.exs b/test/arrow_web/controllers/api/disruption_controller_test.exs
index fbc05d3e6..5f0e0b945 100644
--- a/test/arrow_web/controllers/api/disruption_controller_test.exs
+++ b/test/arrow_web/controllers/api/disruption_controller_test.exs
@@ -1,7 +1,10 @@
defmodule ArrowWeb.API.DisruptionControllerTest do
use ArrowWeb.ConnCase
+
import Arrow.Factory
- alias Arrow.{Disruption, Repo}
+
+ alias Arrow.Disruption
+ alias Arrow.Repo
alias Ecto.Changeset
describe "index/2" do
diff --git a/test/arrow_web/controllers/api/notice_controller_test.exs b/test/arrow_web/controllers/api/notice_controller_test.exs
index c1f2f921d..f33ca4e71 100644
--- a/test/arrow_web/controllers/api/notice_controller_test.exs
+++ b/test/arrow_web/controllers/api/notice_controller_test.exs
@@ -1,5 +1,6 @@
defmodule ArrowWeb.API.PublishNoticeControllerTest do
use ArrowWeb.ConnCase
+
import Arrow.Factory
import ExUnit.CaptureLog
diff --git a/test/arrow_web/controllers/api/replacement_service_controller_test.exs b/test/arrow_web/controllers/api/replacement_service_controller_test.exs
index 2c45ef59f..76a6f59cd 100644
--- a/test/arrow_web/controllers/api/replacement_service_controller_test.exs
+++ b/test/arrow_web/controllers/api/replacement_service_controller_test.exs
@@ -50,10 +50,8 @@ defmodule ArrowWeb.API.ReplacementServiceControllerTest do
active_end_date_formatted = Date.to_iso8601(active_end)
res =
- get(
- conn,
- "/api/replacement-service?start_date=#{active_end_date_formatted}&end_date=#{active_end_date_formatted}"
- )
+ conn
+ |> get("/api/replacement-service?start_date=#{active_end_date_formatted}&end_date=#{active_end_date_formatted}")
|> json_response(200)
assert %{
diff --git a/test/arrow_web/controllers/api/shapes_controller_test.exs b/test/arrow_web/controllers/api/shapes_controller_test.exs
index b300cbe4d..1c92baa43 100644
--- a/test/arrow_web/controllers/api/shapes_controller_test.exs
+++ b/test/arrow_web/controllers/api/shapes_controller_test.exs
@@ -1,5 +1,6 @@
defmodule ArrowWeb.API.ShapesControllerTest do
use ArrowWeb.ConnCase
+
import Arrow.ShuttlesFixtures
import Test.Support.Helpers
@@ -19,7 +20,7 @@ defmodule ArrowWeb.API.ShapesControllerTest do
assert Enum.count(data) == 3
- shape_ids = data |> Enum.map(fn %{"id" => id} -> String.to_integer(id) end) |> MapSet.new()
+ shape_ids = MapSet.new(data, fn %{"id" => id} -> String.to_integer(id) end)
assert shape_ids == MapSet.new([shape1.id, shape2.id, shape3.id])
end
diff --git a/test/arrow_web/controllers/api/shuttle_controller_test.exs b/test/arrow_web/controllers/api/shuttle_controller_test.exs
index 1bcdf3075..6f93be5a1 100644
--- a/test/arrow_web/controllers/api/shuttle_controller_test.exs
+++ b/test/arrow_web/controllers/api/shuttle_controller_test.exs
@@ -1,8 +1,10 @@
defmodule ArrowWeb.API.ShuttleControllerTest do
use ArrowWeb.ConnCase
- import Arrow.ShuttlesFixtures
import Arrow.Factory
+ import Arrow.ShuttlesFixtures
+
+ alias Arrow.Shuttles.Route
describe "index/2" do
@tag :authenticated
@@ -23,7 +25,7 @@ defmodule ArrowWeb.API.ShuttleControllerTest do
stop_map = %{stop1.id => stop1, stop2.id => stop2, stop3.id => stop3, stop4.id => stop4}
route0
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "0",
@@ -41,7 +43,7 @@ defmodule ArrowWeb.API.ShuttleControllerTest do
|> Arrow.Repo.update()
route1
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "1",
@@ -67,7 +69,8 @@ defmodule ArrowWeb.API.ShuttleControllerTest do
|> Arrow.Repo.update()
res =
- get(conn, "/api/shuttles")
+ conn
+ |> get("/api/shuttles")
|> json_response(200)
route0_id = to_string(route0.id)
@@ -93,7 +96,7 @@ defmodule ArrowWeb.API.ShuttleControllerTest do
Enum.each(included, fn
%{"type" => "shuttle_route", "attributes" => attributes, "id" => id} ->
- route = route_map[id |> String.to_integer()]
+ route = route_map[String.to_integer(id)]
assert to_string(route.destination) == attributes["destination"]
assert to_string(route.direction_id) == attributes["direction_id"]
# Will always be disabled in test because we don't actually upload shape files
diff --git a/test/arrow_web/controllers/api/stops_controller_test.exs b/test/arrow_web/controllers/api/stops_controller_test.exs
index bc081ba25..6fc1d33b1 100644
--- a/test/arrow_web/controllers/api/stops_controller_test.exs
+++ b/test/arrow_web/controllers/api/stops_controller_test.exs
@@ -1,5 +1,6 @@
defmodule ArrowWeb.API.StopsControllerTest do
use ArrowWeb.ConnCase
+
import Arrow.Factory
describe "index/2" do
diff --git a/test/arrow_web/controllers/auth_controller_test.exs b/test/arrow_web/controllers/auth_controller_test.exs
index 5234b9078..52a9160db 100644
--- a/test/arrow_web/controllers/auth_controller_test.exs
+++ b/test/arrow_web/controllers/auth_controller_test.exs
@@ -1,6 +1,9 @@
defmodule ArrowWeb.Controllers.AuthControllerTest do
use ArrowWeb.ConnCase
+ alias ArrowWeb.Router.Helpers
+ alias Ueberauth.Auth.Credentials
+
describe "callback" do
test "handles missing roles (keycloak)", %{conn: conn} do
current_time = System.system_time(:second)
@@ -8,7 +11,7 @@ defmodule ArrowWeb.Controllers.AuthControllerTest do
auth = %Ueberauth.Auth{
uid: "foo@mbta.com",
provider: :keycloak,
- credentials: %Ueberauth.Auth.Credentials{
+ credentials: %Credentials{
expires_at: current_time + 1_000,
other: %{id_token: "id_token"}
},
@@ -49,7 +52,7 @@ defmodule ArrowWeb.Controllers.AuthControllerTest do
auth = %Ueberauth.Auth{
uid: "foo@mbta.com",
provider: :keycloak,
- credentials: %Ueberauth.Auth.Credentials{
+ credentials: %Credentials{
expires_at: current_time + 1_000,
other: %{id_token: "id_token"}
},
@@ -67,10 +70,10 @@ defmodule ArrowWeb.Controllers.AuthControllerTest do
conn =
conn
|> assign(:ueberauth_auth, auth)
- |> get(ArrowWeb.Router.Helpers.auth_path(conn, :callback, "keycloak"))
+ |> get(Helpers.auth_path(conn, :callback, "keycloak"))
assert Guardian.Plug.authenticated?(conn)
- conn = get(conn, ArrowWeb.Router.Helpers.auth_path(conn, :logout, "keycloak"))
+ conn = get(conn, Helpers.auth_path(conn, :logout, "keycloak"))
refute Guardian.Plug.authenticated?(conn)
end
@@ -83,7 +86,7 @@ defmodule ArrowWeb.Controllers.AuthControllerTest do
auth = %Ueberauth.Auth{
uid: "foo@mbta.com",
provider: :keycloak,
- credentials: %Ueberauth.Auth.Credentials{
+ credentials: %Credentials{
expires_at: current_time + 1_000,
other: %{id_token: "id_token"}
},
@@ -109,7 +112,7 @@ defmodule ArrowWeb.Controllers.AuthControllerTest do
auth = %Ueberauth.Auth{
uid: "foo@mbta.com",
provider: :keycloak,
- credentials: %Ueberauth.Auth.Credentials{
+ credentials: %Credentials{
expires_at: current_time + 1_000,
other: %{id_token: "id_token"}
},
diff --git a/test/arrow_web/controllers/disruption_controller/filters_test.exs b/test/arrow_web/controllers/disruption_controller/filters_test.exs
index b6cda1362..96a39c5bd 100644
--- a/test/arrow_web/controllers/disruption_controller/filters_test.exs
+++ b/test/arrow_web/controllers/disruption_controller/filters_test.exs
@@ -2,7 +2,8 @@ defmodule ArrowWeb.DisruptionController.FiltersTest do
use ExUnit.Case, async: true
alias ArrowWeb.DisruptionController.Filters
- alias ArrowWeb.DisruptionController.Filters.{Calendar, Table}
+ alias ArrowWeb.DisruptionController.Filters.Calendar
+ alias ArrowWeb.DisruptionController.Filters.Table
defp set(items \\ []), do: MapSet.new(items)
diff --git a/test/arrow_web/controllers/disruption_controller/index_test.exs b/test/arrow_web/controllers/disruption_controller/index_test.exs
index 26074271b..1a2de0449 100644
--- a/test/arrow_web/controllers/disruption_controller/index_test.exs
+++ b/test/arrow_web/controllers/disruption_controller/index_test.exs
@@ -1,9 +1,11 @@
defmodule ArrowWeb.DisruptionController.IndexTest do
use Arrow.DataCase, async: true
+
import Arrow.Factory
- alias ArrowWeb.DisruptionController.{Filters, Index}
+ alias ArrowWeb.DisruptionController.Filters
alias ArrowWeb.DisruptionController.Filters.Table
+ alias ArrowWeb.DisruptionController.Index
describe "all/0" do
test "gets disruptions whose latest revision is active" do
diff --git a/test/arrow_web/controllers/disruption_controller_test.exs b/test/arrow_web/controllers/disruption_controller_test.exs
index 8e80e9da6..03252cd67 100644
--- a/test/arrow_web/controllers/disruption_controller_test.exs
+++ b/test/arrow_web/controllers/disruption_controller_test.exs
@@ -1,9 +1,12 @@
defmodule ArrowWeb.DisruptionControllerTest do
use ArrowWeb.ConnCase, async: true
- alias Arrow.{Disruption, DisruptionRevision, Repo}
import Arrow.Factory
+ alias Arrow.Disruption
+ alias Arrow.DisruptionRevision
+ alias Arrow.Repo
+
describe "index/2" do
@tag :authenticated
test "anyone can list disruptions", %{conn: conn} do
@@ -178,7 +181,7 @@ defmodule ArrowWeb.DisruptionControllerTest do
)
params = %{
- "revision" => string_params_for(:disruption_revision) |> Map.delete("exceptions")
+ "revision" => :disruption_revision |> string_params_for() |> Map.delete("exceptions")
}
_ =
@@ -187,7 +190,8 @@ defmodule ArrowWeb.DisruptionControllerTest do
|> redirected_to()
assert %{exceptions: []} =
- Repo.get!(DisruptionRevision, Disruption.latest_revision_id(id))
+ DisruptionRevision
+ |> Repo.get!(Disruption.latest_revision_id(id))
|> Repo.preload(:exceptions)
end
diff --git a/test/arrow_web/controllers/disruption_v2_controller/filters_test.exs b/test/arrow_web/controllers/disruption_v2_controller/filters_test.exs
index 65bdd9693..2e951ef06 100644
--- a/test/arrow_web/controllers/disruption_v2_controller/filters_test.exs
+++ b/test/arrow_web/controllers/disruption_v2_controller/filters_test.exs
@@ -2,7 +2,8 @@ defmodule ArrowWeb.DisruptionV2Controller.FiltersTest do
use ExUnit.Case, async: true
alias ArrowWeb.DisruptionV2Controller.Filters
- alias ArrowWeb.DisruptionV2Controller.Filters.{Calendar, Table}
+ alias ArrowWeb.DisruptionV2Controller.Filters.Calendar
+ alias ArrowWeb.DisruptionV2Controller.Filters.Table
defp set(items \\ []), do: MapSet.new(items)
diff --git a/test/arrow_web/controllers/feed_controller_test.exs b/test/arrow_web/controllers/feed_controller_test.exs
index b5215ccbe..61fa5a4e5 100644
--- a/test/arrow_web/controllers/feed_controller_test.exs
+++ b/test/arrow_web/controllers/feed_controller_test.exs
@@ -1,5 +1,6 @@
defmodule ArrowWeb.FeedControllerTest do
use ArrowWeb.ConnCase
+
import Arrow.Factory
describe "GET /feed" do
diff --git a/test/arrow_web/controllers/note_controller_test.exs b/test/arrow_web/controllers/note_controller_test.exs
index 1faba9563..7f3d672db 100644
--- a/test/arrow_web/controllers/note_controller_test.exs
+++ b/test/arrow_web/controllers/note_controller_test.exs
@@ -1,9 +1,10 @@
defmodule ArrowWeb.NoteControllerTest do
use ArrowWeb.ConnCase, async: true
- alias Arrow.Disruption
import Arrow.Factory
+ alias Arrow.Disruption
+
describe "create/2" do
@tag :authenticated_admin
test "inserts a note when valid", %{conn: conn} do
diff --git a/test/arrow_web/controllers/shape_controller_test.exs b/test/arrow_web/controllers/shape_controller_test.exs
index 98ef2dfbb..30c9ae906 100644
--- a/test/arrow_web/controllers/shape_controller_test.exs
+++ b/test/arrow_web/controllers/shape_controller_test.exs
@@ -1,11 +1,12 @@
defmodule ArrowWeb.ShapeControllerTest do
use ArrowWeb.ConnCase, async: true
- alias Arrow.Repo
- alias Arrow.Shuttles.Shape
import Arrow.ShuttlesFixtures
import Test.Support.Helpers
+ alias Arrow.Repo
+ alias Arrow.Shuttles.Shape
+
@upload_attrs %{
name: "some name-S",
filename: %Plug.Upload{
diff --git a/test/arrow_web/controllers/stop_html_test.exs b/test/arrow_web/controllers/stop_html_test.exs
index 935a51a4a..d50d2600e 100644
--- a/test/arrow_web/controllers/stop_html_test.exs
+++ b/test/arrow_web/controllers/stop_html_test.exs
@@ -1,6 +1,7 @@
defmodule ArrowWeb.StopViewTest do
@moduledoc false
use ExUnit.Case
+
alias ArrowWeb.StopView
describe "format_timestamp" do
diff --git a/test/arrow_web/live/disruption_v2_live_test.exs b/test/arrow_web/live/disruption_v2_live_test.exs
index 288b7328f..6fe2dfee4 100644
--- a/test/arrow_web/live/disruption_v2_live_test.exs
+++ b/test/arrow_web/live/disruption_v2_live_test.exs
@@ -1,8 +1,10 @@
defmodule ArrowWeb.DisruptionV2LiveTest do
use ArrowWeb.ConnCase
+ import Arrow.DisruptionsFixtures
+ import Arrow.LimitsFixtures
+ import Arrow.ShuttlesFixtures
import Phoenix.LiveViewTest
- import Arrow.{DisruptionsFixtures, LimitsFixtures, ShuttlesFixtures}
alias Arrow.Disruptions.DisruptionV2
@@ -29,8 +31,7 @@ defmodule ArrowWeb.DisruptionV2LiveTest do
day_of_week = limit_day_of_week_fixture(limit_id: limit.id)
%{
- disruption_v2:
- struct(disruption_v2, limits: [struct(limit, limit_day_of_weeks: [day_of_week])])
+ disruption_v2: struct(disruption_v2, limits: [struct(limit, limit_day_of_weeks: [day_of_week])])
}
end
@@ -176,12 +177,10 @@ defmodule ArrowWeb.DisruptionV2LiveTest do
|> render_change(%{replacement_service: valid_attrs})
replacement_service_workbook_filename =
- replacement_service_form
- |> Floki.attribute("#display_replacement_service_source_workbook_filename", "value")
+ Floki.attribute(replacement_service_form, "#display_replacement_service_source_workbook_filename", "value")
replacement_service_workbook_data =
- replacement_service_form
- |> Floki.attribute("#replacement_service_source_workbook_data", "value")
+ Floki.attribute(replacement_service_form, "#replacement_service_source_workbook_data", "value")
assert ["some source_workbook_filename"] = replacement_service_workbook_filename
assert [^data] = replacement_service_workbook_data
diff --git a/test/arrow_web/live/shuttle_live/shuttle_live_test.exs b/test/arrow_web/live/shuttle_live/shuttle_live_test.exs
index c746e5fee..f2c02ec80 100644
--- a/test/arrow_web/live/shuttle_live/shuttle_live_test.exs
+++ b/test/arrow_web/live/shuttle_live/shuttle_live_test.exs
@@ -1,10 +1,12 @@
defmodule ArrowWeb.ShuttleLiveTest do
use ArrowWeb.ConnCase
- import Phoenix.LiveViewTest
import Arrow.Factory
import Arrow.ShuttlesFixtures
import Mox
+ import Phoenix.LiveViewTest
+
+ alias Arrow.Shuttles.Route
setup :verify_on_exit!
@@ -130,7 +132,7 @@ defmodule ArrowWeb.ShuttleLiveTest do
stop_id = new_gtfs_stop.id
direction_0_route
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "0",
@@ -187,7 +189,7 @@ defmodule ArrowWeb.ShuttleLiveTest do
gtfs_stop = insert(:gtfs_stop)
direction_0_route
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "0",
@@ -280,7 +282,7 @@ defmodule ArrowWeb.ShuttleLiveTest do
[gtfs_stop1, gtfs_stop2, gtfs_stop3] = insert_list(3, :gtfs_stop)
direction_0_route
- |> Arrow.Shuttles.Route.changeset(%{
+ |> Route.changeset(%{
"route_stops" => [
%{
"direction_id" => "0",
@@ -546,8 +548,7 @@ defmodule ArrowWeb.ShuttleLiveTest do
file_input(new_live, "#shuttle-form", :definition, [
%{
name: "invalid_missing_tab.xlsx",
- content:
- File.read!("test/support/fixtures/xlsx/shuttle_live/invalid_missing_tab.xlsx")
+ content: File.read!("test/support/fixtures/xlsx/shuttle_live/invalid_missing_tab.xlsx")
}
])
@@ -564,8 +565,7 @@ defmodule ArrowWeb.ShuttleLiveTest do
file_input(new_live, "#shuttle-form", :definition, [
%{
name: "invalid_missing_data.xlsx",
- content:
- File.read!("test/support/fixtures/xlsx/shuttle_live/invalid_missing_data.xlsx")
+ content: File.read!("test/support/fixtures/xlsx/shuttle_live/invalid_missing_data.xlsx")
}
])
@@ -585,8 +585,7 @@ defmodule ArrowWeb.ShuttleLiveTest do
file_input(new_live, "#shuttle-form", :definition, [
%{
name: "invalid_missing_headers.xlsx",
- content:
- File.read!("test/support/fixtures/xlsx/shuttle_live/invalid_missing_headers.xlsx")
+ content: File.read!("test/support/fixtures/xlsx/shuttle_live/invalid_missing_headers.xlsx")
}
])
diff --git a/test/arrow_web/live/stop_live/stop_live_test.exs b/test/arrow_web/live/stop_live/stop_live_test.exs
index 553906f4d..9cecc281d 100644
--- a/test/arrow_web/live/stop_live/stop_live_test.exs
+++ b/test/arrow_web/live/stop_live/stop_live_test.exs
@@ -1,8 +1,8 @@
defmodule ArrowWeb.StopLiveTest do
use ArrowWeb.ConnCase
- import Phoenix.LiveViewTest
import Arrow.StopsFixtures
+ import Phoenix.LiveViewTest
alias Arrow.Shuttles.Stop
@@ -69,9 +69,7 @@ defmodule ArrowWeb.StopLiveTest do
test "creates stop and redirects to index when data is valid", %{conn: conn} do
{:ok, new_live, _html} = live(conn, ~p"/stops/new")
- form =
- new_live
- |> form("#stop-form", stop: @create_attrs)
+ form = form(new_live, "#stop-form", stop: @create_attrs)
render_submit(form)
@@ -86,9 +84,7 @@ defmodule ArrowWeb.StopLiveTest do
test "renders errors when data is invalid", %{conn: conn} do
{:ok, new_live, _html} = live(conn, ~p"/stops/new")
- form =
- new_live
- |> form("#stop-form", stop: @invalid_attrs)
+ form = form(new_live, "#stop-form", stop: @invalid_attrs)
refute render_submit(form) =~ ~r/phx-trigger-action/
@@ -131,9 +127,7 @@ defmodule ArrowWeb.StopLiveTest do
test "renders errors when data is invalid", %{conn: conn, stop: stop} do
{:ok, edit_live, _html} = live(conn, ~p"/stops/#{stop}/edit")
- form =
- edit_live
- |> form("#stop-form", stop: @invalid_attrs)
+ form = form(edit_live, "#stop-form", stop: @invalid_attrs)
refute render_submit(form) =~ ~r/phx-trigger-action/
diff --git a/test/arrow_web/plug/assign_user_test.exs b/test/arrow_web/plug/assign_user_test.exs
index 53b49f92a..cf26a6cfc 100644
--- a/test/arrow_web/plug/assign_user_test.exs
+++ b/test/arrow_web/plug/assign_user_test.exs
@@ -1,5 +1,6 @@
defmodule ArrowWeb.Plug.AssignUserTest do
use ArrowWeb.ConnCase
+
alias Arrow.Accounts.User
alias ArrowWeb.Plug.AssignUser
diff --git a/test/arrow_web/plug/authorize_test.exs b/test/arrow_web/plug/authorize_test.exs
index 27edee8fc..0bf1cd5cc 100644
--- a/test/arrow_web/plug/authorize_test.exs
+++ b/test/arrow_web/plug/authorize_test.exs
@@ -1,5 +1,6 @@
defmodule ArrowWeb.Plug.AuthorizeTest do
use ArrowWeb.ConnCase
+
alias ArrowWeb.Plug.AssignUser
alias ArrowWeb.Plug.Authorize
diff --git a/test/arrow_web/try_api_token_auth/keycloak_test.exs b/test/arrow_web/try_api_token_auth/keycloak_test.exs
index 9b1f2edb0..d254d2f42 100644
--- a/test/arrow_web/try_api_token_auth/keycloak_test.exs
+++ b/test/arrow_web/try_api_token_auth/keycloak_test.exs
@@ -1,13 +1,13 @@
defmodule ArrowWeb.TryApiTokenAuth.KeycloakTest do
use ArrowWeb.ConnCase
+
import ExUnit.CaptureLog
+ import Mox
import Test.Support.Helpers
alias Arrow.HTTPMock
alias ArrowWeb.TryApiTokenAuth.Keycloak
- import Mox
-
setup :verify_on_exit!
setup do
diff --git a/test/arrow_web/try_api_token_auth_test.exs b/test/arrow_web/try_api_token_auth_test.exs
index a56fea092..5b1ac4ccc 100644
--- a/test/arrow_web/try_api_token_auth_test.exs
+++ b/test/arrow_web/try_api_token_auth_test.exs
@@ -1,9 +1,10 @@
defmodule ArrowWeb.TryApiTokenAuthTest do
use ArrowWeb.ConnCase
- alias Arrow.HTTPMock
import Mox
+ alias Arrow.HTTPMock
+
describe "init/1" do
test "passes options through unchanged" do
assert ArrowWeb.TryApiTokenAuth.init([]) == []
diff --git a/test/arrow_web/views/disruption_v2_view/calendar_test.exs b/test/arrow_web/views/disruption_v2_view/calendar_test.exs
index 606424cd8..596bc375b 100644
--- a/test/arrow_web/views/disruption_v2_view/calendar_test.exs
+++ b/test/arrow_web/views/disruption_v2_view/calendar_test.exs
@@ -1,7 +1,9 @@
defmodule ArrowWeb.DisruptionV2View.CalendarTest do
use ExUnit.Case, async: true
- alias Arrow.Disruptions.{DisruptionV2, Limit, ReplacementService}
+ alias Arrow.Disruptions.DisruptionV2
+ alias Arrow.Disruptions.Limit
+ alias Arrow.Disruptions.ReplacementService
alias Arrow.Limits.LimitDayOfWeek
alias Arrow.Shuttles.Shuttle
alias ArrowWeb.DisruptionV2View.Calendar, as: DCalendar
diff --git a/test/arrow_web/views/disruption_view/calendar_test.exs b/test/arrow_web/views/disruption_view/calendar_test.exs
index e99b6a886..12a508fdb 100644
--- a/test/arrow_web/views/disruption_view/calendar_test.exs
+++ b/test/arrow_web/views/disruption_view/calendar_test.exs
@@ -1,8 +1,11 @@
defmodule ArrowWeb.DisruptionView.CalendarTest do
use ExUnit.Case, async: true
- alias Arrow.{Adjustment, Disruption, DisruptionRevision}
- alias Arrow.Disruption.{DayOfWeek, Exception}
+ alias Arrow.Adjustment
+ alias Arrow.Disruption
+ alias Arrow.Disruption.DayOfWeek
+ alias Arrow.Disruption.Exception
+ alias Arrow.DisruptionRevision
alias ArrowWeb.DisruptionView.Calendar, as: DCalendar
describe "props/1" do
diff --git a/test/arrow_web/views/disruption_view/form_test.exs b/test/arrow_web/views/disruption_view/form_test.exs
index 8d49d91f2..386bddeb9 100644
--- a/test/arrow_web/views/disruption_view/form_test.exs
+++ b/test/arrow_web/views/disruption_view/form_test.exs
@@ -1,8 +1,11 @@
defmodule ArrowWeb.DisruptionView.FormTest do
use ArrowWeb.ConnCase, async: true
- alias Arrow.{Adjustment, DisruptionRevision}
- alias Arrow.Disruption.{DayOfWeek, Exception, TripShortName}
+ alias Arrow.Adjustment
+ alias Arrow.Disruption.DayOfWeek
+ alias Arrow.Disruption.Exception
+ alias Arrow.Disruption.TripShortName
+ alias Arrow.DisruptionRevision
alias ArrowWeb.DisruptionView.Form
alias Ecto.Changeset
@@ -18,22 +21,21 @@ defmodule ArrowWeb.DisruptionView.FormTest do
]
changeset =
- %DisruptionRevision{
- start_date: ~D[2021-01-01],
- end_date: ~D[2021-01-31],
- row_approved: true,
- description: "a disruption for testing",
- note_body: "some note",
- adjustments: [hd(adjustments)],
- days_of_week: [%DayOfWeek{day_name: "monday", start_time: ~T[21:15:00], end_time: nil}],
- exceptions: [%Exception{excluded_date: ~D[2021-01-11]}],
- trip_short_names: [
- %TripShortName{trip_short_name: "one"},
- %TripShortName{trip_short_name: "two"}
- ],
- title: "disruption title"
- }
- |> Changeset.change(%{end_date: ~D[2021-02-28]})
+ Changeset.change(
+ %DisruptionRevision{
+ start_date: ~D[2021-01-01],
+ end_date: ~D[2021-01-31],
+ row_approved: true,
+ description: "a disruption for testing",
+ note_body: "some note",
+ adjustments: [hd(adjustments)],
+ days_of_week: [%DayOfWeek{day_name: "monday", start_time: ~T[21:15:00], end_time: nil}],
+ exceptions: [%Exception{excluded_date: ~D[2021-01-11]}],
+ trip_short_names: [%TripShortName{trip_short_name: "one"}, %TripShortName{trip_short_name: "two"}],
+ title: "disruption title"
+ },
+ %{end_date: ~D[2021-02-28]}
+ )
expected_adjustments = [
%{"id" => 1, "label" => "Kendall", "kind" => "red_line"},
diff --git a/test/integration/disruptions_v2_test.exs b/test/integration/disruptions_v2_test.exs
index 23eab5bcb..4f37d09c5 100644
--- a/test/integration/disruptions_v2_test.exs
+++ b/test/integration/disruptions_v2_test.exs
@@ -1,9 +1,11 @@
defmodule Arrow.Integration.DisruptionsV2Test do
use ExUnit.Case
use Wallaby.Feature
+
+ import Arrow.DisruptionsFixtures
+ import Arrow.LimitsFixtures
import Wallaby.Browser, except: [text: 1]
import Wallaby.Query
- import Arrow.{DisruptionsFixtures, LimitsFixtures}
@moduletag :integration
@@ -47,9 +49,7 @@ defmodule Arrow.Integration.DisruptionsV2Test do
day_of_week = limit_day_of_week_fixture(limit_id: limit.id)
replacement_service =
- replacement_service_fixture(
- Map.put_new(replacement_service_attrs, :disruption_id, disruption_v2.id)
- )
+ replacement_service_fixture(Map.put_new(replacement_service_attrs, :disruption_id, disruption_v2.id))
struct(disruption_v2,
limits: [struct(limit, limit_day_of_weeks: [day_of_week])],
diff --git a/test/integration/disruptionsv2/hastus_export_section_test.exs b/test/integration/disruptionsv2/hastus_export_section_test.exs
index 8ac427201..49758fa4b 100644
--- a/test/integration/disruptionsv2/hastus_export_section_test.exs
+++ b/test/integration/disruptionsv2/hastus_export_section_test.exs
@@ -1,10 +1,12 @@
defmodule Arrow.Integration.Disruptionsv2.HastusExportSectionTest do
use ExUnit.Case
use Wallaby.Feature
+
+ import Arrow.DisruptionsFixtures
+ import Arrow.Factory
+ import Arrow.HastusFixtures
import Wallaby.Browser, except: [text: 1]
import Wallaby.Query
- import Arrow.{DisruptionsFixtures, HastusFixtures}
- import Arrow.Factory
@moduletag :integration
@@ -239,12 +241,7 @@ defmodule Arrow.Integration.Disruptionsv2.HastusExportSectionTest do
)
end
- defp upload_and_assert_deduplicated_service_ids(
- session,
- disruption_id,
- export_path,
- assert_service_ids
- ) do
+ defp upload_and_assert_deduplicated_service_ids(session, disruption_id, export_path, assert_service_ids) do
session
|> visit("/disruptions/#{disruption_id}")
|> scroll_down()
diff --git a/test/mix/tasks/copy_db_test.exs b/test/mix/tasks/copy_db_test.exs
index 9558ae79e..d54ded1e1 100644
--- a/test/mix/tasks/copy_db_test.exs
+++ b/test/mix/tasks/copy_db_test.exs
@@ -1,7 +1,11 @@
defmodule Mix.Tasks.CopyDbTest do
use Arrow.DataCase
+
import ExUnit.CaptureLog
+ alias Arrow.Disruption.DayOfWeek
+ alias Mix.Tasks.CopyDb
+
setup do
http_client = Application.get_env(:arrow, :http_client)
on_exit(fn -> Application.put_env(:arrow, :http_client, http_client) end)
@@ -39,7 +43,7 @@ defmodule Mix.Tasks.CopyDbTest do
Application.put_env(:arrow, :http_client, Fake.HTTPoison.Happy)
pre_populate_db()
- Mix.Tasks.CopyDb.run([])
+ CopyDb.run([])
assert [
%Arrow.Disruption{
@@ -59,25 +63,25 @@ defmodule Mix.Tasks.CopyDbTest do
}
],
days_of_week: [
- %Arrow.Disruption.DayOfWeek{
+ %DayOfWeek{
id: 21,
day_name: "monday",
end_time: ~T[15:00:00],
start_time: ~T[08:00:00]
},
- %Arrow.Disruption.DayOfWeek{
+ %DayOfWeek{
id: 22,
day_name: "tuesday",
end_time: nil,
start_time: nil
},
- %Arrow.Disruption.DayOfWeek{
+ %DayOfWeek{
id: 23,
day_name: "wednesday",
end_time: nil,
start_time: nil
},
- %Arrow.Disruption.DayOfWeek{
+ %DayOfWeek{
id: 24,
day_name: "thursday",
end_time: nil,
@@ -99,7 +103,8 @@ defmodule Mix.Tasks.CopyDbTest do
}
}
] =
- Arrow.Repo.all(from d in Arrow.Disruption, order_by: d.id)
+ from(d in Arrow.Disruption, order_by: d.id)
+ |> Arrow.Repo.all()
|> Arrow.Repo.preload(published_revision: Arrow.DisruptionRevision.associations())
assert [
@@ -116,7 +121,7 @@ defmodule Mix.Tasks.CopyDbTest do
log =
capture_log(fn ->
- Mix.Tasks.CopyDb.run([])
+ CopyDb.run([])
end)
assert log =~ "invalid JSON"
@@ -127,7 +132,7 @@ defmodule Mix.Tasks.CopyDbTest do
log =
capture_log(fn ->
- Mix.Tasks.CopyDb.run([])
+ CopyDb.run([])
end)
assert log =~ "issue with request: 401"
@@ -136,7 +141,9 @@ defmodule Mix.Tasks.CopyDbTest do
end
defmodule Fake.HTTPoison do
+ @moduledoc false
defmodule Happy do
+ @moduledoc false
def start do
{:ok, nil}
end
@@ -242,6 +249,7 @@ defmodule Fake.HTTPoison do
end
defmodule Sad.InvalidJson do
+ @moduledoc false
def start do
{:ok, nil}
end
@@ -252,6 +260,7 @@ defmodule Fake.HTTPoison do
end
defmodule Sad.Status401 do
+ @moduledoc false
def start do
{:ok, nil}
end
diff --git a/test/support/channel_case.ex b/test/support/channel_case.ex
index f9720a905..acdd436e6 100644
--- a/test/support/channel_case.ex
+++ b/test/support/channel_case.ex
@@ -15,6 +15,8 @@ defmodule ArrowWeb.ChannelCase do
use ExUnit.CaseTemplate
+ alias Ecto.Adapters.SQL.Sandbox
+
using do
quote do
# Import conveniences for testing with channels
@@ -26,10 +28,10 @@ defmodule ArrowWeb.ChannelCase do
end
setup tags do
- :ok = Ecto.Adapters.SQL.Sandbox.checkout(Arrow.Repo)
+ :ok = Sandbox.checkout(Arrow.Repo)
if !tags[:async] do
- Ecto.Adapters.SQL.Sandbox.mode(Arrow.Repo, {:shared, self()})
+ Sandbox.mode(Arrow.Repo, {:shared, self()})
end
:ok
diff --git a/test/support/conn_case.ex b/test/support/conn_case.ex
index 678a06e49..a10c67b12 100644
--- a/test/support/conn_case.ex
+++ b/test/support/conn_case.ex
@@ -16,27 +16,31 @@ defmodule ArrowWeb.ConnCase do
"""
use ExUnit.CaseTemplate
+
import Plug.Test
+ alias Ecto.Adapters.SQL.Sandbox
+
using do
quote do
- # The default endpoint for testing
- @endpoint ArrowWeb.Endpoint
-
use ArrowWeb, :verified_routes
- # Import conveniences for testing with connections
- import Plug.Conn
+      # Import conveniences for testing with connections
      import Phoenix.ConnTest
+      import Plug.Conn
+
      alias ArrowWeb.Router.Helpers, as: Routes
+
+      # The default endpoint for testing
+      @endpoint ArrowWeb.Endpoint
end
end
setup tags do
- :ok = Ecto.Adapters.SQL.Sandbox.checkout(Arrow.Repo)
+ :ok = Sandbox.checkout(Arrow.Repo)
if !tags[:async] do
- Ecto.Adapters.SQL.Sandbox.mode(Arrow.Repo, {:shared, self()})
+ Sandbox.mode(Arrow.Repo, {:shared, self()})
end
cond do
@@ -50,10 +54,7 @@ defmodule ArrowWeb.ConnCase do
{:ok, conn: build_conn("test_user", [])}
true ->
- {:ok,
- conn:
- Phoenix.ConnTest.build_conn()
- |> Plug.Conn.put_req_header("x-forwarded-proto", "https")}
+ {:ok, conn: Plug.Conn.put_req_header(Phoenix.ConnTest.build_conn(), "x-forwarded-proto", "https")}
end
end
diff --git a/test/support/data_case.ex b/test/support/data_case.ex
index 1d76efc26..60a320e0a 100644
--- a/test/support/data_case.ex
+++ b/test/support/data_case.ex
@@ -16,23 +16,25 @@ defmodule Arrow.DataCase do
use ExUnit.CaseTemplate
+ alias Ecto.Adapters.SQL.Sandbox
+
using do
quote do
- alias Arrow.Repo
-
+ import Arrow.DataCase
+ import Arrow.Factory
import Ecto
import Ecto.Changeset
import Ecto.Query
- import Arrow.DataCase
- import Arrow.Factory
+
+ alias Arrow.Repo
end
end
setup tags do
- :ok = Ecto.Adapters.SQL.Sandbox.checkout(Arrow.Repo)
+ :ok = Sandbox.checkout(Arrow.Repo)
if !tags[:async] do
- Ecto.Adapters.SQL.Sandbox.mode(Arrow.Repo, {:shared, self()})
+ Sandbox.mode(Arrow.Repo, {:shared, self()})
end
:ok
diff --git a/test/support/factory.ex b/test/support/factory.ex
index 04092c68a..9c828a0f0 100644
--- a/test/support/factory.ex
+++ b/test/support/factory.ex
@@ -4,6 +4,8 @@ defmodule Arrow.Factory do
use ExMachina.Ecto, repo: Arrow.Repo
use Arrow.OpenRouteServiceFactory
+ alias Arrow.Hastus.DerivedLimit
+
def adjustment_factory do
%Arrow.Adjustment{
route_id: "Red",
@@ -21,7 +23,7 @@ defmodule Arrow.Factory do
def disruption_revision_factory(attrs) do
%Arrow.DisruptionRevision{
start_date: Date.utc_today(),
- end_date: Date.utc_today() |> Date.add(6),
+ end_date: Date.add(Date.utc_today(), 6),
description: sequence("Description"),
adjustment_kind: :bus,
disruption: build(:disruption),
@@ -105,28 +107,30 @@ defmodule Arrow.Factory do
end
def gtfs_stop_factory(attrs \\ %{}) do
- %Arrow.Gtfs.Stop{
- id: sequence(:source_label, &"gtfs-stop-#{&1}"),
- code: nil,
- name: "Test Stop",
- desc: nil,
- platform_code: nil,
- platform_name: nil,
- lat: 42.3601,
- lon: -71.0589,
- zone_id: nil,
- address: nil,
- url: nil,
- level: nil,
- location_type: :stop_platform,
- parent_station: nil,
- wheelchair_boarding: :accessible,
- municipality: "Boston",
- on_street: nil,
- at_street: nil,
- vehicle_type: :bus
- }
- |> merge_attributes(attrs)
+ merge_attributes(
+ %Arrow.Gtfs.Stop{
+ id: sequence(:source_label, &"gtfs-stop-#{&1}"),
+ code: nil,
+ name: "Test Stop",
+ desc: nil,
+ platform_code: nil,
+ platform_name: nil,
+ lat: 42.3601,
+ lon: -71.0589,
+ zone_id: nil,
+ address: nil,
+ url: nil,
+ level: nil,
+ location_type: :stop_platform,
+ parent_station: nil,
+ wheelchair_boarding: :accessible,
+ municipality: "Boston",
+ on_street: nil,
+ at_street: nil,
+ vehicle_type: :bus
+ },
+ attrs
+ )
end
def gtfs_route_factory do
@@ -248,7 +252,7 @@ defmodule Arrow.Factory do
%Arrow.Disruptions.ReplacementService{
reason: "Maintenance",
start_date: Date.utc_today(),
- end_date: Date.utc_today() |> Date.add(6),
+ end_date: Date.add(Date.utc_today(), 6),
source_workbook_data: build(:replacement_service_workbook_data),
source_workbook_filename: "file.xlsx",
disruption: build(:disruption_v2),
@@ -308,10 +312,10 @@ defmodule Arrow.Factory do
end
def derived_limit_factory do
- %Arrow.Hastus.DerivedLimit{
+ %DerivedLimit{
start_stop: build(:gtfs_stop),
end_stop: build(:gtfs_stop),
- service: not_loaded(Arrow.Hastus.DerivedLimit, :service)
+ service: not_loaded(DerivedLimit, :service)
}
end
diff --git a/test/support/fixtures/shuttles_fixtures.ex b/test/support/fixtures/shuttles_fixtures.ex
index dc2fd08aa..8c6a54ae7 100644
--- a/test/support/fixtures/shuttles_fixtures.ex
+++ b/test/support/fixtures/shuttles_fixtures.ex
@@ -4,16 +4,15 @@ defmodule Arrow.ShuttlesFixtures do
entities via the `Arrow.Shuttles` context.
"""
+ import Arrow.Factory
+
alias Arrow.Repo
alias Arrow.Shuttles.Shape
- import Arrow.Factory
-
@doc """
Generate valid coords
"""
- def coords,
- do: ["-71.14163,42.39551", "-71.14209,42.39643", "-71.14285,42.39624", "-71.14292,42.39623"]
+ def coords, do: ["-71.14163,42.39551", "-71.14209,42.39643", "-71.14285,42.39624", "-71.14292,42.39623"]
@doc """
Generate a unique shape name.
@@ -66,8 +65,7 @@ defmodule Arrow.ShuttlesFixtures do
@doc """
Generate a unique shuttle route destination.
"""
- def unique_shuttle_route_destination,
- do: "some shuttle_route_destination#{System.unique_integer([:positive])}"
+ def unique_shuttle_route_destination, do: "some shuttle_route_destination#{System.unique_integer([:positive])}"
defp shuttle_route_stops(include_times_to_next_stop) do
[stop1, stop2, stop3, stop4] = insert_list(4, :gtfs_stop)
diff --git a/test/test_helper.exs b/test/test_helper.exs
index 97dc2dd56..8ab504abb 100644
--- a/test/test_helper.exs
+++ b/test/test_helper.exs
@@ -5,6 +5,4 @@ ExUnit.start(exclude: [:integration])
Ecto.Adapters.SQL.Sandbox.mode(Arrow.Repo, :manual)
Mox.defmock(Arrow.OpenRouteServiceAPI.MockClient, for: Arrow.OpenRouteServiceAPI.Client)
-Application.put_env(:arrow, Arrow.OpenRouteServiceAPI,
- client: Arrow.OpenRouteServiceAPI.MockClient
-)
+Application.put_env(:arrow, Arrow.OpenRouteServiceAPI, client: Arrow.OpenRouteServiceAPI.MockClient)