diff --git a/zero-phoenix/.formatter.exs b/zero-phoenix/.formatter.exs
new file mode 100644
index 0000000..2bed17c
--- /dev/null
+++ b/zero-phoenix/.formatter.exs
@@ -0,0 +1,3 @@
+[
+ inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"]
+]
diff --git a/zero-phoenix/.gitignore b/zero-phoenix/.gitignore
index ee144dd..33e70b1 100644
--- a/zero-phoenix/.gitignore
+++ b/zero-phoenix/.gitignore
@@ -14,3 +14,5 @@ erl_crash.dump
# secrets file as long as you replace its contents by environment
# variables.
/config/prod.secret.exs
+.idea/*
+zerophoenix.iml
diff --git a/zero-phoenix/README.md b/zero-phoenix/README.md
index 88f2d0a..a8bc815 100644
--- a/zero-phoenix/README.md
+++ b/zero-phoenix/README.md
@@ -6,11 +6,11 @@ The purpose of this example is to provide details as to how one would go about u
## Software requirements
-- [Elixir 1.4.0 or higher](http://elixir-lang.org/install.html)
+- [Elixir 1.6.1 or higher](http://elixir-lang.org/install.html)
-- [Phoenix 1.2.0 or higher](http://www.phoenixframework.org/docs/installation)
+- [Phoenix 1.3.0 or higher](http://www.phoenixframework.org/docs/installation)
-- PostgreSQL 9.6.x or higher
+- PostgreSQL 10.2.0 or higher
## Communication
@@ -43,15 +43,13 @@ The purpose of this example is to provide details as to how one would go about u
3. create, migrate, and seed the database
```
- $ mix ecto.create
- $ mix ecto.migrate
- $ mix ecto.seed
+ $ mix ecto.setup
```
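+
+    Note: `ecto.setup` is a Mix alias defined in `mix.exs`; it simply chains the three commands it replaces:
+
+    ```elixir
+    "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"]
+    ```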
4. start the server
```
- $ mix phoenix.server
+ $ mix phx.server
```
5. navigate to our application within the browser
@@ -87,7 +85,7 @@ The purpose of this example is to provide details as to how one would go about u
1. create the project
```
- $ mix phoenix.new zero_phoenix --no-brunch
+ $ mix phx.new zero_phoenix --no-brunch
```
Note: Just answer 'Y' to all the prompts that appear.
@@ -111,47 +109,19 @@ The purpose of this example is to provide details as to how one would go about u
config/test.exs
```
-5. generate an API for representing our `Person` resource
+6. create the database
```
- $ mix phoenix.gen.json Person people first_name:string last_name:string username:string email:string
+ $ mix ecto.create
```
-6. replace the generated `Person` model with the following:
-
- `web/models/person.ex`:
-
- ```elixir
- defmodule ZeroPhoenix.Person do
- use ZeroPhoenix.Web, :model
-
- @required_fields ~w(first_name last_name username email)
- @optional_fields ~w()
-
- schema "people" do
- field :first_name, :string
- field :last_name, :string
- field :username, :string
- field :email, :string
-
- has_many :friendships, ZeroPhoenix.Friendship
- has_many :friends, through: [:friendships, :friend]
-
- timestamps()
- end
+7. generate an API for representing our `Person` resource
- @doc """
- Builds a changeset based on the `struct` and `params`.
- """
- def changeset(struct, params \\ %{}) do
- struct
- |> cast(params, @required_fields)
- |> validate_required(@required_fields)
- end
- end
+ ```
+ $ mix phx.gen.json Accounts Person people first_name:string last_name:string username:string email:string
```
-7. add the resource to your api scope in which should look as follows after the edit:
+8. add the resource to your api scope, which should look as follows after the edit:
- `web/router.ex`:
+ `lib/zero_phoenix_web/router.ex`:
@@ -165,43 +135,79 @@ The purpose of this example is to provide details as to how one would go about u
Note: When creating an API, one doesn't require the new or edit actions, which is why we are excluding them from this resource.
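+ For reference, a minimal sketch of what the `:api` scope should roughly look like after the edit (assuming the default generated scope alias):
+
+ ```elixir
+ scope "/api", ZeroPhoenixWeb do
+   pipe_through :api
+
+   resources "/people", PersonController, except: [:new, :edit]
+ end
+ ```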
-8. create and migrate the database
+9. migrate the database
```
- $ mix ecto.create
$ mix ecto.migrate
```
-9. generate a `Friendship` model which representing our join model:
+10. generate a `Friendship` model, which represents our join model:
```
- $ mix phoenix.gen.model Friendship friendships person_id:references:people friend_id:references:people
+ $ mix phx.gen.schema Accounts.Friendship friendships person_id:references:people friend_id:references:people
+ ```
+
+11. migrate the database
+
+ ```
+ $ mix ecto.migrate
+ ```
+
+12. replace the generated `Person` model with the following:
+
+ `lib/zero_phoenix/accounts/person.ex`:
+
+ ```elixir
+ defmodule ZeroPhoenix.Accounts.Person do
+ use Ecto.Schema
+ import Ecto.Changeset
+ alias ZeroPhoenix.Accounts.Person
+ alias ZeroPhoenix.Accounts.Friendship
+
+ schema "people" do
+ field(:email, :string)
+ field(:first_name, :string)
+ field(:last_name, :string)
+ field(:username, :string)
+
+ has_many(:friendships, Friendship)
+ has_many(:friends, through: [:friendships, :friend])
+
+ timestamps()
+ end
+
+ @doc false
+ def changeset(%Person{} = person, attrs) do
+ person
+ |> cast(attrs, [:first_name, :last_name, :username, :email])
+ |> validate_required([:first_name, :last_name, :username, :email])
+ end
+ end
```
-10. replace the generated `Friendship` model with the following:
+13. replace the generated `Friendship` model with the following:
- `web/models/friendship.ex`:
+ `lib/zero_phoenix/accounts/friendship.ex`:
```elixir
- defmodule ZeroPhoenix.Friendship do
- use ZeroPhoenix.Web, :model
+ defmodule ZeroPhoenix.Accounts.Friendship do
+ use Ecto.Schema
+ import Ecto.Changeset
+ alias ZeroPhoenix.Accounts.Person
+ alias ZeroPhoenix.Accounts.Friendship
- @required_fields ~w(person_id friend_id)
- @optional_fields ~w()
+ @required_fields [:person_id, :friend_id]
schema "friendships" do
- belongs_to :person, ZeroPhoenix.Person
- belongs_to :friend, ZeroPhoenix.Person
+ belongs_to(:person, Person)
+ belongs_to(:friend, Person)
timestamps()
end
- @doc """
- Builds a changeset based on the `struct` and `params`.
- """
- def changeset(struct, params \\ %{}) do
- struct
- |> cast(params, @required_fields)
+ @doc false
+ def changeset(%Friendship{} = friendship, attrs) do
+ friendship
+ |> cast(attrs, @required_fields)
|> validate_required(@required_fields)
end
end
@@ -209,70 +215,91 @@ The purpose of this example is to provide details as to how one would go about u
Note: We want `friend_id` to reference the `people` table because our `friend_id` really represents a `Person` model.
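+ For context, the generated migration already encodes this; a sketch of what `mix phx.gen.schema` produces for the join table:
+
+ ```elixir
+ defmodule ZeroPhoenix.Repo.Migrations.CreateFriendships do
+   use Ecto.Migration
+
+   def change do
+     create table(:friendships) do
+       add :person_id, references(:people, on_delete: :nothing)
+       add :friend_id, references(:people, on_delete: :nothing)
+
+       timestamps()
+     end
+
+     create index(:friendships, [:person_id])
+     create index(:friendships, [:friend_id])
+   end
+ end
+ ```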
-11. migrate the database
-
- ```
- $ mix ecto.migrate
- ```
-
-12. create the seeds file
+14. create the seeds file
`priv/repo/seeds.exs`:
-
```
alias ZeroPhoenix.Repo
- alias ZeroPhoenix.Person
- alias ZeroPhoenix.Friendship
+ alias ZeroPhoenix.Accounts.Person
+ alias ZeroPhoenix.Accounts.Friendship
# reset the datastore
+ Repo.delete_all(Friendship)
Repo.delete_all(Person)
# insert people
- me = Repo.insert!(%Person{ first_name: "Steven", last_name: "Luscher", email: "steveluscher@fb.com", username: "steveluscher" })
- dhh = Repo.insert!(%Person{ first_name: "David", last_name: "Heinemeier Hansson", email: "dhh@37signals.com", username: "dhh" })
- ezra = Repo.insert!(%Person{ first_name: "Ezra", last_name: "Zygmuntowicz", email: "ezra@merbivore.com", username: "ezra" })
- matz = Repo.insert!(%Person{ first_name: "Yukihiro", last_name: "Matsumoto", email: "matz@heroku.com", username: "matz" })
+ me =
+ Repo.insert!(%Person{
+ first_name: "Steven",
+ last_name: "Luscher",
+ email: "steveluscher@fb.com",
+ username: "steveluscher"
+ })
+
+ dhh =
+ Repo.insert!(%Person{
+ first_name: "David",
+ last_name: "Heinemeier Hansson",
+ email: "dhh@37signals.com",
+ username: "dhh"
+ })
+
+ ezra =
+ Repo.insert!(%Person{
+ first_name: "Ezra",
+ last_name: "Zygmuntowicz",
+ email: "ezra@merbivore.com",
+ username: "ezra"
+ })
+
+ matz =
+ Repo.insert!(%Person{
+ first_name: "Yukihiro",
+ last_name: "Matsumoto",
+ email: "matz@heroku.com",
+ username: "matz"
+ })
me
|> Ecto.build_assoc(:friendships)
- |> Friendship.changeset( %{ person_id: me.id, friend_id: matz.id } )
- |> Repo.insert
+ |> Friendship.changeset(%{person_id: me.id, friend_id: matz.id})
+ |> Repo.insert()
dhh
|> Ecto.build_assoc(:friendships)
- |> Friendship.changeset( %{ person_id: dhh.id, friend_id: ezra.id } )
- |> Repo.insert
+ |> Friendship.changeset(%{person_id: dhh.id, friend_id: ezra.id})
+ |> Repo.insert()
dhh
|> Ecto.build_assoc(:friendships)
- |> Friendship.changeset( %{ person_id: dhh.id, friend_id: matz.id } )
- |> Repo.insert
+ |> Friendship.changeset(%{person_id: dhh.id, friend_id: matz.id})
+ |> Repo.insert()
ezra
|> Ecto.build_assoc(:friendships)
- |> Friendship.changeset( %{ person_id: ezra.id, friend_id: dhh.id } )
- |> Repo.insert
+ |> Friendship.changeset(%{person_id: ezra.id, friend_id: dhh.id})
+ |> Repo.insert()
ezra
|> Ecto.build_assoc(:friendships)
- |> Friendship.changeset( %{ person_id: ezra.id, friend_id: matz.id } )
- |> Repo.insert
+ |> Friendship.changeset(%{person_id: ezra.id, friend_id: matz.id})
+ |> Repo.insert()
matz
|> Ecto.build_assoc(:friendships)
- |> Friendship.changeset( %{ person_id: matz.id, friend_id: me.id } )
- |> Repo.insert
+ |> Friendship.changeset(%{person_id: matz.id, friend_id: me.id})
+ |> Repo.insert()
matz
|> Ecto.build_assoc(:friendships)
- |> Friendship.changeset( %{ person_id: matz.id, friend_id: ezra.id } )
- |> Repo.insert
+ |> Friendship.changeset(%{person_id: matz.id, friend_id: ezra.id})
+ |> Repo.insert()
matz
|> Ecto.build_assoc(:friendships)
- |> Friendship.changeset( %{ person_id: matz.id, friend_id: dhh.id } )
- |> Repo.insert
+ |> Friendship.changeset(%{person_id: matz.id, friend_id: dhh.id})
+ |> Repo.insert()
```
13. seed the database
@@ -286,26 +313,16 @@ The purpose of this example is to provide details as to how one would go about u
```elixir
defp deps do
[
- {:phoenix, "~> 1.2.0"},
+ {:phoenix, "~> 1.3.0"},
{:phoenix_pubsub, "~> 1.0"},
- {:ecto, github: "elixir-ecto/ecto", override: true},
- {:phoenix_ecto, "~> 3.0"},
+ {:phoenix_ecto, "~> 3.2"},
{:postgrex, ">= 0.0.0"},
- {:phoenix_html, "~> 2.6"},
+ {:phoenix_html, "~> 2.10"},
{:phoenix_live_reload, "~> 1.0", only: :dev},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.0"},
{:absinthe_plug, "~> 1.3"}
- ]
- end
- ```
-
-15. Add `absinthe_plug` application to your `mix.exs` application as follows:
-
- ```elixir
- def application do
- [mod: {ZeroPhoenix, []},
- applications: [:phoenix, :phoenix_pubsub, :phoenix_html, :cowboy, :logger, :gettext, :phoenix_ecto, :postgrex, :absinthe_plug]]
+ ]
end
```
@@ -317,25 +334,27 @@ The purpose of this example is to provide details as to how one would go about u
17. add the GraphQL schema which represents our entry point into our GraphQL structure:
- `web/graphql/schema.ex`:
+ `lib/zero_phoenix_web/graphql/schema.ex`:
```elixir
- defmodule ZeroPhoenix.Graphql.Schema do
+ defmodule ZeroPhoenixWeb.Graphql.Schema do
use Absinthe.Schema
- import_types ZeroPhoenix.Graphql.Types.Person
+ import_types(ZeroPhoenixWeb.Graphql.Types.Person)
alias ZeroPhoenix.Repo
+ alias ZeroPhoenix.Accounts.Person
query do
field :person, type: :person do
- arg :id, non_null(:id)
- resolve fn %{id: id}, _info ->
- case ZeroPhoenix.Person|> Repo.get(id) do
- nil -> {:error, "Person id #{id} not found"}
+ arg(:id, non_null(:id))
+
+ resolve(fn %{id: id}, _info ->
+ case Person |> Repo.get(id) do
+ nil -> {:error, "Person id #{id} not found"}
person -> {:ok, person}
end
- end
+ end)
end
end
end
@@ -343,10 +362,10 @@ The purpose of this example is to provide details as to how one would go about u
18. add our Person type, which we will be performing queries against:
- `web/graphql/types/person.ex`:
+ `lib/zero_phoenix_web/graphql/types/person.ex`:
```elixir
- defmodule ZeroPhoenix.Graphql.Types.Person do
+ defmodule ZeroPhoenixWeb.Graphql.Types.Person do
use Absinthe.Schema.Notation
import Ecto
@@ -393,7 +412,7 @@ The purpose of this example is to provide details as to how one would go about u
20. start the server
```
- $ mix phoenix.server
+ $ mix phx.server
```
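+
+    Note: the schema can also be exercised without the browser, straight from `iex -S mix`. A minimal sketch (the `id` value is illustrative and depends on your seed data; field names use Absinthe's default camelCase convention):
+
+    ```elixir
+    query = """
+    {
+      person(id: "1") {
+        firstName
+        friends { firstName }
+      }
+    }
+    """
+
+    Absinthe.run(query, ZeroPhoenixWeb.Graphql.Schema)
+    # => {:ok, %{data: %{"person" => %{"firstName" => ..., "friends" => [...]}}}}
+    ```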
21. navigate to our application within the browser
@@ -453,4 +472,4 @@ ZeroPhoenix is released under the [MIT license](https://mit-license.org).
## Copyright
-copyright:: (c) Copyright 2016 Conrad Taylor. All Rights Reserved.
+copyright:: (c) Copyright 2016 - 2018 Conrad Taylor. All Rights Reserved.
diff --git a/zero-phoenix/config/config.exs b/zero-phoenix/config/config.exs
index 83c359c..92739b0 100644
--- a/zero-phoenix/config/config.exs
+++ b/zero-phoenix/config/config.exs
@@ -10,10 +10,10 @@ config :zero_phoenix,
ecto_repos: [ZeroPhoenix.Repo]
# Configures the endpoint
-config :zero_phoenix, ZeroPhoenix.Endpoint,
+config :zero_phoenix, ZeroPhoenixWeb.Endpoint,
url: [host: "localhost"],
- secret_key_base: "bS5peykLEJ5cQDZ+u5M+ncgdhOyKND6P/vSLoLKdqaRgGqe1QHuejc5XvfifUUUo",
- render_errors: [view: ZeroPhoenix.ErrorView, accepts: ~w(html json)],
+ secret_key_base: "nvgjFfsHcYw2DWvEX4Rtzj4DONDO4t7+lt4alSLGPFhP58bvBoz7xVv36Co96Yl9",
+ render_errors: [view: ZeroPhoenixWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: ZeroPhoenix.PubSub,
adapter: Phoenix.PubSub.PG2]
diff --git a/zero-phoenix/config/dev.exs b/zero-phoenix/config/dev.exs
index d4a5dc8..3202ac1 100644
--- a/zero-phoenix/config/dev.exs
+++ b/zero-phoenix/config/dev.exs
@@ -6,22 +6,37 @@ use Mix.Config
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
-config :zero_phoenix, ZeroPhoenix.Endpoint,
+config :zero_phoenix, ZeroPhoenixWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: []
+# ## SSL Support
+#
+# In order to use HTTPS in development, a self-signed
+# certificate can be generated by running the following
+# command from your terminal:
+#
+# openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
+#
+# The `http:` config above can be replaced with:
+#
+# https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
+#
+# If desired, both `http:` and `https:` keys can be
+# configured to run both http and https servers on
+# different ports.
# Watch static and templates for browser reloading.
-config :zero_phoenix, ZeroPhoenix.Endpoint,
+config :zero_phoenix, ZeroPhoenixWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
- ~r{web/views/.*(ex)$},
- ~r{web/templates/.*(eex)$}
+ ~r{lib/zero_phoenix_web/views/.*(ex)$},
+ ~r{lib/zero_phoenix_web/templates/.*(eex)$}
]
]
diff --git a/zero-phoenix/config/prod.exs b/zero-phoenix/config/prod.exs
index 6674ec1..55c3981 100644
--- a/zero-phoenix/config/prod.exs
+++ b/zero-phoenix/config/prod.exs
@@ -1,20 +1,22 @@
use Mix.Config
-# For production, we configure the host to read the PORT
-# from the system environment. Therefore, you will need
-# to set PORT=80 before running your server.
+# For production, we often load configuration from external
+# sources, such as your system environment. For this reason,
+# you won't find the :http configuration below, but set inside
+# ZeroPhoenixWeb.Endpoint.init/2 when load_from_system_env is
+# true. Any dynamic configuration should be done there.
#
-# You should also configure the url host to something
-# meaningful, we use this information when generating URLs.
+# Don't forget to configure the url host to something meaningful,
+# Phoenix uses this information when generating URLs.
#
-# Finally, we also include the path to a manifest
+# Finally, we also include the path to a cache manifest
# containing the digested version of static files. This
-# manifest is generated by the mix phoenix.digest task
+# manifest is generated by the mix phx.digest task
# which you typically run after static files are built.
-config :zero_phoenix, ZeroPhoenix.Endpoint,
- http: [port: {:system, "PORT"}],
+config :zero_phoenix, ZeroPhoenixWeb.Endpoint,
+ load_from_system_env: true,
url: [host: "example.com", port: 80],
- cache_static_manifest: "priv/static/manifest.json"
+ cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
@@ -24,10 +26,11 @@ config :logger, level: :info
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
-# config :zero_phoenix, ZeroPhoenix.Endpoint,
+# config :zero_phoenix, ZeroPhoenixWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
-# https: [port: 443,
+# https: [:inet6,
+# port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
@@ -38,7 +41,7 @@ config :logger, level: :info
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
-# config :zero_phoenix, ZeroPhoenix.Endpoint,
+# config :zero_phoenix, ZeroPhoenixWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
@@ -53,12 +56,8 @@ config :logger, level: :info
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
-# config :zero_phoenix, ZeroPhoenix.Endpoint, server: true
+# config :zero_phoenix, ZeroPhoenixWeb.Endpoint, server: true
#
-# You will also need to set the application root to `.` in order
-# for the new static assets to be served after a hot upgrade:
-#
-# config :zero_phoenix, ZeroPhoenix.Endpoint, root: "."
# Finally import the config/prod.secret.exs
# which should be versioned separately.
diff --git a/zero-phoenix/config/test.exs b/zero-phoenix/config/test.exs
index 86d190b..cd4c5b6 100644
--- a/zero-phoenix/config/test.exs
+++ b/zero-phoenix/config/test.exs
@@ -2,7 +2,7 @@ use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
-config :zero_phoenix, ZeroPhoenix.Endpoint,
+config :zero_phoenix, ZeroPhoenixWeb.Endpoint,
http: [port: 4001],
server: false
diff --git a/zero-phoenix/lib/zero_phoenix.ex b/zero-phoenix/lib/zero_phoenix.ex
index 3c913e8..116c580 100644
--- a/zero-phoenix/lib/zero_phoenix.ex
+++ b/zero-phoenix/lib/zero_phoenix.ex
@@ -1,31 +1,9 @@
defmodule ZeroPhoenix do
- use Application
+ @moduledoc """
+ ZeroPhoenix keeps the contexts that define your domain
+ and business logic.
- # See http://elixir-lang.org/docs/stable/elixir/Application.html
- # for more information on OTP Applications
- def start(_type, _args) do
- import Supervisor.Spec
-
- # Define workers and child supervisors to be supervised
- children = [
- # Start the Ecto repository
- supervisor(ZeroPhoenix.Repo, []),
- # Start the endpoint when the application starts
- supervisor(ZeroPhoenix.Endpoint, []),
- # Start your own worker by calling: ZeroPhoenix.Worker.start_link(arg1, arg2, arg3)
- # worker(ZeroPhoenix.Worker, [arg1, arg2, arg3]),
- ]
-
- # See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
- # for other strategies and supported options
- opts = [strategy: :one_for_one, name: ZeroPhoenix.Supervisor]
- Supervisor.start_link(children, opts)
- end
-
- # Tell Phoenix to update the endpoint configuration
- # whenever the application is updated.
- def config_change(changed, _new, removed) do
- ZeroPhoenix.Endpoint.config_change(changed, removed)
- :ok
- end
+ Contexts are also responsible for managing your data, regardless
+ if it comes from the database, an external API or others.
+ """
end
diff --git a/zero-phoenix/lib/zero_phoenix/accounts/accounts.ex b/zero-phoenix/lib/zero_phoenix/accounts/accounts.ex
new file mode 100644
index 0000000..f378d59
--- /dev/null
+++ b/zero-phoenix/lib/zero_phoenix/accounts/accounts.ex
@@ -0,0 +1,104 @@
+defmodule ZeroPhoenix.Accounts do
+ @moduledoc """
+ The Accounts context.
+ """
+
+ import Ecto.Query, warn: false
+ alias ZeroPhoenix.Repo
+
+ alias ZeroPhoenix.Accounts.Person
+
+ @doc """
+ Returns the list of people.
+
+ ## Examples
+
+ iex> list_people()
+ [%Person{}, ...]
+
+ """
+ def list_people do
+ Repo.all(Person)
+ end
+
+ @doc """
+ Gets a single person.
+
+ Raises `Ecto.NoResultsError` if the Person does not exist.
+
+ ## Examples
+
+ iex> get_person!(123)
+ %Person{}
+
+ iex> get_person!(456)
+ ** (Ecto.NoResultsError)
+
+ """
+ def get_person!(id), do: Repo.get!(Person, id)
+
+ @doc """
+ Creates a person.
+
+ ## Examples
+
+ iex> create_person(%{field: value})
+ {:ok, %Person{}}
+
+ iex> create_person(%{field: bad_value})
+ {:error, %Ecto.Changeset{}}
+
+ """
+ def create_person(attrs \\ %{}) do
+ %Person{}
+ |> Person.changeset(attrs)
+ |> Repo.insert()
+ end
+
+ @doc """
+ Updates a person.
+
+ ## Examples
+
+ iex> update_person(person, %{field: new_value})
+ {:ok, %Person{}}
+
+ iex> update_person(person, %{field: bad_value})
+ {:error, %Ecto.Changeset{}}
+
+ """
+ def update_person(%Person{} = person, attrs) do
+ person
+ |> Person.changeset(attrs)
+ |> Repo.update()
+ end
+
+ @doc """
+ Deletes a Person.
+
+ ## Examples
+
+ iex> delete_person(person)
+ {:ok, %Person{}}
+
+ iex> delete_person(person)
+ {:error, %Ecto.Changeset{}}
+
+ """
+ def delete_person(%Person{} = person) do
+ Repo.delete(person)
+ end
+
+ @doc """
+ Returns an `%Ecto.Changeset{}` for tracking person changes.
+
+ ## Examples
+
+ iex> change_person(person)
+ %Ecto.Changeset{source: %Person{}}
+
+ """
+ def change_person(%Person{} = person) do
+ Person.changeset(person, %{})
+ end
+end
diff --git a/zero-phoenix/lib/zero_phoenix/accounts/friendship.ex b/zero-phoenix/lib/zero_phoenix/accounts/friendship.ex
new file mode 100644
index 0000000..c0cb8e3
--- /dev/null
+++ b/zero-phoenix/lib/zero_phoenix/accounts/friendship.ex
@@ -0,0 +1,22 @@
+defmodule ZeroPhoenix.Accounts.Friendship do
+ use Ecto.Schema
+ import Ecto.Changeset
+ alias ZeroPhoenix.Accounts.Person
+ alias ZeroPhoenix.Accounts.Friendship
+
+ @required_fields [:person_id, :friend_id]
+
+ schema "friendships" do
+ belongs_to(:person, Person)
+ belongs_to(:friend, Person)
+
+ timestamps()
+ end
+
+ @doc false
+ def changeset(%Friendship{} = friendship, attrs) do
+ friendship
+ |> cast(attrs, @required_fields)
+ |> validate_required(@required_fields)
+ end
+end
diff --git a/zero-phoenix/lib/zero_phoenix/accounts/person.ex b/zero-phoenix/lib/zero_phoenix/accounts/person.ex
new file mode 100644
index 0000000..454be57
--- /dev/null
+++ b/zero-phoenix/lib/zero_phoenix/accounts/person.ex
@@ -0,0 +1,25 @@
+defmodule ZeroPhoenix.Accounts.Person do
+ use Ecto.Schema
+ import Ecto.Changeset
+ alias ZeroPhoenix.Accounts.Person
+ alias ZeroPhoenix.Accounts.Friendship
+
+ schema "people" do
+ field(:email, :string)
+ field(:first_name, :string)
+ field(:last_name, :string)
+ field(:username, :string)
+
+ has_many(:friendships, Friendship)
+ has_many(:friends, through: [:friendships, :friend])
+
+ timestamps()
+ end
+
+ @doc false
+ def changeset(%Person{} = person, attrs) do
+ person
+ |> cast(attrs, [:first_name, :last_name, :username, :email])
+ |> validate_required([:first_name, :last_name, :username, :email])
+ end
+end
diff --git a/zero-phoenix/lib/zero_phoenix/application.ex b/zero-phoenix/lib/zero_phoenix/application.ex
new file mode 100644
index 0000000..b4482fd
--- /dev/null
+++ b/zero-phoenix/lib/zero_phoenix/application.ex
@@ -0,0 +1,31 @@
+defmodule ZeroPhoenix.Application do
+ use Application
+
+ # See https://hexdocs.pm/elixir/Application.html
+ # for more information on OTP Applications
+ def start(_type, _args) do
+ import Supervisor.Spec
+
+ # Define workers and child supervisors to be supervised
+ children = [
+ # Start the Ecto repository
+ supervisor(ZeroPhoenix.Repo, []),
+ # Start the endpoint when the application starts
+ supervisor(ZeroPhoenixWeb.Endpoint, []),
+ # Start your own worker by calling: ZeroPhoenix.Worker.start_link(arg1, arg2, arg3)
+ # worker(ZeroPhoenix.Worker, [arg1, arg2, arg3]),
+ ]
+
+ # See https://hexdocs.pm/elixir/Supervisor.html
+ # for other strategies and supported options
+ opts = [strategy: :one_for_one, name: ZeroPhoenix.Supervisor]
+ Supervisor.start_link(children, opts)
+ end
+
+ # Tell Phoenix to update the endpoint configuration
+ # whenever the application is updated.
+ def config_change(changed, _new, removed) do
+ ZeroPhoenixWeb.Endpoint.config_change(changed, removed)
+ :ok
+ end
+end
diff --git a/zero-phoenix/lib/zero_phoenix/repo.ex b/zero-phoenix/lib/zero_phoenix/repo.ex
index 2c9fd52..c79196f 100644
--- a/zero-phoenix/lib/zero_phoenix/repo.ex
+++ b/zero-phoenix/lib/zero_phoenix/repo.ex
@@ -1,3 +1,11 @@
defmodule ZeroPhoenix.Repo do
use Ecto.Repo, otp_app: :zero_phoenix
+
+ @doc """
+ Dynamically loads the repository url from the
+ DATABASE_URL environment variable.
+ """
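+  # A typical value takes the form (illustrative): ecto://USER:PASS@HOST/DATABASE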
+ def init(_, opts) do
+ {:ok, Keyword.put(opts, :url, System.get_env("DATABASE_URL"))}
+ end
end
diff --git a/zero-phoenix/lib/zero_phoenix_web.ex b/zero-phoenix/lib/zero_phoenix_web.ex
new file mode 100644
index 0000000..df5149a
--- /dev/null
+++ b/zero-phoenix/lib/zero_phoenix_web.ex
@@ -0,0 +1,67 @@
+defmodule ZeroPhoenixWeb do
+ @moduledoc """
+ The entrypoint for defining your web interface, such
+ as controllers, views, channels and so on.
+
+ This can be used in your application as:
+
+ use ZeroPhoenixWeb, :controller
+ use ZeroPhoenixWeb, :view
+
+ The definitions below will be executed for every view,
+ controller, etc, so keep them short and clean, focused
+ on imports, uses and aliases.
+
+ Do NOT define functions inside the quoted expressions
+ below. Instead, define any helper function in modules
+ and import those modules here.
+ """
+
+ def controller do
+ quote do
+ use Phoenix.Controller, namespace: ZeroPhoenixWeb
+ import Plug.Conn
+ import ZeroPhoenixWeb.Router.Helpers
+ import ZeroPhoenixWeb.Gettext
+ end
+ end
+
+ def view do
+ quote do
+ use Phoenix.View, root: "lib/zero_phoenix_web/templates",
+ namespace: ZeroPhoenixWeb
+
+ # Import convenience functions from controllers
+ import Phoenix.Controller, only: [get_flash: 2, view_module: 1]
+
+ # Use all HTML functionality (forms, tags, etc)
+ use Phoenix.HTML
+
+ import ZeroPhoenixWeb.Router.Helpers
+ import ZeroPhoenixWeb.ErrorHelpers
+ import ZeroPhoenixWeb.Gettext
+ end
+ end
+
+ def router do
+ quote do
+ use Phoenix.Router
+ import Plug.Conn
+ import Phoenix.Controller
+ end
+ end
+
+ def channel do
+ quote do
+ use Phoenix.Channel
+ import ZeroPhoenixWeb.Gettext
+ end
+ end
+
+ @doc """
+ When used, dispatch to the appropriate controller/view/etc.
+ """
+ defmacro __using__(which) when is_atom(which) do
+ apply(__MODULE__, which, [])
+ end
+end
diff --git a/zero-phoenix/lib/zero_phoenix_web/.DS_Store b/zero-phoenix/lib/zero_phoenix_web/.DS_Store
new file mode 100644
index 0000000..0dccac6
Binary files /dev/null and b/zero-phoenix/lib/zero_phoenix_web/.DS_Store differ
diff --git a/zero-phoenix/web/channels/user_socket.ex b/zero-phoenix/lib/zero_phoenix_web/channels/user_socket.ex
similarity index 79%
rename from zero-phoenix/web/channels/user_socket.ex
rename to zero-phoenix/lib/zero_phoenix_web/channels/user_socket.ex
index 20f7cbd..5eff686 100644
--- a/zero-phoenix/web/channels/user_socket.ex
+++ b/zero-phoenix/lib/zero_phoenix_web/channels/user_socket.ex
@@ -1,8 +1,8 @@
-defmodule ZeroPhoenix.UserSocket do
+defmodule ZeroPhoenixWeb.UserSocket do
use Phoenix.Socket
## Channels
- # channel "room:*", ZeroPhoenix.RoomChannel
+ # channel "room:*", ZeroPhoenixWeb.RoomChannel
## Transports
transport :websocket, Phoenix.Transports.WebSocket
@@ -25,12 +25,12 @@ defmodule ZeroPhoenix.UserSocket do
# Socket id's are topics that allow you to identify all sockets for a given user:
#
- # def id(socket), do: "users_socket:#{socket.assigns.user_id}"
+ # def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
- # ZeroPhoenix.Endpoint.broadcast("users_socket:#{user.id}", "disconnect", %{})
+ # ZeroPhoenixWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
diff --git a/zero-phoenix/lib/zero_phoenix_web/controllers/fallback_controller.ex b/zero-phoenix/lib/zero_phoenix_web/controllers/fallback_controller.ex
new file mode 100644
index 0000000..f8f4a06
--- /dev/null
+++ b/zero-phoenix/lib/zero_phoenix_web/controllers/fallback_controller.ex
@@ -0,0 +1,20 @@
+defmodule ZeroPhoenixWeb.FallbackController do
+ @moduledoc """
+ Translates controller action results into valid `Plug.Conn` responses.
+
+ See `Phoenix.Controller.action_fallback/1` for more details.
+ """
+ use ZeroPhoenixWeb, :controller
+
+ def call(conn, {:error, %Ecto.Changeset{} = changeset}) do
+ conn
+ |> put_status(:unprocessable_entity)
+ |> render(ZeroPhoenixWeb.ChangesetView, "error.json", changeset: changeset)
+ end
+
+ def call(conn, {:error, :not_found}) do
+ conn
+ |> put_status(:not_found)
+ |> render(ZeroPhoenixWeb.ErrorView, :"404")
+ end
+end
diff --git a/zero-phoenix/lib/zero_phoenix_web/controllers/page_controller.ex b/zero-phoenix/lib/zero_phoenix_web/controllers/page_controller.ex
new file mode 100644
index 0000000..9a861b5
--- /dev/null
+++ b/zero-phoenix/lib/zero_phoenix_web/controllers/page_controller.ex
@@ -0,0 +1,7 @@
+defmodule ZeroPhoenixWeb.PageController do
+ use ZeroPhoenixWeb, :controller
+
+ def index(conn, _params) do
+ render conn, "index.html"
+ end
+end
diff --git a/zero-phoenix/lib/zero_phoenix_web/controllers/person_controller.ex b/zero-phoenix/lib/zero_phoenix_web/controllers/person_controller.ex
new file mode 100644
index 0000000..40139fb
--- /dev/null
+++ b/zero-phoenix/lib/zero_phoenix_web/controllers/person_controller.ex
@@ -0,0 +1,42 @@
+defmodule ZeroPhoenixWeb.PersonController do
+ use ZeroPhoenixWeb, :controller
+
+ alias ZeroPhoenix.Accounts
+ alias ZeroPhoenix.Accounts.Person
+
+ action_fallback ZeroPhoenixWeb.FallbackController
+
+ def index(conn, _params) do
+ people = Accounts.list_people()
+ render(conn, "index.json", people: people)
+ end
+
+ def create(conn, %{"person" => person_params}) do
+ with {:ok, %Person{} = person} <- Accounts.create_person(person_params) do
+ conn
+ |> put_status(:created)
+ |> put_resp_header("location", person_path(conn, :show, person))
+ |> render("show.json", person: person)
+ end
+ end
+
+ def show(conn, %{"id" => id}) do
+ person = Accounts.get_person!(id)
+ render(conn, "show.json", person: person)
+ end
+
+ def update(conn, %{"id" => id, "person" => person_params}) do
+ person = Accounts.get_person!(id)
+
+ with {:ok, %Person{} = person} <- Accounts.update_person(person, person_params) do
+ render(conn, "show.json", person: person)
+ end
+ end
+
+ def delete(conn, %{"id" => id}) do
+ person = Accounts.get_person!(id)
+ with {:ok, %Person{}} <- Accounts.delete_person(person) do
+ send_resp(conn, :no_content, "")
+ end
+ end
+end
diff --git a/zero-phoenix/lib/zero_phoenix/endpoint.ex b/zero-phoenix/lib/zero_phoenix_web/endpoint.ex
similarity index 64%
rename from zero-phoenix/lib/zero_phoenix/endpoint.ex
rename to zero-phoenix/lib/zero_phoenix_web/endpoint.ex
index 3223301..366950e 100644
--- a/zero-phoenix/lib/zero_phoenix/endpoint.ex
+++ b/zero-phoenix/lib/zero_phoenix_web/endpoint.ex
@@ -1,7 +1,7 @@
-defmodule ZeroPhoenix.Endpoint do
+defmodule ZeroPhoenixWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :zero_phoenix
- socket "/socket", ZeroPhoenix.UserSocket
+ socket "/socket", ZeroPhoenixWeb.UserSocket
# Serve at "/" the static files from "priv/static" directory.
#
@@ -36,7 +36,22 @@ defmodule ZeroPhoenix.Endpoint do
plug Plug.Session,
store: :cookie,
key: "_zero_phoenix_key",
- signing_salt: "QXaFyZw2"
-
- plug ZeroPhoenix.Router
+ signing_salt: "PP1YkqRV"
+
+ plug ZeroPhoenixWeb.Router
+
+ @doc """
+ Callback invoked for dynamically configuring the endpoint.
+
+ It receives the endpoint configuration and checks if
+ configuration should be loaded from the system environment.
+ """
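+  # e.g. started in production with: PORT=4000 MIX_ENV=prod mix phx.server (illustrative values)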
+ def init(_key, config) do
+ if config[:load_from_system_env] do
+ port = System.get_env("PORT") || raise "expected the PORT environment variable to be set"
+ {:ok, Keyword.put(config, :http, [:inet6, port: port])}
+ else
+ {:ok, config}
+ end
+ end
end
diff --git a/zero-phoenix/web/gettext.ex b/zero-phoenix/lib/zero_phoenix_web/gettext.ex
similarity index 90%
rename from zero-phoenix/web/gettext.ex
rename to zero-phoenix/lib/zero_phoenix_web/gettext.ex
index 2d2f178..a20f707 100644
--- a/zero-phoenix/web/gettext.ex
+++ b/zero-phoenix/lib/zero_phoenix_web/gettext.ex
@@ -1,11 +1,11 @@
-defmodule ZeroPhoenix.Gettext do
+defmodule ZeroPhoenixWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
- import ZeroPhoenix.Gettext
+ import ZeroPhoenixWeb.Gettext
# Simple translation
gettext "Here is the string to translate"
diff --git a/zero-phoenix/lib/zero_phoenix_web/graphql/schema.ex b/zero-phoenix/lib/zero_phoenix_web/graphql/schema.ex
new file mode 100644
index 0000000..544d819
--- /dev/null
+++ b/zero-phoenix/lib/zero_phoenix_web/graphql/schema.ex
@@ -0,0 +1,21 @@
+defmodule ZeroPhoenixWeb.Graphql.Schema do
+ use Absinthe.Schema
+
+ import_types(ZeroPhoenixWeb.Graphql.Types.Person)
+
+ alias ZeroPhoenix.Repo
+ alias ZeroPhoenix.Accounts.Person
+
+ query do
+ field :person, type: :person do
+ arg(:id, non_null(:id))
+
+ resolve(fn %{id: id}, _info ->
+ case Person |> Repo.get(id) do
+ nil -> {:error, "Person id #{id} not found"}
+ person -> {:ok, person}
+ end
+ end)
+ end
+ end
+end
diff --git a/zero-phoenix/web/graphql/types/person.ex b/zero-phoenix/lib/zero_phoenix_web/graphql/types/person.ex
similarity index 60%
rename from zero-phoenix/web/graphql/types/person.ex
rename to zero-phoenix/lib/zero_phoenix_web/graphql/types/person.ex
index 6a70955..f30681c 100644
--- a/zero-phoenix/web/graphql/types/person.ex
+++ b/zero-phoenix/lib/zero_phoenix_web/graphql/types/person.ex
@@ -1,4 +1,4 @@
-defmodule ZeroPhoenix.Graphql.Types.Person do
+defmodule ZeroPhoenixWeb.Graphql.Types.Person do
use Absinthe.Schema.Notation
import Ecto
@@ -8,25 +8,25 @@ defmodule ZeroPhoenix.Graphql.Types.Person do
@desc "a person"
object :person do
@desc "unique identifier for the person"
- field :id, non_null(:string)
+ field(:id, non_null(:string))
@desc "first name of a person"
- field :first_name, non_null(:string)
+ field(:first_name, non_null(:string))
@desc "last name of a person"
- field :last_name, non_null(:string)
+ field(:last_name, non_null(:string))
@desc "username of a person"
- field :username, non_null(:string)
+ field(:username, non_null(:string))
@desc "email of a person"
- field :email, non_null(:string)
+ field(:email, non_null(:string))
@desc "a list of friends for our person"
field :friends, list_of(:person) do
- resolve fn _, %{source: person} ->
+ resolve(fn _, %{source: person} ->
{:ok, Repo.all(assoc(person, :friends))}
- end
+ end)
end
end
end
diff --git a/zero-phoenix/lib/zero_phoenix_web/router.ex b/zero-phoenix/lib/zero_phoenix_web/router.ex
new file mode 100644
index 0000000..29d1a58
--- /dev/null
+++ b/zero-phoenix/lib/zero_phoenix_web/router.ex
@@ -0,0 +1,39 @@
+defmodule ZeroPhoenixWeb.Router do
+ use ZeroPhoenixWeb, :router
+
+ pipeline :browser do
+ plug(:accepts, ["html"])
+ plug(:fetch_session)
+ plug(:fetch_flash)
+ plug(:protect_from_forgery)
+ plug(:put_secure_browser_headers)
+ end
+
+ pipeline :api do
+ plug(:accepts, ["json"])
+ end
+
+ scope "/", ZeroPhoenixWeb do
+ # Use the default browser stack
+ pipe_through(:browser)
+
+ get("/", PageController, :index)
+ end
+
+  scope "/api", ZeroPhoenixWeb do
+ pipe_through(:api)
+
+ resources("/people", PersonController, except: [:new, :edit])
+ end
+
+ scope "/graphiql" do
+ pipe_through(:api)
+
+ forward(
+ "/",
+ Absinthe.Plug.GraphiQL,
+ schema: ZeroPhoenixWeb.Graphql.Schema,
+ interface: :simple
+ )
+ end
+end
diff --git a/zero-phoenix/web/templates/layout/app.html.eex b/zero-phoenix/lib/zero_phoenix_web/templates/layout/app.html.eex
similarity index 100%
rename from zero-phoenix/web/templates/layout/app.html.eex
rename to zero-phoenix/lib/zero_phoenix_web/templates/layout/app.html.eex
diff --git a/zero-phoenix/web/templates/page/index.html.eex b/zero-phoenix/lib/zero_phoenix_web/templates/page/index.html.eex
similarity index 93%
rename from zero-phoenix/web/templates/page/index.html.eex
rename to zero-phoenix/lib/zero_phoenix_web/templates/page/index.html.eex
index 8ff4b81..0988ea5 100644
--- a/zero-phoenix/web/templates/page/index.html.eex
+++ b/zero-phoenix/lib/zero_phoenix_web/templates/page/index.html.eex
@@ -1,5 +1,5 @@
-
<%= gettext "Welcome to %{name}", name: "Phoenix!" %>
+
<%= gettext "Welcome to %{name}!", name: "Phoenix" %>
A productive web framework that
does not compromise speed and maintainability.
diff --git a/zero-phoenix/web/views/changeset_view.ex b/zero-phoenix/lib/zero_phoenix_web/views/changeset_view.ex
similarity index 75%
rename from zero-phoenix/web/views/changeset_view.ex
rename to zero-phoenix/lib/zero_phoenix_web/views/changeset_view.ex
index ee169fc..675a0d9 100644
--- a/zero-phoenix/web/views/changeset_view.ex
+++ b/zero-phoenix/lib/zero_phoenix_web/views/changeset_view.ex
@@ -1,11 +1,11 @@
-defmodule ZeroPhoenix.ChangesetView do
- use ZeroPhoenix.Web, :view
+defmodule ZeroPhoenixWeb.ChangesetView do
+ use ZeroPhoenixWeb, :view
@doc """
Traverses and translates changeset errors.
See `Ecto.Changeset.traverse_errors/2` and
- `ZeroPhoenix.ErrorHelpers.translate_error/1` for more details.
+ `ZeroPhoenixWeb.ErrorHelpers.translate_error/1` for more details.
"""
def translate_errors(changeset) do
Ecto.Changeset.traverse_errors(changeset, &translate_error/1)
diff --git a/zero-phoenix/web/views/error_helpers.ex b/zero-phoenix/lib/zero_phoenix_web/views/error_helpers.ex
similarity index 79%
rename from zero-phoenix/web/views/error_helpers.ex
rename to zero-phoenix/lib/zero_phoenix_web/views/error_helpers.ex
index 7409094..1efabeb 100644
--- a/zero-phoenix/web/views/error_helpers.ex
+++ b/zero-phoenix/lib/zero_phoenix_web/views/error_helpers.ex
@@ -1,4 +1,4 @@
-defmodule ZeroPhoenix.ErrorHelpers do
+defmodule ZeroPhoenixWeb.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
@@ -9,9 +9,9 @@ defmodule ZeroPhoenix.ErrorHelpers do
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
- if error = form.errors[field] do
+ Enum.map(Keyword.get_values(form.errors, field), fn (error) ->
content_tag :span, translate_error(error), class: "help-block"
- end
+ end)
end
@doc """
@@ -32,9 +32,9 @@ defmodule ZeroPhoenix.ErrorHelpers do
# dgettext "errors", "is invalid"
#
if count = opts[:count] do
- Gettext.dngettext(ZeroPhoenix.Gettext, "errors", msg, msg, count, opts)
+ Gettext.dngettext(ZeroPhoenixWeb.Gettext, "errors", msg, msg, count, opts)
else
- Gettext.dgettext(ZeroPhoenix.Gettext, "errors", msg, opts)
+ Gettext.dgettext(ZeroPhoenixWeb.Gettext, "errors", msg, opts)
end
end
end
diff --git a/zero-phoenix/web/views/error_view.ex b/zero-phoenix/lib/zero_phoenix_web/views/error_view.ex
similarity index 82%
rename from zero-phoenix/web/views/error_view.ex
rename to zero-phoenix/lib/zero_phoenix_web/views/error_view.ex
index 28525bf..0e77fce 100644
--- a/zero-phoenix/web/views/error_view.ex
+++ b/zero-phoenix/lib/zero_phoenix_web/views/error_view.ex
@@ -1,5 +1,5 @@
-defmodule ZeroPhoenix.ErrorView do
- use ZeroPhoenix.Web, :view
+defmodule ZeroPhoenixWeb.ErrorView do
+ use ZeroPhoenixWeb, :view
def render("404.html", _assigns) do
"Page not found"
diff --git a/zero-phoenix/lib/zero_phoenix_web/views/layout_view.ex b/zero-phoenix/lib/zero_phoenix_web/views/layout_view.ex
new file mode 100644
index 0000000..2b144cf
--- /dev/null
+++ b/zero-phoenix/lib/zero_phoenix_web/views/layout_view.ex
@@ -0,0 +1,3 @@
+defmodule ZeroPhoenixWeb.LayoutView do
+ use ZeroPhoenixWeb, :view
+end
diff --git a/zero-phoenix/lib/zero_phoenix_web/views/page_view.ex b/zero-phoenix/lib/zero_phoenix_web/views/page_view.ex
new file mode 100644
index 0000000..9e70af6
--- /dev/null
+++ b/zero-phoenix/lib/zero_phoenix_web/views/page_view.ex
@@ -0,0 +1,3 @@
+defmodule ZeroPhoenixWeb.PageView do
+ use ZeroPhoenixWeb, :view
+end
diff --git a/zero-phoenix/web/views/person_view.ex b/zero-phoenix/lib/zero_phoenix_web/views/person_view.ex
similarity index 54%
rename from zero-phoenix/web/views/person_view.ex
rename to zero-phoenix/lib/zero_phoenix_web/views/person_view.ex
index ba3f67d..1daaf97 100644
--- a/zero-phoenix/web/views/person_view.ex
+++ b/zero-phoenix/lib/zero_phoenix_web/views/person_view.ex
@@ -1,12 +1,13 @@
-defmodule ZeroPhoenix.PersonView do
- use ZeroPhoenix.Web, :view
+defmodule ZeroPhoenixWeb.PersonView do
+ use ZeroPhoenixWeb, :view
+ alias ZeroPhoenixWeb.PersonView
def render("index.json", %{people: people}) do
- %{data: render_many(people, ZeroPhoenix.PersonView, "person.json")}
+ %{data: render_many(people, PersonView, "person.json")}
end
def render("show.json", %{person: person}) do
- %{data: render_one(person, ZeroPhoenix.PersonView, "person.json")}
+ %{data: render_one(person, PersonView, "person.json")}
end
def render("person.json", %{person: person}) do
@@ -14,6 +15,6 @@ defmodule ZeroPhoenix.PersonView do
first_name: person.first_name,
last_name: person.last_name,
username: person.username,
- email: person. email}
+ email: person.email}
end
end
diff --git a/zero-phoenix/mix.exs b/zero-phoenix/mix.exs
index f861699..3f13f57 100644
--- a/zero-phoenix/mix.exs
+++ b/zero-phoenix/mix.exs
@@ -2,15 +2,16 @@ defmodule ZeroPhoenix.Mixfile do
use Mix.Project
def project do
- [app: :zero_phoenix,
- version: "0.0.1",
- elixir: "~> 1.5.0-dev",
- elixirc_paths: elixirc_paths(Mix.env),
- compilers: [:phoenix, :gettext] ++ Mix.compilers,
- build_embedded: Mix.env == :prod,
- start_permanent: Mix.env == :prod,
- aliases: aliases(),
- deps: deps()]
+ [
+ app: :zero_phoenix,
+ version: "0.0.1",
+ elixir: "~> 1.6.1",
+ elixirc_paths: elixirc_paths(Mix.env()),
+ compilers: [:phoenix, :gettext] ++ Mix.compilers(),
+ start_permanent: Mix.env() == :prod,
+ aliases: aliases(),
+ deps: deps()
+ ]
end
# Configuration for the OTP application.
@@ -18,42 +19,30 @@ defmodule ZeroPhoenix.Mixfile do
# Type `mix help compile.app` for more information.
def application do
[
- mod: {ZeroPhoenix, []},
- applications:
- [
- :phoenix,
- :phoenix_pubsub,
- :phoenix_html,
- :cowboy,
- :logger,
- :gettext,
- :phoenix_ecto,
- :postgrex,
- :absinthe_plug
- ]
+ mod: {ZeroPhoenix.Application, []},
+ extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
- defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
- defp elixirc_paths(_), do: ["lib", "web"]
+ defp elixirc_paths(:test), do: ["lib", "test/support"]
+ defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
- {:phoenix, "~> 1.2.0"},
+ {:phoenix, "~> 1.3.0"},
{:phoenix_pubsub, "~> 1.0"},
- {:ecto, github: "elixir-ecto/ecto", override: true},
- {:phoenix_ecto, "~> 3.0"},
+ {:phoenix_ecto, "~> 3.2"},
{:postgrex, ">= 0.0.0"},
- {:phoenix_html, "~> 2.6"},
+ {:phoenix_html, "~> 2.10"},
{:phoenix_live_reload, "~> 1.0", only: :dev},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.0"},
{:absinthe_plug, "~> 1.3"}
- ]
+ ]
end
# Aliases are shortcuts or tasks specific to the current project.
@@ -63,9 +52,10 @@ defmodule ZeroPhoenix.Mixfile do
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
- ["ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
- "ecto.reset": ["ecto.drop", "ecto.setup"],
- "ecto.seed": ["run priv/repo/seeds.exs"],
- "test": ["ecto.create --quiet", "ecto.migrate", "test"]]
+ [
+ "ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
+ "ecto.reset": ["ecto.drop", "ecto.setup"],
+ test: ["ecto.create --quiet", "ecto.migrate", "test"]
+ ]
end
end
diff --git a/zero-phoenix/mix.lock b/zero-phoenix/mix.lock
index 74897a3..bb7c1a8 100644
--- a/zero-phoenix/mix.lock
+++ b/zero-phoenix/mix.lock
@@ -1,34 +1,37 @@
-%{"absinthe": {:hex, :absinthe, "1.3.0", "0b58aec87c115025c6abbbdaebdd2b5d545d5c47a342e5a8c790d5989d27b24c", [:mix], []},
- "absinthe_plug": {:hex, :absinthe_plug, "1.3.0", "52bcf04fc95463843cbd78994f7dd9b587b299c7065081582d37d59cdcc68d98", [:mix], [{:absinthe, "~> 1.3.0", [hex: :absinthe, optional: false]}, {:plug, "~> 1.3.2 or ~> 1.4", [hex: :plug, optional: false]}]},
+%{
+ "absinthe": {:hex, :absinthe, "1.4.7", "c92411307e01d0193f2eafc0d6e28273e476f66d5d20e031f879ce1fec0ad9e3", [:mix], [{:dataloader, "~> 1.0.0", [hex: :dataloader, repo: "hexpm", optional: true]}, {:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: true]}], "hexpm"},
+ "absinthe_plug": {:hex, :absinthe_plug, "1.4.2", "01bf16f0a637869bcc0a1919935f08ff853501004e7549ddaa3a7788deb48965", [:mix], [{:absinthe, "~> 1.4", [hex: :absinthe, repo: "hexpm", optional: false]}, {:plug, "~> 1.3.2 or ~> 1.4", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
"certifi": {:hex, :certifi, "0.4.0", "a7966efb868b179023618d29a407548f70c52466bf1849b9e8ebd0e34b7ea11f", [:rebar3], []},
"combine": {:hex, :combine, "0.9.1", "5fd778ee77032ae593bf79aedb8519d9e36283e4f869abd98c2d6029ca476db8", [:mix], []},
- "connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], []},
- "cowboy": {:hex, :cowboy, "1.1.2", "61ac29ea970389a88eca5a65601460162d370a70018afe6f949a29dca91f3bb0", [:rebar3], [{:cowlib, "~> 1.0.2", [hex: :cowlib, optional: false]}, {:ranch, "~> 1.3.2", [hex: :ranch, optional: false]}]},
- "cowlib": {:hex, :cowlib, "1.0.2", "9d769a1d062c9c3ac753096f868ca121e2730b9a377de23dec0f7e08b1df84ee", [:make], []},
- "db_connection": {:hex, :db_connection, "1.0.0-rc.4", "fad1f772c151cc6bde82412b8d72319968bc7221df8ef7d5e9d7fde7cb5c86b7", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, optional: false]}, {:poolboy, "~> 1.5", [hex: :poolboy, optional: true]}, {:sbroker, "~> 1.0.0-beta.3", [hex: :sbroker, optional: true]}]},
- "decimal": {:hex, :decimal, "1.1.2", "79a769d4657b2d537b51ef3c02d29ab7141d2b486b516c109642d453ee08e00c", [:mix], []},
- "ecto": {:git, "https://github.com/elixir-ecto/ecto.git", "16529476ecaeb4af92ceeea2aa41094426910b4c", []},
- "fs": {:hex, :fs, "0.9.2", "ed17036c26c3f70ac49781ed9220a50c36775c6ca2cf8182d123b6566e49ec59", [:rebar], []},
- "gettext": {:hex, :gettext, "0.11.0", "80c1dd42d270482418fa158ec5ba073d2980e3718bacad86f3d4ad71d5667679", [:mix], []},
+ "connection": {:hex, :connection, "1.0.4", "a1cae72211f0eef17705aaededacac3eb30e6625b04a6117c1b2db6ace7d5976", [:mix], [], "hexpm"},
+ "cowboy": {:hex, :cowboy, "1.1.2", "61ac29ea970389a88eca5a65601460162d370a70018afe6f949a29dca91f3bb0", [:rebar3], [{:cowlib, "~> 1.0.2", [hex: :cowlib, repo: "hexpm", optional: false]}, {:ranch, "~> 1.3.2", [hex: :ranch, repo: "hexpm", optional: false]}], "hexpm"},
+ "cowlib": {:hex, :cowlib, "1.0.2", "9d769a1d062c9c3ac753096f868ca121e2730b9a377de23dec0f7e08b1df84ee", [:make], [], "hexpm"},
+ "db_connection": {:hex, :db_connection, "1.1.3", "89b30ca1ef0a3b469b1c779579590688561d586694a3ce8792985d4d7e575a61", [:mix], [{:connection, "~> 1.0.2", [hex: :connection, repo: "hexpm", optional: false]}, {:poolboy, "~> 1.5", [hex: :poolboy, repo: "hexpm", optional: true]}, {:sbroker, "~> 1.0", [hex: :sbroker, repo: "hexpm", optional: true]}], "hexpm"},
+ "decimal": {:hex, :decimal, "1.4.1", "ad9e501edf7322f122f7fc151cce7c2a0c9ada96f2b0155b8a09a795c2029770", [:mix], [], "hexpm"},
+ "ecto": {:hex, :ecto, "2.2.8", "a4463c0928b970f2cee722cd29aaac154e866a15882c5737e0038bbfcf03ec2c", [:mix], [{:db_connection, "~> 1.1", [hex: :db_connection, repo: "hexpm", optional: true]}, {:decimal, "~> 1.2", [hex: :decimal, repo: "hexpm", optional: false]}, {:mariaex, "~> 0.8.0", [hex: :mariaex, repo: "hexpm", optional: true]}, {:poison, "~> 2.2 or ~> 3.0", [hex: :poison, repo: "hexpm", optional: true]}, {:poolboy, "~> 1.5", [hex: :poolboy, repo: "hexpm", optional: false]}, {:postgrex, "~> 0.13.0", [hex: :postgrex, repo: "hexpm", optional: true]}, {:sbroker, "~> 1.0", [hex: :sbroker, repo: "hexpm", optional: true]}], "hexpm"},
+ "file_system": {:hex, :file_system, "0.2.4", "f0bdda195c0e46e987333e986452ec523aed21d784189144f647c43eaf307064", [:mix], [], "hexpm"},
+ "fs": {:hex, :fs, "0.9.2", "ed17036c26c3f70ac49781ed9220a50c36775c6ca2cf8182d123b6566e49ec59", [:rebar], [], "hexpm"},
+ "gettext": {:hex, :gettext, "0.14.0", "1a019a2e51d5ad3d126efe166dcdf6563768e5d06c32a99ad2281a1fa94b4c72", [:mix], [], "hexpm"},
"graphql": {:hex, :graphql, "0.3.1", "d3bb5467877456cc2b33debc75407e9216567b10e35e83d5195e2d51e835e8c7", [:mix], []},
"hackney": {:hex, :hackney, "1.6.1", "ddd22d42db2b50e6a155439c8811b8f6df61a4395de10509714ad2751c6da817", [:rebar3], [{:certifi, "0.4.0", [hex: :certifi, optional: false]}, {:idna, "1.2.0", [hex: :idna, optional: false]}, {:metrics, "1.0.1", [hex: :metrics, optional: false]}, {:mimerl, "1.0.2", [hex: :mimerl, optional: false]}, {:ssl_verify_fun, "1.1.0", [hex: :ssl_verify_fun, optional: false]}]},
"idna": {:hex, :idna, "1.2.0", "ac62ee99da068f43c50dc69acf700e03a62a348360126260e87f2b54eced86b2", [:rebar3], []},
"inflex": {:hex, :inflex, "1.5.0", "e4ff5d900280b2011b24d1ac1c4590986ee5add2ea644c9894e72213cf93ff0b", [:mix], []},
"metrics": {:hex, :metrics, "1.0.1", "25f094dea2cda98213cecc3aeff09e940299d950904393b2a29d191c346a8486", [:rebar3], []},
- "mime": {:hex, :mime, "1.1.0", "01c1d6f4083d8aa5c7b8c246ade95139620ef8effb009edde934e0ec3b28090a", [:mix], []},
+ "mime": {:hex, :mime, "1.2.0", "78adaa84832b3680de06f88f0997e3ead3b451a440d183d688085be2d709b534", [:mix], [], "hexpm"},
"mimerl": {:hex, :mimerl, "1.0.2", "993f9b0e084083405ed8252b99460c4f0563e41729ab42d9074fd5e52439be88", [:rebar3], []},
"moebius": {:hex, :moebius, "2.0.3", "5cd3c60c685876253edff169f55aab600cb6b73d50166364f13b90a904409998", [:mix], [{:inflex, "~> 1.5.0", [hex: :inflex, optional: false]}, {:poison, "~> 2.0.1", [hex: :poison, optional: true]}, {:poolboy, "~> 1.5", [hex: :poolboy, optional: false]}, {:postgrex, "~> 0.11.0", [hex: :postgrex, optional: false]}, {:timex, "~> 2.0", [hex: :timex, optional: false]}]},
- "phoenix": {:hex, :phoenix, "1.2.0", "1bdeb99c254f4c534cdf98fd201dede682297ccc62fcac5d57a2627c3b6681fb", [:mix], [{:cowboy, "~> 1.0", [hex: :cowboy, optional: true]}, {:phoenix_pubsub, "~> 1.0", [hex: :phoenix_pubsub, optional: false]}, {:plug, "~> 1.1", [hex: :plug, optional: false]}, {:poison, "~> 1.5 or ~> 2.0", [hex: :poison, optional: false]}]},
- "phoenix_ecto": {:hex, :phoenix_ecto, "3.0.0", "b947aaf03d076f5b1448f87828f22fb7710478ee38455c67cc3fe8e9a4dfd015", [:mix], [{:ecto, "~> 2.0.0-rc", [hex: :ecto, optional: false]}, {:phoenix_html, "~> 2.6", [hex: :phoenix_html, optional: true]}]},
- "phoenix_html": {:hex, :phoenix_html, "2.6.2", "944a5e581b0d899e4f4c838a69503ebd05300fe35ba228a74439e6253e10e0c0", [:mix], [{:plug, "~> 1.0", [hex: :plug, optional: false]}]},
- "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.0.5", "829218c4152ba1e9848e2bf8e161fcde6b4ec679a516259442561d21fde68d0b", [:mix], [{:fs, "~> 0.9.1", [hex: :fs, optional: false]}, {:phoenix, "~> 1.0 or ~> 1.2-rc", [hex: :phoenix, optional: false]}]},
- "phoenix_pubsub": {:hex, :phoenix_pubsub, "1.0.0", "c31af4be22afeeebfaf246592778c8c840e5a1ddc7ca87610c41ccfb160c2c57", [:mix], []},
- "plug": {:hex, :plug, "1.3.5", "7503bfcd7091df2a9761ef8cecea666d1f2cc454cbbaf0afa0b6e259203b7031", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1", [hex: :cowboy, optional: true]}, {:mime, "~> 1.0", [hex: :mime, optional: false]}]},
+ "phoenix": {:hex, :phoenix, "1.3.0", "1c01124caa1b4a7af46f2050ff11b267baa3edb441b45dbf243e979cd4c5891b", [:mix], [{:cowboy, "~> 1.0", [hex: :cowboy, repo: "hexpm", optional: true]}, {:phoenix_pubsub, "~> 1.0", [hex: :phoenix_pubsub, repo: "hexpm", optional: false]}, {:plug, "~> 1.3.3 or ~> 1.4", [hex: :plug, repo: "hexpm", optional: false]}, {:poison, "~> 2.2 or ~> 3.0", [hex: :poison, repo: "hexpm", optional: false]}], "hexpm"},
+ "phoenix_ecto": {:hex, :phoenix_ecto, "3.3.0", "702f6e164512853d29f9d20763493f2b3bcfcb44f118af2bc37bb95d0801b480", [:mix], [{:ecto, "~> 2.1", [hex: :ecto, repo: "hexpm", optional: false]}, {:phoenix_html, "~> 2.9", [hex: :phoenix_html, repo: "hexpm", optional: true]}, {:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
+ "phoenix_html": {:hex, :phoenix_html, "2.10.5", "4f9df6b0fb7422a9440a73182a566cb9cbe0e3ffe8884ef9337ccf284fc1ef0a", [:mix], [{:plug, "~> 1.0", [hex: :plug, repo: "hexpm", optional: false]}], "hexpm"},
+ "phoenix_live_reload": {:hex, :phoenix_live_reload, "1.1.3", "1d178429fc8950b12457d09c6afec247bfe1fcb6f36209e18fbb0221bdfe4d41", [:mix], [{:file_system, "~> 0.2.1 or ~> 0.3", [hex: :file_system, repo: "hexpm", optional: false]}, {:phoenix, "~> 1.0 or ~> 1.2 or ~> 1.3", [hex: :phoenix, repo: "hexpm", optional: false]}], "hexpm"},
+ "phoenix_pubsub": {:hex, :phoenix_pubsub, "1.0.2", "bfa7fd52788b5eaa09cb51ff9fcad1d9edfeb68251add458523f839392f034c1", [:mix], [], "hexpm"},
+ "plug": {:hex, :plug, "1.4.4", "279b547662272cd835a8ca089717201dd3be51bb4705354eaf1b0346744acc82", [:mix], [{:cowboy, "~> 1.0.1 or ~> 1.1", [hex: :cowboy, repo: "hexpm", optional: true]}, {:mime, "~> 1.0", [hex: :mime, repo: "hexpm", optional: false]}], "hexpm"},
"plug_graphql": {:hex, :plug_graphql, "0.3.1", "8c3c91b889236620cea19606ab5b770c1c533242c92d97c23b9f359be4eb4c03", [:mix], [{:cowboy, "~> 1.0", [hex: :cowboy, optional: false]}, {:graphql, "~> 0.3", [hex: :graphql, optional: false]}, {:plug, "~> 0.14 or ~> 1.0", [hex: :plug, optional: false]}, {:poison, "~> 1.5 or ~> 2.0", [hex: :poison, optional: false]}]},
- "poison": {:hex, :poison, "2.0.1", "81248a36d1b602b17ea6556bfa8952492091f01af05173de11f8b297e2bbf088", [:mix], []},
- "poolboy": {:hex, :poolboy, "1.5.1", "6b46163901cfd0a1b43d692657ed9d7e599853b3b21b95ae5ae0a777cf9b6ca8", [:rebar], []},
- "postgrex": {:hex, :postgrex, "0.11.2", "139755c1359d3c5c6d6e8b1ea72556d39e2746f61c6ddfb442813c91f53487e8", [:mix], [{:connection, "~> 1.0", [hex: :connection, optional: false]}, {:db_connection, "~> 1.0-rc", [hex: :db_connection, optional: false]}, {:decimal, "~> 1.0", [hex: :decimal, optional: false]}]},
- "ranch": {:hex, :ranch, "1.3.2", "e4965a144dc9fbe70e5c077c65e73c57165416a901bd02ea899cfd95aa890986", [:rebar3], []},
+ "poison": {:hex, :poison, "3.1.0", "d9eb636610e096f86f25d9a46f35a9facac35609a7591b3be3326e99a0484665", [:mix], [], "hexpm"},
+ "poolboy": {:hex, :poolboy, "1.5.1", "6b46163901cfd0a1b43d692657ed9d7e599853b3b21b95ae5ae0a777cf9b6ca8", [:rebar], [], "hexpm"},
+ "postgrex": {:hex, :postgrex, "0.13.5", "3d931aba29363e1443da167a4b12f06dcd171103c424de15e5f3fc2ba3e6d9c5", [:mix], [{:connection, "~> 1.0", [hex: :connection, repo: "hexpm", optional: false]}, {:db_connection, "~> 1.1", [hex: :db_connection, repo: "hexpm", optional: false]}, {:decimal, "~> 1.0", [hex: :decimal, repo: "hexpm", optional: false]}], "hexpm"},
+ "ranch": {:hex, :ranch, "1.3.2", "e4965a144dc9fbe70e5c077c65e73c57165416a901bd02ea899cfd95aa890986", [:rebar3], [], "hexpm"},
"ssl_verify_fun": {:hex, :ssl_verify_fun, "1.1.0", "edee20847c42e379bf91261db474ffbe373f8acb56e9079acb6038d4e0bf414f", [:rebar, :make], []},
"timex": {:hex, :timex, "2.2.1", "0d69012a7fd69f4cbdaa00cc5f2a5f30f1bed56072fb362ed4bddf60db343022", [:mix], [{:combine, "~> 0.7", [hex: :combine, optional: false]}, {:gettext, "~> 0.10", [hex: :gettext, optional: false]}, {:tzdata, "~> 0.1.8 or ~> 0.5", [hex: :tzdata, optional: false]}]},
- "tzdata": {:hex, :tzdata, "0.5.8", "a4ffe564783c6519e4df230a5d0e1cf44b7db7f576bcae76d05540b5da5b6143", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, optional: false]}]}}
+ "tzdata": {:hex, :tzdata, "0.5.8", "a4ffe564783c6519e4df230a5d0e1cf44b7db7f576bcae76d05540b5da5b6143", [:mix], [{:hackney, "~> 1.0", [hex: :hackney, optional: false]}]},
+}
diff --git a/zero-phoenix/priv/gettext/en/LC_MESSAGES/errors.po b/zero-phoenix/priv/gettext/en/LC_MESSAGES/errors.po
index 087d374..a589998 100644
--- a/zero-phoenix/priv/gettext/en/LC_MESSAGES/errors.po
+++ b/zero-phoenix/priv/gettext/en/LC_MESSAGES/errors.po
@@ -22,6 +22,10 @@ msgstr ""
msgid "is invalid"
msgstr ""
+## From Ecto.Changeset.validate_acceptance/3
+msgid "must be accepted"
+msgstr ""
+
## From Ecto.Changeset.validate_format/3
msgid "has invalid format"
msgstr ""
@@ -39,10 +43,10 @@ msgid "does not match confirmation"
msgstr ""
## From Ecto.Changeset.no_assoc_constraint/3
-msgid "is still associated to this entry"
+msgid "is still associated with this entry"
msgstr ""
-msgid "are still associated to this entry"
+msgid "are still associated with this entry"
msgstr ""
## From Ecto.Changeset.validate_length/3
diff --git a/zero-phoenix/priv/gettext/errors.pot b/zero-phoenix/priv/gettext/errors.pot
index a228957..7b2d5ca 100644
--- a/zero-phoenix/priv/gettext/errors.pot
+++ b/zero-phoenix/priv/gettext/errors.pot
@@ -20,6 +20,10 @@ msgstr ""
msgid "is invalid"
msgstr ""
+## From Ecto.Changeset.validate_acceptance/3
+msgid "must be accepted"
+msgstr ""
+
## From Ecto.Changeset.validate_format/3
msgid "has invalid format"
msgstr ""
@@ -37,10 +41,10 @@ msgid "does not match confirmation"
msgstr ""
## From Ecto.Changeset.no_assoc_constraint/3
-msgid "is still associated to this entry"
+msgid "is still associated with this entry"
msgstr ""
-msgid "are still associated to this entry"
+msgid "are still associated with this entry"
msgstr ""
## From Ecto.Changeset.validate_length/3
diff --git a/zero-phoenix/priv/repo/migrations/20160730004705_create_person.exs b/zero-phoenix/priv/repo/migrations/20160730004705_create_person.exs
index 74bf83a..7dc7ec0 100644
--- a/zero-phoenix/priv/repo/migrations/20160730004705_create_person.exs
+++ b/zero-phoenix/priv/repo/migrations/20160730004705_create_person.exs
@@ -10,6 +10,5 @@ defmodule ZeroPhoenix.Repo.Migrations.CreatePerson do
timestamps()
end
-
end
end
diff --git a/zero-phoenix/priv/repo/migrations/20180213235043_create_people.exs b/zero-phoenix/priv/repo/migrations/20180213235043_create_people.exs
new file mode 100644
index 0000000..b5177fa
--- /dev/null
+++ b/zero-phoenix/priv/repo/migrations/20180213235043_create_people.exs
@@ -0,0 +1,15 @@
+defmodule ZeroPhoenix.Repo.Migrations.CreatePeople do
+ use Ecto.Migration
+
+ def change do
+ create table(:people) do
+ add :first_name, :string
+ add :last_name, :string
+ add :username, :string
+ add :email, :string
+
+ timestamps()
+ end
+
+ end
+end
diff --git a/zero-phoenix/priv/repo/migrations/20180214003530_create_friendships.exs b/zero-phoenix/priv/repo/migrations/20180214003530_create_friendships.exs
new file mode 100644
index 0000000..dee5f1e
--- /dev/null
+++ b/zero-phoenix/priv/repo/migrations/20180214003530_create_friendships.exs
@@ -0,0 +1,15 @@
+defmodule ZeroPhoenix.Repo.Migrations.CreateFriendships do
+ use Ecto.Migration
+
+ def change do
+ create table(:friendships) do
+ add :person_id, references(:people, on_delete: :nothing)
+ add :friend_id, references(:people, on_delete: :nothing)
+
+ timestamps()
+ end
+
+ create index(:friendships, [:person_id])
+ create index(:friendships, [:friend_id])
+ end
+end
diff --git a/zero-phoenix/priv/repo/seeds.exs b/zero-phoenix/priv/repo/seeds.exs
index 869293e..96511f0 100644
--- a/zero-phoenix/priv/repo/seeds.exs
+++ b/zero-phoenix/priv/repo/seeds.exs
@@ -5,60 +5,88 @@
# Inside the script, you can read and write to any of your
# repositories directly:
#
-# ZeroPhoenix.Repo.insert!(%ZeroPhoenix.SomeModel{})
+# ZeroPhoenix.Repo.insert!(%ZeroPhoenix.SomeSchema{})
#
# We recommend using the bang functions (`insert!`, `update!`
# and so on) as they will fail if something goes wrong.
alias ZeroPhoenix.Repo
-alias ZeroPhoenix.Person
-alias ZeroPhoenix.Friendship
+alias ZeroPhoenix.Accounts.Person
+alias ZeroPhoenix.Accounts.Friendship
# reset the datastore
+Repo.delete_all(Friendship)
Repo.delete_all(Person)
# insert people
-me = Repo.insert!(%Person{ first_name: "Steven", last_name: "Luscher", email: "steveluscher@fb.com", username: "steveluscher" })
-dhh = Repo.insert!(%Person{ first_name: "David", last_name: "Heinemeier Hansson", email: "dhh@37signals.com", username: "dhh" })
-ezra = Repo.insert!(%Person{ first_name: "Ezra", last_name: "Zygmuntowicz", email: "ezra@merbivore.com", username: "ezra" })
-matz = Repo.insert!(%Person{ first_name: "Yukihiro", last_name: "Matsumoto", email: "matz@heroku.com", username: "matz" })
+me =
+ Repo.insert!(%Person{
+ first_name: "Steven",
+ last_name: "Luscher",
+ email: "steveluscher@fb.com",
+ username: "steveluscher"
+ })
+
+dhh =
+ Repo.insert!(%Person{
+ first_name: "David",
+ last_name: "Heinemeier Hansson",
+ email: "dhh@37signals.com",
+ username: "dhh"
+ })
+
+ezra =
+ Repo.insert!(%Person{
+ first_name: "Ezra",
+ last_name: "Zygmuntowicz",
+ email: "ezra@merbivore.com",
+ username: "ezra"
+ })
+
+matz =
+ Repo.insert!(%Person{
+ first_name: "Yukihiro",
+ last_name: "Matsumoto",
+ email: "matz@heroku.com",
+ username: "matz"
+ })
me
|> Ecto.build_assoc(:friendships)
-|> Friendship.changeset( %{ person_id: me.id, friend_id: matz.id } )
-|> Repo.insert
+|> Friendship.changeset(%{person_id: me.id, friend_id: matz.id})
+|> Repo.insert()
dhh
|> Ecto.build_assoc(:friendships)
-|> Friendship.changeset( %{ person_id: dhh.id, friend_id: ezra.id } )
-|> Repo.insert
+|> Friendship.changeset(%{person_id: dhh.id, friend_id: ezra.id})
+|> Repo.insert()
dhh
|> Ecto.build_assoc(:friendships)
-|> Friendship.changeset( %{ person_id: dhh.id, friend_id: matz.id } )
-|> Repo.insert
+|> Friendship.changeset(%{person_id: dhh.id, friend_id: matz.id})
+|> Repo.insert()
ezra
|> Ecto.build_assoc(:friendships)
-|> Friendship.changeset( %{ person_id: ezra.id, friend_id: dhh.id } )
-|> Repo.insert
+|> Friendship.changeset(%{person_id: ezra.id, friend_id: dhh.id})
+|> Repo.insert()
ezra
|> Ecto.build_assoc(:friendships)
-|> Friendship.changeset( %{ person_id: ezra.id, friend_id: matz.id } )
-|> Repo.insert
+|> Friendship.changeset(%{person_id: ezra.id, friend_id: matz.id})
+|> Repo.insert()
matz
|> Ecto.build_assoc(:friendships)
-|> Friendship.changeset( %{ person_id: matz.id, friend_id: me.id } )
-|> Repo.insert
+|> Friendship.changeset(%{person_id: matz.id, friend_id: me.id})
+|> Repo.insert()
matz
|> Ecto.build_assoc(:friendships)
-|> Friendship.changeset( %{ person_id: matz.id, friend_id: ezra.id } )
-|> Repo.insert
+|> Friendship.changeset(%{person_id: matz.id, friend_id: ezra.id})
+|> Repo.insert()
matz
|> Ecto.build_assoc(:friendships)
-|> Friendship.changeset( %{ person_id: matz.id, friend_id: dhh.id } )
-|> Repo.insert
+|> Friendship.changeset(%{person_id: matz.id, friend_id: dhh.id})
+|> Repo.insert()
diff --git a/zero-phoenix/priv/static/css/app.css b/zero-phoenix/priv/static/css/app.css
index edcce5e..db4901e 100644
--- a/zero-phoenix/priv/static/css/app.css
+++ b/zero-phoenix/priv/static/css/app.css
@@ -17,11 +17,6 @@ body, form, ul, table {
/* Phoenix flash messages */
.alert:empty { display: none; }
-/* Phoenix inline forms in links and buttons */
-form.link, form.button {
- display: inline;
-}
-
/* Custom page header */
.header {
border-bottom: 1px solid #e5e5e5;
diff --git a/zero-phoenix/priv/static/js/app.js b/zero-phoenix/priv/static/js/app.js
index 3130448..57999a4 100644
--- a/zero-phoenix/priv/static/js/app.js
+++ b/zero-phoenix/priv/static/js/app.js
@@ -1,3 +1,3 @@
// for phoenix_html support, including form and button helpers
// copy the following scripts into your javascript bundle:
-// * https://raw.githubusercontent.com/phoenixframework/phoenix_html/v2.3.0/priv/static/phoenix_html.js
\ No newline at end of file
+// * https://raw.githubusercontent.com/phoenixframework/phoenix_html/v2.10.0/priv/static/phoenix_html.js
\ No newline at end of file
diff --git a/zero-phoenix/priv/static/js/phoenix.js b/zero-phoenix/priv/static/js/phoenix.js
index 1128901..c56e0dd 100644
--- a/zero-phoenix/priv/static/js/phoenix.js
+++ b/zero-phoenix/priv/static/js/phoenix.js
@@ -1,189 +1,209 @@
-(function(exports){
+(function (global, factory) {
+typeof exports === 'object' ? factory(exports) :
+typeof define === 'function' && define.amd ? define(['exports'], factory) :
+factory(global.Phoenix = global.Phoenix || {});
+}(this, (function (exports) {
"use strict";
-var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol ? "symbol" : typeof obj; };
-
-var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
-
Object.defineProperty(exports, "__esModule", {
value: true
});
+var _typeof = typeof Symbol === "function" && typeof Symbol.iterator === "symbol" ? function (obj) { return typeof obj; } : function (obj) { return obj && typeof Symbol === "function" && obj.constructor === Symbol && obj !== Symbol.prototype ? "symbol" : typeof obj; };
+
+var _slicedToArray = function () { function sliceIterator(arr, i) { var _arr = []; var _n = true; var _d = false; var _e = undefined; try { for (var _i = arr[Symbol.iterator](), _s; !(_n = (_s = _i.next()).done); _n = true) { _arr.push(_s.value); if (i && _arr.length === i) break; } } catch (err) { _d = true; _e = err; } finally { try { if (!_n && _i["return"]) _i["return"](); } finally { if (_d) throw _e; } } return _arr; } return function (arr, i) { if (Array.isArray(arr)) { return arr; } else if (Symbol.iterator in Object(arr)) { return sliceIterator(arr, i); } else { throw new TypeError("Invalid attempt to destructure non-iterable instance"); } }; }();
+
+var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
+
function _toConsumableArray(arr) { if (Array.isArray(arr)) { for (var i = 0, arr2 = Array(arr.length); i < arr.length; i++) { arr2[i] = arr[i]; } return arr2; } else { return Array.from(arr); } }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
-// Phoenix Channels JavaScript client
-//
-// ## Socket Connection
-//
-// A single connection is established to the server and
-// channels are multiplexed over the connection.
-// Connect to the server using the `Socket` class:
-//
-// let socket = new Socket("/ws", {params: {userToken: "123"}})
-// socket.connect()
-//
-// The `Socket` constructor takes the mount point of the socket,
-// the authentication params, as well as options that can be found in
-// the Socket docs, such as configuring the `LongPoll` transport, and
-// heartbeat.
-//
-// ## Channels
-//
-// Channels are isolated, concurrent processes on the server that
-// subscribe to topics and broker events between the client and server.
-// To join a channel, you must provide the topic, and channel params for
-// authorization. Here's an example chat room example where `"new_msg"`
-// events are listened for, messages are pushed to the server, and
-// the channel is joined with ok/error/timeout matches:
-//
-// let channel = socket.channel("room:123", {token: roomToken})
-// channel.on("new_msg", msg => console.log("Got message", msg) )
-// $input.onEnter( e => {
-// channel.push("new_msg", {body: e.target.val}, 10000)
-// .receive("ok", (msg) => console.log("created message", msg) )
-// .receive("error", (reasons) => console.log("create failed", reasons) )
-// .receive("timeout", () => console.log("Networking issue...") )
-// })
-// channel.join()
-// .receive("ok", ({messages}) => console.log("catching up", messages) )
-// .receive("error", ({reason}) => console.log("failed join", reason) )
-// .receive("timeout", () => console.log("Networking issue. Still waiting...") )
-//
-//
-// ## Joining
-//
-// Creating a channel with `socket.channel(topic, params)`, binds the params to
-// `channel.params`, which are sent up on `channel.join()`.
-// Subsequent rejoins will send up the modified params for
-// updating authorization params, or passing up last_message_id information.
-// Successful joins receive an "ok" status, while unsuccessful joins
-// receive "error".
-//
-// ## Duplicate Join Subscriptions
-//
-// While the client may join any number of topics on any number of channels,
-// the client may only hold a single subscription for each unique topic at any
-// given time. When attempting to create a duplicate subscription,
-// the server will close the existing channel, log a warning, and
-// spawn a new channel for the topic. The client will have their
-// `channel.onClose` callbacks fired for the existing channel, and the new
-// channel join will have its receive hooks processed as normal.
-//
-// ## Pushing Messages
-//
-// From the previous example, we can see that pushing messages to the server
-// can be done with `channel.push(eventName, payload)` and we can optionally
-// receive responses from the push. Additionally, we can use
-// `receive("timeout", callback)` to abort waiting for our other `receive` hooks
-// and take action after some period of waiting. The default timeout is 5000ms.
-//
-//
-// ## Socket Hooks
-//
-// Lifecycle events of the multiplexed connection can be hooked into via
-// `socket.onError()` and `socket.onClose()` events, ie:
-//
-// socket.onError( () => console.log("there was an error with the connection!") )
-// socket.onClose( () => console.log("the connection dropped") )
-//
-//
-// ## Channel Hooks
-//
-// For each joined channel, you can bind to `onError` and `onClose` events
-// to monitor the channel lifecycle, ie:
-//
-// channel.onError( () => console.log("there was an error!") )
-// channel.onClose( () => console.log("the channel has gone away gracefully") )
-//
-// ### onError hooks
-//
-// `onError` hooks are invoked if the socket connection drops, or the channel
-// crashes on the server. In either case, a channel rejoin is attempted
-// automatically in an exponential backoff manner.
-//
-// ### onClose hooks
-//
-// `onClose` hooks are invoked only in two cases. 1) the channel explicitly
-// closed on the server, or 2). The client explicitly closed, by calling
-// `channel.leave()`
-//
-//
-// ## Presence
-//
-// The `Presence` object provides features for syncing presence information
-// from the server with the client and handling presences joining and leaving.
-//
-// ### Syncing initial state from the server
-//
-// `Presence.syncState` is used to sync the list of presences on the server
-// with the client's state. An optional `onJoin` and `onLeave` callback can
-// be provided to react to changes in the client's local presences across
-// disconnects and reconnects with the server.
-//
-// `Presence.syncDiff` is used to sync a diff of presence join and leave
-// events from the server, as they happen. Like `syncState`, `syncDiff`
-// accepts optional `onJoin` and `onLeave` callbacks to react to a user
-// joining or leaving from a device.
-//
-// ### Listing Presences
-//
-// `Presence.list` is used to return a list of presence information
-// based on the local state of metadata. By default, all presence
-// metadata is returned, but a `listBy` function can be supplied to
-// allow the client to select which metadata to use for a given presence.
-// For example, you may have a user online from different devices with a
-// a metadata status of "online", but they have set themselves to "away"
-// on another device. In this case, they app may choose to use the "away"
-// status for what appears on the UI. The example below defines a `listBy`
-// function which prioritizes the first metadata which was registered for
-// each user. This could be the first tab they opened, or the first device
-// they came online from:
-//
-// let state = {}
-// state = Presence.syncState(state, stateFromServer)
-// let listBy = (id, {metas: [first, ...rest]}) => {
-// first.count = rest.length + 1 // count of this user's presences
-// first.id = id
-// return first
-// }
-// let onlineUsers = Presence.list(state, listBy)
-//
-//
-// ### Example Usage
-//
-// // detect if user has joined for the 1st time or from another tab/device
-// let onJoin = (id, current, newPres) => {
-// if(!current){
-// console.log("user has entered for the first time", newPres)
-// } else {
-// console.log("user additional presence", newPres)
-// }
-// }
-// // detect if user has left from all tabs/devices, or is still present
-// let onLeave = (id, current, leftPres) => {
-// if(current.metas.length === 0){
-// console.log("user has left from all devices", leftPres)
-// } else {
-// console.log("user left from a device", leftPres)
-// }
-// }
-// let presences = {} // client's initial empty presence state
-// // receive initial presence data from server, sent after join
-// myChannel.on("presences", state => {
-// presences = Presence.syncState(presences, state, onJoin, onLeave)
-// displayUsers(Presence.list(presences))
-// })
-// // receive "presence_diff" from server, containing join/leave events
-// myChannel.on("presence_diff", diff => {
-// presences = Presence.syncDiff(presences, diff, onJoin, onLeave)
-// this.setState({users: Presence.list(room.presences, listBy)})
-// })
-//
-var VSN = "1.0.0";
+/**
+ * Phoenix Channels JavaScript client
+ *
+ * ## Socket Connection
+ *
+ * A single connection is established to the server and
+ * channels are multiplexed over the connection.
+ * Connect to the server using the `Socket` class:
+ *
+ * ```javascript
+ * let socket = new Socket("/socket", {params: {userToken: "123"}})
+ * socket.connect()
+ * ```
+ *
+ * The `Socket` constructor takes the mount point of the socket,
+ * the authentication params, as well as options that can be found in
+ * the Socket docs, such as configuring the `LongPoll` transport, and
+ * heartbeat.
+ *
+ * ## Channels
+ *
+ * Channels are isolated, concurrent processes on the server that
+ * subscribe to topics and broker events between the client and server.
+ * To join a channel, you must provide the topic, and channel params for
+ * authorization. Here's a chat room example where `"new_msg"`
+ * events are listened for, messages are pushed to the server, and
+ * the channel is joined with ok/error/timeout matches:
+ *
+ * ```javascript
+ * let channel = socket.channel("room:123", {token: roomToken})
+ * channel.on("new_msg", msg => console.log("Got message", msg) )
+ * $input.onEnter( e => {
+ * channel.push("new_msg", {body: e.target.val}, 10000)
+ * .receive("ok", (msg) => console.log("created message", msg) )
+ * .receive("error", (reasons) => console.log("create failed", reasons) )
+ * .receive("timeout", () => console.log("Networking issue...") )
+ * })
+ * channel.join()
+ * .receive("ok", ({messages}) => console.log("catching up", messages) )
+ * .receive("error", ({reason}) => console.log("failed join", reason) )
+ * .receive("timeout", () => console.log("Networking issue. Still waiting...") )
+ *```
+ *
+ * ## Joining
+ *
+ * Creating a channel with `socket.channel(topic, params)`, binds the params to
+ * `channel.params`, which are sent up on `channel.join()`.
+ * Subsequent rejoins will send up the modified params for
+ * updating authorization params, or passing up last_message_id information.
+ * Successful joins receive an "ok" status, while unsuccessful joins
+ * receive "error".
+ *
+ * ## Duplicate Join Subscriptions
+ *
+ * While the client may join any number of topics on any number of channels,
+ * the client may only hold a single subscription for each unique topic at any
+ * given time. When attempting to create a duplicate subscription,
+ * the server will close the existing channel, log a warning, and
+ * spawn a new channel for the topic. The client will have their
+ * `channel.onClose` callbacks fired for the existing channel, and the new
+ * channel join will have its receive hooks processed as normal.
+ *
+ * ## Pushing Messages
+ *
+ * From the previous example, we can see that pushing messages to the server
+ * can be done with `channel.push(eventName, payload)` and we can optionally
+ * receive responses from the push. Additionally, we can use
+ * `receive("timeout", callback)` to abort waiting for our other `receive` hooks
+ * and take action after some period of waiting. The default timeout is 5000ms.
+ *
+ *
+ * ## Socket Hooks
+ *
+ * Lifecycle events of the multiplexed connection can be hooked into via
+ * `socket.onError()` and `socket.onClose()` events, ie:
+ *
+ * ```javascript
+ * socket.onError( () => console.log("there was an error with the connection!") )
+ * socket.onClose( () => console.log("the connection dropped") )
+ * ```
+ *
+ *
+ * ## Channel Hooks
+ *
+ * For each joined channel, you can bind to `onError` and `onClose` events
+ * to monitor the channel lifecycle, ie:
+ *
+ * ```javascript
+ * channel.onError( () => console.log("there was an error!") )
+ * channel.onClose( () => console.log("the channel has gone away gracefully") )
+ * ```
+ *
+ * ### onError hooks
+ *
+ * `onError` hooks are invoked if the socket connection drops, or the channel
+ * crashes on the server. In either case, a channel rejoin is attempted
+ * automatically in an exponential backoff manner.
+ *
+ * ### onClose hooks
+ *
+ * `onClose` hooks are invoked only in two cases. 1) the channel explicitly
+ * closed on the server, or 2) the client explicitly closed, by calling
+ * `channel.leave()`
+ *
+ *
+ * ## Presence
+ *
+ * The `Presence` object provides features for syncing presence information
+ * from the server with the client and handling presences joining and leaving.
+ *
+ * ### Syncing initial state from the server
+ *
+ * `Presence.syncState` is used to sync the list of presences on the server
+ * with the client's state. An optional `onJoin` and `onLeave` callback can
+ * be provided to react to changes in the client's local presences across
+ * disconnects and reconnects with the server.
+ *
+ * `Presence.syncDiff` is used to sync a diff of presence join and leave
+ * events from the server, as they happen. Like `syncState`, `syncDiff`
+ * accepts optional `onJoin` and `onLeave` callbacks to react to a user
+ * joining or leaving from a device.
+ *
+ * ### Listing Presences
+ *
+ * `Presence.list` is used to return a list of presence information
+ * based on the local state of metadata. By default, all presence
+ * metadata is returned, but a `listBy` function can be supplied to
+ * allow the client to select which metadata to use for a given presence.
+ * For example, you may have a user online from different devices with
+ * a metadata status of "online", but they have set themselves to "away"
+ * on another device. In this case, the app may choose to use the "away"
+ * status for what appears on the UI. The example below defines a `listBy`
+ * function which prioritizes the first metadata which was registered for
+ * each user. This could be the first tab they opened, or the first device
+ * they came online from:
+ *
+ * ```javascript
+ * let state = {}
+ * state = Presence.syncState(state, stateFromServer)
+ * let listBy = (id, {metas: [first, ...rest]}) => {
+ * first.count = rest.length + 1 // count of this user's presences
+ * first.id = id
+ * return first
+ * }
+ * let onlineUsers = Presence.list(state, listBy)
+ * ```
+ *
+ *
+ * ### Example Usage
+ *```javascript
+ * // detect if user has joined for the 1st time or from another tab/device
+ * let onJoin = (id, current, newPres) => {
+ * if(!current){
+ * console.log("user has entered for the first time", newPres)
+ * } else {
+ * console.log("user additional presence", newPres)
+ * }
+ * }
+ * // detect if user has left from all tabs/devices, or is still present
+ * let onLeave = (id, current, leftPres) => {
+ * if(current.metas.length === 0){
+ * console.log("user has left from all devices", leftPres)
+ * } else {
+ * console.log("user left from a device", leftPres)
+ * }
+ * }
+ * let presences = {} // client's initial empty presence state
+ * // receive initial presence data from server, sent after join
+ * myChannel.on("presence_state", state => {
+ * presences = Presence.syncState(presences, state, onJoin, onLeave)
+ * displayUsers(Presence.list(presences))
+ * })
+ * // receive "presence_diff" from server, containing join/leave events
+ * myChannel.on("presence_diff", diff => {
+ * presences = Presence.syncDiff(presences, diff, onJoin, onLeave)
+ * this.setState({users: Presence.list(room.presences, listBy)})
+ * })
+ * ```
+ * @module phoenix
+ */
+
+var VSN = "2.0.0";
var SOCKET_STATES = { connecting: 0, open: 1, closing: 2, closed: 3 };
var DEFAULT_TIMEOUT = 10000;
+var WS_CLOSE_NORMAL = 1000;
var CHANNEL_STATES = {
closed: "closed",
errored: "errored",
@@ -198,21 +218,21 @@ var CHANNEL_EVENTS = {
reply: "phx_reply",
leave: "phx_leave"
};
+var CHANNEL_LIFECYCLE_EVENTS = [CHANNEL_EVENTS.close, CHANNEL_EVENTS.error, CHANNEL_EVENTS.join, CHANNEL_EVENTS.reply, CHANNEL_EVENTS.leave];
var TRANSPORTS = {
longpoll: "longpoll",
websocket: "websocket"
};
-var Push = function () {
-
- // Initializes the Push
- //
- // channel - The Channel
- // event - The event, for example `"phx_join"`
- // payload - The payload, for example `{user_id: 123}`
- // timeout - The push timeout in milliseconds
- //
+/**
+ * Initializes the Push
+ * @param {Channel} channel - The Channel
+ * @param {string} event - The event, for example `"phx_join"`
+ * @param {Object} payload - The payload, for example `{user_id: 123}`
+ * @param {number} timeout - The push timeout in milliseconds
+ */
+var Push = function () {
function Push(channel, event, payload, timeout) {
_classCallCheck(this, Push);
@@ -226,17 +246,24 @@ var Push = function () {
this.sent = false;
}
+ /**
+ *
+ * @param {number} timeout
+ */
+
+
_createClass(Push, [{
key: "resend",
value: function resend(timeout) {
this.timeout = timeout;
- this.cancelRefEvent();
- this.ref = null;
- this.refEvent = null;
- this.receivedResp = null;
- this.sent = false;
+ this.reset();
this.send();
}
+
+ /**
+ *
+ */
+
}, {
key: "send",
value: function send() {
@@ -249,9 +276,17 @@ var Push = function () {
topic: this.channel.topic,
event: this.event,
payload: this.payload,
- ref: this.ref
+ ref: this.ref,
+ join_ref: this.channel.joinRef()
});
}
+
+ /**
+ *
+ * @param {*} status
+ * @param {*} callback
+ */
+
}, {
key: "receive",
value: function receive(status, callback) {
@@ -265,12 +300,21 @@ var Push = function () {
// private
+ }, {
+ key: "reset",
+ value: function reset() {
+ this.cancelRefEvent();
+ this.ref = null;
+ this.refEvent = null;
+ this.receivedResp = null;
+ this.sent = false;
+ }
}, {
key: "matchReceive",
value: function matchReceive(_ref) {
- var status = _ref.status;
- var response = _ref.response;
- var ref = _ref.ref;
+ var status = _ref.status,
+ response = _ref.response,
+ ref = _ref.ref;
this.recHooks.filter(function (h) {
return h.status === status;
@@ -298,7 +342,7 @@ var Push = function () {
var _this = this;
if (this.timeoutTimer) {
- return;
+ this.cancelTimeout();
}
this.ref = this.channel.socket.makeRef();
this.refEvent = this.channel.replyEventName(this.ref);
@@ -329,6 +373,14 @@ var Push = function () {
return Push;
}();
+/**
+ *
+ * @param {string} topic
+ * @param {Object} params
+ * @param {Socket} socket
+ */
+
+
var Channel = exports.Channel = function () {
function Channel(topic, params, socket) {
var _this2 = this;
@@ -373,8 +425,11 @@ var Channel = exports.Channel = function () {
if (!_this2.isJoining()) {
return;
}
- _this2.socket.log("channel", "timeout " + _this2.topic, _this2.joinPush.timeout);
+ _this2.socket.log("channel", "timeout " + _this2.topic + " (" + _this2.joinRef() + ")", _this2.joinPush.timeout);
+ var leavePush = new Push(_this2, CHANNEL_EVENTS.leave, {}, _this2.timeout);
+ leavePush.send();
_this2.state = CHANNEL_STATES.errored;
+ _this2.joinPush.reset();
_this2.rejoinTimer.scheduleTimeout();
});
this.on(CHANNEL_EVENTS.reply, function (payload, ref) {
@@ -393,7 +448,7 @@ var Channel = exports.Channel = function () {
}, {
key: "join",
value: function join() {
- var timeout = arguments.length <= 0 || arguments[0] === undefined ? this.timeout : arguments[0];
+ var timeout = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : this.timeout;
if (this.joinedOnce) {
throw "tried to join multiple times. 'join' can only be called a single time per channel instance";
@@ -435,7 +490,7 @@ var Channel = exports.Channel = function () {
}, {
key: "push",
value: function push(event, payload) {
- var timeout = arguments.length <= 2 || arguments[2] === undefined ? this.timeout : arguments[2];
+ var timeout = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : this.timeout;
if (!this.joinedOnce) {
throw "tried to push '" + event + "' to '" + this.topic + "' before joining. Use channel.join() before pushing events";
@@ -451,30 +506,32 @@ var Channel = exports.Channel = function () {
return pushEvent;
}
- // Leaves the channel
- //
- // Unsubscribes from server events, and
- // instructs channel to terminate on server
- //
- // Triggers onClose() hooks
- //
- // To receive leave acknowledgements, use the a `receive`
- // hook to bind to the server ack, ie:
- //
- // channel.leave().receive("ok", () => alert("left!") )
- //
+ /** Leaves the channel
+ *
+ * Unsubscribes from server events, and
+ * instructs channel to terminate on server
+ *
+ * Triggers onClose() hooks
+ *
+ * To receive leave acknowledgements, use a `receive`
+ * hook to bind to the server ack, ie:
+ *
+ * ```javascript
+ * channel.leave().receive("ok", () => alert("left!") )
+ * ```
+ */
}, {
key: "leave",
value: function leave() {
var _this3 = this;
- var timeout = arguments.length <= 0 || arguments[0] === undefined ? this.timeout : arguments[0];
+ var timeout = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : this.timeout;
this.state = CHANNEL_STATES.leaving;
var onClose = function onClose() {
_this3.socket.log("channel", "leave " + _this3.topic);
- _this3.trigger(CHANNEL_EVENTS.close, "leave", _this3.joinRef());
+ _this3.trigger(CHANNEL_EVENTS.close, "leave");
};
var leavePush = new Push(this, CHANNEL_EVENTS.leave, {}, timeout);
leavePush.receive("ok", function () {
@@ -490,12 +547,14 @@ var Channel = exports.Channel = function () {
return leavePush;
}
- // Overridable message hook
- //
- // Receives all events for specialized message handling
- // before dispatching to the channel callbacks.
- //
- // Must return the payload, modified or unmodified
+ /**
+ * Overridable message hook
+ *
+ * Receives all events for specialized message handling
+ * before dispatching to the channel callbacks.
+ *
+ * Must return the payload, modified or unmodified
+ */
}, {
key: "onMessage",
@@ -507,8 +566,18 @@ var Channel = exports.Channel = function () {
}, {
key: "isMember",
- value: function isMember(topic) {
- return this.topic === topic;
+ value: function isMember(topic, event, payload, joinRef) {
+ if (this.topic !== topic) {
+ return false;
+ }
+ var isLifecycleEvent = CHANNEL_LIFECYCLE_EVENTS.indexOf(event) >= 0;
+
+ if (joinRef && isLifecycleEvent && joinRef !== this.joinRef()) {
+ this.socket.log("channel", "dropping outdated message", { topic: topic, event: event, payload: payload, joinRef: joinRef });
+ return false;
+ } else {
+ return true;
+ }
}
}, {
key: "joinRef",
@@ -524,7 +593,7 @@ var Channel = exports.Channel = function () {
}, {
key: "rejoin",
value: function rejoin() {
- var timeout = arguments.length <= 0 || arguments[0] === undefined ? this.timeout : arguments[0];
+ var timeout = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : this.timeout;
if (this.isLeaving()) {
return;
}
@@ -532,16 +601,10 @@ var Channel = exports.Channel = function () {
}
}, {
key: "trigger",
- value: function trigger(event, payload, ref) {
- var close = CHANNEL_EVENTS.close;
- var error = CHANNEL_EVENTS.error;
- var leave = CHANNEL_EVENTS.leave;
- var join = CHANNEL_EVENTS.join;
+ value: function trigger(event, payload, ref, joinRef) {
+ var _this4 = this;
- if (ref && [close, error, leave, join].indexOf(event) >= 0 && ref !== this.joinRef()) {
- return;
- }
- var handledPayload = this.onMessage(event, payload, ref);
+ var handledPayload = this.onMessage(event, payload, ref, joinRef);
if (payload && !handledPayload) {
throw "channel onMessage callbacks must return the payload, modified or unmodified";
}
@@ -549,7 +612,7 @@ var Channel = exports.Channel = function () {
this.bindings.filter(function (bind) {
return bind.event === event;
}).map(function (bind) {
- return bind.callback(handledPayload, ref);
+ return bind.callback(handledPayload, ref, joinRef || _this4.joinRef());
});
}
}, {
@@ -587,41 +650,84 @@ var Channel = exports.Channel = function () {
return Channel;
}();
-var Socket = exports.Socket = function () {
+var Serializer = {
+ encode: function encode(msg, callback) {
+ var payload = [msg.join_ref, msg.ref, msg.topic, msg.event, msg.payload];
+ return callback(JSON.stringify(payload));
+ },
+ decode: function decode(rawPayload, callback) {
+ var _JSON$parse = JSON.parse(rawPayload),
+ _JSON$parse2 = _slicedToArray(_JSON$parse, 5),
+ join_ref = _JSON$parse2[0],
+ ref = _JSON$parse2[1],
+ topic = _JSON$parse2[2],
+ event = _JSON$parse2[3],
+ payload = _JSON$parse2[4];
+
+ return callback({ join_ref: join_ref, ref: ref, topic: topic, event: event, payload: payload });
+ }
+};
- // Initializes the Socket
- //
- // endPoint - The string WebSocket endpoint, ie, "ws://example.com/ws",
- // "wss://example.com"
- // "/ws" (inherited host & protocol)
- // opts - Optional configuration
- // transport - The Websocket Transport, for example WebSocket or Phoenix.LongPoll.
- // Defaults to WebSocket with automatic LongPoll fallback.
- // timeout - The default timeout in milliseconds to trigger push timeouts.
- // Defaults `DEFAULT_TIMEOUT`
- // heartbeatIntervalMs - The millisec interval to send a heartbeat message
- // reconnectAfterMs - The optional function that returns the millsec
- // reconnect interval. Defaults to stepped backoff of:
- //
- // function(tries){
- // return [1000, 5000, 10000][tries - 1] || 10000
- // }
- //
- // logger - The optional function for specialized logging, ie:
- // `logger: (kind, msg, data) => { console.log(`${kind}: ${msg}`, data) }
- //
- // longpollerTimeout - The maximum timeout of a long poll AJAX request.
- // Defaults to 20s (double the server long poll timer).
- //
- // params - The optional params to pass when connecting
- //
- // For IE8 support use an ES5-shim (https://github.com/es-shims/es5-shim)
- //
+/** Initializes the Socket
+ *
+ *
+ * For IE8 support use an ES5-shim (https://github.com/es-shims/es5-shim)
+ *
+ * @param {string} endPoint - The string WebSocket endpoint, ie, `"ws://example.com/socket"`,
+ * `"wss://example.com"`
+ * `"/socket"` (inherited host & protocol)
+ * @param {Object} opts - Optional configuration
+ * @param {string} opts.transport - The Websocket Transport, for example WebSocket or Phoenix.LongPoll.
+ *
+ * Defaults to WebSocket with automatic LongPoll fallback.
+ * @param {Function} opts.encode - The function to encode outgoing messages.
+ *
+ * Defaults to JSON:
+ *
+ * ```javascript
+ * (payload, callback) => callback(JSON.stringify(payload))
+ * ```
+ *
+ * @param {Function} opts.decode - The function to decode incoming messages.
+ *
+ * Defaults to JSON:
+ *
+ * ```javascript
+ * (payload, callback) => callback(JSON.parse(payload))
+ * ```
+ *
+ * @param {number} opts.timeout - The default timeout in milliseconds to trigger push timeouts.
+ *
+ * Defaults `DEFAULT_TIMEOUT`
+ * @param {number} opts.heartbeatIntervalMs - The millisec interval to send a heartbeat message
+ * @param {number} opts.reconnectAfterMs - The optional function that returns the millsec reconnect interval.
+ *
+ * Defaults to stepped backoff of:
+ *
+ * ```javascript
+ * function(tries){
+ * return [1000, 5000, 10000][tries - 1] || 10000
+ * }
+ * ```
+ * @param {Function} opts.logger - The optional function for specialized logging, ie:
+ * ```javascript
+ * logger: (kind, msg, data) => { console.log(`${kind}: ${msg}`, data) }
+ * ```
+ *
+ * @param {number} opts.longpollerTimeout - The maximum timeout of a long poll AJAX request.
+ *
+ * Defaults to 20s (double the server long poll timer).
+ *
+ * @param {Object} opts.params - The optional params to pass when connecting
+ *
+ *
+*/
+var Socket = exports.Socket = function () {
function Socket(endPoint) {
- var _this4 = this;
+ var _this5 = this;
- var opts = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
+ var opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
_classCallCheck(this, Socket);
@@ -631,6 +737,15 @@ var Socket = exports.Socket = function () {
this.ref = 0;
this.timeout = opts.timeout || DEFAULT_TIMEOUT;
this.transport = opts.transport || window.WebSocket || LongPoll;
+ this.defaultEncoder = Serializer.encode;
+ this.defaultDecoder = Serializer.decode;
+ if (this.transport !== LongPoll) {
+ this.encode = opts.encode || this.defaultEncoder;
+ this.decode = opts.decode || this.defaultDecoder;
+ } else {
+ this.encode = this.defaultEncoder;
+ this.decode = this.defaultDecoder;
+ }
this.heartbeatIntervalMs = opts.heartbeatIntervalMs || 30000;
this.reconnectAfterMs = opts.reconnectAfterMs || function (tries) {
return [1000, 2000, 5000, 10000][tries - 1] || 10000;
@@ -639,9 +754,11 @@ var Socket = exports.Socket = function () {
this.longpollerTimeout = opts.longpollerTimeout || 20000;
this.params = opts.params || {};
this.endPoint = endPoint + "/" + TRANSPORTS.websocket;
+ this.heartbeatTimer = null;
+ this.pendingHeartbeatRef = null;
this.reconnectTimer = new Timer(function () {
- _this4.disconnect(function () {
- return _this4.connect();
+ _this5.disconnect(function () {
+ return _this5.connect();
});
}, this.reconnectAfterMs);
}
@@ -679,12 +796,15 @@ var Socket = exports.Socket = function () {
callback && callback();
}
- // params - The params to send when connecting, for example `{user_id: userToken}`
+ /**
+ *
+ * @param {Object} params - The params to send when connecting, for example `{user_id: userToken}`
+ */
}, {
key: "connect",
value: function connect(params) {
- var _this5 = this;
+ var _this6 = this;
if (params) {
console && console.log("passing params to connect is deprecated. Instead pass :params to the Socket constructor");
@@ -697,20 +817,25 @@ var Socket = exports.Socket = function () {
this.conn = new this.transport(this.endPointURL());
this.conn.timeout = this.longpollerTimeout;
this.conn.onopen = function () {
- return _this5.onConnOpen();
+ return _this6.onConnOpen();
};
this.conn.onerror = function (error) {
- return _this5.onConnError(error);
+ return _this6.onConnError(error);
};
this.conn.onmessage = function (event) {
- return _this5.onConnMessage(event);
+ return _this6.onConnMessage(event);
};
this.conn.onclose = function (event) {
- return _this5.onConnClose(event);
+ return _this6.onConnClose(event);
};
}
- // Logs the message. Override `this.logger` for specialized logging. noops by default
+ /**
+ * Logs the message. Override `this.logger` for specialized logging. noops by default
+ * @param {string} kind
+ * @param {string} msg
+ * @param {Object} data
+ */
}, {
key: "log",
@@ -748,15 +873,15 @@ var Socket = exports.Socket = function () {
}, {
key: "onConnOpen",
value: function onConnOpen() {
- var _this6 = this;
+ var _this7 = this;
- this.log("transport", "connected to " + this.endPointURL(), this.transport.prototype);
+ this.log("transport", "connected to " + this.endPointURL());
this.flushSendBuffer();
this.reconnectTimer.reset();
if (!this.conn.skipHeartbeat) {
clearInterval(this.heartbeatTimer);
this.heartbeatTimer = setInterval(function () {
- return _this6.sendHeartbeat();
+ return _this7.sendHeartbeat();
}, this.heartbeatIntervalMs);
}
this.stateChangeCallbacks.open.forEach(function (callback) {
@@ -819,7 +944,7 @@ var Socket = exports.Socket = function () {
}, {
key: "channel",
value: function channel(topic) {
- var chanParams = arguments.length <= 1 || arguments[1] === undefined ? {} : arguments[1];
+ var chanParams = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};
var chan = new Channel(topic, chanParams, this);
this.channels.push(chan);
@@ -828,17 +953,20 @@ var Socket = exports.Socket = function () {
}, {
key: "push",
value: function push(data) {
- var _this7 = this;
+ var _this8 = this;
- var topic = data.topic;
- var event = data.event;
- var payload = data.payload;
- var ref = data.ref;
+ var topic = data.topic,
+ event = data.event,
+ payload = data.payload,
+ ref = data.ref,
+ join_ref = data.join_ref;
var callback = function callback() {
- return _this7.conn.send(JSON.stringify(data));
+ _this8.encode(data, function (result) {
+ _this8.conn.send(result);
+ });
};
- this.log("push", topic + " " + event + " (" + ref + ")", payload);
+ this.log("push", topic + " " + event + " (" + join_ref + ", " + ref + ")", payload);
if (this.isConnected()) {
callback();
} else {
@@ -846,7 +974,9 @@ var Socket = exports.Socket = function () {
}
}
- // Return the next message ref, accounting for overflows
+ /**
+ * Return the next message ref, accounting for overflows
+ */
}, {
key: "makeRef",
@@ -866,7 +996,14 @@ var Socket = exports.Socket = function () {
if (!this.isConnected()) {
return;
}
- this.push({ topic: "phoenix", event: "heartbeat", payload: {}, ref: this.makeRef() });
+ if (this.pendingHeartbeatRef) {
+ this.pendingHeartbeatRef = null;
+ this.log("transport", "heartbeat timeout. Attempting to re-establish connection");
+ this.conn.close(WS_CLOSE_NORMAL, "heartbeat timeout");
+ return;
+ }
+ this.pendingHeartbeatRef = this.makeRef();
+ this.push({ topic: "phoenix", event: "heartbeat", payload: {}, ref: this.pendingHeartbeatRef });
}
}, {
key: "flushSendBuffer",
@@ -881,20 +1018,28 @@ var Socket = exports.Socket = function () {
}, {
key: "onConnMessage",
value: function onConnMessage(rawMessage) {
- var msg = JSON.parse(rawMessage.data);
- var topic = msg.topic;
- var event = msg.event;
- var payload = msg.payload;
- var ref = msg.ref;
-
- this.log("receive", (payload.status || "") + " " + topic + " " + event + " " + (ref && "(" + ref + ")" || ""), payload);
- this.channels.filter(function (channel) {
- return channel.isMember(topic);
- }).forEach(function (channel) {
- return channel.trigger(event, payload, ref);
- });
- this.stateChangeCallbacks.message.forEach(function (callback) {
- return callback(msg);
+ var _this9 = this;
+
+ this.decode(rawMessage.data, function (msg) {
+ var topic = msg.topic,
+ event = msg.event,
+ payload = msg.payload,
+ ref = msg.ref,
+ join_ref = msg.join_ref;
+
+ if (ref && ref === _this9.pendingHeartbeatRef) {
+ _this9.pendingHeartbeatRef = null;
+ }
+
+ _this9.log("receive", (payload.status || "") + " " + topic + " " + event + " " + (ref && "(" + ref + ")" || ""), payload);
+ _this9.channels.filter(function (channel) {
+ return channel.isMember(topic, event, payload, join_ref);
+ }).forEach(function (channel) {
+ return channel.trigger(event, payload, ref, join_ref);
+ });
+ _this9.stateChangeCallbacks.message.forEach(function (callback) {
+ return callback(msg);
+ });
});
}
}]);
@@ -944,7 +1089,7 @@ var LongPoll = exports.LongPoll = function () {
}, {
key: "poll",
value: function poll() {
- var _this8 = this;
+ var _this10 = this;
if (!(this.readyState === SOCKET_STATES.open || this.readyState === SOCKET_STATES.connecting)) {
return;
@@ -952,11 +1097,11 @@ var LongPoll = exports.LongPoll = function () {
Ajax.request("GET", this.endpointURL(), "application/json", null, this.timeout, this.ontimeout.bind(this), function (resp) {
if (resp) {
- var status = resp.status;
- var token = resp.token;
- var messages = resp.messages;
+ var status = resp.status,
+ token = resp.token,
+ messages = resp.messages;
- _this8.token = token;
+ _this10.token = token;
} else {
var status = 0;
}
@@ -964,22 +1109,22 @@ var LongPoll = exports.LongPoll = function () {
switch (status) {
case 200:
messages.forEach(function (msg) {
- return _this8.onmessage({ data: JSON.stringify(msg) });
+ return _this10.onmessage({ data: msg });
});
- _this8.poll();
+ _this10.poll();
break;
case 204:
- _this8.poll();
+ _this10.poll();
break;
case 410:
- _this8.readyState = SOCKET_STATES.open;
- _this8.onopen();
- _this8.poll();
+ _this10.readyState = SOCKET_STATES.open;
+ _this10.onopen();
+ _this10.poll();
break;
case 0:
case 500:
- _this8.onerror();
- _this8.closeAndRetry();
+ _this10.onerror();
+ _this10.closeAndRetry();
break;
default:
throw "unhandled poll status " + status;
@@ -989,12 +1134,12 @@ var LongPoll = exports.LongPoll = function () {
}, {
key: "send",
value: function send(body) {
- var _this9 = this;
+ var _this11 = this;
Ajax.request("POST", this.endpointURL(), "application/json", body, this.timeout, this.onerror.bind(this, "timeout"), function (resp) {
if (!resp || resp.status !== 200) {
- _this9.onerror(status);
- _this9.closeAndRetry();
+ _this11.onerror(resp && resp.status);
+ _this11.closeAndRetry();
}
});
}
@@ -1021,20 +1166,20 @@ var Ajax = exports.Ajax = function () {
var req = new XDomainRequest(); // IE8, IE9
this.xdomainRequest(req, method, endPoint, body, timeout, ontimeout, callback);
} else {
- var req = window.XMLHttpRequest ? new XMLHttpRequest() : // IE7+, Firefox, Chrome, Opera, Safari
+ var _req = window.XMLHttpRequest ? new window.XMLHttpRequest() : // IE7+, Firefox, Chrome, Opera, Safari
new ActiveXObject("Microsoft.XMLHTTP"); // IE6, IE5
- this.xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback);
+ this.xhrRequest(_req, method, endPoint, accept, body, timeout, ontimeout, callback);
}
}
}, {
key: "xdomainRequest",
value: function xdomainRequest(req, method, endPoint, body, timeout, ontimeout, callback) {
- var _this10 = this;
+ var _this12 = this;
req.timeout = timeout;
req.open(method, endPoint);
req.onload = function () {
- var response = _this10.parseJSON(req.responseText);
+ var response = _this12.parseJSON(req.responseText);
callback && callback(response);
};
if (ontimeout) {
@@ -1049,17 +1194,17 @@ var Ajax = exports.Ajax = function () {
}, {
key: "xhrRequest",
value: function xhrRequest(req, method, endPoint, accept, body, timeout, ontimeout, callback) {
- var _this11 = this;
+ var _this13 = this;
- req.timeout = timeout;
req.open(method, endPoint, true);
+ req.timeout = timeout;
req.setRequestHeader("Content-Type", accept);
req.onerror = function () {
callback && callback(null);
};
req.onreadystatechange = function () {
- if (req.readyState === _this11.states.complete && callback) {
- var response = _this11.parseJSON(req.responseText);
+ if (req.readyState === _this13.states.complete && callback) {
+ var response = _this13.parseJSON(req.responseText);
callback(response);
}
};
@@ -1072,7 +1217,16 @@ var Ajax = exports.Ajax = function () {
}, {
key: "parseJSON",
value: function parseJSON(resp) {
- return resp && resp !== "" ? JSON.parse(resp) : null;
+ if (!resp || resp === "") {
+ return null;
+ }
+
+ try {
+ return JSON.parse(resp);
+ } catch (e) {
+ console && console.log("failed to parse JSON response", resp);
+ return null;
+ }
}
}, {
key: "serialize",
@@ -1111,7 +1265,7 @@ Ajax.states = { complete: 4 };
var Presence = exports.Presence = {
syncState: function syncState(currentState, newState, onJoin, onLeave) {
- var _this12 = this;
+ var _this14 = this;
var state = this.clone(currentState);
var joins = {};
@@ -1125,28 +1279,26 @@ var Presence = exports.Presence = {
this.map(newState, function (key, newPresence) {
var currentPresence = state[key];
if (currentPresence) {
- (function () {
- var newRefs = newPresence.metas.map(function (m) {
- return m.phx_ref;
- });
- var curRefs = currentPresence.metas.map(function (m) {
- return m.phx_ref;
- });
- var joinedMetas = newPresence.metas.filter(function (m) {
- return curRefs.indexOf(m.phx_ref) < 0;
- });
- var leftMetas = currentPresence.metas.filter(function (m) {
- return newRefs.indexOf(m.phx_ref) < 0;
- });
- if (joinedMetas.length > 0) {
- joins[key] = newPresence;
- joins[key].metas = joinedMetas;
- }
- if (leftMetas.length > 0) {
- leaves[key] = _this12.clone(currentPresence);
- leaves[key].metas = leftMetas;
- }
- })();
+ var newRefs = newPresence.metas.map(function (m) {
+ return m.phx_ref;
+ });
+ var curRefs = currentPresence.metas.map(function (m) {
+ return m.phx_ref;
+ });
+ var joinedMetas = newPresence.metas.filter(function (m) {
+ return curRefs.indexOf(m.phx_ref) < 0;
+ });
+ var leftMetas = currentPresence.metas.filter(function (m) {
+ return newRefs.indexOf(m.phx_ref) < 0;
+ });
+ if (joinedMetas.length > 0) {
+ joins[key] = newPresence;
+ joins[key].metas = joinedMetas;
+ }
+ if (leftMetas.length > 0) {
+ leaves[key] = _this14.clone(currentPresence);
+ leaves[key].metas = leftMetas;
+ }
} else {
joins[key] = newPresence;
}
@@ -1154,8 +1306,8 @@ var Presence = exports.Presence = {
return this.syncDiff(state, { joins: joins, leaves: leaves }, onJoin, onLeave);
},
syncDiff: function syncDiff(currentState, _ref2, onJoin, onLeave) {
- var joins = _ref2.joins;
- var leaves = _ref2.leaves;
+ var joins = _ref2.joins,
+ leaves = _ref2.leaves;
var state = this.clone(currentState);
if (!onJoin) {
@@ -1205,6 +1357,7 @@ var Presence = exports.Presence = {
});
},
+
// private
map: function map(obj, func) {
@@ -1217,19 +1370,25 @@ var Presence = exports.Presence = {
}
};
-// Creates a timer that accepts a `timerCalc` function to perform
-// calculated timeout retries, such as exponential backoff.
-//
-// ## Examples
-//
-// let reconnectTimer = new Timer(() => this.connect(), function(tries){
-// return [1000, 5000, 10000][tries - 1] || 10000
-// })
-// reconnectTimer.scheduleTimeout() // fires after 1000
-// reconnectTimer.scheduleTimeout() // fires after 5000
-// reconnectTimer.reset()
-// reconnectTimer.scheduleTimeout() // fires after 1000
-//
+/**
+ *
+ * Creates a timer that accepts a `timerCalc` function to perform
+ * calculated timeout retries, such as exponential backoff.
+ *
+ * ## Examples
+ *
+ * ```javascript
+ * let reconnectTimer = new Timer(() => this.connect(), function(tries){
+ * return [1000, 5000, 10000][tries - 1] || 10000
+ * })
+ * reconnectTimer.scheduleTimeout() // fires after 1000
+ * reconnectTimer.scheduleTimeout() // fires after 5000
+ * reconnectTimer.reset()
+ * reconnectTimer.scheduleTimeout() // fires after 1000
+ * ```
+ * @param {Function} callback
+ * @param {Function} timerCalc
+ */
var Timer = function () {
function Timer(callback, timerCalc) {
@@ -1248,18 +1407,20 @@ var Timer = function () {
clearTimeout(this.timer);
}
- // Cancels any previous scheduleTimeout and schedules callback
+ /**
+ * Cancels any previous scheduleTimeout and schedules callback
+ */
}, {
key: "scheduleTimeout",
value: function scheduleTimeout() {
- var _this13 = this;
+ var _this15 = this;
clearTimeout(this.timer);
this.timer = setTimeout(function () {
- _this13.tries = _this13.tries + 1;
- _this13.callback();
+ _this15.tries = _this15.tries + 1;
+ _this15.callback();
}, this.timerCalc(this.tries + 1));
}
}]);
@@ -1267,5 +1428,4 @@ var Timer = function () {
return Timer;
}();
-})(typeof(exports) === "undefined" ? window.Phoenix = window.Phoenix || {} : exports);
-
+})));
diff --git a/zero-phoenix/test/controllers/person_controller_test.exs b/zero-phoenix/test/controllers/person_controller_test.exs
deleted file mode 100644
index 5993f60..0000000
--- a/zero-phoenix/test/controllers/person_controller_test.exs
+++ /dev/null
@@ -1,63 +0,0 @@
-defmodule ZeroPhoenix.PersonControllerTest do
- use ZeroPhoenix.ConnCase
-
- alias ZeroPhoenix.Person
- @valid_attrs %{"email": "some content", first_name: "some content", last_name: "some content", username: "some content"}
- @invalid_attrs %{}
-
- setup %{conn: conn} do
- {:ok, conn: put_req_header(conn, "accept", "application/json")}
- end
-
- test "lists all entries on index", %{conn: conn} do
- conn = get conn, person_path(conn, :index)
- assert json_response(conn, 200)["data"] == []
- end
-
- test "shows chosen resource", %{conn: conn} do
- person = Repo.insert! %Person{}
- conn = get conn, person_path(conn, :show, person)
- assert json_response(conn, 200)["data"] == %{"id" => person.id,
- "first_name" => person.first_name,
- "last_name" => person.last_name,
- "username" => person.username,
- "email" => person.email}
- end
-
- test "renders page not found when id is nonexistent", %{conn: conn} do
- assert_error_sent 404, fn ->
- get conn, person_path(conn, :show, -1)
- end
- end
-
- test "creates and renders resource when data is valid", %{conn: conn} do
- conn = post conn, person_path(conn, :create), person: @valid_attrs
- assert json_response(conn, 201)["data"]["id"]
- assert Repo.get_by(Person, @valid_attrs)
- end
-
- test "does not create resource and renders errors when data is invalid", %{conn: conn} do
- conn = post conn, person_path(conn, :create), person: @invalid_attrs
- assert json_response(conn, 422)["errors"] != %{}
- end
-
- test "updates and renders chosen resource when data is valid", %{conn: conn} do
- person = Repo.insert! %Person{}
- conn = put conn, person_path(conn, :update, person), person: @valid_attrs
- assert json_response(conn, 200)["data"]["id"]
- assert Repo.get_by(Person, @valid_attrs)
- end
-
- test "does not update chosen resource and renders errors when data is invalid", %{conn: conn} do
- person = Repo.insert! %Person{}
- conn = put conn, person_path(conn, :update, person), person: @invalid_attrs
- assert json_response(conn, 422)["errors"] != %{}
- end
-
- test "deletes chosen resource", %{conn: conn} do
- person = Repo.insert! %Person{}
- conn = delete conn, person_path(conn, :delete, person)
- assert response(conn, 204)
- refute Repo.get(Person, person.id)
- end
-end
diff --git a/zero-phoenix/test/support/channel_case.ex b/zero-phoenix/test/support/channel_case.ex
index 3ff8133..1dfbc03 100644
--- a/zero-phoenix/test/support/channel_case.ex
+++ b/zero-phoenix/test/support/channel_case.ex
@@ -1,11 +1,11 @@
-defmodule ZeroPhoenix.ChannelCase do
+defmodule ZeroPhoenixWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
- to build and query models.
+ to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
@@ -20,24 +20,18 @@ defmodule ZeroPhoenix.ChannelCase do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
- alias ZeroPhoenix.Repo
- import Ecto
- import Ecto.Changeset
- import Ecto.Query
-
-
# The default endpoint for testing
- @endpoint ZeroPhoenix.Endpoint
+ @endpoint ZeroPhoenixWeb.Endpoint
end
end
+
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(ZeroPhoenix.Repo)
-
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(ZeroPhoenix.Repo, {:shared, self()})
end
-
:ok
end
+
end
diff --git a/zero-phoenix/test/support/conn_case.ex b/zero-phoenix/test/support/conn_case.ex
index 395e4ac..f9a7206 100644
--- a/zero-phoenix/test/support/conn_case.ex
+++ b/zero-phoenix/test/support/conn_case.ex
@@ -1,11 +1,11 @@
-defmodule ZeroPhoenix.ConnCase do
+defmodule ZeroPhoenixWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
- to build and query models.
+ to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
@@ -19,26 +19,20 @@ defmodule ZeroPhoenix.ConnCase do
quote do
# Import conveniences for testing with connections
use Phoenix.ConnTest
-
- alias ZeroPhoenix.Repo
- import Ecto
- import Ecto.Changeset
- import Ecto.Query
-
- import ZeroPhoenix.Router.Helpers
+ import ZeroPhoenixWeb.Router.Helpers
# The default endpoint for testing
- @endpoint ZeroPhoenix.Endpoint
+ @endpoint ZeroPhoenixWeb.Endpoint
end
end
+
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(ZeroPhoenix.Repo)
-
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(ZeroPhoenix.Repo, {:shared, self()})
end
-
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
+
end
diff --git a/zero-phoenix/test/support/data_case.ex b/zero-phoenix/test/support/data_case.ex
new file mode 100644
index 0000000..2782f1d
--- /dev/null
+++ b/zero-phoenix/test/support/data_case.ex
@@ -0,0 +1,53 @@
+defmodule ZeroPhoenix.DataCase do
+ @moduledoc """
+ This module defines the setup for tests requiring
+ access to the application's data layer.
+
+ You may define functions here to be used as helpers in
+ your tests.
+
+ Finally, if the test case interacts with the database,
+ it cannot be async. For this reason, every test runs
+ inside a transaction which is reset at the beginning
+ of the test unless the test case is marked as async.
+ """
+
+ use ExUnit.CaseTemplate
+
+ using do
+ quote do
+ alias ZeroPhoenix.Repo
+
+ import Ecto
+ import Ecto.Changeset
+ import Ecto.Query
+ import ZeroPhoenix.DataCase
+ end
+ end
+
+ setup tags do
+ :ok = Ecto.Adapters.SQL.Sandbox.checkout(ZeroPhoenix.Repo)
+
+ unless tags[:async] do
+ Ecto.Adapters.SQL.Sandbox.mode(ZeroPhoenix.Repo, {:shared, self()})
+ end
+
+ :ok
+ end
+
+ @doc """
+ A helper that transform changeset errors to a map of messages.
+
+ assert {:error, changeset} = Accounts.create_user(%{password: "short"})
+ assert "password is too short" in errors_on(changeset).password
+ assert %{password: ["password is too short"]} = errors_on(changeset)
+
+ """
+ def errors_on(changeset) do
+ Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
+ Enum.reduce(opts, message, fn {key, value}, acc ->
+ String.replace(acc, "%{#{key}}", to_string(value))
+ end)
+ end)
+ end
+end
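
The `errors_on/1` helper above flattens changeset errors into a map keyed by field. A minimal sketch of how a `DataCase` test might call it (the module name is hypothetical; `Accounts.create_person/1` is the context function added elsewhere in this diff, and the "can't be blank" message assumes Ecto's default `validate_required/3` error):

```elixir
defmodule ZeroPhoenix.ErrorsOnSketchTest do
  use ZeroPhoenix.DataCase

  alias ZeroPhoenix.Accounts

  test "errors_on/1 returns a map of messages keyed by field" do
    # Every Person field is required, so an empty attrs map fails validation.
    assert {:error, changeset} = Accounts.create_person(%{})

    # errors_on/1 is the helper defined in ZeroPhoenix.DataCase above.
    assert "can't be blank" in errors_on(changeset).first_name
  end
end
```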
diff --git a/zero-phoenix/test/support/model_case.ex b/zero-phoenix/test/support/model_case.ex
deleted file mode 100644
index e24e908..0000000
--- a/zero-phoenix/test/support/model_case.ex
+++ /dev/null
@@ -1,65 +0,0 @@
-defmodule ZeroPhoenix.ModelCase do
- @moduledoc """
- This module defines the test case to be used by
- model tests.
-
- You may define functions here to be used as helpers in
- your model tests. See `errors_on/2`'s definition as reference.
-
- Finally, if the test case interacts with the database,
- it cannot be async. For this reason, every test runs
- inside a transaction which is reset at the beginning
- of the test unless the test case is marked as async.
- """
-
- use ExUnit.CaseTemplate
-
- using do
- quote do
- alias ZeroPhoenix.Repo
-
- import Ecto
- import Ecto.Changeset
- import Ecto.Query
- import ZeroPhoenix.ModelCase
- end
- end
-
- setup tags do
- :ok = Ecto.Adapters.SQL.Sandbox.checkout(ZeroPhoenix.Repo)
-
- unless tags[:async] do
- Ecto.Adapters.SQL.Sandbox.mode(ZeroPhoenix.Repo, {:shared, self()})
- end
-
- :ok
- end
-
- @doc """
- Helper for returning list of errors in a struct when given certain data.
-
- ## Examples
-
- Given a User schema that lists `:name` as a required field and validates
- `:password` to be safe, it would return:
-
- iex> errors_on(%User{}, %{password: "password"})
- [password: "is unsafe", name: "is blank"]
-
- You could then write your assertion like:
-
- assert {:password, "is unsafe"} in errors_on(%User{}, %{password: "password"})
-
- You can also create the changeset manually and retrieve the errors
- field directly:
-
- iex> changeset = User.changeset(%User{}, password: "password")
- iex> {:password, "is unsafe"} in changeset.errors
- true
- """
- def errors_on(struct, data) do
- struct.__struct__.changeset(struct, data)
- |> Ecto.Changeset.traverse_errors(&ZeroPhoenix.ErrorHelpers.translate_error/1)
- |> Enum.flat_map(fn {key, errors} -> for msg <- errors, do: {key, msg} end)
- end
-end
diff --git a/zero-phoenix/test/test_helper.exs b/zero-phoenix/test/test_helper.exs
index 8b8b591..e314e2e 100644
--- a/zero-phoenix/test/test_helper.exs
+++ b/zero-phoenix/test/test_helper.exs
@@ -1,4 +1,4 @@
-ExUnit.start
+ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(ZeroPhoenix.Repo, :manual)
diff --git a/zero-phoenix/test/views/error_view_test.exs b/zero-phoenix/test/views/error_view_test.exs
deleted file mode 100644
index 0aacb7e..0000000
--- a/zero-phoenix/test/views/error_view_test.exs
+++ /dev/null
@@ -1,21 +0,0 @@
-defmodule ZeroPhoenix.ErrorViewTest do
- use ZeroPhoenix.ConnCase, async: true
-
- # Bring render/3 and render_to_string/3 for testing custom views
- import Phoenix.View
-
- test "renders 404.html" do
- assert render_to_string(ZeroPhoenix.ErrorView, "404.html", []) ==
- "Page not found"
- end
-
- test "render 500.html" do
- assert render_to_string(ZeroPhoenix.ErrorView, "500.html", []) ==
- "Internal server error"
- end
-
- test "render any other" do
- assert render_to_string(ZeroPhoenix.ErrorView, "505.html", []) ==
- "Internal server error"
- end
-end
diff --git a/zero-phoenix/test/views/layout_view_test.exs b/zero-phoenix/test/views/layout_view_test.exs
deleted file mode 100644
index 89f1718..0000000
--- a/zero-phoenix/test/views/layout_view_test.exs
+++ /dev/null
@@ -1,3 +0,0 @@
-defmodule ZeroPhoenix.LayoutViewTest do
- use ZeroPhoenix.ConnCase, async: true
-end
diff --git a/zero-phoenix/test/views/page_view_test.exs b/zero-phoenix/test/views/page_view_test.exs
deleted file mode 100644
index 7867bdb..0000000
--- a/zero-phoenix/test/views/page_view_test.exs
+++ /dev/null
@@ -1,3 +0,0 @@
-defmodule ZeroPhoenix.PageViewTest do
- use ZeroPhoenix.ConnCase, async: true
-end
diff --git a/zero-phoenix/test/zero_phoenix/accounts/accounts_test.exs b/zero-phoenix/test/zero_phoenix/accounts/accounts_test.exs
new file mode 100644
index 0000000..4e9e2e7
--- /dev/null
+++ b/zero-phoenix/test/zero_phoenix/accounts/accounts_test.exs
@@ -0,0 +1,71 @@
+defmodule ZeroPhoenix.AccountsTest do
+ use ZeroPhoenix.DataCase
+
+ alias ZeroPhoenix.Accounts
+
+ describe "people" do
+ alias ZeroPhoenix.Accounts.Person
+
+ @valid_attrs %{email: "some email", first_name: "some first_name", last_name: "some last_name", username: "some username"}
+ @update_attrs %{email: "some updated email", first_name: "some updated first_name", last_name: "some updated last_name", username: "some updated username"}
+ @invalid_attrs %{email: nil, first_name: nil, last_name: nil, username: nil}
+
+ def person_fixture(attrs \\ %{}) do
+ {:ok, person} =
+ attrs
+ |> Enum.into(@valid_attrs)
+ |> Accounts.create_person()
+
+ person
+ end
+
+ test "list_people/0 returns all people" do
+ person = person_fixture()
+ assert Accounts.list_people() == [person]
+ end
+
+ test "get_person!/1 returns the person with given id" do
+ person = person_fixture()
+ assert Accounts.get_person!(person.id) == person
+ end
+
+ test "create_person/1 with valid data creates a person" do
+ assert {:ok, %Person{} = person} = Accounts.create_person(@valid_attrs)
+ assert person.email == "some email"
+ assert person.first_name == "some first_name"
+ assert person.last_name == "some last_name"
+ assert person.username == "some username"
+ end
+
+ test "create_person/1 with invalid data returns error changeset" do
+ assert {:error, %Ecto.Changeset{}} = Accounts.create_person(@invalid_attrs)
+ end
+
+ test "update_person/2 with valid data updates the person" do
+ person = person_fixture()
+ assert {:ok, person} = Accounts.update_person(person, @update_attrs)
+ assert %Person{} = person
+ assert person.email == "some updated email"
+ assert person.first_name == "some updated first_name"
+ assert person.last_name == "some updated last_name"
+ assert person.username == "some updated username"
+ end
+
+ test "update_person/2 with invalid data returns error changeset" do
+ person = person_fixture()
+ assert {:error, %Ecto.Changeset{}} = Accounts.update_person(person, @invalid_attrs)
+ assert person == Accounts.get_person!(person.id)
+ end
+
+ test "delete_person/1 deletes the person" do
+ person = person_fixture()
+ assert {:ok, %Person{}} = Accounts.delete_person(person)
+ assert_raise Ecto.NoResultsError, fn -> Accounts.get_person!(person.id) end
+ end
+
+ test "change_person/1 returns a person changeset" do
+ person = person_fixture()
+ assert %Ecto.Changeset{} = Accounts.change_person(person)
+ end
+ end
+end
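
One detail worth noting in the generated context test above: `person_fixture/1` pipes its overrides through `Enum.into(@valid_attrs)`, so a test can replace individual fields while the remaining attributes keep their defaults. A small illustrative sketch of that merge behavior (the module name and the "muppet" value are hypothetical; the attrs map is copied from the test above):

```elixir
defmodule ZeroPhoenix.PersonFixtureSketchTest do
  use ZeroPhoenix.DataCase

  alias ZeroPhoenix.Accounts

  @valid_attrs %{email: "some email", first_name: "some first_name",
                 last_name: "some last_name", username: "some username"}

  # Same shape as the fixture in AccountsTest: override keys win,
  # @valid_attrs fills in everything else.
  defp person_fixture(attrs \\ %{}) do
    {:ok, person} =
      attrs
      |> Enum.into(@valid_attrs)
      |> Accounts.create_person()

    person
  end

  test "overrides replace only the fields they name" do
    person = person_fixture(%{username: "muppet"})

    assert person.username == "muppet"
    assert person.email == "some email"
  end
end
```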
diff --git a/zero-phoenix/test/controllers/page_controller_test.exs b/zero-phoenix/test/zero_phoenix_web/controllers/page_controller_test.exs
similarity index 62%
rename from zero-phoenix/test/controllers/page_controller_test.exs
rename to zero-phoenix/test/zero_phoenix_web/controllers/page_controller_test.exs
index 9711047..9f9163f 100644
--- a/zero-phoenix/test/controllers/page_controller_test.exs
+++ b/zero-phoenix/test/zero_phoenix_web/controllers/page_controller_test.exs
@@ -1,5 +1,5 @@
-defmodule ZeroPhoenix.PageControllerTest do
- use ZeroPhoenix.ConnCase
+defmodule ZeroPhoenixWeb.PageControllerTest do
+ use ZeroPhoenixWeb.ConnCase
test "GET /", %{conn: conn} do
conn = get conn, "/"
diff --git a/zero-phoenix/test/zero_phoenix_web/controllers/person_controller_test.exs b/zero-phoenix/test/zero_phoenix_web/controllers/person_controller_test.exs
new file mode 100644
index 0000000..4145471
--- /dev/null
+++ b/zero-phoenix/test/zero_phoenix_web/controllers/person_controller_test.exs
@@ -0,0 +1,85 @@
+defmodule ZeroPhoenixWeb.PersonControllerTest do
+ use ZeroPhoenixWeb.ConnCase
+
+ alias ZeroPhoenix.Accounts
+ alias ZeroPhoenix.Accounts.Person
+
+ @create_attrs %{email: "some email", first_name: "some first_name", last_name: "some last_name", username: "some username"}
+ @update_attrs %{email: "some updated email", first_name: "some updated first_name", last_name: "some updated last_name", username: "some updated username"}
+ @invalid_attrs %{email: nil, first_name: nil, last_name: nil, username: nil}
+
+ def fixture(:person) do
+ {:ok, person} = Accounts.create_person(@create_attrs)
+ person
+ end
+
+ setup %{conn: conn} do
+ {:ok, conn: put_req_header(conn, "accept", "application/json")}
+ end
+
+ describe "index" do
+ test "lists all people", %{conn: conn} do
+ conn = get conn, person_path(conn, :index)
+ assert json_response(conn, 200)["data"] == []
+ end
+ end
+
+ describe "create person" do
+ test "renders person when data is valid", %{conn: conn} do
+ conn = post conn, person_path(conn, :create), person: @create_attrs
+ assert %{"id" => id} = json_response(conn, 201)["data"]
+
+ conn = get conn, person_path(conn, :show, id)
+ assert json_response(conn, 200)["data"] == %{
+ "id" => id,
+ "email" => "some email",
+ "first_name" => "some first_name",
+ "last_name" => "some last_name",
+ "username" => "some username"}
+ end
+
+ test "renders errors when data is invalid", %{conn: conn} do
+ conn = post conn, person_path(conn, :create), person: @invalid_attrs
+ assert json_response(conn, 422)["errors"] != %{}
+ end
+ end
+
+ describe "update person" do
+ setup [:create_person]
+
+ test "renders person when data is valid", %{conn: conn, person: %Person{id: id} = person} do
+ conn = put conn, person_path(conn, :update, person), person: @update_attrs
+ assert %{"id" => ^id} = json_response(conn, 200)["data"]
+
+ conn = get conn, person_path(conn, :show, id)
+ assert json_response(conn, 200)["data"] == %{
+ "id" => id,
+ "email" => "some updated email",
+ "first_name" => "some updated first_name",
+ "last_name" => "some updated last_name",
+ "username" => "some updated username"}
+ end
+
+ test "renders errors when data is invalid", %{conn: conn, person: person} do
+ conn = put conn, person_path(conn, :update, person), person: @invalid_attrs
+ assert json_response(conn, 422)["errors"] != %{}
+ end
+ end
+
+ describe "delete person" do
+ setup [:create_person]
+
+ test "deletes chosen person", %{conn: conn, person: person} do
+ conn = delete conn, person_path(conn, :delete, person)
+ assert response(conn, 204)
+ assert_error_sent 404, fn ->
+ get conn, person_path(conn, :show, person)
+ end
+ end
+ end
+
+ defp create_person(_) do
+ person = fixture(:person)
+ {:ok, person: person}
+ end
+end
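
The `setup [:create_person]` calls above rely on ExUnit's named setups: each atom names a one-arity function that receives the test context, and the `{:ok, keyword}` it returns is merged back into that context, which is how `person` ends up alongside `conn` in the update and delete tests. A stripped-down sketch of the same mechanism (module and function names hypothetical):

```elixir
defmodule NamedSetupSketchTest do
  use ExUnit.Case, async: true

  describe "named setups" do
    # Each atom refers to a function below that receives the context map.
    setup [:add_number]

    test "setup return values are merged into the context", %{number: number} do
      assert number == 42
    end
  end

  # Returning {:ok, keyword} (or a plain map) merges the entries into the context.
  defp add_number(_context), do: {:ok, number: 42}
end
```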
diff --git a/zero-phoenix/test/models/friendship_test.exs b/zero-phoenix/test/zero_phoenix_web/models/friendship_test.exs
similarity index 90%
rename from zero-phoenix/test/models/friendship_test.exs
rename to zero-phoenix/test/zero_phoenix_web/models/friendship_test.exs
index 7835ec6..b7cc848 100644
--- a/zero-phoenix/test/models/friendship_test.exs
+++ b/zero-phoenix/test/zero_phoenix_web/models/friendship_test.exs
@@ -1,4 +1,4 @@
-defmodule ZeroPhoenix.FriendshipTest do
+defmodule ZeroPhoenixWeb.FriendshipTest do
use ZeroPhoenix.ModelCase
alias ZeroPhoenix.Friendship
diff --git a/zero-phoenix/test/models/person_test.exs b/zero-phoenix/test/zero_phoenix_web/models/person_test.exs
similarity index 73%
rename from zero-phoenix/test/models/person_test.exs
rename to zero-phoenix/test/zero_phoenix_web/models/person_test.exs
index 86cb57e..f2d12c0 100644
--- a/zero-phoenix/test/models/person_test.exs
+++ b/zero-phoenix/test/zero_phoenix_web/models/person_test.exs
@@ -3,7 +3,12 @@ defmodule ZeroPhoenix.PersonTest do
alias ZeroPhoenix.Person
- @valid_attrs %{"email": "some content", first_name: "some content", last_name: "some content", username: "some content"}
+ @valid_attrs %{
+ email: "some content",
+ first_name: "some content",
+ last_name: "some content",
+ username: "some content"
+ }
@invalid_attrs %{}
test "changeset with valid attributes" do
diff --git a/zero-phoenix/test/zero_phoenix_web/views/error_view_test.exs b/zero-phoenix/test/zero_phoenix_web/views/error_view_test.exs
new file mode 100644
index 0000000..1a341e5
--- /dev/null
+++ b/zero-phoenix/test/zero_phoenix_web/views/error_view_test.exs
@@ -0,0 +1,21 @@
+defmodule ZeroPhoenixWeb.ErrorViewTest do
+ use ZeroPhoenixWeb.ConnCase, async: true
+
+ # Bring render/3 and render_to_string/3 for testing custom views
+ import Phoenix.View
+
+ test "renders 404.html" do
+ assert render_to_string(ZeroPhoenixWeb.ErrorView, "404.html", []) ==
+ "Page not found"
+ end
+
+ test "render 500.html" do
+ assert render_to_string(ZeroPhoenixWeb.ErrorView, "500.html", []) ==
+ "Internal server error"
+ end
+
+ test "render any other" do
+ assert render_to_string(ZeroPhoenixWeb.ErrorView, "505.html", []) ==
+ "Internal server error"
+ end
+end
diff --git a/zero-phoenix/test/zero_phoenix_web/views/layout_view_test.exs b/zero-phoenix/test/zero_phoenix_web/views/layout_view_test.exs
new file mode 100644
index 0000000..d94ea7a
--- /dev/null
+++ b/zero-phoenix/test/zero_phoenix_web/views/layout_view_test.exs
@@ -0,0 +1,3 @@
+defmodule ZeroPhoenixWeb.LayoutViewTest do
+ use ZeroPhoenixWeb.ConnCase, async: true
+end
diff --git a/zero-phoenix/test/zero_phoenix_web/views/page_view_test.exs b/zero-phoenix/test/zero_phoenix_web/views/page_view_test.exs
new file mode 100644
index 0000000..ef351c4
--- /dev/null
+++ b/zero-phoenix/test/zero_phoenix_web/views/page_view_test.exs
@@ -0,0 +1,3 @@
+defmodule ZeroPhoenixWeb.PageViewTest do
+ use ZeroPhoenixWeb.ConnCase, async: true
+end
diff --git a/zero-phoenix/web/controllers/page_controller.ex b/zero-phoenix/web/controllers/page_controller.ex
deleted file mode 100644
index 186d682..0000000
--- a/zero-phoenix/web/controllers/page_controller.ex
+++ /dev/null
@@ -1,7 +0,0 @@
-defmodule ZeroPhoenix.PageController do
- use ZeroPhoenix.Web, :controller
-
- def index(conn, _params) do
- render conn, "index.html"
- end
-end
diff --git a/zero-phoenix/web/controllers/person_controller.ex b/zero-phoenix/web/controllers/person_controller.ex
deleted file mode 100644
index e081f9a..0000000
--- a/zero-phoenix/web/controllers/person_controller.ex
+++ /dev/null
@@ -1,55 +0,0 @@
-defmodule ZeroPhoenix.PersonController do
- use ZeroPhoenix.Web, :controller
-
- alias ZeroPhoenix.Person
-
- def index(conn, _params) do
- people = Repo.all(Person)
- render(conn, "index.json", people: people)
- end
-
- def create(conn, %{"person" => person_params}) do
- changeset = Person.changeset(%Person{}, person_params)
-
- case Repo.insert(changeset) do
- {:ok, person} ->
- conn
- |> put_status(:created)
- |> put_resp_header("location", person_path(conn, :show, person))
- |> render("show.json", person: person)
- {:error, changeset} ->
- conn
- |> put_status(:unprocessable_entity)
- |> render(ZeroPhoenix.ChangesetView, "error.json", changeset: changeset)
- end
- end
-
- def show(conn, %{"id" => id}) do
- person = Repo.get!(Person, id)
- render(conn, "show.json", person: person)
- end
-
- def update(conn, %{"id" => id, "person" => person_params}) do
- person = Repo.get!(Person, id)
- changeset = Person.changeset(person, person_params)
-
- case Repo.update(changeset) do
- {:ok, person} ->
- render(conn, "show.json", person: person)
- {:error, changeset} ->
- conn
- |> put_status(:unprocessable_entity)
- |> render(ZeroPhoenix.ChangesetView, "error.json", changeset: changeset)
- end
- end
-
- def delete(conn, %{"id" => id}) do
- person = Repo.get!(Person, id)
-
- # Here we use delete! (with a bang) because we expect
- # it to always work (and if it does not, it will raise).
- Repo.delete!(person)
-
- send_resp(conn, :no_content, "")
- end
-end
diff --git a/zero-phoenix/web/graphql/schema.ex b/zero-phoenix/web/graphql/schema.ex
deleted file mode 100644
index 4da8ecf..0000000
--- a/zero-phoenix/web/graphql/schema.ex
+++ /dev/null
@@ -1,19 +0,0 @@
-defmodule ZeroPhoenix.Graphql.Schema do
- use Absinthe.Schema
-
- import_types ZeroPhoenix.Graphql.Types.Person
-
- alias ZeroPhoenix.Repo
-
- query do
- field :person, type: :person do
- arg :id, non_null(:id)
- resolve fn %{id: id}, _info ->
- case ZeroPhoenix.Person |> Repo.get(id) do
- nil -> {:error, "Person id #{id} not found"}
- person -> {:ok, person}
- end
- end
- end
- end
-end
diff --git a/zero-phoenix/web/models/friendship.ex b/zero-phoenix/web/models/friendship.ex
deleted file mode 100644
index a3a4ec8..0000000
--- a/zero-phoenix/web/models/friendship.ex
+++ /dev/null
@@ -1,21 +0,0 @@
-defmodule ZeroPhoenix.Friendship do
- use ZeroPhoenix.Web, :model
-
- @required_fields ~w(person_id friend_id)
-
- schema "friendships" do
- belongs_to :person, ZeroPhoenix.Person
- belongs_to :friend, ZeroPhoenix.Person
-
- timestamps()
- end
-
- @doc """
- Builds a changeset based on the `struct` and `params`.
- """
- def changeset(struct, params \\ %{}) do
- struct
- |> cast(params, @required_fields)
- |> validate_required(@required_fields)
- end
-end
diff --git a/zero-phoenix/web/models/person.ex b/zero-phoenix/web/models/person.ex
deleted file mode 100644
index 47004b6..0000000
--- a/zero-phoenix/web/models/person.ex
+++ /dev/null
@@ -1,26 +0,0 @@
-defmodule ZeroPhoenix.Person do
- use ZeroPhoenix.Web, :model
-
- @required_fields ~w(first_name last_name username email)
-
- schema "people" do
- field :first_name, :string
- field :last_name, :string
- field :username, :string
- field :email, :string
-
- has_many :friendships, ZeroPhoenix.Friendship #, on_delete: :delete_all
- has_many :friends, through: [:friendships, :friend] #, on_delete: :delete_all
-
- timestamps()
- end
-
- @doc """
- Builds a changeset based on the `struct` and `params`.
- """
- def changeset(struct, params \\ %{}) do
- struct
- |> cast(params, @required_fields)
- |> validate_required(@required_fields)
- end
-end
diff --git a/zero-phoenix/web/resolvers/person_resolver.ex b/zero-phoenix/web/resolvers/person_resolver.ex
deleted file mode 100644
index 089554c..0000000
--- a/zero-phoenix/web/resolvers/person_resolver.ex
+++ /dev/null
@@ -1,11 +0,0 @@
-defmodule ZeroPhoenix.PersonResolver do
-
- alias ZeroPhoenix.Repo
-
- def find(%{id: id}, _info) do
- case ZeroPhoenix.Person |> Repo.get(id) do
- nil -> {:error, "Person id #{id} not found"}
- person -> {:ok, person}
- end
- end
-end
diff --git a/zero-phoenix/web/router.ex b/zero-phoenix/web/router.ex
deleted file mode 100644
index df3a343..0000000
--- a/zero-phoenix/web/router.ex
+++ /dev/null
@@ -1,33 +0,0 @@
-defmodule ZeroPhoenix.Router do
- use ZeroPhoenix.Web, :router
-
- pipeline :browser do
- plug :accepts, ["html"]
- plug :fetch_session
- plug :fetch_flash
- plug :protect_from_forgery
- plug :put_secure_browser_headers
- end
-
- pipeline :api do
- plug :accepts, ["json"]
- end
-
- scope "/", ZeroPhoenix do
- pipe_through :browser # Use the default browser stack
-
- get "/", PageController, :index
- end
-
- scope "/api", ZeroPhoenix do
- pipe_through :api
-
- resources "/people", PersonController, except: [:new, :edit]
- end
-
- scope "/graphiql" do
- pipe_through :api
-
- forward "/", Absinthe.Plug.GraphiQL, schema: ZeroPhoenix.Graphql.Schema, interface: :simple
- end
-end
diff --git a/zero-phoenix/web/views/layout_view.ex b/zero-phoenix/web/views/layout_view.ex
deleted file mode 100644
index 638747f..0000000
--- a/zero-phoenix/web/views/layout_view.ex
+++ /dev/null
@@ -1,3 +0,0 @@
-defmodule ZeroPhoenix.LayoutView do
- use ZeroPhoenix.Web, :view
-end
diff --git a/zero-phoenix/web/views/page_view.ex b/zero-phoenix/web/views/page_view.ex
deleted file mode 100644
index 15fccef..0000000
--- a/zero-phoenix/web/views/page_view.ex
+++ /dev/null
@@ -1,3 +0,0 @@
-defmodule ZeroPhoenix.PageView do
- use ZeroPhoenix.Web, :view
-end
diff --git a/zero-phoenix/web/web.ex b/zero-phoenix/web/web.ex
deleted file mode 100644
index 6d26d52..0000000
--- a/zero-phoenix/web/web.ex
+++ /dev/null
@@ -1,81 +0,0 @@
-defmodule ZeroPhoenix.Web do
- @moduledoc """
- A module that keeps using definitions for controllers,
- views and so on.
-
- This can be used in your application as:
-
- use ZeroPhoenix.Web, :controller
- use ZeroPhoenix.Web, :view
-
- The definitions below will be executed for every view,
- controller, etc, so keep them short and clean, focused
- on imports, uses and aliases.
-
- Do NOT define functions inside the quoted expressions
- below.
- """
-
- def model do
- quote do
- use Ecto.Schema
-
- import Ecto
- import Ecto.Changeset
- import Ecto.Query
- end
- end
-
- def controller do
- quote do
- use Phoenix.Controller
-
- alias ZeroPhoenix.Repo
- import Ecto
- import Ecto.Query
-
- import ZeroPhoenix.Router.Helpers
- import ZeroPhoenix.Gettext
- end
- end
-
- def view do
- quote do
- use Phoenix.View, root: "web/templates"
-
- # Import convenience functions from controllers
- import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
-
- # Use all HTML functionality (forms, tags, etc)
- use Phoenix.HTML
-
- import ZeroPhoenix.Router.Helpers
- import ZeroPhoenix.ErrorHelpers
- import ZeroPhoenix.Gettext
- end
- end
-
- def router do
- quote do
- use Phoenix.Router
- end
- end
-
- def channel do
- quote do
- use Phoenix.Channel
-
- alias ZeroPhoenix.Repo
- import Ecto
- import Ecto.Query
- import ZeroPhoenix.Gettext
- end
- end
-
- @doc """
- When used, dispatch to the appropriate controller/view/etc.
- """
- defmacro __using__(which) when is_atom(which) do
- apply(__MODULE__, which, [])
- end
-end