Dataset schema (one row per source file; "nullable" marks columns that contain nulls):

| Column | Type | Length / range |
|---|---|---|
| hexsha | string | 40 chars |
| size | int64 | 2 to 991k |
| ext | string | 2 distinct values |
| lang | string | 1 distinct value |
| max_stars_repo_path | string | 4 to 208 chars |
| max_stars_repo_name | string | 6 to 106 chars |
| max_stars_repo_head_hexsha | string | 40 chars |
| max_stars_repo_licenses | sequence | list of license identifiers |
| max_stars_count | int64 | 1 to 33.5k (nullable) |
| max_stars_repo_stars_event_min_datetime | string | 24 chars (nullable) |
| max_stars_repo_stars_event_max_datetime | string | 24 chars (nullable) |
| max_issues_repo_path | string | 4 to 208 chars |
| max_issues_repo_name | string | 6 to 106 chars |
| max_issues_repo_head_hexsha | string | 40 chars |
| max_issues_repo_licenses | sequence | list of license identifiers |
| max_issues_count | int64 | 1 to 16.3k (nullable) |
| max_issues_repo_issues_event_min_datetime | string | 24 chars (nullable) |
| max_issues_repo_issues_event_max_datetime | string | 24 chars (nullable) |
| max_forks_repo_path | string | 4 to 208 chars |
| max_forks_repo_name | string | 6 to 106 chars |
| max_forks_repo_head_hexsha | string | 40 chars |
| max_forks_repo_licenses | sequence | list of license identifiers |
| max_forks_count | int64 | 1 to 6.91k (nullable) |
| max_forks_repo_forks_event_min_datetime | string | 24 chars (nullable) |
| max_forks_repo_forks_event_max_datetime | string | 24 chars (nullable) |
| content | string | 2 to 991k chars |
| avg_line_length | float64 | 1 to 36k |
| max_line_length | int64 | 1 to 977k |
| alphanum_fraction | float64 | 0 to 1 |
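The last three columns are plain functions of `content`. As a rough, self-contained illustration of what they measure (an assumption about their definition, not code taken from the dataset's own tooling; the `RowStats` module name is invented for this sketch), comparable values can be recomputed in Elixir like this:

```elixir
defmodule RowStats do
  @moduledoc """
  Recomputes values comparable to the dataset's derived columns
  (avg_line_length, max_line_length, alphanum_fraction) from a `content` string.
  Illustrative only; the dataset's exact definitions may differ.
  """

  def from_content(content) when is_binary(content) do
    # String.split/2 on "\n" always returns at least one element,
    # so the divisions below never divide by zero.
    line_lengths =
      content
      |> String.split("\n")
      |> Enum.map(&String.length/1)

    %{
      avg_line_length: Enum.sum(line_lengths) / length(line_lengths),
      max_line_length: Enum.max(line_lengths),
      alphanum_fraction: alphanum_fraction(content)
    }
  end

  # Fraction of characters that are ASCII letters or digits.
  defp alphanum_fraction(""), do: 0.0

  defp alphanum_fraction(content) do
    graphemes = String.graphemes(content)
    alnum = Enum.count(graphemes, &String.match?(&1, ~r/^[A-Za-z0-9]$/))
    alnum / length(graphemes)
  end
end
```

Running `RowStats.from_content/1` on the first sample file below should land near its reported `avg_line_length: 32.033333 | max_line_length: 119 | alphanum_fraction: 0.673257`, though the exact conventions used by the dataset (bytes versus graphemes, trailing-newline handling) are not documented here.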
Sample rows:

hexsha: f7dcdfd5b158850246f0cfffd18a874462beb4c7 | size: 961 | ext: ex | lang: Elixir | path: lib/learn/api/lti.ex | repo: mark-b-kauffman/bblearn_rest_client | repo head: 63fb0da9b8027e385df927f43ec5e9ea4a517570 | licenses: ["BSD-3-Clause"] | max_stars_count: null | max_issues_count: null | max_forks_count: null | content:

defmodule Learn.Api.Lti do
require IEx
@moduledoc """
Learn.Api.Lti
"""
@v1_lti_placements "/learn/api/public/v1/lti/placements" # Since: 3300.0.0
## LTI
## Functions that call the @v1_lti endpoints
def get_v1_lti_placements(rest_client, params \\ %{}) do
params = %{offset: 0} |> Map.merge(params)
paramlist = URI.encode_query(params) # Turn the map into a parameter list string in one fell swoop.
url = "https://#{rest_client.fqdn}#{@v1_lti_placements}?#{paramlist}"
headers = [{"Content-Type", "application/json"}, {"Authorization", "Bearer #{rest_client.token["access_token"]}"}]
options = []
{status, response} = HTTPoison.get url, headers, options
{status, response}
end
## LTI convenience functions that call the current version
def get_lti_placements(rest_client, params \\ %{}) do
{code, response} = get_v1_lti_placements(rest_client, params)
{code, response}
end
end
avg_line_length: 32.033333 | max_line_length: 119 | alphanum_fraction: 0.673257

hexsha: f7dcf02d01ecade607b584790004a884f58f04ec | size: 12,663 | ext: exs | lang: Elixir | path: test/ecto/multi_test.exs | repo: ohta-rh/ecto | repo head: d5f8bfdfcc6fcfb520c62bbd1dbdd8ee6f09de59 | licenses: ["Apache-2.0"] | max_stars_count: null | max_issues_count: null | max_forks_count: null | content:

defmodule Ecto.MultiTest do
use ExUnit.Case, async: true
doctest Ecto.Multi
alias Ecto.Multi
alias Ecto.Changeset
alias Ecto.TestRepo
defmodule Comment do
use Ecto.Schema
schema "comments" do
field :x, :integer
field :parent_x, :integer
end
end
def ok(x), do: {:ok, x}
def multi(x), do: Multi.new |> Multi.update(:update, Changeset.change(x.insert))
test "new" do
assert Multi.new == %Multi{}
end
test "insert changeset" do
changeset = Changeset.change(%Comment{})
multi =
Multi.new
|> Multi.insert(:comment, changeset)
assert multi.names == MapSet.new([:comment])
assert multi.operations == [{:comment, {:changeset, %{changeset | action: :insert}, []}}]
end
test "insert struct" do
struct = %Comment{}
changeset = Changeset.change(struct)
multi =
Multi.new
|> Multi.insert(:comment, struct)
assert multi.names == MapSet.new([:comment])
assert multi.operations == [{:comment, {:changeset, %{changeset | action: :insert}, []}}]
end
test "update changeset" do
changeset = Changeset.change(%Comment{})
multi =
Multi.new
|> Multi.update(:comment, changeset)
assert multi.names == MapSet.new([:comment])
assert multi.operations == [{:comment, {:changeset, %{changeset | action: :update}, []}}]
end
test "delete changeset" do
changeset = Changeset.change(%Comment{})
multi =
Multi.new
|> Multi.delete(:comment, changeset)
assert multi.names == MapSet.new([:comment])
assert multi.operations == [{:comment, {:changeset, %{changeset | action: :delete}, []}}]
end
test "delete struct" do
struct = %Comment{}
changeset = Changeset.change(struct)
multi =
Multi.new
|> Multi.delete(:comment, struct)
assert multi.names == MapSet.new([:comment])
assert multi.operations == [{:comment, {:changeset, %{changeset | action: :delete}, []}}]
end
test "error" do
multi =
Multi.new
|> Multi.error(:oops, :value)
assert multi.names == MapSet.new([:oops])
assert multi.operations == [{:oops, {:error, :value}}]
end
test "run with fun" do
fun = fn changes -> {:ok, changes} end
multi =
Multi.new
|> Multi.run(:fun, fun)
assert multi.names == MapSet.new([:fun])
assert multi.operations == [{:fun, {:run, fun}}]
end
test "run named with tuple" do
fun = fn changes -> {:ok, changes} end
multi =
Multi.new
|> Multi.run({:fun, 3}, fun)
assert multi.names == MapSet.new([{:fun, 3}])
assert multi.operations == [{{:fun, 3}, {:run, fun}}]
end
test "run named with char_list" do
fun = fn changes -> {:ok, changes} end
multi =
Multi.new
|> Multi.run('myFunction', fun)
assert multi.names == MapSet.new(['myFunction'])
assert multi.operations == [{'myFunction', {:run, fun}}]
end
test "run with mfa" do
multi =
Multi.new
|> Multi.run(:fun, __MODULE__, :ok, [])
assert multi.names == MapSet.new([:fun])
assert multi.operations == [{:fun, {:run, {__MODULE__, :ok, []}}}]
end
test "insert_all" do
multi =
Multi.new
|> Multi.insert_all(:comments, Comment, [[x: 2]])
assert multi.names == MapSet.new([:comments])
assert [{:comments, {:insert_all, Comment, [[x: 2]], []}}] = multi.operations
end
test "update_all" do
multi =
Multi.new
|> Multi.update_all(:comments, Comment, set: [x: 2])
assert multi.names == MapSet.new([:comments])
assert [{:comments, {:update_all, query, updates, []}}] = multi.operations
assert updates == [set: [x: 2]]
assert query == Ecto.Queryable.to_query(Comment)
end
test "delete_all" do
multi =
Multi.new
|> Multi.delete_all(:comments, Comment)
assert multi.names == MapSet.new([:comments])
assert [{:comments, {:delete_all, query, []}}] = multi.operations
assert query == Ecto.Queryable.to_query(Comment)
end
test "append/prepend without repetition" do
fun = fn _ -> {:ok, :ok} end
lhs = Multi.new |> Multi.run(:one, fun) |> Multi.run(:two, fun)
rhs = Multi.new |> Multi.run(:three, fun) |> Multi.run(:four, fun)
merged = Multi.append(lhs, rhs)
operations = Keyword.keys(merged.operations)
assert merged.names == MapSet.new([:one, :two, :three, :four])
assert operations == [:four, :three, :two, :one]
merged = Multi.prepend(lhs, rhs)
operations = Keyword.keys(merged.operations)
assert merged.names == MapSet.new([:one, :two, :three, :four])
assert operations == [:two, :one, :four, :three]
end
test "append/prepend with repetition" do
fun = fn _ -> {:ok, :ok} end
multi = Multi.new |> Multi.run(:run, fun)
assert_raise ArgumentError, ~r"both declared operations: \[:run\]", fn ->
Multi.append(multi, multi)
end
assert_raise ArgumentError, ~r"both declared operations: \[:run\]", fn ->
Multi.prepend(multi, multi)
end
end
test "to_list" do
changeset = Changeset.change(%Comment{id: 1}, x: 1)
multi =
Multi.new
|> Multi.insert(:insert, changeset)
|> Multi.run(:run, fn changes -> {:ok, changes} end)
|> Multi.update(:update, changeset)
|> Multi.delete(:delete, changeset)
|> Multi.insert_all(:insert_all, Comment, [[x: 1]])
|> Multi.update_all(:update_all, Comment, set: [x: 1])
|> Multi.delete_all(:delete_all, Comment)
assert [
{:insert, {:insert, _, []}},
{:run, {:run, _}},
{:update, {:update, _, []}},
{:delete, {:delete, _, []}},
{:insert_all, {:insert_all, _, _, []}},
{:update_all, {:update_all, _, _, []}},
{:delete_all, {:delete_all, _, []}},
] = Ecto.Multi.to_list(multi)
end
test "add changeset with invalid action" do
changeset = %{Changeset.change(%Comment{}) | action: :invalid}
assert_raise ArgumentError, ~r"an action already set to :invalid", fn ->
Multi.new |> Multi.insert(:changeset, changeset)
end
end
test "add run with invalid arity" do
assert_raise FunctionClauseError, fn ->
Multi.new |> Multi.run(:run, fn -> nil end)
end
end
test "repeating an operation" do
fun = fn _ -> {:ok, :ok} end
assert_raise RuntimeError, ~r":run is already a member", fn ->
Multi.new |> Multi.run(:run, fun) |> Multi.run(:run, fun)
end
end
describe "merge/2" do
test "with fun" do
changeset = Changeset.change(%Comment{})
multi =
Multi.new
|> Multi.insert(:insert, changeset)
|> Multi.merge(fn data ->
Multi.new |> Multi.update(:update, Changeset.change(data.insert))
end)
assert {:ok, data} = TestRepo.transaction(multi)
assert %Comment{} = data.insert
assert %Comment{} = data.update
end
test "with mfa" do
changeset = Changeset.change(%Comment{})
multi =
Multi.new
|> Multi.insert(:insert, changeset)
|> Multi.merge(__MODULE__, :multi, [])
assert {:ok, data} = TestRepo.transaction(multi)
assert %Comment{} = data.insert
assert %Comment{} = data.update
end
test "rollbacks on errors" do
error = fn _ -> {:error, :error} end
ok = fn _ -> {:ok, :ok} end
multi =
Multi.new
|> Multi.run(:outside_ok, ok)
|> Multi.merge(fn _ ->
Multi.new
|> Multi.run(:inside_ok, ok)
|> Multi.run(:inside_error, error)
end)
|> Multi.run(:outside_error, error)
assert {:error, :inside_error, :error, data} = TestRepo.transaction(multi)
assert :ok == data.outside_ok
assert :ok == data.inside_ok
end
test "does not allow repeated operations" do
fun = fn _ -> {:ok, :ok} end
multi =
Multi.new
|> Multi.merge(fn _ ->
Multi.new |> Multi.run(:run, fun)
end)
|> Multi.run(:run, fun)
assert_raise RuntimeError, ~r"found in both Ecto.Multi: \[:run\]", fn ->
TestRepo.transaction(multi)
end
multi =
Multi.new
|> Multi.merge(fn _ -> Multi.new |> Multi.run(:run, fun) end)
|> Multi.merge(fn _ -> Multi.new |> Multi.run(:run, fun) end)
assert_raise RuntimeError, ~r"found in both Ecto.Multi: \[:run\]", fn ->
TestRepo.transaction(multi)
end
end
end
describe "Repo.transaction" do
test "on success" do
changeset = Changeset.change(%Comment{id: 1}, x: 1)
multi =
Multi.new
|> Multi.insert(:insert, changeset)
|> Multi.run(:run, fn changes -> {:ok, changes} end)
|> Multi.update(:update, changeset)
|> Multi.delete(:delete, changeset)
|> Multi.insert_all(:insert_all, Comment, [[x: 1]])
|> Multi.update_all(:update_all, Comment, set: [x: 1])
|> Multi.delete_all(:delete_all, Comment)
assert {:ok, changes} = TestRepo.transaction(multi)
assert_received {:transaction, _}
assert {:messages, actions} = Process.info(self(), :messages)
assert actions == [{:insert, {nil, "comments"}}, {:update, {nil, "comments"}}, {:delete, {nil, "comments"}}, {:insert_all, {nil, "comments"}, [[x: 1]]},
{:update_all, {nil, "comments"}}, {:delete_all, {nil, "comments"}}]
assert %Comment{} = changes.insert
assert %Comment{} = changes.update
assert %Comment{} = changes.delete
assert {1, nil} = changes.insert_all
assert {1, nil} = changes.update_all
assert {1, nil} = changes.delete_all
assert Map.has_key?(changes.run, :insert)
refute Map.has_key?(changes.run, :update)
end
test "with empty multi" do
assert {:ok, changes} = TestRepo.transaction(Multi.new)
refute_received {:transaction, _}
assert changes == %{}
end
test "rolls back from run" do
changeset = Changeset.change(%Comment{id: 1}, x: 1)
multi =
Multi.new
|> Multi.insert(:insert, changeset)
|> Multi.run(:run, fn _changes -> {:error, "error from run"} end)
|> Multi.update(:update, changeset)
|> Multi.delete(:delete, changeset)
assert {:error, :run, "error from run", changes} = TestRepo.transaction(multi)
assert_received {:transaction, _}
assert_received {:rollback, _}
assert {:messages, [{:insert, {nil, "comments"}}]} == Process.info(self(), :messages)
assert %Comment{} = changes.insert
refute Map.has_key?(changes, :run)
refute Map.has_key?(changes, :update)
end
test "rolls back on error" do
changeset = Changeset.change(%Comment{id: 1}, x: 1)
multi =
Multi.new
|> Multi.insert(:insert, changeset)
|> Multi.error(:oops, "explicit error")
|> Multi.update(:update, changeset)
|> Multi.delete(:delete, changeset)
assert {:error, :oops, "explicit error", changes} = TestRepo.transaction(multi)
assert_received {:transaction, _}
assert_received {:rollback, _}
assert {:messages, [{:insert, {nil, "comments"}}]} == Process.info(self(), :messages)
assert %Comment{} = changes.insert
refute Map.has_key?(changes, :run)
refute Map.has_key?(changes, :update)
end
test "rolls back from repo" do
changeset = Changeset.change(%Comment{id: 1}, x: 1)
invalid = put_in(changeset.data.__meta__.context, {:invalid, [unique: "comments_x_index"]})
|> Changeset.unique_constraint(:x)
multi =
Multi.new
|> Multi.insert(:insert, changeset)
|> Multi.run(:run, fn _changes -> {:ok, "ok"} end)
|> Multi.update(:update, invalid)
|> Multi.delete(:delete, changeset)
assert {:error, :update, error, changes} = TestRepo.transaction(multi)
assert_received {:transaction, _}
assert_received {:rollback, _}
assert {:messages, [{:insert, {nil, "comments"}}]} == Process.info(self(), :messages)
assert %Comment{} = changes.insert
assert "ok" == changes.run
assert error.errors == [x: {"has already been taken", []}]
refute Map.has_key?(changes, :update)
end
test "checks invalid changesets before starting transaction" do
changeset = %{Changeset.change(%Comment{}) | valid?: false}
multi = Multi.new |> Multi.insert(:invalid, changeset)
assert {:error, :invalid, invalid, %{}} = TestRepo.transaction(multi)
assert invalid.data == changeset.data
refute_received {:transaction, _}
end
end
end
avg_line_length: 31.578554 | max_line_length: 158 | alphanum_fraction: 0.591171

hexsha: f7dd1fd2ca9c8ef9fa3aca8ba0452ab2751ac8c1 | size: 26,007 | ext: ex | lang: Elixir | path: lib/elixir/lib/exception.ex | repo: rcoppolo/elixir | repo head: c4092e071f8b42f5a9ad213dd8b3632918097213 | licenses: ["Apache-2.0"] | max_stars_count: null | max_issues_count: null | max_forks_count: null | content:

defmodule Exception do
@moduledoc """
Functions to format throw/catch/exit and exceptions.
Note that stacktraces in Elixir are updated on throw,
errors and exits. For example, at any given moment,
`System.stacktrace/0` will return the stacktrace for the
last throw/error/exit that occurred in the current process.
Do not rely on the particular format returned by the `format*`
functions in this module. They may be changed in future releases
in order to better suit Elixir's tool chain. In other words,
by using the functions in this module it is guaranteed you will
format exceptions as in the current Elixir version being used.
"""
@typedoc "The exception type"
@type t :: %{
required(:__struct__) => module,
required(:__exception__) => true,
atom => any
}
@typedoc "The kind handled by formatting functions"
@type kind :: :error | :exit | :throw | {:EXIT, pid}
@type stacktrace :: [stacktrace_entry]
@type stacktrace_entry ::
{module, atom, arity_or_args, location} |
{(... -> any), arity_or_args, location}
@typep arity_or_args :: non_neg_integer | list
@typep location :: Keyword.t
@callback exception(term) :: t
@callback message(t) :: String.t
@doc """
Returns `true` if the given `term` is an exception.
"""
def exception?(term)
def exception?(%{__struct__: struct, __exception__: true}) when is_atom(struct),
do: true
def exception?(_), do: false
@doc """
Gets the message for an `exception`.
"""
def message(%{__struct__: module, __exception__: true} = exception) when is_atom(module) do
try do
module.message(exception)
rescue
e ->
"got #{inspect e.__struct__} with message #{inspect message(e)} " <>
"while retrieving Exception.message/1 for #{inspect(exception)}"
else
x when is_binary(x) -> x
x ->
"got #{inspect(x)} " <>
"while retrieving Exception.message/1 for #{inspect(exception)} " <>
"(expected a string)"
end
end
@doc """
Normalizes an exception, converting Erlang exceptions
to Elixir exceptions.
It takes the `kind` spilled by `catch` as an argument and
normalizes only `:error`, returning the untouched payload
for others.
The third argument, a stacktrace, is optional. If it is
not supplied `System.stacktrace/0` will sometimes be used
to get additional information for the `kind` `:error`. If
the stacktrace is unknown and `System.stacktrace/0` would
not return the stacktrace corresponding to the exception
an empty stacktrace, `[]`, must be used.
"""
@spec normalize(:error, any, stacktrace) :: t
@spec normalize(kind, payload, stacktrace) :: payload when payload: var
# Generating a stacktrace is expensive, default to nil
# to only fetch it when needed.
def normalize(kind, payload, stacktrace \\ nil)
def normalize(:error, exception, stacktrace) do
if exception?(exception) do
exception
else
ErlangError.normalize(exception, stacktrace)
end
end
def normalize(_kind, payload, _stacktrace) do
payload
end
@doc """
Normalizes and formats any throw/error/exit.
The message is formatted and displayed in the same
format as used by Elixir's CLI.
The third argument, a stacktrace, is optional. If it is
not supplied `System.stacktrace/0` will sometimes be used
to get additional information for the `kind` `:error`. If
the stacktrace is unknown and `System.stacktrace/0` would
not return the stacktrace corresponding to the exception
an empty stacktrace, `[]`, must be used.
"""
@spec format_banner(kind, any, stacktrace | nil) :: String.t
def format_banner(kind, exception, stacktrace \\ nil)
def format_banner(:error, exception, stacktrace) do
exception = normalize(:error, exception, stacktrace)
"** (" <> inspect(exception.__struct__) <> ") " <> message(exception)
end
def format_banner(:throw, reason, _stacktrace) do
"** (throw) " <> inspect(reason)
end
def format_banner(:exit, reason, _stacktrace) do
"** (exit) " <> format_exit(reason, <<"\n ">>)
end
def format_banner({:EXIT, pid}, reason, _stacktrace) do
"** (EXIT from #{inspect pid}) " <> format_exit(reason, <<"\n ">>)
end
@doc """
Normalizes and formats throw/errors/exits and stacktraces.
It relies on `format_banner/3` and `format_stacktrace/1`
to generate the final format.
Note that `{:EXIT, pid}` do not generate a stacktrace though
(as they are retrieved as messages without stacktraces).
"""
@spec format(kind, any, stacktrace | nil) :: String.t
def format(kind, payload, stacktrace \\ nil)
def format({:EXIT, _} = kind, any, _) do
format_banner(kind, any)
end
def format(kind, payload, stacktrace) do
stacktrace = stacktrace || System.stacktrace
message = format_banner(kind, payload, stacktrace)
case stacktrace do
[] -> message
_ -> message <> "\n" <> format_stacktrace(stacktrace)
end
end
@doc """
Formats an exit. It returns a string.
Often there are errors/exceptions inside exits. Exits are often
wrapped by the caller and provide stacktraces too. This function
formats exits in a way to nicely show the exit reason, caller
and stacktrace.
"""
@spec format_exit(any) :: String.t
def format_exit(reason) do
format_exit(reason, <<"\n ">>)
end
# 2-Tuple could be caused by an error if the second element is a stacktrace.
defp format_exit({exception, maybe_stacktrace} = reason, joiner)
when is_list(maybe_stacktrace) and maybe_stacktrace !== [] do
try do
Enum.map(maybe_stacktrace, &format_stacktrace_entry/1)
else
formatted_stacktrace ->
# Assume a non-empty list formattable as stacktrace is a
# stacktrace, so exit was caused by an error.
message = "an exception was raised:" <> joiner <>
format_banner(:error, exception, maybe_stacktrace)
Enum.join([message | formatted_stacktrace], joiner <> <<" ">>)
catch
:error, _ ->
# Not a stacktrace, was an exit.
format_exit_reason(reason)
end
end
# :supervisor.start_link returns this error reason when it fails to init
# because a child's start_link raises.
defp format_exit({:shutdown,
{:failed_to_start_child, child, {:EXIT, reason}}}, joiner) do
format_start_child(child, reason, joiner)
end
# :supervisor.start_link returns this error reason when it fails to init
# because a child's start_link returns {:error, reason}.
defp format_exit({:shutdown, {:failed_to_start_child, child, reason}},
joiner) do
format_start_child(child, reason, joiner)
end
# 2-Tuple could be an exit caused by mfa if second element is mfa, args
# must be a list of arguments - max length 255 due to max arity.
defp format_exit({reason2, {mod, fun, args}} = reason, joiner)
when length(args) < 256 do
try do
format_mfa(mod, fun, args)
else
mfa ->
# Assume tuple formattable as an mfa is an mfa, so exit was caused by
# failed mfa.
"exited in: " <> mfa <> joiner <>
"** (EXIT) " <> format_exit(reason2, joiner <> <<" ">>)
catch
:error, _ ->
# Not an mfa, was an exit.
format_exit_reason(reason)
end
end
defp format_exit(reason, _joiner) do
format_exit_reason(reason)
end
defp format_exit_reason(:normal), do: "normal"
defp format_exit_reason(:shutdown), do: "shutdown"
defp format_exit_reason({:shutdown, reason}) do
"shutdown: #{inspect(reason)}"
end
defp format_exit_reason(:calling_self), do: "process attempted to call itself"
defp format_exit_reason(:timeout), do: "time out"
defp format_exit_reason(:killed), do: "killed"
defp format_exit_reason(:noconnection), do: "no connection"
defp format_exit_reason(:noproc) do
"no process: the process is not alive or there's no process currently associated with the given name, possibly because its application isn't started"
end
defp format_exit_reason({:nodedown, node_name}) when is_atom(node_name) do
"no connection to #{node_name}"
end
# :gen_server exit reasons
defp format_exit_reason({:already_started, pid}) do
"already started: " <> inspect(pid)
end
defp format_exit_reason({:bad_return_value, value}) do
"bad return value: " <> inspect(value)
end
defp format_exit_reason({:bad_call, request}) do
"bad call: " <> inspect(request)
end
defp format_exit_reason({:bad_cast, request}) do
"bad cast: " <> inspect(request)
end
# :supervisor.start_link error reasons
# If value is a list will be formatted by mfa exit in format_exit/1
defp format_exit_reason({:bad_return, {mod, :init, value}})
when is_atom(mod) do
format_mfa(mod, :init, 1) <> " returned a bad value: " <> inspect(value)
end
defp format_exit_reason({:bad_start_spec, start_spec}) do
"bad start spec: invalid children: " <> inspect(start_spec)
end
defp format_exit_reason({:start_spec, start_spec}) do
"bad start spec: " <> format_sup_spec(start_spec)
end
defp format_exit_reason({:supervisor_data, data}) do
"bad supervisor data: " <> format_sup_data(data)
end
defp format_exit_reason(reason), do: inspect(reason)
defp format_start_child(child, reason, joiner) do
"shutdown: failed to start child: " <> inspect(child) <> joiner <>
"** (EXIT) " <> format_exit(reason, joiner <> <<" ">>)
end
defp format_sup_data({:invalid_type, type}) do
"invalid type: " <> inspect(type)
end
defp format_sup_data({:invalid_strategy, strategy}) do
"invalid strategy: " <> inspect(strategy)
end
defp format_sup_data({:invalid_intensity, intensity}) do
"invalid intensity: " <> inspect(intensity)
end
defp format_sup_data({:invalid_period, period}) do
"invalid period: " <> inspect(period)
end
defp format_sup_data(other), do: inspect(other)
defp format_sup_spec({:invalid_child_spec, child_spec}) do
"invalid child spec: " <> inspect(child_spec)
end
defp format_sup_spec({:invalid_child_type, type}) do
"invalid child type: " <> inspect(type)
end
defp format_sup_spec({:invalid_mfa, mfa}) do
"invalid mfa: " <> inspect(mfa)
end
defp format_sup_spec({:invalid_restart_type, restart}) do
"invalid restart type: " <> inspect(restart)
end
defp format_sup_spec({:invalid_shutdown, shutdown}) do
"invalid shutdown: " <> inspect(shutdown)
end
defp format_sup_spec({:invalid_module, mod}) do
"invalid module: " <> inspect(mod)
end
defp format_sup_spec({:invalid_modules, modules}) do
"invalid modules: " <> inspect(modules)
end
defp format_sup_spec(other), do: inspect(other)
@doc """
Receives a stacktrace entry and formats it into a string.
"""
@spec format_stacktrace_entry(stacktrace_entry) :: String.t
def format_stacktrace_entry(entry)
# From Macro.Env.stacktrace
def format_stacktrace_entry({module, :__MODULE__, 0, location}) do
format_location(location) <> inspect(module) <> " (module)"
end
# From :elixir_compiler_*
def format_stacktrace_entry({_module, :__MODULE__, 1, location}) do
format_location(location) <> "(module)"
end
# From :elixir_compiler_*
def format_stacktrace_entry({_module, :__FILE__, 1, location}) do
format_location(location) <> "(file)"
end
def format_stacktrace_entry({module, fun, arity, location}) do
format_application(module) <> format_location(location) <> format_mfa(module, fun, arity)
end
def format_stacktrace_entry({fun, arity, location}) do
format_location(location) <> format_fa(fun, arity)
end
defp format_application(module) do
# We cannot use Application due to bootstrap issues
case :application.get_application(module) do
{:ok, app} -> "(" <> Atom.to_string(app) <> ") "
:undefined -> ""
end
end
@doc """
Formats the stacktrace.
A stacktrace must be given as an argument. If not, the stacktrace
is retrieved from `Process.info/2`.
"""
def format_stacktrace(trace \\ nil) do
trace = trace || case Process.info(self(), :current_stacktrace) do
{:current_stacktrace, t} -> Enum.drop(t, 3)
end
case trace do
[] -> "\n"
_ -> " " <> Enum.map_join(trace, "\n ", &format_stacktrace_entry(&1)) <> "\n"
end
end
@doc """
Receives an anonymous function and arity and formats it as
shown in stacktraces. The arity may also be a list of arguments.
## Examples
Exception.format_fa(fn -> nil end, 1)
#=> "#Function<...>/1"
"""
def format_fa(fun, arity) when is_function(fun) do
"#{inspect fun}#{format_arity(arity)}"
end
@doc """
Receives a module, fun and arity and formats it
as shown in stacktraces. The arity may also be a list
of arguments.
## Examples
iex> Exception.format_mfa Foo, :bar, 1
"Foo.bar/1"
iex> Exception.format_mfa Foo, :bar, []
"Foo.bar()"
iex> Exception.format_mfa nil, :bar, []
"nil.bar()"
Anonymous functions are reported as -func/arity-anonfn-count-,
where func is the name of the enclosing function. Convert to
"anonymous fn in func/arity"
"""
def format_mfa(module, fun, arity) when is_atom(module) and is_atom(fun) do
fun =
case inspect(fun) do
":" <> fun -> fun
fun -> fun
end
case match?("\"-" <> _, fun) and String.split(fun, "-") do
[ "\"", outer_fun, "fun", _count, "\"" ] ->
"anonymous fn#{format_arity(arity)} in #{inspect module}.#{outer_fun}"
_ ->
"#{inspect module}.#{fun}#{format_arity(arity)}"
end
end
defp format_arity(arity) when is_list(arity) do
inspected = for x <- arity, do: inspect(x)
"(#{Enum.join(inspected, ", ")})"
end
defp format_arity(arity) when is_integer(arity) do
"/" <> Integer.to_string(arity)
end
@doc """
Formats the given `file` and `line` as shown in stacktraces.
If any of the values are `nil`, they are omitted.
## Examples
iex> Exception.format_file_line("foo", 1)
"foo:1:"
iex> Exception.format_file_line("foo", nil)
"foo:"
iex> Exception.format_file_line(nil, nil)
""
"""
def format_file_line(file, line, suffix \\ "") do
if file do
if line && line != 0 do
"#{file}:#{line}:#{suffix}"
else
"#{file}:#{suffix}"
end
else
""
end
end
defp format_location(opts) when is_list(opts) do
format_file_line Keyword.get(opts, :file), Keyword.get(opts, :line), " "
end
end
# Some exceptions implement "message/1" instead of "exception/1" mostly
# for bootstrap reasons. It is recommended for applications to implement
# "exception/1" instead of "message/1" as described in "defexception/1"
# docs.
defmodule RuntimeError do
defexception message: "runtime error"
end
defmodule ArgumentError do
defexception message: "argument error"
end
defmodule ArithmeticError do
defexception message: "bad argument in arithmetic expression"
end
defmodule SystemLimitError do
defexception []
def message(_) do
"a system limit has been reached"
end
end
defmodule SyntaxError do
defexception [:file, :line, description: "syntax error"]
def message(exception) do
Exception.format_file_line(Path.relative_to_cwd(exception.file), exception.line) <>
" " <> exception.description
end
end
defmodule TokenMissingError do
defexception [:file, :line, description: "expression is incomplete"]
def message(%{file: file, line: line, description: description}) do
Exception.format_file_line(file && Path.relative_to_cwd(file), line) <>
" " <> description
end
end
defmodule CompileError do
defexception [:file, :line, description: "compile error"]
def message(%{file: file, line: line, description: description}) do
Exception.format_file_line(file && Path.relative_to_cwd(file), line) <>
" " <> description
end
end
defmodule BadFunctionError do
defexception [:term]
def message(exception) do
"expected a function, got: #{inspect(exception.term)}"
end
end
defmodule BadStructError do
defexception [:struct, :term]
def message(exception) do
"expected a struct named #{inspect(exception.struct)}, got: #{inspect(exception.term)}"
end
end
defmodule BadMapError do
defexception [:term]
def message(exception) do
"expected a map, got: #{inspect(exception.term)}"
end
end
defmodule MatchError do
defexception [:term]
def message(exception) do
"no match of right hand side value: #{inspect(exception.term)}"
end
end
defmodule CaseClauseError do
defexception [:term]
def message(exception) do
"no case clause matching: #{inspect(exception.term)}"
end
end
defmodule WithClauseError do
defexception [:term]
def message(exception) do
"no with clause matching: #{inspect(exception.term)}"
end
end
defmodule CondClauseError do
defexception []
def message(_exception) do
"no cond clause evaluated to a true value"
end
end
defmodule TryClauseError do
defexception [:term]
def message(exception) do
"no try clause matching: #{inspect(exception.term)}"
end
end
defmodule BadArityError do
defexception [:function, :args]
def message(exception) do
fun = exception.function
args = exception.args
insp = Enum.map_join(args, ", ", &inspect/1)
{:arity, arity} = :erlang.fun_info(fun, :arity)
"#{inspect(fun)} with arity #{arity} called with #{count(length(args), insp)}"
end
defp count(0, _insp), do: "no arguments"
defp count(1, insp), do: "1 argument (#{insp})"
defp count(x, insp), do: "#{x} arguments (#{insp})"
end
defmodule UndefinedFunctionError do
defexception [:module, :function, :arity, :reason, :exports]
def message(%{reason: nil, module: module, function: function, arity: arity} = e) do
cond do
is_nil(function) or is_nil(arity) ->
"undefined function"
not is_nil(module) and :code.is_loaded(module) === false ->
message(%{e | reason: :"module could not be loaded"})
true ->
message(%{e | reason: :"function not exported"})
end
end
def message(%{reason: :"module could not be loaded", module: module, function: function, arity: arity}) do
"function " <> Exception.format_mfa(module, function, arity) <>
" is undefined (module #{inspect module} is not available)"
end
def message(%{reason: :"function not exported", module: module, function: function, arity: arity, exports: exports}) do
suffix =
if macro_exported?(module, function, arity) do
" but #{inspect(module)} defines a macro with the same name and arity." <>
" Be sure to require #{inspect(module)} if you intend to invoke this macro"
else
did_you_mean(module, function, arity, exports)
end
"function " <>
Exception.format_mfa(module, function, arity) <>
" is undefined or private" <>
suffix
end
def message(%{reason: :"function not available", module: module, function: function, arity: arity}) do
"nil." <> fa = Exception.format_mfa(nil, function, arity)
"function " <> Exception.format_mfa(module, function, arity) <>
" is undefined (function #{fa} is not available)"
end
def message(%{reason: reason, module: module, function: function, arity: arity}) do
"function " <> Exception.format_mfa(module, function, arity) <> " is undefined (#{reason})"
end
@function_threshold 0.77
@max_suggestions 5
defp did_you_mean(module, function, _arity, exports) do
exports = exports || exports_for(module)
result =
case Keyword.take(exports, [function]) do
[] ->
base = Atom.to_string(function)
for {key, val} <- exports,
dist = String.jaro_distance(base, Atom.to_string(key)),
dist >= @function_threshold,
do: {dist, key, val}
arities ->
for {key, val} <- arities, do: {1.0, key, val}
end
|> Enum.sort(&elem(&1, 0) >= elem(&2, 0))
|> Enum.take(@max_suggestions)
|> Enum.sort(&elem(&1, 1) <= elem(&2, 1))
case result do
[] -> ""
suggestions -> ". Did you mean one of:\n\n#{Enum.map(suggestions, &format_fa/1)}"
end
end
defp format_fa({_dist, fun, arity}) do
fun = with ":" <> fun <- inspect(fun), do: fun
" * " <> fun <> "/" <> Integer.to_string(arity) <> "\n"
end
defp exports_for(module) do
if function_exported?(module, :__info__, 1) do
module.__info__(:macros) ++ module.__info__(:functions)
else
module.module_info(:exports)
end
rescue
# In case the module was removed while we are computing this
UndefinedFunctionError -> []
end
end
defmodule FunctionClauseError do
defexception [:module, :function, :arity]
def message(exception) do
if exception.function do
formatted = Exception.format_mfa exception.module, exception.function, exception.arity
"no function clause matching in #{formatted}"
else
"no function clause matches"
end
end
end
defmodule Code.LoadError do
defexception [:file, :message]
def exception(opts) do
file = Keyword.fetch!(opts, :file)
%Code.LoadError{message: "could not load #{file}", file: file}
end
end
defmodule Protocol.UndefinedError do
defexception [:protocol, :value, description: ""]
def message(exception) do
msg = "protocol #{inspect exception.protocol} not implemented for #{inspect exception.value}"
case exception.description do
"" -> msg
descr -> msg <> ", " <> descr
end
end
end
defmodule KeyError do
defexception [:key, :term]
def message(exception) do
msg = "key #{inspect exception.key} not found"
if exception.term != nil do
msg <> " in: #{inspect exception.term}"
else
msg
end
end
end
defmodule UnicodeConversionError do
defexception [:encoded, :message]
def exception(opts) do
%UnicodeConversionError{
encoded: Keyword.fetch!(opts, :encoded),
message: "#{Keyword.fetch!(opts, :kind)} #{detail Keyword.fetch!(opts, :rest)}"
}
end
defp detail(rest) when is_binary(rest) do
"encoding starting at #{inspect rest}"
end
defp detail([h | _]) when is_integer(h) do
"code point #{h}"
end
defp detail([h | _]) do
detail(h)
end
end
defmodule Enum.OutOfBoundsError do
defexception message: "out of bounds error"
end
defmodule Enum.EmptyError do
defexception message: "empty error"
end
defmodule File.Error do
defexception [:reason, :path, action: ""]
def message(%{action: action, reason: reason, path: path}) do
formatted =
case {action, reason} do
{"remove directory", :eexist} ->
"directory is not empty"
_ ->
IO.iodata_to_binary(:file.format_error(reason))
end
"could not #{action} #{inspect(path)}: #{formatted}"
end
end
defmodule File.CopyError do
defexception [:reason, :source, :destination, on: "", action: ""]
def message(exception) do
formatted =
IO.iodata_to_binary(:file.format_error(exception.reason))
location =
case exception.on do
"" -> ""
on -> ". #{on}"
end
"could not #{exception.action} from #{inspect(exception.source)} to " <>
"#{inspect(exception.destination)}#{location}: #{formatted}"
end
end
defmodule ErlangError do
defexception [:original]
def message(exception) do
"erlang error: #{inspect(exception.original)}"
end
@doc false
def normalize(:badarg, _stacktrace) do
%ArgumentError{}
end
def normalize(:badarith, _stacktrace) do
%ArithmeticError{}
end
def normalize(:system_limit, _stacktrace) do
%SystemLimitError{}
end
def normalize(:cond_clause, _stacktrace) do
%CondClauseError{}
end
def normalize({:badarity, {fun, args}}, _stacktrace) do
%BadArityError{function: fun, args: args}
end
def normalize({:badfun, term}, _stacktrace) do
%BadFunctionError{term: term}
end
def normalize({:badstruct, struct, term}, _stacktrace) do
%BadStructError{struct: struct, term: term}
end
def normalize({:badmatch, term}, _stacktrace) do
%MatchError{term: term}
end
def normalize({:badmap, term}, _stacktrace) do
%BadMapError{term: term}
end
def normalize({:badkey, key}, stacktrace) do
term =
case stacktrace || :erlang.get_stacktrace do
[{Map, :get_and_update!, [map, _, _], _} | _] -> map
[{Map, :update!, [map, _, _], _} | _] -> map
[{:maps, :update, [_, _, map], _} | _] -> map
[{:maps, :get, [_, map], _} | _] -> map
_ -> nil
end
%KeyError{key: key, term: term}
end
def normalize({:badkey, key, map}, _stacktrace) do
%KeyError{key: key, term: map}
end
def normalize({:case_clause, term}, _stacktrace) do
%CaseClauseError{term: term}
end
def normalize({:with_clause, term}, _stacktrace) do
%WithClauseError{term: term}
end
def normalize({:try_clause, term}, _stacktrace) do
%TryClauseError{term: term}
end
def normalize(:undef, stacktrace) do
stacktrace = stacktrace || :erlang.get_stacktrace
{mod, fun, arity} = from_stacktrace(stacktrace)
%UndefinedFunctionError{module: mod, function: fun, arity: arity}
end
def normalize(:function_clause, stacktrace) do
{mod, fun, arity} = from_stacktrace(stacktrace || :erlang.get_stacktrace)
%FunctionClauseError{module: mod, function: fun, arity: arity}
end
def normalize({:badarg, payload}, _stacktrace) do
%ArgumentError{message: "argument error: #{inspect(payload)}"}
end
def normalize(other, _stacktrace) do
%ErlangError{original: other}
end
defp from_stacktrace([{module, function, args, _} | _]) when is_list(args) do
{module, function, length(args)}
end
defp from_stacktrace([{module, function, arity, _} | _]) do
{module, function, arity}
end
defp from_stacktrace(_) do
{nil, nil, nil}
end
end
avg_line_length: 28.176598 | max_line_length: 153 | alphanum_fraction: 0.666897

hexsha: f7dd85a728a6883276392b5f6637b7f2603cc88f | size: 198 | ext: exs | lang: Elixir | path: test/phx_pow_web/controllers/page_controller_test.exs | repo: brandedux/phoenix_authentication | repo head: a1a5758532a4ece73bfc7319f8c9235b070bd1fe | licenses: ["MIT"] | max_stars_count: 2 (2019-09-25T22:02:59.000Z to 2019-12-18T22:33:34.000Z) | max_issues_count: null | max_forks_count: null | content:

defmodule PhxPowWeb.PageControllerTest do
use PhxPowWeb.ConnCase
test "GET /", %{conn: conn} do
conn = get(conn, "/")
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
avg_line_length: 22 | max_line_length: 60 | alphanum_fraction: 0.676768

hexsha: f7dd89eec3dd071c4317e5ef4396e3982e3dfcd9 | size: 1,262 | ext: ex | lang: Elixir | path: example/lib/example_web/endpoint.ex | repo: revelrylabs/phoenix_harmonium | repo head: 41aa64f53fda8af5db0882040e5508d4409996b1 | licenses: ["MIT"] | max_stars_count: 10 (2018-10-03T17:01:35.000Z to 2021-12-29T21:13:35.000Z) | max_issues_count: 243 (2018-06-13T18:42:35.000Z to 2022-03-15T17:18:08.000Z) | max_forks_count: 3 (2019-01-08T19:09:28.000Z to 2020-05-11T21:01:57.000Z) | content:

defmodule ExampleWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :example
socket "/socket", ExampleWeb.UserSocket,
websocket: true,
longpoll: false
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :example,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
end
plug Plug.RequestId
plug Plug.Logger
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
plug Plug.Session,
store: :cookie,
key: "_example_key",
signing_salt: "1H/oIyPc"
plug ExampleWeb.Router
end
avg_line_length: 26.851064 | max_line_length: 69 | alphanum_fraction: 0.708399

hexsha: f7ddb9518aef4f5298d94c58182472e11867ed77 | size: 3,830 | ext: ex | lang: Elixir | path: clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/report_floodlight_criteria.ex | repo: medikent/elixir-google-api | repo head: 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | licenses: ["Apache-2.0"] | max_stars_count: null | max_issues_count: null | max_forks_count: null | content:

# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V33.Model.ReportFloodlightCriteria do
@moduledoc """
The report criteria for a report of type "FLOODLIGHT".
## Attributes
* `customRichMediaEvents` (*type:* `list(GoogleApi.DFAReporting.V33.Model.DimensionValue.t)`, *default:* `nil`) - The list of custom rich media events to include.
* `dateRange` (*type:* `GoogleApi.DFAReporting.V33.Model.DateRange.t`, *default:* `nil`) - The date range this report should be run for.
* `dimensionFilters` (*type:* `list(GoogleApi.DFAReporting.V33.Model.DimensionValue.t)`, *default:* `nil`) - The list of filters on which dimensions are filtered.
Filters for different dimensions are ANDed, filters for the same dimension are grouped together and ORed.
* `dimensions` (*type:* `list(GoogleApi.DFAReporting.V33.Model.SortedDimension.t)`, *default:* `nil`) - The list of dimensions the report should include.
* `floodlightConfigId` (*type:* `GoogleApi.DFAReporting.V33.Model.DimensionValue.t`, *default:* `nil`) - The floodlight ID for which to show data in this report. All advertisers associated with that ID will automatically be added. The dimension of the value needs to be 'dfa:floodlightConfigId'.
* `metricNames` (*type:* `list(String.t)`, *default:* `nil`) - The list of names of metrics the report should include.
* `reportProperties` (*type:* `GoogleApi.DFAReporting.V33.Model.ReportFloodlightCriteriaReportProperties.t`, *default:* `nil`) - The properties of the report.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:customRichMediaEvents => list(GoogleApi.DFAReporting.V33.Model.DimensionValue.t()),
:dateRange => GoogleApi.DFAReporting.V33.Model.DateRange.t(),
:dimensionFilters => list(GoogleApi.DFAReporting.V33.Model.DimensionValue.t()),
:dimensions => list(GoogleApi.DFAReporting.V33.Model.SortedDimension.t()),
:floodlightConfigId => GoogleApi.DFAReporting.V33.Model.DimensionValue.t(),
:metricNames => list(String.t()),
:reportProperties =>
GoogleApi.DFAReporting.V33.Model.ReportFloodlightCriteriaReportProperties.t()
}
field(:customRichMediaEvents, as: GoogleApi.DFAReporting.V33.Model.DimensionValue, type: :list)
field(:dateRange, as: GoogleApi.DFAReporting.V33.Model.DateRange)
field(:dimensionFilters, as: GoogleApi.DFAReporting.V33.Model.DimensionValue, type: :list)
field(:dimensions, as: GoogleApi.DFAReporting.V33.Model.SortedDimension, type: :list)
field(:floodlightConfigId, as: GoogleApi.DFAReporting.V33.Model.DimensionValue)
field(:metricNames, type: :list)
field(:reportProperties,
as: GoogleApi.DFAReporting.V33.Model.ReportFloodlightCriteriaReportProperties
)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V33.Model.ReportFloodlightCriteria do
def decode(value, options) do
GoogleApi.DFAReporting.V33.Model.ReportFloodlightCriteria.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V33.Model.ReportFloodlightCriteria do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
avg_line_length: 54.714286 | max_line_length: 299 | alphanum_fraction: 0.74752

hexsha: f7dde6f42ca773ead48c071af5f1f261b1586260 | size: 1,899 | ext: ex | lang: Elixir | path: lib/mix/tasks/phoenix/pow.phoenix.gen.templates.ex | repo: abartier/pow | repo head: 58a3d082da093e2dc7f07825a950ee133204813f | licenses: ["Unlicense", "MIT"] | max_stars_count: null | max_issues_count: null | max_forks_count: null | content:

defmodule Mix.Tasks.Pow.Phoenix.Gen.Templates do
@shortdoc "Generates Pow views and templates"
@moduledoc """
Generates pow templates for Phoenix.
mix pow.phoenix.gen.templates
## Arguments
* `--context-app MyApp` app to use for path and module names
"""
use Mix.Task
alias Mix.{Pow, Pow.Phoenix}
@switches [context_app: :string]
@default_opts []
@mix_task "pow.phoenix.gen.templates"
@doc false
def run(args) do
Pow.no_umbrella!(@mix_task)
Pow.ensure_phoenix!(@mix_task, args)
args
|> Pow.parse_options(@switches, @default_opts)
|> create_template_files()
|> print_shell_instructions()
end
@templates [
{"registration", ~w(new edit)},
{"session", ~w(new)}
]
defp create_template_files({config, _parsed, _invalid}) do
structure = Phoenix.parse_structure(config)
context_base = structure[:context_base]
web_module = structure[:web_module]
web_prefix = structure[:web_prefix]
Enum.each(@templates, fn {name, actions} ->
Phoenix.create_view_file(Elixir.Pow, name, web_module, web_prefix)
Phoenix.create_templates(Elixir.Pow, name, web_prefix, actions)
end)
%{context_base: context_base, web_module: web_module}
end
defp print_shell_instructions(%{context_base: context_base, web_module: web_base}) do
Mix.shell.info("""
Pow Phoenix templates and views has been generated.
Please set `web_module: #{inspect(web_base)}` in your configuration.
defmodule #{inspect(web_base)}.Endpoint do
use #{inspect(web_base)}.Endpoint, otp_app: :#{Macro.underscore(context_base)}
# ...
plug #{inspect(web_base)}.Pow.Plug.Session,
repo: #{inspect(context_base)}.Repo,
user: #{inspect(context_base)}.Users.User,
web_module: #{inspect(web_base)}
# ...
end
""")
end
end
avg_line_length: 26.375 | max_line_length: 88 | alphanum_fraction: 0.662454

hexsha: f7ddee57a2b30bc5fd0527e0dcd912c76fc62808 | size: 1,934 | ext: exs | lang: Elixir | path: test/app_env_test.exs | repo: nate/app_env | repo head: a556125a6e3c9661dea0a041e15910137fbf8276 | licenses: ["MIT"] | max_stars_count: null | max_issues_count: null | max_forks_count: null | content:

defmodule AppEnvTest do
use ExUnit.Case
# doctest AppEnv
setup do
Application.put_env(:app_env, :config, %{"foo" => "bar"})
System.put_env("FOO", "1")
end
test "has a config value" do
assert Application.get_env(:app_env, :config) == %{"foo" => "bar"}
end
test "copies a new value" do
:ok = AppEnv.copy("FOO", :app_env, :config)
assert Application.get_env(:app_env, :config) == "1"
end
test "copies a new value with a custom merge function" do
:ok =
AppEnv.copy("FOO", :app_env, :config, fn old_value, env_value ->
{new_value, _} = Integer.parse(env_value)
{:ok, Map.put(old_value, "foo", new_value)}
end)
assert Application.get_env(:app_env, :config) == %{"foo" => 1}
end
test "copies a new value to a path" do
:ok = AppEnv.copy_to("FOO", :app_env, :config, ["foo"])
assert Application.get_env(:app_env, :config) == %{"foo" => "1"}
end
test "copies a new value to a path with a custom format function" do
:ok =
AppEnv.copy_to("FOO", :app_env, :config, ["foo"], fn env_value ->
{new_value, _} = Integer.parse(env_value)
{:ok, new_value}
end)
assert Application.get_env(:app_env, :config) == %{"foo" => 1}
end
test "handles errors in copying" do
{:error, "error doing things"} =
AppEnv.copy("FOO", :app_env, :config, fn _old_value, _env_value ->
"error doing things"
end)
{:error, "another error doing things"} =
AppEnv.copy("FOO", :app_env, :config, fn _old_value, _env_value ->
{:error, "another error doing things"}
end)
{:error, "error doing things"} =
AppEnv.copy_to("FOO", :app_env, :config, ["foo"], fn _env_value -> "error doing things" end)
{:error, "another error doing things"} =
AppEnv.copy_to("FOO", :app_env, :config, ["foo"], fn _env_value ->
{:error, "another error doing things"}
end)
end
end
avg_line_length: 30.21875 | max_line_length: 98 | alphanum_fraction: 0.608583

hexsha: f7ddff8ec5f7286b86391f37e909e1785fc6236d | size: 1,023 | ext: ex | lang: Elixir | path: lib/phlearn/application.ex | repo: orneryhippo/phlearn | repo head: 7ef72120c8d4719ef90809f16907b5e98d6c54b1 | licenses: ["MIT"] | max_stars_count: null | max_issues_count: null | max_forks_count: null | content:

defmodule Phlearn.Application do
use Application
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the Ecto repository
supervisor(Phlearn.Repo, []),
# Start the endpoint when the application starts
supervisor(PhlearnWeb.Endpoint, []),
# Start your own worker by calling: Phlearn.Worker.start_link(arg1, arg2, arg3)
# worker(Phlearn.Worker, [arg1, arg2, arg3]),
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Phlearn.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
PhlearnWeb.Endpoint.config_change(changed, removed)
:ok
end
end
avg_line_length: 31.96875 | max_line_length: 69 | alphanum_fraction: 0.713587

hexsha: f7de2ff36a5cff78c55fae98fd582cb107e2569e | size: 3,166 | ext: ex | lang: Elixir | path: apps/andi/lib/andi/input_schemas/input_converter.ex | repo: SmartColumbusOS/smartcitiesdata | repo head: c8553d34631c822b034945eebf396994bf1001ff | licenses: ["Apache-2.0"] | max_stars_count: 1 (2021-04-05T19:17:18.000Z) | max_issues_count: 11 (2020-01-07T15:43:42.000Z to 2020-12-22T15:23:25.000Z) | max_forks_count: null | content:

defmodule Andi.InputSchemas.InputConverter do
@moduledoc """
Used to convert between SmartCity.Datasets, form data (defined by Andi.InputSchemas.DatasetInput), and Ecto.Changesets.
"""
alias SmartCity.Dataset
alias Andi.InputSchemas.DatasetInput
@type dataset :: map() | Dataset.t()
@spec changeset_from_dataset(dataset) :: Ecto.Changeset.t()
def changeset_from_dataset(dataset) do
%{id: id, business: business, technical: technical} = AtomicMap.convert(dataset, safe: false, underscore: false)
from_business = get_business(business) |> fix_modified_date()
from_technical = get_technical(technical)
%{id: id}
|> Map.merge(from_business)
|> Map.merge(from_technical)
|> DatasetInput.full_validation_changeset()
end
@spec changeset_from_dataset(Dataset.t(), map()) :: Ecto.Changeset.t()
def changeset_from_dataset(%SmartCity.Dataset{} = original_dataset, changes) do
form_data_with_atom_keys = AtomicMap.convert(changes, safe: false, underscore: false)
original_dataset_flattened =
original_dataset
|> changeset_from_dataset()
|> Ecto.Changeset.apply_changes()
all_changes = Map.merge(original_dataset_flattened, form_data_with_atom_keys)
all_changes
|> adjust_form_input()
|> DatasetInput.full_validation_changeset()
end
@spec form_changeset(map()) :: Ecto.Changeset.t()
def form_changeset(params \\ %{}) do
params
|> adjust_form_input()
|> DatasetInput.light_validation_changeset()
end
defp adjust_form_input(params) do
params
|> AtomicMap.convert(safe: false, underscore: false)
|> Map.update(:keywords, nil, &keywords_to_list/1)
|> fix_modified_date()
end
@spec restruct(map(), Dataset.t()) :: Dataset.t()
def restruct(changes, dataset) do
formatted_changes =
changes
|> Map.update(:issuedDate, nil, &date_to_iso8601_datetime/1)
|> Map.update(:modifiedDate, nil, &date_to_iso8601_datetime/1)
business = Map.merge(dataset.business, get_business(formatted_changes)) |> Map.from_struct()
technical = Map.merge(dataset.technical, get_technical(formatted_changes)) |> Map.from_struct()
%{}
|> Map.put(:id, dataset.id)
|> Map.put(:business, business)
|> Map.put(:technical, technical)
|> SmartCity.Dataset.new()
|> (fn {:ok, dataset} -> dataset end).()
end
defp get_business(map) when is_map(map) do
Map.take(map, DatasetInput.business_keys())
end
defp get_technical(map) when is_map(map) do
Map.take(map, DatasetInput.technical_keys())
end
defp keywords_to_list(nil), do: []
defp keywords_to_list(""), do: []
defp keywords_to_list(keywords) when is_binary(keywords) do
keywords
|> String.split(", ")
|> Enum.map(&String.trim/1)
end
defp keywords_to_list(keywords) when is_list(keywords), do: keywords
defp date_to_iso8601_datetime(date) do
time_const = "00:00:00Z"
"#{Date.to_iso8601(date)} #{time_const}"
end
defp fix_modified_date(map) do
map
|> Map.get_and_update(:modifiedDate, fn
"" -> {"", nil}
current_value -> {current_value, current_value}
end)
|> elem(1)
end
end
avg_line_length: 29.867925 | max_line_length: 121 | alphanum_fraction: 0.697726

hexsha: f7de37494bf64859a32cf2157aabd136a60800ef | size: 212 | ext: exs | lang: Elixir | path: priv/repo/migrations/20180914060034_add_role_to_user.exs | repo: TDogVoid/job_board | repo head: 23793917bd1cc4e68bccce737b971093030a31eb | licenses: ["MIT"] | max_stars_count: null | max_issues_count: null | max_forks_count: null | content:

defmodule JobBoard.Repo.Migrations.AddRoleToUser do
use Ecto.Migration
def change do
alter table(:users) do
add :role_id, references(:roles)
end
create index(:users, [:role_id])
end
end
avg_line_length: 17.666667 | max_line_length: 51 | alphanum_fraction: 0.693396

hexsha: f7de78ffa0e1ac881fa76515a2b9e11feb297dc2 | size: 533 | ext: ex | lang: Elixir | path: lib/usir/server.ex | repo: usir/usir_ex | repo head: 059cc1679dcdc9defcad17a4aac3492a33e529e5 | licenses: ["MIT"] | max_stars_count: null | max_issues_count: null | max_forks_count: null | content:

defmodule Usir.Server do
alias Usir.Message.Client
@spec handle_message(Client.message) :: {:ok, atom} | :error
def handle_message(message) do
case message do
%Client.Mount{} ->
{:ok, :mount}
%Client.Unmount{} ->
{:ok, :unmount}
%Client.Authenticate{} ->
{:ok, :authenticate}
%Client.Action{} ->
{:ok, :action}
%Client.CallResponse{} ->
{:ok, :response}
%Client.CallError{} ->
{:ok, :response}
_ ->
:error
end
end
end
avg_line_length: 22.208333 | max_line_length: 62 | alphanum_fraction: 0.532833

hexsha: f7de9343595111b4cf6cc51cb3310a7ec149c8dc | size: 6,526 | ext: exs | lang: Elixir | path: test/plausible_web/controllers/auth_controller_test.exs | repo: wvffle/analytics | repo head: 2c0fd55bc67f74af1fe1e2641678d44e9fee61d5 | licenses: ["MIT"] | max_stars_count: 1 (2020-10-08T13:33:04.000Z) | max_issues_count: null | max_forks_count: null | content:

defmodule PlausibleWeb.AuthControllerTest do
use PlausibleWeb.ConnCase
use Bamboo.Test
import Plausible.TestUtils
describe "GET /register" do
test "shows the register form", %{conn: conn} do
conn = get(conn, "/register")
assert html_response(conn, 200) =~ "Enter your details"
end
test "registering sends an activation link", %{conn: conn} do
post(conn, "/register",
user: %{
name: "Jane Doe",
email: "[email protected]"
}
)
assert_email_delivered_with(subject: "Activate your Plausible free trial")
end
test "user sees success page after registering", %{conn: conn} do
conn =
post(conn, "/register",
user: %{
name: "Jane Doe",
email: "[email protected]"
}
)
assert html_response(conn, 200) =~ "Success!"
end
end
describe "GET /claim-activation" do
test "creates the user", %{conn: conn} do
token = Plausible.Auth.Token.sign_activation("Jane Doe", "[email protected]")
get(conn, "/claim-activation?token=#{token}")
assert Plausible.Auth.find_user_by(email: "[email protected]")
end
test "sends the welcome email", %{conn: conn} do
token = Plausible.Auth.Token.sign_activation("Jane Doe", "[email protected]")
get(conn, "/claim-activation?token=#{token}")
assert_email_delivered_with(subject: "Welcome to Plausible")
end
test "redirects new user to create a password", %{conn: conn} do
token = Plausible.Auth.Token.sign_activation("Jane Doe", "[email protected]")
conn = get(conn, "/claim-activation?token=#{token}")
assert redirected_to(conn) == "/password"
end
test "shows error when user with that email already exists", %{conn: conn} do
token = Plausible.Auth.Token.sign_activation("Jane Doe", "[email protected]")
conn = get(conn, "/claim-activation?token=#{token}")
conn = get(conn, "/claim-activation?token=#{token}")
assert conn.status == 400
end
end
describe "GET /login_form" do
test "shows the login form", %{conn: conn} do
conn = get(conn, "/login")
assert html_response(conn, 200) =~ "Enter your email and password"
end
end
describe "POST /login" do
test "valid email and password - logs the user in", %{conn: conn} do
user = insert(:user, password: "password")
conn = post(conn, "/login", email: user.email, password: "password")
assert get_session(conn, :current_user_id) == user.id
assert redirected_to(conn) == "/sites"
end
test "email does not exist - renders login form again", %{conn: conn} do
conn = post(conn, "/login", email: "[email protected]", password: "password")
assert get_session(conn, :current_user_id) == nil
assert html_response(conn, 200) =~ "Enter your email and password"
end
test "bad password - renders login form again", %{conn: conn} do
user = insert(:user, password: "password")
conn = post(conn, "/login", email: user.email, password: "wrong")
assert get_session(conn, :current_user_id) == nil
assert html_response(conn, 200) =~ "Enter your email and password"
end
end
describe "GET /password/request-reset" do
test "renders the form", %{conn: conn} do
conn = get(conn, "/password/request-reset")
assert html_response(conn, 200) =~ "Enter your email so we can send a password reset link"
end
end
describe "POST /password/request-reset" do
test "email is empty - renders form with error", %{conn: conn} do
conn = post(conn, "/password/request-reset", %{email: ""})
assert html_response(conn, 200) =~ "Enter your email so we can send a password reset link"
end
test "email is present and exists - sends password reset email", %{conn: conn} do
user = insert(:user)
conn = post(conn, "/password/request-reset", %{email: user.email})
assert html_response(conn, 200) =~ "Success!"
assert_email_delivered_with(subject: "Plausible password reset")
end
end
describe "GET /password/reset" do
test "with valid token - shows form", %{conn: conn} do
token = Plausible.Auth.Token.sign_password_reset("[email protected]")
conn = get(conn, "/password/reset", %{token: token})
assert html_response(conn, 200) =~ "Reset your password"
end
test "with invalid token - shows error page", %{conn: conn} do
conn = get(conn, "/password/reset", %{token: "blabla"})
assert html_response(conn, 401) =~ "Your token is invalid"
end
end
describe "POST /password/reset" do
alias Plausible.Auth.{User, Token, Password}
test "with valid token - resets the password", %{conn: conn} do
user = insert(:user)
token = Token.sign_password_reset(user.email)
post(conn, "/password/reset", %{token: token, password: "new-password"})
user = Plausible.Repo.get(User, user.id)
assert Password.match?("new-password", user.password_hash)
end
end
describe "GET /settings" do
setup [:create_user, :log_in]
test "shows the form", %{conn: conn} do
conn = get(conn, "/settings")
assert html_response(conn, 200) =~ "Account settings"
end
test "shows subscription", %{conn: conn, user: user} do
insert(:subscription, paddle_plan_id: "558018", user: user)
conn = get(conn, "/settings")
assert html_response(conn, 200) =~ "10k pageviews"
assert html_response(conn, 200) =~ "monthly billing"
end
end
describe "PUT /settings" do
setup [:create_user, :log_in]
test "updates user record", %{conn: conn, user: user} do
put(conn, "/settings", %{"user" => %{"name" => "New name"}})
user = Plausible.Repo.get(Plausible.Auth.User, user.id)
assert user.name == "New name"
end
end
describe "DELETE /me" do
setup [:create_user, :log_in, :create_site]
use Plausible.Repo
test "deletes the user", %{conn: conn, user: user, site: site} do
Repo.insert_all("intro_emails", [
%{
user_id: user.id,
timestamp: NaiveDateTime.utc_now()
}
])
Repo.insert_all("feedback_emails", [
%{
user_id: user.id,
timestamp: NaiveDateTime.utc_now()
}
])
insert(:google_auth, site: site, user: user)
insert(:subscription, user: user, status: "deleted")
conn = delete(conn, "/me")
assert redirected_to(conn) == "/"
end
end
end
| 31.52657 | 96 | 0.630708 |
f7de93da603998904781ba9101fbe874ef71ece5 | 828 | ex | Elixir | lib/tapebas_web/controllers/user_registration_controller.ex | cristineguadelupe/tapebas | 5f8c70d5ac36b2a606fe4630cc659161b2f4d7bf | [
"MIT"
] | 3 | 2022-03-24T16:48:38.000Z | 2022-03-24T16:50:04.000Z | lib/tapebas_web/controllers/user_registration_controller.ex | cristineguadelupe/tapebas | 5f8c70d5ac36b2a606fe4630cc659161b2f4d7bf | [
"MIT"
] | null | null | null | lib/tapebas_web/controllers/user_registration_controller.ex | cristineguadelupe/tapebas | 5f8c70d5ac36b2a606fe4630cc659161b2f4d7bf | [
"MIT"
] | 1 | 2022-03-20T01:11:12.000Z | 2022-03-20T01:11:12.000Z | defmodule TapebasWeb.UserRegistrationController do
use TapebasWeb, :controller
alias Tapebas.Accounts
alias Tapebas.Accounts.User
alias TapebasWeb.UserAuth
def new(conn, _params) do
changeset = Accounts.change_user_registration(%User{})
render(conn, "new.html", changeset: changeset)
end
def create(conn, %{"user" => user_params}) do
case Accounts.register_user(user_params) do
{:ok, user} ->
{:ok, _} =
Accounts.deliver_user_confirmation_instructions(
user,
&Routes.user_confirmation_url(conn, :edit, &1)
)
conn
|> put_flash(:info, "User created successfully.")
|> UserAuth.log_in_user(user)
{:error, %Ecto.Changeset{} = changeset} ->
render(conn, "new.html", changeset: changeset)
end
end
end
| 26.709677 | 58 | 0.647343 |
f7deec2563d55e97f4de669e6e1414acc64026e3 | 1,178 | ex | Elixir | lib/cforum/accounts/setting.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | null | null | null | lib/cforum/accounts/setting.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | null | null | null | lib/cforum/accounts/setting.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | null | null | null | defmodule Cforum.Accounts.Setting do
use CforumWeb, :model
@primary_key {:setting_id, :id, autogenerate: true}
@derive {Phoenix.Param, key: :setting_id}
schema "settings" do
field(:options, :map)
belongs_to(:forum, Cforum.Forums.Forum, references: :forum_id)
belongs_to(:user, Cforum.Accounts.User, references: :user_id)
end
@doc """
Builds a changeset based on the `struct` and `params`.
"""
def changeset(struct, params \\ %{}) do
struct
|> cast(params, [:options, :forum_id, :user_id])
|> remove_default_option_values()
|> validate_required([:options])
|> unique_constraint(:forum_id)
end
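  # Option values set to "_DEFAULT_" are stripped before saving, so the stored
  # options map only contains explicitly overridden settings. For example, an
  # options change of %{"theme" => "_DEFAULT_", "font" => "mono"} (illustrative
  # keys) is persisted as %{"font" => "mono"}.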
defp remove_default_option_values(%Ecto.Changeset{valid?: true} = changeset) do
case Ecto.Changeset.get_change(changeset, :options) do
nil ->
changeset
options ->
options = Enum.reduce(options, %{}, &defaults_reduce/2)
Ecto.Changeset.put_change(changeset, :options, options)
end
end
defp remove_default_option_values(changeset), do: changeset
defp defaults_reduce({_key, "_DEFAULT_"}, acc), do: acc
defp defaults_reduce({key, value}, acc), do: Map.put(acc, key, value)
end
| 29.45 | 81 | 0.683362 |
f7def801f046f85449b417d59716ec52d4dec8d8 | 3,236 | ex | Elixir | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/google_cloud_orgpolicy_v1_boolean_policy.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/google_cloud_orgpolicy_v1_boolean_policy.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/cloud_asset/lib/google_api/cloud_asset/v1/model/google_cloud_orgpolicy_v1_boolean_policy.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1BooleanPolicy do
@moduledoc """
Used in `policy_type` to specify how `boolean_policy` will behave at this resource.
## Attributes
* `enforced` (*type:* `boolean()`, *default:* `nil`) - If `true`, then the `Policy` is enforced. If `false`, then any configuration is acceptable. Suppose you have a `Constraint` `constraints/compute.disableSerialPortAccess` with `constraint_default` set to `ALLOW`. A `Policy` for that `Constraint` exhibits the following behavior: - If the `Policy` at this resource has enforced set to `false`, serial port connection attempts will be allowed. - If the `Policy` at this resource has enforced set to `true`, serial port connection attempts will be refused. - If the `Policy` at this resource is `RestoreDefault`, serial port connection attempts will be allowed. - If no `Policy` is set at this resource or anywhere higher in the resource hierarchy, serial port connection attempts will be allowed. - If no `Policy` is set at this resource, but one exists higher in the resource hierarchy, the behavior is as if the`Policy` were set at this resource. The following examples demonstrate the different possible layerings: Example 1 (nearest `Constraint` wins): `organizations/foo` has a `Policy` with: {enforced: false} `projects/bar` has no `Policy` set. The constraint at `projects/bar` and `organizations/foo` will not be enforced. Example 2 (enforcement gets replaced): `organizations/foo` has a `Policy` with: {enforced: false} `projects/bar` has a `Policy` with: {enforced: true} The constraint at `organizations/foo` is not enforced. The constraint at `projects/bar` is enforced. Example 3 (RestoreDefault): `organizations/foo` has a `Policy` with: {enforced: true} `projects/bar` has a `Policy` with: {RestoreDefault: {}} The constraint at `organizations/foo` is enforced. The constraint at `projects/bar` is not enforced, because `constraint_default` for the `Constraint` is `ALLOW`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:enforced => boolean()
}
field(:enforced)
end
defimpl Poison.Decoder, for: GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1BooleanPolicy do
def decode(value, options) do
GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1BooleanPolicy.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudAsset.V1.Model.GoogleCloudOrgpolicyV1BooleanPolicy do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 68.851064 | 1,794 | 0.761434 |
f7df0cf309057df5dbffa13d60ec0bb6c3c1af68 | 5,370 | ex | Elixir | lib/fun_land.ex | Qqwy/elixir_fun_land | 337a322ea4102e2170557dbee0a77903dba79927 | [
"MIT"
] | 47 | 2016-07-10T15:40:21.000Z | 2019-04-22T09:41:01.000Z | lib/fun_land.ex | Qqwy/elixir-fun_land | 337a322ea4102e2170557dbee0a77903dba79927 | [
"MIT"
] | 34 | 2019-08-30T05:27:17.000Z | 2022-03-23T18:22:32.000Z | lib/fun_land.ex | Qqwy/elixir_fun_land | 337a322ea4102e2170557dbee0a77903dba79927 | [
"MIT"
] | 3 | 2016-07-23T16:23:31.000Z | 2019-02-03T17:18:47.000Z | defmodule FunLand do
@moduledoc """
FunLand defines many different Algebraic Data Types.
  An Algebraic Data Type is nothing more than a 'container' for some other data type.
Exactly how that 'container' behaves is what makes one ADT different from another.
Lists are ADTs. And so are Maps. And Sets. And Tuples. And many other things.
Algebraic Data Types contain no value of their own. They get a value, once you fill them with something,
and then have useful operations you can perform on their contents.
There are many similarities in the way the different ADTs work. This allows us to define behaviours which
generalize to all ADTs. Any of your custom data types that you can implement one or multiple of these behaviours for,
is an ADT, and will receive the benefits that the implemented ADTs give.
Another nice thing about this generalization, is that there is no 'learning a new API' necessary when
switching to one thing-that-is-an-ADT to the next.
To easily use FunLand in your code, call `use FunLand`, which will alias for you:
- `Mappable` -> A structure is Mappable if there is a way to map a function over it: transforming the contents but keeping the structure.
- `Appliable` -> A structure is Applibable if it is Mappable and, given two of them where the first contains a partially-applied function, you can apply them together.
- `Applicative` -> A structure is Applicative if it is Appliable and you can create a new one by wrapping any value.
- `Chainable` -> A structure is Chainable if it is Appliable and you can chain multiple operations after each other, resulting in just a single structure.
- `Monad` -> A structure is a Monad if it is both Applicative and Chainable.
- `Semicombinable` -> A structure is Semicombinable if there is a way to combine two structures into one.
- `Combinable` -> A structure is Combinable if it is Semicombinable and there is a clearly defined 'empty' element.
- `CombinableMonad` -> A structure is a CombinableMonad if it is both Combinable and a Monad.
- `Reducible` -> A structure is reducible if you can fold/reduce it to a single value, when giving a Combinable or function+default.
  - `Traversable` -> A structure is Traversable if it is Reducible and there is a way to traverse it with a function that returns an Applicative, flipping the structure inside-out while collecting the results.
When given the option `operators: true`, it will also import the following operators:
- `~>` Shorthand for `Mappable.map/2`
- `<~>` Shorthand for `Appliable.apply_with/2`
- `~>>` Shorthand for `Chainable.chain/2`
  - `<>` Shorthand for `Combinable.combine/2`. This operator still works the same for binaries, but will now also work for any other Combinable.
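
  ## Example

  A small sketch of the operator syntax (this assumes the built-in `List`
  instances that ship with FunLand):

      use FunLand, operators: true

      [1, 2, 3] ~> fn x -> x * 10 end
      # => [10, 20, 30]

      [1, 2] <> [3, 4]
      # => [1, 2, 3, 4]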
"""
# Elixir doesn't let you _really_ define algebraic data types, so we're creating a 'one type fits all' type.
@type adt :: [any] | {} | map | struct
defmacro __using__(opts) do
# Only import operators if wanted.
import_code =
if Keyword.get(opts, :operators, false) do
quote do
import Kernel, except: [<>: 2]
import FunLand, only: [<>: 2, ~>: 2, <~>: 2, ~>>: 2]
end
else
quote do
import Kernel
end
end
quote do
unquote(import_code)
alias FunLand.{
Mappable,
Appliable,
Applicative,
Chainable,
Monad,
Semicombinable,
Combinable,
Reducible,
Traversable,
CombinableMonad
}
end
end
defdelegate map(mappable, fun), to: FunLand.Mappable
defdelegate apply_with(appliable_with_fun, appliable), to: FunLand.Appliable
defdelegate new(module, value), to: FunLand.Applicative
defdelegate chain(chainable, fun_returning_chainable), to: FunLand.Chainable
defdelegate empty(module), to: FunLand.Combinable
defdelegate combine(semicombinable, semicombinable), to: FunLand.Semicombinable
defdelegate reduce(reducible, accumulator, fun), to: FunLand.Reducible
defdelegate reduce(reducible, combinable), to: FunLand.Reducible
def any?(reducible, property_fun) do
FunLand.Reducible.reduce(reducible, false, fn elem, acc -> acc || property_fun.(elem) end)
end
def all?(reducible, property_fun) do
FunLand.Reducible.reduce(reducible, true, fn elem, acc -> acc && property_fun.(elem) end)
end
@doc """
Infix version of `FunLand.Mappable.map/2`
"""
def a ~> b do
FunLand.Mappable.map(a, b)
end
@doc """
Infix version of `FunLand.Appliable.apply_with/2`
"""
def a <~> b do
FunLand.Appliable.apply_with(a, b)
end
@doc """
Infix version of `FunLand.Chainable.chain/2`
"""
def a ~>> b do
FunLand.Chainable.chain(a, b)
end
# This operator is made more general. It still works for binary combining, as binaries are indeed Combinable.
@doc """
Infix version of `FunLand.Combinable.combine/2`.
Note that binary strings are Combinable, so "foo" <> "bar" still works.
`<>/2` can still be used in pattern-matches and guard clauses, but it will fall back to the
behavior of `Kernel.<>/2`, which means that it will only work with binary strings.
"""
defmacro left <> right do
in_module? = __CALLER__.context == nil
if in_module? do
quote do
FunLand.Combinable.combine(unquote(left), unquote(right))
end
else
quote do
Kernel.<>(unquote(left), unquote(right))
end
end
end
end
| 38.913043 | 169 | 0.700559 |
f7df312c4b7e5bf798fea7088d1b418d12bb8a9b | 416 | ex | Elixir | lib/client.ex | menuan/fav | 49ae04a5a7dbc70fcc8058303c70ca8332bec0e4 | [
"MIT"
] | null | null | null | lib/client.ex | menuan/fav | 49ae04a5a7dbc70fcc8058303c70ca8332bec0e4 | [
"MIT"
] | null | null | null | lib/client.ex | menuan/fav | 49ae04a5a7dbc70fcc8058303c70ca8332bec0e4 | [
"MIT"
] | null | null | null | defmodule FirebaseAuthVerifier.Client do
use Tesla
@moduledoc """
A simple Tesla client to request the signing certificate that
will be used to verify the signature.
"""
plug Tesla.Middleware.BaseUrl,
Application.get_env(:firebase_auth_verifier, :cert_url) || raise "Base URL has not been set for FirebaseAuthVerifier"
plug Tesla.Middleware.JSON
def get_certificates do
get("/")
end
end
| 24.470588 | 121 | 0.747596 |
f7df5563b7c8584a19a73b9b83623a062bbca26d | 2,632 | ex | Elixir | lib/grizzly/command_class/network_management_installation_maintenance/priority_route_get.ex | pragdave/grizzly | bcd7b46ab2cff1797dac04bc3cd12a36209dd579 | [
"Apache-2.0"
] | null | null | null | lib/grizzly/command_class/network_management_installation_maintenance/priority_route_get.ex | pragdave/grizzly | bcd7b46ab2cff1797dac04bc3cd12a36209dd579 | [
"Apache-2.0"
] | null | null | null | lib/grizzly/command_class/network_management_installation_maintenance/priority_route_get.ex | pragdave/grizzly | bcd7b46ab2cff1797dac04bc3cd12a36209dd579 | [
"Apache-2.0"
] | null | null | null | defmodule Grizzly.CommandClass.NetworkManagementInstallationMaintenance.PriorityRouteGet do
@behaviour Grizzly.Command
alias Grizzly.Packet
alias Grizzly.Command.{EncodeError, Encoding}
alias Grizzly.CommandClass.NetworkManagementInstallationMaintenance
@type t :: %__MODULE__{
seq_number: Grizzly.seq_number(),
retries: non_neg_integer(),
node_id: non_neg_integer()
}
@type opt ::
          {:seq_number, Grizzly.seq_number()}
          | {:retries, non_neg_integer()}
          | {:node_id, non_neg_integer()}
@enforce_keys [:node_id]
defstruct seq_number: nil, retries: 2, node_id: nil
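  # Example (sketch): build the command for node 4, then encode it for sending.
  #
  #     {:ok, command} = PriorityRouteGet.init(node_id: 4, seq_number: 0x01)
  #     {:ok, binary} = PriorityRouteGet.encode(command)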
@spec init([opt]) :: {:ok, t}
def init(opts) do
{:ok, struct(__MODULE__, opts)}
end
@spec encode(t) :: {:ok, binary} | {:error, EncodeError.t()}
def encode(%__MODULE__{seq_number: seq_number, node_id: node_id} = command) do
with {:ok, _encoded} <-
Encoding.encode_and_validate_args(command, %{
node_id: :byte
}) do
binary = Packet.header(seq_number) <> <<0x67, 0x02, node_id>>
{:ok, binary}
end
end
@spec handle_response(t, Packet.t()) ::
{:continue, t()}
| {:done, {:error, :nack_response}}
| {:done, NetworkManagementInstallationMaintenance.priority_route_report()}
| {:retry, t()}
| {:queued, t()}
def handle_response(%__MODULE__{seq_number: seq_number} = command, %Packet{
seq_number: seq_number,
types: [:ack_response]
}) do
{:continue, command}
end
def handle_response(%__MODULE__{seq_number: seq_number, retries: 0}, %Packet{
seq_number: seq_number,
types: [:nack_response]
}) do
{:done, {:error, :nack_response}}
end
def handle_response(%__MODULE__{seq_number: seq_number, retries: n} = command, %Packet{
seq_number: seq_number,
types: [:nack_response]
}) do
{:retry, %{command | retries: n - 1}}
end
def handle_response(
%__MODULE__{seq_number: seq_number} = command,
%Packet{
seq_number: seq_number,
types: [:nack_response, :nack_waiting]
} = packet
) do
if Packet.sleeping_delay?(packet) do
{:queued, command}
else
{:continue, command}
end
end
def handle_response(
_,
%Packet{
body: %{
command_class: :network_management_installation_maintenance,
command: :priority_route_report,
value: value
}
}
) do
{:done, {:ok, value}}
end
def handle_response(command, _), do: {:continue, command}
end
| 28 | 91 | 0.612082 |
f7df94f896b02498c05bd015d566264ffd7c091f | 1,719 | ex | Elixir | clients/health_care/lib/google_api/health_care/v1beta1/model/test_iam_permissions_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/test_iam_permissions_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/health_care/lib/google_api/health_care/v1beta1/model/test_iam_permissions_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.HealthCare.V1beta1.Model.TestIamPermissionsRequest do
@moduledoc """
Request message for `TestIamPermissions` method.
## Attributes
* `permissions` (*type:* `list(String.t)`, *default:* `nil`) - The set of permissions to check for the `resource`. Permissions with
wildcards (such as '*' or 'storage.*') are not allowed. For more
information see
[IAM Overview](https://cloud.google.com/iam/docs/overview#permissions).
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:permissions => list(String.t())
}
field(:permissions, type: :list)
end
defimpl Poison.Decoder, for: GoogleApi.HealthCare.V1beta1.Model.TestIamPermissionsRequest do
def decode(value, options) do
GoogleApi.HealthCare.V1beta1.Model.TestIamPermissionsRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.HealthCare.V1beta1.Model.TestIamPermissionsRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.38 | 135 | 0.741129 |
f7dfb1cc724fbe92e736674a0ff6ca9a19c83c3a | 1,988 | exs | Elixir | test/duration_test.exs | slashdotdash/duration | c3afc94ea2a8b113280d1c71fa1c842a52a05197 | [
"BSD-3-Clause"
] | 2 | 2020-06-03T16:26:14.000Z | 2020-06-04T12:28:06.000Z | test/duration_test.exs | slashdotdash/duration | c3afc94ea2a8b113280d1c71fa1c842a52a05197 | [
"BSD-3-Clause"
] | null | null | null | test/duration_test.exs | slashdotdash/duration | c3afc94ea2a8b113280d1c71fa1c842a52a05197 | [
"BSD-3-Clause"
] | null | null | null | defmodule DurationTest do
use ExUnit.Case
doctest Duration
test "parse durations" do
assert Duration.parse("PT3S") == {:ok, %Duration{seconds: 3}}
assert Duration.parse("P3M") == {:ok, %Duration{months: 3}}
assert Duration.parse("PT3M") == {:ok, %Duration{minutes: 3}}
assert Duration.parse("P12341223T235759") ==
{:ok,
%Duration{years: 1234, months: 12, days: 23, hours: 23, minutes: 57, seconds: 59}}
assert Duration.parse("P1234-12-23T23:57:59") ==
{:ok,
%Duration{years: 1234, months: 12, days: 23, hours: 23, minutes: 57, seconds: 59}}
end
test "string protocol" do
assert to_string(%Duration{}) == "PT0S"
assert to_string(%Duration{years: 1}) == "P1Y"
assert to_string(%Duration{months: 1}) == "P1M"
assert to_string(%Duration{years: 1, seconds: 12}) == "P1YT12S"
assert to_string(%Duration{years: 1, months: 2, days: 3}) == "P1Y2M3D"
assert to_string(%Duration{years: 1, months: 2, days: 3, hours: 4, minutes: 5, seconds: 6}) ==
"P1Y2M3DT4H5M6S"
assert to_string(%Duration{minutes: 1}) == "PT1M"
assert to_string(%Duration{hours: 1, minutes: 2, seconds: 3}) == "PT1H2M3S"
assert to_string(%Duration{hours: 0, minutes: 2, seconds: 3}) == "PT2M3S"
assert to_string(%Duration{hours: 0, minutes: 0, seconds: 3}) == "PT3S"
end
test "errors" do
assert {:error, _} = Duration.parse("")
assert {:error, _} = Duration.parse("P12341331T246060")
assert {:error, _} = Duration.parse("P12341232T246060")
assert {:error, _} = Duration.parse("P12341231T256060")
assert {:error, _} = Duration.parse("P12341231T246160")
assert {:error, _} = Duration.parse("P12341231T246061")
end
test "Timex.integration" do
{:ok, duration} = Duration.new("PT1S")
assert {:ok, [seconds: 1]} = Duration.to_timex_options(duration, :forward)
assert {:ok, [seconds: -1]} = Duration.to_timex_options(duration, :backward)
end
end
| 39.76 | 98 | 0.635312 |
f7dfc23a3a0bc4e57aa1c63a8bfdbc4242c3a03a | 4,563 | ex | Elixir | lib/livebook/runtime/mix_standalone.ex | mcrumm/livebook | b1ceedc2205e64348212fbf7edce568cdb084e97 | [
"Apache-2.0"
] | 1 | 2021-05-21T22:14:23.000Z | 2021-05-21T22:14:23.000Z | lib/livebook/runtime/mix_standalone.ex | mcrumm/livebook | b1ceedc2205e64348212fbf7edce568cdb084e97 | [
"Apache-2.0"
] | null | null | null | lib/livebook/runtime/mix_standalone.ex | mcrumm/livebook | b1ceedc2205e64348212fbf7edce568cdb084e97 | [
"Apache-2.0"
] | null | null | null | defmodule Livebook.Runtime.MixStandalone do
defstruct [:node, :primary_pid, :project_path]
# A runtime backed by a standalone Elixir node managed by Livebook.
#
# This runtime is similar to `Livebook.Runtime.ElixirStandalone`,
# but the node is started in the context of a Mix project.
import Livebook.Runtime.StandaloneInit
alias Livebook.Utils
alias Livebook.Utils.Emitter
@type t :: %__MODULE__{
node: node(),
primary_pid: pid(),
project_path: String.t()
}
@doc """
Starts a new Elixir node (i.e. a system process) and initializes
it with Livebook-specific modules and processes.
The node is started together with a Mix environment appropriate
for the given `project_path`. The setup may involve
long-running steps (like fetching dependencies, compiling the project),
so the initialization is asynchronous. This function spawns and links
a process responsible for initialization, which then uses `emitter`
to emit the following notifications:
* `{:output, string}` - arbitrary output/info sent as the initialization proceeds
* `{:ok, runtime}` - a final message indicating successful initialization
* `{:error, message}` - a final message indicating failure
If no process calls `Runtime.connect/1` for a period of time,
the node automatically terminates. Whoever connects, becomes the owner
and as soon as it terminates, the node terminates as well.
The node may also be terminated manually by using `Runtime.disconnect/1`.
Note: to start the node it is required that both `elixir` and `mix` are
recognised executables within the system.
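
  ## Example

  A rough sketch (the `Emitter.new/1` call is an assumption about how the
  emitter is constructed; any `Livebook.Utils.Emitter` that delivers messages
  to the caller works):

      emitter = Livebook.Utils.Emitter.new(self())
      :ok = init_async("/path/to/project", emitter)
      # ...then handle the emitted {:output, _}, {:ok, runtime} and
      # {:error, message} notifications as they arrive.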
"""
@spec init_async(String.t(), Emitter.t()) :: :ok
def init_async(project_path, emitter) do
output_emitter = Emitter.mapper(emitter, fn output -> {:output, output} end)
spawn_link(fn ->
parent_node = node()
child_node = child_node_name(parent_node)
Utils.temporarily_register(self(), child_node, fn ->
argv = [parent_node]
with {:ok, elixir_path} <- find_elixir_executable(),
:ok <- run_mix_task("deps.get", project_path, output_emitter),
:ok <- run_mix_task("compile", project_path, output_emitter),
eval = child_node_eval_string(),
port = start_elixir_mix_node(elixir_path, child_node, eval, argv, project_path),
{:ok, primary_pid} <- parent_init_sequence(child_node, port, output_emitter) do
runtime = %__MODULE__{
node: child_node,
primary_pid: primary_pid,
project_path: project_path
}
Emitter.emit(emitter, {:ok, runtime})
else
{:error, error} ->
Emitter.emit(emitter, {:error, error})
end
end)
end)
:ok
end
defp run_mix_task(task, project_path, output_emitter) do
Emitter.emit(output_emitter, "Running mix #{task}...\n")
case System.cmd("mix", [task],
cd: project_path,
stderr_to_stdout: true,
into: output_emitter
) do
{_callback, 0} -> :ok
{_callback, _status} -> {:error, "running mix #{task} failed, see output for more details"}
end
end
defp start_elixir_mix_node(elixir_path, node_name, eval, argv, project_path) do
# Here we create a port to start the system process in a non-blocking way.
Port.open({:spawn_executable, elixir_path}, [
:binary,
:stderr_to_stdout,
:hide,
cd: project_path,
args:
elixir_flags(node_name) ++
["-S", "mix", "run", "--eval", eval, "--" | Enum.map(argv, &to_string/1)]
])
end
end
defimpl Livebook.Runtime, for: Livebook.Runtime.MixStandalone do
alias Livebook.Runtime.ErlDist
def connect(runtime) do
ErlDist.Manager.set_owner(runtime.node, self())
Process.monitor({ErlDist.Manager, runtime.node})
end
def disconnect(runtime) do
ErlDist.Manager.stop(runtime.node)
end
def evaluate_code(
runtime,
code,
container_ref,
evaluation_ref,
prev_evaluation_ref \\ :initial,
opts \\ []
) do
ErlDist.Manager.evaluate_code(
runtime.node,
code,
container_ref,
evaluation_ref,
prev_evaluation_ref,
opts
)
end
def forget_evaluation(runtime, container_ref, evaluation_ref) do
ErlDist.Manager.forget_evaluation(runtime.node, container_ref, evaluation_ref)
end
def drop_container(runtime, container_ref) do
ErlDist.Manager.drop_container(runtime.node, container_ref)
end
end
| 32.133803 | 97 | 0.669516 |
f7dfe8042bcb40d6e91dfec40ab64f362c5bc01a | 1,670 | ex | Elixir | web/web.ex | mntns/artus | 958380f42612ec0bc9d059037cf7b59dfbe1cfa9 | [
"MIT"
] | null | null | null | web/web.ex | mntns/artus | 958380f42612ec0bc9d059037cf7b59dfbe1cfa9 | [
"MIT"
] | null | null | null | web/web.ex | mntns/artus | 958380f42612ec0bc9d059037cf7b59dfbe1cfa9 | [
"MIT"
] | null | null | null | defmodule Artus.Web do
@moduledoc """
A module that keeps using definitions for controllers,
views and so on.
This can be used in your application as:
use Artus.Web, :controller
use Artus.Web, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below.
"""
def model do
quote do
use Ecto.Schema
import Ecto
import Ecto.Changeset
import Ecto.Query, only: [from: 1, from: 2]
end
end
def controller do
quote do
use Phoenix.Controller
alias Artus.Repo
import Ecto
import Ecto.Query, only: [from: 1, from: 2]
import Artus.Router.Helpers
import Artus.Gettext
end
end
def view do
quote do
use Phoenix.View, root: "web/templates"
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import Artus.Router.Helpers
import Artus.ErrorHelpers
import Artus.Gettext
end
end
def router do
quote do
use Phoenix.Router
end
end
def channel do
quote do
use Phoenix.Channel
alias Artus.Repo
import Ecto
import Ecto.Query, only: [from: 1, from: 2]
import Artus.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 20.365854 | 88 | 0.655689 |
f7e00ca3d1cdefa5b8d2a21aae3fefe452346521 | 1,527 | ex | Elixir | apps/admin_app/lib/admin_app/application.ex | VeryBigThings/avia | 7ce5d5b244ae0dfddc30c09c17efe27f1718a4c9 | [
"MIT"
] | 1 | 2021-04-08T22:29:19.000Z | 2021-04-08T22:29:19.000Z | apps/admin_app/lib/admin_app/application.ex | VeryBigThings/avia | 7ce5d5b244ae0dfddc30c09c17efe27f1718a4c9 | [
"MIT"
] | null | null | null | apps/admin_app/lib/admin_app/application.ex | VeryBigThings/avia | 7ce5d5b244ae0dfddc30c09c17efe27f1718a4c9 | [
"MIT"
] | null | null | null | defmodule AdminApp.Application do
@moduledoc false
use Application
alias AdminAppWeb.Endpoint
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the endpoint when the application starts
supervisor(AdminAppWeb.Endpoint, []),
# Start your own worker by calling: AdminApp.Worker.start_link(arg1, arg2, arg3)
# worker(AdminApp.Worker, [arg1, arg2, arg3]),
{Phoenix.PubSub, [name: AdminApp.PubSub, adapter: Phoenix.PubSub.PG2]}
]
# Look for a better way to start HoneyDew workers.
:ok = Honeydew.start_queue(:etsy_import_queue)
:ok = Honeydew.start_workers(:etsy_import_queue, Avia.Etsy.ImportWorker)
:ok = Honeydew.start_queue(:export_data_queue)
:ok = Honeydew.start_workers(:export_data_queue, Avia.ExportDataWorker)
:ok = Honeydew.start_queue(:category_delete_queue)
:ok = Honeydew.start_workers(:category_delete_queue, Avia.CategoryWorker, num: 3)
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: AdminApp.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
Endpoint.config_change(changed, removed)
:ok
end
end
| 34.704545 | 86 | 0.7315 |
f7e02267881fcf10e11e7101b79e2f0187d7b415 | 518 | ex | Elixir | apps/engine/lib/engine/ethereum/authority/submitter/core.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 4 | 2020-11-30T17:38:57.000Z | 2021-01-23T21:29:41.000Z | apps/engine/lib/engine/ethereum/authority/submitter/core.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | 24 | 2020-11-30T17:32:48.000Z | 2021-02-22T06:25:22.000Z | apps/engine/lib/engine/ethereum/authority/submitter/core.ex | omgnetwork/omg-childchain-v2 | 31cc9cf9e42718fc3b9bd6668f24a627cac80b4f | [
"Apache-2.0"
] | null | null | null | defmodule Engine.Ethereum.Authority.Submitter.Core do
@moduledoc """
Submission + Ethereum logic
"""
@doc """
  Plasma contracts give us the next plasma child block number to be mined, but we're interested in
  the current (already mined) block number.
  The last mined block number is therefore the next child block number minus the interval.
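
  For example, with the next child block at 2000 and an interval of 1000:

      iex> Engine.Ethereum.Authority.Submitter.Core.mined(2000, 1000)
      1000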
"""
@spec mined(non_neg_integer(), pos_integer()) :: non_neg_integer()
def mined(next_child_block, child_block_interval) do
next_child_block - child_block_interval
end
end
| 32.375 | 94 | 0.739382 |
f7e02455a28c4da7b85c19a6afbf7b848f8658ab | 1,464 | ex | Elixir | testData/org/elixir_lang/parser_definition/matched_dot_operator_call_operation/unqualified_no_parentheses_many_arguments_call_parsing_test_case/StringHeredoc.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/matched_dot_operator_call_operation/unqualified_no_parentheses_many_arguments_call_parsing_test_case/StringHeredoc.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/matched_dot_operator_call_operation/unqualified_no_parentheses_many_arguments_call_parsing_test_case/StringHeredoc.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | """
String
Heredoc
""".and unqualified positional, key: value
"""
String
Heredoc
""".&& unqualified positional, key: value
"""
String
Heredoc
""".|> unqualified positional, key: value
"""
String
Heredoc
""".@ unqualified positional, key: value
"""
String
Heredoc
""".& unqualified positional, key: value
"""
String
Heredoc
""".== unqualified positional, key: value
"""
String
Heredoc
""".+ unqualified positional, key: value
"""
String
Heredoc
""".^^^ unqualified positional, key: value
"""
String
Heredoc
""".<- unqualified positional, key: value
"""
String
Heredoc
""".in unqualified positional, key: value
"""
String
Heredoc
""".= unqualified positional, key: value
"""
String
Heredoc
"""./ unqualified positional, key: value
"""
String
Heredoc
""".* unqualified positional, key: value
"""
String
Heredoc
""".or unqualified positional, key: value
"""
String
Heredoc
""".|| unqualified positional, key: value
"""
String
Heredoc
""".| unqualified positional, key: value
"""
String
Heredoc
""".<= unqualified positional, key: value
"""
String
Heredoc
""".-> unqualified positional, key: value
"""
String
Heredoc
""".<> unqualified positional, key: value
"""
String
Heredoc
""".^ unqualified positional, key: value
"""
String
Heredoc
""".not unqualified positional, key: value
"""
String
Heredoc
""".after unqualified positional, key: value
"""
String
Heredoc
""".do unqualified positional, key: value
"""
String
Heredoc
""".when unqualified positional, key: value
| 15.092784 | 44 | 0.702869 |
f7e05d329a6b59e0ed8ddc25151cc2135bef2585 | 4,600 | ex | Elixir | lib/twirp.ex | daskycodes/twirp-elixir | 51b701111a3b33601980703417388cd099dc7e44 | [
"Apache-2.0"
] | 30 | 2019-11-03T16:30:13.000Z | 2020-06-23T19:38:53.000Z | lib/twirp.ex | daskycodes/twirp-elixir | 51b701111a3b33601980703417388cd099dc7e44 | [
"Apache-2.0"
] | 16 | 2020-03-13T17:56:16.000Z | 2020-06-11T10:40:02.000Z | lib/twirp.ex | daskycodes/twirp-elixir | 51b701111a3b33601980703417388cd099dc7e44 | [
"Apache-2.0"
] | 3 | 2019-12-05T16:43:15.000Z | 2020-05-11T21:34:44.000Z | defmodule Twirp do
@moduledoc """
Twirp provides an elixir implementation of the [twirp rpc framework](https://github.com/twitchtv/twirp)
developed by Twitch. The protocol defines semantics for routing and
serialization of RPCs based on protobufs.
## Example
The canonical Twirp example is a Haberdasher service. Here's the protobuf
description for the service.
```protobuf
syntax = "proto3";
package example;
// Haberdasher service makes hats for clients.
service Haberdasher {
// MakeHat produces a hat of mysterious, randomly-selected color!
rpc MakeHat(Size) returns (Hat);
}
// Size of a Hat, in inches.
message Size {
int32 inches = 1; // must be > 0
}
// A Hat is a piece of headwear made by a Haberdasher.
message Hat {
int32 inches = 1;
string color = 2; // anything but "invisible"
string name = 3; // i.e. "bowler"
}
```
We'll assume for now that this proto file lives in `priv/protos/service.proto`
### Code generation
We can now use `protoc` to generate the files we need. You can run this command
from the root directory of your project.
$ protoc --proto_path=./priv/protos --elixir_out=./lib/example --twirp_elixir_out=./lib/example ./priv/protos/service.proto
After running this command there should be 2 files located in `lib/example`.
The message definitions:
```elixir
defmodule Example.Size do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
inches: integer
}
defstruct [:inches]
field :inches, 1, type: :int32
end
defmodule Example.Hat do
@moduledoc false
use Protobuf, syntax: :proto3
@type t :: %__MODULE__{
inches: integer,
color: String.t(),
name: String.t()
}
defstruct [:inches, :color, :name]
field :inches, 1, type: :int32
field :color, 2, type: :string
field :name, 3, type: :string
end
```
The service and client definition:
```elixir
defmodule Example.HaberdasherService do
@moduledoc false
use Twirp.Service
package "example"
service "Haberdasher"
rpc :MakeHat, Example.Size, Example.Hat, :make_hat
end
defmodule Example.HaberdasherClient do
@moduledoc false
use Twirp.Client, service: Example.HaberdasherService
end
```
### Implementing the server
Now that we've generated the service definition we can implement a "handler"
module that will implement each "method".
```elixir
defmodule Example.HaberdasherHandler do
@colors ~w|white black brown red blue|
@names ["bowler", "baseball cap", "top hat", "derby"]
def make_hat(_ctx, size) do
      if size.inches <= 0 do
Twirp.Error.invalid_argument("I can't make a hat that small!")
else
        %Example.Hat{
inches: size.inches,
color: Enum.random(@colors),
name: Enum.random(@names)
}
end
end
end
```
Separating the service and handler like this may seem a little odd but there are
good reasons to do this. The most important is that it allows the service to be
autogenerated again in the future. The second reason is that it allows us to
easily mock service implementations for testing.
### Running the server
To serve traffic Twirp provides a Plug. We use this plug to attach our service
definition with our handler.
```elixir
defmodule Example.Router do
use Plug.Router
plug Twirp.Plug,
      service: Example.HaberdasherService,
      handler: Example.HaberdasherHandler
end
```
```elixir
defmodule Example.Application do
use Application
def start(_type, _args) do
children = [
Plug.Cowboy.child_spec(scheme: :http, plug: Example.Router, options: [port: 4040]),
]
opts = [strategy: :one_for_one, name: Example.Supervisor]
Supervisor.start_link(children, opts)
end
end
```
If you start your application your plug will now be available on port 4040.
### Using the client
Client definitions are generated alongside the service definition. This allows
you to generate clients for your services in other applications. You can make
RPC calls like so:
```elixir
defmodule AnotherService.GetHats do
alias Example.HaberdasherClient, as: Client
alias Example.{Size, Hat}
def make_a_hat(inches) do
case Client.make_hat(Size.new(inches: inches)) do
{:ok, %Hat{}=hat} ->
hat
{:error, %Twirp.Error{msg: msg}} ->
Logger.error(msg)
end
end
end
```
"""
end
| 25 | 129 | 0.66587 |
f7e06679888ee7553ecfae22fb531b70cf3fae88 | 3,619 | ex | Elixir | lib/shex/shape_expressions/node_constraint/numeric_facets.ex | rdf-elixir/shex-ex | 84100ab3dfcf3988b2b90289a8e4fbeb9f4d1516 | [
"MIT"
] | 4 | 2020-06-06T15:09:16.000Z | 2021-03-22T19:46:30.000Z | lib/shex/shape_expressions/node_constraint/numeric_facets.ex | rdf-elixir/shex-ex | 84100ab3dfcf3988b2b90289a8e4fbeb9f4d1516 | [
"MIT"
] | null | null | null | lib/shex/shape_expressions/node_constraint/numeric_facets.ex | rdf-elixir/shex-ex | 84100ab3dfcf3988b2b90289a8e4fbeb9f4d1516 | [
"MIT"
] | null | null | null | defmodule ShEx.NodeConstraint.NumericFacets do
@moduledoc false
defstruct ~w[mininclusive minexclusive maxinclusive maxexclusive totaldigits fractiondigits]a
alias RDF.{Literal, XSD}
def new(xs_facets) do
xs_facets_with_literals =
Map.new(xs_facets, fn
{key, value} when key in ~w[mininclusive minexclusive maxinclusive maxexclusive]a ->
{key, value |> XSD.Decimal.new() |> Literal.canonical()}
{key, value} ->
{key, value}
end)
numeric_facets = struct(__MODULE__, xs_facets_with_literals)
if %__MODULE__{} != numeric_facets do
numeric_facets
end
end
# TODO: instead of checking on every application to a node which constraints are there and must be applied, this could be compiled into minimal constraint checker
def satisfies(nil, _), do: :ok
def satisfies(numeric_facets, %Literal{} = node) do
with true <- XSD.Numeric.datatype?(node) and Literal.valid?(node),
true <- satisfies_numeric_mininclusive(numeric_facets.mininclusive, node),
true <- satisfies_numeric_minexclusive(numeric_facets.minexclusive, node),
true <- satisfies_numeric_maxinclusive(numeric_facets.maxinclusive, node),
true <- satisfies_numeric_maxexclusive(numeric_facets.maxexclusive, node),
true <- satisfies_numeric_totaldigits(numeric_facets.totaldigits, node),
true <- satisfies_numeric_fractiondigits(numeric_facets.fractiondigits, node) do
:ok
else
false ->
%ShEx.Violation.NumericFacetConstraint{
facet_type: :invalid_numeric,
node: node
}
{:violates, type, value} ->
%ShEx.Violation.NumericFacetConstraint{
facet_type: type,
facet_value: value,
node: node
}
end
end
def satisfies(_, node) do
%ShEx.Violation.NumericFacetConstraint{
facet_type: :invalid_numeric,
node: node
}
end
defp satisfies_numeric_mininclusive(nil, _), do: true
defp satisfies_numeric_mininclusive(mininclusive, literal) do
RDF.Literal.compare(literal, mininclusive) in [:gt, :eq] ||
{:violates, :mininclusive, mininclusive}
end
defp satisfies_numeric_minexclusive(nil, _), do: true
defp satisfies_numeric_minexclusive(minexclusive, literal) do
RDF.Literal.compare(literal, minexclusive) == :gt ||
{:violates, :minexclusive, minexclusive}
end
defp satisfies_numeric_maxinclusive(nil, _), do: true
defp satisfies_numeric_maxinclusive(maxinclusive, literal) do
RDF.Literal.compare(literal, maxinclusive) in [:lt, :eq] ||
{:violates, :maxinclusive, maxinclusive}
end
defp satisfies_numeric_maxexclusive(nil, _), do: true
defp satisfies_numeric_maxexclusive(maxexclusive, literal) do
RDF.Literal.compare(literal, maxexclusive) == :lt ||
{:violates, :maxexclusive, maxexclusive}
end
defp satisfies_numeric_totaldigits(nil, _), do: true
defp satisfies_numeric_totaldigits(totaldigits, literal) do
(decimal?(literal) && XSD.Decimal.digit_count(literal) <= totaldigits) ||
{:violates, :totaldigits, totaldigits}
end
defp satisfies_numeric_fractiondigits(nil, _), do: true
defp satisfies_numeric_fractiondigits(fractiondigits, literal) do
(decimal?(literal) && XSD.Decimal.fraction_digit_count(literal) <= fractiondigits) ||
{:violates, :fractiondigits, fractiondigits}
end
defp decimal?(%Literal{} = literal) do
# We also have to check for XSD.Integer since RDF.ex implements it as a primitive
XSD.Integer.datatype?(literal) or XSD.Decimal.datatype?(literal)
end
end
| 33.82243 | 164 | 0.709865 |
f7e085c660993b30e47f7cdee7d58cd987d5aec7 | 1,357 | ex | Elixir | lib/codewar/application.ex | hanam1ni/codewar-web | 0d7c46ac32d85b1d76c604226e0f3d6f2b76b0ad | [
"MIT"
] | null | null | null | lib/codewar/application.ex | hanam1ni/codewar-web | 0d7c46ac32d85b1d76c604226e0f3d6f2b76b0ad | [
"MIT"
] | null | null | null | lib/codewar/application.ex | hanam1ni/codewar-web | 0d7c46ac32d85b1d76c604226e0f3d6f2b76b0ad | [
"MIT"
] | null | null | null | defmodule Codewar.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
topologies = [
codewar: [
strategy: Cluster.Strategy.DNSPoll,
config: [
polling_interval: 1000,
query: "codewar-web-staging.codewar-web-staging.local",
node_basename: "codewar-web-staging"
]
]
]
children = [
# Start the Ecto repository
Codewar.Repo,
# Start the Telemetry supervisor
CodewarWeb.Telemetry,
# Start the PubSub system
{Phoenix.PubSub, name: Codewar.PubSub},
# Start the Endpoint (http/https)
CodewarWeb.Endpoint,
{Cluster.Supervisor, [topologies, [name: Codewar.ClusterSupervisor]]}
# Start a worker by calling: Codewar.Worker.start_link(arg)
# {Codewar.Worker, arg}
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Codewar.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
CodewarWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 28.87234 | 75 | 0.671334 |
f7e0a8be68d7d325d159b8e4f7af3e9632798bb4 | 767 | ex | Elixir | lib/afk/keycode.ex | doughsay/afk | 9b7f55c98aa8c52dee3f134cef112c1361fe75f4 | [
"MIT"
] | 4 | 2019-12-10T21:27:06.000Z | 2020-01-26T03:07:43.000Z | lib/afk/keycode.ex | doughsay/afk | 9b7f55c98aa8c52dee3f134cef112c1361fe75f4 | [
"MIT"
] | 46 | 2019-12-13T05:46:08.000Z | 2020-10-29T13:07:40.000Z | lib/afk/keycode.ex | nerves-keyboard/afk | 9b7f55c98aa8c52dee3f134cef112c1361fe75f4 | [
"MIT"
] | 1 | 2020-01-02T13:35:03.000Z | 2020-01-02T13:35:03.000Z | defmodule AFK.Keycode do
@moduledoc """
A keycode represents a key that when pressed affects the keyboard state in
some way.
The currently supported keycode types are:
* `AFK.Keycode.Key` - A basic keyboard key
* `AFK.Keycode.KeyLock` - A key that allows locking other keys
* `AFK.Keycode.Layer` - A key that can activate other layers
* `AFK.Keycode.Modifier` - A basic keyboard modifier
* `AFK.Keycode.None` - A keycode that does nothing
* `AFK.Keycode.Transparent` - A key that is transparent to its layer
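
  ## Example

  A minimal sketch of building individual keycodes (the exact constructor
  arguments shown here are illustrative):

      AFK.Keycode.Key.new(:a)
      AFK.Keycode.Modifier.new(:left_control)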
"""
alias __MODULE__.{Key, KeyLock, Layer, MFA, Modifier, None, Transparent}
@type t :: Key.t() | KeyLock.t() | Layer.t() | MFA.t() | Modifier.t() | None.t() | Transparent.t()
@type with_scancode :: Key.t() | Modifier.t()
end
| 34.863636 | 100 | 0.688396 |
f7e0dbf98075df18d15c37fe10e93507f4bc3ae6 | 2,869 | ex | Elixir | lib/macchinista/accounts.ex | themaxhero/Machinista | 2e0114dbbc69fc8187b3ed080de694ac4f558ae8 | [
"BSD-2-Clause"
] | 1 | 2019-12-16T10:22:07.000Z | 2019-12-16T10:22:07.000Z | lib/macchinista/accounts.ex | themaxhero/Machinista | 2e0114dbbc69fc8187b3ed080de694ac4f558ae8 | [
"BSD-2-Clause"
] | 2 | 2021-03-10T06:56:47.000Z | 2021-05-11T02:41:32.000Z | lib/macchinista/accounts.ex | themaxhero/Macchinista | 2e0114dbbc69fc8187b3ed080de694ac4f558ae8 | [
"BSD-2-Clause"
] | null | null | null | defmodule Macchinista.Accounts do
@moduledoc """
The Accounts context.
"""
import Ecto.Query, warn: false
alias Macchinista.Repo
alias Macchinista.Accounts.{User, Session}
alias Session.Query, as: SessionQuery
@token_secret Application.get_env(:macchinista, :token_secret)
@doc """
Returns the list of users.
## Examples
iex> list_users()
[%User{}, ...]
"""
def list_users do
Repo.all(User)
end
@doc """
Gets a single user.
Raises `Ecto.NoResultsError` if the User does not exist.
## Examples
iex> get_user!(123)
%User{}
iex> get_user!(456)
** (Ecto.NoResultsError)
"""
def get_user(id), do: Repo.get(User, id)
def get_user!(id), do: Repo.get!(User, id)
def get_user_by_email(email), do: Repo.get_by(User, email: email)
@doc """
Creates a user.
## Examples
iex> create_user(%{field: value})
{:ok, %User{}}
iex> create_user(%{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def create_user(attrs \\ %{}) do
Repo.transaction(fn ->
result =
attrs
|> User.create_changeset()
|> Repo.insert()
case result do
{:ok, %User{} = user} ->
# create_log(user, :user, :insert, :success, user)
user
{:error, _} ->
Repo.rollback(:internal)
end
end)
end
@doc """
Updates a user.
## Examples
iex> update_user(user, %{field: new_value})
{:ok, %User{}}
iex> update_user(user, %{field: bad_value})
{:error, %Ecto.Changeset{}}
"""
def update_user(%User{} = user, attrs) do
user
|> User.update_changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a User.
## Examples
iex> delete_user(user)
{:ok, %User{}}
iex> delete_user(user)
{:error, %Ecto.Changeset{}}
"""
def delete_user(%User{} = user) do
Repo.delete(user)
end
  def login(%{email: email, password: password}) do
    with %User{} = user <- get_user_by_email(email),
         true <- Bcrypt.verify_pass(password, user.password_hash) do
      {:ok, Phoenix.Token.sign(@token_secret, "user", user.id)}
    else
      {:error, _} = response -> response
      nil -> {:error, :invalid_credentials}
      false -> {:error, :invalid_credentials}
      _ -> {:error, :unknown_error}
    end
  end
@spec validate_user(User.t() | nil) ::
{:ok, User.t()}
| {:error, :invalid_authorization_token}
defp validate_user(%User{} = user), do: {:ok, user}
defp validate_user(_), do: {:error, :invalid_authorization_token}
@spec authorize_user(Session.token()) ::
{:ok, User.t()}
| {:error, :invalid_authorization_token}
def authorize_user(token) do
token
|> SessionQuery.by_token()
|> Repo.one()
|> Session.get_user_id()
|> get_user()
|> validate_user()
end
end
| 20.640288 | 68 | 0.584524 |
f7e0e259a18811070fdcbc5773d1653e2f6a67f9 | 3,365 | exs | Elixir | test/order_test.exs | aforward/acme_ex | f199943be6a75e0e4672a64dc4820dfc15abf330 | [
"MIT"
] | 1 | 2020-02-01T19:30:44.000Z | 2020-02-01T19:30:44.000Z | test/order_test.exs | aforward/acme_ex | f199943be6a75e0e4672a64dc4820dfc15abf330 | [
"MIT"
] | null | null | null | test/order_test.exs | aforward/acme_ex | f199943be6a75e0e4672a64dc4820dfc15abf330 | [
"MIT"
] | null | null | null | defmodule AcmeEx.OrderTest do
use ExUnit.Case, async: false
alias AcmeEx.{Account, Order, Nonce}
@config %{site: "http://localhost:9999"}
test "new" do
account = Account.new("abc124")
order_id = Nonce.next()
expected = %{
id: order_id,
status: :pending,
cert: nil,
domains: ["d1", "d2"],
token: "xxx123"
}
assert expected == Order.new(["d1", "d2"], account, "xxx123")
assert {:ok, expected} == Order.fetch("abc124", order_id)
assert {:ok, expected} == Order.fetch(account.id, order_id)
assert {expected, %{id: account.id}} == Order.decode_path("#{account.id}/#{order_id}")
end
test "new generate token" do
account = Account.new("abc126")
id = Nonce.next()
actual = Order.new(["d1", "d2"], account)
expected = %{id: id, status: :pending, cert: nil, domains: ["d1", "d2"], token: actual.token}
assert expected == actual
assert {:ok, expected} == Order.fetch("abc126", id)
end
test "update" do
account = Account.new("abc125")
id = Nonce.next()
_ = Order.new(["d1", "d2"], account, "xxx123")
new_order = %{id: id, status: :pending, cert: nil, domains: ["d1", "d3"], token: "xxx124"}
assert Order.update("abc125", new_order) == {:ok, new_order}
assert {:ok, new_order} == Order.fetch("abc125", id)
end
test "domains" do
assert ["foo.bar", "www.foo.bar", "blog.foo.bar"] ==
AcmeEx.Order.domains(%{
payload: %{
"identifiers" => [
%{"type" => "dns", "value" => "foo.bar"},
%{"type" => "dns", "value" => "www.foo.bar"},
%{"type" => "dns", "value" => "blog.foo.bar"}
],
"resource" => "new-order",
"status" => "pending"
}
})
end
test "identifiers" do
assert [%{type: "dns", value: "d1"}, %{type: "dns", value: "d2"}] ==
Order.identifiers(%{domains: ["d1", "d2"]})
end
test "order_path" do
assert "10/11" == Order.encode_path(%{id: 11}, %{id: 10})
end
test "authorization" do
assert "http://localhost:9999/authorizations/10/11" ==
Order.authorization(@config, %{id: 11}, %{id: 10})
end
test "location" do
assert "http://localhost:9999/order/10/11" == Order.location(@config, %{id: 11}, %{id: 10})
end
test "finalize" do
assert "http://localhost:9999/finalize/10/11" == Order.finalize(@config, %{id: 11}, %{id: 10})
end
test "expires" do
assert "2018-09-20T11:11:13Z" == Order.expires(3601, ~N[2018-09-20 10:11:12])
assert "2018-09-20T11:11:12Z" == Order.expires(nil, ~N[2018-09-20 10:11:12])
assert !is_nil(Order.expires())
end
test "to_challenge" do
assert %{
type: "http-01",
status: "pending",
url: "http://localhost:9999/challenge/http/10/11",
token: "def456"
} ==
Order.to_challenge(@config, %{id: 11, status: "pending", token: "def456"}, %{id: 10})
end
test "to_summary" do
assert %{
status: "pending",
certificate: "http://localhost:9999/cert/10/11",
identifier: %{type: "dns", value: "localhost"}
} ==
Order.to_summary(@config, %{id: 11, status: "pending", token: "def456"}, %{id: 10})
end
end
| 30.315315 | 98 | 0.53997 |
f7e0e4330f157cba8b04eda867332f641a1f9c05 | 1,955 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_list_job_triggers_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_list_job_triggers_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_list_job_triggers_response.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2ListJobTriggersResponse do
@moduledoc """
Response message for ListJobTriggers.
## Attributes
* `jobTriggers` (*type:* `list(GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2JobTrigger.t)`, *default:* `nil`) - List of triggeredJobs, up to page_size in ListJobTriggersRequest.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - If the next page is available then the next page token to be used
in following ListJobTriggers request.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:jobTriggers => list(GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2JobTrigger.t()),
:nextPageToken => String.t()
}
field(:jobTriggers, as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2JobTrigger, type: :list)
field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2ListJobTriggersResponse do
def decode(value, options) do
GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2ListJobTriggersResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2ListJobTriggersResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 38.333333 | 177 | 0.753964 |
f7e1057265b3deb25857b64f0605fdae2cf280cc | 4,575 | ex | Elixir | farmbot_firmware/lib/farmbot_firmware/request.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | farmbot_firmware/lib/farmbot_firmware/request.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | farmbot_firmware/lib/farmbot_firmware/request.ex | adamswsk/farmbot_os | d177d3b74888c1e7bcbf8f8595818708ee97f73b | [
"MIT"
] | null | null | null | defmodule FarmbotFirmware.Request do
@moduledoc false
# sister module to FarmbotFirmware.Command
# see docs for FarmbotFirmware.request/1
alias FarmbotFirmware
alias FarmbotFirmware.GCODE
@spec request(GenServer.server(), GCODE.t()) ::
{:ok, GCODE.t()}
| {:error,
:invalid_command | :firmware_error | FarmbotFirmware.status()}
@ok [
:parameter_read,
:status_read,
:pin_read,
:end_stops_read,
:position_read,
:software_version_read
]
def request(firmware_server \\ FarmbotFirmware, code)
def request(firmware_server, {_tag, {kind, _}} = code) do
if kind not in @ok do
raise ArgumentError, "#{kind} is not a valid request."
end
case GenServer.call(firmware_server, code, :infinity) do
{:ok, tag} -> wait_for_request_result(tag, code)
{:error, status} -> {:error, status}
end
end
def request(firmware_server, {_, _} = code) do
request(firmware_server, {to_string(:rand.uniform(100)), code})
end
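  # Illustrative usage (assumes a running firmware server registered under the
  # default `FarmbotFirmware` name; the matched values are placeholders):
  #
  #     {:ok, {_tag, {:report_position, [x: _x, y: _y, z: _z]}}} =
  #       FarmbotFirmware.Request.request({:position_read, []})
  #
  # Passing a kind that is not in @ok (e.g. a movement command) raises ArgumentError.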
def request_timeout(tag, code, result \\ nil) do
if result do
{:ok, {tag, result}}
else
{:error, "timeout waiting for request to complete: #{inspect(code)}"}
end
end
# This is a bit weird but let me explain:
# if this function `receive`s
# * report_error
# * report_invalid
# * report_emergency_lock
# it needs to return an error.
#
# If this function `receive`s
# * report_success
# when no valid data has been collected from `wait_for_request_result_process`
# it needs to return an error.
#
# If this function `receive`s
# * report_success
# when valid data has been collected from `wait_for_request_result_process`
# it will return that data.
  # If this function receives no data for 10 seconds (see the `after` clause
  # below), it needs to error.
def wait_for_request_result(tag, code, result \\ nil) do
receive do
{tag, {:report_begin, []}} ->
wait_for_request_result(tag, code, result)
{tag, {:report_busy, []}} ->
wait_for_request_result(tag, code, result)
{tag, {:report_success, []}} ->
if result,
do: {:ok, {tag, result}},
else: wait_for_request_result(tag, code, result)
{_, {:report_error, error_code}} ->
if error_code,
do: {:error, error_code},
else: {:error, :firmware_error}
{_, {:report_invalid, []}} ->
{:error, :invalid_command}
{_, {:report_emergency_lock, []}} ->
{:error, :emergency_lock}
{:error, reason} ->
{:error, reason}
{tag, report} ->
wait_for_request_result_process(report, tag, code, result)
after
10_000 -> request_timeout(tag, code, result)
end
end
# {:parameter_read, [param]} => {:report_parameter_value, [{param, val}]}
defp wait_for_request_result_process(
{:report_parameter_value, _} = report,
tag,
{_, {:parameter_read, _}} = code,
_
) do
wait_for_request_result(tag, code, report)
end
# {:status_read, [status]} => {:report_status_value, [{status, value}]}
defp wait_for_request_result_process(
{:report_status_value, _} = report,
tag,
{_, {:status_read, _}} = code,
_
) do
wait_for_request_result(tag, code, report)
end
# {:pin_read, [pin]} => {:report_pin_value, [{pin, value}]}
defp wait_for_request_result_process(
{:report_pin_value, _} = report,
tag,
{_, {:pin_read, _}} = code,
_
) do
wait_for_request_result(tag, code, report)
end
  # {:end_stops_read, []} => {:report_end_stops, end_stops}
defp wait_for_request_result_process(
{:report_end_stops, _} = report,
tag,
{_, {:end_stops_read, []}} = code,
_
) do
wait_for_request_result(tag, code, report)
end
  # {:position_read, []} => {:report_position, [x: x, y: y, z: z]}
defp wait_for_request_result_process(
{:report_position, _} = report,
tag,
{_, {:position_read, []}} = code,
_
) do
wait_for_request_result(tag, code, report)
end
# {:software_version_read, []} => {:report_software_version, [version]}
defp wait_for_request_result_process(
{:report_software_version, _} = report,
tag,
{_, {:software_version_read, _}} = code,
_
) do
wait_for_request_result(tag, code, report)
end
defp wait_for_request_result_process(_report, tag, code, result) do
wait_for_request_result(tag, code, result)
end
end
| 28.240741 | 80 | 0.616831 |
f7e130d4ca576e5138e2afa2cf005c978932efb3 | 2,197 | exs | Elixir | config/prod.exs | thomasvolk/lighthouse | 566f4029a1ef1e5863e45b7f9b1b7afa914980ec | [
"Apache-2.0"
] | null | null | null | config/prod.exs | thomasvolk/lighthouse | 566f4029a1ef1e5863e45b7f9b1b7afa914980ec | [
"Apache-2.0"
] | null | null | null | config/prod.exs | thomasvolk/lighthouse | 566f4029a1ef1e5863e45b7f9b1b7afa914980ec | [
"Apache-2.0"
] | null | null | null | use Mix.Config
# For production, we often load configuration from external
# sources, such as your system environment. For this reason,
# you won't find the :http configuration below, but set inside
# LighthouseWeb.Endpoint.init/2 when load_from_system_env is
# true. Any dynamic configuration should be done there.
#
# Don't forget to configure the url host to something meaningful,
# Phoenix uses this information when generating URLs.
#
# Finally, we also include the path to a cache manifest
# containing the digested version of static files. This
# manifest is generated by the mix phx.digest task
# which you typically run after static files are built.
config :lighthouse, LighthouseWeb.Endpoint,
http: [port: 9996],
load_from_system_env: false,
root: "."
#cache_static_manifest: "priv/static/cache_manifest.json"
# Do not print debug messages in production
config :logger, level: :info
# ## SSL Support
#
# To get SSL working, you will need to add the `https` key
# to the previous section and set your `:url` port to 443:
#
# config :lighthouse, LighthouseWeb.Endpoint,
# ...
# url: [host: "example.com", port: 443],
# https: [:inet6,
# port: 443,
# keyfile: System.get_env("SOME_APP_SSL_KEY_PATH"),
# certfile: System.get_env("SOME_APP_SSL_CERT_PATH")]
#
# Where those two env variables return an absolute path to
# the key and cert in disk or a relative path inside priv,
# for example "priv/ssl/server.key".
#
# We also recommend setting `force_ssl`, ensuring no data is
# ever sent via http, always redirecting to https:
#
# config :lighthouse, LighthouseWeb.Endpoint,
# force_ssl: [hsts: true]
#
# Check `Plug.SSL` for all available options in `force_ssl`.
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start the server for all endpoints:
#
# config :phoenix, :serve_endpoints, true
#
# Alternatively, you can configure exactly which server to
# start per endpoint:
#
# config :lighthouse, LighthouseWeb.Endpoint, server: true
#
# Finally import the config/prod.secret.exs
# which should be versioned separately.
#import_config "prod.secret.exs"
| 33.287879 | 67 | 0.723259 |
f7e13a41126a3f643c707748b3f0c5b2de41c1fa | 14,299 | exs | Elixir | test/validation/point_point_a_test.exs | codabrink/topo | f1ca4b7fe337a67285ee4c65a34fb521b119342c | [
"MIT"
] | 110 | 2016-05-05T21:09:19.000Z | 2022-03-08T05:22:16.000Z | test/validation/point_point_a_test.exs | codabrink/topo | f1ca4b7fe337a67285ee4c65a34fb521b119342c | [
"MIT"
] | 15 | 2016-12-01T00:32:11.000Z | 2022-01-18T13:56:37.000Z | test/validation/point_point_a_test.exs | codabrink/topo | f1ca4b7fe337a67285ee4c65a34fb521b119342c | [
"MIT"
] | 24 | 2016-09-19T20:06:50.000Z | 2021-06-16T06:41:10.000Z | defmodule Intersect.Validation.PointPointATest do
use ExUnit.Case
@tag :validation
test "01-001 - P/P: same point" do
a = "POINT (20 20)" |> Geo.WKT.decode!()
b = "POINT (20 20)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == true
assert Topo.equals?(b, a) == true
end
@tag :validation
test "01-001 - P/P: same point (float)" do
a = "POINT(20.0 20.0)" |> Geo.WKT.decode!()
b = "POINT(20.0 20.0)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == true
assert Topo.equals?(b, a) == true
end
@tag :validation
test "01-002 - P/P: different point" do
a = "POINT (20 20)" |> Geo.WKT.decode!()
b = "POINT (40 60)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == false
assert Topo.intersects?(b, a) == false
assert Topo.disjoint?(a, b) == true
assert Topo.disjoint?(b, a) == true
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-002 - P/P: different point (float)" do
a = "POINT(20.0 20.0)" |> Geo.WKT.decode!()
b = "POINT(40.0 60.0)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == false
assert Topo.intersects?(b, a) == false
assert Topo.disjoint?(a, b) == true
assert Topo.disjoint?(b, a) == true
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-003 - P/mP: different points" do
a = "POINT (40 40)" |> Geo.WKT.decode!()
b = "MULTIPOINT (20 20, 80 80, 20 120)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == false
assert Topo.intersects?(b, a) == false
assert Topo.disjoint?(a, b) == true
assert Topo.disjoint?(b, a) == true
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-003 - P/mP: different points (float)" do
a = "POINT(40.0 40.0)" |> Geo.WKT.decode!()
b = "MULTIPOINT(20.0 20.0,80.0 80.0,20.0 120.0)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == false
assert Topo.intersects?(b, a) == false
assert Topo.disjoint?(a, b) == true
assert Topo.disjoint?(b, a) == true
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-004 - P/mP: point A within one of B points" do
a = "POINT (20 20)" |> Geo.WKT.decode!()
b = "MULTIPOINT (20 20, 80 80, 20 120)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-004 - P/mP: point A within one of B points (float)" do
a = "POINT(20.0 20.0)" |> Geo.WKT.decode!()
b = "MULTIPOINT(20.0 20.0,80.0 80.0,20.0 120.0)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-005 - mP/mP-1-1: same points" do
a = "MULTIPOINT (40 40, 80 60, 120 100)" |> Geo.WKT.decode!()
b = "MULTIPOINT (40 40, 80 60, 120 100)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == true
assert Topo.equals?(b, a) == true
end
@tag :validation
test "01-005 - mP/mP-1-1: same points (float)" do
a = "MULTIPOINT(40.0 40.0,80.0 60.0,120.0 100.0)" |> Geo.WKT.decode!()
b = "MULTIPOINT(40.0 40.0,80.0 60.0,120.0 100.0)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == true
assert Topo.equals?(b, a) == true
end
@tag :validation
test "01-006 - mP/mP-1-2: same but different sequence of points" do
a = "MULTIPOINT (40 40, 80 60, 120 100)" |> Geo.WKT.decode!()
b = "MULTIPOINT (40 40, 120 100, 80 60)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == true
assert Topo.equals?(b, a) == true
end
@tag :validation
test "01-006 - mP/mP-1-2: same but different sequence of points (float)" do
a = "MULTIPOINT(40.0 40.0,80.0 60.0,120.0 100.0)" |> Geo.WKT.decode!()
b = "MULTIPOINT(40.0 40.0,120.0 100.0,80.0 60.0)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == true
assert Topo.equals?(b, a) == true
end
@tag :validation
test "01-007 - mP/mP-2: different points" do
a = "MULTIPOINT (40 40, 60 100, 100 60, 120 120)" |> Geo.WKT.decode!()
b = "MULTIPOINT (20 120, 60 60, 100 100, 140 40)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == false
assert Topo.intersects?(b, a) == false
assert Topo.disjoint?(a, b) == true
assert Topo.disjoint?(b, a) == true
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-007 - mP/mP-2: different points (float)" do
a = "MULTIPOINT(40.0 40.0,60.0 100.0,100.0 60.0,120.0 120.0)" |> Geo.WKT.decode!()
b = "MULTIPOINT(20.0 120.0,60.0 60.0,100.0 100.0,140.0 40.0)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == false
assert Topo.intersects?(b, a) == false
assert Topo.disjoint?(a, b) == true
assert Topo.disjoint?(b, a) == true
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-008 - mP/mP-5-1: same points" do
a = "MULTIPOINT (20 20, 80 70, 140 120, 200 170)" |> Geo.WKT.decode!()
b = "MULTIPOINT (20 20, 80 70, 140 120, 200 170)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == true
assert Topo.equals?(b, a) == true
end
@tag :validation
test "01-008 - mP/mP-5-1: same points (float)" do
a = "MULTIPOINT(20.0 20.0,80.0 70.0,140.0 120.0,200.0 170.0)" |> Geo.WKT.decode!()
b = "MULTIPOINT(20.0 20.0,80.0 70.0,140.0 120.0,200.0 170.0)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == true
assert Topo.equals?(b, a) == true
end
@tag :validation
test "01-009 - mP/mP-5-2: same points but different sequence" do
a = "MULTIPOINT (20 20, 140 120, 80 70, 200 170)" |> Geo.WKT.decode!()
b = "MULTIPOINT (80 70, 20 20, 200 170, 140 120)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == true
assert Topo.equals?(b, a) == true
end
@tag :validation
test "01-009 - mP/mP-5-2: same points but different sequence (float)" do
a = "MULTIPOINT(20.0 20.0,140.0 120.0,80.0 70.0,200.0 170.0)" |> Geo.WKT.decode!()
b = "MULTIPOINT(80.0 70.0,20.0 20.0,200.0 170.0,140.0 120.0)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == true
assert Topo.equals?(a, b) == true
assert Topo.equals?(b, a) == true
end
@tag :validation
test "01-010 - mP/mP-5-3: some points same" do
a = "MULTIPOINT (20 20, 80 70, 140 120, 200 170)" |> Geo.WKT.decode!()
b = "MULTIPOINT (80 70, 140 120)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-010 - mP/mP-5-3: some points same (float)" do
a = "MULTIPOINT(20.0 20.0,80.0 70.0,140.0 120.0,200.0 170.0)" |> Geo.WKT.decode!()
b = "MULTIPOINT(80.0 70.0,140.0 120.0)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-011 - mP/mP-5-4: some points same, in a different sequence" do
a = "MULTIPOINT (80 70, 20 20, 200 170, 140 120)" |> Geo.WKT.decode!()
b = "MULTIPOINT (140 120, 80 70)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-011 - mP/mP-5-4: some points same, in a different sequence (float)" do
a = "MULTIPOINT(80.0 70.0,20.0 20.0,200.0 170.0,140.0 120.0)" |> Geo.WKT.decode!()
b = "MULTIPOINT(140.0 120.0,80.0 70.0)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == true
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-012 - mP/mP-6-1: some points same, some different" do
a = "MULTIPOINT (80 70, 20 20, 200 170, 140 120)" |> Geo.WKT.decode!()
b = "MULTIPOINT (80 170, 140 120, 200 80)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-012 - mP/mP-6-1: some points same, some different (float)" do
a = "MULTIPOINT(80.0 70.0,20.0 20.0,200.0 170.0,140.0 120.0)" |> Geo.WKT.decode!()
b = "MULTIPOINT(80.0 170.0,140.0 120.0,200.0 80.0)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-013 - mP/mP-6-2: dim(0){A.4P1.Int = B.4P4.Int}, dim(0){A.4P4.Int = B.4P2.Int}" do
a = "MULTIPOINT (80 70, 20 20, 200 170, 140 120)" |> Geo.WKT.decode!()
b = "MULTIPOINT (80 170, 140 120, 200 80, 80 70)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
@tag :validation
test "01-013 - mP/mP-6-2: dim(0){A.4P1.Int = B.4P4.Int}, dim(0){A.4P4.Int = B.4P2.Int} (float)" do
a = "MULTIPOINT(80.0 70.0,20.0 20.0,200.0 170.0,140.0 120.0)" |> Geo.WKT.decode!()
b = "MULTIPOINT(80.0 170.0,140.0 120.0,200.0 80.0,80.0 70.0)" |> Geo.WKT.decode!()
assert Topo.intersects?(a, b) == true
assert Topo.intersects?(b, a) == true
assert Topo.disjoint?(a, b) == false
assert Topo.disjoint?(b, a) == false
assert Topo.contains?(a, b) == false
assert Topo.within?(a, b) == false
assert Topo.equals?(a, b) == false
assert Topo.equals?(b, a) == false
end
end
| 36.291878 | 100 | 0.598224 |
f7e14c18b69026f8a1acfbc60099de59fe362a08 | 640 | ex | Elixir | lib/mix/tasks/pandoc/update_infos.ex | dmitriid/panpipe | c93a189dd8aa40a3cef4d9fac5830268682df9eb | [
"MIT"
] | 24 | 2019-08-19T02:53:06.000Z | 2022-03-13T14:41:08.000Z | lib/mix/tasks/pandoc/update_infos.ex | dmitriid/panpipe | c93a189dd8aa40a3cef4d9fac5830268682df9eb | [
"MIT"
] | 4 | 2020-04-02T02:05:52.000Z | 2022-03-18T00:56:23.000Z | lib/mix/tasks/pandoc/update_infos.ex | dmitriid/panpipe | c93a189dd8aa40a3cef4d9fac5830268682df9eb | [
"MIT"
] | 2 | 2019-10-28T15:38:47.000Z | 2021-03-24T17:43:27.000Z | defmodule Mix.Tasks.Pandoc.UpdateInfos do
@moduledoc false
use Mix.Task
alias Panpipe.Pandoc
@shortdoc "Updates the information about the supported features of Pandoc."
def run(_) do
Mix.Shell.IO.cmd "pandoc --list-extensions > #{Pandoc.extensions_file()}"
Mix.Shell.IO.cmd "pandoc --list-highlight-languages > #{Pandoc.highlight_languages_file()}"
Mix.Shell.IO.cmd "pandoc --list-highlight-styles > #{Pandoc.highlight_styles_file()}"
Mix.Shell.IO.cmd "pandoc --list-input-formats > #{Pandoc.input_formats_file()}"
Mix.Shell.IO.cmd "pandoc --list-output-formats > #{Pandoc.output_formats_file()}"
end
end
| 37.647059 | 95 | 0.726563 |
f7e170eed2c0fd48e492a7a82fca339e78da9a7d | 1,805 | exs | Elixir | farmbot_firmware/mix.exs | va2ron1/farmbot_os | c80a38058713adc2ad91a4802664bcfe8da9d96c | [
"MIT"
] | null | null | null | farmbot_firmware/mix.exs | va2ron1/farmbot_os | c80a38058713adc2ad91a4802664bcfe8da9d96c | [
"MIT"
] | null | null | null | farmbot_firmware/mix.exs | va2ron1/farmbot_os | c80a38058713adc2ad91a4802664bcfe8da9d96c | [
"MIT"
] | null | null | null | defmodule FarmbotFirmware.MixProject do
use Mix.Project
@version Path.join([__DIR__, "..", "VERSION"])
|> File.read!()
|> String.trim()
@elixir_version Path.join([__DIR__, "..", "ELIXIR_VERSION"])
|> File.read!()
|> String.trim()
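  # Resolves the currently checked-out commit of the vendored Arduino firmware
  # (assumes a git working copy under c_src/farmbot-arduino-firmware).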
defp arduino_commit do
opts = [cd: Path.join("c_src", "farmbot-arduino-firmware")]
System.cmd("git", ~w"rev-parse --verify HEAD", opts)
|> elem(0)
|> String.trim()
end
def project do
[
app: :farmbot_firmware,
version: @version,
elixir: @elixir_version,
elixirc_options: [warnings_as_errors: true, ignore_module_conflict: true],
arduino_commit: arduino_commit(),
start_permanent: Mix.env() == :prod,
deps: deps(),
test_coverage: [tool: ExCoveralls],
preferred_cli_env: [
test: :test,
coveralls: :test,
"coveralls.circle": :test,
"coveralls.detail": :test,
"coveralls.post": :test,
"coveralls.html": :test
],
source_url: "https://github.com/Farmbot/farmbot_os",
homepage_url: "http://farmbot.io",
docs: [
logo: "../farmbot_os/priv/static/farmbot_logo.png",
extras: Path.wildcard("../docs/**/*.md")
]
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:farmbot_telemetry, path: "../farmbot_telemetry", env: Mix.env()},
{:circuits_uart, "~> 1.4.2"},
{:excoveralls, "~> 0.13.3", only: [:test], targets: [:host]},
{:mimic, "~> 1.3.1", only: :test},
{:ex_doc, "~> 0.23.0", only: [:dev], targets: [:host], runtime: false}
]
end
end
| 28.203125 | 80 | 0.569529 |
f7e1b829b987cff39298451077f6ef72daedf385 | 793 | exs | Elixir | test/fixtures/umbrella/apps/api/mix.exs | snyk/mix-parser | 1816912559529b41919f1ccee09dcf79aad3548d | [
"Apache-2.0"
] | 2 | 2021-05-14T04:31:05.000Z | 2022-02-07T19:03:54.000Z | test/fixtures/umbrella/apps/api/mix.exs | snyk/snyk-hex-plugin | 1f4db02fe247c984b694957663e50d43362947b0 | [
"Apache-2.0"
] | 2 | 2021-04-29T08:07:22.000Z | 2022-02-28T22:48:46.000Z | test/fixtures/umbrella/apps/api/mix.exs | snyk/mix-parser | 1816912559529b41919f1ccee09dcf79aad3548d | [
"Apache-2.0"
] | 2 | 2021-06-30T23:41:25.000Z | 2021-10-07T22:06:25.000Z | defmodule Api.Mixfile do
use Mix.Project
def project do
[
app: :api,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.7",
elixirc_paths: elixirc_paths(Mix.env),
compilers: [:phoenix] ++ Mix.compilers,
start_permanent: Mix.env == :prod,
deps: deps()
]
end
def application do
[
mod: {Api.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp deps do
[
{:core, in_umbrella: true},
{:cowboy, "~> 1.0"},
{:phoenix, "~> 1.3.4"}
]
end
end
| 20.868421 | 56 | 0.532156 |
f7e20286612f591e6d8447c4d9e10558879f0ea7 | 525 | ex | Elixir | apps/extended_api/lib/extended_api_web/views/command_view.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 19 | 2019-09-17T18:14:36.000Z | 2021-12-06T07:29:27.000Z | apps/extended_api/lib/extended_api_web/views/command_view.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 5 | 2019-09-30T04:57:14.000Z | 2020-11-10T15:41:03.000Z | apps/extended_api/lib/extended_api_web/views/command_view.ex | iotaledger/chronicle | 73566e5613268e4b0c5951265ae4760cedb4051f | [
"Apache-2.0"
] | 2 | 2019-09-17T19:03:16.000Z | 2021-03-01T01:04:31.000Z | defmodule ExtendedApiWeb.CommandView do
use ExtendedApiWeb, :view
@derive Jason.Encoder
def render("getTrytes.json", %{trytes: trytes}) do
%{trytes: trytes}
end
def render("bundles.json", %{hashes: hashes}) do
%{hashes: hashes}
end
def render("addresses.json", %{hashes: hashes, hints: hints}) do
%{hashes: hashes, hints: hints}
end
def render("approvees.json", %{hashes: hashes}) do
%{hashes: hashes}
end
def render("tags.json", %{hints: hints}) do
%{hints: hints}
end
end
| 19.444444 | 66 | 0.651429 |
f7e22f2ac404eb1c68c8aea5fcf72058ec8abe13 | 663 | ex | Elixir | lib/packages_bot/melpa/client.ex | thiamsantos/melpa_telegram_bot | f71b4c4d6dec3dbfa1f137a0acb39cb02e72162f | [
"Apache-2.0"
] | 4 | 2019-04-12T23:24:20.000Z | 2019-04-17T12:18:12.000Z | lib/packages_bot/melpa/client.ex | thiamsantos/melpa_telegram_bot | f71b4c4d6dec3dbfa1f137a0acb39cb02e72162f | [
"Apache-2.0"
] | 7 | 2019-04-17T12:16:41.000Z | 2019-04-24T23:34:01.000Z | lib/packages_bot/melpa/client.ex | thiamsantos/melpa_telegram_bot | f71b4c4d6dec3dbfa1f137a0acb39cb02e72162f | [
"Apache-2.0"
] | 1 | 2019-04-13T13:37:18.000Z | 2019-04-13T13:37:18.000Z | defmodule PackagesBot.Melpa.Client do
use Tesla
plug Tesla.Middleware.BaseUrl, "https://melpa.org"
plug Tesla.Middleware.Headers, [{"user-agent", "Melpa telegram bot"}]
plug PackagesBot.TeslaLogger, marker: inspect(__MODULE__)
plug Tesla.Middleware.JSON
def archive do
case get("archive.json") do
{:ok, %{body: body, status: 200}} when is_map(body) -> {:ok, body}
_ -> {:error, "Failed to fetch archive!"}
end
end
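  # Illustrative usage (performs a live HTTP request to melpa.org; the shape of
  # the returned map follows MELPA's public archive.json):
  #
  #     {:ok, archive} = PackagesBot.Melpa.Client.archive()
  #     archive |> Map.keys() |> Enum.take(3)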
def download_counts do
case get("download_counts.json") do
{:ok, %{body: body, status: 200}} when is_map(body) -> {:ok, body}
_ -> {:error, "Failed to fetch download counts!"}
end
end
end
| 28.826087 | 72 | 0.656109 |
f7e2454a500292a972ed0384550dc0acf8f74ffb | 751 | ex | Elixir | lib/toy.ex | bconnes/arcgis | cfe9dd53cb58dac30f0d64ee2c4ad3661e866c50 | [
"MIT"
] | null | null | null | lib/toy.ex | bconnes/arcgis | cfe9dd53cb58dac30f0d64ee2c4ad3661e866c50 | [
"MIT"
] | null | null | null | lib/toy.ex | bconnes/arcgis | cfe9dd53cb58dac30f0d64ee2c4ad3661e866c50 | [
"MIT"
] | null | null | null | defmodule Toy do
use GenServer
def store(key, value) do
GenServer.cast(__MODULE__, {:store, key, value})
end
def get(key) do
GenServer.call(__MODULE__, {:get, key})
end
def start_link, do: GenServer.start_link(__MODULE__, :ok, [name: Toy])
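  # Illustrative usage (assumes the server is not already started elsewhere):
  #
  #     {:ok, _pid} = Toy.start_link()
  #     :ok = Toy.store(:answer, 42)
  #     42 = Toy.get(:answer)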
def init(_args) do
starting_state = %{
key_value: %{}
}
{:ok, starting_state}
end
def handle_cast({:store, key, value}, state) do
key_value = state.key_value
new_key_value = Map.put(key_value, key, value)
new_state = Map.put(state, :key_value, new_key_value)
{:noreply, new_state}
end
def handle_call({:get, key}, _from, state) do
key_value = state.key_value
value = Map.get(key_value, key)
{:reply, value, state}
end
end
| 19.25641 | 72 | 0.6498 |
f7e283b201ff743641d7f828c77010da88320b47 | 180 | exs | Elixir | test/tenancy_model_test.exs | wise-home/wise_homex | 851f13191ed67acb1003888c12f4e6b71800d31b | [
"MIT"
] | 2 | 2019-03-14T09:09:14.000Z | 2020-11-25T06:44:46.000Z | test/tenancy_model_test.exs | wise-home/wise_homex | 851f13191ed67acb1003888c12f4e6b71800d31b | [
"MIT"
] | 139 | 2019-03-14T06:58:55.000Z | 2022-03-29T12:28:09.000Z | test/tenancy_model_test.exs | wise-home/wise_homex | 851f13191ed67acb1003888c12f4e6b71800d31b | [
"MIT"
] | 7 | 2021-04-09T13:55:45.000Z | 2021-04-16T15:49:36.000Z | defmodule WiseHomex.TenancyModelTest do
@moduledoc false
use WiseHomex.Test.Case, async: true
import WiseHomex.Tenancy, only: [sort_date: 1]
doctest WiseHomex.Tenancy
end
| 22.5 | 48 | 0.783333 |
f7e2ce163af9b57cf4a57280362cdb1fbd1784e3 | 2,740 | ex | Elixir | lib/blockquote_web/controllers/daily_quote_controller.ex | allen-garvey/block-quote-phoenix | 5c0f5d16daf6bb515a8f1846c3e4311b368a7bdb | [
"MIT"
] | null | null | null | lib/blockquote_web/controllers/daily_quote_controller.ex | allen-garvey/block-quote-phoenix | 5c0f5d16daf6bb515a8f1846c3e4311b368a7bdb | [
"MIT"
] | 1 | 2020-09-05T17:07:13.000Z | 2020-09-05T17:07:13.000Z | lib/blockquote_web/controllers/daily_quote_controller.ex | allen-garvey/block-quote-phoenix | 5c0f5d16daf6bb515a8f1846c3e4311b368a7bdb | [
"MIT"
] | null | null | null | defmodule BlockquoteWeb.DailyQuoteController do
use BlockquoteWeb, :controller
alias Blockquote.Admin
alias Blockquote.Admin.DailyQuote
def custom_render(conn, template, assigns) do
custom_render(conn, view_module(conn), template, assigns)
end
def custom_render(conn, view_module, template, assigns) do
assigns = [{:item_name_singular, "daily quote"}] ++ assigns
render(conn, view_module, template, assigns)
end
def related_fields do
quotes = Admin.list_quotes() |> BlockquoteWeb.QuoteView.map_for_form
[quotes: quotes]
end
def index(conn, _params) do
daily_quotes = Admin.list_daily_quotes_for_index()
custom_render(conn, BlockquoteWeb.SharedView, "index.html", items: daily_quotes, item_view: view_module(conn), item_display_func: :to_s)
end
def new_page(conn, changeset, _params) do
custom_render(conn, "new.html", changeset: changeset, related_fields: related_fields())
end
def edit_page(conn, changeset, daily_quote) do
custom_render(conn, "edit.html", changeset: changeset, related_fields: related_fields(), item: daily_quote)
end
def new(conn, params) do
changeset = Admin.change_daily_quote(%DailyQuote{})
new_page(conn, changeset, params)
end
def create(conn, %{"daily_quote" => daily_quote_params}) do
case Admin.create_daily_quote(daily_quote_params) do
{:ok, daily_quote} ->
conn
|> put_flash(:info, "Daily quote created successfully.")
|> redirect(to: daily_quote_path(conn, :show, daily_quote))
{:error, %Ecto.Changeset{} = changeset} ->
new_page(conn, changeset, nil)
end
end
def show(conn, %{"id" => id}) do
daily_quote = Admin.get_daily_quote_for_show!(id)
custom_render(conn, "show.html", daily_quote: daily_quote)
end
def edit(conn, %{"id" => id}) do
daily_quote = Admin.get_daily_quote!(id)
changeset = Admin.change_daily_quote(daily_quote)
edit_page(conn, changeset, daily_quote)
end
def update(conn, %{"id" => id, "daily_quote" => daily_quote_params}) do
daily_quote = Admin.get_daily_quote!(id)
case Admin.update_daily_quote(daily_quote, daily_quote_params) do
{:ok, daily_quote} ->
conn
|> put_flash(:info, "Daily quote updated successfully.")
|> redirect(to: daily_quote_path(conn, :show, daily_quote))
{:error, %Ecto.Changeset{} = changeset} ->
edit_page(conn, changeset, daily_quote)
end
end
def delete(conn, %{"id" => id}) do
daily_quote = Admin.get_daily_quote!(id)
{:ok, _daily_quote} = Admin.delete_daily_quote(daily_quote)
conn
|> put_flash(:info, "Daily quote deleted successfully.")
|> redirect(to: daily_quote_path(conn, :index))
end
end
| 33.012048 | 140 | 0.69854 |
f7e333de8c3d49ed3458d36949f86763ba9eb5c4 | 4,980 | ex | Elixir | lib/smppex/mc.ex | desoulter/smppex | 1c8dbd9673291431b2d329a2cb20134c91857af2 | [
"MIT"
] | null | null | null | lib/smppex/mc.ex | desoulter/smppex | 1c8dbd9673291431b2d329a2cb20134c91857af2 | [
"MIT"
] | null | null | null | lib/smppex/mc.ex | desoulter/smppex | 1c8dbd9673291431b2d329a2cb20134c91857af2 | [
"MIT"
] | null | null | null | defmodule SMPPEX.MC do
@moduledoc """
This is a module for launching a TCP listener (or any other listener supported by `ranch`, for example, `ssl`) which handles incoming connections with the passed `SMPPEX.Session` implementations.
To start an MC one generally should do the following.
1. Implement an `SMPPEX.Session` behaviour.
```elixir
defmodule MyMCSession do
use SMPPEX.Session
# ...Callback implementation
end
```
  2. Start a listener, passing the implemented module as the callback module.
```elixir
  {:ok, listener} = SMPPEX.MC.start({MyMCSession, some_args},
    transport_opts: [port: 2775])
```
The important things to note are:
  * There is no `start_link` function, since the started listener is not a standalone
  `GenServer` but a pool of socket acceptors running under the `Ranch` supervisor.
  * Each received connection is served by its own process which uses the passed callback module (`MyMCSession`) for handling connection events. Each process has its own state initialized by the `init` callback receiving `socket`, `transport` and a copy of the arguments (`some_args`).
"""
alias :ranch, as: Ranch
alias SMPPEX.Session.Defaults
@default_transport :ranch_tcp
@default_acceptor_count 50
@spec start({module, args :: term}, opts :: Keyword.t()) ::
{:ok, listener_ref :: Ranch.ref()}
| {:error, reason :: term}
@doc """
  Starts a listener for an MC entity.
`module` is the callback module which should implement `SMPPEX.Session` behaviour.
`args` is the argument passed to the `init` callback each time a new connection is received.
`opts` is a keyword list of different options:
* `:transport` is Ranch transport used for TCP connections: either `ranch_tcp` (the default) or `ranch_ssl`;
* `:transport_opts` is a list of Ranch transport options. The major option is `{:port, port}`. The port is set to `0` by default, which means that the listener will accept connections on a random free port.
* `:acceptor_count` is the number of Ranch listener acceptors, #{@default_acceptor_count} by default.
* `:mc_opts` is a keyword list of MC options:
- `:timer_resolution` is interval of internal `ticks` on which time related events happen, like checking timeouts for pdus, checking SMPP timers, etc. The default is #{
inspect(Defaults.timer_resolution())
} ms;
  - `:session_init_limit` is the maximum time for which a session waits for an incoming bind request. If no bind request is received within this interval of time, the session stops. The default value is #{
inspect(Defaults.session_init_limit())
} ms;
  - `:enquire_link_limit` is the value for the enquire_link SMPP timer, i.e. the interval of SMPP session inactivity after which an enquire_link PDU is sent to "ping" the connection. The default value is #{
inspect(Defaults.enquire_link_limit())
} ms;
  - `:enquire_link_resp_limit` is the maximum time for which a session waits for an enquire_link PDU response. If the response is not received within this interval of time and no activity from the peer occurs, the session is then considered dead and the session stops. The default value is #{
inspect(Defaults.enquire_link_resp_limit())
} ms;
- `:inactivity_limit` is the maximum time for which a peer is allowed not to send PDUs (which are not response PDUs). If no such PDUs are received within this interval of time, the session stops. The default is #{
inspect(Defaults.inactivity_limit())
} ms;
  - `:response_limit` is the maximum time to wait for a response to a previously sent PDU. If the response is not received within this interval, the `handle_resp_timeout` callback is triggered for the original PDU. If the response is received later, it is discarded. The default value is #{
inspect(Defaults.response_limit())
} ms.
  If the `:mc_opts` list of options is omitted, all options take their default values.
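  For illustration, a start call combining several of these options might look like
  this (the port and timer values are arbitrary examples, not defaults):
  ```elixir
  SMPPEX.MC.start({MyMCSession, some_args},
    transport_opts: [port: 2775],
    mc_opts: [enquire_link_limit: 60_000, response_limit: 120_000]
  )
  ```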
The returned value is either `{:ok, ref}` or `{:error, reason}`. The `ref` can be later used
to stop the whole MC listener and all sessions received by it.
"""
def start({_module, _args} = mod_with_args, opts \\ []) do
acceptor_count = Keyword.get(opts, :acceptor_count, @default_acceptor_count)
transport = Keyword.get(opts, :transport, @default_transport)
transport_opts = Keyword.get(opts, :transport_opts, [{:port, 0}])
mc_opts = Keyword.get(opts, :mc_opts, [])
ref = make_ref()
start_result =
Ranch.start_listener(
ref,
acceptor_count,
transport,
transport_opts,
SMPPEX.TransportSession,
{SMPPEX.Session, [mod_with_args, mc_opts], :mc}
)
case start_result do
{:error, _} = error -> error
{:ok, _, _} -> {:ok, ref}
{:ok, _} -> {:ok, ref}
end
end
@spec stop(Ranch.ref()) :: :ok
@doc """
Stops MC listener and all its sessions.
"""
def stop(listener) do
Ranch.stop_listener(listener)
end
end
| 44.864865 | 293 | 0.705622 |
f7e36b9bc74f5f66189afe95e4d1bd11f77c1c34 | 2,380 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p5beta1_input_config.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p5beta1_input_config.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p5beta1_input_config.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1InputConfig do
@moduledoc """
The desired input location and metadata.
## Attributes
* `content` (*type:* `String.t`, *default:* `nil`) - File content, represented as a stream of bytes.
Note: As with all `bytes` fields, protobuffers use a pure binary
representation, whereas JSON representations use base64.
Currently, this field only works for BatchAnnotateFiles requests. It does
not work for AsyncBatchAnnotateFiles requests.
* `gcsSource` (*type:* `GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1GcsSource.t`, *default:* `nil`) - The Google Cloud Storage location to read the input from.
* `mimeType` (*type:* `String.t`, *default:* `nil`) - The type of the file. Currently only "application/pdf", "image/tiff" and
"image/gif" are supported. Wildcards are not supported.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:content => String.t(),
:gcsSource => GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1GcsSource.t(),
:mimeType => String.t()
}
field(:content)
field(:gcsSource, as: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1GcsSource)
field(:mimeType)
end
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1InputConfig do
def decode(value, options) do
GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1InputConfig.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p5beta1InputConfig do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.338983 | 171 | 0.740756 |
f7e372e0896063659585da2725c97a821829943f | 1,100 | exs | Elixir | mix.exs | wesleimp/humanizex | ba459d3b9f8f92b3900fd618ca3ce5920259d559 | [
"MIT"
] | null | null | null | mix.exs | wesleimp/humanizex | ba459d3b9f8f92b3900fd618ca3ce5920259d559 | [
"MIT"
] | null | null | null | mix.exs | wesleimp/humanizex | ba459d3b9f8f92b3900fd618ca3ce5920259d559 | [
"MIT"
] | null | null | null | defmodule Humanizex.MixProject do
use Mix.Project
@version "0.1.0"
@source_url "https://github.com/wesleimp/humanizex"
def project do
[
app: :humanizex,
version: "0.1.0",
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
description: "A simple library for making the web more humane.",
deps: deps(),
package: package(),
docs: docs()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ex_doc, "~> 0.23", only: :dev, runtime: false}
]
end
defp package do
[
files: ["lib", "mix.exs", "README.md", "LICENSE"],
maintainers: ["Weslei Juan Moser Pereira"],
licenses: ["MIT"],
links: %{GitHub: @source_url}
]
end
defp docs do
[
main: "readme",
source_url: @source_url,
source_ref: "v#{@version}",
formatter_opts: [gfm: true],
extras: [
"README.md"
]
]
end
end
| 20 | 70 | 0.563636 |
f7e3824c1c89caf57bb588aa1f606ba6023653fe | 1,139 | exs | Elixir | mix.exs | wundercar/paytm | 378ce47ae68588b80a60098f84ea50eb380cf7de | [
"MIT"
] | 2 | 2017-11-10T14:57:02.000Z | 2018-01-30T04:53:11.000Z | mix.exs | wundercar/paytm | 378ce47ae68588b80a60098f84ea50eb380cf7de | [
"MIT"
] | 2 | 2019-04-16T12:55:01.000Z | 2020-08-25T15:17:52.000Z | mix.exs | wundercar/paytm | 378ce47ae68588b80a60098f84ea50eb380cf7de | [
"MIT"
] | null | null | null | defmodule Paytm.Mixfile do
use Mix.Project
def project do
[
app: :paytm,
version: "0.8.2",
elixir: "~> 1.5",
start_permanent: Mix.env() == :prod,
deps: deps(),
elixirc_paths: elixirc_paths(Mix.env()),
package: package(),
aliases: aliases(),
description: "Paytm API client for Elixir with Wallet, Gratification and OAuth API support"
]
end
def application do
[
extra_applications: [:logger]
]
end
defp package do
[
maintainers: [
"Nihal Gonsalves <[email protected]>"
],
licenses: ["MIT"],
links: %{"GitHub" => "https://github.com/wundercar/paytm"}
]
end
defp deps do
[
{:httpoison, "~> 1.2"},
{:poison, "~> 3.1"},
{:money, "~> 1.4"},
{:exvcr, "~> 0.8", only: :test},
{:ex_doc, "~> 0.20.2", only: :dev},
{:timex, "~> 3.1"},
{:uuid, "~> 1.1", only: [:dev, :test]}
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp aliases do
[
compile: ["compile --warnings-as-errors"]
]
end
end
| 20.709091 | 97 | 0.524144 |
f7e391fbfd12a356b47adc6f6df089acf04a962b | 2,260 | ex | Elixir | exrack_firmware/lib/exrack_firmware/dht.ex | jirimakarius/exrack | e499ea62f61000463360adb6b2b7a9ce9695467f | [
"MIT"
] | null | null | null | exrack_firmware/lib/exrack_firmware/dht.ex | jirimakarius/exrack | e499ea62f61000463360adb6b2b7a9ce9695467f | [
"MIT"
] | null | null | null | exrack_firmware/lib/exrack_firmware/dht.ex | jirimakarius/exrack | e499ea62f61000463360adb6b2b7a9ce9695467f | [
"MIT"
] | null | null | null | defmodule ExRack.DHT do
@moduledoc false
use GenServer
@period 10_000
# Client
def start_link(state) do
GenServer.start_link(__MODULE__, state, name: __MODULE__)
end
def config do
Application.fetch_env!(:exrack_firmware, ExRack.DHT)
|> Map.new()
end
def temperature do
GenServer.call(__MODULE__, :temperature)
end
def humidity do
GenServer.call(__MODULE__, :humidity)
end
def data do
GenServer.call(__MODULE__, :data)
end
def subscribe() do
GenServer.call(__MODULE__, :subscribe)
end
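  # Illustrative usage (assumes the server is running with :gpio and :sensor
  # configured; the message shape matches what handle_info/2 below sends):
  #
  #     ExRack.DHT.subscribe()
  #     receive do
  #       {:dht, %{temperature: t, humidity: h}} -> {t, h}
  #     end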
# Server
@impl true
def init(state) do
state =
state
|> Map.merge(%{temperature: nil, humidity: nil, subscribed: []})
schedule_work()
{:ok, state}
end
@impl true
def handle_info(:work, %{:gpio => gpio, :sensor => sensor, :subscribed => subscribed} = state) do
state =
case DHT.read(gpio, sensor) do
{:ok, %{:humidity => humidity, :temperature => temperature}} ->
          :telemetry.execute([:dht, :humidity], %{percent: humidity}, %{sensor: sensor})
:telemetry.execute([:dht, :temperature], %{celsius: temperature}, %{sensor: sensor})
Enum.each(subscribed, fn pid ->
if Process.alive?(pid) do
Process.send(pid, {:dht, %{:temperature => temperature, :humidity => humidity}}, [])
end
end)
Map.merge(state, %{:temperature => temperature, :humidity => humidity})
_ ->
state
end
schedule_work()
{:noreply, state}
end
@impl true
def handle_call(:temperature, _from, %{:temperature => temperature} = state) do
{:reply, temperature, state}
end
@impl true
def handle_call(:humidity, _from, %{:humidity => humidity} = state) do
{:reply, humidity, state}
end
@impl true
def handle_call(:data, _from, %{:humidity => humidity, :temperature => temperature} = state) do
{:reply, %{:temperature => temperature, :humidity => humidity}, state}
end
@impl true
def handle_call(:subscribe, {pid, _}, state) do
subscribed_state = [pid | state.subscribed]
{:reply, :ok, Map.put(state, :subscribed, subscribed_state)}
end
defp schedule_work do
Process.send_after(self(), :work, @period)
end
end
| 23.061224 | 99 | 0.625664 |
f7e3cae62aab95169ebe13176e4749bbdd72e03d | 748 | ex | Elixir | lib/sir_alex_web/views/group_view.ex | dnsbty/sir_alex | ab569dc7692826411877728444eaa00ec05767c2 | [
"MIT"
] | 3 | 2019-05-19T05:27:37.000Z | 2020-04-21T06:23:08.000Z | lib/sir_alex_web/views/group_view.ex | dnsbty/sir_alex | ab569dc7692826411877728444eaa00ec05767c2 | [
"MIT"
] | 3 | 2017-10-28T20:52:07.000Z | 2017-11-24T08:15:27.000Z | lib/sir_alex_web/views/group_view.ex | dnsbty/sir_alex | ab569dc7692826411877728444eaa00ec05767c2 | [
"MIT"
] | 1 | 2020-04-15T16:31:28.000Z | 2020-04-15T16:31:28.000Z | defmodule SirAlexWeb.GroupView do
use SirAlexWeb, :view
alias SirAlex.Groups.Group
def action_button(conn, %Group{is_private?: false} = group, false) do
join_button("Join Group", conn, group)
end
def action_button(conn, %Group{is_private?: true} = group, false) do
join_button("Request to Join Group", conn, group)
end
def action_button(conn, group, true) do
options = [
to: member_path(conn, :leave, group),
method: :delete,
class: "btn btn-secondary"
]
link("Leave Group", options)
end
defp join_button(link_text, conn, group) do
options = [
to: member_path(conn, :create, group),
method: :post,
class: "btn btn-primary"
]
link(link_text, options)
end
end
| 25.793103 | 71 | 0.656417 |
f7e3e20c9ccc645eec3180675d8fd995be6d55ba | 265 | ex | Elixir | lib/chat_api/repo.ex | vbrazo/chat_elixir_api | f2595d123934014b3a7c1b3b63b987fa3df5b3ad | [
"MIT"
] | 2 | 2021-04-30T08:30:26.000Z | 2021-04-30T16:20:39.000Z | lib/chat_api/repo.ex | vbrazo/chat_elixir_api | f2595d123934014b3a7c1b3b63b987fa3df5b3ad | [
"MIT"
] | null | null | null | lib/chat_api/repo.ex | vbrazo/chat_elixir_api | f2595d123934014b3a7c1b3b63b987fa3df5b3ad | [
"MIT"
] | null | null | null | defmodule ChatApi.Repo do
use Ecto.Repo, otp_app: :chat_api
@doc """
Dynamically loads the repository url from the
DATABASE_URL environment variable.
"""
def init(_, opts) do
{:ok, Keyword.put(opts, :url, System.get_env("DATABASE_URL"))}
end
end
| 22.083333 | 66 | 0.698113 |
f7e3e48b3e37c3926c625f5899d61c1b822416fe | 1,135 | ex | Elixir | larc_website/test/support/channel_case.ex | Cate-Lukner/cate-lukner-internship | 43e8b467287ea3a7955e23f18180cb4f849e6620 | [
"MIT"
] | null | null | null | larc_website/test/support/channel_case.ex | Cate-Lukner/cate-lukner-internship | 43e8b467287ea3a7955e23f18180cb4f849e6620 | [
"MIT"
] | null | null | null | larc_website/test/support/channel_case.ex | Cate-Lukner/cate-lukner-internship | 43e8b467287ea3a7955e23f18180cb4f849e6620 | [
"MIT"
] | 1 | 2020-05-22T19:21:24.000Z | 2020-05-22T19:21:24.000Z | defmodule LarcWebsiteWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use LarcWebsiteWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
import Phoenix.ChannelTest
import LarcWebsiteWeb.ChannelCase
# The default endpoint for testing
@endpoint LarcWebsiteWeb.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(LarcWebsite.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(LarcWebsite.Repo, {:shared, self()})
end
:ok
end
end
| 27.682927 | 73 | 0.73304 |
f7e3f3c4dfa4545a5bc062bf2ccb779db91594e3 | 3,536 | ex | Elixir | lib/ref_web/router.ex | tk04/ref | 6547a58344141e09300284943264dfe02f1e84f3 | [
"MIT"
] | null | null | null | lib/ref_web/router.ex | tk04/ref | 6547a58344141e09300284943264dfe02f1e84f3 | [
"MIT"
] | null | null | null | lib/ref_web/router.ex | tk04/ref | 6547a58344141e09300284943264dfe02f1e84f3 | [
"MIT"
] | null | null | null | defmodule RefWeb.Router do
use RefWeb, :router
use PowAssent.Phoenix.Router
use Pow.Phoenix.Router
use Pow.Extension.Phoenix.Router,
extensions: [PowExtensionOne, PowExtensionTwo]
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_live_flash
plug :put_root_layout, {RefWeb.LayoutView, :root}
plug :protect_from_forgery
plug :put_secure_browser_headers
end
pipeline :api do
plug :accepts, ["json"]
# get "/users", UserController, :get_users
end
pipeline :skip_csrf_protection do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
end
  # use this pipeline for routes that require an authenticated user
pipeline :protected do
plug Pow.Plug.RequireAuthenticated,
error_handler: Pow.Phoenix.PlugErrorHandler
end
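  # Illustrative usage (no such scope exists in this router; the route and
  # controller names below are examples only):
  #
  #     scope "/account", RefWeb do
  #       pipe_through [:browser, :protected]
  #       get "/settings", SettingsController, :show
  #     end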
scope "/" do
pipe_through [:browser]
scope "/", RefWeb, as: "pow" do
delete "/session", SessionController, :delete
post "/session", SessionController, :create
post "/registration", RegistrationController, :create
get "/auth/paypal/callback", PaypalController, :token
end
pow_session_routes()
pow_extension_routes()
end
scope "/" do
pipe_through :browser
pow_assent_routes()
pow_routes()
end
scope "/", RefWeb do
pipe_through :browser
get "/", IndexController, :index
live "/page", PageLive, :index
live "/posts", PostLive.Index, :index
live "/posts/new", PostLive.Index, :new
live "/posts/:id/edit", PostLive.Index, :edit
live "/posts/:id", PostLive.Show, :show
live "/posts/:id/show/edit", PostLive.Show, :edit
live "/comments/new", PostLive.Index, :new_comment
resources "/:username/admin", ServiceController
post "/:username/admin/status", UserController, :commission_status
live "/messages/:username", MessageLive.Index, :index
live "/messages/new", MessageLive.Index, :new
live "/messages/:id/edit", MessageLive.Index, :edit
live "/messages/:id", MessageLive.Show, :show
live "/messages/:id/show/edit", MessageLive.Show, :edit
get "/:username", UserController, :show
post "/:username", UserController, :create_follow
delete "/:username", UserController, :delete_follow
live "/:username/requests/:rid", MessageLive.Index, :index
resources "/:username/:id/requests/", RequestController
get "/:username/:id/requests", RequestController, :index
get "/:username/:id/requests/:id/edit", RequestController, :edit
get "/:username/:id/requests/new", RequestController, :new
post "/:username/:id/requests", RequestController, :create
patch "/:username/:id/requests/:id", RequestController, :update
put "/:username/:id/requests/:id", RequestController, :update
delete "/:username/:id/requests/:id", RequestController, :delete
end
# Other scopes may use custom stacks.
# scope "/api", RefWeb do
# pipe_through :api
# end
# Enables LiveDashboard only for development
#
# If you want to use the LiveDashboard in production, you should put
# it behind authentication and allow only admins to access it.
# If your application does not have an admins-only section yet,
# you can use Plug.BasicAuth to set up some basic authentication
# as long as you are also using SSL (which you should anyway).
if Mix.env() in [:dev, :test] do
import Phoenix.LiveDashboard.Router
scope "/" do
pipe_through :browser
live_dashboard "/dashboard", metrics: RefWeb.Telemetry
end
end
end
| 29.714286 | 70 | 0.692873 |
f7e3fda42bd937bddf8d40a917c89ffff165f666 | 1,825 | ex | Elixir | clients/cloud_tasks/lib/google_api/cloud_tasks/v2beta2/model/list_locations_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_tasks/lib/google_api/cloud_tasks/v2beta2/model/list_locations_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_tasks/lib/google_api/cloud_tasks/v2beta2/model/list_locations_response.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudTasks.V2beta2.Model.ListLocationsResponse do
@moduledoc """
The response message for Locations.ListLocations.
## Attributes
* `locations` (*type:* `list(GoogleApi.CloudTasks.V2beta2.Model.Location.t)`, *default:* `nil`) - A list of locations that matches the specified filter in the request.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - The standard List next-page token.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:locations => list(GoogleApi.CloudTasks.V2beta2.Model.Location.t()) | nil,
:nextPageToken => String.t() | nil
}
field(:locations, as: GoogleApi.CloudTasks.V2beta2.Model.Location, type: :list)
field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.CloudTasks.V2beta2.Model.ListLocationsResponse do
def decode(value, options) do
GoogleApi.CloudTasks.V2beta2.Model.ListLocationsResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudTasks.V2beta2.Model.ListLocationsResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.5 | 171 | 0.742466 |
f7e41164b29c92469d3b334beb04a4487923369b | 9,497 | ex | Elixir | lib/web/router/router.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:11.000Z | 2020-08-27T18:43:11.000Z | lib/web/router/router.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | null | null | null | lib/web/router/router.ex | IvanPereyra-23/PaaS | 0179c7b57645473308b0a295a70b6284ed220fbf | [
"Apache-2.0"
] | 1 | 2020-08-27T18:43:21.000Z | 2020-08-27T18:43:21.000Z | # Copyright(c) 2015-2020 ACCESS CO., LTD. All rights reserved.
defmodule Antikythera.Router do
@moduledoc """
Defines the antikythera routing DSL.
## Routing macros
This module defines macros to be used in each gear's Router module.
The names of the macros are the same as the HTTP verbs: `get`, `post`, etc.
The macros take the following 4 arguments (although you can omit the last and just pass 3 of them):
- URL path pattern which consists of '/'-separated segments. The 1st character must be '/'.
    To match the incoming request path against a pattern, you can use placeholders. See the examples below for usage.
- Controller module.
Antikythera expects that the module name given here does not contain `GearName.Controller.` as a prefix;
it's automatically prepended by antikythera.
- Name of the controller action as an atom.
- Keyword list of options.
Currently available options are `:from` and `:as`. See below for further explanations.
## Example
If you define the following router module,
defmodule MyGear.Router do
use Antikythera.Router
static_prefix "/static"
websocket "/ws"
get "/foo" , Hello, :exact_match
post "/foo/:a/:b" , Hello, :placeholders
put "/foo/bar/*w", Hello, :wildcard
end
Then the following requests are routed as:
- `GET "/foo"` => `MyGear.Controller.Hello.exact_match/1` is invoked with `path_matches`: `%{}`
- `POST "/foo/bar/baz"` => `MyGear.Controller.Hello.placeholders/1` is invoked with `path_matches`: `%{a: "bar", b: "baz"}`
- `PUT "/foo/bar/abc/def/ghi"` => `MyGear.Controller.Hello.wildcard/1` is invoked with `path_matches`: `%{w: "abc/def/ghi"}`
Note that
- Each controller action is expected to receive a `Antikythera.Conn` struct and returns a `Antikythera.Conn` struct.
- `Antikythera.Conn` struct has a field `request` which is a `Antikythera.Request` struct.
- Matched segments are URL-decoded and stored in `path_matches` field in `Antikythera.Request`.
If the result of URL-decoding is nonprintable binary, the request is rejected.
## Websocket endpoint
To enable websocket interaction with clients, you must first define `MyGear.Websocket` module.
See `Antikythera.Websocket` for more details about websocket handler module.
Then invoke `websocket/1` macro in your router.
websocket "/ws_path_pattern"
The path pattern may have placeholders in the same way as normal routes.
GET request with appropriate headers to this path will initialize a websocket connection using the HTTP 1.1 upgrade mechanism.
If your gear does not interact with clients via websocket, simply don't invoke `websocket/1` macro in your router.
## Static file serving
You can serve your static assets by placing them under `/priv/static` directory in your gear project.
The endpoint to be used can be specified by `static_prefix/1` macro.
For example, if you add
static_prefix "/assets"
to your router, you can download `/priv/static/html/index.html` file by sending GET request to the path `/assets/html/index.html`.
If you don't need to serve static assets, just don't call `static_prefix/1` macro in your router.
Currently, static assets served in this way are NOT automatically gzip compressed,
  even if the `accept-encoding: gzip` request header is set.
It is recommended to use CDN to deliver large static assets in production.
See also `Antikythera.Asset` for usage of CDN in delivery of static assets.
## Web requests and gear-to-gear (g2g) requests
Antikythera treats both web requests and g2g requests in basically the same way.
  This means that if you define a route in your gear, requests can be sent to it using both HTTP and g2g communication.
If you want to define a route that can be accessible only via g2g communication, specify `from: :gear` option.
get "/foo", Hello, :action1, from: :gear
post "/bar", Hello, :action2, from: :gear
Similarly passing `from: :web` makes the route accessible only from web request.
When dealing with multiple routes, `only_from_web/1` and `only_from_gear/1` macros can be used.
For example, the following routes definition is the same as above one.
only_from_gear do
get "/foo", Hello, :action1
post "/bar", Hello, :action2
end
## Reverse routing
To generate URL path of a route (e.g. a link in HTML), you will want to refer to the route's path.
For this purpose you can specify `:as` option.
For example, you have the following router module
defmodule MyGear.Router do
use Antikythera.Router
get "/foo/:a/:b/*c", Hello, :placeholders, as: :myroute
end
By writing this the router automatically defines a function `myroute_path/4`,
which receives segments that fill placeholders and an optional map for query parameters.
MyGear.Router.myroute_path("segment_a", "segment_b", ["wildcard", "part"])
=> "/foo/segment_a/segment_b/wildcard/part
MyGear.Router.myroute_path("segment_a", "segment_b", ["wildcard", "part"], %{"query" => "param"})
=> "/foo/segment_a/segment_b/wildcard/part?query=param
Reverse routing helper functions automatically URI-encode all given arguments.
If websocket endpoint is enabled, you can get its path with `MyGear.Router.websocket_path/0`.
Also if static file serving is enabled, path prefix for static files can be obtained by `MyGear.Router.static_prefix/0`.
"""
alias Antikythera.Router.Impl
defmacro __using__(_) do
quote do
import Antikythera.Router
Module.register_attribute(__MODULE__, :antikythera_web_routes , accumulate: true)
Module.register_attribute(__MODULE__, :antikythera_gear_routes, accumulate: true)
Module.put_attribute(__MODULE__, :from_option, nil)
@before_compile Antikythera.Router
end
end
defmacro __before_compile__(%Macro.Env{module: module}) do
web_routing_source = Module.get_attribute(module, :antikythera_web_routes ) |> Enum.reverse()
gear_routing_source = Module.get_attribute(module, :antikythera_gear_routes) |> Enum.reverse()
routing_quotes(module, web_routing_source, gear_routing_source) ++ reverse_routing_quotes(web_routing_source, gear_routing_source)
end
defp routing_quotes(module, web_source, gear_source) do
Impl.generate_route_function_clauses(module, :web, web_source) ++ Impl.generate_route_function_clauses(module, :gear, gear_source)
end
defp reverse_routing_quotes(web_source, gear_source) do
alias Antikythera.Router.Reverse
Enum.uniq(web_source ++ gear_source)
|> Enum.reject(fn {_verb, _path, _controller, _action, opts} -> is_nil(opts[:as]) end)
|> Enum.map(fn {_verb, path, _controller, _action, opts} -> Reverse.define_path_helper(opts[:as], path) end)
end
for from <- [:web, :gear] do
defmacro unquote(:"only_from_#{from}")(do: block) do
current_from = unquote(from)
quote do
if @from_option, do: raise "nested invocation of `only_from_*` is not allowed"
@from_option unquote(current_from)
unquote(block)
@from_option nil
end
end
end
for verb <- Antikythera.Http.Method.all() do
defmacro unquote(verb)(path, controller, action, opts \\ []) do
%Macro.Env{module: router_module} = __CALLER__
add_route(router_module, unquote(verb), path, controller, action, opts)
end
end
defp add_route(router_module, verb, path, controller_given, action, opts) do
quote bind_quoted: [r_m: router_module, verb: verb, path: path, c_g: controller_given, action: action, opts: opts] do
controller = Antikythera.Router.fully_qualified_controller_module(r_m, c_g, opts)
from_grouped = Module.get_attribute(__MODULE__, :from_option)
from_per_route = opts[:from]
if from_grouped && from_per_route, do: raise "using :from option within `only_from_*` block is not allowed"
opts_without_from_option = Keyword.delete(opts, :from)
routing_info = {verb, path, controller, action, opts_without_from_option}
case from_grouped || from_per_route do
:web -> @antikythera_web_routes routing_info
:gear -> @antikythera_gear_routes routing_info
nil ->
@antikythera_web_routes routing_info
@antikythera_gear_routes routing_info
end
end
end
def fully_qualified_controller_module(router_module, controller, opts) do
if opts[:websocket?] do
controller
else
[
Module.split(router_module) |> hd(),
"Controller",
Macro.expand(controller, __ENV__), # `{:__aliases__, meta, atoms}` must be expanded
] |> Module.concat() # Executed during compilation; `Module.concat/1` causes no problem
end
end
defmacro websocket(path, opts \\ []) do
%Macro.Env{module: router_module} = __CALLER__
ws_module = Module.split(router_module) |> hd() |> Module.concat("Websocket") # during compilation, it's safe to call `Module.concat/2`
quote do
get unquote(path), unquote(ws_module), :connect, [only_from: :web, websocket?: true] ++ unquote(opts)
end
end
defmacro static_prefix(prefix) do
quote bind_quoted: [prefix: prefix] do
if prefix =~ ~R|\A(/[0-9A-Za-z.~_-]+)+\z| do
def static_prefix(), do: unquote(prefix)
else
raise "invalid path prefix given to `static_prefix/1`: #{prefix}"
end
end
end
end
| 42.208889 | 139 | 0.710119 |
f7e454ea5f4b916a670325f06fdeccd2f458d73e | 2,547 | ex | Elixir | clients/analytics/lib/google_api/analytics/v3/model/segment.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/analytics/lib/google_api/analytics/v3/model/segment.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/analytics/lib/google_api/analytics/v3/model/segment.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Analytics.V3.Model.Segment do
@moduledoc """
JSON template for an Analytics segment.
## Attributes
* `created` (*type:* `DateTime.t`, *default:* `nil`) - Time the segment was created.
* `definition` (*type:* `String.t`, *default:* `nil`) - Segment definition.
* `id` (*type:* `String.t`, *default:* `nil`) - Segment ID.
* `kind` (*type:* `String.t`, *default:* `analytics#segment`) - Resource type for Analytics segment.
* `name` (*type:* `String.t`, *default:* `nil`) - Segment name.
* `segmentId` (*type:* `String.t`, *default:* `nil`) - Segment ID. Can be used with the 'segment' parameter in Core Reporting API.
* `selfLink` (*type:* `String.t`, *default:* `nil`) - Link for this segment.
* `type` (*type:* `String.t`, *default:* `nil`) - Type for a segment. Possible values are "BUILT_IN" or "CUSTOM".
* `updated` (*type:* `DateTime.t`, *default:* `nil`) - Time the segment was last modified.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:created => DateTime.t(),
:definition => String.t(),
:id => String.t(),
:kind => String.t(),
:name => String.t(),
:segmentId => String.t(),
:selfLink => String.t(),
:type => String.t(),
:updated => DateTime.t()
}
field(:created, as: DateTime)
field(:definition)
field(:id)
field(:kind)
field(:name)
field(:segmentId)
field(:selfLink)
field(:type)
field(:updated, as: DateTime)
end
defimpl Poison.Decoder, for: GoogleApi.Analytics.V3.Model.Segment do
def decode(value, options) do
GoogleApi.Analytics.V3.Model.Segment.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Analytics.V3.Model.Segment do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.873239 | 134 | 0.654888 |
f7e498499ee512c115a6a388217f4712b37de09e | 15,102 | exs | Elixir | test/phoenix_live_view/integrations/params_test.exs | feliperenan/phoenix_live_view | af65bb51fe12ea88e7c66808d2b1118e1c491ddd | [
"MIT"
] | null | null | null | test/phoenix_live_view/integrations/params_test.exs | feliperenan/phoenix_live_view | af65bb51fe12ea88e7c66808d2b1118e1c491ddd | [
"MIT"
] | null | null | null | test/phoenix_live_view/integrations/params_test.exs | feliperenan/phoenix_live_view | af65bb51fe12ea88e7c66808d2b1118e1c491ddd | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveView.ParamsTest do
# Telemetry events need to run asynchronously
use ExUnit.Case, async: false
import Plug.Conn
import Phoenix.ConnTest
import Phoenix.LiveViewTest
import Phoenix.LiveView.TelemetryTestHelpers
alias Phoenix.LiveView
alias Phoenix.LiveViewTest.{Endpoint, DOM}
@endpoint Endpoint
setup_all do
ExUnit.CaptureLog.capture_log(fn -> Endpoint.start_link() end)
:ok
end
setup do
conn =
Phoenix.ConnTest.build_conn(:get, "http://www.example.com/", nil)
|> Plug.Test.init_test_session(%{})
|> put_session(:test_pid, self())
{:ok, conn: conn}
end
defp put_serialized_session(conn, key, value) do
put_session(conn, key, :erlang.term_to_binary(value))
end
describe "handle_params on disconnected mount" do
test "is called with named and query string params", %{conn: conn} do
conn = get(conn, "/counter/123", query1: "query1", query2: "query2")
response = html_response(conn, 200)
assert response =~
rendered_to_string(~s|params: %{"id" => "123", "query1" => "query1", "query2" => "query2"}|)
assert response =~
rendered_to_string(~s|mount: %{"id" => "123", "query1" => "query1", "query2" => "query2"}|)
end
test "telemetry events are emitted on success", %{conn: conn} do
attach_telemetry([:phoenix, :live_view, :handle_params])
get(conn, "/counter/123", query1: "query1", query2: "query2")
assert_receive {:event, [:phoenix, :live_view, :handle_params, :start], %{system_time: _},
metadata}
refute metadata.socket.transport_pid
assert metadata.params == %{"query1" => "query1", "query2" => "query2", "id" => "123"}
assert metadata.uri == "http://www.example.com/counter/123?query1=query1&query2=query2"
assert_receive {:event, [:phoenix, :live_view, :handle_params, :stop], %{duration: _},
metadata}
refute metadata.socket.transport_pid
assert metadata.params == %{"query1" => "query1", "query2" => "query2", "id" => "123"}
assert metadata.uri == "http://www.example.com/counter/123?query1=query1&query2=query2"
end
test "telemetry events are emitted on exception", %{conn: conn} do
attach_telemetry([:phoenix, :live_view, :handle_params])
assert_raise Plug.Conn.WrapperError, ~r/boom/, fn ->
get(conn, "/errors", crash_on: "disconnected_handle_params")
end
assert_receive {:event, [:phoenix, :live_view, :handle_params, :start], %{system_time: _},
metadata}
refute metadata.socket.transport_pid
assert metadata.params == %{"crash_on" => "disconnected_handle_params"}
assert metadata.uri == "http://www.example.com/errors?crash_on=disconnected_handle_params"
assert_receive {:event, [:phoenix, :live_view, :handle_params, :exception], %{duration: _},
metadata}
refute metadata.socket.transport_pid
assert metadata.params == %{"crash_on" => "disconnected_handle_params"}
assert metadata.uri == "http://www.example.com/errors?crash_on=disconnected_handle_params"
end
test "hard redirects", %{conn: conn} do
assert conn
|> put_serialized_session(
:on_handle_params,
&{:noreply, LiveView.redirect(&1, to: "/")}
)
|> get("/counter/123?from=handle_params")
|> redirected_to() == "/"
end
test "hard redirect with flash message", %{conn: conn} do
conn =
put_serialized_session(conn, :on_handle_params, fn socket ->
{:noreply, socket |> LiveView.put_flash(:info, "msg") |> LiveView.redirect(to: "/")}
end)
|> fetch_flash()
|> get("/counter/123?from=handle_params")
assert redirected_to(conn) == "/"
assert get_flash(conn, :info) == "msg"
end
test "push_patch", %{conn: conn} do
assert conn
|> put_serialized_session(:on_handle_params, fn socket ->
{:noreply, LiveView.push_patch(socket, to: "/counter/123?from=rehandled_params")}
end)
|> get("/counter/123?from=handle_params")
|> redirected_to() == "/counter/123?from=rehandled_params"
end
test "push_redirect", %{conn: conn} do
assert conn
|> put_serialized_session(:on_handle_params, fn socket ->
{:noreply, LiveView.push_redirect(socket, to: "/thermo/456")}
end)
|> get("/counter/123?from=handle_params")
|> redirected_to() == "/thermo/456"
end
test "with encoded URL", %{conn: conn} do
assert get(conn, "/counter/Wm9uZ%2FozNzYxOA%3D%3D?foo=bar+15%26")
assert_receive {:handle_params, _uri, _assigns,
%{"id" => "Wm9uZ/ozNzYxOA==", "foo" => "bar 15&"}}
end
end
describe "handle_params on connected mount" do
test "is called on connected mount with query string params from get", %{conn: conn} do
{:ok, _, html} =
conn
|> get("/counter/123?q1=1", q2: "2")
|> live()
assert html =~ rendered_to_string(~s|params: %{"id" => "123", "q1" => "1", "q2" => "2"}|)
assert html =~ rendered_to_string(~s|mount: %{"id" => "123", "q1" => "1", "q2" => "2"}|)
end
test "is called on connected mount with query string params from live", %{conn: conn} do
{:ok, _, html} =
conn
|> live("/counter/123?q1=1")
assert html =~ rendered_to_string(~s|%{"id" => "123", "q1" => "1"}|)
end
test "telemetry events are emitted on success", %{conn: conn} do
attach_telemetry([:phoenix, :live_view, :handle_params])
live(conn, "/counter/123?foo=bar")
assert_receive {:event, [:phoenix, :live_view, :handle_params, :start], %{system_time: _},
%{socket: %{transport_pid: pid}} = metadata}
when is_pid(pid)
assert metadata.params == %{"id" => "123", "foo" => "bar"}
assert metadata.uri == "http://www.example.com/counter/123?foo=bar"
assert_receive {:event, [:phoenix, :live_view, :handle_params, :stop], %{duration: _},
%{socket: %{transport_pid: pid}} = metadata}
when is_pid(pid)
assert metadata.params == %{"id" => "123", "foo" => "bar"}
assert metadata.uri == "http://www.example.com/counter/123?foo=bar"
end
test "telemetry events are emitted on exception", %{conn: conn} do
attach_telemetry([:phoenix, :live_view, :handle_params])
assert catch_exit(live(conn, "/errors?crash_on=connected_handle_params"))
assert_receive {:event, [:phoenix, :live_view, :handle_params, :start], %{system_time: _},
%{socket: %Phoenix.LiveView.Socket{transport_pid: pid}}}
when is_pid(pid)
assert_receive {:event, [:phoenix, :live_view, :handle_params, :exception], %{duration: _},
%{socket: %Phoenix.LiveView.Socket{transport_pid: pid}}}
when is_pid(pid)
end
test "hard redirects", %{conn: conn} do
{:error, {:redirect, %{to: "/thermo/456"}}} =
conn
|> put_serialized_session(:on_handle_params, fn socket ->
if LiveView.connected?(socket) do
{:noreply, LiveView.redirect(socket, to: "/thermo/456")}
else
{:noreply, socket}
end
end)
|> get("/counter/123?from=handle_params")
|> live()
end
test "push_patch", %{conn: conn} do
{:ok, counter_live, _html} =
conn
|> put_serialized_session(:on_handle_params, fn socket ->
if LiveView.connected?(socket) do
{:noreply, LiveView.push_patch(socket, to: "/counter/123?from=rehandled_params")}
else
{:noreply, socket}
end
end)
|> get("/counter/123?from=handle_params")
|> live()
response = render(counter_live)
assert response =~ rendered_to_string(~s|params: %{"from" => "rehandled_params", "id" => "123"}|)
assert response =~ rendered_to_string(~s|mount: %{"from" => "handle_params", "id" => "123"}|)
end
test "push_redirect", %{conn: conn} do
{:error, {:live_redirect, %{to: "/thermo/456"}}} =
conn
|> put_serialized_session(:on_handle_params, fn socket ->
if LiveView.connected?(socket) do
{:noreply, LiveView.push_redirect(socket, to: "/thermo/456")}
else
{:noreply, socket}
end
end)
|> get("/counter/123?from=handle_params")
|> live()
end
test "with encoded URL", %{conn: conn} do
{:ok, _counter_live, _html} = live(conn, "/counter/Wm9uZTozNzYxOA%3D%3D?foo=bar+15%26")
assert_receive {:handle_params, _uri, %{connected?: true},
%{"id" => "Wm9uZTozNzYxOA==", "foo" => "bar 15&"}}
end
end
describe "live_link" do
test "renders static container", %{conn: conn} do
container =
conn
|> get("/counter/123", query1: "query1", query2: "query2")
|> html_response(200)
|> DOM.parse()
|> hd()
assert {
"div",
[
{"data-phx-main", "true"},
{"data-phx-session", _},
{"data-phx-static", _},
{"id", "phx-" <> _}
],
["The value is: 1" <> _]
} = container
end
test "invokes handle_params", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
assert render_patch(counter_live, "/counter/123?filter=true") =~
rendered_to_string(~s|%{"filter" => "true", "id" => "123"}|)
end
test "with encoded URL", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
assert render_patch(counter_live, "/counter/Wm9uZTozNzYxOa%3d%3d?foo=bar+15%26") =~
rendered_to_string(~s|%{"foo" => "bar 15&", "id" => "Wm9uZTozNzYxOa=="}|)
end
end
describe "push_patch" do
test "from event callback ack", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
assert render_click(counter_live, :push_patch, %{to: "/counter/123?from=event_ack"}) =~
rendered_to_string(~s|%{"from" => "event_ack", "id" => "123"}|)
assert_patch(counter_live, "/counter/123?from=event_ack")
end
test "from handle_info", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
send(counter_live.pid, {:push_patch, "/counter/123?from=handle_info"})
assert render(counter_live) =~ rendered_to_string(~s|%{"from" => "handle_info", "id" => "123"}|)
end
test "from handle_cast", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
:ok = GenServer.cast(counter_live.pid, {:push_patch, "/counter/123?from=handle_cast"})
assert render(counter_live) =~ rendered_to_string(~s|%{"from" => "handle_cast", "id" => "123"}|)
end
test "from handle_call", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
next = fn socket ->
{:reply, :ok, LiveView.push_patch(socket, to: "/counter/123?from=handle_call")}
end
:ok = GenServer.call(counter_live.pid, {:push_patch, next})
assert render(counter_live) =~ rendered_to_string(~s|%{"from" => "handle_call", "id" => "123"}|)
end
test "from handle_params", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
next = fn socket ->
send(self(), {:set, :val, 1000})
new_socket =
LiveView.assign(socket, :on_handle_params, fn socket ->
{:noreply, LiveView.push_patch(socket, to: "/counter/123?from=rehandled_params")}
end)
{:reply, :ok, LiveView.push_patch(new_socket, to: "/counter/123?from=handle_params")}
end
:ok = GenServer.call(counter_live.pid, {:push_patch, next})
html = render(counter_live)
assert html =~ rendered_to_string(~s|%{"from" => "rehandled_params", "id" => "123"}|)
assert html =~ "The value is: 1000"
assert_receive {:handle_params, "http://www.example.com/counter/123?from=rehandled_params",
%{val: 1}, %{"from" => "rehandled_params", "id" => "123"}}
end
end
describe "push_redirect" do
test "from event callback", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
assert {:error, {:live_redirect, %{to: "/thermo/123"}}} =
render_click(counter_live, :push_redirect, %{to: "/thermo/123"})
assert_redirect(counter_live, "/thermo/123")
end
test "from handle_params", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
next = fn socket ->
new_socket =
LiveView.assign(socket, :on_handle_params, fn socket ->
{:noreply, LiveView.push_redirect(socket, to: "/thermo/123")}
end)
{:reply, :ok, LiveView.push_patch(new_socket, to: "/counter/123?from=handle_params")}
end
:ok = GenServer.call(counter_live.pid, {:push_patch, next})
assert_receive {:handle_params, "http://www.example.com/counter/123?from=handle_params",
%{val: 1}, %{"from" => "handle_params", "id" => "123"}}
end
test "shuts down with push_redirect", %{conn: conn} do
{:ok, counter_live, _html} = live(conn, "/counter/123")
next = fn socket ->
{:noreply, LiveView.push_redirect(socket, to: "/thermo/123")}
end
assert {{:shutdown, {:live_redirect, %{to: "/thermo/123"}}}, _} =
catch_exit(GenServer.call(counter_live.pid, {:push_redirect, next}))
end
end
describe "@live_action" do
test "when initially set to nil", %{conn: conn} do
{:ok, live, html} = live(conn, "/action")
assert html =~ "Live action: nil"
assert html =~ "Mount action: nil"
assert html =~ "Params: %{}"
html = render_patch(live, "/action/index")
assert html =~ "Live action: :index"
assert html =~ "Mount action: nil"
assert html =~ "Params: %{}"
html = render_patch(live, "/action/1/edit")
assert html =~ "Live action: :edit"
assert html =~ "Mount action: nil"
assert html =~ "Params: %{"id" => "1"}"
end
test "when initially set to action", %{conn: conn} do
{:ok, live, html} = live(conn, "/action/index")
assert html =~ "Live action: :index"
assert html =~ "Mount action: :index"
assert html =~ "Params: %{}"
html = render_patch(live, "/action")
assert html =~ "Live action: nil"
assert html =~ "Mount action: :index"
assert html =~ "Params: %{}"
html = render_patch(live, "/action/1/edit")
assert html =~ "Live action: :edit"
assert html =~ "Mount action: :index"
assert html =~ "Params: %{"id" => "1"}"
end
end
end
| 36.302885 | 107 | 0.584823 |
f7e4c857770cec3ccd3629b022f112181d8365bc | 1,379 | ex | Elixir | lib/rasa_sdk/model/tracker.ex | henry-hz/rasa-sdk-elixir | 575a57a32b322eb5647bac67bd88e0e03f36685e | [
"Apache-2.0"
] | 3 | 2020-06-18T11:26:33.000Z | 2021-04-19T18:05:42.000Z | lib/rasa_sdk/model/tracker.ex | henry-hz/rasa-sdk-elixir | 575a57a32b322eb5647bac67bd88e0e03f36685e | [
"Apache-2.0"
] | null | null | null | lib/rasa_sdk/model/tracker.ex | henry-hz/rasa-sdk-elixir | 575a57a32b322eb5647bac67bd88e0e03f36685e | [
"Apache-2.0"
] | 3 | 2020-06-15T22:16:56.000Z | 2021-02-04T01:10:35.000Z | # NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
# https://openapi-generator.tech
# Do not edit the class manually.
defmodule RasaSdk.Model.Tracker do
@moduledoc """
Conversation tracker which stores the conversation state.
"""
alias RasaSdk.Model.ParseResult
alias RasaSdk.Model.Event
alias RasaSdk.Model.TrackerActiveForm
@derive [Poison.Encoder]
defstruct [
:conversation_id,
:slots,
:latest_message,
:latest_event_time,
:followup_action,
:paused,
:events,
:latest_input_channel,
:latest_action_name,
:active_form
]
@type t :: %__MODULE__{
conversation_id: String.t | nil,
slots: [Map] | nil,
latest_message: ParseResult | nil,
latest_event_time: float() | nil,
followup_action: String.t | nil,
paused: boolean() | nil,
events: [Event] | nil,
latest_input_channel: String.t | nil,
latest_action_name: String.t | nil,
active_form: TrackerActiveForm | nil
}
end
defimpl Poison.Decoder, for: RasaSdk.Model.Tracker do
import RasaSdk.Deserializer
def decode(value, options) do
value
|> deserialize(:latest_message, :struct, RasaSdk.Model.ParseResult, options)
|> deserialize(:events, :list, RasaSdk.Model.Event, options)
|> deserialize(:active_form, :struct, RasaSdk.Model.TrackerActiveForm, options)
end
end
| 27.039216 | 91 | 0.705584 |
f7e4dd7e1638d590efb27e9a5969d40b0b6d3d1f | 15,862 | ex | Elixir | lib/amqp/basic/async.ex | neggertz/subscribex | 5f40b06a514108dbdc42b4d7a166b7f4aa19e693 | [
"Unlicense"
] | 22 | 2016-08-11T11:51:13.000Z | 2020-01-12T17:22:11.000Z | lib/amqp/basic/async.ex | neggertz/subscribex | 5f40b06a514108dbdc42b4d7a166b7f4aa19e693 | [
"Unlicense"
] | 7 | 2016-10-04T19:26:14.000Z | 2018-12-15T19:18:43.000Z | lib/amqp/basic/async.ex | neggertz/subscribex | 5f40b06a514108dbdc42b4d7a166b7f4aa19e693 | [
"Unlicense"
] | 14 | 2016-08-09T14:27:57.000Z | 2021-04-01T20:18:43.000Z | defmodule AMQP.Basic.Async do
@moduledoc false
import AMQP.Core
alias AMQP.Utils
alias AMQP.Channel
@doc """
Publishes a message to an Exchange.
This method publishes a message to a specific exchange. The message will be routed
to queues as defined by the exchange configuration and distributed to any subscribers.
The parameter `exchange` specifies the name of the exchange to publish to. If set to
empty string, it publishes to the default exchange.
The `routing_key` parameter specifies the routing key for the message.
The `payload` parameter specifies the message content as a binary.
In addition to the previous parameters, the following options can be used:
# Options
* `:mandatory` - If set, returns an error if the broker can't route the message to a queue (default `false`);
* `:immediate` - If set, returns an error if the broker can't deliver te message to a consumer immediately (default `false`);
* `:content_type` - MIME Content type;
* `:content_encoding` - MIME Content encoding;
* `:headers` - Message headers. Can be used with headers Exchanges;
* `:persistent` - If set, uses persistent delivery mode. Messages marked as `persistent` that are delivered to `durable` \
queues will be logged to disk;
* `:correlation_id` - application correlation identifier;
* `:priority` - message priority, ranging from 0 to 9;
* `:reply_to` - name of the reply queue;
* `:expiration` - how long the message is valid (in milliseconds);
* `:message_id` - message identifier;
* `:timestamp` - timestamp associated with this message (epoch time);
* `:type` - message type as a string;
* `:user_id` - creating user ID. RabbitMQ will validate this against the active connection user;
* `:app_id` - publishing application ID.
## Examples
iex> AMQP.Basic.publish chan, \"my_exchange\", \"my_routing_key\", \"Hello World!\", persistent: true
:ok
"""
@spec publish(Channel.t(), String.t(), String.t(), String.t(), keyword) ::
:ok | :blocked | :closing
def publish(%Channel{pid: pid}, exchange, routing_key, payload, options \\ []) do
basic_publish =
basic_publish(
exchange: exchange,
routing_key: routing_key,
mandatory: Keyword.get(options, :mandatory, false),
immediate: Keyword.get(options, :immediate, false)
)
p_basic =
p_basic(
content_type: Keyword.get(options, :content_type, :undefined),
content_encoding: Keyword.get(options, :content_encoding, :undefined),
headers: Keyword.get(options, :headers, :undefined) |> Utils.to_type_tuple(),
delivery_mode: if(options[:persistent], do: 2, else: 1),
priority: Keyword.get(options, :priority, :undefined),
correlation_id: Keyword.get(options, :correlation_id, :undefined),
reply_to: Keyword.get(options, :reply_to, :undefined),
expiration: Keyword.get(options, :expiration, :undefined),
message_id: Keyword.get(options, :message_id, :undefined),
timestamp: Keyword.get(options, :timestamp, :undefined),
type: Keyword.get(options, :type, :undefined),
user_id: Keyword.get(options, :user_id, :undefined),
app_id: Keyword.get(options, :app_id, :undefined),
cluster_id: Keyword.get(options, :cluster_id, :undefined)
)
:amqp_channel.cast(pid, basic_publish, amqp_msg(props: p_basic, payload: payload))
end
@doc """
Sets the message prefetch count or prefetech size (in bytes). If `global` is set to `true` this
applies to the entire Connection, otherwise it applies only to the specified Channel.
"""
@spec qos(Channel.t(), keyword) :: :ok
def qos(%Channel{pid: pid}, options \\ []) do
basic_qos_ok() =
      :amqp_channel.call(
pid,
basic_qos(
prefetch_size: Keyword.get(options, :prefetch_size, 0),
prefetch_count: Keyword.get(options, :prefetch_count, 0),
global: Keyword.get(options, :global, false)
)
)
:ok
end
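
  # Usage sketch (assumes `chan` is an open %Channel{}): cap the number of
  # unacknowledged deliveries per consumer on this channel.
  #
  #     :ok = AMQP.Basic.Async.qos(chan, prefetch_count: 10)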
@doc """
Acknowledges one or more messages. If `multiple` is set to `true`, all messages up to the one
specified by `delivery_tag` are considered acknowledged by the server.
"""
@spec ack(Channel.t(), String.t(), keyword) :: :ok | :blocked | :closing
def ack(%Channel{pid: pid}, delivery_tag, options \\ []) do
:amqp_channel.cast(
pid,
basic_ack(
delivery_tag: delivery_tag,
multiple: Keyword.get(options, :multiple, false)
)
)
end
@doc """
Rejects (and, optionally, requeues) a message.
"""
@spec reject(Channel.t(), String.t(), keyword) :: :ok | :blocked | :closing
def reject(%Channel{pid: pid}, delivery_tag, options \\ []) do
:amqp_channel.cast(
pid,
basic_reject(
delivery_tag: delivery_tag,
requeue: Keyword.get(options, :requeue, true)
)
)
end
@doc """
Negative acknowledge of one or more messages. If `multiple` is set to `true`, all messages up to the
one specified by `delivery_tag` are considered as not acknowledged by the server. If `requeue` is set
to `true`, the message will be returned to the queue and redelivered to the next available consumer.
This is a RabbitMQ specific extension to AMQP 0.9.1. It is equivalent to reject, but allows rejecting
multiple messages using the `multiple` option.
"""
@spec nack(Channel.t(), String.t(), keyword) :: :ok | :blocked | :closing
def nack(%Channel{pid: pid}, delivery_tag, options \\ []) do
:amqp_channel.cast(
pid,
basic_nack(
delivery_tag: delivery_tag,
multiple: Keyword.get(options, :multiple, false),
requeue: Keyword.get(options, :requeue, true)
)
)
end
@doc """
Polls a queue for an existing message.
Returns the tuple `{:empty, meta}` if the queue is empty or the tuple {:ok, payload, meta} if at least
one message exists in the queue. The returned meta map includes the entry `message_count` with the
current number of messages in the queue.
  Receiving messages by polling a queue is not as efficient as subscribing a consumer to a queue,
so consideration should be taken when receiving large volumes of messages.
Setting the `no_ack` option to true will tell the broker that the receiver will not send an acknowledgement of
the message. Once it believes it has delivered a message, then it is free to assume that the consuming application
has taken responsibility for it. In general, a lot of applications will not want these semantics, rather, they
will want to explicitly acknowledge the receipt of a message and have `no_ack` with the default value of false.
"""
@spec get(Channel.t(), String.t(), keyword) :: {:ok, String.t(), map} | {:empty, map}
def get(%Channel{pid: pid}, queue, options \\ []) do
    case :amqp_channel.call(
pid,
basic_get(queue: queue, no_ack: Keyword.get(options, :no_ack, false))
) do
{basic_get_ok(
delivery_tag: delivery_tag,
redelivered: redelivered,
exchange: exchange,
routing_key: routing_key,
message_count: message_count
),
amqp_msg(
props:
p_basic(
content_type: content_type,
content_encoding: content_encoding,
headers: headers,
delivery_mode: delivery_mode,
priority: priority,
correlation_id: correlation_id,
reply_to: reply_to,
expiration: expiration,
message_id: message_id,
timestamp: timestamp,
type: type,
user_id: user_id,
app_id: app_id,
cluster_id: cluster_id
),
payload: payload
)} ->
{:ok, payload,
%{
delivery_tag: delivery_tag,
redelivered: redelivered,
exchange: exchange,
routing_key: routing_key,
message_count: message_count,
content_type: content_type,
content_encoding: content_encoding,
headers: headers,
persistent: delivery_mode == 2,
priority: priority,
correlation_id: correlation_id,
reply_to: reply_to,
expiration: expiration,
message_id: message_id,
timestamp: timestamp,
type: type,
user_id: user_id,
app_id: app_id,
cluster_id: cluster_id
}}
basic_get_empty(cluster_id: cluster_id) ->
{:empty, %{cluster_id: cluster_id}}
end
end
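
  # Usage sketch (assumes `chan` is an open %Channel{}; the queue name is
  # illustrative):
  #
  #     case AMQP.Basic.Async.get(chan, "my_queue") do
  #       {:ok, payload, %{delivery_tag: tag}} -> AMQP.Basic.Async.ack(chan, tag)
  #       {:empty, _meta} -> :no_message
  #     end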
@doc """
Registers a queue consumer process. The `pid` of the process can be set using
the `consumer_pid` argument and defaults to the calling process.
The consumer process will receive the following data structures:
* `{:basic_deliver, payload, meta}` - This is sent for each message consumed, where \
`payload` contains the message content and `meta` contains all the metadata set when \
sending with Basic.publish or additional info set by the broker;
* `{:basic_consume_ok, %{consumer_tag: consumer_tag}}` - Sent when the consumer \
process is registered with Basic.consume. The caller receives the same information \
as the return of Basic.consume;
* `{:basic_cancel, %{consumer_tag: consumer_tag, no_wait: no_wait}}` - Sent by the \
broker when the consumer is unexpectedly cancelled (such as after a queue deletion)
* `{:basic_cancel_ok, %{consumer_tag: consumer_tag}}` - Sent to the consumer process after a call to Basic.cancel
"""
@spec consume(Channel.t(), String.t(), pid | nil, keyword) :: {:ok, String.t()}
def consume(%Channel{} = chan, queue, consumer_pid \\ nil, options \\ []) do
basic_consume =
basic_consume(
queue: queue,
consumer_tag: Keyword.get(options, :consumer_tag, ""),
no_local: Keyword.get(options, :no_local, false),
no_ack: Keyword.get(options, :no_ack, false),
exclusive: Keyword.get(options, :exclusive, false),
nowait: Keyword.get(options, :no_wait, false),
arguments: Keyword.get(options, :arguments, [])
)
consumer_pid = consumer_pid || self()
adapter_pid =
spawn(fn ->
Process.flag(:trap_exit, true)
Process.monitor(consumer_pid)
Process.monitor(chan.pid)
do_start_consumer(chan, consumer_pid)
end)
basic_consume_ok(consumer_tag: consumer_tag) =
:amqp_channel.subscribe(chan.pid, basic_consume, adapter_pid)
{:ok, consumer_tag}
end
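
  # Usage sketch (assumes `chan` is an open %Channel{}; the calling process
  # becomes the consumer and receives the messages listed in the @doc above):
  #
  #     {:ok, _consumer_tag} = AMQP.Basic.Async.consume(chan, "my_queue")
  #
  #     receive do
  #       {:basic_consume_ok, %{consumer_tag: _tag}} -> :ok
  #     end
  #
  #     receive do
  #       {:basic_deliver, _payload, %{delivery_tag: tag}} ->
  #         AMQP.Basic.Async.ack(chan, tag)
  #     end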
defp do_start_consumer(chan, consumer_pid) do
receive do
basic_consume_ok(consumer_tag: consumer_tag) ->
send(consumer_pid, {:basic_consume_ok, %{consumer_tag: consumer_tag}})
do_consume(chan, consumer_pid, consumer_tag)
end
end
defp do_consume(chan, consumer_pid, consumer_tag) do
receive do
{basic_deliver(
consumer_tag: consumer_tag,
delivery_tag: delivery_tag,
redelivered: redelivered,
exchange: exchange,
routing_key: routing_key
),
amqp_msg(
props:
p_basic(
content_type: content_type,
content_encoding: content_encoding,
headers: headers,
delivery_mode: delivery_mode,
priority: priority,
correlation_id: correlation_id,
reply_to: reply_to,
expiration: expiration,
message_id: message_id,
timestamp: timestamp,
type: type,
user_id: user_id,
app_id: app_id,
cluster_id: cluster_id
),
payload: payload
)} ->
send(
consumer_pid,
{:basic_deliver, payload,
%{
consumer_tag: consumer_tag,
delivery_tag: delivery_tag,
redelivered: redelivered,
exchange: exchange,
routing_key: routing_key,
content_type: content_type,
content_encoding: content_encoding,
headers: headers,
persistent: delivery_mode == 2,
priority: priority,
correlation_id: correlation_id,
reply_to: reply_to,
expiration: expiration,
message_id: message_id,
timestamp: timestamp,
type: type,
user_id: user_id,
app_id: app_id,
cluster_id: cluster_id
}}
)
do_consume(chan, consumer_pid, consumer_tag)
basic_consume_ok(consumer_tag: consumer_tag) ->
send(consumer_pid, {:basic_consume_ok, %{consumer_tag: consumer_tag}})
do_consume(chan, consumer_pid, consumer_tag)
basic_cancel_ok(consumer_tag: consumer_tag) ->
send(consumer_pid, {:basic_cancel_ok, %{consumer_tag: consumer_tag}})
basic_cancel(consumer_tag: consumer_tag, nowait: no_wait) ->
send(consumer_pid, {:basic_cancel, %{consumer_tag: consumer_tag, no_wait: no_wait}})
{:DOWN, _ref, :process, ^consumer_pid, reason} ->
AMQP.Basic.cancel(chan, consumer_tag)
exit(reason)
{:DOWN, _ref, :process, _pid, reason} ->
exit(reason)
end
end
@doc """
Registers a handler to deal with returned messages. The registered
process will receive `{:basic_return, payload, meta}` data structures.
"""
@spec return(Channel.t(), pid) :: :ok
def return(%Channel{pid: pid}, return_handler_pid) do
adapter_pid =
spawn(fn ->
Process.flag(:trap_exit, true)
Process.monitor(return_handler_pid)
Process.monitor(pid)
handle_return_messages(pid, return_handler_pid)
end)
:amqp_channel.register_return_handler(pid, adapter_pid)
end
@doc """
Removes the return handler, if it exists. Does nothing if there is no
such handler.
"""
@spec cancel_return(Channel.t()) :: :ok
def cancel_return(%Channel{pid: pid}) do
:amqp_channel.unregister_return_handler(pid)
end
defp handle_return_messages(chan_pid, return_handler_pid) do
receive do
{basic_return(
reply_code: reply_code,
reply_text: reply_text,
exchange: exchange,
routing_key: routing_key
),
amqp_msg(
props:
p_basic(
content_type: content_type,
content_encoding: content_encoding,
headers: headers,
delivery_mode: delivery_mode,
priority: priority,
correlation_id: correlation_id,
reply_to: reply_to,
expiration: expiration,
message_id: message_id,
timestamp: timestamp,
type: type,
user_id: user_id,
app_id: app_id,
cluster_id: cluster_id
),
payload: payload
)} ->
send(
return_handler_pid,
{:basic_return, payload,
%{
reply_code: reply_code,
reply_text: reply_text,
exchange: exchange,
routing_key: routing_key,
content_type: content_type,
content_encoding: content_encoding,
headers: headers,
persistent: delivery_mode == 2,
priority: priority,
correlation_id: correlation_id,
reply_to: reply_to,
expiration: expiration,
message_id: message_id,
timestamp: timestamp,
type: type,
user_id: user_id,
app_id: app_id,
cluster_id: cluster_id
}}
)
handle_return_messages(chan_pid, return_handler_pid)
{:DOWN, _ref, :process, _pid, reason} ->
exit(reason)
end
end
end
| 36.132118 | 129 | 0.630185 |
f7e4fc70c20f5ce2c5a791061282b45fe0fd894c | 10,757 | ex | Elixir | lib/mix/lib/mix/utils.ex | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | 1 | 2015-11-12T19:23:45.000Z | 2015-11-12T19:23:45.000Z | lib/mix/lib/mix/utils.ex | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/utils.ex | enokd/elixir | e39b32f235082b8a29fcb22d250c822cca98609f | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Utils do
@moduledoc """
Utilities used throughout Mix and tasks.
"""
@doc """
Get the mix home.
It defaults to `~/.mix` unless the `MIX_HOME`
environment variable is set.
"""
def mix_home do
System.get_env("MIX_HOME") || Path.expand("~/.mix")
end
@doc """
Get all paths defined in the MIX_PATH env variable.
`MIX_PATH` may contain multiple paths. If on Windows, those
paths should be separated by `;`, if on unix systems, use `:`.
"""
def mix_paths do
if path = System.get_env("MIX_PATH") do
String.split(path, path_separator)
else
[]
end
end
defp path_separator do
case :os.type do
{ :win32, _ } -> ";"
{ :unix, _ } -> ":"
end
end
@doc """
  Takes a `command` name and attempts to load a module
with the command name converted to a module name
in the given `at` scope.
Returns `{ :module, module }` in case a module
exists and is loaded, `{ :error, reason }` otherwise.
## Examples
iex> Mix.Utils.command_to_module("compile", Mix.Tasks)
{ :module, Mix.Tasks.Compile }
"""
def command_to_module(command, at \\ Elixir) do
module = Module.concat(at, command_to_module_name(command))
Code.ensure_loaded(module)
end
@doc """
Returns `true` if any of the `sources` are stale
compared to the given `targets`.
"""
def stale?(sources, targets) do
Enum.any? stale_stream(sources, targets)
end
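
  # Usage sketch (paths are illustrative): returns true when any source file
  # has a later mtime than the oldest of the targets.
  #
  #     Mix.Utils.stale?(["lib/a.ex"], ["_build/dev/lib/my_app/ebin/Elixir.A.beam"])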
@doc """
Extract all stale `sources` compared to the given `targets`.
"""
def extract_stale(_sources, []), do: []
def extract_stale(sources, targets) do
stale_stream(sources, targets) |> Enum.to_list
end
defp stale_stream(sources, targets) do
modified_target = targets |> Enum.map(&last_modified(&1)) |> Enum.min
Stream.filter(sources, fn(source) ->
source_mtime(source) > modified_target
end)
end
defp source_mtime({ _, { { _, _, _ }, { _, _, _ } } = source }) do
source
end
defp source_mtime(source) do
last_modified(source)
end
defp last_modified(path) do
case File.stat(path) do
{ :ok, File.Stat[mtime: mtime] } -> mtime
{ :error, _ } -> { { 1970, 1, 1 }, { 0, 0, 0 } }
end
end
@doc ~S"""
Reads the given file as a manifest and returns each entry
as a list.
A manifest is a tabular file where each line is a row
and each entry in a row is separated by "\t". The first
entry must always be a path to a compiled artifact.
In case there is no manifest file, returns an empty list.
"""
def read_manifest(file) do
case File.read(file) do
{ :ok, contents } -> String.split(contents, "\n")
{ :error, _ } -> []
end
end
@doc """
Writes a manifest file with the given `entries` list.
"""
def write_manifest(file, entries) do
Path.dirname(file) |> File.mkdir_p!
File.write!(file, Enum.join(entries, "\n"))
end
@doc """
Extract files from a list of paths.
`exts_or_pattern` may be a list of extensions or a
`Path.wildcard/1` pattern.
If the path in `paths` is a file, it is included in
the return result. If it is a directory, it is searched
recursively for files with the given extensions or matching
the given patterns.
Any file starting with `"."` is ignored.
"""
def extract_files(paths, exts_or_pattern)
def extract_files(paths, exts) when is_list(exts) do
extract_files(paths, "*.{#{Enum.join(exts, ",")}}")
end
def extract_files(paths, pattern) do
files = Enum.flat_map(paths, fn path ->
if File.regular?(path), do: [path], else: Path.wildcard("#{path}/**/#{pattern}")
end)
files |> exclude_files |> Enum.uniq
end
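
  # Usage sketch (directories are illustrative):
  #
  #     Mix.Utils.extract_files(["lib", "web"], [:ex, :exs])
  #     Mix.Utils.extract_files(["priv"], "*.eex")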
defp exclude_files(files) do
filter = fn(x) -> not match?("." <> _, Path.basename(x)) end
Enum.filter files, filter
end
@doc """
Merges two configs recursively, merging keyword lists
and concatenating normal lists.
"""
def config_merge(old, new) do
Keyword.merge(old, new, fn(_, x, y) ->
if is_list(x) and is_list(y) do
if Keyword.keyword?(x) and Keyword.keyword?(y) do
config_merge(x, y)
else
x ++ y
end
else
y
end
end)
end
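
  # Illustrative example: keyword lists merge recursively, plain lists concatenate.
  #
  #     Mix.Utils.config_merge([deps: [:a], env: [foo: 1]],
  #                            [deps: [:b], env: [bar: 2]])
  #     #=> [deps: [:a, :b], env: [foo: 1, bar: 2]]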
@doc """
Converts the given atom or binary to underscore format.
If an atom is given, it is assumed to be an Elixir module,
so it is converted to a binary and then processed.
## Examples
iex> Mix.Utils.underscore "FooBar"
"foo_bar"
iex> Mix.Utils.underscore "Foo.Bar"
"foo/bar"
iex> Mix.Utils.underscore Foo.Bar
"foo/bar"
In general, `underscore` can be thought of as the reverse of
`camelize`, however, in some cases formatting may be lost:
Mix.Utils.underscore "SAPExample" #=> "sap_example"
Mix.Utils.camelize "sap_example" #=> "SapExample"
"""
def underscore(atom) when is_atom(atom) do
"Elixir." <> rest = atom_to_binary(atom)
underscore(rest)
end
def underscore(""), do: ""
def underscore(<<h, t :: binary>>) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<h, t, rest :: binary>>, _) when h in ?A..?Z and not t in ?A..?Z do
<<?_, to_lower_char(h), t>> <> do_underscore(rest, t)
end
defp do_underscore(<<h, t :: binary>>, prev) when h in ?A..?Z and not prev in ?A..?Z do
<<?_, to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<?-, t :: binary>>, _) do
<<?_>> <> do_underscore(t, ?-)
end
defp do_underscore(<< "..", t :: binary>>, _) do
<<"..">> <> underscore(t)
end
defp do_underscore(<<?.>>, _), do: <<?.>>
defp do_underscore(<<?., t :: binary>>, _) do
<<?/>> <> underscore(t)
end
defp do_underscore(<<h, t :: binary>>, _) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<>>, _) do
<<>>
end
@doc """
Converts the given string to CamelCase format.
## Examples
iex> Mix.Utils.camelize "foo_bar"
"FooBar"
"""
def camelize(""), do: ""
def camelize(<<?_, t :: binary>>) do
camelize(t)
end
def camelize(<<h, t :: binary>>) do
<<to_upper_char(h)>> <> do_camelize(t)
end
defp do_camelize(<<?_, ?_, t :: binary>>) do
do_camelize(<< ?_, t :: binary >>)
end
defp do_camelize(<<?_, h, t :: binary>>) when h in ?a..?z do
<<to_upper_char(h)>> <> do_camelize(t)
end
defp do_camelize(<<?_>>) do
<<>>
end
defp do_camelize(<<?/, t :: binary>>) do
<<?.>> <> camelize(t)
end
defp do_camelize(<<h, t :: binary>>) do
<<h>> <> do_camelize(t)
end
defp do_camelize(<<>>) do
<<>>
end
@doc """
Takes a module and converts it to a command.
The nesting argument can be given in order to remove
the nesting of a module.
## Examples
iex> Mix.Utils.module_name_to_command(Mix.Tasks.Compile, 2)
"compile"
iex> Mix.Utils.module_name_to_command("Mix.Tasks.Compile.Elixir", 2)
"compile.elixir"
"""
def module_name_to_command(module, nesting \\ 0)
def module_name_to_command(module, nesting) when is_atom(module) do
module_name_to_command(inspect(module), nesting)
end
def module_name_to_command(module, nesting) do
t = Regex.split(~r/\./, to_string(module))
t |> Enum.drop(nesting) |> Enum.map(&first_to_lower(&1)) |> Enum.join(".")
end
@doc """
Takes a command and converts it to the module name format.
## Examples
iex> Mix.Utils.command_to_module_name("compile.elixir")
"Compile.Elixir"
"""
def command_to_module_name(s) do
Regex.split(~r/\./, to_string(s)) |>
Enum.map(&first_to_upper(&1)) |>
Enum.join(".")
end
defp first_to_upper(<<s, t :: binary>>), do: <<to_upper_char(s)>> <> t
defp first_to_upper(<<>>), do: <<>>
defp first_to_lower(<<s, t :: binary>>), do: <<to_lower_char(s)>> <> t
defp first_to_lower(<<>>), do: <<>>
defp to_upper_char(char) when char in ?a..?z, do: char - 32
defp to_upper_char(char), do: char
defp to_lower_char(char) when char in ?A..?Z, do: char + 32
defp to_lower_char(char), do: char
@doc """
Symlink directory `source` to `target` or copy it recursively
in case symlink fails.
  Expects source and target to be absolute paths as it generates
a relative symlink.
"""
def symlink_or_copy(source, target) do
if File.exists?(source) do
source_list = String.to_char_list!(source)
case :file.read_link(target) do
{ :ok, ^source_list } ->
:ok
{ :ok, _ } ->
File.rm!(target)
do_symlink_or_copy(source, target)
{ :error, :enoent } ->
do_symlink_or_copy(source, target)
{ :error, _ } ->
File.rm_rf!(target)
do_symlink_or_copy(source, target)
end
else
{ :error, :enoent }
end
end
defp do_symlink_or_copy(source, target) do
symlink_source = make_relative_path(source, target)
case :file.make_symlink(symlink_source, target) do
:ok -> :ok
{ :error, _ } -> File.cp_r!(source, target)
end
end
# Make a relative path in between two paths.
# Expects both paths to be fully expanded.
defp make_relative_path(source, target) do
do_make_relative_path(Path.split(source), Path.split(target))
end
defp do_make_relative_path([h|t1], [h|t2]) do
do_make_relative_path(t1, t2)
end
defp do_make_relative_path(source, target) do
base = List.duplicate("..", max(length(target) - 1, 0))
Path.join(base ++ source)
end
@doc """
Opens and reads content from either a URL or a local filesystem path.
Used by tasks like `local.install` and `local.rebar` that support
installation either from a URL or a local file.
  Raises if the given path is neither a URL nor a file, or if the file or URL is invalid.
"""
def read_path!(path) do
cond do
url?(path) -> read_url(path)
file?(path) -> read_file(path)
:else -> raise Mix.Error, message: "Expected #{path} to be a url or a local file path"
end
end
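
  # Usage sketch (URL and path are illustrative):
  #
  #     Mix.Utils.read_path!("https://example.com/package.ez")
  #     Mix.Utils.read_path!("./local/package.ez")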
defp read_file(path) do
File.read!(path)
end
defp read_url(path) do
:ssl.start
:inets.start
headers = [ { 'user-agent', 'Mix/#{System.version}' } ]
request = { :binary.bin_to_list(path), headers }
case :httpc.request(:get, request, [], body_format: :binary) do
{ :ok, { { _, status, _ }, _, body } } when status in 200..299 ->
body
{ :ok, { { _, status, _ }, _, _ } } ->
raise Mix.Error, message: "Could not access url #{path}, got status: #{status}"
{ :error, reason } ->
raise Mix.Error, message: "Could not access url #{path}, error: #{inspect reason}"
end
end
defp file?(path) do
File.regular?(path)
end
defp url?(path) do
URI.parse(path).scheme in ["http", "https"]
end
end
| 25.192037 | 98 | 0.618667 |
f7e511ab31571f23d9076bbc438d7b5e7b85e4e2 | 3,332 | ex | Elixir | lib/intcode.ex | eldemonstro/AdventOfCode2019Elixir | a047a5085a906b7d78ce4f3c8a26904415616f2c | [
"MIT"
] | null | null | null | lib/intcode.ex | eldemonstro/AdventOfCode2019Elixir | a047a5085a906b7d78ce4f3c8a26904415616f2c | [
"MIT"
] | null | null | null | lib/intcode.ex | eldemonstro/AdventOfCode2019Elixir | a047a5085a906b7d78ce4f3c8a26904415616f2c | [
"MIT"
] | null | null | null | defmodule IntCode do
@moduledoc """
Executes the intcode
"""
def sum(x, y) do
{x + y, 4}
end
def mul(x, y) do
{x * y, 4}
end
def less_than(x, y) do
{(if x < y, do: 1, else: 0), 4}
end
def equals(x, y) do
{(if x == y, do: 1, else: 0), 4}
end
def parse_program(program) do
program
|>String.split(~r/[,\n]/)
|>Enum.reject(& &1 == "")
|>Enum.map(& String.to_integer(&1))
|>Enum.with_index(0)
|>Enum.map(fn {k, v} -> {v, k} end)
|>Map.new()
end
def execute(program, opcode, needle, inputs \\ [])
def execute(program, opcode, needle, inputs) when rem(opcode, 100) == 99 do
{program, :halt, needle, inputs}
end
def execute(program, opcode, needle, inputs) when rem(opcode, 100) == 6 do
reversed_opcode = opcode |> Integer.digits() |> Enum.reverse()
mode_b = Enum.at(reversed_opcode, 3) || 0
mode_c = Enum.at(reversed_opcode, 2) || 0
index_x = program[needle + 1]
index_y = program[needle + 2]
value_x = program[(if mode_c == 1, do: needle + 1, else: index_x)]
value_y = program[(if mode_b == 1, do: needle + 2, else: index_y)]
needle_jump = (if value_x == 0, do: value_y, else: needle + 3)
execute(program, program[needle_jump], needle_jump, inputs)
end
def execute(program, opcode, needle, inputs) when rem(opcode, 100) == 5 do
reversed_opcode = opcode |> Integer.digits() |> Enum.reverse()
mode_b = Enum.at(reversed_opcode, 3) || 0
mode_c = Enum.at(reversed_opcode, 2) || 0
index_x = program[needle + 1]
index_y = program[needle + 2]
value_x = program[(if mode_c == 1, do: needle + 1, else: index_x)]
value_y = program[(if mode_b == 1, do: needle + 2, else: index_y)]
needle_jump = (if value_x != 0, do: value_y, else: needle + 3)
execute(program, program[needle_jump], needle_jump, inputs)
end
def execute(program, opcode, needle, inputs) when rem(opcode, 100) == 4 do
reversed_opcode = opcode |> Integer.digits() |> Enum.reverse()
mode_c = Enum.at(reversed_opcode, 2) || 0
index_x = program[needle + 1]
value_x = program[(if mode_c == 1, do: needle + 1, else: index_x)]
IO.puts value_x
execute(program, program[needle + 2], needle + 2, inputs)
end
def execute(program, opcode, needle, inputs) when rem(opcode, 100) == 3 do
[input | new_inputs] = inputs
index_x = program[needle + 1]
execute(Map.put(program, index_x, input), program[needle + 2], needle + 2, new_inputs)
end
def execute(program, opcode, needle, inputs) do
reversed_opcode = opcode |> Integer.digits() |> Enum.reverse()
    {_mode_a, mode_b, mode_c} = {Enum.at(reversed_opcode, 4, 0), Enum.at(reversed_opcode, 3, 0), Enum.at(reversed_opcode, 2, 0)}
index_z = program[needle + 3]
value_x = program[(if mode_c == 1, do: needle + 1, else: program[needle + 1])]
    value_y = program[(if mode_b == 1, do: needle + 2, else: program[needle + 2])]
{result, needle_jump} = case rem(opcode, 100) do
1 ->
sum(value_x, value_y)
2 ->
mul(value_x, value_y)
7 ->
less_than(value_x, value_y)
8 ->
equals(value_x, value_y)
end
new_program = Map.put(program, index_z, result)
execute(new_program, new_program[needle + needle_jump], needle + needle_jump, inputs)
end
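
  # Usage sketch (the program string is illustrative; the last argument is the
  # list of pending inputs consumed by opcode 3):
  #
  #     program = IntCode.parse_program("1,0,0,0,99")
  #     IntCode.execute(program, program[0], 0, [])
  #     #=> {final_program, :halt, needle, remaining_inputs}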
end
| 30.018018 | 127 | 0.618848 |
f7e511fdbe76d4277fb9bb4f412f390a2c879275 | 950 | ex | Elixir | lib/bot_ex/helpers/user_actions.ex | bot-ex/bot_ex | 7f28464723187ef415b5e6926e4c48e2ace50fad | [
"MIT"
] | 20 | 2020-04-10T11:25:47.000Z | 2021-11-08T08:03:22.000Z | lib/bot_ex/helpers/user_actions.ex | bot-ex/botex | 7f28464723187ef415b5e6926e4c48e2ace50fad | [
"MIT"
] | 1 | 2020-07-21T08:01:50.000Z | 2020-07-22T17:53:00.000Z | lib/bot_ex/helpers/user_actions.ex | bot-ex/botex | 7f28464723187ef415b5e6926e4c48e2ace50fad | [
"MIT"
] | 2 | 2020-04-11T11:12:03.000Z | 2020-07-21T07:37:55.000Z | defmodule BotEx.Helpers.UserActions do
@moduledoc """
User actions interacting functions
"""
alias BotEx.Models.Message
@doc """
Find the last user action
## Parameters:
- u_id: user id
"""
@spec get_last_call(integer | binary) :: Message.t()
def get_last_call(u_id) when is_binary(u_id), do: String.to_integer(u_id) |> get_last_call()
def get_last_call(u_id) when is_integer(u_id) do
case :ets.lookup(:last_call, u_id) do
[] -> %Message{}
[{_, msg}] -> msg
end
end
@doc """
Update last user message
## Parameters:
- user_id: user id
- call: `BotEx.Models.Message` for saving
"""
@spec update_last_call(user_id :: integer() | binary(), call :: Message.t()) :: :true
def update_last_call(user_id, call) when is_binary(user_id), do: String.to_integer(user_id) |> update_last_call(call)
def update_last_call(user_id, %Message{} = call), do: :ets.insert(:last_call, {user_id, call})
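
  # Usage sketch (assumes the :last_call ETS table was created elsewhere, e.g.
  # :ets.new(:last_call, [:named_table, :public]); the user id is illustrative):
  #
  #     BotEx.Helpers.UserActions.update_last_call(42, %BotEx.Models.Message{})
  #     BotEx.Helpers.UserActions.get_last_call(42)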
end
| 29.6875 | 119 | 0.672632 |
f7e52568f5a4ff41add48baf084f99c6cb7d61cd | 791 | exs | Elixir | mix.exs | topfreegames/vernemq_redis_auth_plugin | 2e447fb17827820fec271081e0e748571c2ce378 | [
"MIT"
] | 3 | 2017-07-25T13:59:13.000Z | 2017-11-26T04:34:42.000Z | mix.exs | topfreegames/vernemq_redis_auth_plugin | 2e447fb17827820fec271081e0e748571c2ce378 | [
"MIT"
] | null | null | null | mix.exs | topfreegames/vernemq_redis_auth_plugin | 2e447fb17827820fec271081e0e748571c2ce378 | [
"MIT"
] | null | null | null | defmodule RedisAuthPlugin.Mixfile do
use Mix.Project
def project do
[app: :redis_auth_plugin,
version: "0.1.1",
elixir: "~> 1.4",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
defp vmq_plugin_hooks do
hooks = [
{RedisAuthPlugin, :auth_on_register, 5, []},
{RedisAuthPlugin, :auth_on_subscribe, 3, []},
{RedisAuthPlugin, :auth_on_publish, 6, []},
]
{:vmq_plugin_hooks, hooks}
end
def application do
[
mod: {RedisAuthPlugin, []},
extra_applications: [:logger, :redix, :pbkdf2],
env: [vmq_plugin_hooks()]
]
end
defp deps do
[
{:pbkdf2, "~> 2.0"},
{:redix, ">= 0.0.0"},
{:distillery, "~> 1.4", runtime: false},
]
end
end
| 20.815789 | 53 | 0.5689 |
f7e543ac9bf9a2c72ec4f659e08e0471146dbe86 | 1,121 | exs | Elixir | config/config.exs | lukeledet/ex_selfie | c097b039f29e77f69e89cb564662bf280b2e772f | [
"MIT"
] | 1 | 2020-01-26T06:02:33.000Z | 2020-01-26T06:02:33.000Z | config/config.exs | lukeledet/ex_selfie | c097b039f29e77f69e89cb564662bf280b2e772f | [
"MIT"
] | 1 | 2018-03-02T21:18:12.000Z | 2018-03-02T21:18:12.000Z | config/config.exs | lukeledet/ex_selfie | c097b039f29e77f69e89cb564662bf280b2e772f | [
"MIT"
] | 1 | 2018-03-02T17:52:18.000Z | 2018-03-02T17:52:18.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# 3rd-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :selfie, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:selfie, :key)
#
# You can also configure a 3rd-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env}.exs"
| 36.16129 | 73 | 0.751115 |
f7e546a114e7ba8e8e3458b257fe745979ec9c2e | 1,939 | ex | Elixir | test/support/shop.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | 1 | 2019-02-10T10:22:39.000Z | 2019-02-10T10:22:39.000Z | test/support/shop.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | null | null | null | test/support/shop.ex | stevegrossi/ex_venture | e02d5a63fdb882d92cfb4af3e15f7b48ad7054aa | [
"MIT"
] | null | null | null | defmodule Test.Game.Shop do
alias Data.Item
alias Data.Shop
def start_link() do
Agent.start_link(fn () -> %{shop: _shop()} end, name: __MODULE__)
end
def _shop() do
%Shop{
name: "Tree Stand Shop",
}
end
def set_shop(shop) do
start_link()
Agent.update(__MODULE__, fn (state) -> Map.put(state, :shop, shop) end)
end
def list(_id) do
start_link()
Agent.get(__MODULE__, fn (state) -> Map.get(state, :shop) end)
end
def set_buy(buy_response) do
start_link()
Agent.update(__MODULE__, fn (state) -> Map.put(state, :buy_response, buy_response) end)
end
def buy(id, item_id, save) do
start_link()
Agent.get_and_update(__MODULE__, fn (state) ->
buys = Map.get(state, :buy, [])
state = Map.put(state, :buy, buys ++ [{id, item_id, save}])
response = Map.get(state, :buy_response, {:ok, %{save | currency: save.currency - 1}, %Item{}})
{response, state}
end)
end
def get_buys() do
start_link()
Agent.get(__MODULE__, fn (state) -> Map.get(state, :buy, []) end)
end
def clear_buys() do
start_link()
Agent.update(__MODULE__, fn (state) -> Map.put(state, :buy, []) end)
end
def set_sell(sell_response) do
start_link()
Agent.update(__MODULE__, fn (state) -> Map.put(state, :sell_response, sell_response) end)
end
def sell(id, item_id, save) do
start_link()
Agent.get_and_update(__MODULE__, fn (state) ->
sells = Map.get(state, :sell, [])
state = Map.put(state, :sell, sells ++ [{id, item_id, save}])
response = Map.get(state, :sell_response, {:ok, %{save | currency: save.currency - 1}, %Item{}})
{response, state}
end)
end
def get_sells() do
start_link()
Agent.get(__MODULE__, fn (state) -> Map.get(state, :sell, []) end)
end
def clear_sells() do
start_link()
Agent.update(__MODULE__, fn (state) -> Map.put(state, :sell, []) end)
end
end
| 25.853333 | 102 | 0.619391 |
f7e55f3bd0442889fbbebe447f69905cb7e6278e | 1,272 | ex | Elixir | lib/prima_auth0_ex/telemetry.ex | primait/auth0_ex | 15ef5d6d91d8fe00ff703a4f58e1cb32bb169a82 | [
"MIT"
] | 5 | 2021-12-01T10:50:40.000Z | 2022-02-15T13:07:02.000Z | lib/prima_auth0_ex/telemetry.ex | primait/auth0_ex | 15ef5d6d91d8fe00ff703a4f58e1cb32bb169a82 | [
"MIT"
] | 11 | 2021-12-22T09:19:28.000Z | 2022-03-24T06:15:04.000Z | lib/prima_auth0_ex/telemetry.ex | primait/auth0_ex | 15ef5d6d91d8fe00ff703a4f58e1cb32bb169a82 | [
"MIT"
] | null | null | null | defmodule PrimaAuth0Ex.Telemetry do
@moduledoc """
A pre-defined module which sets up telemetry with a given reporter
"""
alias PrimaAuth0Ex.Telemetry.Handler
@auth0_handler_id "auth0-handler"
def setup do
reporter = telemetry_reporter()
if reporter != nil do
:ok =
:telemetry.attach_many(
@auth0_handler_id,
[
[:prima_auth0_ex, :retrieve_token, :failure],
[:prima_auth0_ex, :retrieve_token, :success]
],
&Handler.handle_event/4,
%{reporter: reporter}
)
end
end
defp telemetry_reporter, do: Application.get_env(:prima_auth0_ex, :telemetry_reporter)
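  # A minimal usage sketch. The reporter is assumed to be any module that
  # exposes an `increment/3` function; MyApp.MetricsReporter is illustrative:
  #
  #     # config/config.exs
  #     config :prima_auth0_ex, telemetry_reporter: MyApp.MetricsReporter
  #
  #     # during application start
  #     PrimaAuth0Ex.Telemetry.setup()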
end
defmodule PrimaAuth0Ex.Telemetry.Handler do
@moduledoc """
A pre-defined telemetry handler
"""
def handle_event([:prima_auth0_ex, :retrieve_token, :failure], %{count: count}, %{audience: audience}, %{
reporter: reporter
}) do
reporter.increment("retrieve_token:failure", count, tags: ["audience:#{audience}"])
end
def handle_event([:prima_auth0_ex, :retrieve_token, :success], %{count: count}, %{audience: audience}, %{
reporter: reporter
}) do
reporter.increment("retrieve_token:success", count, tags: ["audience:#{audience}"])
end
end
| 27.06383 | 107 | 0.654874 |
f7e564ca71193238797a0b16e57eae0b3376641c | 3,913 | exs | Elixir | test/radiator_web/graphql/public/schema/query/episodes_test.exs | djschilling/radiator | 382e22904d7e400a8ffba54e9ddfd2845bc2b623 | [
"MIT"
] | null | null | null | test/radiator_web/graphql/public/schema/query/episodes_test.exs | djschilling/radiator | 382e22904d7e400a8ffba54e9ddfd2845bc2b623 | [
"MIT"
] | null | null | null | test/radiator_web/graphql/public/schema/query/episodes_test.exs | djschilling/radiator | 382e22904d7e400a8ffba54e9ddfd2845bc2b623 | [
"MIT"
] | null | null | null | defmodule RadiatorWeb.GraphQL.Public.Schema.Query.EpisodesTest do
use RadiatorWeb.ConnCase, async: true
import Radiator.Factory
alias Radiator.Directory.Episode
alias Radiator.Media
@single_query """
query ($id: ID!) {
publishedEpisode(id: $id) {
id
title
enclosure {
length
type
url
}
audio {
chapters {
image
link
start
title
}
}
}
}
"""
test "episode returns an episode", %{conn: conn} do
podcast = build(:podcast)
upload = %Plug.Upload{
path: "test/fixtures/image.jpg",
filename: "image.jpg"
}
audio = insert(:audio)
{:ok, chapter} =
Radiator.AudioMeta.create_chapter(audio, %{
image: upload,
link: "http://example.com",
title: "An Example",
start: 12345
})
image_url = Media.ChapterImage.url({chapter.image, chapter})
episode =
insert(:published_episode,
podcast: podcast,
audio: audio
)
enclosure = Episode.enclosure(episode)
conn = get conn, "/api/graphql", query: @single_query, variables: %{"id" => episode.id}
assert image_url =~ "http"
assert json_response(conn, 200) == %{
"data" => %{
"publishedEpisode" => %{
"id" => Integer.to_string(episode.id),
"title" => episode.title,
"enclosure" => %{
"length" => enclosure.length,
"type" => enclosure.type,
"url" => enclosure.url
},
"audio" => %{
"chapters" => [
%{
"image" => image_url,
"link" => "http://example.com",
"title" => "An Example",
"start" => 12345
}
]
}
}
}
}
end
test "episode returns an error when queried with a non-existent ID", %{conn: conn} do
conn = get conn, "/api/graphql", query: @single_query, variables: %{"id" => -1}
assert %{"errors" => [%{"message" => message}]} = json_response(conn, 200)
assert message == "Episode ID -1 not found"
end
test "episode returns an error if not published", %{conn: conn} do
episode = insert(:unpublished_episode)
conn = get conn, "/api/graphql", query: @single_query, variables: %{"id" => episode.id}
assert %{"errors" => [%{"message" => message}]} = json_response(conn, 200)
assert message == "Episode ID #{episode.id} not found"
end
@episodes_in_podcast_query """
query ($podcast_id: ID!) {
publishedPodcast(id: $podcast_id) {
episodes {
title
}
}
}
"""
test "episodes in podcast are ordered, latest first", %{conn: conn} do
podcast = insert(:podcast)
timestamp = 1_500_000_000
_ep1 =
insert(:published_episode,
title: "E001",
published_at: DateTime.from_unix!(timestamp),
podcast: podcast
)
_ep3 =
insert(:published_episode,
title: "E003",
published_at: DateTime.from_unix!(timestamp + 20),
podcast: podcast
)
_ep2 =
insert(:published_episode,
title: "E002",
published_at: DateTime.from_unix!(timestamp + 10),
podcast: podcast
)
conn =
get conn, "/api/graphql",
query: @episodes_in_podcast_query,
variables: %{"podcast_id" => podcast.id}
assert %{
"data" => %{
"publishedPodcast" => %{
"episodes" => [
%{"title" => "E003"},
%{"title" => "E002"},
%{"title" => "E001"}
]
}
}
} = json_response(conn, 200)
end
end
| 25.245161 | 91 | 0.49885 |
f7e5715f81f559a88eab4f3fbd0ec43a14d561a0 | 2,824 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/city.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/city.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v34/model/city.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V34.Model.City do
@moduledoc """
Contains information about a city that can be targeted by ads.
## Attributes
* `countryCode` (*type:* `String.t`, *default:* `nil`) - Country code of the country to which this city belongs.
* `countryDartId` (*type:* `String.t`, *default:* `nil`) - DART ID of the country to which this city belongs.
* `dartId` (*type:* `String.t`, *default:* `nil`) - DART ID of this city. This is the ID used for targeting and generating reports.
* `kind` (*type:* `String.t`, *default:* `dfareporting#city`) - Identifies what kind of resource this is. Value: the fixed string "dfareporting#city".
* `metroCode` (*type:* `String.t`, *default:* `nil`) - Metro region code of the metro region (DMA) to which this city belongs.
* `metroDmaId` (*type:* `String.t`, *default:* `nil`) - ID of the metro region (DMA) to which this city belongs.
* `name` (*type:* `String.t`, *default:* `nil`) - Name of this city.
* `regionCode` (*type:* `String.t`, *default:* `nil`) - Region code of the region to which this city belongs.
* `regionDartId` (*type:* `String.t`, *default:* `nil`) - DART ID of the region to which this city belongs.
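  ## Example

  A minimal sketch of building a `City` struct by hand (all field values are illustrative):

      %GoogleApi.DFAReporting.V34.Model.City{countryCode: "US", name: "Seattle", kind: "dfareporting#city"}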
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:countryCode => String.t(),
:countryDartId => String.t(),
:dartId => String.t(),
:kind => String.t(),
:metroCode => String.t(),
:metroDmaId => String.t(),
:name => String.t(),
:regionCode => String.t(),
:regionDartId => String.t()
}
field(:countryCode)
field(:countryDartId)
field(:dartId)
field(:kind)
field(:metroCode)
field(:metroDmaId)
field(:name)
field(:regionCode)
field(:regionDartId)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V34.Model.City do
def decode(value, options) do
GoogleApi.DFAReporting.V34.Model.City.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V34.Model.City do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.774648 | 154 | 0.672805 |
f7e572f2f964da8b3af01848322ce2290f1886f9 | 1,404 | ex | Elixir | test/support/data_case.ex | sb8244/ecto_tenancy_enforcer | 7dac523d0560ec4a57bcf3800b788d312d70d6e6 | [
"MIT"
] | 36 | 2019-12-30T23:02:59.000Z | 2022-03-26T14:38:41.000Z | test/support/data_case.ex | sb8244/ecto_tenancy_enforcer | 7dac523d0560ec4a57bcf3800b788d312d70d6e6 | [
"MIT"
] | 1 | 2021-01-13T05:01:04.000Z | 2021-01-13T05:01:04.000Z | test/support/data_case.ex | sb8244/ecto_tenancy_enforcer | 7dac523d0560ec4a57bcf3800b788d312d70d6e6 | [
"MIT"
] | 1 | 2021-04-25T16:50:16.000Z | 2021-04-25T16:50:16.000Z | defmodule Tenancy.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
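  ## Example

  A sketch of a test module built on this case (module and test names are illustrative):

      defmodule Tenancy.SmokeTest do
        use Tenancy.DataCase

        test "the case template compiles and runs" do
          assert 2 + 2 == 4
        end
      end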
"""
use ExUnit.CaseTemplate
using do
quote do
alias Tenancy.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Tenancy.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Tenancy.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Tenancy.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Regex.replace(~r"%{(\w+)}", message, fn _, key ->
opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
end)
end)
end
end
| 26 | 77 | 0.679487 |
f7e57c4cc786c01b9606fd3144c661d0feb6c9ca | 2,103 | exs | Elixir | harbor/test/ports/rumble/message/landing_test.exs | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | harbor/test/ports/rumble/message/landing_test.exs | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | harbor/test/ports/rumble/message/landing_test.exs | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | defmodule PortsTests.Rumble.Message.Landing.LandingTest do
use ExUnit.Case, async: true
alias PierTest.WsClient
alias PierTest.WsClientFactory
alias PierTest.Helpers.Room
alias HarborTest.Support.Factory
require WsClient
setup do
user_id = Factory.user_token()
client_ws = WsClientFactory.create_client_for(user_id)
{:ok, user_id: user_id, client_ws: client_ws}
end
describe "validate landing" do
test "single peer", t do
Room.create_and_join(t.client_ws, :rumble)
WsClient.assert_frame(
"landing",
%{
"milestone" => %{"state" => "lobby", "serverNow" => _some_val},
"peers" => [%{"id" => 0, "nickname" => "TEST USER", "isJoined" => false}]
}
)
end
test "second peer", t do
{room_id, _} = Room.create_and_join(t.client_ws, :rumble)
second_ws = WsClientFactory.create_client_for(Factory.user_token())
Room.join_existing(second_ws, room_id)
WsClient.assert_frame(
"landing",
%{
"milestone" => %{"state" => "lobby", "serverNow" => _some_val},
"peers" => [
%{"id" => 0, "nickname" => "TEST USER", "isJoined" => false},
%{"id" => 1, "nickname" => "TEST USER", "isJoined" => false}
]
},
second_ws
)
end
test "first peer leaves and receives same landing", t do
{room_id, _} = Room.create_and_join(t.client_ws, :rumble)
second_ws = WsClientFactory.create_client_for(Factory.user_token())
Room.join_existing(second_ws, room_id)
Process.exit(t.client_ws, :kill)
restarted_ws = WsClientFactory.create_client_for(t.user_id)
Room.join_existing(restarted_ws, room_id)
WsClient.assert_frame(
"landing",
%{
"milestone" => %{"state" => "lobby", "serverNow" => _some_val},
"peers" => [
%{"id" => 0, "nickname" => "TEST USER", "isJoined" => false},
%{"id" => 1, "nickname" => "TEST USER", "isJoined" => false}
]
},
restarted_ws
)
end
end
end
| 28.418919 | 83 | 0.584403 |
f7e5ad2eafecda30cd5ec801ccf8953faedaa35f | 166 | ex | Elixir | web/controllers/page_controller.ex | zampino/livex | 74bcc6557ca8e5185b7e4dac85323b1c5174e52d | [
"MIT"
] | 6 | 2016-03-31T21:23:21.000Z | 2020-09-15T01:36:17.000Z | web/controllers/page_controller.ex | zampino/livex | 74bcc6557ca8e5185b7e4dac85323b1c5174e52d | [
"MIT"
] | null | null | null | web/controllers/page_controller.ex | zampino/livex | 74bcc6557ca8e5185b7e4dac85323b1c5174e52d | [
"MIT"
] | null | null | null | defmodule Livex.PageController do
use Livex.Web, :controller
alias Livex.Page
def index(conn, _params) do
render(conn, "index.html", pages: [])
end
end
| 16.6 | 41 | 0.698795 |
f7e5e459a1f69630933cc407b91d36517125f9ef | 794 | ex | Elixir | test/support/gen/plug/test_post.ex | feng19/oasis | 36e81db1413f24f153f960b1ee3706858ec1f952 | [
"MIT"
] | 11 | 2021-02-24T09:21:11.000Z | 2021-12-26T03:51:25.000Z | test/support/gen/plug/test_post.ex | feng19/oasis | 36e81db1413f24f153f960b1ee3706858ec1f952 | [
"MIT"
] | 5 | 2021-03-18T14:15:37.000Z | 2022-03-02T09:53:42.000Z | test/support/gen/plug/test_post.ex | feng19/oasis | 36e81db1413f24f153f960b1ee3706858ec1f952 | [
"MIT"
] | 6 | 2021-02-24T09:21:01.000Z | 2021-11-16T08:45:03.000Z | defmodule Oasis.Gen.Plug.TestPost do
use Oasis.Controller
alias Oasis.BadRequestError
def init(opts), do: opts
def call(conn, _opts) do
json(
conn,
%{
"body_params" => conn.body_params,
"params" => conn.params
}
)
end
def handle_errors(conn, %{kind: _kind, reason: %BadRequestError{error: %BadRequestError.JsonSchemaValidationFailed{} = json_schema} = reason, stack: _stack}) do
message = "Find #{reason.use_in} parameter `#{reason.param_name}` with error: #{to_string(json_schema.error)}"
send_resp(conn, conn.status, message)
end
def handle_errors(conn, %{kind: _kind, reason: reason, stack: _stack}) do
message = Map.get(reason, :message) || "Something went wrong"
send_resp(conn, conn.status, message)
end
end
| 29.407407 | 162 | 0.676322 |
f7e5e957010c78464bd4bda6553e10750bdd9da4 | 571 | exs | Elixir | test/type_check/type_error/formatter_test.exs | ktec/elixir-type_check | 42bde40b4a67e999653c5336294dc651c98a747a | [
"MIT"
] | null | null | null | test/type_check/type_error/formatter_test.exs | ktec/elixir-type_check | 42bde40b4a67e999653c5336294dc651c98a747a | [
"MIT"
] | null | null | null | test/type_check/type_error/formatter_test.exs | ktec/elixir-type_check | 42bde40b4a67e999653c5336294dc651c98a747a | [
"MIT"
] | null | null | null | defmodule TypeCheck.TypeError.FormatterTest do
use ExUnit.Case
use ExUnitProperties
import StreamData, only: []
import TypeCheck.Type.StreamData
property "the default formatter is able to handle all problem tuples (returning a binary string message)" do
check all problem <-
StreamData.scale(
to_gen(TypeCheck.TypeError.Formatter.problem_tuple()),
&div(&1, 3)
) do
result = TypeCheck.TypeError.DefaultFormatter.format_wrap(problem)
assert is_binary(result)
end
end
end
| 30.052632 | 110 | 0.669002 |
f7e5ec8e68726782350e5e203640f5d7e15e19a1 | 422 | ex | Elixir | lib/validation/rules/credit_card/jcb.ex | adolfont/validation | 6288f5a5745f645c90b6f6241e14f0088c218f5b | [
"MIT"
] | null | null | null | lib/validation/rules/credit_card/jcb.ex | adolfont/validation | 6288f5a5745f645c90b6f6241e14f0088c218f5b | [
"MIT"
] | null | null | null | lib/validation/rules/credit_card/jcb.ex | adolfont/validation | 6288f5a5745f645c90b6f6241e14f0088c218f5b | [
"MIT"
] | null | null | null | defmodule Validation.Rules.CreditCard.Jcb do
@moduledoc false
alias Validation.Rules.CreditCard
@spec validate?(String.t()) :: boolean
def validate?(input) when is_binary(input) do
# only numbers
input = Regex.replace(~r/\D/, input, "")
CreditCard.validate?(input) and jcb_validation(input)
end
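  # A quick usage sketch (the number below is a commonly published JCB test
  # card number, shown for illustration only; non-digits are stripped first):
  #
  #     Validation.Rules.CreditCard.Jcb.validate?("3530 1113 3330 0000")
  #     #=> true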
defp jcb_validation(input) do
String.match?(input, ~r/^(?:2131|1800|35\d{3})\d{11}$/)
end
end
| 24.823529 | 59 | 0.689573 |
f7e60b4f902ce4632782e99400c46dead3e242e3 | 2,050 | ex | Elixir | clients/data_labeling/lib/google_api/data_labeling/v1beta1/model/google_cloud_datalabeling_v1p1alpha1_label_text_entity_extraction_operation_metadata.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/data_labeling/lib/google_api/data_labeling/v1beta1/model/google_cloud_datalabeling_v1p1alpha1_label_text_entity_extraction_operation_metadata.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/data_labeling/lib/google_api/data_labeling/v1beta1/model/google_cloud_datalabeling_v1p1alpha1_label_text_entity_extraction_operation_metadata.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1p1alpha1LabelTextEntityExtractionOperationMetadata do
@moduledoc """
Details of a LabelTextEntityExtraction operation metadata.
## Attributes
* `basicConfig` (*type:* `GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1p1alpha1HumanAnnotationConfig.t`, *default:* `nil`) - Basic human annotation config used in labeling request.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:basicConfig =>
GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1p1alpha1HumanAnnotationConfig.t()
}
field(:basicConfig,
as:
GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1p1alpha1HumanAnnotationConfig
)
end
defimpl Poison.Decoder,
for:
GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1p1alpha1LabelTextEntityExtractionOperationMetadata do
def decode(value, options) do
GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1p1alpha1LabelTextEntityExtractionOperationMetadata.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for:
GoogleApi.DataLabeling.V1beta1.Model.GoogleCloudDatalabelingV1p1alpha1LabelTextEntityExtractionOperationMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.344828 | 201 | 0.783415 |
f7e61464d1ec38de02948d98dd32547aac81f198 | 1,129 | exs | Elixir | config/config.exs | k-tomoyasu/boinize-elixir | 79454780c7bd87b9fb475663210ab28fa4a4840f | [
"MIT"
] | null | null | null | config/config.exs | k-tomoyasu/boinize-elixir | 79454780c7bd87b9fb475663210ab28fa4a4840f | [
"MIT"
] | null | null | null | config/config.exs | k-tomoyasu/boinize-elixir | 79454780c7bd87b9fb475663210ab28fa4a4840f | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :boinize, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:boinize, :key)
#
# You can also configure a third-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 36.419355 | 73 | 0.751107 |
f7e6440c5b70ca71856a43557bdcb9e727a7dcec | 993 | ex | Elixir | test/support/channel_case.ex | helapu/gd_push | 4bfe17194a898232fe8e8a4d54d4f90934ba4125 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | helapu/gd_push | 4bfe17194a898232fe8e8a4d54d4f90934ba4125 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | helapu/gd_push | 4bfe17194a898232fe8e8a4d54d4f90934ba4125 | [
"MIT"
] | null | null | null | defmodule GdPush.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build and query models.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with channels
use Phoenix.ChannelTest
alias GdPush.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
# The default endpoint for testing
@endpoint GdPush.Endpoint
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(GdPush.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(GdPush.Repo, {:shared, self()})
end
:ok
end
end
| 22.568182 | 68 | 0.698892 |
f7e64ad226c57135bd65675779e556ac5a84cc73 | 124 | exs | Elixir | test/bankocrkata_test.exs | Slumber86/bankOcrElixir | 48e6a73d9e732a798d53a2eea91d33e0fa528ea3 | [
"MIT"
] | null | null | null | test/bankocrkata_test.exs | Slumber86/bankOcrElixir | 48e6a73d9e732a798d53a2eea91d33e0fa528ea3 | [
"MIT"
] | null | null | null | test/bankocrkata_test.exs | Slumber86/bankOcrElixir | 48e6a73d9e732a798d53a2eea91d33e0fa528ea3 | [
"MIT"
] | null | null | null | defmodule BankOCRKataTest do
use ExUnit.Case
doctest BankOCRKata
test "the truth" do
assert 1 + 1 == 2
end
end
| 13.777778 | 28 | 0.693548 |
f7e661fe67519ef27d07f4422f3ed358b683de65 | 1,103 | ex | Elixir | distillator_umbrella/apps/distillator_web/lib/distillator_web/channels/user_socket.ex | MadPumpkin/ex_distillator | 6153c9c25e7ccc9117fb4db053a7df9d703d58ff | [
"Apache-2.0"
] | null | null | null | distillator_umbrella/apps/distillator_web/lib/distillator_web/channels/user_socket.ex | MadPumpkin/ex_distillator | 6153c9c25e7ccc9117fb4db053a7df9d703d58ff | [
"Apache-2.0"
] | null | null | null | distillator_umbrella/apps/distillator_web/lib/distillator_web/channels/user_socket.ex | MadPumpkin/ex_distillator | 6153c9c25e7ccc9117fb4db053a7df9d703d58ff | [
"Apache-2.0"
] | null | null | null | defmodule DistillatorWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", DistillatorWeb.RoomChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
@impl true
def connect(_params, socket, _connect_info) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# DistillatorWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
@impl true
def id(_socket), do: nil
end
| 30.638889 | 86 | 0.698096 |
f7e6639676382d822cfbd2afffd491709134405c | 2,142 | exs | Elixir | test/web/controllers/sync_controller_test.exs | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | test/web/controllers/sync_controller_test.exs | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | test/web/controllers/sync_controller_test.exs | leonardocouy/accent | 29fb324395ff998cc5cdc6947c60070ffabe647c | [
"BSD-3-Clause"
] | null | null | null | defmodule AccentTest.SyncController do
use Accent.ConnCase
import Ecto.Query, only: [from: 2]
import Mox
setup :verify_on_exit!
alias Accent.{
Repo,
Project,
Revision,
Document,
User,
AccessToken,
Collaborator,
Operation,
Language
}
@user %User{email: "[email protected]"}
def file(filename \\ "simple.json") do
%Plug.Upload{content_type: "application/json", filename: filename, path: "test/support/formatter/json/simple.json"}
end
setup do
user = Repo.insert!(@user)
access_token = %AccessToken{user_id: user.id, token: "test-token"} |> Repo.insert!()
french_language = %Language{name: "french", slug: Ecto.UUID.generate()} |> Repo.insert!()
project = %Project{name: "My project"} |> Repo.insert!()
%Collaborator{project_id: project.id, user_id: user.id, role: "admin"} |> Repo.insert!()
%Revision{language_id: french_language.id, project_id: project.id, master: true} |> Repo.insert!()
{:ok, [access_token: access_token, user: user, project: project, language: french_language]}
end
test "sync with operations", %{user: user, access_token: access_token, conn: conn, project: project, language: language} do
body = %{file: file(), project_id: project.id, language: language.slug, document_format: "json", document_path: "simple"}
Accent.Hook.BroadcasterMock
|> expect(:fanout, fn _ -> :ok end)
response =
conn
|> put_req_header("authorization", "Bearer #{access_token.token}")
|> post(sync_path(conn, []), body)
assert response.status == 200
assert Enum.map(Repo.all(Document), &Map.get(&1, :path)) == ["simple"]
new_operations = from(o in Operation, where: [action: ^"new"]) |> Repo.all()
sync_operation = from(o in Operation, where: [action: ^"sync"]) |> Repo.one()
assert length(new_operations) == 3
assert sync_operation.user_id == user.id
assert sync_operation.project_id == project.id
response =
conn
|> put_req_header("authorization", "Bearer #{access_token.token}")
|> post(sync_path(conn, []), body)
assert response.status == 200
end
end
| 31.5 | 125 | 0.663399 |
f7e66f3ce5f617bdb932a0c045b3f14c349c1510 | 2,795 | ex | Elixir | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/ssl_cert.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | 1 | 2021-10-01T09:20:41.000Z | 2021-10-01T09:20:41.000Z | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/ssl_cert.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/ssl_cert.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SQLAdmin.V1beta4.Model.SslCert do
@moduledoc """
SslCerts Resource
## Attributes
* `cert` (*type:* `String.t`, *default:* `nil`) - PEM representation.
* `certSerialNumber` (*type:* `String.t`, *default:* `nil`) - Serial number, as extracted from the certificate.
* `commonName` (*type:* `String.t`, *default:* `nil`) - User supplied name. Constrained to [a-zA-Z.-_ ]+.
* `createTime` (*type:* `DateTime.t`, *default:* `nil`) - The time when the certificate was created in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*
* `expirationTime` (*type:* `DateTime.t`, *default:* `nil`) - The time when the certificate expires in RFC 3339 format, for example *2012-11-15T16:19:00.094Z*.
* `instance` (*type:* `String.t`, *default:* `nil`) - Name of the database instance.
* `kind` (*type:* `String.t`, *default:* `nil`) - This is always *sql#sslCert*.
* `selfLink` (*type:* `String.t`, *default:* `nil`) - The URI of this resource.
* `sha1Fingerprint` (*type:* `String.t`, *default:* `nil`) - Sha1 Fingerprint.
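  ## Example

  A rough sketch of decoding an API response body into this struct with Poison
  (the JSON payload is assumed to follow the field names above):

      Poison.decode!(body, as: %GoogleApi.SQLAdmin.V1beta4.Model.SslCert{})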
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:cert => String.t() | nil,
:certSerialNumber => String.t() | nil,
:commonName => String.t() | nil,
:createTime => DateTime.t() | nil,
:expirationTime => DateTime.t() | nil,
:instance => String.t() | nil,
:kind => String.t() | nil,
:selfLink => String.t() | nil,
:sha1Fingerprint => String.t() | nil
}
field(:cert)
field(:certSerialNumber)
field(:commonName)
field(:createTime, as: DateTime)
field(:expirationTime, as: DateTime)
field(:instance)
field(:kind)
field(:selfLink)
field(:sha1Fingerprint)
end
defimpl Poison.Decoder, for: GoogleApi.SQLAdmin.V1beta4.Model.SslCert do
def decode(value, options) do
GoogleApi.SQLAdmin.V1beta4.Model.SslCert.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.SQLAdmin.V1beta4.Model.SslCert do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.366197 | 163 | 0.667621 |
f7e69288fd74d93f24bfa857c76a7ff19d486013 | 6,387 | exs | Elixir | test/rtl_web/integration/coding_interface_test.exs | topherhunt/reassembling-the-line | c6823b3394ee98d9b0149fa3d09448928ac5c0db | [
"MIT"
] | 1 | 2019-04-27T15:39:20.000Z | 2019-04-27T15:39:20.000Z | test/rtl_web/integration/coding_interface_test.exs | topherhunt/reassembling-the-line | c6823b3394ee98d9b0149fa3d09448928ac5c0db | [
"MIT"
] | 11 | 2020-07-16T11:40:53.000Z | 2021-08-16T07:03:33.000Z | test/rtl_web/integration/coding_interface_test.exs | topherhunt/reassembling-the-line | c6823b3394ee98d9b0149fa3d09448928ac5c0db | [
"MIT"
] | null | null | null | # High-level coverage of the manage videos & coding UI, including the videos list LV.
# See CodingControllerTest for the CRUD basics.
defmodule RTLWeb.CodingInterfaceTest do
use RTLWeb.IntegrationCase
alias RTL.{Projects, Videos}
alias RTL.Videos.{Tagging, Tag}
hound_session()
test "Project admin can list and delete videos", %{conn: conn} do
user = insert_user()
project = insert_project()
prompt = insert_prompt(project_id: project.id)
v1 = insert_video(prompt_id: prompt.id)
v2 = insert_video(prompt_id: prompt.id)
Projects.add_project_admin!(user, project)
login(user)
navigate_to Routes.admin_video_path(conn, :index, project)
assert_selector(".test-row-video-#{v1.id}")
assert_selector(".test-row-video-#{v2.id}")
# This is a LV phx-click event.
find_element(".test-link-delete-video-#{v1.id}") |> click()
accept_dialog()
wait_until(fn -> count_selector(".test-row-video-#{v1.id}") == 0 end)
refute_selector(".test-row-video-#{v1.id}")
assert_selector(".test-row-video-#{v2.id}")
end
test "User can code a video", %{conn: conn} do
user = insert_user()
project = insert_project()
prompt = insert_prompt(project_id: project.id)
video = insert_video(prompt_id: prompt.id)
Projects.add_project_admin!(user, project)
# I can open the coding page for a video
login(user)
navigate_to Routes.admin_video_path(conn, :index, project)
assert_selector(".test-page-manage-video-index")
find_element(".test-link-code-video-#{video.id}") |> click()
assert_selector(".test-coding-page")
# I can add, edit, and delete tags
tag1 = create_tag(project)
tag2 = create_tag(project)
tag3 = create_tag(project)
edit_tag_name(tag2, "policy")
delete_tag(tag3)
# I can apply tags to the timeline
make_timeline_selection(9, 23)
# TODO: Troubleshoot flaps here
apply_tag(tag1)
apply_tag(tag2)
make_timeline_selection(44, 78)
apply_tag(tag2)
assert_tagging(tag1, 9, 23)
assert_tagging(tag2, 9, 23)
assert_tagging(tag2, 44, 78)
Process.sleep(2000)
# I can edit and delete taggings
edit_tagging({tag1, 9, 23}, :starts_at, 21)
delete_tagging(tag2, 9, 23)
Process.sleep(2000)
# I can mark coding as complete
assert Videos.get_coding_by!(video: video).completed_at == nil
find_element(".test-complete-coding-button") |> click()
assert_selector(".test-page-manage-video-index")
assert Videos.get_coding_by!(video: video).completed_at != nil
# I can edit tags for a video
find_element(".test-link-code-video-#{video.id}") |> click()
assert_selector(".test-coding-page")
refute_selector(".test-complete-coding-button")
# All my tagging data shows up as it should
Process.sleep(2000)
assert_tagging(tag1, 21, 23)
assert_tagging(tag2, 44, 78)
end
#
# Helpers
#
defp create_tag(project) do
name = random_uuid()
# sanity check: the add tag form should be in "inactive" state
refute_selector(".test-add-tag-submit")
find_element(".test-add-tag-field") |> fill_field(name)
find_element(".test-add-tag-submit") |> click()
# Ensure the form submitted
refute_selector(".test-add-tag-submit")
tag = Tag.first!(project: project, name: name)
assert_selector(".test-tag-row-#{tag.id}", name: name)
tag
end
defp edit_tag_name(tag, new_name) do
row_class = ".test-tag-row-#{tag.id}"
find_element(row_class) |> move_to(1, 1)
# sanity check: the tag row should not be in editing state.
refute_selector("#{row_class} .test-tag-edit-submit")
find_element("#{row_class} .test-tag-edit-link") |> click()
find_element("#{row_class} .test-tag-edit-field") |> fill_field(new_name)
find_element("#{row_class} .test-tag-edit-submit") |> click()
# Ensure the form submitted and the tag was updated
refute_selector("#{row_class} .test-tag-edit-submit")
assert Tag.get!(tag.id).name == new_name
end
defp delete_tag(tag) do
row_class = ".test-tag-row-#{tag.id}"
find_element(row_class) |> move_to(1, 1)
find_element("#{row_class} .test-tag-delete-link") |> click()
accept_dialog()
# Ensure the tag is removed both from the list and from the db
refute_selector(row_class)
assert Tag.get(tag.id) == nil
end
defp apply_tag(tag) do
row_class = ".test-tag-row-#{tag.id}"
find_element(row_class) |> move_to(1, 1)
find_element("#{row_class} .test-tag-apply-link") |> click()
# We don't yet assert that it was applied; we don't have enough info to do that here
end
defp make_timeline_selection(from, to) do
find_element(".test-tickmark-#{from}s") |> move_to(0, 0)
mouse_down(0)
find_element(".test-tickmark-#{to}s") |> move_to(0, 0)
mouse_up(0)
assert_selector(".test-timeline-selection-#{from}s-#{to}s")
end
defp assert_tagging(tag, from, to) do
# Confirm that it shows up on the page
selector = get_tagging_selector(tag.id, from, to)
assert_selector(selector)
# Confirm that it's persisted w correct values
assert Tagging.first(tag: tag, starts_at: from, ends_at: to) != nil
end
defp edit_tagging({tag, old_starts_at, ends_at}, :starts_at, new_starts_at) do
old_selector = get_tagging_selector(tag.id, old_starts_at, ends_at)
find_element(old_selector) |> click()
find_element("#{old_selector} .test-handle-left") |> move_to(1, 1)
mouse_down(0)
find_element(".test-tickmark-#{new_starts_at}s") |> move_to(0, 0)
mouse_up(0)
# Confirm that the tagging updates on the page
new_selector = get_tagging_selector(tag.id, new_starts_at, ends_at)
assert_selector(new_selector)
# Confirm that the tagging is updated in the db
assert Tagging.first(tag: tag, starts_at: new_starts_at, ends_at: ends_at) != nil
end
defp delete_tagging(tag, from, to) do
selector = get_tagging_selector(tag.id, from, to)
find_element(selector) |> click()
find_element("#{selector} .test-tagging-delete-link") |> click()
accept_dialog()
refute_selector(selector)
assert Tagging.first(tag: tag, starts_at: from, ends_at: to) == nil
end
defp get_tagging_selector(tag_id, from, to) do
".test-tagging"<>
"[data-tag-id=\"#{tag_id}\"]"<>
"[data-starts-at=\"#{from}\"]"<>
"[data-ends-at=\"#{to}\"]"
end
end
| 33.093264 | 88 | 0.679818 |
f7e6a5bc887b4b81674e24ff61647a52ff756323 | 5,716 | exs | Elixir | test/hexpm/web/controllers/api/user_controller_test.exs | pragmaticivan/hexpm | 7845d1baaf14e8811df00db550b59e51ac9675c6 | [
"Apache-2.0"
] | null | null | null | test/hexpm/web/controllers/api/user_controller_test.exs | pragmaticivan/hexpm | 7845d1baaf14e8811df00db550b59e51ac9675c6 | [
"Apache-2.0"
] | null | null | null | test/hexpm/web/controllers/api/user_controller_test.exs | pragmaticivan/hexpm | 7845d1baaf14e8811df00db550b59e51ac9675c6 | [
"Apache-2.0"
] | null | null | null | defmodule Hexpm.Web.API.UserControllerTest do
use Hexpm.ConnCase, async: true
use Bamboo.Test
alias Hexpm.Accounts.User
defp publish_package(user) do
meta = %{name: "ecto", version: "1.0.0", description: "Domain-specific language."}
body = create_tar(meta, [])
build_conn()
|> put_req_header("content-type", "application/octet-stream")
|> put_req_header("authorization", key_for(user))
|> post("api/packages/ecto/releases", body)
end
describe "POST /api/users" do
test "create user" do
params = %{
username: Fake.sequence(:username),
email: Fake.sequence(:email),
password: "passpass"
}
conn = json_post(build_conn(), "api/users", params)
assert json_response(conn, 201)["url"] =~ "/api/users/#{params.username}"
user = Hexpm.Repo.get_by!(User, username: params.username) |> Hexpm.Repo.preload(:emails)
assert List.first(user.emails).email == params.email
end
test "create user sends mails and requires confirmation" do
params = %{
username: Fake.sequence(:username),
email: Fake.sequence(:email),
password: "passpass"
}
conn = json_post(build_conn(), "api/users", params)
assert conn.status == 201
user = Hexpm.Repo.get_by!(User, username: params.username) |> Hexpm.Repo.preload(:emails)
user_email = List.first(user.emails)
assert_delivered_email(Hexpm.Emails.verification(user, user_email))
conn = publish_package(user)
assert json_response(conn, 403)["message"] == "email not verified"
conn =
get(
build_conn(),
"email/verify?username=#{params.username}&email=#{URI.encode_www_form(user_email.email)}&key=#{
user_email.verification_key
}"
)
assert redirected_to(conn) == "/"
assert get_flash(conn, :info) =~ "verified"
conn = publish_package(user)
assert conn.status == 201
end
test "create user validates" do
params = %{username: Fake.sequence(:username), password: "passpass"}
conn = json_post(build_conn(), "api/users", params)
result = json_response(conn, 422)
assert result["message"] == "Validation error(s)"
assert result["errors"]["emails"] == "can't be blank"
refute Hexpm.Repo.get_by(User, username: params.username)
end
end
describe "GET /api/users/me" do
test "get current user" do
user = insert(:user)
repository = insert(:repository, users: [user])
package1 = insert(:package, package_owners: [build(:package_owner, owner: user)])
package2 =
insert(
:package,
repository_id: repository.id,
package_owners: [build(:package_owner, owner: user)]
)
insert(:repository_user, repository: repository, user: user)
body =
build_conn()
|> put_req_header("authorization", key_for(user))
|> get("api/users/me")
|> json_response(200)
assert body["username"] == user.username
assert body["email"] == hd(user.emails).email
refute body["emails"]
refute body["password"]
assert hd(body["organizations"])["name"] == repository.name
assert hd(body["organizations"])["role"] == "read"
assert [json1, json2] = body["packages"]
assert json1["url"] =~ "/api/packages/#{package1.name}"
assert json1["html_url"] =~ "/packages/#{package1.name}"
assert json1["repository"] =~ "hexpm"
assert json2["url"] =~ "/api/repos/#{repository.name}/packages/#{package2.name}"
assert json2["html_url"] =~ "/packages/#{repository.name}/#{package2.name}"
assert json2["repository"] =~ repository.name
# TODO: deprecated
assert Enum.count(body["owned_packages"]) == 2
assert body["owned_packages"][package1.name] =~ "/api/packages/#{package1.name}"
assert body["owned_packages"][package2.name] =~
"/api/repos/#{repository.name}/packages/#{package2.name}"
end
test "return 401 if not authenticated" do
build_conn()
|> get("api/users/me")
|> json_response(401)
end
end
describe "GET /api/users/:name" do
test "get user" do
user = insert(:user)
conn = get(build_conn(), "api/users/#{user.username}")
body = json_response(conn, 200)
assert body["username"] == user.username
assert body["email"] == hd(user.emails).email
refute body["emails"]
refute body["password"]
conn = get(build_conn(), "api/users/bad")
assert conn.status == 404
end
end
describe "POST /api/users/:name/reset" do
test "email is sent with reset_token when password is reset" do
user = insert(:user)
# initiate reset request
conn = post(build_conn(), "api/users/#{user.username}/reset", %{})
assert conn.status == 204
# check email was sent with correct token
user = Hexpm.Repo.get_by!(User, username: user.username) |> Hexpm.Repo.preload(:emails)
assert_delivered_email(Hexpm.Emails.password_reset_request(user))
# check reset will succeed
assert User.password_reset?(user, user.reset_key) == true
end
end
describe "GET /api/users/:name/test" do
test "test auth" do
user = insert(:user)
conn =
build_conn()
|> put_req_header("authorization", key_for(user))
|> get("api/users/#{user.username}/test")
body = json_response(conn, 200)
assert body["username"] == user.username
conn =
build_conn()
|> put_req_header("authorization", "badkey")
|> get("api/users/#{user.username}/test")
assert conn.status == 401
end
end
end
| 31.755556 | 105 | 0.622988 |
f7e6a790c482c66ac7bfb2783d32f2494add9fdc | 1,681 | exs | Elixir | mix.exs | membraneframework/membrane-element-live-audiomixer | f72558c770f1aafc1ea1856791dffeb1e5aae14e | [
"Apache-2.0"
] | null | null | null | mix.exs | membraneframework/membrane-element-live-audiomixer | f72558c770f1aafc1ea1856791dffeb1e5aae14e | [
"Apache-2.0"
] | null | null | null | mix.exs | membraneframework/membrane-element-live-audiomixer | f72558c770f1aafc1ea1856791dffeb1e5aae14e | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.Element.LiveAudioMixer.MixProject do
use Mix.Project
@version "0.1.1"
@github_url "https://github.com/membraneframework/membrane-element-live-audiomixer"
def project do
[
app: :membrane_element_live_audiomixer,
compilers: [:unifex, :bundlex] ++ Mix.compilers(),
version: @version,
elixir: "~> 1.7",
elixirc_paths: elixirc_paths(Mix.env()),
start_permanent: Mix.env() == :prod,
package: package(),
name: "Membrane Element: Live AudioMixer",
source_url: @github_url,
docs: docs(),
homepage_url: "https://membraneframework.org",
deps: deps()
]
end
def application do
[extra_applications: [:logger]]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp docs do
[
main: "readme",
extras: ["README.md"],
source_ref: "v#{@version}",
nest_modules_by_prefix: [Membrane.Element, Membrane.Element.LiveAudioMixer]
]
end
defp package do
[
maintainers: ["Membrane Team"],
licenses: ["Apache 2.0"],
links: %{
"GitHub" => @github_url,
"Membrane Framework Homepage" => "https://membraneframework.org"
}
]
end
defp deps do
[
{:ex_doc, "~> 0.19", only: :dev, runtime: false},
{:membrane_core,
github: "membraneframework/membrane-core", branch: "master", override: true},
{:membrane_caps_audio_raw, "~> 0.1.7"},
{:membrane_loggers, "~> 0.2.0"},
{:membrane_common_audiomix, github: "membraneframework/membrane-common-audiomix"},
{:bunch, "~> 1.0"},
{:unifex, "~> 0.2"}
]
end
end
| 26.265625 | 88 | 0.607377 |
f7e6ab542aaeb1d9c3504bd842153ec3c923919c | 1,711 | ex | Elixir | clients/service_management/lib/google_api/service_management/v1/model/flow_error_details.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/service_management/lib/google_api/service_management/v1/model/flow_error_details.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/service_management/lib/google_api/service_management/v1/model/flow_error_details.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ServiceManagement.V1.Model.FlowErrorDetails do
@moduledoc """
Encapsulation of flow-specific error details for debugging. Used as a details field on an error Status, not intended for external use.
## Attributes
* `exceptionType` (*type:* `String.t`, *default:* `nil`) - The type of exception (as a class name).
* `flowStepId` (*type:* `String.t`, *default:* `nil`) - The step that failed.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:exceptionType => String.t() | nil,
:flowStepId => String.t() | nil
}
field(:exceptionType)
field(:flowStepId)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceManagement.V1.Model.FlowErrorDetails do
def decode(value, options) do
GoogleApi.ServiceManagement.V1.Model.FlowErrorDetails.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ServiceManagement.V1.Model.FlowErrorDetails do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.22 | 136 | 0.731736 |
f7e6bf7ac88ff908b1f5e2bfa91c590eb674c6b0 | 4,247 | ex | Elixir | lib/benchee/profile.ex | mayel/benchee | faf3c85c241a4c7eeaab8edfc85094bbbb10c44d | [
"MIT"
] | 636 | 2016-06-06T07:58:36.000Z | 2019-03-19T15:27:37.000Z | lib/benchee/profile.ex | mayel/benchee | faf3c85c241a4c7eeaab8edfc85094bbbb10c44d | [
"MIT"
] | 198 | 2016-06-18T08:19:15.000Z | 2019-03-19T15:32:37.000Z | lib/benchee/profile.ex | mayel/benchee | faf3c85c241a4c7eeaab8edfc85094bbbb10c44d | [
"MIT"
] | 43 | 2016-06-08T08:04:30.000Z | 2019-02-13T17:10:24.000Z | defmodule Benchee.Profile do
alias Benchee.Benchmark.Collect
alias Benchee.Benchmark.RunOnce
alias Benchee.Benchmark.ScenarioContext
alias Benchee.Output.ProfilePrinter, as: Printer
alias Benchee.Suite
@default_profiler :eprof
@builtin_profilers [:cprof, :eprof, :fprof]
# we run the function a bunch already, no need for further warmup
@default_profiler_opts [warmup: false]
defmodule Benchee.UnknownProfilerError do
defexception message: "error"
end
@moduledoc """
  Profiles each scenario after benchmarking it if the `profile_after` option is set to one of:
* `true`,
* a valid `profiler`,
* a tuple of a valid `profiler` and a list of options to pass to it, e.g., `{:fprof, [sort: :own]}`.
  The profiler that will be used is either the one set by the `profile_after` option or, if the option is set to `true`,
  the default one (`:eprof`). The following profilers are accepted:
  * `:cprof` will profile with [`Mix.Tasks.Profile.Cprof`](https://hexdocs.pm/mix/Mix.Tasks.Profile.Cprof.html).
It provides information related to the number of function calls.
  * `:eprof` will profile with [`Mix.Tasks.Profile.Eprof`](https://hexdocs.pm/mix/Mix.Tasks.Profile.Eprof.html).
It provides information related to the time spent on each function in regard to the total execution time.
  * `:fprof` will profile with [`Mix.Tasks.Profile.Fprof`](https://hexdocs.pm/mix/Mix.Tasks.Profile.Fprof.html).
It provides information related to the time spent on each function, both the *total* time spent on it and the time spent on it,
*excluding* the time of called functions.
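
  ## Example

  A minimal sketch — `profile_after` is passed like any other `Benchee.run/2` option:

      Benchee.run(
        %{"flat_map" => fn -> Enum.flat_map(1..100, fn i -> [i, i * 2] end) end},
        profile_after: {:fprof, [sort: :own]}
      )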
"""
@doc """
Returns the atom corresponding to the default profiler.
"""
@spec default_profiler() :: unquote(@default_profiler)
def default_profiler, do: @default_profiler
@doc """
List of supported builtin profilers as atoms.
"""
def builtin_profilers, do: @builtin_profilers
@doc """
  For each scenario found in the suite, runs the `profile/2` function of the given profiler.
"""
@spec profile(Suite.t(), module) :: Suite.t()
def profile(suite, printer \\ Printer)
def profile(suite = %{configuration: %{profile_after: false}}, _printer), do: suite
def profile(
suite = %{
scenarios: scenarios,
configuration: config = %{profile_after: true}
},
printer
) do
do_profile(scenarios, {@default_profiler, @default_profiler_opts}, config, printer)
suite
end
def profile(
suite = %{
scenarios: scenarios,
configuration: config = %{profile_after: {profiler, profiler_opts}}
},
printer
) do
profiler_opts = Keyword.merge(@default_profiler_opts, profiler_opts)
do_profile(scenarios, {profiler, profiler_opts}, config, printer)
suite
end
def profile(
suite = %{
scenarios: scenarios,
configuration: config = %{profile_after: profiler}
},
printer
) do
do_profile(scenarios, {profiler, @default_profiler_opts}, config, printer)
suite
end
defp do_profile(scenarios, {profiler, profiler_opts}, config, printer) do
profiler_module = profiler_to_module(profiler)
Enum.each(scenarios, fn scenario ->
run(scenario, {profiler, profiler_module, profiler_opts}, config, printer)
end)
end
defp run(
scenario,
{profiler, profiler_module, profiler_opts},
config,
printer
) do
printer.profiling(scenario.name, profiler)
RunOnce.run(
scenario,
%ScenarioContext{config: config},
{Collect.Profile, [profiler_module: profiler_module, profiler_opts: profiler_opts]}
)
end
# If given a builtin profiler the function will return its proper module.
# In the case of an unknown profiler, it will raise an `UnknownProfilerError` exception.
defp profiler_to_module(profiler) do
if Enum.member?(@builtin_profilers, profiler) do
profiler =
profiler
|> Atom.to_string()
|> String.capitalize()
Module.concat(Mix.Tasks.Profile, profiler)
else
raise Benchee.UnknownProfilerError,
message: "Got an unknown '#{inspect(profiler)}' built-in profiler."
end
end
end
| 33.179688 | 131 | 0.684248 |
f7e6c07ef9cb0dbf16f6b9ea6d7feb780bf9e75b | 3,533 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/api/video_categories.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/api/video_categories.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/api/video_categories.ex | nuxlli/elixir-google-api | ecb8679ac7282b7dd314c3e20c250710ec6a7870 | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.YouTube.V3.Api.VideoCategories do
@moduledoc """
API calls for all endpoints tagged `VideoCategories`.
"""
alias GoogleApi.YouTube.V3.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Returns a list of categories that can be associated with YouTube videos.
## Parameters
- connection (GoogleApi.YouTube.V3.Connection): Connection to server
- part (String.t): The part parameter specifies the videoCategory resource properties that the API response will include. Set the parameter value to snippet.
- opts (KeywordList): [optional] Optional parameters
- :alt (String.t): Data format for the response.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :quotaUser (String.t): An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
- :userIp (String.t): Deprecated. Please use quotaUser instead.
- :hl (String.t): The hl parameter specifies the language that should be used for text values in the API response.
- :id (String.t): The id parameter specifies a comma-separated list of video category IDs for the resources that you are retrieving.
- :regionCode (String.t): The regionCode parameter instructs the API to return the list of video categories available in the specified country. The parameter value is an ISO 3166-1 alpha-2 country code.
## Returns
{:ok, %GoogleApi.YouTube.V3.Model.VideoCategoryListResponse{}} on success
{:error, info} on failure
"""
@spec youtube_video_categories_list(Tesla.Env.client(), String.t(), keyword()) ::
{:ok, GoogleApi.YouTube.V3.Model.VideoCategoryListResponse.t()}
| {:error, Tesla.Env.t()}
def youtube_video_categories_list(connection, part, opts \\ []) do
optional_params = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:hl => :query,
:id => :query,
:regionCode => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/youtube/v3/videoCategories")
|> Request.add_param(:query, :part, part)
|> Request.add_optional_params(optional_params, opts)
connection
|> Connection.execute(request)
|> Response.decode(struct: %GoogleApi.YouTube.V3.Model.VideoCategoryListResponse{})
end
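  # Hypothetical usage sketch (not part of the generated client; the token and
  # connection constructor are assumptions):
  #
  #   conn = GoogleApi.YouTube.V3.Connection.new("oauth2-access-token")
  #
  #   {:ok, %GoogleApi.YouTube.V3.Model.VideoCategoryListResponse{items: items}} =
  #     youtube_video_categories_list(conn, "snippet", regionCode: "US")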
end
| 44.1625 | 206 | 0.711294 |
f7e6c9ce4308036644fecad833f5fa308f1a36be | 269 | exs | Elixir | programming/elixir/learning_elixir_code_bundle/code/chapter9/myif.exs | NomikOS/learning | 268f94605214f6861ef476ca7573e68c068ccbe5 | [
"Unlicense"
] | 1 | 2020-01-09T03:22:09.000Z | 2020-01-09T03:22:09.000Z | elixir/tour/learning_elixir_code_bundle/code/chapter9/myif.exs | lijiansong/lang | e255709da2b12e09dea45f86d54f77a19b96f13b | [
"WTFPL"
] | null | null | null | elixir/tour/learning_elixir_code_bundle/code/chapter9/myif.exs | lijiansong/lang | e255709da2b12e09dea45f86d54f77a19b96f13b | [
"WTFPL"
] | null | null | null | defmodule MyIf do
def if(condition, clauses) do
do_clause = Keyword.get(clauses, :do, nil)
else_clause = Keyword.get(clauses, :else, nil)
case condition do
val when val in [false, nil] ->
else_clause
_ -> do_clause
end
end
end
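# Usage sketch (my addition, not from the book excerpt): because `do:`/`else:`
# are plain keyword arguments here, both branches are evaluated eagerly,
# unlike the real `if`, which is a macro.
#
#   MyIf.if(1 == 1, do: "truthy", else: "falsy")  #=> "truthy"
#   MyIf.if(nil, do: "truthy", else: "falsy")     #=> "falsy"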
| 19.214286 | 50 | 0.628253 |
f7e6dcfa568545e0fdbdc93c8976f25386fe4091 | 2,773 | ex | Elixir | clients/docs/lib/google_api/docs/v1/model/update_table_column_properties_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/docs/lib/google_api/docs/v1/model/update_table_column_properties_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/docs/lib/google_api/docs/v1/model/update_table_column_properties_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Docs.V1.Model.UpdateTableColumnPropertiesRequest do
@moduledoc """
Updates the
TableColumnProperties of columns
in a table.
## Attributes
* `columnIndices` (*type:* `list(integer())`, *default:* `nil`) - The list of zero-based column indices whose property should be updated. If
no indices are specified, all columns will be updated.
* `fields` (*type:* `String.t`, *default:* `nil`) - The fields that should be updated.
At least one field must be specified. The root `tableColumnProperties` is
implied and should not be specified. A single `"*"` can be used as
short-hand for listing every field.
For example to update the column width, set `fields` to `"width"`.
* `tableColumnProperties` (*type:* `GoogleApi.Docs.V1.Model.TableColumnProperties.t`, *default:* `nil`) - The table column properties to update.
If the value of `table_column_properties#width` is less than 5 points
(5/72 inch), a 400 bad request error is returned.
* `tableStartLocation` (*type:* `GoogleApi.Docs.V1.Model.Location.t`, *default:* `nil`) - The location where the table starts in the document.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:columnIndices => list(integer()),
:fields => String.t(),
:tableColumnProperties => GoogleApi.Docs.V1.Model.TableColumnProperties.t(),
:tableStartLocation => GoogleApi.Docs.V1.Model.Location.t()
}
field(:columnIndices, type: :list)
field(:fields)
field(:tableColumnProperties, as: GoogleApi.Docs.V1.Model.TableColumnProperties)
field(:tableStartLocation, as: GoogleApi.Docs.V1.Model.Location)
end
defimpl Poison.Decoder, for: GoogleApi.Docs.V1.Model.UpdateTableColumnPropertiesRequest do
def decode(value, options) do
GoogleApi.Docs.V1.Model.UpdateTableColumnPropertiesRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Docs.V1.Model.UpdateTableColumnPropertiesRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.779412 | 148 | 0.727371 |
f7e74f1c302aec1cd9d9341c229b137047eaba05 | 6,051 | ex | Elixir | lib/chroxy/chrome_proxy.ex | heydtn/chroxy | 68f2188ba835239bcb9e40856e117ef604c2248e | [
"MIT"
] | 201 | 2018-05-02T16:12:27.000Z | 2022-03-18T16:53:19.000Z | lib/chroxy/chrome_proxy.ex | zacksiri/chroxy | 99b882d01271ea4353762961bc8030d70212bd42 | [
"MIT"
] | 41 | 2018-06-09T11:48:19.000Z | 2022-03-03T17:10:19.000Z | lib/chroxy/chrome_proxy.ex | zacksiri/chroxy | 99b882d01271ea4353762961bc8030d70212bd42 | [
"MIT"
] | 26 | 2018-06-05T12:33:36.000Z | 2021-12-29T18:15:44.000Z | defmodule Chroxy.ChromeProxy do
@moduledoc """
Process which establishes a single proxied websocket connection
to an underlying chrome browser page remote debugging websocket.
  Upon initialisation, the chrome proxy signals the `Chroxy.ProxyListener`
to accept a TCP connection. The `Chroxy.ProxyListener` will initialise a
`Chroxy.ProxyServer` to manage the connection between the upstream client
and the downstream chrome remote debugging websocket.
  When either the upstream or downstream connection closes, the `down/2`
  callback of the `Chroxy.ProxyServer.Hook` behaviour is invoked, allowing the
`Chroxy.ChromeProxy` to close the chrome page.
"""
use GenServer
require Logger
@behaviour Chroxy.ProxyServer.Hook
##
# API
def child_spec(opts) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [opts]},
restart: :transient,
shutdown: 5000,
type: :worker
}
end
@doc """
Spawns `Chroxy.ChromeProxy` process.
Keyword `args`:
- `:chrome` - pid of a `Chroxy.ChromeServer` process.
"""
def start_link(args) do
GenServer.start_link(__MODULE__, args)
end
@doc """
Starts a chrome page, and returns a websocket connection
routed via the underlying proxy.
"""
def chrome_connection(ref) do
GenServer.call(ref, :chrome_connection)
end
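  # Usage sketch (illustrative; the pid names and URL shape are assumptions):
  # given a running `Chroxy.ChromeServer` pid,
  #
  #   {:ok, proxy} = Chroxy.ChromeProxy.start_link(chrome: chrome_pid)
  #   Chroxy.ChromeProxy.chrome_connection(proxy)
  #   #=> "ws://<proxy_host>:<proxy_port>/devtools/page/<page_id>"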
##
# Proxy Hook Callbacks
@doc """
`Chroxy.ProxyServer` Callback Hook
Called when upstream connection is established to ProxyServer.
Will return downstream connection information of the Chrome instance.
"""
def up(ref, proxy_state) do
GenServer.call(ref, {:up, proxy_state})
end
@doc """
`Chroxy.ProxyServer` Callback Hook
Called when upstream or downstream connections are closed.
Will close the chrome page and shutdown this process.
"""
def down(ref, proxy_state) do
GenServer.cast(ref, {:down, proxy_state})
end
@doc """
Extract Chrome `page_id` from url.
"""
def page_id({:url, url}) do
url
|> String.split("/")
|> List.last()
end
@doc """
Extract Chrome `page_id` from http request.
"""
def page_id({:http_request, data}) do
data
|> String.split(" HTTP")
|> List.first()
|> String.split("GET /devtools/page/")
|> Enum.at(1)
end
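  # For example (sketch values):
  #
  #   page_id({:url, "ws://localhost:9222/devtools/page/ABC123"})
  #   #=> "ABC123"
  #
  #   page_id({:http_request, "GET /devtools/page/ABC123 HTTP/1.1\r\n"})
  #   #=> "ABC123"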
##
# GenServer Callbacks
@doc false
def init(args) do
chrome_pid = Keyword.get(args, :chrome)
# We don't need to terminate the underlying proxy if the chrome browser process
# goes down as:
    # 1. It may not have been opened yet when the client has yet to connect.
# 2. The socket close signal when browser is terminated will terminate the proxy.
# In the event it has not been established yet, we will want to terminate
    # this process, hence it should be linked.
Process.flag(:trap_exit, true)
Process.link(chrome_pid)
{:ok, %{chrome: chrome_pid, page: nil, proxy_opts: nil}}
end
@doc false
def handle_call(:chrome_connection, _from, state = %{chrome: chrome, page: nil}) do
# Create a new page
page = new_page(chrome)
# Register page into `ProxyRouter` for dynamic lookup
Chroxy.ProxyRouter.put(page["id"], self())
# Get the websocket host:port for the page and pass to the proxy listener
# directly in order to set the downstream connection proxy process when
# a upstream client connects. (Note: no need to use `up/2` callback as we
# have the downstream information available at tcp listener accept time).
uri = page["webSocketDebuggerUrl"] |> URI.parse()
    # Asynchronously signal the listener to accept a connection, which will spawn a
# ProxyServer to handle the communications. The ProxyServer will be passed
# the lookup function which will find downstream connection options based on
    # the incoming request.
Chroxy.ProxyListener.accept(dyn_hook: fn(req) ->
%{
mod: Chroxy.ChromeProxy,
ref: page_id({:http_request, req})
|> Chroxy.ProxyRouter.get()
}
end)
proxy_websocket = proxy_websocket_addr(page)
{:reply, proxy_websocket,
%{state |
page: page,
proxy_opts: [
downstream_host: uri.host |> String.to_charlist(),
downstream_port: uri.port
]
}
}
end
@doc false
def handle_call(:chrome_connection, _from, state = %{page: page}) do
proxy_websocket_url = proxy_websocket_addr(page)
{:reply, proxy_websocket_url, state}
end
@doc false
def handle_call({:up, _proxy_state}, _from, state = %{proxy_opts: proxy_opts}) do
{:reply, proxy_opts, state}
end
@doc false
def handle_cast({:down, _proxy_state}, state = %{chrome: chrome, page: page}) do
Logger.info("Proxy connection down - closing page")
    # Close the page when the connection is down, unless the chrome process has died,
    # which is one reason the connection could be down.
if Process.alive?(chrome) do
Chroxy.ChromeServer.close_page(chrome, page)
Chroxy.ProxyRouter.delete(page["id"])
end
# terminate this process, as underlying proxy connections have been closed
{:stop, :normal, state}
end
defp proxy_websocket_addr(%{"webSocketDebuggerUrl" => websocket}) do
# Change host and port in websocket address to that of the proxy
proxy_opts = Application.get_env(:chroxy, Chroxy.ProxyListener)
proxy_host = Keyword.get(proxy_opts, :host)
proxy_port = Keyword.get(proxy_opts, :port)
uri = URI.parse(websocket)
proxy_websocket =
websocket
|> String.replace(Integer.to_string(uri.port), proxy_port)
|> String.replace(uri.host, proxy_host)
proxy_websocket
end
defp new_page(chrome) do
case Chroxy.ChromeServer.new_page(chrome) do
:not_ready ->
Logger.debug(
"Failed to obtain new page, ChromeServer [#{inspect(chrome)}] not ready, retrying..."
)
Chroxy.ChromeServer.ready(chrome)
new_page(chrome)
page ->
Logger.debug("Obtained new page from ChromeServer [#{inspect(chrome)}]")
page
end
end
end
| 29.661765 | 95 | 0.68121 |
f7e754d599c0b72ea183a4989ac61ac726e83cc3 | 932 | ex | Elixir | lib/rfxi_web/sockets/rfx_socket.ex | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | 1 | 2021-08-10T14:46:10.000Z | 2021-08-10T14:46:10.000Z | lib/rfxi_web/sockets/rfx_socket.ex | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | 2 | 2021-06-22T14:12:37.000Z | 2021-06-28T05:06:23.000Z | lib/rfxi_web/sockets/rfx_socket.ex | andyl/rfxi | 9007c75693d643555c45a20e9634dd4b3867deba | [
"MIT"
] | null | null | null | defmodule RfxiWeb.RfxSocket do
@behaviour Phoenix.Socket.Transport
def child_spec(_opts) do
# We won't spawn any process, so let's return a dummy task
# %{id: Task, start: {Task, :start_link, [fn -> :ok end]}, restart: :transient}
%{id: :ws_rfx, start: {Task, :start_link, [fn -> :ok end]}, restart: :transient}
end
def connect(state) do
# IO.inspect state, label: "CONNECT_RFX_SOCKET"
# Callback to retrieve relevant data from the connection.
# The map contains options, params, transport and endpoint keys.
{:ok, state}
end
def init(state) do
# Now we are effectively inside the process that maintains the socket.
{:ok, state}
end
def handle_in({text, _opts}, state) do
output = RfxCli.Base.main_core(text).json
{:reply, :ok, {:text, output}, state}
end
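  # Round-trip sketch (the command string is made up): each text frame is
  # treated as an rfx command, run through `RfxCli.Base.main_core/1`, and the
  # JSON result is pushed back on the same socket.
  #
  #   handle_in({"some rfx command", []}, state)
  #   #=> {:reply, :ok, {:text, "<json string>"}, state}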
def handle_info(_, state) do
{:ok, state}
end
def terminate(_reason, _state) do
:ok
end
end
| 26.628571 | 84 | 0.66309 |
f7e766f135655ce5597cfd3a9860c1bcbd311f60 | 1,699 | ex | Elixir | backend/lib/edgehog/assets.ex | harlem88/edgehog | 7a278d119c3d592431fdbba406207376e194f7eb | [
"Apache-2.0"
] | null | null | null | backend/lib/edgehog/assets.ex | harlem88/edgehog | 7a278d119c3d592431fdbba406207376e194f7eb | [
"Apache-2.0"
] | null | null | null | backend/lib/edgehog/assets.ex | harlem88/edgehog | 7a278d119c3d592431fdbba406207376e194f7eb | [
"Apache-2.0"
] | null | null | null | #
# This file is part of Edgehog.
#
# Copyright 2021 SECO Mind Srl
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
defmodule Edgehog.Assets do
alias Edgehog.Assets.SystemModelPicture
@assets_system_model_picture_module Application.compile_env(
:edgehog,
:assets_system_model_picture_module,
SystemModelPicture
)
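  # The storage adapter is resolved at compile time so the test environment can
  # swap in a double (sketch; the mock module name is an assumption):
  #
  #   # config/test.exs
  #   config :edgehog, :assets_system_model_picture_module, SystemModelPictureMock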
def upload_system_model_picture(_system_model, nil) do
{:ok, nil}
end
def upload_system_model_picture(system_model, picture_file) do
with :ok <- ensure_storage_enabled() do
@assets_system_model_picture_module.upload(system_model, picture_file)
end
end
def delete_system_model_picture(_system_model, nil) do
:ok
end
def delete_system_model_picture(system_model, picture_url) do
with :ok <- ensure_storage_enabled() do
@assets_system_model_picture_module.delete(system_model, picture_url)
end
end
defp ensure_storage_enabled do
if Application.get_env(:edgehog, :enable_s3_storage?, false) do
:ok
else
{:error, :storage_disabled}
end
end
end
| 30.339286 | 76 | 0.689818 |
f7e77104032d7221c4432402e4801ca7030a3368 | 6,615 | exs | Elixir | test/game/overworld/sector_test.exs | jgsmith/ex_venture | 546adaa8fe80d45a72fde6de8d8d6906902c12d4 | [
"MIT"
] | 2 | 2019-05-14T11:36:44.000Z | 2020-07-01T08:54:04.000Z | test/game/overworld/sector_test.exs | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | null | null | null | test/game/overworld/sector_test.exs | nickwalton/ex_venture | d8ff1b0181db03f9ddcb7610ae7ab533feecbfbb | [
"MIT"
] | 1 | 2021-01-29T14:12:40.000Z | 2021-01-29T14:12:40.000Z | defmodule Game.Overworld.SectorTest do
use Data.ModelCase
alias Data.Character
alias Game.Character, as: GameCharacter
alias Game.Events.RoomEntered
alias Game.Events.RoomLeft
alias Game.Overworld.Sector
alias Game.Session
setup do
state = %{
zone_id: 1,
sector: "0-0",
players: [],
npcs: [],
}
user = base_user()
character = GameCharacter.to_simple(base_character(user))
npc = GameCharacter.to_simple(%{base_npc() | id: 11, name: "Bandit"})
%{state: state, user: user, character: character, npc: npc, overworld_id: "1:1,1"}
end
describe "looking" do
test "looks at the current overworld", %{state: state, overworld_id: overworld_id} do
Cachex.put(:zones, 1, %{id: 1, name: "Zone", overworld_map: []})
{:reply, {:ok, environment}, _state} = Sector.handle_call({:look, overworld_id}, nil, state)
assert environment.x == 1
assert environment.y == 1
assert environment.zone == "Zone"
end
end
describe "entering an overworld id" do
test "player entering", %{state: state, character: character, overworld_id: overworld_id} do
{:noreply, state} = Sector.handle_cast({:enter, overworld_id, character, :enter}, state)
assert state.players == [{%{x: 1, y: 1}, character}]
end
test "npc entering", %{state: state, npc: npc, overworld_id: overworld_id} do
{:noreply, state} = Sector.handle_cast({:enter, overworld_id, npc, :enter}, state)
assert state.npcs == [{%{x: 1, y: 1}, npc}]
end
test "sends a notification to users in the same cell", %{state: state, character: character, overworld_id: overworld_id} do
notify_user = %{base_user() | id: 11}
notify_character = %{base_character(notify_user) | id: 11}
Session.Registry.register(notify_character)
Session.Registry.catch_up()
state = %{state | players: [{%{x: 1, y: 1}, notify_character}]}
{:noreply, _state} = Sector.handle_cast({:enter, overworld_id, character, :enter}, state)
assert_receive {:"$gen_cast", {:notify, %RoomEntered{character: ^character}}}
end
test "does not send notifications to users in different cells", %{state: state, character: character, overworld_id: overworld_id} do
notify_user = %{base_user() | id: 11}
notify_character = %{base_character(notify_user) | id: 11}
Session.Registry.register(notify_character)
Session.Registry.catch_up()
state = %{state | players: [{%{x: 1, y: 2}, notify_character}]}
{:noreply, _state} = Sector.handle_cast({:enter, overworld_id, character, :enter}, state)
refute_receive {:"$gen_cast", {:notify, %RoomEntered{character: ^character}}}, 50
end
end
describe "leaving an overworld id" do
test "player entering", %{state: state, character: character, overworld_id: overworld_id} do
state = %{state | players: [{%{x: 1, y: 1}, character}, {%{x: 1, y: 1}, %Character{id: 2, name: "Guard"}}]}
{:noreply, state} = Sector.handle_cast({:leave, overworld_id, character, :leave}, state)
assert state.players == [{%{x: 1, y: 1}, %Character{id: 2, name: "Guard"}}]
end
test "npc entering", %{state: state, npc: npc, overworld_id: overworld_id} do
state = %{state | npcs: [{%{x: 1, y: 1}, npc}, {%{x: 1, y: 1}, %{id: 2, name: "Guard"}}]}
{:noreply, state} = Sector.handle_cast({:leave, overworld_id, npc, :leave}, state)
assert state.npcs == [{%{x: 1, y: 1}, %{id: 2, name: "Guard"}}]
end
test "sends a notification to users in the same cell", %{state: state, character: character, overworld_id: overworld_id} do
notify_user = %{base_user() | id: 11}
notify_character = %{base_character(notify_user) | id: 11}
Session.Registry.register(notify_character)
Session.Registry.catch_up()
state = %{state | players: [{%{x: 1, y: 1}, character}, {%{x: 1, y: 1}, notify_character}]}
{:noreply, _state} = Sector.handle_cast({:leave, overworld_id, character, :leave}, state)
assert_receive {:"$gen_cast", {:notify, %RoomLeft{character: ^character}}}
end
test "does not send notifications to users in different cells", %{state: state, character: character, overworld_id: overworld_id} do
notify_user = %{base_user() | id: 11}
notify_character = %{base_character(notify_user) | id: 11}
Session.Registry.register(notify_character)
Session.Registry.catch_up()
state = %{state | players: [{%{x: 1, y: 1}, character}, {%{x: 1, y: 2}, notify_character}]}
{:noreply, _state} = Sector.handle_cast({:leave, overworld_id, character, :leave}, state)
refute_receive {:"$gen_cast", {:notify, %RoomLeft{character: ^character}}}, 50
end
end
describe "notify" do
test "sends notifications to players in the same cell", %{state: state, character: character, overworld_id: overworld_id} do
notify_user = %{base_user() | id: 11}
notify_character = %{base_character(notify_user) | id: 11}
Session.Registry.register(notify_character)
Session.Registry.catch_up()
state = %{state | players: [{%{x: 1, y: 1}, character}, {%{x: 1, y: 1}, notify_character}]}
{:noreply, _state} = Sector.handle_cast({:notify, overworld_id, character, {:hi}}, state)
assert_receive {:"$gen_cast", {:notify, {:hi}}}
end
test "does not send notifications to users in different cells", %{state: state, character: character, overworld_id: overworld_id} do
notify_user = %{base_user() | id: 11}
notify_character = %{base_character(notify_user) | id: 11}
Session.Registry.register(notify_character)
Session.Registry.catch_up()
state = %{state | players: [{%{x: 1, y: 1}, character}, {%{x: 1, y: 2}, notify_character}]}
{:noreply, _state} = Sector.handle_cast({:notify, overworld_id, character, {:hi}}, state)
refute_receive {:"$gen_cast", {:notify, {:hi}}}, 50
end
end
describe "update character" do
test "stores the new information", %{state: state, character: character, overworld_id: overworld_id} do
character = %{character | name: "Player2"}
{:noreply, state} = Sector.handle_cast({:update_character, overworld_id, character}, state)
assert [{_cell, %{name: "Player2"}}] = state.players
end
test "stores the new information - npc", %{state: state, npc: npc, overworld_id: overworld_id} do
npc = %{npc | name: "Bandito"}
{:noreply, state} = Sector.handle_cast({:update_character, overworld_id, npc}, state)
assert [{_cell, %{name: "Bandito"}}] = state.npcs
end
end
end
| 39.375 | 136 | 0.646561 |
f7e774627e0d2338e2f23fceb029ea7806a732e3 | 6,648 | exs | Elixir | apps/ewallet/test/ewallet/web/v1/serializers/user_serializer_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet/test/ewallet/web/v1/serializers/user_serializer_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet/test/ewallet/web/v1/serializers/user_serializer_test.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWallet.Web.V1.UserSerializerTest do
use EWallet.Web.SerializerCase, :v1
alias Ecto.Association.NotLoaded
alias EWallet.Web.Paginator
alias EWallet.Web.V1.UserSerializer
alias Utils.Helpers.DateFormatter
describe "serialize/1" do
test "serializes a user into correct JSON format" do
user = insert(:user)
expected = %{
object: "user",
id: user.id,
socket_topic: "user:#{user.id}",
username: user.username,
full_name: user.full_name,
calling_name: user.calling_name,
provider_user_id: user.provider_user_id,
email: user.email,
enabled: user.enabled,
enabled_2fa_at: nil,
avatar: %{
original: nil,
large: nil,
small: nil,
thumb: nil
},
metadata: %{
"first_name" => user.metadata["first_name"],
"last_name" => user.metadata["last_name"]
},
encrypted_metadata: %{},
created_at: DateFormatter.to_iso8601(user.inserted_at),
updated_at: DateFormatter.to_iso8601(user.updated_at)
}
assert UserSerializer.serialize(user) == expected
end
test "serializes to nil if user is not given" do
assert UserSerializer.serialize(nil) == nil
end
test "serializes to nil if user is not loaded" do
assert UserSerializer.serialize(%NotLoaded{}) == nil
end
test "serializes a user paginator into a list object" do
user1 = insert(:user)
user2 = insert(:user)
paginator = %Paginator{
data: [user1, user2],
pagination: %{
current_page: 9,
per_page: 7,
is_first_page: false,
is_last_page: true
}
}
expected = %{
object: "list",
data: [
%{
object: "user",
id: user1.id,
socket_topic: "user:#{user1.id}",
username: user1.username,
full_name: user1.full_name,
calling_name: user1.calling_name,
provider_user_id: user1.provider_user_id,
email: user1.email,
avatar: %{
original: nil,
large: nil,
small: nil,
thumb: nil
},
enabled: user1.enabled,
enabled_2fa_at: nil,
metadata: %{
"first_name" => user1.metadata["first_name"],
"last_name" => user1.metadata["last_name"]
},
encrypted_metadata: %{},
created_at: DateFormatter.to_iso8601(user1.inserted_at),
updated_at: DateFormatter.to_iso8601(user1.updated_at)
},
%{
object: "user",
id: user2.id,
socket_topic: "user:#{user2.id}",
username: user2.username,
full_name: user2.full_name,
calling_name: user2.calling_name,
provider_user_id: user2.provider_user_id,
email: user2.email,
avatar: %{
original: nil,
large: nil,
small: nil,
thumb: nil
},
enabled: user2.enabled,
enabled_2fa_at: nil,
metadata: %{
"first_name" => user2.metadata["first_name"],
"last_name" => user2.metadata["last_name"]
},
encrypted_metadata: %{},
created_at: DateFormatter.to_iso8601(user2.inserted_at),
updated_at: DateFormatter.to_iso8601(user2.updated_at)
}
],
pagination: %{
current_page: 9,
per_page: 7,
is_first_page: false,
is_last_page: true
}
}
assert UserSerializer.serialize(paginator) == expected
end
test "serializes a list of users into a list object" do
user1 = insert(:user)
user2 = insert(:user)
users = [user1, user2]
expected = %{
object: "list",
data: [
%{
object: "user",
id: user1.id,
socket_topic: "user:#{user1.id}",
username: user1.username,
full_name: user1.full_name,
calling_name: user1.calling_name,
provider_user_id: user1.provider_user_id,
email: user1.email,
avatar: %{
original: nil,
large: nil,
small: nil,
thumb: nil
},
metadata: %{
"first_name" => user1.metadata["first_name"],
"last_name" => user1.metadata["last_name"]
},
encrypted_metadata: %{},
enabled: user1.enabled,
enabled_2fa_at: nil,
created_at: DateFormatter.to_iso8601(user1.inserted_at),
updated_at: DateFormatter.to_iso8601(user1.updated_at)
},
%{
object: "user",
id: user2.id,
socket_topic: "user:#{user2.id}",
username: user2.username,
full_name: user2.full_name,
calling_name: user2.calling_name,
provider_user_id: user2.provider_user_id,
email: user2.email,
avatar: %{
original: nil,
large: nil,
small: nil,
thumb: nil
},
metadata: %{
"first_name" => user2.metadata["first_name"],
"last_name" => user2.metadata["last_name"]
},
encrypted_metadata: %{},
enabled: user2.enabled,
enabled_2fa_at: nil,
created_at: DateFormatter.to_iso8601(user2.inserted_at),
updated_at: DateFormatter.to_iso8601(user2.updated_at)
}
]
}
assert UserSerializer.serialize(users) == expected
end
end
describe "serialize/2" do
test "serializes users to ids" do
users = [user1, user2] = insert_list(2, :user)
assert UserSerializer.serialize(users, :id) == [user1.id, user2.id]
end
end
end
| 30.92093 | 74 | 0.550842 |
f7e775ba8a7c5ac1c4242fc04e745c3711c971f2 | 1,234 | ex | Elixir | lib/predictions/prediction.ex | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 1 | 2022-01-24T12:39:05.000Z | 2022-01-24T12:39:05.000Z | lib/predictions/prediction.ex | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 40 | 2021-05-05T10:14:25.000Z | 2022-03-31T18:34:15.000Z | lib/predictions/prediction.ex | mbta/realtime_signs | 3fd8cbc26ce2b0820e608e60fe12135dab5def69 | [
"MIT"
] | 1 | 2022-03-20T21:08:12.000Z | 2022-03-20T21:08:12.000Z | defmodule Predictions.Prediction do
defstruct stop_id: nil,
seconds_until_arrival: nil,
seconds_until_departure: nil,
seconds_until_passthrough: nil,
direction_id: nil,
schedule_relationship: nil,
route_id: nil,
trip_id: nil,
destination_stop_id: nil,
stopped?: false,
stops_away: 0,
boarding_status: nil,
new_cars?: false,
revenue_trip?: true,
vehicle_id: nil
@type trip_id :: String.t()
@type t :: %__MODULE__{
stop_id: String.t(),
seconds_until_arrival: non_neg_integer() | nil,
seconds_until_departure: non_neg_integer() | nil,
seconds_until_passthrough: non_neg_integer() | nil,
direction_id: 0 | 1,
schedule_relationship: :scheduled | :skipped | nil,
route_id: String.t(),
trip_id: trip_id() | nil,
destination_stop_id: String.t(),
stopped?: boolean(),
stops_away: integer(),
boarding_status: String.t() | nil,
new_cars?: boolean(),
revenue_trip?: boolean(),
vehicle_id: String.t() | nil
}
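  # Illustrative value (all field values below are made up):
  #
  #   %Predictions.Prediction{
  #     stop_id: "stop-1",
  #     route_id: "Red",
  #     direction_id: 0,
  #     trip_id: "trip-123",
  #     seconds_until_arrival: 90,
  #     stops_away: 2
  #   }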
end
| 32.473684 | 61 | 0.558347 |
f7e7b880be131a6abdf3b58244682cc22813e0e6 | 241 | ex | Elixir | web/controllers/page_controller.ex | yasuhiro-okada-aktsk/sample_elixir_exrm | 755abee8b7ee64dc82679d09c800d301393063ab | [
"MIT"
] | null | null | null | web/controllers/page_controller.ex | yasuhiro-okada-aktsk/sample_elixir_exrm | 755abee8b7ee64dc82679d09c800d301393063ab | [
"MIT"
] | null | null | null | web/controllers/page_controller.ex | yasuhiro-okada-aktsk/sample_elixir_exrm | 755abee8b7ee64dc82679d09c800d301393063ab | [
"MIT"
] | null | null | null | defmodule SampleElixirExrm.PageController do
use SampleElixirExrm.Web, :controller
def index(conn, _params) do
data = [false, 42, ~w(forty two), [time: "now"], %{foo: :bar}]
Apex.ap data
render conn, "index.html"
end
end
| 21.909091 | 66 | 0.672199 |
f7e7b97072ee1d623a4c056845d3d0b0f2541c14 | 2,617 | ex | Elixir | lib/rabbit_case.ex | akoutmos/gen_rmq | 0015ee20019bf4612ce8b706df63b5defbd2a4c9 | [
"MIT"
] | null | null | null | lib/rabbit_case.ex | akoutmos/gen_rmq | 0015ee20019bf4612ce8b706df63b5defbd2a4c9 | [
"MIT"
] | null | null | null | lib/rabbit_case.ex | akoutmos/gen_rmq | 0015ee20019bf4612ce8b706df63b5defbd2a4c9 | [
"MIT"
] | null | null | null | defmodule GenRMQ.RabbitCase do
@moduledoc """
This module defines the setup for tests requiring
  access to RabbitMQ.
"""
defmacro __using__([]) do
quote do
use AMQP
def rmq_open(uri) do
AMQP.Connection.open(uri)
end
def publish_message(conn, exchange, message, routing_key \\ "#", meta \\ []) do
{:ok, channel} = AMQP.Channel.open(conn)
GenRMQ.Binding.declare_exchange(channel, exchange)
AMQP.Basic.publish(channel, GenRMQ.Binding.exchange_name(exchange), routing_key, message, meta)
AMQP.Channel.close(channel)
end
def setup_out_queue(conn, out_queue, out_exchange) do
{:ok, chan} = AMQP.Channel.open(conn)
AMQP.Queue.declare(chan, out_queue)
AMQP.Exchange.topic(chan, out_exchange, durable: true)
AMQP.Queue.bind(chan, out_queue, out_exchange, routing_key: "#")
AMQP.Channel.close(chan)
end
def get_message_from_queue(context) do
get_message_from_queue(context[:rabbit_conn], context[:out_queue])
end
def get_message_from_queue(conn, queue) do
{:ok, chan} = AMQP.Channel.open(conn)
{:ok, payload, meta} = AMQP.Basic.get(chan, queue)
{:ok, Jason.decode!(payload), meta}
end
def purge_queues(uri, queues) do
{:ok, conn} = rmq_open(uri)
Enum.each(queues, &purge_queue(conn, &1))
AMQP.Connection.close(conn)
end
def purge_queues!(uri, queues) do
{:ok, conn} = rmq_open(uri)
Enum.each(queues, &purge_queue!(conn, &1))
AMQP.Connection.close(conn)
end
def purge_queue(conn, queue) do
try do
purge_queue!(conn, queue)
catch
:exit, _ ->
:ok
end
end
def purge_queue!(conn, queue) do
{:ok, chan} = AMQP.Channel.open(conn)
AMQP.Queue.purge(chan, queue)
AMQP.Channel.close(chan)
end
def out_queue_count(context) do
queue_count!(context[:rabbit_conn], context[:out_queue])
end
def dl_queue_count(context) do
queue_count!(context[:rabbit_conn], context[:dl_queue])
end
def queue_count!(conn, queue) do
{:ok, chan} = AMQP.Channel.open(conn)
{:ok, %{message_count: count}} = AMQP.Queue.declare(chan, queue, passive: true)
AMQP.Channel.close(chan)
count
end
def queue_count(conn, queue) do
try do
{:ok, queue_count!(conn, queue)}
catch
:exit, _ ->
{:error, :not_found}
end
end
end
end
end
| 28.139785 | 103 | 0.598395 |
f7e7f5f145cb002d5032e8f2d0dbe2716747930f | 3,144 | ex | Elixir | lib/live_view_demo/board.ex | JohnB/live_view_demo | e623e6355b5242acf563d39e04c92bfbd0f3d25c | [
"Apache-2.0"
] | null | null | null | lib/live_view_demo/board.ex | JohnB/live_view_demo | e623e6355b5242acf563d39e04c92bfbd0f3d25c | [
"Apache-2.0"
] | null | null | null | lib/live_view_demo/board.ex | JohnB/live_view_demo | e623e6355b5242acf563d39e04c92bfbd0f3d25c | [
"Apache-2.0"
] | null | null | null | defmodule Board do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc """
Encapsulate the board position structure, which informs the BoardLive
UI component.
"""
defstruct [:board_squares, :width, :height, :start_squares]
@player_colors ["blue", "green", "yellow", "red"]
def new( width \\ default_width(), height \\ default_height(), _start_style \\ :corners) do
# Randomize our start squares w/in the corner quadrant
top_left_x = Enum.random([0, 1, 2, 3])
top_left_y = Enum.random([0, 1, 2, 3])
top_left = width * top_left_y + top_left_x
top_right = width * (top_left_x + 1) - top_left_y - 1
bot_right = width * height - top_left - 1
bot_left = width * height - top_right - 1
%__MODULE__{
width: width,
height: height,
start_squares: %{
top_left => Enum.at(@player_colors, 0),
top_right => Enum.at(@player_colors, 1),
bot_right => Enum.at(@player_colors, 2),
bot_left => Enum.at(@player_colors, 3)
},
board_squares:
Enum.reduce( 1..(width * height), %{},
fn x, acc -> put_in(acc, [x-1],
%{
base: "", # in
overlay: nil,
square: x-1
}
)
end
)
}
end
def default_width() do
Enum.random([18,19,21,22])
end
def default_height() do
20 # consistent height looks better on mobile than Enum.random([18,19,21,22])
end
# Return the CSS class(es) that should be applied to this square.
def square_class(%__MODULE__{start_squares: start_squares, board_squares: board_squares}, square_index) do
case start_squares[square_index] do
nil -> ["#{board_squares[square_index].base} #{board_squares[square_index].overlay}"]
color -> [color]
end
end
def show_on_board(board, five_squares, position, class) do
five_squares = smear(five_squares, board.width)
|> Enum.map(fn n -> n + position end)
updated_squares = board.board_squares
|> Enum.map(fn {k, v} ->
case Enum.member?(five_squares, k) do
true -> {k, %{v | overlay: class}}
_ -> {k, v}
end end)
%{board | board_squares: updated_squares}
end
  # re-map these squares from the 5-wide piece grid onto a board grid of the given width
def smear(squares, width) do
Enum.map(squares, fn n -> Integer.mod(n, 5) + width * Integer.floor_div(n, 5) end)
end
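  # For example (sketch): [0, 1, 5] is a small corner piece on the 5-wide piece
  # grid; on an 18-wide board the same shape becomes [0, 1, 18].
  #
  #   Board.smear([0, 1, 5], 18)  #=> [0, 1, 18]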
# shift these squares to the upper left of a grid of src_width to a grid of dest_width
def snug(squares, src_width \\ 5, dest_width \\ 5) do
IO.puts(inspect(squares))
min_x = squares
|> Enum.map(fn position -> Integer.mod(position, src_width) end)
|> Enum.min()
min_y = squares
|> Enum.map(fn position -> Integer.floor_div(position, src_width) end)
|> Enum.min()
shifted_left = Enum.map(squares, fn position -> position - min_x end)
Enum.map(shifted_left, fn position ->
Integer.mod(position, src_width) +
(Integer.floor_div(position, src_width) - min_y) * dest_width
end)
end
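  # For example (sketch): [6, 7, 11] sit one row down and one column in on the
  # default 5-wide grid; snugged to the upper left they become [0, 1, 5].
  #
  #   Board.snug([6, 7, 11])  #=> [0, 1, 5]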
end
| 34.549451 | 108 | 0.624682 |
f7e83568d4feac38510915e24c89989b1cf2d3cb | 6,854 | ex | Elixir | apps/mishka_content/lib/blog/post.ex | mojtaba-naserei/mishka-cms | 1f31f61347bab1aae6ba0d47c5515a61815db6c9 | [
"Apache-2.0"
] | 35 | 2021-06-26T09:05:50.000Z | 2022-03-30T15:41:22.000Z | apps/mishka_content/lib/blog/post.ex | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | 101 | 2021-01-01T09:54:07.000Z | 2022-03-28T10:02:24.000Z | apps/mishka_content/lib/blog/post.ex | iArazar/mishka-cms | 8b579101d607d91e80834527c1508fe5f4ceefef | [
"Apache-2.0"
] | 8 | 2021-01-17T17:08:07.000Z | 2022-03-11T16:12:06.000Z | defmodule MishkaContent.Blog.Post do
alias MishkaDatabase.Schema.MishkaContent.Blog.Post
alias MishkaContent.Blog.Like, as: UserLiked
import Ecto.Query
use MishkaDatabase.CRUD,
module: Post,
error_atom: :post,
repo: MishkaDatabase.Repo
@type data_uuid() :: Ecto.UUID.t
@type record_input() :: map()
@type error_tag() :: :post
@type repo_data() :: Ecto.Schema.t()
@type repo_error() :: Ecto.Changeset.t()
@behaviour MishkaDatabase.CRUD
def subscribe do
Phoenix.PubSub.subscribe(MishkaHtml.PubSub, "blog_post")
end
@doc delegate_to: {MishkaDatabase.CRUD, :crud_add, 1}
def create(attrs) do
crud_add(attrs)
|> notify_subscribers(:post)
end
@doc delegate_to: {MishkaDatabase.CRUD, :crud_add, 1}
def create(attrs, allowed_fields) do
crud_add(attrs, allowed_fields)
|> notify_subscribers(:post)
end
@doc delegate_to: {MishkaDatabase.CRUD, :crud_edit, 1}
def edit(attrs) do
crud_edit(attrs)
|> notify_subscribers(:post)
end
@doc delegate_to: {MishkaDatabase.CRUD, :crud_edit, 1}
def edit(attrs, allowed_fields) do
crud_edit(attrs, allowed_fields)
|> notify_subscribers(:post)
end
@doc delegate_to: {MishkaDatabase.CRUD, :crud_delete, 1}
def delete(id) do
crud_delete(id)
|> notify_subscribers(:post)
end
@doc delegate_to: {MishkaDatabase.CRUD, :crud_get_record, 1}
def show_by_id(id) do
crud_get_record(id)
end
@spec show_by_alias_link(String.t()) ::
{:error, :get_record_by_field, error_tag()} | {:ok, :get_record_by_field, error_tag(), repo_data()}
def show_by_alias_link(alias_link) do
crud_get_by_field("alias_link", alias_link)
end
@spec posts([{:conditions, {integer() | String.t(), integer() | String.t()}} | {:filters, map()} | {:user_id, nil | data_uuid()}, ...]) ::
Scrivener.Page.t()
def posts(conditions: {page, page_size}, filters: filters, user_id: user_id) when is_binary(user_id) or is_nil(user_id) do
user_id = if(!is_nil(user_id), do: user_id, else: Ecto.UUID.generate)
from(
post in Post,
join: cat in assoc(post, :blog_categories),
left_join: like in assoc(post, :blog_likes),
left_join: liked_user in subquery(UserLiked.user_liked),
on: liked_user.user_id == ^user_id and liked_user.post_id == post.id
)
|> convert_filters_to_where(filters)
|> fields()
|> MishkaDatabase.Repo.paginate(page: page, page_size: page_size)
rescue
db_error ->
MishkaContent.db_content_activity_error("blog_post", "read", db_error)
      %Scrivener.Page{entries: [], page_number: 1, page_size: page_size, total_entries: 0, total_pages: 1}
end
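  # Illustrative call (a sketch; the filter values are assumptions): first page
  # of 20 posts whose title matches "elixir", scoped to the current user so
  # `liked_user` reflects that user's likes.
  #
  #   MishkaContent.Blog.Post.posts(
  #     conditions: {1, 20},
  #     filters: %{title: "elixir"},
  #     user_id: current_user_id
  #   )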
defp convert_filters_to_where(query, filters) do
Enum.reduce(filters, query, fn {key, value}, query ->
case key do
:category_title ->
like = "%#{value}%"
from([post, cat, like] in query, where: like(cat.title, ^like))
:title ->
like = "%#{value}%"
from([post, cat, like] in query, where: like(post.title, ^like))
_ -> from([post, cat, like] in query, where: field(post, ^key) == ^value)
end
end)
end
defp fields(query) do
from [post, cat, like, liked_user] in query,
order_by: [desc: post.inserted_at, desc: post.id],
group_by: [post.id, cat.id, like.post_id, liked_user.post_id, liked_user.user_id],
select: %{
category_id: cat.id,
category_title: cat.title,
category_status: cat.status,
category_alias_link: cat.alias_link,
category_short_description: cat.short_description,
category_main_image: cat.main_image,
id: post.id,
title: post.title,
short_description: post.short_description,
main_image: post.main_image,
status: post.status,
alias_link: post.alias_link,
priority: post.priority,
inserted_at: post.inserted_at,
updated_at: post.updated_at,
unpublish: post.unpublish,
like_count: count(like.id),
liked_user: liked_user
}
end
@spec post(String.t(), String.t() | atom()) :: map() | nil
def post(alias_link, status) do
    # When this project has many like records, the like counter should probably be separated out
    # or computed with a lazy query instead of this.
    # Post comments were separated because the comment module is used across the whole project, not only for posts.
from(post in Post,
where: post.alias_link == ^alias_link and post.status == ^status,
join: cat in assoc(post, :blog_categories),
where: cat.status == ^status,
left_join: author in assoc(post, :blog_authors),
left_join: like in assoc(post, :blog_likes),
left_join: user in assoc(author, :users),
left_join: tag_map in assoc(post, :blog_tags_mappers),
left_join: tag in assoc(tag_map, :blog_tags),
preload: [blog_categories: cat, blog_authors: {author, users: user}, blog_tags: tag],
order_by: [desc: post.inserted_at, desc: post.id],
select: map(post, [
:id, :title, :short_description, :main_image, :header_image, :description, :status,
:priority, :location, :unpublish, :alias_link, :meta_keywords,
:meta_description, :custom_title, :robots, :post_visibility, :allow_commenting,
:allow_liking, :allow_printing, :allow_reporting, :allow_social_sharing,
:allow_bookmarking, :show_hits, :show_time, :show_authors, :show_category,
:show_links, :show_location, :category_id, :inserted_at, :updated_at,
blog_categories: [:id, :title, :short_description, :main_image, :header_image, :description, :status,
:sub, :alias_link, :meta_keywords, :meta_description, :custom_title, :robots,
:category_visibility, :allow_commenting, :allow_liking, :allow_printing,
:allow_reporting, :allow_social_sharing, :allow_subscription,
:allow_bookmarking, :allow_notif, :show_hits, :show_time, :show_authors,
:show_category, :show_links, :show_location],
blog_authors: [
:id, :user_id, :post_id,
users: [:id, :full_name, :username]
],
blog_tags: [
:id, :title, :alias_link, :custom_title
]
]
))
|> MishkaDatabase.Repo.one()
rescue
db_error ->
MishkaContent.db_content_activity_error("blog_post", "read", db_error)
nil
end
@spec allowed_fields(:atom | :string) :: nil | list
def allowed_fields(:atom), do: Post.__schema__(:fields)
def allowed_fields(:string), do: Post.__schema__(:fields) |> Enum.map(&Atom.to_string/1)
@spec notify_subscribers(tuple(), atom() | String.t()) :: tuple() | map()
def notify_subscribers({:ok, _, :post, repo_data} = params, type_send) do
Phoenix.PubSub.broadcast(MishkaHtml.PubSub, "blog_post", {type_send, :ok, repo_data})
params
end
def notify_subscribers(params, _), do: params
end
| 36.073684 | 140 | 0.671725 |
f7e861d14b720d594c42efe9ab456f4daf820cbe | 1,834 | ex | Elixir | lib/mix/cloak.ex | tizpuppi/cloak | cdeae97c69f7c54aeadf0921db30dc411d7a9938 | [
"MIT"
] | null | null | null | lib/mix/cloak.ex | tizpuppi/cloak | cdeae97c69f7c54aeadf0921db30dc411d7a9938 | [
"MIT"
] | null | null | null | lib/mix/cloak.ex | tizpuppi/cloak | cdeae97c69f7c54aeadf0921db30dc411d7a9938 | [
"MIT"
] | null | null | null | defmodule Mix.Cloak do
@moduledoc false
# Helpers for building Mix tasks for Cloak
# %{ app => %{repo: repo, schemas: schemas}}
def parse_config(args) do
{opts, _, _} =
OptionParser.parse(args,
aliases: [s: :schema, r: :repo],
strict: [schema: :string, repo: :string]
)
opts
|> Enum.into(%{})
|> do_parse_config()
end
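  # For example (sketch; the app and module names are made up, and the modules
  # must already be loaded since `to_module/1` uses `String.to_existing_atom/1`):
  #
  #   parse_config(["-r", "MyApp.Repo", "-s", "MyApp.Accounts.User"])
  #   #=> %{my_app: %{repo: MyApp.Repo, schemas: [MyApp.Accounts.User]}}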
defp do_parse_config(%{repo: repo, schema: schema}) do
%{current_app() => %{repo: to_module(repo), schemas: [to_module(schema)]}}
end
defp do_parse_config(_argv) do
get_apps()
|> Enum.map(&get_app_config/1)
|> Enum.into(%{})
|> validate_config!()
end
defp get_apps do
apps = Mix.Project.apps_paths()
if apps do
Map.keys(apps)
else
[current_app()]
end
end
defp get_app_config(app) do
{app,
%{
repo: Application.get_env(app, :cloak_repo),
schemas: Application.get_env(app, :cloak_schemas)
}}
end
defp current_app do
Mix.Project.config()[:app]
end
defp validate_config!(config) do
invalid_configs = Enum.filter(config, &(!valid?(&1)))
unless length(invalid_configs) == 0 do
apps = Keyword.keys(invalid_configs)
raise Mix.Error, """
warning: no configured Ecto repos or schemas found in any of the apps: #{inspect(apps)}
You can avoid this by passing the -r and -s flags or by setting the repo and schemas
in your config/config.exs:
config #{inspect(hd(apps))},
cloak_repo: ...,
cloak_schemas: [...]
"""
end
config
end
defp valid?({_app, %{repo: repo, schemas: [schema | _]}})
when is_atom(repo) and is_atom(schema),
do: true
defp valid?(_config), do: false
defp to_module(name) do
String.to_existing_atom("Elixir." <> name)
end
end
| 22.365854 | 93 | 0.60578 |