hexsha
stringlengths 40
40
| size
int64 2
991k
| ext
stringclasses 2
values | lang
stringclasses 1
value | max_stars_repo_path
stringlengths 4
208
| max_stars_repo_name
stringlengths 6
106
| max_stars_repo_head_hexsha
stringlengths 40
40
| max_stars_repo_licenses
sequence | max_stars_count
int64 1
33.5k
⌀ | max_stars_repo_stars_event_min_datetime
stringlengths 24
24
⌀ | max_stars_repo_stars_event_max_datetime
stringlengths 24
24
⌀ | max_issues_repo_path
stringlengths 4
208
| max_issues_repo_name
stringlengths 6
106
| max_issues_repo_head_hexsha
stringlengths 40
40
| max_issues_repo_licenses
sequence | max_issues_count
int64 1
16.3k
⌀ | max_issues_repo_issues_event_min_datetime
stringlengths 24
24
⌀ | max_issues_repo_issues_event_max_datetime
stringlengths 24
24
⌀ | max_forks_repo_path
stringlengths 4
208
| max_forks_repo_name
stringlengths 6
106
| max_forks_repo_head_hexsha
stringlengths 40
40
| max_forks_repo_licenses
sequence | max_forks_count
int64 1
6.91k
⌀ | max_forks_repo_forks_event_min_datetime
stringlengths 24
24
⌀ | max_forks_repo_forks_event_max_datetime
stringlengths 24
24
⌀ | content
stringlengths 2
991k
| avg_line_length
float64 1
36k
| max_line_length
int64 1
977k
| alphanum_fraction
float64 0
1
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
73c013354a08e63f76dab81b814a37ee470819b1 | 1,524 | exs | Elixir | mix.exs | RxAssim/polymorphic_embed | d8712ae6d4327beaa873f7d994f977fd5172df1d | [
"Apache-2.0"
] | null | null | null | mix.exs | RxAssim/polymorphic_embed | d8712ae6d4327beaa873f7d994f977fd5172df1d | [
"Apache-2.0"
] | null | null | null | mix.exs | RxAssim/polymorphic_embed | d8712ae6d4327beaa873f7d994f977fd5172df1d | [
"Apache-2.0"
] | null | null | null | defmodule PolymorphicEmbed.MixProject do
use Mix.Project
@version "1.7.0"
def project do
[
app: :polymorphic_embed,
elixir: "~> 1.9",
deps: deps(),
aliases: aliases(),
elixirc_paths: elixirc_paths(Mix.env()),
# Hex
version: @version,
package: package(),
description: "Polymorphic embeds in Ecto",
# ExDoc
name: "Polymorphic Embed",
source_url: "https://github.com/mathieuprog/polymorphic_embed",
docs: docs()
]
end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[
{:ecto, "~> 3.6"},
{:jason, "~> 1.2"},
{:phoenix_html, "~> 2.14 or ~> 3.0 ", optional: true},
{:ex_doc, "~> 0.23", only: :dev},
{:ecto_sql, "~> 3.6", only: :test},
{:postgrex, "~> 0.15", only: :test},
{:query_builder, "~> 0.19.2", only: :test},
{:phoenix_ecto, "~> 4.4", only: :test}
]
end
defp aliases do
[
test: [
"ecto.create --quiet",
"ecto.rollback --all",
"ecto.migrate",
"test"
]
]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
defp package do
[
licenses: ["Apache 2.0"],
maintainers: ["Mathieu Decaffmeyer"],
links: %{"GitHub" => "https://github.com/mathieuprog/polymorphic_embed"}
]
end
defp docs do
[
main: "readme",
extras: ["README.md"],
source_ref: "v#{@version}"
]
end
end
| 20.32 | 78 | 0.527559 |
73c02decd3af8e096eab889efc848caa9437d391 | 1,354 | ex | Elixir | benchmark/lib/benchmark.ex | k32/map_sets | b1e846e74b2a2adc95ce408ef3d3e4c0a022fd1c | [
"Unlicense"
] | 12 | 2018-06-13T21:14:05.000Z | 2021-08-08T03:06:33.000Z | benchmark/lib/benchmark.ex | k32/map_sets | b1e846e74b2a2adc95ce408ef3d3e4c0a022fd1c | [
"Unlicense"
] | 5 | 2019-01-11T20:41:12.000Z | 2019-03-13T21:43:02.000Z | benchmark/lib/benchmark.ex | k32/map_sets | b1e846e74b2a2adc95ce408ef3d3e4c0a022fd1c | [
"Unlicense"
] | 2 | 2020-02-07T06:20:14.000Z | 2021-02-12T17:51:46.000Z | # I don't know Elixir, so the below code probably doesn't make sense
l1 = Enum.to_list(1..10_000)
l2 = Enum.to_list(10_001..20_000)
l3 = Enum.to_list(5_000..15_000)
Benchee.run(%{ 'sets from_list' => fn -> :sets.from_list(l1) end,
'map_sets from_list' => fn -> :map_sets.from_list(l1) end
},
time: 10,
memory_time: 2,
formatters: [ Benchee.Formatters.Console,
{Benchee.Formatters.CSV, file: 'from_list.csv'}
]
)
defmodule MapSets do
def run(name, fun, sets0) do
sets = Enum.map sets0, &:sets.from_list/1
map_sets = Enum.map sets0, &:map_sets.from_list/1
Benchee.run(%{ 'sets ' ++ name => fn -> :erlang.apply(:sets, fun, sets) end,
'map_sets ' ++ name => fn -> :erlang.apply(:map_sets, fun, map_sets) end
},
time: 10,
memory_time: 2,
formatters: [ Benchee.Formatters.HTML,
Benchee.Formatters.Console,
{Benchee.Formatters.CSV, file: name ++ '.csv'}
]
)
end
end
MapSets.run('empty intersection', :intersection, [l1, l2])
MapSets.run('intersection', :intersection, [l1, l3])
MapSets.run('union', :union, [l1, l3])
| 35.631579 | 91 | 0.521418 |
73c0319152bf494532f91a94ee85cbd6514aadb3 | 2,916 | ex | Elixir | lib/bamboo/adapters/send_grid_helper.ex | ChrisYammine/bamboo | 4b61b02bab965be254ced3f503981888616656a9 | [
"MIT"
] | null | null | null | lib/bamboo/adapters/send_grid_helper.ex | ChrisYammine/bamboo | 4b61b02bab965be254ced3f503981888616656a9 | [
"MIT"
] | null | null | null | lib/bamboo/adapters/send_grid_helper.ex | ChrisYammine/bamboo | 4b61b02bab965be254ced3f503981888616656a9 | [
"MIT"
] | 1 | 2020-01-02T20:00:00.000Z | 2020-01-02T20:00:00.000Z | defmodule Bamboo.SendGridHelper do
@moduledoc """
Functions for using features specific to Sendgrid.
## Example
email
|> with_template("80509523-83de-42b6-a2bf-54b7513bd2aa")
|> substitute("%name%", "Jon Snow")
|> substitute("%location%", "Westeros")
"""
alias Bamboo.Email
@field_name :send_grid_template
@categories :categories
@doc """
Specify the template for SendGrid to use for the context of the substitution
tags.
## Example
email
|> with_template("80509523-83de-42b6-a2bf-54b7513bd2aa")
"""
def with_template(email, template_id) do
template = Map.get(email.private, @field_name, %{})
email
|> Email.put_private(@field_name, set_template(template, template_id))
end
@doc """
Add a tag to the list of substitutions in the SendGrid template.
The tag must be a `String.t` due to SendGrid using special characters to wrap
tags in the template.
## Example
email
|> substitute("%name%", "Jon Snow")
"""
def substitute(email, tag, value) do
if is_binary(tag) do
template = Map.get(email.private, @field_name, %{})
email
|> Email.put_private(@field_name, add_substitution(template, tag, value))
else
raise "expected the tag parameter to be of type binary, got #{tag}"
end
end
@doc """
Add dynamic data to be replaced in the Dynamic SendGrid template.
The key will replace anything with the same key name in handlebars
## Example
email
|> dynamic_data("body", "<h1>Wow such body</h1>")
"""
def dynamic_data(email, key, value) do
template = Map.get(email.private, @field_name, %{})
email
|> Email.put_private(@field_name, add_dynamic_data(template, key, value))
end
@doc """
An array of category names for this email. A maximum of 10 categories can be assigned to an email.
Duplicate categories will be ignored and only unique entries will be sent.
## Example
email
|> with_categories("campaign-12345")
"""
def with_categories(email, categories) when is_list(categories) do
categories =
(Map.get(email.private, @categories, []) ++ categories)
|> MapSet.new()
|> MapSet.to_list()
email
|> Email.put_private(@categories, Enum.slice(categories, 0, 10))
end
def with_categories(_email, _categories) do
raise "expected a list of category strings"
end
defp set_template(template, template_id) do
template
|> Map.merge(%{template_id: template_id})
end
defp add_substitution(template, tag, value) do
template
|> Map.update(:substitutions, %{tag => value}, fn substitutions ->
Map.merge(substitutions, %{tag => value})
end)
end
defp add_dynamic_data(template, key, value) do
template
|> Map.update(:dynamic_data, %{key => value}, fn dynamic_data ->
Map.merge(dynamic_data, %{key => value})
end)
end
end
| 25.356522 | 100 | 0.665295 |
73c038421e0b2803e3dd5874b6751d7e967567bb | 380 | exs | Elixir | clients/poly/test/test_helper.exs | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/poly/test/test_helper.exs | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/poly/test/test_helper.exs | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | ExUnit.start()
defmodule GoogleApi.Poly.V1.TestHelper do
defmacro __using__(opts) do
quote do
use ExUnit.Case, unquote(opts)
import GoogleApi.Poly.V1.TestHelper
end
end
def for_scope(scopes) when is_list(scopes), do: for_scope(Enum.join(scopes, " "))
def for_scope(scope) do
{:ok, token} = Goth.Token.for_scope(scope)
token.token
end
end
| 20 | 83 | 0.692105 |
73c0597b73a724c9c8e0423ba7c38674fb5257e6 | 7,253 | exs | Elixir | test/tortoise_integration_test.exs | ConnorRigby/creep | 91d503187fda50b20788244487ca7d412373893c | [
"ISC"
] | 13 | 2019-09-21T04:48:09.000Z | 2022-01-26T13:13:03.000Z | test/tortoise_integration_test.exs | ConnorRigby/creep | 91d503187fda50b20788244487ca7d412373893c | [
"ISC"
] | null | null | null | test/tortoise_integration_test.exs | ConnorRigby/creep | 91d503187fda50b20788244487ca7d412373893c | [
"ISC"
] | 1 | 2020-03-03T16:50:42.000Z | 2020-03-03T16:50:42.000Z | defmodule Creep.TortoiseIntegrationTest do
use ExUnit.Case, async: true
defmodule IntegrationHandler do
@behaviour Tortoise.Handler
@impl Tortoise.Handler
def init(args) do
test_pid = Keyword.fetch!(args, :pid)
{:ok, %{test_pid: test_pid}}
end
@impl Tortoise.Handler
def connection(status, state) do
send(state.test_pid, {:connection, status})
{:ok, state}
end
@impl Tortoise.Handler
def handle_message(topic_levels, payload, state) do
send(state.test_pid, {:message, topic_levels, payload})
{:ok, state}
end
@impl Tortoise.Handler
def subscription(status, topic_filter, state) do
send(state.test_pid, {:subscription, status, topic_filter})
{:ok, state}
end
@impl Tortoise.Handler
def terminate(reason, state) do
send(state.test_pid, {:terminate, reason})
{:ok, state}
end
end
setup do
broker_id = :rand.uniform() |> to_string()
client_id = :rand.uniform() |> to_string()
port = Enum.random(60000..60123)
{:ok, pid} =
Creep.start_link(
broker_id: broker_id,
packet_processor: Creep.InMemProcessor,
transports: [
{Creep.RanchTransport, [port: port]}
]
)
{:ok, %{broker: pid, broker_id: broker_id, client_id: client_id, port: port}}
end
describe "connection" do
test "connect", %{client_id: client_id, port: port} do
{:ok, _pid} =
Tortoise.Supervisor.start_child(
client_id: client_id,
handler: {IntegrationHandler, [pid: self()]},
server: {Tortoise.Transport.Tcp, host: 'localhost', port: port},
subscriptions: []
)
assert_receive {:connection, :up}
end
test "connect with subscriptions", %{client_id: client_id, port: port} do
{:ok, _pid} =
Tortoise.Supervisor.start_child(
client_id: client_id,
handler: {IntegrationHandler, [pid: self()]},
server: {Tortoise.Transport.Tcp, host: 'localhost', port: port},
subscriptions: [{"a/b", 0}, {"c/d", 1}, {"e/f", 2}]
)
assert_receive {:connection, :up}
assert_receive {:subscription, :up, "a/b"}
assert_receive {:subscription, :up, "c/d"}
assert_receive {:subscription, :up, "e/f"}
end
test "connect w/ username password", %{client_id: client_id, port: port} do
{:ok, _pid} =
Tortoise.Supervisor.start_child(
client_id: client_id,
username: "connor",
password: "password",
handler: {IntegrationHandler, [pid: self()]},
server: {Tortoise.Transport.Tcp, host: 'localhost', port: port},
subscriptions: []
)
assert_receive {:connection, :up}
end
end
describe "publish/subscribe" do
test "subscribe", %{client_id: client_id, port: port} do
{:ok, _pid} =
Tortoise.Supervisor.start_child(
client_id: client_id,
handler: {IntegrationHandler, [pid: self()]},
server: {Tortoise.Transport.Tcp, host: 'localhost', port: port},
subscriptions: []
)
:ok = Tortoise.Connection.subscribe_sync(client_id, [{"a/b", 0}])
assert_receive {:subscription, :up, "a/b"}
:ok = Tortoise.publish(client_id, "a/b", "hello, world")
assert_receive {:message, ["a", "b"], "hello, world"}
end
test "wildcard subscribe", %{client_id: client_id, port: port} do
{:ok, _pid} =
Tortoise.Supervisor.start_child(
client_id: client_id,
handler: {IntegrationHandler, [pid: self()]},
server: {Tortoise.Transport.Tcp, host: 'localhost', port: port},
subscriptions: []
)
:ok = Tortoise.Connection.subscribe_sync(client_id, [{"a/#", 0}])
assert_receive {:subscription, :up, "a/#"}
:ok = Tortoise.publish(client_id, "a/b", "hello, world")
assert_receive {:message, ["a", "b"], "hello, world"}
end
test "unsubscribe", %{client_id: client_id, port: port} do
{:ok, _pid} =
Tortoise.Supervisor.start_child(
client_id: client_id,
handler: {IntegrationHandler, [pid: self()]},
server: {Tortoise.Transport.Tcp, host: 'localhost', port: port},
subscriptions: []
)
assert_receive {:connection, :up}
:ok = Tortoise.Connection.subscribe_sync(client_id, [{"a/b", 0}])
assert_receive {:subscription, :up, "a/b"}
:ok = Tortoise.Connection.unsubscribe_sync(client_id, ["a/b"])
assert_receive {:subscription, :down, "a/b"}
:ok = Tortoise.publish(client_id, "a/b", "hello, world")
refute_receive {:message, ["a", "b"], "hello, world"}, 200
# check for typos
refute_received _
end
test "wildcard unsubscribe", %{client_id: client_id, port: port} do
{:ok, _pid} =
Tortoise.Supervisor.start_child(
client_id: client_id,
handler: {IntegrationHandler, [pid: self()]},
server: {Tortoise.Transport.Tcp, host: 'localhost', port: port},
subscriptions: []
)
:ok = Tortoise.Connection.subscribe_sync(client_id, [{"a/b", 0}, {"a/c", 0}])
assert_receive {:connection, :up}
assert_receive {:subscription, :up, "a/b"}
assert_receive {:subscription, :up, "a/c"}
:ok = Tortoise.Connection.unsubscribe_sync(client_id, ["a/#"])
assert_receive {:subscription, :down, "a/#"}
:ok = Tortoise.publish_sync(client_id, "a/b", "hello, world(b)")
:ok = Tortoise.publish_sync(client_id, "a/c", "hello, world(c)")
refute_receive {:message, ["a", "b"], "hello, world(b)"}, 200
refute_receive {:message, ["a", "c"], "hello, world(c)"}, 200
# check for typos
refute_received _
end
end
describe "misc" do
test "ping", %{client_id: client_id, port: port} do
{:ok, _pid} =
Tortoise.Supervisor.start_child(
client_id: client_id,
handler: {IntegrationHandler, [pid: self()]},
server: {Tortoise.Transport.Tcp, host: 'localhost', port: port},
subscriptions: []
)
assert_receive {:connection, :up}
assert {:ok, _} = Tortoise.Connection.ping_sync(client_id, 1000)
end
test "disconnect", %{client_id: client_id, port: port} do
{:ok, _pid} =
Tortoise.Supervisor.start_child(
client_id: client_id,
handler: {IntegrationHandler, [pid: self()]},
server: {Tortoise.Transport.Tcp, host: 'localhost', port: port},
subscriptions: [],
will: %Tortoise.Package.Publish{topic: "a/b", payload: "goodbye"}
)
{:ok, _pid} =
Tortoise.Supervisor.start_child(
client_id: "disconnect_test",
handler: {IntegrationHandler, [pid: self()]},
server: {Tortoise.Transport.Tcp, host: 'localhost', port: port},
subscriptions: []
)
assert_receive {:connection, :up}
assert_receive {:connection, :up}
:ok = Tortoise.Connection.disconnect(client_id)
assert_receive {:connection, :terminating}
assert_receive {:terminate, :normal}
assert_receive {:message, ["a", "b"], "goodbye"}
refute_receive _
end
end
end
| 32.379464 | 83 | 0.597684 |
73c063d0495bcd184eaed1ca36699f0c69cc7e89 | 426 | exs | Elixir | elixir/priv/repo/migrations/20210209213330_create_users_companies.exs | XanderHarris/web-homework | d0b5c2bbbd055341d51f02f824a2020188f6c094 | [
"MIT"
] | null | null | null | elixir/priv/repo/migrations/20210209213330_create_users_companies.exs | XanderHarris/web-homework | d0b5c2bbbd055341d51f02f824a2020188f6c094 | [
"MIT"
] | null | null | null | elixir/priv/repo/migrations/20210209213330_create_users_companies.exs | XanderHarris/web-homework | d0b5c2bbbd055341d51f02f824a2020188f6c094 | [
"MIT"
] | null | null | null | defmodule Homework.Repo.Migrations.CreateUsersCompanies do
use Ecto.Migration
def change do
create table(:users_companies, primary_key: false) do
add(:id, :uuid, primary_key: true)
add(:user_id, references(:users, type: :uuid, on_delete: :nothing))
add(:company_id, references(:companies, type: :uuid, on_delete: :nothing))
add(:primary_company, :boolean)
timestamps()
end
end
end | 30.428571 | 80 | 0.697183 |
73c06def3700f6a96b35b481d043b8cd711d8bf8 | 1,443 | exs | Elixir | apps/ello_serve/mix.exs | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 16 | 2017-06-21T21:31:20.000Z | 2021-05-09T03:23:26.000Z | apps/ello_serve/mix.exs | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 25 | 2017-06-07T12:18:28.000Z | 2018-06-08T13:27:43.000Z | apps/ello_serve/mix.exs | ello/apex | 4acb096b3ce172ff4ef9a51e5d068d533007b920 | [
"MIT"
] | 3 | 2018-06-14T15:34:07.000Z | 2022-02-28T21:06:13.000Z | defmodule Ello.Serve.Mixfile do
use Mix.Project
def project do
[app: :ello_serve,
version: "0.0.1",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.2",
elixirc_paths: elixirc_paths(Mix.env),
elixirc_options: [warnings_as_errors: Mix.env == :test],
compilers: [:phoenix, :gettext] ++ Mix.compilers,
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps()]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[mod: {Ello.Serve, []},
extra_applications: [:logger]]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "web", "test/support"]
defp elixirc_paths(_), do: ["lib", "web"]
defp deps do
[
{:phoenix, "~> 1.3.3"},
{:plug_cowboy, "~> 1.0"},
{:phoenix_pubsub, "~> 1.0"},
{:phoenix_html, "~> 2.10"},
{:gettext, "~> 0.11"},
{:cowboy, "~> 1.1"},
{:jason, "~> 1.0"},
{:html_sanitize_ex, "~> 1.0.0"},
{:timex, "~> 3.0"},
{:ello_core, in_umbrella: true},
{:ello_auth, in_umbrella: true},
{:ello_search, in_umbrella: true},
{:ello_stream, in_umbrella: true},
{:ello_v2, in_umbrella: true}, # for image_url generation
]
end
end
| 27.75 | 63 | 0.57034 |
73c070c9f0d221954ef8605bfac2931450b606ce | 1,620 | ex | Elixir | lib/bus_car/repo/modules.ex | elbow-jason/bus_car | cd70b9f6b8dd404eb518f642491e0a4430e2d9f9 | [
"MIT"
] | 18 | 2016-09-25T21:36:39.000Z | 2021-02-17T15:09:52.000Z | lib/bus_car/repo/modules.ex | elbow-jason/bus_car | cd70b9f6b8dd404eb518f642491e0a4430e2d9f9 | [
"MIT"
] | 7 | 2016-12-08T05:01:23.000Z | 2018-04-05T08:55:11.000Z | lib/bus_car/repo/modules.ex | elbow-jason/bus_car | cd70b9f6b8dd404eb518f642491e0a4430e2d9f9 | [
"MIT"
] | 1 | 2020-04-24T02:10:15.000Z | 2020-04-24T02:10:15.000Z | defmodule BusCar.Repo.Modules do
defmacro define_explain(mod) do
quote do
mod = unquote(mod)
name = BusCar.Repo.Helpers.concat_names(mod, Explain)
defmodule name do
use BusCar.Repo.Explain, repo: mod
end
end
end
defmacro define_cluster(mod) do
quote do
mod = unquote(mod)
name = BusCar.Repo.Helpers.concat_names(mod, Cluster)
defmodule name do
use BusCar.Repo.Cluster, repo: mod
end
end
end
defmacro define_index(mod) do
quote do
mod = unquote(mod)
name = BusCar.Repo.Helpers.concat_names(mod, Index)
defmodule name do
use BusCar.Repo.Index, repo: mod
end
end
end
defmacro define_cat(mod) do
quote do
mod = unquote(mod)
name = BusCar.Repo.Helpers.concat_names(mod, Cat)
defmodule name do
use BusCar.Repo.Cat, repo: mod
end
end
end
defmacro define_config(mod, otp_app) do
quote do
mod = unquote(mod)
name = BusCar.Repo.Helpers.concat_names(mod, Config)
defmodule name do
use BusCar.Repo.Config, otp_app: unquote(otp_app)
end
end
end
defmacro define_api(mod, otp_app) do
quote do
mod = unquote(mod)
name = BusCar.Repo.Helpers.concat_names(mod, Api)
defmodule name do
use BusCar.Repo.Api, otp_app: unquote(otp_app)
end
end
end
defmacro define_search(mod) do
quote do
mod = unquote(mod)
name = BusCar.Repo.Helpers.concat_names(mod, Search)
defmodule name do
use BusCar.Repo.Search, repo: mod
end
end
end
end
| 21.891892 | 59 | 0.632099 |
73c094455049c2a71d3c9f157ee6bad50400fd7e | 1,145 | exs | Elixir | config/config.exs | shamil614/elastic-jsonapi | d5a8703af2731096e788409323e9d2ae3cc58c7f | [
"MIT"
] | null | null | null | config/config.exs | shamil614/elastic-jsonapi | d5a8703af2731096e788409323e9d2ae3cc58c7f | [
"MIT"
] | null | null | null | config/config.exs | shamil614/elastic-jsonapi | d5a8703af2731096e788409323e9d2ae3cc58c7f | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
# This configuration is loaded before any dependency and is restricted
# to this project. If another project depends on this project, this
# file won't be loaded nor affect the parent project. For this reason,
# if you want to provide default values for your application for
# third-party users, it should be done in your "mix.exs" file.
# You can configure your application as:
#
# config :elastic_jsonapi, key: :value
#
# and access this configuration in your application as:
#
# Application.get_env(:elastic_jsonapi, :key)
#
# You can also configure a third-party app:
#
# config :logger, level: :info
#
# It is also possible to import configuration files, relative to this
# directory. For example, you can emulate configuration per environment
# by uncommenting the line below and defining dev.exs, test.exs and such.
# Configuration from the imported file will override the ones defined
# here (which is why it is important to import them last).
#
# import_config "#{Mix.env()}.exs"
| 36.935484 | 73 | 0.754585 |
73c096ee5f6f500f6187bf8a4de54cc8c7fb69b5 | 1,134 | ex | Elixir | lib/ex_admin/themes/admin_lte2.ex | devonestes/ex_admin | e135ae7c28de78fc87baf519ff8a32da12e8bf66 | [
"MIT"
] | 1,347 | 2015-10-05T18:23:49.000Z | 2022-01-09T18:38:36.000Z | lib/ex_admin/themes/admin_lte2.ex | leonardzhou/ex_admin | c241e956503c548a472e3ee89751e64a16477638 | [
"MIT"
] | 402 | 2015-10-03T13:53:32.000Z | 2021-07-08T09:52:22.000Z | lib/ex_admin/themes/admin_lte2.ex | leonardzhou/ex_admin | c241e956503c548a472e3ee89751e64a16477638 | [
"MIT"
] | 333 | 2015-10-12T22:56:57.000Z | 2021-05-26T18:40:24.000Z | defmodule ExAdmin.Theme.AdminLte2 do
@moduledoc false
use Xain
import ExAdmin.Utils
# import ExAdmin.ViewHelpers
import ExAdmin.Form, only: [required_abbr: 1]
@name "admin_lte2"
def name, do: @name
def get_form_error_class(error) do
unless error == "", do: ".has-error", else: ""
end
def build_form_error(error) do
label ".control-label" do
i(".fa.fa-times-circle-o")
text(" #{ExAdmin.Form.error_messages(error)}")
end
end
def wrap_item_type(:boolean, label, ext_name, contents, error, _required) do
error = get_form_error_class(error)
div ".col-sm-offset-2.col-sm-10#{error}" do
div ".checkbox" do
label do
contents.(ext_name)
humanize(label) |> text
end
end
end
end
def wrap_item_type(_type, label, ext_name, contents, error, required) do
error = get_form_error_class(error)
markup do
label ".col-sm-2.control-label", for: ext_name do
text(humanize(label))
required_abbr(required)
end
div ".col-sm-10#{error}" do
contents.(ext_name)
end
end
end
end
| 22.235294 | 78 | 0.634921 |
73c0ca9d422a3c569bef9931ca2561398efc2efb | 2,042 | exs | Elixir | examples/example.exs | oivoodoo/exfacebook | 42d960336c4e432216ae592925519f93fc817eaf | [
"MIT"
] | 18 | 2016-07-30T00:04:21.000Z | 2020-01-13T09:17:54.000Z | examples/example.exs | oivoodoo/exfacebook | 42d960336c4e432216ae592925519f93fc817eaf | [
"MIT"
] | 7 | 2016-08-10T08:14:59.000Z | 2017-02-15T09:36:43.000Z | examples/example.exs | oivoodoo/exfacebook | 42d960336c4e432216ae592925519f93fc817eaf | [
"MIT"
] | 6 | 2016-08-10T07:28:15.000Z | 2017-03-31T11:14:57.000Z | defmodule Exfacebook.DevTest do
require Logger
alias Exfacebook.Api
def get_connections do
params = %{fields: "id,name", access_token: System.get_env("FACEBOOK_ACCESS_TOKEN")}
{:ok, collection} = Api.get_connections(:me, :feed, params)
Logger.info "[Exfacebook] me feed: #{inspect(collection)}"
end
def get_object do
params = %{fields: "id,name", access_token: System.get_env("FACEBOOK_ACCESS_TOKEN")}
{:ok, object} = Api.get_object(:me, params)
Logger.info "[Exfacebook] me object: #{inspect(object)}"
end
def list_subscriptions do
params = %{fields: "id,name"}
{:ok, collection} = Api.list_subscriptions(params)
Logger.info "[Exfacebook] subscriptions: #{inspect(collection)}"
end
def gen_list_subscriptions do
{:ok, pid} = Exfacebook.start_link
params = %{fields: "id,name"}
{:ok, collection} = Exfacebook.list_subscriptions(pid, params)
Logger.info "[Exfacebook] subscriptions: #{inspect(collection)}"
end
def subscribe do
{:ok, pid} = Exfacebook.start_link
response = Exfacebook.subscribe(pid, "id-1", "friends, feed", "http://www.example.com/facebook/updates", "token-123")
Logger.info "[Exfacebook] RESPONSE: #{inspect(response)}"
end
def unsubscribe do
{:ok, pid} = Exfacebook.start_link
response = Exfacebook.unsubscribe(pid, "id-1")
Logger.info "[Exfacebook] RESPONSE: #{inspect(response)}"
end
end
if System.get_env("FACEBOOK_ACCESS_TOKEN") == nil do
raise "FACEBOOK_ACCESS_TOKEN is required as env param"
end
if System.get_env("FACEBOOK_APP_ID") == nil do
raise "FACEBOOK_APP_ID is required as env param"
end
if System.get_env("FACEBOOK_APP_SECRET") == nil do
raise "FACEBOOK_APP_SECRET is required as env param"
end
Exfacebook.DevTest.list_subscriptions
Exfacebook.DevTest.gen_list_subscriptions
Exfacebook.DevTest.get_connections
Exfacebook.DevTest.get_object
try do
Exfacebook.DevTest.subscribe
rescue
_ -> "oh, error"
end
try do
Exfacebook.DevTest.unsubscribe
rescue
_ -> "oh, error"
end
| 25.848101 | 121 | 0.718413 |
73c0da461097ee8a59253a89445664b35d0ca631 | 14,500 | ex | Elixir | lib/mix/lib/mix/utils.ex | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | 1 | 2015-02-23T00:01:48.000Z | 2015-02-23T00:01:48.000Z | lib/mix/lib/mix/utils.ex | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/utils.ex | jbcrail/elixir | f30ef15d9d028a6d0f74d10c2bb320d5f8501bdb | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Utils do
@moduledoc """
Utilities used throughout Mix and tasks.
"""
@doc """
Get the mix home.
It defaults to `~/.mix` unless the `MIX_HOME`
environment variable is set.
Developers should only store entries in the
`MIX_HOME` directory which are guaranteed to
work across multiple Elixir versions, as it is
not recommended to swap the `MIX_HOME` directory
as configuration and other important data may be
stored there.
"""
def mix_home do
System.get_env("MIX_HOME") || Path.expand("~/.mix")
end
@doc """
Get all paths defined in the MIX_PATH env variable.
`MIX_PATH` may contain multiple paths. If on Windows, those
paths should be separated by `;`, if on unix systems, use `:`.
"""
def mix_paths do
if path = System.get_env("MIX_PATH") do
String.split(path, path_separator)
else
[]
end
end
defp path_separator do
case :os.type do
{:win32, _} -> ";"
{:unix, _} -> ":"
end
end
@doc """
Take a `command` name and attempts to load a module
with the command name converted to a module name
in the given `at` scope.
Returns `{:module, module}` in case a module
exists and is loaded, `{:error, reason}` otherwise.
## Examples
iex> Mix.Utils.command_to_module("compile", Mix.Tasks)
{:module, Mix.Tasks.Compile}
"""
def command_to_module(command, at \\ Elixir) do
module = Module.concat(at, command_to_module_name(command))
Code.ensure_loaded(module)
end
@doc """
Returns `true` if any of the `sources` are stale
compared to the given `targets`.
"""
def stale?(sources, targets) do
Enum.any? stale_stream(sources, targets)
end
@doc """
Extract all stale `sources` compared to the given `targets`.
"""
def extract_stale(_sources, []), do: []
def extract_stale([], _targets), do: []
def extract_stale(sources, targets) do
stale_stream(sources, targets) |> Enum.to_list
end
defp stale_stream(sources, targets) do
modified_target = targets |> Enum.map(&last_modified(&1)) |> Enum.min
Stream.filter(sources, fn(source) ->
last_modified(source) > modified_target
end)
end
@doc """
Returns the date the given path was last modified.
If the path does not exist, it returns the unix epoch
(1970-01-01 00:00:00).
"""
def last_modified(path)
def last_modified({{_, _, _}, {_, _, _}} = timestamp) do
timestamp
end
def last_modified(path) do
now = :calendar.local_time
case File.stat(path) do
{:ok, %File.Stat{mtime: mtime}} when mtime > now ->
Mix.shell.error("warning: mtime (modified time) for \"#{path}\" was set to the future, resetting to now")
File.touch!(path, now)
mtime
{:ok, %File.Stat{mtime: mtime}} ->
mtime
{:error, _} ->
{{1970, 1, 1}, {0, 0, 0}}
end
end
@doc """
Extract files from a list of paths.
`exts_or_pattern` may be a list of extensions or a
`Path.wildcard/1` pattern.
If the path in `paths` is a file, it is included in
the return result. If it is a directory, it is searched
recursively for files with the given extensions or matching
the given patterns.
"""
def extract_files(paths, exts_or_pattern)
def extract_files(paths, exts) when is_list(exts) do
extract_files(paths, "*.{#{Enum.join(exts, ",")}}")
end
def extract_files(paths, pattern) do
Enum.flat_map(paths, fn path ->
if File.regular?(path), do: [path], else: Path.wildcard("#{path}/**/#{pattern}")
end) |> Enum.uniq
end
@doc """
Converts the given atom or binary to underscore format.
If an atom is given, it is assumed to be an Elixir module,
so it is converted to a binary and then processed.
## Examples
iex> Mix.Utils.underscore "FooBar"
"foo_bar"
iex> Mix.Utils.underscore "Foo.Bar"
"foo/bar"
iex> Mix.Utils.underscore Foo.Bar
"foo/bar"
In general, `underscore` can be thought of as the reverse of
`camelize`, however, in some cases formatting may be lost:
iex> Mix.Utils.underscore "SAPExample"
"sap_example"
iex> Mix.Utils.camelize "sap_example"
"SapExample"
"""
def underscore(atom) when is_atom(atom) do
"Elixir." <> rest = Atom.to_string(atom)
underscore(rest)
end
def underscore(""), do: ""
def underscore(<<h, t :: binary>>) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<h, t, rest :: binary>>, _) when h in ?A..?Z and not t in ?A..?Z do
<<?_, to_lower_char(h), t>> <> do_underscore(rest, t)
end
defp do_underscore(<<h, t :: binary>>, prev) when h in ?A..?Z and not prev in ?A..?Z do
<<?_, to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<?-, t :: binary>>, _) do
<<?_>> <> do_underscore(t, ?-)
end
defp do_underscore(<< "..", t :: binary>>, _) do
<<"..">> <> underscore(t)
end
defp do_underscore(<<?.>>, _), do: <<?.>>
defp do_underscore(<<?., t :: binary>>, _) do
<<?/>> <> underscore(t)
end
defp do_underscore(<<h, t :: binary>>, _) do
<<to_lower_char(h)>> <> do_underscore(t, h)
end
defp do_underscore(<<>>, _) do
<<>>
end
@doc """
Converts the given string to CamelCase format.
## Examples
iex> Mix.Utils.camelize "foo_bar"
"FooBar"
"""
def camelize(""), do: ""
def camelize(<<?_, t :: binary>>) do
camelize(t)
end
def camelize(<<h, t :: binary>>) do
<<to_upper_char(h)>> <> do_camelize(t)
end
defp do_camelize(<<?_, ?_, t :: binary>>) do
do_camelize(<< ?_, t :: binary >>)
end
defp do_camelize(<<?_, h, t :: binary>>) when h in ?a..?z do
<<to_upper_char(h)>> <> do_camelize(t)
end
defp do_camelize(<<?_>>) do
<<>>
end
defp do_camelize(<<?/, t :: binary>>) do
<<?.>> <> camelize(t)
end
defp do_camelize(<<h, t :: binary>>) do
<<h>> <> do_camelize(t)
end
defp do_camelize(<<>>) do
<<>>
end
@doc """
Takes a module and converts it to a command.
The nesting argument can be given in order to remove
the nesting of a module.
## Examples
iex> Mix.Utils.module_name_to_command(Mix.Tasks.Compile, 2)
"compile"
iex> Mix.Utils.module_name_to_command("Mix.Tasks.Compile.Elixir", 2)
"compile.elixir"
"""
def module_name_to_command(module, nesting \\ 0)
def module_name_to_command(module, nesting) when is_atom(module) do
module_name_to_command(inspect(module), nesting)
end
def module_name_to_command(module, nesting) do
t = Regex.split(~r/\./, to_string(module))
t |> Enum.drop(nesting) |> Enum.map(&underscore(&1)) |> Enum.join(".")
end
@doc """
Takes a command and converts it to the module name format.

## Examples

    iex> Mix.Utils.command_to_module_name("compile.elixir")
    "Compile.Elixir"

"""
def command_to_module_name(s) do
  # Camelize each dot-separated segment, then rejoin with dots.
  s
  |> to_string()
  |> String.split(".")
  |> Enum.map_join(".", &camelize/1)
end
# ASCII case conversion: the two cases differ by a constant offset
# (?a - ?A == 32); non-letters pass through unchanged.
defp to_upper_char(char) when char in ?a..?z, do: char - (?a - ?A)
defp to_upper_char(char), do: char

defp to_lower_char(char) when char in ?A..?Z, do: char + (?a - ?A)
defp to_lower_char(char), do: char
@doc """
Symlink directory `source` to `target` or copy it recursively
in case symlink fails.

Expect source and target to be absolute paths as it generates
a relative symlink.

Returns `:ok`/`{:ok, _}` on success and `{:error, :enoent}` when
`source` does not exist.
"""
def symlink_or_copy(source, target) do
  if File.exists?(source) do
    # `String.to_charlist/1` replaces the deprecated `String.to_char_list/1`.
    source_list = String.to_charlist(source)

    case :file.read_link(target) do
      {:ok, ^source_list} ->
        # Target already links to the source; nothing to do.
        :ok

      {:ok, _} ->
        # Target is a link, but to somewhere else: replace it.
        File.rm!(target)
        do_symlink_or_copy(source, target)

      {:error, :enoent} ->
        # Target does not exist yet.
        do_symlink_or_copy(source, target)

      {:error, _} ->
        # Target exists but is not a symlink (e.g. a real directory):
        # wipe it and retry.
        _ = File.rm_rf!(target)
        do_symlink_or_copy(source, target)
    end
  else
    {:error, :enoent}
  end
end
# Creates a symlink at `target` pointing to `source`, falling back to a
# recursive copy when the platform rejects the symlink.
defp do_symlink_or_copy(source, target) do
# relative symbolic links on windows are broken
source_path = case :os.type do
{:win32, _} -> source
_ -> make_relative_path(source, target)
end
case :file.make_symlink(source_path, target) do
:ok -> :ok
# Symlink creation can fail (unsupported filesystem/permissions);
# copy the tree instead.
{:error, _} -> {:ok, File.cp_r!(source, target)}
end
end
# Make a relative path in between two paths.
# Expects both paths to be fully expanded.
defp make_relative_path(source, target) do
  do_make_relative_path(Path.split(source), Path.split(target))
end

# Walk past the leading segments shared by both paths.
defp do_make_relative_path([shared | source_rest], [shared | target_rest]) do
  do_make_relative_path(source_rest, target_rest)
end

# Once the paths diverge, climb out of the target's remaining directories
# (all but its final segment) and descend into the source's remainder.
defp do_make_relative_path(source, target) do
  ups = List.duplicate("..", max(length(target) - 1, 0))
  Path.join(ups ++ source)
end
@doc """
Opens and reads content from either a URL or a local filesystem path
and returns the contents as a binary.
Raises if the given path is not a URL, nor a file or if the
file or URL are invalid.
## Options
* `:shell` - Forces the use of `wget` or `curl` to fetch the file if the
given path is a URL.
"""
def read_path!(path, opts \\ []) do
cond do
# URL + :shell option: shell out to wget/curl/powershell.
url?(path) && opts[:shell] ->
read_shell(path, nil)
# Plain URL: fetch in-process via :httpc.
url?(path) ->
read_httpc(path, nil)
# Regular local file.
file?(path) ->
read_file(path)
true ->
Mix.raise "Expected #{path} to be a url or a local file path"
end
end
@doc """
Copies content from either a URL or a local filesystem path to
target path.
Used by tasks like `archive.install` and `local.rebar` that support
installation either from a URL or a local file.
Raises if the given path is not a URL, nor a file or if the
file or URL are invalid.
## Options
* `:shell` - Forces the use of `wget` or `curl` to fetch the file if the
given path is a URL.
* `:force` - Forces overwriting target file without a shell prompt.
"""
def copy_path!(source, target, opts \\ []) when is_binary(source) and is_binary(target) do
# Returns true when the copy happened, false when the user declined
# to overwrite an existing target.
if opts[:force] || overwriting?(target) do
cond do
url?(source) && opts[:shell] ->
read_shell(source, target)
url?(source) ->
read_httpc(source, target)
file?(source) ->
copy_file(source, target)
true ->
Mix.raise "Expected #{source} to be a url or a local file path"
end
true
else
false
end
end
@doc """
Prompts the user to overwrite the file if it exists. Returns
the user input.
"""
def overwriting?(path) do
if File.exists?(path) do
full = Path.expand(path)
Mix.shell.yes?(Path.relative_to_cwd(full) <> " already exists, overwrite?")
else
# Nothing at the path, so writing is always allowed.
true
end
end
# Reads a local file, raising on failure.
defp read_file(path) do
File.read!(path)
end
# Copies a local file, creating the target's parent directories first.
defp copy_file(source, target) do
File.mkdir_p!(Path.dirname(target))
File.cp!(source, target)
end
# Fetches `path` over HTTP(S) with Erlang's `:httpc`. When `target` is
# given, the body is streamed to that file and `:ok` is returned;
# otherwise the body is returned as a binary. Raises via `Mix.raise`
# on non-2xx responses and transport errors.
defp read_httpc(path, target) do
  {:ok, _} = Application.ensure_all_started(:ssl)
  {:ok, _} = Application.ensure_all_started(:inets)

  # Starting a http client profile allows us to scope
  # the effects of using a http proxy to this function
  {:ok, _pid} = :inets.start(:httpc, [{:profile, :mix}])

  headers = [{'user-agent', 'Mix/#{System.version}'}]
  request = {:binary.bin_to_list(path), headers}

  # If a proxy environment variable was supplied add a proxy to httpc
  http_proxy = System.get_env("HTTP_PROXY") || System.get_env("http_proxy")
  https_proxy = System.get_env("HTTPS_PROXY") || System.get_env("https_proxy")
  if http_proxy, do: proxy(http_proxy)
  if https_proxy, do: proxy(https_proxy)

  # Bind `req_opts` from the `if` expression itself: assigning inside
  # the branches relied on `if`-scope leakage, which modern Elixir
  # removed (the variable would be unbound below).
  req_opts =
    if target do
      File.mkdir_p!(Path.dirname(target))
      File.rm(target)
      # `String.to_charlist/1` replaces the deprecated to_char_list/1.
      [stream: String.to_charlist(target)]
    else
      [body_format: :binary]
    end

  # We are using relaxed: true because some servers is returning a Location
  # header with relative paths, which does not follow the spec. This would
  # cause the request to fail with {:error, :no_scheme} unless :relaxed
  # is given.
  case :httpc.request(:get, request, [relaxed: true], req_opts, :mix) do
    {:ok, :saved_to_file} ->
      :ok

    {:ok, {{_, status, _}, _, body}} when status in 200..299 ->
      body

    {:ok, {{_, status, _}, _, _}} ->
      Mix.raise "Could not access url #{path}, got status: #{status}"

    {:error, reason} ->
      Mix.raise "Could not access url #{path}, error: #{inspect reason}"
  end
after
  # Always tear the scoped profile down, even when Mix.raise fired.
  :inets.stop(:httpc, :mix)
end
# Registers `proxy` (a proxy URL string) with the `:mix` httpc profile
# started in read_httpc/2. The previous code passed `:hex` here, which
# configured a profile this module never issues requests on, so the
# proxy setting had no effect. Also uses String.to_charlist/1 instead
# of the deprecated String.to_char_list/1.
defp proxy(proxy) do
  uri = URI.parse(proxy)

  if uri.host && uri.port do
    host = String.to_charlist(uri.host)
    :httpc.set_options([{proxy_scheme(uri.scheme), {{host, uri.port}, []}}], :mix)
  end
end
# Maps a URL scheme onto the corresponding :httpc proxy option key.
# Any other scheme crashes, as in the original.
defp proxy_scheme("http"), do: :proxy
defp proxy_scheme("https"), do: :https_proxy
# Downloads `path` by shelling out to powershell (Windows), wget, or
# curl. When `target` is nil the data is fetched into a temp file,
# read, deleted, and returned; otherwise it is written to `target`.
defp read_shell(path, target) do
filename = URI.parse(path).path |> Path.basename
out_path = target || Path.join(System.tmp_dir!, filename)
File.mkdir_p!(Path.dirname(out_path))
File.rm(out_path)
# Try each available downloader in order; `status` is the exit code.
status = cond do
windows? && System.find_executable("powershell") ->
command = ~s[$ErrorActionPreference = 'Stop'; ] <>
~s[$client = new-object System.Net.WebClient; ] <>
~s[$client.DownloadFile(\\"#{path}\\", \\"#{out_path}\\")]
Mix.shell.cmd(~s[powershell -Command "& {#{command}}"])
System.find_executable("wget") ->
Mix.shell.cmd(~s(wget -nv -O "#{out_path}" "#{path}"))
System.find_executable("curl") ->
Mix.shell.cmd(~s(curl -s -S -L -o "#{out_path}" "#{path}"))
windows? ->
Mix.shell.error "powershell, wget or curl not installed"
true ->
Mix.shell.error "wget or curl not installed"
1
end
# Raises with download instructions when status is non-zero.
check_command!(status, path, target)
# With no explicit target, return the bytes and clean up the temp file.
unless target do
data = File.read!(out_path)
File.rm!(out_path)
data
end
end
# Exit status 0 means the shell download succeeded.
defp check_command!(0, _path, _out_path), do: :ok
# Failure with no target file: ask the user to download manually.
defp check_command!(_status, path, nil) do
Mix.raise "Could not fetch data, please download manually from " <>
"#{inspect path}"
end
# Failure with a target file: also tell the user where to put it.
defp check_command!(_status, path, out_path) do
Mix.raise "Could not fetch data, please download manually from " <>
"#{inspect path} and copy it to #{inspect out_path}"
end
# True when running on Windows.
defp windows?, do: match?({:win32, _}, :os.type)

# True when `path` points at an existing regular file.
defp file?(path), do: File.regular?(path)

# True when `path` carries an http or https scheme.
defp url?(path), do: URI.parse(path).scheme in ~w(http https)
end
| 26.363636 | 113 | 0.622138 |
73c0da768039946f2ec62a3bb8f26dbe7df78d62 | 11,336 | ex | Elixir | lib/ueberauth/strategy/slack.ex | connorjacobsen/ueberauth_slack | 7ac075f09fff47584899385cd75ed54116638ced | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/slack.ex | connorjacobsen/ueberauth_slack | 7ac075f09fff47584899385cd75ed54116638ced | [
"MIT"
] | null | null | null | lib/ueberauth/strategy/slack.ex | connorjacobsen/ueberauth_slack | 7ac075f09fff47584899385cd75ed54116638ced | [
"MIT"
] | null | null | null | defmodule Ueberauth.Strategy.Slack do
@moduledoc """
Implements an ÜeberauthSlack strategy for authentication with slack.com.
When configuring the strategy in the Üeberauth providers, you can specify some defaults.
* `uid_field` - The field to use as the UID field. This can be any populated field in the info struct. Default `:email`
* `default_scope` - The scope to request by default from slack (permissions). Default "users:read"
* `oauth2_module` - The OAuth2 module to use. Default Ueberauth.Strategy.Slack.OAuth
```elixir
config :ueberauth, Ueberauth,
providers: [
slack: { Ueberauth.Strategy.Slack, [uid_field: :nickname, default_scope: "users:read,users:write"] }
]
```
"""
use Ueberauth.Strategy,
uid_field: :email,
default_scope: "users:read",
oauth2_module: Ueberauth.Strategy.Slack.OAuth
alias Ueberauth.Auth.Info
alias Ueberauth.Auth.Credentials
alias Ueberauth.Auth.Extra
# When handling the request just redirect to Slack
@doc false
def handle_request!(conn) do
# Requested scopes: explicit ?scope= param wins over the configured default.
scopes = conn.params["scope"] || option(conn, :default_scope)
opts = [scope: scopes]
# Pass through an OAuth state param when the caller supplied one.
opts =
if conn.params["state"], do: Keyword.put(opts, :state, conn.params["state"]), else: opts
# Optionally pin the authorization to a specific Slack team.
team = option(conn, :team)
opts = if team, do: Keyword.put(opts, :team, team), else: opts
callback_url = callback_url(conn)
# Strip a dangling "?" so the redirect_uri matches what Slack expects.
callback_url =
if String.ends_with?(callback_url, "?"),
do: String.slice(callback_url, 0..-2),
else: callback_url
opts = Keyword.put(opts, :redirect_uri, callback_url)
module = option(conn, :oauth2_module)
redirect!(conn, apply(module, :authorize_url!, [opts]))
end
# When handling the callback, if there was no errors we need to
# make two calls. The first, to fetch the slack auth is so that we can get hold of
# the user id so we can make a query to fetch the user info.
# So that it is available later to build the auth struct, we put it in the private section of the conn.
@doc false
def handle_callback!(%Plug.Conn{params: %{"code" => code}} = conn) do
module = option(conn, :oauth2_module)
params = [code: code]
redirect_uri = get_redirect_uri(conn)
options = %{
options: [
client_options: [redirect_uri: redirect_uri]
]
}
# Exchange the authorization code for an access token.
token = apply(module, :get_token!, [params, options])
if token.access_token == nil do
# Token exchange failed; surface Slack's error description.
set_errors!(conn, [
error(token.other_params["error"], token.other_params["error_description"])
])
else
conn
|> store_token(token)
|> fetch_auth(token)
|> fetch_identity(token)
|> fetch_user(token)
|> fetch_team(token)
end
end
# If we don't match code, then we have an issue
@doc false
def handle_callback!(conn) do
set_errors!(conn, [error("missing_code", "No code received")])
end
# We store the token for use later when fetching the slack auth and user and constructing the auth struct.
@doc false
defp store_token(conn, token) do
put_private(conn, :slack_token, token)
end
# Remove the temporary storage in the conn for our data. Run after the auth struct has been built.
@doc false
def handle_cleanup!(conn) do
conn
|> put_private(:slack_auth, nil)
|> put_private(:slack_identity, nil)
|> put_private(:slack_user, nil)
|> put_private(:slack_token, nil)
end
# The structure of the requests is such that it is difficult to provide cusomization for the uid field.
# instead, we allow selecting any field from the info struct
@doc false
def uid(conn) do
Map.get(info(conn), option(conn, :uid_field))
end
@doc false
# Builds the Ueberauth.Auth.Credentials struct from the stored token
# plus whatever auth/identity/user payloads the scopes allowed fetching.
def credentials(conn) do
token = conn.private.slack_token
auth = conn.private[:slack_auth]
identity = conn.private[:slack_identity]
user = conn.private[:slack_user]
# Slack returns granted scopes as a comma-separated string.
scope_string = token.other_params["scope"] || ""
scopes = String.split(scope_string, ",")
%Credentials{
token: token.access_token,
refresh_token: token.refresh_token,
expires_at: token.expires_at,
token_type: token.token_type,
expires: !!token.expires_at,
scopes: scopes,
other:
Map.merge(
%{
user: get_in(auth, ["user"]),
user_id: get_in(auth, ["user_id"]) || get_in(identity, ["user", "id"]),
team: get_in(auth, ["team"]) || get_in(identity, ["team", "name"]),
team_id: get_in(auth, ["team_id"]) || get_in(identity, ["team", "id"]),
team_domain: get_in(identity, ["team", "domain"]),
team_url: get_in(auth, ["url"])
},
user_credentials(user)
)
}
end
@doc false
# Builds the Ueberauth.Auth.Info struct, preferring data from
# users.info (:slack_user) and falling back to users.identity.
def info(conn) do
  user = conn.private[:slack_user]
  auth = conn.private[:slack_auth]
  identity = conn.private[:slack_identity]

  profile = get_in(user, ["profile"]) || get_in(identity, ["user"]) || %{}

  # Collect every "image_*" avatar URL from the profile.
  image_urls =
    profile
    |> Map.keys()
    |> Enum.filter(&(&1 =~ ~r/^image_/))
    |> Enum.into(%{}, &{&1, profile[&1]})

  # Collect team icon URLs, prefixed so they cannot clash with the
  # user's avatar keys. `_value` fixes an unused-variable warning.
  team_image_urls =
    (identity || %{})
    |> Map.get("team", %{})
    |> Enum.filter(fn {key, _value} -> key =~ ~r/^image_/ end)
    |> Enum.into(%{}, fn {key, value} -> {"team_#{key}", value} end)

  %Info{
    name: name_from_user(user) || get_in(identity, ["user", "name"]),
    nickname: get_in(user, ["name"]),
    email: get_in(profile, ["email"]),
    image: get_in(profile, ["image_48"]),
    urls:
      image_urls
      |> Map.merge(team_image_urls)
      |> Map.merge(%{
        team_url: get_in(auth, ["url"])
      })
  }
end
@doc false
# Exposes the raw Slack payloads and token for callers that need more
# than the normalized auth struct.
def extra(conn) do
%Extra{
raw_info: %{
auth: conn.private[:slack_auth],
identity: conn.private[:slack_identity],
token: conn.private[:slack_token],
user: conn.private[:slack_user],
team: conn.private[:slack_team]
}
}
end
# Extracts account-level flags from a Slack users.info payload into a
# map with atom keys. Returns an empty map when no user was fetched.
defp user_credentials(nil), do: %{}

defp user_credentials(user) do
  # Each atom key mirrors the string key in Slack's payload.
  [:has_2fa, :is_admin, :is_owner, :is_primary_owner, :is_restricted, :is_ultra_restricted]
  |> Map.new(fn flag -> {flag, user[Atom.to_string(flag)]} end)
end
# Before we can fetch the user, we first need to fetch the auth to find out what the user id is.
defp fetch_auth(conn, token) do
scope_string = token.other_params["scope"] || ""
scopes = String.split(scope_string, ",")
case Ueberauth.Strategy.Slack.OAuth.get(token, "/auth.test") do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: auth}}
when status_code in 200..399 ->
cond do
# Slack wraps errors in a 200 response; "ok" signals real success.
auth["ok"] ->
put_private(conn, :slack_auth, auth)
auth["error"] == "invalid_auth" && Enum.member?(scopes, "identity.basic") ->
# If the token has only the "identity.basic" scope then it may error
# at the "auth.test" endpoint but still succeed at the
# "identity.basic" endpoint.
# In this case we rely on fetch_identity to set the error if the
# token is invalid.
conn
true ->
set_errors!(conn, [error(auth["error"], auth["error"])])
end
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
# Fetches users.identity, but only when the token was granted the
# "identity.basic" scope; otherwise the conn passes through untouched.
defp fetch_identity(conn, token) do
scope_string = token.other_params["scope"] || ""
scopes = String.split(scope_string, ",")
case "identity.basic" in scopes do
false ->
conn
true ->
case Ueberauth.Strategy.Slack.OAuth.get(token, "/users.identity") do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: identity}}
when status_code in 200..399 ->
# Slack wraps errors in a 200 response; "ok" signals real success.
if identity["ok"] do
put_private(conn, :slack_identity, identity)
else
set_errors!(conn, [error(identity["error"], identity["error"])])
end
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
end
# If the call to fetch the auth fails, we're going to have failures already in place.
# If this happens don't try and fetch the user and just let it fail.
defp fetch_user(%Plug.Conn{assigns: %{ueberauth_failure: _fails}} = conn, _), do: conn
# Given the auth and token we can now fetch the user.
defp fetch_user(conn, token) do
scope_string = token.other_params["scope"] || ""
scopes = String.split(scope_string, ",")
# users.info requires the "users:read" scope.
case "users:read" in scopes do
false ->
conn
true ->
auth = conn.private.slack_auth
case Ueberauth.Strategy.Slack.OAuth.get(token, "/users.info", %{user: auth["user_id"]}) do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: user}}
when status_code in 200..399 ->
# Slack wraps errors in a 200 response; "ok" signals real success.
if user["ok"] do
put_private(conn, :slack_user, user["user"])
else
set_errors!(conn, [error(user["error"], user["error"])])
end
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
end
# Skip when earlier steps already recorded a failure.
defp fetch_team(%Plug.Conn{assigns: %{ueberauth_failure: _fails}} = conn, _), do: conn
# Fetches team.info when the "team:read" scope was granted.
defp fetch_team(conn, token) do
scope_string = token.other_params["scope"] || ""
scopes = String.split(scope_string, ",")
case "team:read" in scopes do
false ->
conn
true ->
case Ueberauth.Strategy.Slack.OAuth.get(token, "/team.info") do
{:ok, %OAuth2.Response{status_code: 401, body: _body}} ->
set_errors!(conn, [error("token", "unauthorized")])
{:ok, %OAuth2.Response{status_code: status_code, body: team}}
when status_code in 200..399 ->
# Slack wraps errors in a 200 response; "ok" signals real success.
if team["ok"] do
put_private(conn, :slack_team, team["team"])
else
set_errors!(conn, [error(team["error"], team["error"])])
end
{:error, %OAuth2.Error{reason: reason}} ->
set_errors!(conn, [error("OAuth2", reason)])
end
end
end
# Fetch the name to use. We try to start with the most specific name
# available and fall back to the least specific one.
defp name_from_user(nil), do: nil

defp name_from_user(user) do
  [
    user["profile"]["real_name_normalized"],
    user["profile"]["real_name"],
    user["real_name"],
    user["name"]
  ]
  |> Enum.find(fn candidate -> candidate not in [nil, ""] end)
end
# Reads a strategy option from the provider config, falling back to the
# defaults declared in `use Ueberauth.Strategy` above.
defp option(conn, key) do
Keyword.get(options(conn), key, Keyword.get(default_options(), key))
end
# Resolve the OAuth redirect URI: an explicit :redirect_uri in the
# Ueberauth application config wins, otherwise fall back to the
# callback URL computed for this connection.
defp get_redirect_uri(%Plug.Conn{} = conn) do
  :ueberauth
  |> Application.get_env(Ueberauth)
  |> Keyword.get(:redirect_uri)
  |> case do
    nil -> callback_url(conn)
    redirect_uri -> redirect_uri
  end
end
end
| 31.753501 | 121 | 0.613444 |
73c146c5fd687c79f51df28c2b5ed30b20ab09d5 | 1,549 | exs | Elixir | test/core/version/gear_test.exs | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | test/core/version/gear_test.exs | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | test/core/version/gear_test.exs | ikeyasu/antikythera | 544fdd22e46b1f34177053d87d9e2a9708c74113 | [
"Apache-2.0"
] | null | null | null | # Copyright(c) 2015-2018 ACCESS CO., LTD. All rights reserved.
defmodule AntikytheraCore.Version.GearTest do
use Croma.TestCase, alias_as: V
# Drains every message currently in the test process mailbox.
# Order is irrelevant to the assertions below, which compare MapSets.
defp all_messages_in_mailbox(acc \\ []) do
receive do
x -> all_messages_in_mailbox([x | acc])
after
0 -> acc
end
end
# Shorthand for building a MapSet in the data table below.
defp set(l) do
MapSet.new(l)
end
test "install_gears_whose_deps_met/3 should appropriately reorder gear installations according to their gear dependencies" do
# Each entry maps {gear => its dependency set} to the gears whose
# dependencies can never be satisfied (and must be rejected).
%{
[] => [],
[gear_a: set([]), gear_b: set([])] => [],
[gear_a: set([:gear_b]), gear_b: set([])] => [],
[gear_a: set([:gear_b, :gear_c]), gear_b: set([:gear_c]), gear_c: set([])] => [],
[gear_a: set([:gear_b])] => [:gear_a],
[gear_a: set([:gear_c]), gear_b: set([])] => [:gear_a],
[gear_a: set([:gear_b]), gear_b: set([:gear_c]), gear_c: set([:gear_a])] => [:gear_a, :gear_b, :gear_c],
} |> Enum.each(fn {pairs, gears_to_be_rejected} ->
pairs_to_be_rejected = Enum.filter(pairs, fn {g, _} -> g in gears_to_be_rejected end)
# The install callback sends each installed gear back to this process.
ret = V.install_gears_whose_deps_met(pairs, MapSet.new, fn g -> send(self(), g) end)
assert set(ret) == set(pairs_to_be_rejected)
assert set(all_messages_in_mailbox()) == set(Keyword.keys(pairs) -- gears_to_be_rejected)
end)
end
end
| 44.257143 | 127 | 0.520336 |
73c153fbbca53490202a0d0c142c243697a0f73a | 596 | exs | Elixir | config/prod.secret.exs | pastleo/iex_line_bot | 73d02b45adc05bc7331fa5f88859861d04a2e71f | [
"MIT"
] | 1 | 2019-06-24T23:55:26.000Z | 2019-06-24T23:55:26.000Z | config/prod.secret.exs | pastleo/iex_line_bot | 73d02b45adc05bc7331fa5f88859861d04a2e71f | [
"MIT"
] | null | null | null | config/prod.secret.exs | pastleo/iex_line_bot | 73d02b45adc05bc7331fa5f88859861d04a2e71f | [
"MIT"
] | null | null | null | # In this file, we load production configuration and
# secrets from environment variables. You can also
# hardcode secrets, although such is generally not
# recommended and you have to remember to add this
# file to your .gitignore.
use Mix.Config
# Fail fast at boot when the signing secret is absent.
secret_key_base =
System.get_env("SECRET_KEY_BASE") ||
raise """
environment variable SECRET_KEY_BASE is missing.
You can generate one by calling: mix phx.gen.secret
"""
# PORT defaults to 4000 when not provided by the environment.
config :iex_line_bot, IexLineBotWeb.Endpoint,
http: [:inet6, port: String.to_integer(System.get_env("PORT") || "4000")],
secret_key_base: secret_key_base
| 33.111111 | 76 | 0.748322 |
73c158443d89671d9b45ed7bb66baa3712ea0108 | 366 | exs | Elixir | apps/artemis/test/artemis/schemas/feature_test.exs | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | [
"MIT"
] | 10 | 2019-07-05T19:59:20.000Z | 2021-05-23T07:36:11.000Z | apps/artemis/test/artemis/schemas/feature_test.exs | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | [
"MIT"
] | 7 | 2019-07-12T21:41:01.000Z | 2020-08-17T21:29:22.000Z | apps/artemis/test/artemis/schemas/feature_test.exs | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | [
"MIT"
] | 4 | 2019-07-05T20:04:08.000Z | 2021-05-13T16:28:33.000Z | defmodule Artemis.FeatureTest do
use Artemis.DataCase
use ExUnit.Case, async: true
import Ecto.Repo
import Artemis.Factories
describe "attributes - constraints" do
test "slug must be unique" do
# Inserting a second feature with the same slug must hit the
# database-level unique constraint.
existing = insert(:feature)
assert_raise Ecto.ConstraintError, fn ->
insert(:feature, slug: existing.slug)
end
end
end
end
| 20.333333 | 46 | 0.691257 |
73c15c40f15fcc35c4ceaca31c7a7ead1985c190 | 727 | ex | Elixir | lib/mix/tasks/subscriber/list_in_topic.ex | mcrumm/cloud_pubsub_samples | ea89c3bf3a6174e4ff140a501150b16f9b013027 | [
"Apache-2.0"
] | 1 | 2020-05-28T19:24:56.000Z | 2020-05-28T19:24:56.000Z | lib/mix/tasks/subscriber/list_in_topic.ex | mcrumm/cloud_pubsub_samples | ea89c3bf3a6174e4ff140a501150b16f9b013027 | [
"Apache-2.0"
] | null | null | null | lib/mix/tasks/subscriber/list_in_topic.ex | mcrumm/cloud_pubsub_samples | ea89c3bf3a6174e4ff140a501150b16f9b013027 | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Subscriber.ListInTopic do
use CloudPubsubSamples.Command
@shortdoc "Lists Cloud Pub/Sub subscriptions for a topic"
@moduledoc """
Lists the Google Cloud Pub/Sub subscriptions for the current project.
The topic must exist in the current project.
## Usage
mix subscriber.list_in_topic topic_name
"""
alias CloudPubsubSamples.{Project, Subscriber}
@impl true
# Prints one line per subscription on the given topic; any error tuple
# from Subscriber.list_in_topic/2 falls through the `with` unchanged.
def run(project, [topic | _args]) do
with {:ok, subscriptions} <- Subscriber.list_in_topic(project, topic) do
shell = Mix.shell()
path = Project.topic_path(project, topic)
shell.info("Listing subscriptions for #{path}:")
Enum.each(subscriptions, &shell.info/1)
:ok
end
end
end
| 25.964286 | 76 | 0.702889 |
73c184743c68342976dad0c90f1754694dd4401f | 248 | ex | Elixir | lib/beam/ksp/coprehensions.ex | itsemilano/beam | afa79d7f31d2e2d4ded381656735d08cabd4f9ea | [
"Apache-2.0"
] | null | null | null | lib/beam/ksp/coprehensions.ex | itsemilano/beam | afa79d7f31d2e2d4ded381656735d08cabd4f9ea | [
"Apache-2.0"
] | null | null | null | lib/beam/ksp/coprehensions.ex | itsemilano/beam | afa79d7f31d2e2d4ded381656735d08cabd4f9ea | [
"Apache-2.0"
] | null | null | null | defmodule Comprehentions do
# Multiplies each element of the fixed list by five.
def start1 do
  Enum.map([2, 4, 5, 6, 7, 8], &(&1 * 5))
end

# Cartesian product of the two lists, in the same order a nested
# comprehension (`for x <- ..., y <- ...`) produces.
def start2 do
  Enum.flat_map([2, 4, 6], fn x ->
    Enum.map([3, 5, 9], fn y -> x * y end)
  end)
end

# Keeps only the even numbers from the fixed list.
def start3 do
  Enum.filter([1, 2, 3, 4, 5, 6], fn n -> rem(n, 2) == 0 end)
end
end
| 17.714286 | 54 | 0.475806 |
73c1d33624ec117e10152bf1d20f4ad2c3640974 | 543 | ex | Elixir | test/support/resources/post.ex | kingshalaby1/ash_postgres | 4aa76a522b85138860c7ec1f2e1a3d8b8fc833a3 | [
"MIT"
] | null | null | null | test/support/resources/post.ex | kingshalaby1/ash_postgres | 4aa76a522b85138860c7ec1f2e1a3d8b8fc833a3 | [
"MIT"
] | null | null | null | test/support/resources/post.ex | kingshalaby1/ash_postgres | 4aa76a522b85138860c7ec1f2e1a3d8b8fc833a3 | [
"MIT"
] | null | null | null | defmodule AshPostgres.Test.Post do
@moduledoc false
use Ash.Resource,
data_layer: AshPostgres.DataLayer
# Storage configuration: which table and repo back this resource.
postgres do
table "posts"
repo AshPostgres.TestRepo
end
# Only basic read and create actions are exposed.
actions do
read(:read)
create(:create)
end
attributes do
# UUID primary key generated client-side on create.
attribute(:id, :uuid, primary_key?: true, default: &Ecto.UUID.generate/0)
attribute(:title, :string)
attribute(:score, :integer)
attribute(:public, :boolean)
end
relationships do
has_many(:comments, AshPostgres.Test.Comment, destination_field: :post_id)
end
end
| 20.111111 | 78 | 0.701657 |
73c1db3bc8be10e9f2083f6897c03a3741ce432c | 631 | exs | Elixir | test/test_helper.exs | doyobi/livebook | 136d5039c42b406dd0b31aea188deb4fce3b1328 | [
"Apache-2.0"
] | 1 | 2022-02-16T09:13:27.000Z | 2022-02-16T09:13:27.000Z | test/test_helper.exs | doyobi/livebook | 136d5039c42b406dd0b31aea188deb4fce3b1328 | [
"Apache-2.0"
] | null | null | null | test/test_helper.exs | doyobi/livebook | 136d5039c42b406dd0b31aea188deb4fce3b1328 | [
"Apache-2.0"
] | null | null | null | # Start manager on the current node and configure it not to
# terminate automatically, so there is no race condition
# when starting/stopping Embedded runtimes in parallel
Livebook.Runtime.ErlDist.NodeManager.start(
auto_termination: false,
unload_modules_on_termination: false
)
# Disable autosaving
Livebook.Storage.current().insert(:settings, "global", autosave_path: nil)
# Erlang modules only carry docs when OTP was built with them.
erl_docs_available? = Code.fetch_docs(:gen_server) != {:error, :chunk_not_found}
exclude = []
# Skip :erl_docs-tagged tests when Erlang doc chunks are unavailable.
exclude = if erl_docs_available?, do: exclude, else: Keyword.put(exclude, :erl_docs, true)
ExUnit.start(assert_receive_timeout: 1_000, exclude: exclude)
| 35.055556 | 90 | 0.787639 |
73c1eb56db9bec7ff3cc684dad31cb1c03f54853 | 333 | ex | Elixir | lib/contento_web/router.ex | reubenbrown13/contento | 979831bc9052f810409e969f4d11f15c0e40bb55 | [
"MIT"
] | 95 | 2017-10-03T19:21:57.000Z | 2021-02-15T12:37:37.000Z | lib/contento_web/router.ex | reubenbrown13/contento | 979831bc9052f810409e969f4d11f15c0e40bb55 | [
"MIT"
] | 20 | 2017-10-10T12:26:54.000Z | 2020-11-12T11:30:36.000Z | lib/contento_web/router.ex | reubenbrown13/contento | 979831bc9052f810409e969f4d11f15c0e40bb55 | [
"MIT"
] | 17 | 2017-10-04T07:59:40.000Z | 2022-02-09T20:10:09.000Z | defmodule ContentoWeb.Router do
use ContentoWeb, :router
# Standard browser pipeline: HTML only, session/flash, CSRF protection,
# and secure response headers.
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_flash
plug :protect_from_forgery
plug :put_secure_browser_headers
end
# Root scope: only the index page is routed.
scope "/", ContentoWeb do
pipe_through :browser
get "/", PageController, :index
end
end
| 18.5 | 36 | 0.702703 |
73c1feeec02b2c50fde2f0a2ba917dc27701bd21 | 1,735 | ex | Elixir | web/web.ex | llxff/projects_statistics | 343508fc8fc8e9238a546f5230fff50e21126417 | [
"Unlicense"
] | null | null | null | web/web.ex | llxff/projects_statistics | 343508fc8fc8e9238a546f5230fff50e21126417 | [
"Unlicense"
] | null | null | null | web/web.ex | llxff/projects_statistics | 343508fc8fc8e9238a546f5230fff50e21126417 | [
"Unlicense"
] | null | null | null | defmodule ProjectsStatistics.Web do
@moduledoc """
A module that keeps using definitions for controllers,
views and so on.
This can be used in your application as:
use ProjectsStatistics.Web, :controller
use ProjectsStatistics.Web, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below.
"""
# Quoted imports injected into every Ecto schema module.
def model do
quote do
use Ecto.Schema
import Ecto
import Ecto.Changeset
import Ecto.Query
end
end
# Quoted imports injected into every Phoenix controller.
def controller do
quote do
use Phoenix.Controller
alias ProjectsStatistics.Repo
import Ecto
import Ecto.Query
import ProjectsStatistics.Router.Helpers
import ProjectsStatistics.Gettext
end
end
# Quoted imports injected into every Phoenix view.
def view do
quote do
use Phoenix.View, root: "web/templates"
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_csrf_token: 0, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import ProjectsStatistics.Router.Helpers
import ProjectsStatistics.ErrorHelpers
import ProjectsStatistics.Gettext
end
end
# Quoted setup injected into the router module.
def router do
quote do
use Phoenix.Router
end
end
# Quoted imports injected into every Phoenix channel.
def channel do
quote do
use Phoenix.Channel
alias ProjectsStatistics.Repo
import Ecto
import Ecto.Query
import ProjectsStatistics.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 21.158537 | 88 | 0.689337 |
73c21c43c904ff1066b3c7908cec268ea06680a4 | 274 | exs | Elixir | priv/repo/migrations/20170514103042_add_taglist_to_articles.exs | ludwikbukowski/recipes | cac5711d32874c3011da8da3329b70d0e28e725e | [
"MIT"
] | 4 | 2019-02-11T12:15:36.000Z | 2021-03-22T16:23:47.000Z | priv/repo/migrations/20170514103042_add_taglist_to_articles.exs | ludwikbukowski/recipes | cac5711d32874c3011da8da3329b70d0e28e725e | [
"MIT"
] | null | null | null | priv/repo/migrations/20170514103042_add_taglist_to_articles.exs | ludwikbukowski/recipes | cac5711d32874c3011da8da3329b70d0e28e725e | [
"MIT"
] | null | null | null | defmodule EmpexCookbook.Repo.Migrations.AddTaglistToArticles do
use Ecto.Migration
# Adds a tag_list array column and a GIN index over it.
# Uses `create index/3` instead of raw `execute/1` so the migration is
# reversible: `execute/1` cannot be rolled back inside `change/0`.
def change do
  alter table(:articles) do
    add(:tag_list, {:array, :string})
  end

  # Keep the original index name for compatibility with existing databases.
  create index(:articles, [:tag_list], name: "article_tag_list_index", using: "GIN")
end
end
| 22.833333 | 82 | 0.737226 |
73c222cef6513a6bdcfd722b513aadc24ae37285 | 111 | exs | Elixir | test/configs/bad_config_from.exs | silvagustin/ueberauth_auth0 | 3070510024bfcb1545dbd3e26665d4bb8b1404df | [
"MIT"
] | null | null | null | test/configs/bad_config_from.exs | silvagustin/ueberauth_auth0 | 3070510024bfcb1545dbd3e26665d4bb8b1404df | [
"MIT"
] | null | null | null | test/configs/bad_config_from.exs | silvagustin/ueberauth_auth0 | 3070510024bfcb1545dbd3e26665d4bb8b1404df | [
"MIT"
] | null | null | null | import Config
# Points the Auth0 strategy at a config_from module — presumably an
# intentionally invalid one (see module name) used by tests to exercise
# configuration error handling; TODO confirm against the test suite.
config :ueberauth, Ueberauth.Strategy.Auth0.OAuth, config_from: Ueberauth.Support.BadConfigFrom
| 27.75 | 95 | 0.846847 |
73c245337558e2b6ac63fb15c4f45100caa5524e | 10,728 | ex | Elixir | test/event_store/support/subscription_test_case.ex | rsataev/Commanded-CQRS-ES | 0fd3758e5945f2bd8736619c48f212cc2bf552ad | [
"MIT"
] | null | null | null | test/event_store/support/subscription_test_case.ex | rsataev/Commanded-CQRS-ES | 0fd3758e5945f2bd8736619c48f212cc2bf552ad | [
"MIT"
] | null | null | null | test/event_store/support/subscription_test_case.ex | rsataev/Commanded-CQRS-ES | 0fd3758e5945f2bd8736619c48f212cc2bf552ad | [
"MIT"
] | null | null | null | defmodule Commanded.EventStore.SubscriptionTestCase do
import Commanded.SharedTestCase
define_tests do
alias Commanded.EventStore
alias Commanded.EventStore.{EventData, Subscriber}
alias Commanded.Helpers.{ProcessHelper, Wait}
    # Minimal fixture event struct used as the payload of every appended
    # test event in this suite.
    defmodule BankAccountOpened do
      defstruct [:account_number, :initial_balance]
    end
describe "transient subscription to single stream" do
test "should receive events appended to the stream" do
stream_uuid = UUID.uuid4()
assert :ok = EventStore.subscribe(stream_uuid)
:ok = EventStore.append_to_stream(stream_uuid, 0, build_events(1))
received_events = assert_receive_events(1, from: 1)
assert Enum.map(received_events, & &1.stream_id) == [stream_uuid]
assert Enum.map(received_events, & &1.stream_version) == [1]
:ok = EventStore.append_to_stream(stream_uuid, 1, build_events(2))
received_events = assert_receive_events(2, from: 2)
assert Enum.map(received_events, & &1.stream_id) == [stream_uuid, stream_uuid]
assert Enum.map(received_events, & &1.stream_version) == [2, 3]
:ok = EventStore.append_to_stream(stream_uuid, 3, build_events(3))
received_events = assert_receive_events(3, from: 4)
assert Enum.map(received_events, & &1.stream_id) == [
stream_uuid,
stream_uuid,
stream_uuid
]
assert Enum.map(received_events, & &1.stream_version) == [4, 5, 6]
refute_receive {:events, _received_events}
end
test "should not receive events appended to another stream" do
stream_uuid = UUID.uuid4()
another_stream_uuid = UUID.uuid4()
assert :ok = EventStore.subscribe(stream_uuid)
:ok = EventStore.append_to_stream(another_stream_uuid, 0, build_events(1))
:ok = EventStore.append_to_stream(another_stream_uuid, 1, build_events(2))
refute_receive {:events, _received_events}
end
end
describe "subscribe to single stream" do
test "should receive `:subscribed` message once subscribed" do
{:ok, subscription} = EventStore.subscribe_to("stream1", "subscriber", self(), :origin)
assert_receive {:subscribed, ^subscription}
end
test "should receive events appended to stream" do
{:ok, subscription} = EventStore.subscribe_to("stream1", "subscriber", self(), :origin)
assert_receive {:subscribed, ^subscription}
:ok = EventStore.append_to_stream("stream1", 0, build_events(1))
:ok = EventStore.append_to_stream("stream1", 1, build_events(2))
:ok = EventStore.append_to_stream("stream1", 3, build_events(3))
assert_receive_events(subscription, 1, from: 1)
assert_receive_events(subscription, 2, from: 2)
assert_receive_events(subscription, 3, from: 4)
refute_receive {:events, _received_events}
end
test "should not receive events appended to another stream" do
{:ok, subscription} = EventStore.subscribe_to("stream1", "subscriber", self(), :origin)
:ok = EventStore.append_to_stream("stream1", 0, build_events(1))
:ok = EventStore.append_to_stream("stream2", 0, build_events(2))
:ok = EventStore.append_to_stream("stream3", 0, build_events(3))
assert_receive_events(subscription, 1, from: 1)
refute_receive {:events, _received_events}
end
test "should skip existing events when subscribing from current position" do
:ok = EventStore.append_to_stream("stream1", 0, build_events(1))
:ok = EventStore.append_to_stream("stream1", 1, build_events(2))
wait_for_event_store()
{:ok, subscription} = EventStore.subscribe_to("stream1", "subscriber", self(), :current)
assert_receive {:subscribed, ^subscription}
refute_receive {:events, _events}
:ok = EventStore.append_to_stream("stream1", 3, build_events(3))
:ok = EventStore.append_to_stream("stream2", 0, build_events(3))
:ok = EventStore.append_to_stream("stream3", 0, build_events(3))
assert_receive_events(subscription, 3, from: 4)
refute_receive {:events, _events}
end
test "should prevent duplicate subscriptions" do
{:ok, _subscription} = EventStore.subscribe_to("stream1", "subscriber", self(), :origin)
assert {:error, :subscription_already_exists} ==
EventStore.subscribe_to("stream1", "subscriber", self(), :origin)
end
end
describe "subscribe to all streams" do
test "should receive `:subscribed` message once subscribed" do
{:ok, subscription} = EventStore.subscribe_to(:all, "subscriber", self(), :origin)
assert_receive {:subscribed, ^subscription}
end
test "should receive events appended to any stream" do
{:ok, subscription} = EventStore.subscribe_to(:all, "subscriber", self(), :origin)
assert_receive {:subscribed, ^subscription}
:ok = EventStore.append_to_stream("stream1", 0, build_events(1))
:ok = EventStore.append_to_stream("stream2", 0, build_events(2))
:ok = EventStore.append_to_stream("stream3", 0, build_events(3))
assert_receive_events(subscription, 1, from: 1)
assert_receive_events(subscription, 2, from: 2)
assert_receive_events(subscription, 3, from: 4)
refute_receive {:events, _received_events}
end
test "should skip existing events when subscribing from current position" do
:ok = EventStore.append_to_stream("stream1", 0, build_events(1))
:ok = EventStore.append_to_stream("stream2", 0, build_events(2))
wait_for_event_store()
{:ok, subscription} = EventStore.subscribe_to(:all, "subscriber", self(), :current)
assert_receive {:subscribed, ^subscription}
refute_receive {:events, _received_events}
:ok = EventStore.append_to_stream("stream3", 0, build_events(3))
assert_receive_events(subscription, 3, from: 4)
refute_receive {:events, _received_events}
end
test "should prevent duplicate subscriptions" do
{:ok, _subscription} = EventStore.subscribe_to(:all, "subscriber", self(), :origin)
assert {:error, :subscription_already_exists} ==
EventStore.subscribe_to(:all, "subscriber", self(), :origin)
end
end
describe "catch-up subscription" do
test "should receive any existing events" do
:ok = EventStore.append_to_stream("stream1", 0, build_events(1))
:ok = EventStore.append_to_stream("stream2", 0, build_events(2))
wait_for_event_store()
{:ok, subscription} = EventStore.subscribe_to(:all, "subscriber", self(), :origin)
assert_receive {:subscribed, ^subscription}
assert_receive_events(subscription, 1, from: 1)
assert_receive_events(subscription, 2, from: 2)
:ok = EventStore.append_to_stream("stream3", 0, build_events(3))
assert_receive_events(subscription, 3, from: 4)
refute_receive {:events, _received_events}
end
end
describe "unsubscribe from all streams" do
test "should not receive further events appended to any stream" do
{:ok, subscription} = EventStore.subscribe_to(:all, "subscriber", self(), :origin)
assert_receive {:subscribed, ^subscription}
:ok = EventStore.append_to_stream("stream1", 0, build_events(1))
assert_receive_events(subscription, 1, from: 1)
:ok = EventStore.unsubscribe(subscription)
:ok = EventStore.append_to_stream("stream2", 0, build_events(2))
:ok = EventStore.append_to_stream("stream3", 0, build_events(3))
refute_receive {:events, _received_events}
end
end
describe "resume subscription" do
test "should remember last seen event number when subscription resumes" do
:ok = EventStore.append_to_stream("stream1", 0, build_events(1))
:ok = EventStore.append_to_stream("stream2", 0, build_events(2))
{:ok, subscriber} = Subscriber.start_link()
wait_until(fn ->
assert Subscriber.subscribed?(subscriber)
received_events = Subscriber.received_events(subscriber)
assert length(received_events) == 3
end)
ProcessHelper.shutdown(subscriber)
{:ok, subscriber} = Subscriber.start_link()
wait_until(fn ->
assert Subscriber.subscribed?(subscriber)
end)
received_events = Subscriber.received_events(subscriber)
assert length(received_events) == 0
:ok = EventStore.append_to_stream("stream3", 0, build_events(1))
wait_until(fn ->
received_events = Subscriber.received_events(subscriber)
assert length(received_events) == 1
end)
end
end
    # Retries `assertion` until it passes or the wait budget elapses.
    # NOTE(review): `event_store_wait/1` is supplied by the including test
    # module (via Commanded.SharedTestCase) — presumably an adapter-specific
    # timeout; confirm there.
    defp wait_until(assertion) do
      Wait.until(event_store_wait(1_000), assertion)
    end
    # Gives eventually-consistent event stores time to settle: sleeps for the
    # adapter-specific interval, or is a no-op when `event_store_wait/0`
    # returns nil.
    defp wait_for_event_store do
      case event_store_wait() do
        nil -> :ok
        wait -> :timer.sleep(wait)
      end
    end
    # Variant that also acks received events against `subscription`; the
    # subscription is folded into `opts` and handled by the two-arity clause.
    defp assert_receive_events(subscription, expected_count, opts) do
      assert_receive_events(expected_count, Keyword.put(opts, :subscription, subscription))
    end
defp assert_receive_events(expected_count, opts) do
from_event_number = Keyword.get(opts, :from, 1)
assert_receive {:events, received_events}
received_events
|> Enum.with_index(from_event_number)
|> Enum.each(fn {received_event, expected_event_number} ->
assert received_event.event_number == expected_event_number
end)
case Keyword.get(opts, :subscription) do
nil -> :ok
subscription -> EventStore.ack_event(subscription, List.last(received_events))
end
case expected_count - length(received_events) do
0 ->
received_events
remaining when remaining > 0 ->
received_events ++
assert_receive_events(
remaining,
Keyword.put(opts, :from, from_event_number + length(received_events))
)
remaining when remaining < 0 ->
flunk("Received #{remaining} more event(s) than expected")
end
end
    # Builds an EventData envelope carrying a BankAccountOpened payload for the
    # given account number; the initial balance is fixed at 1_000.
    defp build_event(account_number) do
      %EventData{
        causation_id: UUID.uuid4(),
        correlation_id: UUID.uuid4(),
        event_type: "#{__MODULE__}.BankAccountOpened",
        data: %BankAccountOpened{account_number: account_number, initial_balance: 1_000},
        metadata: %{"user_id" => "test"}
      }
    end
defp build_events(count) do
for account_number <- 1..count, do: build_event(account_number)
end
end
end
| 35.289474 | 96 | 0.662938 |
73c26bb29786a5784c734969c73c83b01c4a820a | 730 | ex | Elixir | local/start_script.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | local/start_script.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | local/start_script.ex | wses-yoshida/antikythera | e108e59d2339edd0b0fad31ad4f41f56df45be55 | [
"Apache-2.0"
] | null | null | null | # Copyright(c) 2015-2019 ACCESS CO., LTD. All rights reserved.
use Croma
defmodule AntikytheraLocal.StartScript do
  @moduledoc """
  Invokes a built release's start script (`bin/<instance_name> <command>`)
  with the environment variables a local antikythera runtime expects.
  """

  alias Antikythera.Env
  alias AntikytheraLocal.{Cmd, NodeName}

  defun run(command :: v[String.t], release_dir :: Path.t) :: :ok do
    # Environment handed to the relx-generated start script.
    env_vars = [
      {"ANTIKYTHERA_RUNTIME_ENV", "local"},
      {"MIX_ENV", "prod"},
      {"RELX_REPLACE_OS_VARS", "true"},
      {"NODENAME", Atom.to_string(NodeName.get())},
      {"COOKIE", "local"}
    ]

    Cmd.exec_and_output_log!(
      "sh",
      ["bin/#{Env.antikythera_instance_name()}", command],
      env: env_vars,
      cd: release_dir
    )
  end
end
| 36.5 | 114 | 0.523288 |
73c27209f4b5267519ab7b4d3ff848806a6ce7fe | 458 | exs | Elixir | shoujo/mix.exs | queer/mahou | 09b63a2f7431a7b3364ce81dac35114ea508942f | [
"BSD-3-Clause"
] | 37 | 2021-01-31T17:43:03.000Z | 2021-11-17T12:29:00.000Z | shoujo/mix.exs | queer/mahou | 09b63a2f7431a7b3364ce81dac35114ea508942f | [
"BSD-3-Clause"
] | 4 | 2021-05-29T04:54:28.000Z | 2021-06-26T20:40:48.000Z | shoujo/mix.exs | queer/mahou | 09b63a2f7431a7b3364ce81dac35114ea508942f | [
"BSD-3-Clause"
] | null | null | null | defmodule Shoujo.MixProject do
use Mix.Project
def project do
[
app: :shoujo,
version: "0.1.0",
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
def application do
[
extra_applications: [:logger],
mod: {Shoujo.Application, []}
]
end
defp deps do
[
{:mahou, path: "../libmahou"},
{:ranch, "~> 2.0"},
{:ksuid, "~> 0.1.2"},
]
end
end
| 15.793103 | 42 | 0.495633 |
73c27c55b983655bf68fcc10fb8ee54a60af5ddc | 1,544 | exs | Elixir | lib/iex/test/iex/server_test.exs | QuinnWilton/elixir | e42e3e55ca1561fe56b58d6f51c7b0faae6a7a1e | [
"Apache-2.0"
] | null | null | null | lib/iex/test/iex/server_test.exs | QuinnWilton/elixir | e42e3e55ca1561fe56b58d6f51c7b0faae6a7a1e | [
"Apache-2.0"
] | null | null | null | lib/iex/test/iex/server_test.exs | QuinnWilton/elixir | e42e3e55ca1561fe56b58d6f51c7b0faae6a7a1e | [
"Apache-2.0"
] | null | null | null | Code.require_file "../test_helper.exs", __DIR__
defmodule IEx.ServerTest do
use IEx.Case
  # Disable ANSI colors so captured shell output can be matched as plain text.
  setup do
    IEx.Options.set :colors, enabled: false
    :ok
  end
# Options
test "prefix option" do
assert String.contains? capture_io(fn ->
boot([prefix: "pry"])
end), "pry(1)> "
end
test "delegate_locals_to option" do
assert String.contains? capture_io("sort([:foo, :bar])", fn ->
boot([delegate_locals_to: Enum])
end), "[:bar, :foo]"
end
test "env option" do
assert String.contains? capture_io("__ENV__.file", fn ->
boot([env: __ENV__])
end), "server_test.exs"
end
# Take over
test "allows take over of the shell during boot" do
assert String.contains? capture_io("Y\na+b", fn ->
server = self
boot([], fn ->
opts = [prefix: "dbg", binding: [a: 1, b: 2]]
IEx.Server.take_over("iex:13", opts, 1000, server)
end)
end), "dbg(1)> "
end
test "does not operate if callback during boot fails" do
assert capture_io(fn ->
boot([], fn -> exit(0) end)
end) == nil
end
test "take over fails when there is no shell" do
assert IEx.Server.take_over("iex:13", [], 10) == { :error, :no_iex }
end
test "pry wraps around take over" do
require IEx
assert String.contains? capture_io(fn ->
assert IEx.pry == { :error, :no_iex }
end), "Is an IEx shell running?"
end
# Helpers
  # Boots an IEx server with an empty dot-iex path (so a developer's
  # ~/.iex.exs cannot leak into tests); extra `opts` override the defaults and
  # `callback` (default no-op) is passed through to IEx.Server.start/2.
  defp boot(opts, callback \\ fn -> end) do
    IEx.Server.start(Keyword.merge([dot_iex_path: ""], opts), callback)
  end
end
| 23.393939 | 72 | 0.612694 |
73c29084d4f93ed235f9f6df4fcedc0c71698bde | 1,419 | exs | Elixir | issues/test/table_formatter_test.exs | benjohns1/elixer-app | 6e866ec084c5e75442c0b70f66e35f61b5b74d34 | [
"MIT"
] | null | null | null | issues/test/table_formatter_test.exs | benjohns1/elixer-app | 6e866ec084c5e75442c0b70f66e35f61b5b74d34 | [
"MIT"
] | null | null | null | issues/test/table_formatter_test.exs | benjohns1/elixer-app | 6e866ec084c5e75442c0b70f66e35f61b5b74d34 | [
"MIT"
] | null | null | null | defmodule TableFormatterTest do
use ExUnit.Case
import ExUnit.CaptureIO
alias Issues.TableFormatter, as: TF
def simple_test_data do
[ [ c1: "r1 c1", c2: "r1 c2", c3: "r1 c3", c4: "r1+++c4" ],
[ c1: "r2 c1", c2: "r2 c2", c3: "r2 c3", c4: "r2 c4" ],
[ c1: "r3 c1", c2: "r3 c2", c3: "r3 c3", c4: "r3 c4" ],
[ c1: "r4 c1", c2: "r4++c2", c3: "r4 c3", c4: "r4 c4" ] ]
end
  # Column selection used by the tests — only three of the four columns;
  # :c3 is left out.
  def headers, do: [ :c1, :c2, :c4 ]
  # Fixture rows split into per-column value lists for the selected headers.
  def split_with_three_columns, do: TF.split_into_columns(simple_test_data(), headers())
test "split_into_columns" do
columns = split_with_three_columns()
assert length(columns) == length(headers())
assert List.first(columns) == ["r1 c1", "r2 c1", "r3 c1", "r4 c1"]
assert List.last(columns) == ["r1+++c4", "r2 c4", "r3 c4", "r4 c4"]
end
test "column_widths" do
widths = TF.widths_of(split_with_three_columns())
assert widths == [ 5, 6, 7 ]
end
test "correct format string returned" do
assert TF.format_for([9, 10, 11]) == "~-9s | ~-10s | ~-11s~n"
end
test "output is correct" do
result = capture_io fn -> TF.print_table_for_columns(simple_test_data(), headers()) end
assert result == """
c1 | c2 | c4
------+--------+--------
r1 c1 | r1 c2 | r1+++c4
r2 c1 | r2 c2 | r2 c4
r3 c1 | r3 c2 | r3 c4
r4 c1 | r4++c2 | r4 c4
"""
end
end | 31.533333 | 92 | 0.546159 |
73c2ae85609be26da61a8a5806cd26ae30c849ef | 565 | ex | Elixir | apps/artemis_web/lib/artemis_web/guardian.ex | artemis-platform/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 2 | 2020-04-23T02:29:18.000Z | 2020-07-07T13:13:17.000Z | apps/artemis_web/lib/artemis_web/guardian.ex | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | 4 | 2020-04-26T20:35:36.000Z | 2020-11-10T22:13:19.000Z | apps/artemis_web/lib/artemis_web/guardian.ex | chrislaskey/artemis_teams | 9930c3d9528e37b76f0525390e32b66eed7eadde | [
"MIT"
] | null | null | null | defmodule ArtemisWeb.Guardian do
use Guardian, otp_app: :artemis_web
alias Artemis.GetUser
alias Artemis.GetSystemUser
  # Guardian callback: use the resource's id (stringified) as the JWT subject.
  def subject_for_token(%{id: id}, _claims) do
    {:ok, to_string(id)}
  end
  # Fallback for resources without an :id field.
  def subject_for_token(_, _) do
    {:error, "Error creating token"}
  end
  # Guardian callback: load the user identified by the token's "sub" claim.
  # The lookup runs as the system user, with :permissions and :user_teams
  # preloaded.
  def resource_from_claims(%{"sub" => id}) do
    system_user = GetSystemUser.call!()
    resource = GetUser.call(id, system_user, preload: [:permissions, :user_teams])
    {:ok, resource}
  end
  # Fallback for claim sets without a "sub" key.
  def resource_from_claims(_claims) do
    {:error, "Error reading token"}
  end
end
| 21.730769 | 82 | 0.693805 |
73c2b732d69e333b6340028a9d67397966c5cfe9 | 2,948 | exs | Elixir | apps/api_web/test/api_web/event_stream/diff_test.exs | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 62 | 2019-01-17T12:34:39.000Z | 2022-03-20T21:49:47.000Z | apps/api_web/test/api_web/event_stream/diff_test.exs | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 375 | 2019-02-13T15:30:50.000Z | 2022-03-30T18:50:41.000Z | apps/api_web/test/api_web/event_stream/diff_test.exs | fjlanasa/api | c39bc393aea572bfb81754b2ea1adf9dda9ce24a | [
"MIT"
] | 14 | 2019-01-16T19:35:57.000Z | 2022-02-26T18:55:54.000Z | defmodule ApiWeb.EventStream.DiffTest do
@moduledoc false
use ExUnit.Case, async: true
use ExUnitProperties
import ApiWeb.EventStream.Diff
defp item(id, opts \\ []) do
type = opts[:type] || "item"
attributes = Map.new(Keyword.delete(opts, :type))
%{
"type" => type,
"id" => id,
"attributes" => attributes
}
end
describe "diff/2" do
test "returns empty lists if there are no changes" do
item = item("1")
assert diff([item], [item]) == %{
add: [],
update: [],
remove: []
}
end
test "can add items" do
item = item("1")
same = item("same")
assert diff([same], [same, item]) == %{
add: [item],
update: [],
remove: []
}
end
test "can remove items" do
item = item("1")
same = item("2")
assert diff([item, same], [same]) == %{
add: [],
update: [],
remove: [%{"id" => "1", "type" => "item"}]
}
end
test "can change items" do
old = item("1")
new = item("1", value: "new")
assert diff([old], [new]) == %{
add: [],
update: [new],
remove: []
}
end
test "items with the same ID but different types are distinct" do
a = item("1", type: "a")
b = item("1", type: "b")
same = item("2")
assert diff([a, same], [b, same]) == %{
add: [b],
update: [],
remove: [%{"id" => "1", "type" => "a"}]
}
end
test "can add/update/remove in the same diff" do
one = item("1")
two_old = item("2")
two_new = item("2", value: "new")
three = item("3")
four = item("4")
assert diff([one, two_old, three], [two_new, three, four]) == %{
add: [four],
update: [two_new],
remove: [%{"id" => "1", "type" => "item"}]
}
end
test "if it's shorter, uses a full reset" do
a = item("a")
b = item("b")
c = item("c")
assert diff([a, b, c], [b]) == %{
reset: [[b]]
}
end
test "a diff from or to an empty list is a reset" do
a = item("a")
assert diff([], [a]) == %{
reset: [[a]]
}
assert diff([a], []) == %{
reset: [[]]
}
end
test "items are added in order and removed in reverse order" do
main_items = for id <- ~w(a b c d), do: item(id)
added_items = for id <- ~w(add1 add2), do: item(id)
full_items = added_items ++ main_items
assert diff(main_items, full_items).add == added_items
assert diff(full_items, main_items).remove ==
added_items |> Enum.reverse() |> Enum.map(&Map.delete(&1, "attributes"))
end
end
end
| 24.363636 | 87 | 0.446404 |
73c2bbfe4b4f9b8f119d9772c7284f5ae93f423b | 76 | exs | Elixir | test/views/page_view_test.exs | PandemicPlayers/pandemic-server | 39ce8c12b5f08be7dc66623a69423265f01d23e1 | [
"MIT"
] | null | null | null | test/views/page_view_test.exs | PandemicPlayers/pandemic-server | 39ce8c12b5f08be7dc66623a69423265f01d23e1 | [
"MIT"
] | null | null | null | test/views/page_view_test.exs | PandemicPlayers/pandemic-server | 39ce8c12b5f08be7dc66623a69423265f01d23e1 | [
"MIT"
] | null | null | null | defmodule Pandemic.PageViewTest do
use Pandemic.ConnCase, async: true
end
| 19 | 36 | 0.815789 |
73c2be15281611c90f81354d0bfe5e26e81ca835 | 2,723 | exs | Elixir | test/suite/draft6/any_of_test.exs | depressed-pho/xema | 56de4a5d3b3f37827c70f6052f895c59feb0bb51 | [
"MIT"
] | null | null | null | test/suite/draft6/any_of_test.exs | depressed-pho/xema | 56de4a5d3b3f37827c70f6052f895c59feb0bb51 | [
"MIT"
] | null | null | null | test/suite/draft6/any_of_test.exs | depressed-pho/xema | 56de4a5d3b3f37827c70f6052f895c59feb0bb51 | [
"MIT"
] | null | null | null | defmodule Draft6.AnyOfTest do
use ExUnit.Case, async: true
import Xema, only: [valid?: 2]
describe "anyOf" do
setup do
%{schema: Xema.new(any_of: [:integer, [minimum: 2]])}
end
test "first anyOf valid", %{schema: schema} do
data = 1
assert valid?(schema, data)
end
test "second anyOf valid", %{schema: schema} do
data = 2.5
assert valid?(schema, data)
end
test "both anyOf valid", %{schema: schema} do
data = 3
assert valid?(schema, data)
end
test "neither anyOf valid", %{schema: schema} do
data = 1.5
refute valid?(schema, data)
end
end
describe "anyOf with base schema" do
setup do
%{
schema: Xema.new({:string, [any_of: [[max_length: 2], [min_length: 4]]]})
}
end
test "mismatch base schema", %{schema: schema} do
data = 3
refute valid?(schema, data)
end
test "one anyOf valid", %{schema: schema} do
data = "foobar"
assert valid?(schema, data)
end
test "both anyOf invalid", %{schema: schema} do
data = "foo"
refute valid?(schema, data)
end
end
describe "anyOf with boolean schemas, all true" do
setup do
%{schema: Xema.new(any_of: [true, true])}
end
test "any value is valid", %{schema: schema} do
data = "foo"
assert valid?(schema, data)
end
end
describe "anyOf with boolean schemas, some true" do
setup do
%{schema: Xema.new(any_of: [true, false])}
end
test "any value is valid", %{schema: schema} do
data = "foo"
assert valid?(schema, data)
end
end
describe "anyOf with boolean schemas, all false" do
setup do
%{schema: Xema.new(any_of: [false, false])}
end
test "any value is invalid", %{schema: schema} do
data = "foo"
refute valid?(schema, data)
end
end
describe "anyOf complex types" do
setup do
%{
schema:
Xema.new(
any_of: [
[properties: %{bar: :integer}, required: [:bar]],
[properties: %{foo: :string}, required: [:foo]]
]
)
}
end
test "first anyOf valid (complex)", %{schema: schema} do
data = %{bar: 2}
assert valid?(schema, data)
end
test "second anyOf valid (complex)", %{schema: schema} do
data = %{foo: "baz"}
assert valid?(schema, data)
end
test "both anyOf valid (complex)", %{schema: schema} do
data = %{bar: 2, foo: "baz"}
assert valid?(schema, data)
end
test "neither anyOf valid (complex)", %{schema: schema} do
data = %{bar: "quux", foo: 2}
refute valid?(schema, data)
end
end
end
| 22.319672 | 81 | 0.562982 |
73c2cb891da4305259448e21495de2157396f746 | 2,160 | exs | Elixir | test/paddington/configuration_test.exs | lucidstack/paddington | 24e7709e33d096d7f0d9c5a6e58401b8c6806bcc | [
"Apache-2.0"
] | 6 | 2016-04-09T00:13:20.000Z | 2019-04-15T11:47:59.000Z | test/paddington/configuration_test.exs | lucidstack/paddington | 24e7709e33d096d7f0d9c5a6e58401b8c6806bcc | [
"Apache-2.0"
] | 1 | 2018-03-19T00:08:52.000Z | 2018-03-19T00:08:52.000Z | test/paddington/configuration_test.exs | lucidstack/paddington | 24e7709e33d096d7f0d9c5a6e58401b8c6806bcc | [
"Apache-2.0"
] | 1 | 2018-03-18T23:56:29.000Z | 2018-03-18T23:56:29.000Z | defmodule PaddingtonConfigurationTest do
import Paddington.Configuration
use ExUnit.Case, async: false
import Mock
@yaml_mock %{"device" => "Launchpad Mini", "applications" => ["iTunes", "Finder"]}
test "load/1 returns {:ok, %Configuration{}} from a YAML configuration file" do
with_mock YamlElixir, [read_from_file: fn(_path) -> @yaml_mock end] do
assert {:ok, %Paddington.Configuration{
device: "Launchpad Mini", applications: ["iTunes", "Finder"]
}} == load("~/.paddington.yml")
assert called YamlElixir.read_from_file("~/.paddington.yml")
end
end
@yaml_mock %{"device" => "Launchpad Mini", "appriccatis" => ["iTunes", "Finder"]}
test "load/1 returns {:error, :applications_not_specified} on malformed YAML" do
with_mock YamlElixir, [read_from_file: fn(_path) -> @yaml_mock end] do
assert {:error, :applications_not_specified} == load("~/.paddington.yml")
assert called YamlElixir.read_from_file("~/.paddington.yml")
end
end
@yaml_mock %{"applications" => ["iTunes", "Finder"]}
test "load/1 returns {:error, :device_not_specified} on malformed YAML" do
with_mock YamlElixir, [read_from_file: fn(_path) -> @yaml_mock end] do
assert {:error, :device_not_specified} == load("~/.paddington.yml")
assert called YamlElixir.read_from_file("~/.paddington.yml")
end
end
@yaml_mock %{"device" => "Launchpad Mini", "applications" => ["iTunes", "Finder"]}
test "load!/1 returns a %Configuration{} from a YAML configuration file" do
with_mock YamlElixir, [read_from_file: fn(_path) -> @yaml_mock end] do
assert %Paddington.Configuration{
device: "Launchpad Mini", applications: ["iTunes", "Finder"]
} == load!("~/.paddington.yml")
assert called YamlElixir.read_from_file("~/.paddington.yml")
end
end
@yaml_mock %{"device" => "Launchpad Mini"}
test "load!/1 raises a ConfigurationError on malformed YAML" do
with_mock YamlElixir, [read_from_file: fn(_path) -> @yaml_mock end] do
load_config = fn -> load!("~/.paddington.yml") end
assert_raise Paddington.ConfigurationError, load_config
end
end
end
| 40.754717 | 84 | 0.681019 |
73c2ce8c1e7b61c5cc30d923ccdf75354f468e25 | 3,737 | ex | Elixir | backend/lib/getaways/vacation.ex | abeyonalaja/pragstudio-unpacked-graphql-code | 1b0a79d62b624208ffc85f94c1d59ca3d3dab044 | [
"Unlicense"
] | null | null | null | backend/lib/getaways/vacation.ex | abeyonalaja/pragstudio-unpacked-graphql-code | 1b0a79d62b624208ffc85f94c1d59ca3d3dab044 | [
"Unlicense"
] | 2 | 2020-07-17T17:34:46.000Z | 2021-03-09T11:31:50.000Z | backend/lib/getaways/vacation.ex | abeyonalaja/pragstudio-unpacked-graphql-code | 1b0a79d62b624208ffc85f94c1d59ca3d3dab044 | [
"Unlicense"
] | null | null | null | defmodule Getaways.Vacation do
@moduledoc """
The Vacation context: public interface for finding, booking,
and reviewing vacation places.
"""
import Ecto.Query, warn: false
alias Getaways.Repo
alias Getaways.Vacation.{Place, Booking, Review}
alias Getaways.Accounts.User
@doc """
Returns the place with the given `slug`.
Raises `Ecto.NoResultsError` if no place was found.
"""
def get_place_by_slug!(slug) do
Repo.get_by!(Place, slug: slug)
end
@doc """
Returns a list of all places.
"""
def list_places do
Repo.all(Place)
end
@doc """
Returns a list of places matching the given `criteria`.
Example Criteria:
[{:limit, 15}, {:order, :asc}, {:filter, [{:matching, "lake"}, {:wifi, true}, {:guest_count, 3}]}]
"""
def list_places(criteria) do
query = from p in Place
Enum.reduce(criteria, query, fn
{:limit, limit}, query ->
from p in query, limit: ^limit
{:filter, filters}, query ->
filter_with(filters, query)
{:order, order}, query ->
from p in query, order_by: [{^order, :id}]
end)
|> Repo.all
end
defp filter_with(filters, query) do
Enum.reduce(filters, query, fn
{:matching, term}, query ->
pattern = "%#{term}%"
from q in query,
where:
ilike(q.name, ^pattern) or
ilike(q.description, ^pattern) or
ilike(q.location, ^pattern)
{:pet_friendly, value}, query ->
from q in query, where: q.pet_friendly == ^value
{:pool, value}, query ->
from q in query, where: q.pool == ^value
{:wifi, value}, query ->
from q in query, where: q.wifi == ^value
{:guest_count, count}, query ->
from q in query, where: q.max_guests >= ^count
{:available_between, %{start_date: start_date, end_date: end_date}}, query ->
available_between(query, start_date, end_date)
end)
end
# Returns a query for places available between the given
# start_date and end_date using the Postgres-specific
# OVERLAPS function.
defp available_between(query, start_date, end_date) do
from place in query,
left_join: booking in Booking,
on:
booking.place_id == place.id and
fragment(
"(?, ?) OVERLAPS (?, ? + INTERVAL '1' DAY)",
booking.start_date,
booking.end_date,
type(^start_date, :date),
type(^end_date, :date)
),
where: is_nil(booking.place_id)
end
@doc """
Returns the booking with the given `id`.
Raises `Ecto.NoResultsError` if no booking was found.
"""
def get_booking!(id) do
Repo.get!(Booking, id)
end
@doc """
Creates a booking for the given user.
"""
def create_booking(%User{} = user, attrs) do
%Booking{}
|> Booking.changeset(attrs)
|> Ecto.Changeset.put_assoc(:user, user)
|> Repo.insert()
end
@doc """
Cancels the given booking.
"""
def cancel_booking(%Booking{} = booking) do
booking
|> Booking.cancel_changeset(%{state: "canceled"})
|> Repo.update()
end
@doc """
Creates a review for the given user.
"""
def create_review(%User{} = user, attrs) do
%Review{}
|> Review.changeset(attrs)
|> Ecto.Changeset.put_assoc(:user, user)
|> Repo.insert()
end
# Dataloader
def datasource() do
Dataloader.Ecto.new(Repo, query: &query/2)
end
def query(Booking, %{limit: limit, scope: :place}) do
Booking
|> where(state: "reserved")
|> order_by(asc: :start_date)
|> limit(^limit)
end
def query(Booking, %{scope: :user}) do
Booking
|> order_by(asc: :start_date)
end
def query(queryable, _) do
queryable
end
end
| 23.503145 | 100 | 0.605834 |
73c2d05f622c96c4fe3da578c582390eedb78d29 | 564 | ex | Elixir | plugins/ucc_chat/lib/ucc_chat_web/controllers/home_controller.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | plugins/ucc_chat/lib/ucc_chat_web/controllers/home_controller.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | plugins/ucc_chat/lib/ucc_chat_web/controllers/home_controller.ex | josephkabraham/ucx_ucc | 0dbd9e3eb5940336b4870cff033482ceba5f6ee7 | [
"MIT"
] | null | null | null | defmodule UccChatWeb.HomeController do
use UccChatWeb, :controller
require Logger
alias UccChat.{ChatDat}
alias UccChat.ServiceHelpers, as: Helpers
def index(conn, _params) do
case Helpers.get_user(Coherence.current_user(conn) |> Map.get(:id)) do
nil ->
UcxUccWeb.Coherence.SessionController.delete(conn, %{})
user ->
chatd = ChatDat.new(user)
conn
|> put_layout({UcxUccWeb.LayoutView, "app.html"})
|> put_view(UccChatWeb.HomeView)
|> render("index.html", chatd: chatd)
end
end
end
| 26.857143 | 74 | 0.657801 |
73c2d5ae3a37c9a13817e68caec312ae9cac9c06 | 4,317 | ex | Elixir | lib/plaid/transactions.ex | AlchemyFintech/plaid-elixir | 35ab2cfe61827cd11c98ceb8f52406d879c0d7a8 | [
"MIT"
] | null | null | null | lib/plaid/transactions.ex | AlchemyFintech/plaid-elixir | 35ab2cfe61827cd11c98ceb8f52406d879c0d7a8 | [
"MIT"
] | null | null | null | lib/plaid/transactions.ex | AlchemyFintech/plaid-elixir | 35ab2cfe61827cd11c98ceb8f52406d879c0d7a8 | [
"MIT"
] | null | null | null | defmodule Plaid.Transactions do
@moduledoc """
Functions for Plaid `transactions` endpoint.
"""
import Plaid, only: [make_request_with_cred: 4, validate_cred: 1]
alias Plaid.Utils
@derive Jason.Encoder
defstruct accounts: [], item: nil, total_transactions: nil, transactions: [], request_id: nil
@type t :: %__MODULE__{
accounts: [Plaid.Accounts.Account.t()],
item: Plaid.Item.t(),
total_transactions: integer,
transactions: [Plaid.Transactions.Transaction.t()],
request_id: String.t()
}
@type params :: %{required(atom) => String.t() | map}
@type config :: %{required(atom) => String.t()}
@endpoint :transactions
defmodule Transaction do
  @moduledoc """
  Plaid Transaction data structure.
  """

  @derive Jason.Encoder

  # Every field defaults to nil except `pending`, which defaults to false.
  defstruct [
    :account_id,
    :account_owner,
    :amount,
    :iso_currency_code,
    :unofficial_currency_code,
    :category,
    :category_id,
    :date,
    :location,
    :name,
    :payment_meta,
    :pending_transaction_id,
    :transaction_id,
    :transaction_type,
    :merchant_name,
    :authorized_date,
    :payment_channel,
    :transaction_code,
    pending: false
  ]

  @type t :: %__MODULE__{
          account_id: String.t(),
          account_owner: String.t(),
          amount: float,
          iso_currency_code: String.t(),
          unofficial_currency_code: String.t(),
          category: [String.t()],
          category_id: String.t(),
          date: String.t(),
          location: Plaid.Transactions.Transaction.Location.t(),
          name: String.t(),
          payment_meta: Plaid.Transactions.Transaction.PaymentMeta.t(),
          pending: true | false,
          pending_transaction_id: String.t(),
          transaction_id: String.t(),
          transaction_type: String.t(),
          merchant_name: String.t(),
          authorized_date: String.t(),
          payment_channel: String.t(),
          transaction_code: String.t()
        }

  defmodule Location do
    @moduledoc """
    Plaid Transaction Location data structure.
    """

    @derive Jason.Encoder

    defstruct [:address, :city, :region, :postal_code, :country, :lat, :lon, :store_number]

    @type t :: %__MODULE__{
            address: String.t(),
            city: String.t(),
            region: String.t(),
            postal_code: String.t(),
            country: String.t(),
            lat: float,
            lon: float,
            store_number: integer
          }
  end

  defmodule PaymentMeta do
    @moduledoc """
    Plaid Transaction Payment Metadata data structure.
    """

    @derive Jason.Encoder

    defstruct [
      :by_order_of,
      :payee,
      :payer,
      :payment_method,
      :payment_processor,
      :ppd_id,
      :reason,
      :reference_number
    ]

    @type t :: %__MODULE__{
            by_order_of: String.t(),
            payee: String.t(),
            payer: String.t(),
            payment_method: String.t(),
            payment_processor: String.t(),
            ppd_id: String.t(),
            reason: String.t(),
            reference_number: String.t()
          }
  end
end
@doc """
Gets transactions data associated with an Item.

## Parameters

    %{
      access_token: "access-env-identifier",
      start_date: "2017-01-01",
      end_date: "2017-03-31",
      options: %{
        count: 20,
        offset: 0
      }
    }

"""
@spec get(params, config | nil) :: {:ok, Plaid.Transactions.t()} | {:error, Plaid.Error.t()}
def get(params, config \\ %{}) do
  # Fill in any missing credentials before issuing the request.
  creds = validate_cred(config)

  :post
  |> make_request_with_cred("#{@endpoint}/get", creds, params)
  |> Utils.handle_resp(@endpoint)
end
end
| 28.215686 | 95 | 0.525828 |
73c2d980c5b8d0195bcbcf28f7e2d462f3085d69 | 829 | ex | Elixir | test/support/endpoint.ex | gjacobrobertson/phoenix_live_view | 459fd4939126edeaad673277791d460028bdb102 | [
"MIT"
] | 1 | 2020-12-23T18:28:30.000Z | 2020-12-23T18:28:30.000Z | test/support/endpoint.ex | alvises/phoenix_live_view | 37e32ffe9c59ca4e1176dfa878dbd8f41a002508 | [
"MIT"
] | null | null | null | test/support/endpoint.ex | alvises/phoenix_live_view | 37e32ffe9c59ca4e1176dfa878dbd8f41a002508 | [
"MIT"
defmodule Phoenix.LiveViewTest.Endpoint do
  @moduledoc false

  # Minimal endpoint stub used by the LiveView test suite: it answers the
  # handful of config/url queries LiveView makes and pipes requests
  # through the parsers into the test router.

  @parsers Plug.Parsers.init(
             parsers: [:urlencoded, :multipart, :json],
             pass: ["*/*"],
             json_decoder: Phoenix.json_library()
           )

  def url, do: "http://localhost:4000"
  def script_name, do: []

  # Instrumentation is a no-op: just run the instrumented function.
  def instrument(_, _, _, func), do: func.()

  def config(:live_view), do: [signing_salt: "112345678212345678312345678412"]
  def config(:secret_key_base), do: String.duplicate("57689", 50)
  def config(:cache_static_manifest_latest), do: Process.get(:cache_static_manifest_latest)

  def init(opts), do: opts

  def call(conn, _) do
    conn
    |> Map.put(:secret_key_base, config(:secret_key_base))
    |> Plug.Parsers.call(@parsers)
    |> Plug.Conn.put_private(:phoenix_endpoint, __MODULE__)
    |> Phoenix.LiveViewTest.Router.call([])
  end
end
| 34.541667 | 91 | 0.665862 |
73c2dcb61da595af6d9db198ae250f121392648b | 13,654 | ex | Elixir | lib/drivers/driver.ex | pringlized/Elber | 81394ad29b7a67ac9b8d3e249cde252ec5c44697 | [
"MIT"
] | 1 | 2019-06-20T07:06:16.000Z | 2019-06-20T07:06:16.000Z | lib/drivers/driver.ex | pringlized/Elber | 81394ad29b7a67ac9b8d3e249cde252ec5c44697 | [
"MIT"
] | null | null | null | lib/drivers/driver.ex | pringlized/Elber | 81394ad29b7a67ac9b8d3e249cde252ec5c44697 | [
"MIT"
] | null | null | null | defmodule Elber.Drivers.Driver do
# GenServer simulating a single taxi driver: it deploys into a zone,
# searches for riders by wandering adjacent zones, accepts ride requests,
# travels to pickup/dropoff locations and records fares.
#
# NOTE(review): throughout this module `True`/`False` are Elixir aliases
# (the atoms :"Elixir.True"/:"Elixir.False"), NOT the booleans true/false.
# The comparisons are internally consistent, but easy to misread.
use GenServer
require Logger
alias Elber.Zones.Zone
alias Elber.Server, as: CityServer
# Spawns the driver process with its initial state map.
def start_link(state) do
log(state, :info, "Starting driver process")
GenServer.start_link(__MODULE__, [state], [])
end
# Synchronously fetches the driver's current state (debug/introspection).
def state(driver_pid) do
GenServer.call(driver_pid, {:state})
end
# Called by a rider process; returns :ok when accepted, :denied otherwise.
def request_ride(driver_pid, rider) do
GenServer.call(driver_pid, {:request_ride, rider})
end
# -----------------------------------
# PRIVATE
# -----------------------------------
# Set current location and add to zones_traveled
# (the `state =` rebinding is redundant: the merge is the last expression
# and is returned either way)
defp set_curr_loc(state, loc) do
state = Map.merge(state, %{
curr_loc: loc,
zones_traveled: List.insert_at(state.zones_traveled, 0, loc)
})
end
# Set destination location
# NOTE(review): appears to be unused within this module — confirm before
# removing.
defp set_dest_loc(state, route) do
state = Map.merge(state, %{
curr_route: route,
dest_loc: List.last(route)
})
end
# Using current location, find a route to dest_loc using
# a Breadth First Search
defp find_route(state, dest_loc) do
Elber.Zones.BFS.search(state.curr_loc, dest_loc)
end
# Update route: adds to routes_traveled & current route
defp update_route(state, route) do # when is_list(route) not empty
start_loc = List.first(route)
dest_loc = List.last(route)
log(state, :info, "Traveling route from #{start_loc} to #{dest_loc}")
state = Map.merge(state, %{
dest_loc: List.last(route),
curr_route: route,
routes_traveled: [route] ++ state.routes_traveled
})
end
# Drives the rider from pickup to dropoff.
# NOTE(review): the `state =` rebinding inside the `if` does not escape the
# block in Elixir, so when pickup != dropoff the travelled state is
# discarded and the ORIGINAL state is returned on the final line.
defp begin_trip(state) do
# HACK: Sometimes pickup & dropoff are the same
# If so, skip travel
if state.pickup_loc != state.dropoff_loc do
# get a route, update our state, then travel there
state = find_route(state, state.dropoff_loc)
|> (&update_route(state, &1)).()
|> (&travel_to_dest(&1)).()
end
state
end
# NOTE: State must be all set, otherwise it won't work
# TODO: Remove starting point since it's where we are
# TODO: Get meter_off working
# NOTE(review): same scoping issue as begin_trip/1 — the recursive
# `state = travel_to_dest(state)` inside the `if` is lost and the
# trailing `state` returns the pre-recursion value.
defp travel_to_dest(state) do
# get next location and update remaining route
[next_loc | remaining_route] = state.curr_route
|> (&List.delete(&1, state.curr_loc)).()
# TODO: There eventually needs to be a coefficient as add a time delay
log(state, :debug, "Traveling from #{state.curr_loc} to #{next_loc}")
# update driver location for availability and gps from current_loc to next_loc
update_gps(state.curr_loc, next_loc)
if state.available == True do
update_availability(state.curr_loc, next_loc)
end
# Update current route with remaining way points
state = Map.merge(state, %{curr_route: remaining_route})
# move to next_location
state = set_curr_loc(state, next_loc)
# until we reach our destination, keep traveling recursively
if (state.curr_loc != state.dest_loc) do
state = travel_to_dest(state)
else
state = Map.merge(state, %{
meter_off: "NOW",
arrived: True
})
state
end
state
end
# Writes the fare record (CSV row appended to logs/history.csv), stores it
# in the driver's in-memory ride history, and resets the driver state.
defp end_trip(state) do
log(state, :info, "Drop off [#{state.rider_uuid}]")
# TODO
# Server.write_history(state)
# write a fare record
[route_traveled | _] = state.routes_traveled
# RECORD
# driver uuid
# rider uuid
# request_datetime
# pickup_datetime
# pickup_loc
# dropoff_datetime
# dest_loc
# distance
record = [
state.uuid,
state.rider_uuid,
state.rider_start_datetime,
state.rider_request_datetime,
state.meter_on,
state.pickup_loc,
state.meter_off,
state.dest_loc,
length(route_traveled) - 1
]
# write fare record to a file
data = Enum.join(record, ",") |> (&(&1 <> "\r\n")).()
{:ok, file} = File.open "logs/history.csv", [:append]
IO.binwrite file, data
File.close file
# Add record to driver's history
state = Map.merge(state, %{
ride_history: List.insert_at(state.ride_history, 0, record)
})
# Reset the drvier state
state = reset(state)
end
# Reset the cab back to an available state
defp reset(state) do
log(state, :debug, "RESET ---------------")
state = Map.merge(state, %{
available: True,
has_rider: False,
arrived: False,
in_vehicle: False,
rider_request_datetime: nil,
rider_start_datetime: nil,
dest_loc: nil,
pickup_loc: nil,
dropoff_loc: nil,
meter_on: nil,
meter_off: nil,
rider_pid: nil,
rider_uuid: nil
})
# Update the state cache
status = CityServer.update_state_cache(state)
# Return new state
state
end
# End the driver's shift
defp punch_off(state) do
log(state, :info, "---------- OFF THE CLOCK ----------")
# remove from curr_loc
update_availability(state.curr_loc)
update_gps(state.curr_loc)
# reset cab state
#state = reset(state)
# punch off the clock and stop being available
state = Map.merge(state, %{
available: False,
searching: False
})
end
# Prefixes every log line with "[d][<driver uuid>]" and routes it to the
# matching Logger level (:debug, :error, anything else -> :info).
defp log(state, type, msg) do
predicate = "[d][#{state.uuid}] "
case type do
:debug ->
Logger.debug(predicate <> msg)
:error ->
Logger.error(predicate <> msg)
_ ->
Logger.info(predicate <> msg)
end
end
# Current local time as an ISO extended string (via Timex).
defp get_datetime do
Timex.format!(Timex.local, "{ISO:Extended}")
end
# Notify the Zones of the driver's availability
# (nil arguments skip the respective add/remove side)
defp update_availability(at_loc, to_loc \\ nil) do
if at_loc != nil do
remove_status = Zone.remove_available_driver(at_loc)
end
if to_loc != nil do
add_status = Zone.add_available_driver(to_loc)
end
end
# Notify the Zones of the current driver's location
defp update_gps(at_loc, to_loc \\ nil) do
if at_loc != nil do
remove_status = Zone.remove_driver_in(at_loc)
end
if to_loc != nil do
add_status = Zone.add_driver_in(to_loc)
end
end
# -----------------------------------
# CALLBACKS
# -----------------------------------
# Schedules the initial deploy one second after the process starts.
def init([state]) do
Process.send_after(self(), {:deploy}, 1000)
{:ok, state}
end
# NOTE(review): the `state = Map.merge(...)` inside the `if` does not
# escape the block, so `{:noreply, state}` replies with the UNCHANGED
# state — `available: True` is never persisted into the GenServer state.
def handle_info({:deploy}, state) do
# if not working, start
if state.available == False do
log(state, :info, "Deploying...")
# start working
state = Map.merge(state, %{available: True})
# add driver to zone
update_availability(nil, state.curr_loc)
update_gps(nil, state.curr_loc)
# update the state cache
status = CityServer.update_state_cache(state)
# go find first customer
Process.send_after(self(), {:search}, 5000, [])
end
{:noreply, state}
end
def handle_call({:state}, _from, state) do
{:reply, state, state}
end
# Accepts the ride when available (reply :ok), otherwise replies :denied
# and nudges the search loop if it is idle.
def handle_call({:request_ride, rider}, _from, state) do
# add request to list
state = Map.merge(state, %{
rider_requests: List.insert_at(state.rider_requests, 0, rider.uuid)
})
# is this driver available?
if state.available == True do
state = Map.merge(state, %{
searching: False,
available: False,
has_rider: True,
rider_pid: rider.pid,
rider_uuid: rider.uuid,
rider_start_datetime: rider.start_datetime,
rider_request_datetime: get_datetime,
pickup_loc: rider.pickup_loc,
dropoff_loc: rider.dropoff_loc
})
# remove self from availability to riders
update_availability(state.curr_loc)
# now go pick up the rider
send(self(), {:pickup})
{:reply, :ok, state}
else
if state.searching == False do
send(self(), {:search})
end
{:reply, :denied, state}
end
end
# NOTE(review): every `state = ...` rebinding below happens inside `if`
# and `try` blocks and is therefore lost; the final `{:noreply, state}`
# keeps the state from before the pickup travel/arrival updates.
def handle_info({:pickup}, state) do
if state.has_rider == True && state.available == False do
log(state, :info, "Driving to pickup [#{state.rider_uuid}]...")
# Determine if we need to travel across zones
if state.curr_loc != state.pickup_loc do
# get a route, update our state, then travel there
state = find_route(state, state.pickup_loc)
|> (&update_route(state, &1)).()
|> (&travel_to_dest(&1)).()
else
# NOTE: most times the driver and rider will be in the same zone. To
# simulate driving to the pickup location, pause for a few seconds
:timer.sleep(3000)
end
log(state, :info, "At pickup location [#{state.pickup_loc}] for [#{state.rider_uuid}]")
# send arrived notification to rider
try do
# Notify the rider that the driver has arrived
status = Elber.Riders.Rider.driver_arrived(state.rider_pid)
# send begin trip notification to self
if status == :ok do
state = Map.merge(state, %{
arrived: True,
in_vehicle: True,
})
send(self(), {:begin_trip})
else
log(state, :error, "ERROR picking up [#{state.rider_uuid}]")
end
catch
:exit, _ ->
Logger.error("[#{state.uuid}] ERROR picking up rider [#{state.rider_uuid}]. Resetting")
:timer.sleep(1000)
state = reset(state)
Process.send_after(self(), {:search}, 500)
end
end
{:noreply, state}
end
# Turns the meter on, drives the trip and fires :end_trip on arrival.
def handle_info({:begin_trip}, state) do
log(state, :info, "Begin trip for [#{state.rider_uuid}]")
# Turn the meter on
state = Map.merge(state, %{meter_on: get_datetime})
# get it rollin'
state = begin_trip(state)
# are we at the dropoff location?
if state.curr_loc == state.dropoff_loc do
log(state, :debug, "Reached dropoff for [#{state.rider_uuid}]")
send(self(), {:end_trip})
else
log(state, :error, "ERROR reaching destination for [#{state.rider_uuid}]")
end
{:noreply, state}
end
# TODO: must validate at destination state and ready to end trip
def handle_info({:end_trip}, state) do
log(state, :info, "End trip for [#{state.rider_uuid}]")
# trun the meter off
state = Map.merge(state, %{meter_off: get_datetime})
# Notify rider they are being dropped off
_ = Elber.Riders.Rider.dropoff(state.rider_pid)
# End trip: write record and reset driver status
state = end_trip(state)
# hang for a couple seconds
:timer.sleep(5000)
# start searching again
send(self(), {:search})
{:noreply, state}
end
# Wanders to a random adjacent zone looking for riders until the shift
# (measured in zones traveled) is over.
# NOTE(review): all `state = ...` rebindings inside the else branch are
# scoped to that block; the reply returns the original state.
def handle_info({:search}, state) do
if state.available == True do
# check shift
if Enum.count(state.zones_traveled) >= state.shift_length do
# done working
punch_off(state)
else
log(state, :info, "Searching for a rider")
# make sure driver is available to riders
update_availability(nil, state.curr_loc)
# set searching state
state = Map.merge(state, %{searching: True})
# Move to a random adjacent zone
# - get adjacent zones
# - filter out zone traveled from
# - get random zone from remaining list
[dest_loc] = Zone.get_adjacent_zones(state.curr_loc)
|> (&List.delete(&1, Enum.at(&1, 1))).() # DONT THINK THIS IS WORKING
|> (&Enum.take_random(&1, 1)).()
# get a route, update our state, then travel there
state = find_route(state, dest_loc)
|> (&update_route(state, &1)).()
|> (&travel_to_dest(&1)).()
# reset cab state
state = reset(state)
# hang in this zone for a bit
:timer.sleep(2000)
# continue the search
send(self(), {:search})
end
end
{:noreply, state}
end
end | 31.827506 | 107 | 0.523876 |
73c2e168f15ade7949290cb13f2bfa3ea2bb6e9f | 1,743 | ex | Elixir | lib/mop8/adapter/word_map_store.ex | mopp/mop8 | e256f0fe3b6bcb7c2df2482d4f09d9c972e9ba04 | [
"MIT"
] | null | null | null | lib/mop8/adapter/word_map_store.ex | mopp/mop8 | e256f0fe3b6bcb7c2df2482d4f09d9c972e9ba04 | [
"MIT"
] | null | null | null | lib/mop8/adapter/word_map_store.ex | mopp/mop8 | e256f0fe3b6bcb7c2df2482d4f09d9c972e9ba04 | [
"MIT"
defmodule Mop8.Adapter.WordMapStore do
  @moduledoc """
  File-backed handle for a bot word map.

  Holds the path of the backing JSON file plus an in-memory cache of the
  word map (`nil` until it has been loaded).
  """

  alias Mop8.Bot.WordMap

  @enforce_keys [:filepath, :word_map]
  defstruct [:filepath, :word_map]

  @opaque t :: %__MODULE__{
            filepath: Path.t(),
            word_map: nil | WordMap.t()
          }

  @doc "Builds a store pointing at `path`, with no word map loaded yet."
  @spec new(Path.t()) :: t()
  def new(path) do
    %__MODULE__{filepath: path, word_map: nil}
  end
end
defimpl Mop8.Bot.Repo.WordMap, for: Mop8.Adapter.WordMapStore do
  alias Mop8.Adapter.WordMapStore
  alias Mop8.Bot.Repo
  alias Mop8.Bot.WordMap

  # Loads the word map, reading it from disk on first use and caching it
  # in the store afterwards.
  @spec load(Repo.WordMap.t()) ::
          {:ok, {Repo.WordMap.t(), WordMap.t()}} | {:error, reason :: any()}
  def load(store) do
    case store do
      %WordMapStore{word_map: nil, filepath: filepath} ->
        # First access: read and decode the file, then memoize the result.
        with {:ok, word_map} <- load_from_file(filepath) do
          {:ok, {%{store | word_map: word_map}, word_map}}
        end

      %WordMapStore{word_map: word_map} ->
        # Already cached in memory.
        {:ok, {store, word_map}}
    end
  end

  # Reads and decodes the JSON file. A missing file is not an error: it
  # yields an empty word map. Any other read/decode error is passed through.
  def load_from_file(filepath) do
    with {:ok, raw_json} <- File.read(filepath),
         {:ok, raw_word_map} <- Poison.decode(raw_json) do
      word_map =
        Map.new(raw_word_map, fn {key, stat} ->
          # NOTE(review): String.to_atom/1 creates atoms from file contents.
          # Atoms are never garbage collected, so a large or tampered file
          # could exhaust the atom table — consider String.to_existing_atom/1
          # if the inner keys are a fixed set.
          {key, Map.new(stat, fn {k, v} -> {String.to_atom(k), v} end)}
        end)

      {:ok, word_map}
    else
      {:error, :enoent} ->
        {:ok, WordMap.new()}

      error ->
        error
    end
  end

  # Persists the word map as JSON and caches it in the store.
  @spec store(Repo.WordMap.t(), WordMap.t()) ::
          {:ok, Repo.WordMap.t()} | {:error, reason :: any()}
  def store(%WordMapStore{filepath: filepath} = store, word_map) do
    # Use `<-` (not `=`) so an encoding failure falls through and returns
    # {:error, reason} as the @spec promises, instead of raising MatchError.
    # NOTE(review): store/2 expands the path before writing while
    # load_from_file/1 reads it verbatim — confirm both resolve to the same
    # file for relative paths.
    with {:ok, raw_json} <- Poison.encode(word_map),
         :ok <- File.write(Path.expand(filepath), raw_json) do
      {:ok, %{store | word_map: word_map}}
    end
  end
end
| 25.632353 | 76 | 0.582903 |
73c2e5c22b36ea79e813350f691837efa6b524b4 | 125 | ex | Elixir | lib/types/get_parameter_attributes_response.ex | smiyabe/cwmp_ex | 9db322497aa3208b5985ccf496ada5286cde3925 | [
"Artistic-2.0"
] | 3 | 2017-11-29T05:07:35.000Z | 2019-12-18T17:16:41.000Z | lib/types/get_parameter_attributes_response.ex | smiyabe/cwmp_ex | 9db322497aa3208b5985ccf496ada5286cde3925 | [
"Artistic-2.0"
] | 1 | 2021-12-02T19:35:28.000Z | 2022-03-29T09:40:52.000Z | lib/types/get_parameter_attributes_response.ex | smiyabe/cwmp_ex | 9db322497aa3208b5985ccf496ada5286cde3925 | [
"Artistic-2.0"
defmodule CWMP.Protocol.Messages.GetParameterAttributesResponse do
  @moduledoc """
  CWMP GetParameterAttributesResponse message: carries the list of
  parameter attribute entries returned by the device.
  """

  @derive [Poison.Encoder]

  defstruct parameters: []
end
| 25 | 66 | 0.816 |
73c2ecd3ae923f4fb6ddf099abe632c6546d5da9 | 2,220 | ex | Elixir | lib/scenic/primitive/group.ex | PragTob/scenic | 7e5193c2a0b86768937e5bbd7c22f00544d26048 | [
"Apache-2.0"
] | null | null | null | lib/scenic/primitive/group.ex | PragTob/scenic | 7e5193c2a0b86768937e5bbd7c22f00544d26048 | [
"Apache-2.0"
] | null | null | null | lib/scenic/primitive/group.ex | PragTob/scenic | 7e5193c2a0b86768937e5bbd7c22f00544d26048 | [
"Apache-2.0"
] | null | null | null | #
# Created by Boyd Multerer on 5/6/17.
# Copyright © 2017 Kry10 Industries. All rights reserved.
#
defmodule Scenic.Primitive.Group do
  @moduledoc """
  A primitive whose data is the list of uids of its child elements.
  """

  use Scenic.Primitive

  alias Scenic.Primitive

  # ============================================================================
  # data verification and serialization

  # Building with no data is the same as building an empty group.
  def build(nil, opts), do: build([], opts)

  def build(ids, opts) do
    verify!(ids)
    Primitive.build(__MODULE__, ids, opts)
  end

  # --------------------------------------------------------
  # Error text shown when verification fails.
  def info(data),
    do: """
    #{IO.ANSI.red()}#{__MODULE__} data must be a list of valid uids of other elements in the graph.
    #{IO.ANSI.yellow()}Received: #{inspect(data)}
    #{IO.ANSI.default_color()}
    """

  # --------------------------------------------------------
  # Valid data is a list of integer uids.
  def verify(ids) when is_list(ids) do
    if Enum.all?(ids, &is_integer/1) do
      {:ok, ids}
    else
      :invalid_data
    end
  end

  def verify(_), do: :invalid_data

  # ============================================================================
  # filter and gather styles

  @spec valid_styles() :: [:all, ...]
  def valid_styles(), do: [:all]

  def filter_styles(styles) when is_map(styles), do: styles

  # ============================================================================
  # apis to manipulate the list of child ids

  # Insert a child uid at the given index.
  def insert_at(%Primitive{module: __MODULE__, data: uid_list} = p, index, uid) do
    %{p | data: List.insert_at(uid_list, index, uid)}
  end

  # Remove every occurrence of the given uid.
  def delete(%Primitive{module: __MODULE__, data: uid_list} = p, uid) do
    %{p | data: Enum.reject(uid_list, &(&1 == uid))}
  end

  # Shift every child uid by the given offset.
  def increment(%Primitive{module: __MODULE__, data: uid_list} = p, offset) do
    %{p | data: Enum.map(uid_list, &(&1 + offset))}
  end
end
| 28.101266 | 101 | 0.435586 |
73c31989ff27acafb59aa4c00cc9549051d6a7d2 | 66,896 | ex | Elixir | lib/ecto/query/planner.ex | begedin/ecto | 01e259330f6cc5f689280e501866e31904576737 | [
"Apache-2.0"
] | 2 | 2021-02-25T15:51:16.000Z | 2021-02-25T18:42:35.000Z | lib/ecto/query/planner.ex | begedin/ecto | 01e259330f6cc5f689280e501866e31904576737 | [
"Apache-2.0"
] | null | null | null | lib/ecto/query/planner.ex | begedin/ecto | 01e259330f6cc5f689280e501866e31904576737 | [
"Apache-2.0"
] | 1 | 2018-06-18T14:47:58.000Z | 2018-06-18T14:47:58.000Z | defmodule Ecto.Query.Planner do
# Normalizes a query and its parameters.
@moduledoc false
alias Ecto.Query.{BooleanExpr, DynamicExpr, FromExpr, JoinExpr, QueryExpr, SelectExpr}
# Compile-time sanity check: if Ecto.Query gains/loses fields, the
# hard-coded struct matches in the builder must be revisited.
if map_size(%Ecto.Query{}) != 21 do
raise "Ecto.Query match out of date in builder"
end
# Alias key under which the parent query is exposed to subqueries
# (see plan_subquery/5).
@parent_as __MODULE__
@doc """
Converts a query to a list of joins.
The from is moved as last join with the where conditions as its "on"
in order to keep proper binding order.
"""
def query_to_joins(qual, source, %{wheres: wheres, joins: joins}, position) do
on = %QueryExpr{file: __ENV__.file, line: __ENV__.line, expr: true, params: []}
on =
Enum.reduce(wheres, on, fn %BooleanExpr{op: op, expr: expr, params: params}, acc ->
merge_expr_and_params(op, acc, expr, params)
end)
join = %JoinExpr{qual: qual, source: source, file: __ENV__.file, line: __ENV__.line, on: on}
last = length(joins) + position
# Binding 0 (the from) becomes the last join; everything else shifts.
mapping = fn
0 -> last
ix -> ix + position - 1
end
for {%{on: on} = join, ix} <- Enum.with_index(joins ++ [join]) do
%{join | on: rewrite_sources(on, mapping), ix: ix + position}
end
end
defp merge_expr_and_params(op, %QueryExpr{expr: left_expr, params: left_params} = struct,
right_expr, right_params) do
right_expr = Ecto.Query.Builder.bump_interpolations(right_expr, left_params)
%{struct | expr: merge_expr(op, left_expr, right_expr), params: left_params ++ right_params}
end
# `true` is the neutral element for the boolean merge, so it is dropped.
defp merge_expr(_op, left, true), do: left
defp merge_expr(_op, true, right), do: right
defp merge_expr(op, left, right), do: {op, [], [left, right]}
@doc """
Rewrites the given query expression sources using the given mapping.
"""
def rewrite_sources(%{expr: expr, params: params} = part, mapping) do
expr =
Macro.prewalk expr, fn
%Ecto.Query.Tagged{type: type, tag: tag} = tagged ->
%{tagged | type: rewrite_type(type, mapping), tag: rewrite_type(tag, mapping)}
{:&, meta, [ix]} ->
{:&, meta, [mapping.(ix)]}
other ->
other
end
params =
Enum.map params, fn
{val, type} ->
{val, rewrite_type(type, mapping)}
val ->
val
end
%{part | expr: expr, params: params}
end
# Types may reference a binding as {ix, field} (optionally inside a
# composite); remap the binding index, leave anything else untouched.
defp rewrite_type({composite, {ix, field}}, mapping) when is_integer(ix) do
{composite, {mapping.(ix), field}}
end
defp rewrite_type({ix, field}, mapping) when is_integer(ix) do
{mapping.(ix), field}
end
defp rewrite_type(other, _mapping) do
other
end
@doc """
Define the query cache table.
"""
def new_query_cache(name) do
:ets.new(name, [:set, :public, read_concurrency: true])
end
@doc """
Plans the query for execution.
Planning happens in multiple steps:
1. First the query is planned by retrieving
its cache key, casting and merging parameters
2. Then a cache lookup is done, if the query is
cached, we are done
3. If there is no cache, we need to actually
normalize and validate the query, asking the
adapter to prepare it
4. The query is sent to the adapter to be generated
## Cache
All entries in the query, except the preload and sources
field, should be part of the cache key.
The cache value is the compiled query by the adapter
along-side the select expression.
"""
def query(query, operation, cache, adapter, counter) do
{query, params, key} = plan(query, operation, adapter)
query_with_cache(key, query, operation, cache, adapter, counter, params)
end
# Dispatches on the lookup result: a 3-tuple means uncacheable, :cached
# means the adapter already processed it, :cache means prepared only.
defp query_with_cache(key, query, operation, cache, adapter, counter, params) do
case query_lookup(key, query, operation, cache, adapter, counter) do
{_, select, prepared} ->
{build_meta(query, select), {:nocache, prepared}, params}
{_key, :cached, select, cached} ->
update = &cache_update(cache, key, &1)
reset = &cache_reset(cache, key, &1)
{build_meta(query, select), {:cached, update, reset, cached}, params}
{_key, :cache, select, prepared} ->
update = &cache_update(cache, key, &1)
{build_meta(query, select), {:cache, update, prepared}, params}
end
end
defp query_lookup(:nocache, query, operation, _cache, adapter, counter) do
query_without_cache(query, operation, adapter, counter)
end
defp query_lookup(key, query, operation, cache, adapter, counter) do
case :ets.lookup(cache, key) do
[term] -> term
[] -> query_prepare(query, operation, adapter, counter, cache, key)
end
end
defp query_prepare(query, operation, adapter, counter, cache, key) do
case query_without_cache(query, operation, adapter, counter) do
{:cache, select, prepared} ->
cache_insert(cache, key, {key, :cache, select, prepared})
{:nocache, _, _} = nocache ->
nocache
end
end
# insert_new may lose a race against a concurrent process; in that case
# reuse the winner's entry instead of overwriting it.
defp cache_insert(cache, key, elem) do
case :ets.insert_new(cache, elem) do
true ->
elem
false ->
[elem] = :ets.lookup(cache, key)
elem
end
end
defp cache_update(cache, key, cached) do
_ = :ets.update_element(cache, key, [{2, :cached}, {4, cached}])
:ok
end
defp cache_reset(cache, key, prepared) do
_ = :ets.update_element(cache, key, [{2, :cache}, {4, prepared}])
:ok
end
defp query_without_cache(query, operation, adapter, counter) do
{query, select} = normalize(query, operation, adapter, counter)
{cache, prepared} = adapter.prepare(operation, query)
{cache, select, prepared}
end
defp build_meta(%{sources: sources, preloads: preloads}, select) do
%{select: select, preloads: preloads, sources: sources}
end
@doc """
Prepares the query for cache.
This means all the parameters from query expressions are
merged into a single value and their entries are pruned
from the query.
This function is called by the backend before invoking
any cache mechanism.
"""
@spec plan(Query.t, atom, module) :: {planned_query :: Query.t, parameters :: list, cache_key :: any}
def plan(query, operation, adapter) do
query
|> plan_sources(adapter)
|> plan_assocs()
|> plan_combinations(adapter)
|> plan_ctes(adapter)
|> plan_wheres(adapter)
|> plan_cache(operation, adapter)
rescue
e ->
# Reraise errors so we ignore the planner inner stacktrace
filter_and_reraise e, __STACKTRACE__
end
@doc """
Prepare all sources, by traversing and expanding from, joins, subqueries.
"""
def plan_sources(query, adapter) do
{from, source} = plan_from(query, adapter)
# Set up the initial source so we can refer
# to the parent in subqueries in joins
query = %{query | sources: {source}}
{joins, sources, tail_sources} = plan_joins(query, [source], length(query.joins), adapter)
%{query | from: from,
joins: joins |> Enum.reverse,
sources: (tail_sources ++ sources) |> Enum.reverse |> List.to_tuple()}
end
defp plan_from(%{from: nil} = query, _adapter) do
error!(query, "query must have a from expression")
end
defp plan_from(%{from: from} = query, adapter) do
plan_source(query, from, adapter)
end
# Sources may be a subquery, a schema, a {source, schema} pair or a
# fragment; each clause computes the {expr, source} pair for one shape.
defp plan_source(query, %{source: %Ecto.SubQuery{} = subquery, prefix: prefix} = expr, adapter) do
subquery = plan_subquery(subquery, query, prefix, adapter, true)
{%{expr | source: subquery}, subquery}
end
defp plan_source(query, %{source: {nil, schema}} = expr, _adapter)
when is_atom(schema) and schema != nil do
source = schema.__schema__(:source)
prefix = plan_source_schema_prefix(expr, schema) || query.prefix
{%{expr | source: {source, schema}}, {source, schema, prefix}}
end
defp plan_source(query, %{source: {source, schema}, prefix: prefix} = expr, _adapter)
when is_binary(source) and is_atom(schema),
do: {expr, {source, schema, prefix || query.prefix}}
# Fragment sources carry no schema, so a prefix option is meaningless.
defp plan_source(_query, %{source: {:fragment, _, _} = source, prefix: nil} = expr, _adapter),
do: {expr, source}
defp plan_source(query, %{source: {:fragment, _, _}, prefix: prefix} = expr, _adapter),
do: error!(query, expr, "cannot set prefix: #{inspect(prefix)} option for fragment joins")
# Plans a subquery: exposes the parent under @parent_as, plans it as a
# regular :all query, normalizes its select, then removes the parent alias.
defp plan_subquery(subquery, query, prefix, adapter, source?) do
%{query: inner_query} = subquery
inner_query = %{
inner_query
| prefix: prefix || subquery.query.prefix || query.prefix,
aliases: Map.put(inner_query.aliases, @parent_as, query)
}
{inner_query, params, key} = plan(inner_query, :all, adapter)
assert_no_subquery_assocs!(inner_query)
{inner_query, select} =
inner_query
|> ensure_select(true)
|> normalize_subquery_select(adapter, source?)
{_, inner_query} = pop_in(inner_query.aliases[@parent_as])
%{subquery | query: inner_query, params: params, cache: key, select: select}
rescue
e -> raise Ecto.SubQueryError, query: query, exception: e
end
# The prefix for form are computed upfront, but not for joins
defp plan_source_schema_prefix(%FromExpr{prefix: prefix}, _schema),
do: prefix
defp plan_source_schema_prefix(%JoinExpr{prefix: prefix}, schema),
do: prefix || schema.__schema__(:prefix)
defp assert_no_subquery_assocs!(%{assocs: assocs, preloads: preloads} = query)
when assocs != [] or preloads != [] do
error!(query, "cannot preload associations in subquery")
end
defp assert_no_subquery_assocs!(query) do
q
defp plan_joins(query, sources, offset, adapter) do
plan_joins(query.joins, query, [], sources, [], 1, offset, adapter)
end
defp plan_joins([%JoinExpr{assoc: {ix, assoc}, qual: qual, on: on, prefix: prefix} = join|t],
query, joins, sources, tail_sources, counter, offset, adapter) do
schema = schema_for_association_join!(query, join, Enum.fetch!(Enum.reverse(sources), ix))
refl = schema.__schema__(:association, assoc)
unless refl do
error! query, join, "could not find association `#{assoc}` on schema #{inspect schema}"
end
# If we have the following join:
#
# from p in Post,
# join: p in assoc(p, :comments)
#
# The callback below will return a query that contains only
# joins in a way it starts with the Post and ends in the
# Comment.
#
# This means we need to rewrite the joins below to properly
# shift the &... identifier in a way that:
#
# &0 -> becomes assoc ix
# &LAST_JOIN -> becomes counter
#
# All values in the middle should be shifted by offset,
# all values after join are already correct.
child = refl.__struct__.joins_query(refl)
# Rewrite prefixes:
# 1. the child query has the parent query prefix
# (note the child query should NEVER have a prefix)
# 2. from and joins can have their prefixes explicitly
# overwritten by the join prefix
child = rewrite_prefix(child, query.prefix)
child = update_in child.from, &rewrite_prefix(&1, prefix)
child = update_in child.joins, &Enum.map(&1, fn join -> rewrite_prefix(join, prefix) end)
last_ix = length(child.joins)
source_ix = counter
{_, child_from_source} = plan_source(child, child.from, adapter)
{child_joins, child_sources, child_tail} =
plan_joins(child, [child_from_source], offset + last_ix - 1, adapter)
# Rewrite joins indexes as mentioned above
child_joins = Enum.map(child_joins, &rewrite_join(&1, qual, ix, last_ix, source_ix, offset))
# Drop the last resource which is the association owner (it is reversed)
child_sources = Enum.drop(child_sources, -1)
[current_source|child_sources] = child_sources
child_sources = child_tail ++ child_sources
plan_joins(t, query, attach_on(child_joins, on) ++ joins, [current_source|sources],
child_sources ++ tail_sources, counter + 1, offset + length(child_sources), adapter)
end
# Interpolated-query join: only a query that was reduced to pure filters
# (wheres) can be inlined — its conditions become the join's ON clause.
# Anything richer must be wrapped in subquery/1 by the user.
defp plan_joins([%JoinExpr{source: %Ecto.Query{} = join_query, qual: qual, on: on, prefix: prefix} = join|t],
                query, joins, sources, tail_sources, counter, offset, adapter) do
  case join_query do
    %{order_bys: [], limit: nil, offset: nil, group_bys: [], joins: [],
      havings: [], preloads: [], assocs: [], distinct: nil, lock: nil} ->
      join_query = rewrite_prefix(join_query, query.prefix)
      from = rewrite_prefix(join_query.from, prefix)
      {from, source} = plan_source(join_query, from, adapter)
      [join] = attach_on(query_to_joins(qual, from.source, join_query, counter), on)
      plan_joins(t, query, [join|joins], [source|sources], tail_sources, counter + 1, offset, adapter)
    _ ->
      error! query, join, """
      invalid query was interpolated in a join.
      If you want to pass a query to a join, you must either:

        1. Make sure the query only has `where` conditions (which will be converted to ON clauses)
        2. Or wrap the query in a subquery by calling subquery(query)
      """
  end
end

# Plain join (table/schema/fragment source): plan the source and assign
# the next binding index to the join.
defp plan_joins([%JoinExpr{} = join|t],
                query, joins, sources, tail_sources, counter, offset, adapter) do
  {join, source} = plan_source(query, %{join | ix: counter}, adapter)
  plan_joins(t, query, [join|joins], [source|sources], tail_sources, counter + 1, offset, adapter)
end

# All joins consumed; joins/sources were accumulated in reverse order.
defp plan_joins([], _query, joins, sources, tail_sources, _counter, _offset, _adapter) do
  {joins, sources, tail_sources}
end
# ANDs the given boolean expression (and its params) onto the ON clause
# of the first join in the list; remaining joins are untouched.
defp attach_on([first | rest], %{expr: expr, params: params}) do
  combined_on = merge_expr_and_params(:and, first.on, expr, params)
  [%{first | on: combined_on} | rest]
end
# Writes `prefix` onto the expression only when the expression carries no
# prefix of its own. A nil incoming prefix is a no-op and an explicit
# prefix on the expression always wins.
defp rewrite_prefix(expr, nil) do
  expr
end

defp rewrite_prefix(%{prefix: nil} = expr, prefix) do
  %{expr | prefix: prefix}
end

defp rewrite_prefix(expr, _prefix) do
  expr
end
# Rewrites a join copied from an association's joins_query so its binding
# indexes fit the parent query (see the index-shifting comment earlier in
# plan_joins): index 0 becomes the owner `ix`, the last join becomes
# `source_ix`, and intermediate indexes are shifted by `inc_ix`.
defp rewrite_join(%{on: on, ix: join_ix} = join, qual, ix, last_ix, source_ix, inc_ix) do
  expr = Macro.prewalk on.expr, fn
    {:&, meta, [join_ix]} ->
      {:&, meta, [rewrite_ix(join_ix, ix, last_ix, source_ix, inc_ix)]}
    expr = %Ecto.Query.Tagged{type: {type_ix, type}} when is_integer(type_ix) ->
      # Tagged expressions can carry a source index in their type as well.
      %{expr | type: {rewrite_ix(type_ix, ix, last_ix, source_ix, inc_ix), type}}
    other ->
      other
  end

  # Params may also embed source indexes in their types; shift those too.
  params = Enum.map(on.params, &rewrite_param_ix(&1, ix, last_ix, source_ix, inc_ix))

  %{join | on: %{on | expr: expr, params: params}, qual: qual,
           ix: rewrite_ix(join_ix, ix, last_ix, source_ix, inc_ix)}
end
# Maps a binding index from an association's standalone query into the
# parent query's binding space:
#
#   * 0 (the association owner's source) becomes `ix`
#   * the last join becomes the current source index `source_ix`
#   * indexes above the last join are already correct
#   * everything in between is shifted by `inc_ix`
#
# The branch order mirrors the original clause order (0 wins over last_ix).
defp rewrite_ix(join_ix, ix, last_ix, source_ix, inc_ix) do
  cond do
    join_ix == 0 -> ix
    join_ix == last_ix -> source_ix
    join_ix > last_ix -> join_ix
    true -> join_ix + inc_ix
  end
end
# Shifts the source index embedded in a parameter's type, when present.
# Params may carry `{index, field}` types either wrapped in an outer tag
# or bare; anything else passes through unchanged.
defp rewrite_param_ix({v, {wrapper, {idx, field}}}, ix, last_ix, source_ix, inc_ix)
     when is_integer(idx) do
  {v, {wrapper, {rewrite_ix(idx, ix, last_ix, source_ix, inc_ix), field}}}
end

defp rewrite_param_ix({v, {idx, field}}, ix, last_ix, source_ix, inc_ix)
     when is_integer(idx) do
  {v, {rewrite_ix(idx, ix, last_ix, source_ix, inc_ix), field}}
end

defp rewrite_param_ix(other, _ix, _last_ix, _source_ix, _inc_ix), do: other
# Resolves the schema module behind a join source so an association join
# can be expanded; raises a query error for schemaless sources.
defp schema_for_association_join!(query, join, source) do
  case source do
    {:fragment, _, _} ->
      error! query, join, "cannot perform association joins on fragment sources"
    {source, nil, _} ->
      error! query, join, "cannot perform association join on #{inspect source} " <>
                          "because it does not have a schema"
    {_, schema, _} ->
      schema
    %Ecto.SubQuery{select: {:source, {_, schema}, _, _}} ->
      schema
    %Ecto.SubQuery{select: {:struct, schema, _}} ->
      # Subqueries only keep schema information when selecting a
      # source/struct, handled by the two clauses above.
      schema
    %Ecto.SubQuery{} ->
      error! query, join, "can only perform association joins on subqueries " <>
                          "that return a source with schema in select"
    _ ->
      error! query, join, "can only perform association joins on sources with a schema"
  end
end
# Plans any subqueries interpolated inside `where` clauses. Wheres
# without subqueries pass through untouched.
@spec plan_wheres(Ecto.Query.t, module) :: Ecto.Query.t
defp plan_wheres(query, adapter) do
  planned_wheres =
    for where <- query.wheres do
      case where.subqueries do
        [] ->
          where

        subqueries ->
          planned = Enum.map(subqueries, &plan_subquery(&1, query, nil, adapter, false))
          %{where | subqueries: planned}
      end
    end

  %{query | wheres: planned_wheres}
end
@doc """
Prepare the parameters by merging and casting them according to sources.
"""
def plan_cache(query, operation, adapter) do
{query, params, cache} = traverse_cache(query, operation, {[], []}, adapter)
{query, Enum.reverse(params), cache}
end
# Walks every query expression once, accumulating both the cache key and
# the cast parameters via merge_cache/6, then seals the key.
defp traverse_cache(query, operation, cache_params, adapter) do
  fun = &{&3, merge_cache(&1, &2, &3, &4, operation, adapter)}
  {query, {cache, params}} = traverse_exprs(query, operation, cache_params, fun)
  {query, params, finalize_cache(query, operation, cache)}
end
# The from clause contributes its source cache key directly.
defp merge_cache(:from, _query, from, {cache, params}, _operation, _adapter) do
  {key, params} = source_cache(from, params)
  {merge_cache(key, cache, key != :nocache), params}
end
# Single optional expressions: cache the expression AST when present;
# absent expressions leave cache and params untouched.
defp merge_cache(kind, query, expr, {cache, params}, _operation, adapter)
     when kind in ~w(select distinct limit offset)a do
  if expr do
    {params, cacheable?} = cast_and_merge_params(kind, query, expr, params, adapter)
    {merge_cache({kind, expr_to_cache(expr)}, cache, cacheable?), params}
  else
    {cache, params}
  end
end
# Lists of expressions: cache each AST and AND their cacheability; an
# empty list contributes nothing to the key.
defp merge_cache(kind, query, exprs, {cache, params}, _operation, adapter)
     when kind in ~w(where update group_by having order_by)a do
  {expr_cache, {params, cacheable?}} =
    Enum.map_reduce exprs, {params, true}, fn expr, {params, cacheable?} ->
      {params, current_cacheable?} = cast_and_merge_params(kind, query, expr, params, adapter)
      {expr_to_cache(expr), {params, cacheable? and current_cacheable?}}
    end

  case expr_cache do
    [] -> {cache, params}
    _ -> {merge_cache({kind, expr_cache}, cache, cacheable?), params}
  end
end
# Joins: the key covers qualifier, source key and ON expression; params
# from both the join source and the ON clause are merged.
defp merge_cache(:join, query, exprs, {cache, params}, _operation, adapter) do
  {expr_cache, {params, cacheable?}} =
    Enum.map_reduce exprs, {params, true}, fn
      %JoinExpr{on: on, qual: qual} = join, {params, cacheable?} ->
        {key, params} = source_cache(join, params)
        {params, join_cacheable?} = cast_and_merge_params(:join, query, join, params, adapter)
        {params, on_cacheable?} = cast_and_merge_params(:join, query, on, params, adapter)
        {{qual, key, on.expr},
         {params, cacheable? and join_cacheable? and on_cacheable? and key != :nocache}}
    end

  case expr_cache do
    [] -> {cache, params}
    _ -> {merge_cache({:join, expr_cache}, cache, cacheable?), params}
  end
end
# Windows: cache each named window definition alongside its name.
defp merge_cache(:windows, query, exprs, {cache, params}, _operation, adapter) do
  {expr_cache, {params, cacheable?}} =
    Enum.map_reduce exprs, {params, true}, fn {key, expr}, {params, cacheable?} ->
      {params, current_cacheable?} = cast_and_merge_params(:windows, query, expr, params, adapter)
      {{key, expr_to_cache(expr)}, {params, cacheable? and current_cacheable?}}
    end

  case expr_cache do
    [] -> {cache, params}
    _ -> {merge_cache({:windows, expr_cache}, cache, cacheable?), params}
  end
end
# Combinations (union/except/intersect): recurse into each combined
# query's full cache computation.
defp merge_cache(:combination, _query, combinations, cache_and_params, operation, adapter) do
  # In here we add each combination as its own entry in the cache key.
  # We could group them to avoid multiple keys, but since they are uncommon, we keep it simple.
  Enum.reduce combinations, cache_and_params, fn {modifier, query}, {cache, params} ->
    {_, params, inner_cache} = traverse_cache(query, operation, {[], params}, adapter)
    {merge_cache({modifier, inner_cache}, cache, inner_cache != :nocache), params}
  end
end
# No CTEs: nothing to merge.
defp merge_cache(:with_cte, _query, nil, cache_and_params, _operation, _adapter) do
  cache_and_params
end

# CTEs: each named CTE is cached under a recursive/non-recursive tag.
# Query CTEs recurse into full cache computation; fragment CTEs cache
# their expression directly.
defp merge_cache(:with_cte, query, with_expr, cache_and_params, _operation, adapter) do
  %{queries: queries, recursive: recursive} = with_expr
  key = if recursive, do: :recursive_cte, else: :non_recursive_cte

  # In here we add each cte as its own entry in the cache key.
  # We could group them to avoid multiple keys, but since they are uncommon, we keep it simple.
  Enum.reduce queries, cache_and_params, fn
    {name, %Ecto.Query{} = query}, {cache, params} ->
      {_, params, inner_cache} = traverse_cache(query, :all, {[], params}, adapter)
      {merge_cache({key, name, inner_cache}, cache, inner_cache != :nocache), params}

    {name, %Ecto.Query.QueryExpr{} = query_expr}, {cache, params} ->
      {params, cacheable?} = cast_and_merge_params(:with_cte, query, query_expr, params, adapter)
      {merge_cache({key, name, expr_to_cache(query_expr)}, cache, cacheable?), params}
  end
end
# Extracts the cacheable portion of an expression: the AST (plus the op
# for boolean expressions), dropping params and file/line metadata.
defp expr_to_cache(%QueryExpr{expr: expr}), do: expr
defp expr_to_cache(%SelectExpr{expr: expr}), do: expr
defp expr_to_cache(%BooleanExpr{op: op, expr: expr, subqueries: []}), do: {op, expr}
defp expr_to_cache(%BooleanExpr{op: op, expr: expr, subqueries: subqueries}) do
  # Alternate implementation could be replace {:subquery, i} expression in expr.
  # Current strategy appends [{:subquery, i, cache}], where cache is the cache key for this subquery.
  {op, expr, Enum.map(subqueries, fn %{cache: cache} -> {:subquery, cache} end)}
end
# Casts each pinned parameter of an expression and prepends it (reversed)
# onto the accumulator. Subquery placeholders splice in the subquery's
# own params; `{:in, _}` casts make the query uncacheable since the
# number of parameters depends on the list length.
@spec cast_and_merge_params(atom, Query.t, any, list, module) :: {params :: list, cacheable? :: boolean}
defp cast_and_merge_params(kind, query, expr, params, adapter) do
  Enum.reduce expr.params, {params, true}, fn
    {:subquery, i}, {acc, cacheable?} ->
      # This is the place holder to intersperse subquery parameters.
      %Ecto.SubQuery{params: subparams, cache: cache} = Enum.fetch!(expr.subqueries, i)
      {Enum.reverse(subparams, acc), cacheable? and cache != :nocache}

    {v, type}, {acc, cacheable?} ->
      case cast_param(kind, query, expr, v, type, adapter) do
        {:in, v} -> {Enum.reverse(v, acc), false}
        v -> {[v | acc], cacheable?}
      end
  end
end
# Prepends a cache key onto the accumulated cache. Once anything is not
# cacheable — or the accumulator is already poisoned — the whole key
# collapses to :nocache.
defp merge_cache(key, acc, cacheable?) do
  case {acc, cacheable?} do
    {_, false} -> :nocache
    {:nocache, true} -> :nocache
    {acc, true} -> [key | acc]
  end
end
# An uncacheable query stays uncacheable.
defp finalize_cache(_query, _operation, :nocache) do
  :nocache
end

# Seals the cache key by prepending query-level attributes (take, assocs,
# prefix, lock, aliases) that affect the generated SQL, then the operation.
defp finalize_cache(query, operation, cache) do
  %{assocs: assocs, prefix: prefix, lock: lock, select: select, aliases: aliases} = query
  # The parent-as alias is planner bookkeeping, not part of the query shape.
  aliases = Map.delete(aliases, @parent_as)

  cache =
    case select do
      %{take: take} when take != %{} ->
        [take: take] ++ cache
      _ ->
        cache
    end

  cache =
    cache
    |> prepend_if(assocs != [], [assocs: assocs])
    |> prepend_if(prefix != nil, [prefix: prefix])
    |> prepend_if(lock != nil, [lock: lock])
    |> prepend_if(aliases != %{}, [aliases: aliases])

  [operation | cache]
end
# Prepends the given keyword entries onto the cache only when the
# condition holds (the condition is always a strict boolean here).
defp prepend_if(cache, condition?, prepend) do
  case condition? do
    true -> prepend ++ cache
    false -> cache
  end
end
# Computes the cache key for a source. Schema sources include the
# schema's compile-time `__schema__(:hash)` in the key; subqueries reuse
# their own cache key and splice their params into the accumulator.
defp source_cache(%{source: {_, nil} = source, prefix: prefix}, params),
  do: {{source, prefix}, params}
defp source_cache(%{source: {bin, schema}, prefix: prefix}, params),
  do: {{bin, schema, schema.__schema__(:hash), prefix}, params}
defp source_cache(%{source: {:fragment, _, _} = source, prefix: prefix}, params),
  do: {{source, prefix}, params}
defp source_cache(%{source: %Ecto.SubQuery{params: inner, cache: key}}, params),
  do: {key, Enum.reverse(inner, params)}
# Interpolating a dynamic at parameter position is always an error.
defp cast_param(_kind, query, expr, %DynamicExpr{}, _type, _value) do
  error! query, expr, "invalid dynamic expression",
                      "dynamic expressions can only be interpolated at the top level of where, having, group_by, order_by, update or a join's on"
end
# Same for keyword lists interpolated as values.
defp cast_param(_kind, query, expr, [{key, _} | _], _type, _value) when is_atom(key) do
  error! query, expr, "invalid keyword list",
                      "keyword lists are only allowed at the top level of where, having, distinct, order_by, update or a join's on"
end
# Queries/subqueries interpolated on the right of `in` must use subquery/1.
defp cast_param(_kind, query, expr, %x{}, {:in, _type}, _value) when x in [Ecto.Query, Ecto.SubQuery] do
  error! query, expr, "an #{inspect(x)} struct is not supported as right-side value of `in` operator",
                      "Did you mean to write `expr in subquery(query)` instead?"
end
# Regular value: resolve the field type then cast/dump it, translating
# query errors raised during casting into a CastError.
defp cast_param(kind, query, expr, v, type, adapter) do
  type = field_type!(kind, query, expr, type)

  try do
    case cast_param(kind, type, v, adapter) do
      {:ok, v} -> v
      {:error, error} -> error! query, expr, error
    end
  catch
    :error, %Ecto.QueryError{} = e ->
      raise Ecto.Query.CastError, value: v, type: type, message: Exception.message(e)
  end
end

# Normalize the param type, cast the value, then dump via the adapter.
defp cast_param(kind, type, v, adapter) do
  with {:ok, type} <- normalize_param(kind, type, v),
       {:ok, v} <- cast_param(kind, type, v),
       do: dump_param(adapter, type, v)
end
@doc """
Prepare association fields found in the query.
"""
def plan_assocs(query) do
plan_assocs(query, 0, query.assocs)
query
end
defp plan_assocs(_query, _ix, []), do: :ok

# Recursively validates each preload association: the field must be an
# association on the parent schema and, when bound to a join, the join
# qualifier must be inner/left (possibly lateral).
defp plan_assocs(query, ix, assocs) do
  # We validate the schema exists when preparing joins above
  {_, parent_schema, _} = get_preload_source!(query, ix)

  Enum.each assocs, fn {assoc, {child_ix, child_assocs}} ->
    refl = parent_schema.__schema__(:association, assoc)

    unless refl do
      error! query, "field `#{inspect parent_schema}.#{assoc}` " <>
                    "in preload is not an association"
    end

    case find_source_expr(query, child_ix) do
      %JoinExpr{qual: qual} when qual in [:inner, :left, :inner_lateral, :left_lateral] ->
        :ok
      %JoinExpr{qual: qual} ->
        error! query, "association `#{inspect parent_schema}.#{assoc}` " <>
                      "in preload requires an inner, left or lateral join, got #{qual} join"
      _ ->
        :ok
    end

    plan_assocs(query, child_ix, child_assocs)
  end
end
# Plans each combined query (union/except/intersect) as a full :all
# query, inheriting the parent prefix and ensuring a select clause.
defp plan_combinations(query, adapter) do
  combinations =
    Enum.map query.combinations, fn {type, combination_query} ->
      {prepared_query, _params, _key} = combination_query |> attach_prefix(query) |> plan(:all, adapter)
      prepared_query = prepared_query |> ensure_select(true)
      {type, prepared_query}
    end

  %{query | combinations: combinations}
end
# Plans each CTE that is itself an Ecto query; fragment CTEs (or any
# non-query entry) pass through untouched.
defp plan_ctes(%Ecto.Query{with_ctes: nil} = query, _adapter), do: query
defp plan_ctes(%Ecto.Query{with_ctes: %{queries: queries}} = query, adapter) do
  queries =
    Enum.map queries, fn
      {name, %Ecto.Query{} = cte_query} ->
        {planned_query, _params, _key} = cte_query |> attach_prefix(query) |> plan(:all, adapter)
        planned_query = planned_query |> ensure_select(true)
        {name, planned_query}

      {name, other} ->
        {name, other}
    end

  put_in(query.with_ctes.queries, queries)
end
# Copies the outer query's prefix onto an inner query that has none;
# an explicit prefix on the inner query always wins.
defp attach_prefix(%{prefix: nil} = inner, %{prefix: outer_prefix}) do
  %{inner | prefix: outer_prefix}
end

defp attach_prefix(inner, _outer) do
  inner
end
# Looks up the expression owning binding `ix`: the from clause for 0,
# otherwise the join with that index (nil when no join matches).
defp find_source_expr(query, binding_ix) do
  case binding_ix do
    0 -> query.from
    _ -> Enum.find(query.joins, fn join -> join.ix == binding_ix end)
  end
end
@doc """
Used for customizing the query returning result.
"""
def ensure_select(%{select: select} = query, _fields) when select != nil do
query
end
def ensure_select(%{select: nil}, []) do
raise ArgumentError, ":returning expects at least one field to be given, got an empty list"
end
def ensure_select(%{select: nil} = query, fields) when is_list(fields) do
%{query | select: %SelectExpr{expr: {:&, [], [0]}, take: %{0 => {:any, fields}},
line: __ENV__.line, file: __ENV__.file}}
end
def ensure_select(%{select: nil, from: %{source: {_, nil}}} = query, true) do
error! query, "queries that do not have a schema need to explicitly pass a :select clause"
end
def ensure_select(%{select: nil} = query, true) do
%{query | select: %SelectExpr{expr: {:&, [], [0]}, line: __ENV__.line, file: __ENV__.file}}
end
def ensure_select(%{select: nil} = query, false) do
query
end
@doc """
Normalizes and validates the query.
After the query was planned and there is no cache
entry, we need to update its interpolations and check
its fields and associations exist and are valid.
"""
def normalize(query, operation, adapter, counter) do
query
|> normalize_query(operation, adapter, counter)
|> elem(0)
|> normalize_select(keep_literals?(query))
rescue
e ->
# Reraise errors so we ignore the planner inner stacktrace
filter_and_reraise e, __STACKTRACE__
end
# Whether select literals should be kept: true whenever the query takes
# part in combinations (union/except/intersect).
defp keep_literals?(%{combinations: combination_list}) do
  combination_list != []
end
# Asserts operation-specific invariants (e.g. no updates in :all, only
# filters in :update_all/:delete_all) then walks every expression,
# validating it and renumbering parameter placeholders.
defp normalize_query(query, operation, adapter, counter) do
  case operation do
    :all ->
      assert_no_update!(query, operation)
    :update_all ->
      assert_update!(query, operation)
      assert_only_filter_expressions!(query, operation)
    :delete_all ->
      assert_no_update!(query, operation)
      assert_only_filter_expressions!(query, operation)
  end

  traverse_exprs(query, operation, counter,
                 &validate_and_increment(&1, &2, &3, &4, operation, adapter))
end
# Subqueries in from are only allowed for :all operations.
defp validate_and_increment(:from, query, %{source: %Ecto.SubQuery{}}, _counter, kind, _adapter) when kind != :all do
  error! query, "`#{kind}` does not allow subqueries in `from`"
end
defp validate_and_increment(:from, query, %{source: source} = expr, counter, _kind, adapter) do
  {source, acc} = prewalk_source(source, :from, query, expr, counter, adapter)
  {%{expr | source: source}, acc}
end

# Single optional expressions are prewalked when present.
defp validate_and_increment(kind, query, expr, counter, _operation, adapter)
     when kind in ~w(select distinct limit offset)a do
  if expr do
    prewalk(kind, query, expr, counter, adapter)
  else
    {nil, counter}
  end
end
# Lists of expressions are prewalked one by one; expressions that were
# emptied out (e.g. by filters) are dropped entirely.
defp validate_and_increment(kind, query, exprs, counter, _operation, adapter)
     when kind in ~w(where group_by having order_by update)a do
  {exprs, counter} =
    Enum.reduce(exprs, {[], counter}, fn
      %{expr: []}, {list, acc} ->
        {list, acc}
      expr, {list, acc} ->
        {expr, acc} = prewalk(kind, query, expr, acc, adapter)
        {[expr|list], acc}
    end)
  {Enum.reverse(exprs), counter}
end
defp validate_and_increment(:with_cte, _query, nil, counter, _operation, _adapter) do
  {nil, counter}
end

# Each query CTE is fully traversed as an :all query and its select
# fields are turned into keyword pairs so column aliases are emitted;
# fragment CTEs only get their source prewalked.
defp validate_and_increment(:with_cte, query, with_expr, counter, _operation, adapter) do
  fun = &validate_and_increment(&1, &2, &3, &4, :all, adapter)

  {queries, counter} =
    Enum.reduce with_expr.queries, {[], counter}, fn
      {name, %Ecto.Query{} = query}, {queries, counter} ->
        # We don't want to use normalize_subquery_select because we are
        # going to prepare the whole query ourselves next.
        {_, query} = rewrite_subquery_select_expr(query, true)
        {query, counter} = traverse_exprs(query, :all, counter, fun)

        # Now compute the fields as keyword lists so we emit AS in Ecto query.
        %{select: %{expr: expr, take: take}} = query
        {source, fields, _from} = collect_fields(expr, [], :never, query, take, true)
        {_, keys} = subquery_struct_and_fields(source)
        query = put_in(query.select.fields, Enum.zip(keys, Enum.reverse(fields)))
        {[{name, query} | queries], counter}

      {name, %QueryExpr{expr: {:fragment, _, _} = fragment} = query_expr}, {queries, counter} ->
        {fragment, counter} = prewalk_source(fragment, :with_cte, query, with_expr, counter, adapter)
        query_expr = %{query_expr | expr: fragment}
        {[{name, query_expr} | queries], counter}
    end

  {%{with_expr | queries: Enum.reverse(queries)}, counter}
end
# Joins: prewalk both the source and the ON clause; params were already
# merged during planning so they are cleared here.
defp validate_and_increment(:join, query, exprs, counter, _operation, adapter) do
  Enum.map_reduce exprs, counter, fn join, acc ->
    {source, acc} = prewalk_source(join.source, :join, query, join, acc, adapter)
    {on, acc} = prewalk(:join, query, join.on, acc, adapter)
    {%{join | on: on, source: source, params: nil}, acc}
  end
end
# Windows: prewalk each named window definition in order.
defp validate_and_increment(:windows, query, exprs, counter, _operation, adapter) do
  {exprs, counter} =
    Enum.reduce(exprs, {[], counter}, fn {name, expr}, {list, acc} ->
      {expr, acc} = prewalk(:windows, query, expr, acc, adapter)
      {[{name, expr}|list], acc}
    end)

  {Enum.reverse(exprs), counter}
end
# Combinations: each combined query is traversed and its select is
# normalized (keeping literals) so columns line up across queries.
defp validate_and_increment(:combination, _query, combinations, counter, operation, adapter) do
  fun = &validate_and_increment(&1, &2, &3, &4, operation, adapter)

  {combinations, counter} =
    Enum.reduce combinations, {[], counter}, fn {type, combination_query}, {combinations, counter} ->
      {combination_query, counter} = traverse_exprs(combination_query, operation, counter, fun)
      {combination_query, _} = combination_query |> normalize_select(true)
      {[{type, combination_query} | combinations], counter}
    end

  {Enum.reverse(combinations), counter}
end
# Validates a json_extract_path path against an embed's schema: string
# segments must name fields of an embeds_one, integer segments index
# into an embeds_many. Raises on any mismatch.
defp validate_json_path!([path_field | rest], field, embed) do
  case embed do
    %{related: related, cardinality: :one} ->
      unless Enum.any?(related.__schema__(:fields), &Atom.to_string(&1) == path_field) do
        raise "field `#{path_field}` does not exist in #{inspect(related)}"
      end

      path_embed = related.__schema__(:embed, String.to_atom(path_field))
      validate_json_path!(rest, path_field, path_embed)

    %{related: _, cardinality: :many} ->
      unless is_integer(path_field) do
        raise "cannot use `#{path_field}` to refer to an item in `embeds_many`"
      end

      # After indexing into the list, validate the rest as a single item.
      validate_json_path!(rest, path_field, %{embed | cardinality: :one})

    other ->
      raise "expected field `#{field}` to be of type embed, got: `#{inspect(other)}`"
  end
end

defp validate_json_path!([], _field, _type) do
  :ok
end
# Prewalks a source: fragments have their interpolations walked;
# subqueries are fully normalized (with access to the parent bindings
# via the parent-as alias); anything else passes through.
defp prewalk_source({:fragment, meta, fragments}, kind, query, expr, acc, adapter) do
  {fragments, acc} = prewalk(fragments, kind, query, expr, acc, adapter)
  {{:fragment, meta, fragments}, acc}
end
defp prewalk_source(%Ecto.SubQuery{query: inner_query} = subquery, kind, query, _expr, counter, adapter) do
  try do
    # Expose the parent query to the subquery for parent_as/1 lookups,
    # then remove it again once normalization is done.
    inner_query = put_in inner_query.aliases[@parent_as], query
    {inner_query, counter} = normalize_query(inner_query, :all, adapter, counter)
    {inner_query, _} = normalize_select(inner_query, true)
    {_, inner_query} = pop_in(inner_query.aliases[@parent_as])

    inner_query =
      # If the subquery comes from a select, we are not really interested on the fields
      if kind == :where do
        inner_query
      else
        update_in(inner_query.select.fields, fn fields ->
          subquery.select |> subquery_struct_and_fields() |> elem(1) |> Enum.zip(fields)
        end)
      end

    {%{subquery | query: inner_query}, counter}
  rescue
    e -> raise Ecto.SubQueryError, query: query, exception: e
  end
end
defp prewalk_source(source, _kind, _query, _expr, acc, _adapter) do
  {source, acc}
end
# Update expressions: walk each {op, field/value} pair, resolving field
# sources (e.g. :source options on fields) against binding 0.
defp prewalk(:update, query, expr, counter, adapter) do
  source = get_source!(:update, query, 0)

  {inner, acc} =
    Enum.map_reduce expr.expr, counter, fn {op, kw}, counter ->
      {kw, acc} =
        Enum.map_reduce kw, counter, fn {field, value}, counter ->
          {value, acc} = prewalk(value, :update, query, expr, counter, adapter)
          {{field_source(source, field), value}, acc}
        end
      {{op, kw}, acc}
    end

  {%{expr | expr: inner, params: nil}, acc}
end

# All other expressions: walk the AST; params were merged during
# planning and are cleared here.
defp prewalk(kind, query, expr, counter, adapter) do
  {inner, acc} = prewalk(expr.expr, kind, query, expr, counter, adapter)
  {%{expr | expr: inner, params: nil}, acc}
end
# `left in ^param`: the pinned list is validated and renumbered.
defp prewalk({:in, in_meta, [left, {:^, meta, [param]}]}, kind, query, expr, acc, adapter) do
  {left, acc} = prewalk(left, kind, query, expr, acc, adapter)
  {right, acc} = validate_in(meta, expr, param, acc, adapter)
  {{:in, in_meta, [left, right]}, acc}
end

# `left in subquery(...)`: the subquery must select exactly one field.
defp prewalk({:in, in_meta, [left, {:subquery, i}]}, kind, query, expr, acc, adapter) do
  {left, acc} = prewalk(left, kind, query, expr, acc, adapter)
  {right, acc} = prewalk_source(Enum.fetch!(expr.subqueries, i), kind, query, expr, acc, adapter)

  case right.query.select.fields do
    [_] -> :ok
    _ -> error!(query, "subquery must return a single field in order to be used on the right-side of `in`")
  end

  {{:in, in_meta, [left, right]}, acc}
end
# exists/any/all over a subquery: any select shape is fine for exists,
# otherwise the subquery must return a single field.
defp prewalk({quantifier, meta, [{:subquery, i}]}, kind, query, expr, acc, adapter) when quantifier in [:exists, :any, :all] do
  subquery = Enum.fetch!(expr.subqueries, i)
  {subquery, acc} = prewalk_source(subquery, kind, query, expr, acc, adapter)

  case {quantifier, subquery.query.select.fields} do
    {:exists, _} ->
      :ok
    {_, [_]} ->
      :ok
    _ ->
      error!(
        query,
        "subquery must return a single field in order to be used with #{quantifier}"
      )
  end

  {{quantifier, meta, [subquery]}, acc}
end
# Field access (`binding.field`): resolve the binding index, annotate
# select fields with their type and apply any :source field renaming.
defp prewalk({{:., dot_meta, [left, field]}, meta, []},
             kind, query, expr, acc, _adapter) do
  {ix, ix_expr, ix_query} = get_ix!(left, kind, query)
  extra = if kind == :select, do: [type: type!(kind, ix_query, expr, ix, field)], else: []
  field = field_source(get_source!(kind, ix_query, ix), field)
  {{{:., extra ++ dot_meta, [ix_expr, field]}, meta, []}, acc}
end

# Pinned parameter: replace the original index with the running counter.
defp prewalk({:^, meta, [ix]}, _kind, _query, _expr, acc, _adapter) when is_integer(ix) do
  {{:^, meta, [acc]}, acc + 1}
end

# type/2: resolve the declared type and tag the walked expression.
defp prewalk({:type, _, [arg, type]}, kind, query, expr, acc, adapter) do
  {arg, acc} = prewalk(arg, kind, query, expr, acc, adapter)
  type = field_type!(kind, query, expr, type)
  {%Ecto.Query.Tagged{value: arg, tag: type, type: Ecto.Type.type(type)}, acc}
end
# json_extract_path/2: the source field must be an embed (whose path is
# validated against its schema) or a map-like type; raises otherwise.
defp prewalk({:json_extract_path, meta, [json_field, path]}, kind, query, expr, acc, _adapter) do
  {{:., _, [{:&, _, [ix]}, field]}, _, []} = json_field

  case type!(kind, query, expr, ix, field) do
    {:parameterized, Ecto.Embedded, embed} ->
      validate_json_path!(path, field, embed)

    type ->
      case Ecto.Type.type(type) do
        :any ->
          :ok
        :map ->
          :ok
        {:map, _} ->
          :ok
        _ ->
          raise "expected field `#{field}` to be an embed or a map, got: `#{inspect(type)}`"
      end
  end

  {{:json_extract_path, meta, [json_field, path]}, acc}
end
# Tagged values with base types are kept as-is; custom types are dumped
# through the adapter here.
defp prewalk(%Ecto.Query.Tagged{value: v, type: type} = tagged, kind, query, expr, acc, adapter) do
  if Ecto.Type.base?(type) do
    {tagged, acc}
  else
    {dump_param(kind, query, expr, v, type, adapter), acc}
  end
end

# Generic traversal: 2-tuples, 3-tuple AST nodes and lists are walked
# structurally; any other literal passes through unchanged.
defp prewalk({left, right}, kind, query, expr, acc, adapter) do
  {left, acc} = prewalk(left, kind, query, expr, acc, adapter)
  {right, acc} = prewalk(right, kind, query, expr, acc, adapter)
  {{left, right}, acc}
end

defp prewalk({left, meta, args}, kind, query, expr, acc, adapter) do
  {left, acc} = prewalk(left, kind, query, expr, acc, adapter)
  {args, acc} = prewalk(args, kind, query, expr, acc, adapter)
  {{left, meta, args}, acc}
end

defp prewalk(list, kind, query, expr, acc, adapter) when is_list(list) do
  Enum.map_reduce(list, acc, &prewalk(&1, kind, query, expr, &2, adapter))
end

defp prewalk(other, _kind, _query, _expr, acc, _adapter) do
  {other, acc}
end
# Dumps a literal value through the adapter, raising a query error with
# a hint about interpolation when the value cannot be dumped.
defp dump_param(kind, query, expr, v, type, adapter) do
  type = field_type!(kind, query, expr, type)

  case dump_param(kind, type, v, adapter) do
    {:ok, v} ->
      v
    {:error, error} ->
      error = error <> ". Or the value is incompatible or it must be " <>
                       "interpolated (using ^) so it may be cast accordingly"
      error! query, expr, error
  end
end

# Normalize the type for the given kind, then dump via the adapter.
defp dump_param(kind, type, v, adapter) do
  with {:ok, type} <- normalize_param(kind, type, v),
       do: dump_param(adapter, type, v)
end
# Renumbers a pinned `in` list: when the adapter dumps the type as an
# :in parameter, each element consumes its own placeholder; otherwise
# the list is sent as a single parameter.
defp validate_in(meta, expr, param, acc, adapter) do
  {v, t} = Enum.fetch!(expr.params, param)
  length = length(v)

  case adapter.dumpers(t, t) do
    [{:in, _} | _] -> {{:^, meta, [acc, length]}, acc + length}
    _ -> {{:^, meta, [acc, length]}, acc + 1}
  end
end
defp normalize_select(%{select: nil} = query, _keep_literals?) do
  {query, nil}
end

# Collects the fields to load from the database and builds the select
# metadata (preprocess/postprocess instructions) used to rebuild the
# selected structure from the returned rows.
defp normalize_select(query, keep_literals?) do
  %{assocs: assocs, preloads: preloads, select: select} = query
  %{take: take, expr: expr} = select
  {tag, from_take} = Map.get(take, 0, {:any, []})
  source = get_source!(:select, query, 0)
  assocs = merge_assocs(assocs, query)

  # In from, if there is a schema and we have a map tag with preloads,
  # it needs to be converted to a map in a later pass.
  {take, from_tag} =
    case source do
      {source, schema, _}
      when tag == :map and preloads != [] and is_binary(source) and schema != nil ->
        {Map.put(take, 0, {:struct, from_take}), :map}
      _ ->
        {take, :any}
    end

  {postprocess, fields, from} =
    collect_fields(expr, [], :none, query, take, keep_literals?)

  {fields, preprocess, from} =
    case from do
      {:ok, from_pre, from_expr, from_taken} ->
        {assoc_exprs, assoc_fields} = collect_assocs([], [], query, tag, from_take, assocs)
        fields = from_taken ++ Enum.reverse(assoc_fields, Enum.reverse(fields))
        preprocess = [from_pre | Enum.reverse(assoc_exprs)]
        {fields, preprocess, {from_tag, from_expr}}

      :none when preloads != [] or assocs != [] ->
        error! query, "the binding used in `from` must be selected in `select` when using `preload`"

      :none ->
        {Enum.reverse(fields), [], :none}
    end

  select = %{
    preprocess: preprocess,
    postprocess: postprocess,
    take: from_take,
    assocs: assocs,
    from: from
  }

  {put_in(query.select.fields, fields), select}
end
# Handling of source
# `merge(from, right)`: take the from source, then merge the collected
# right-hand side on top of it.
defp collect_fields({:merge, _, [{:&, _, [0]}, right]}, fields, :none, query, take, keep_literals?) do
  {expr, taken} = source_take!(:select, query, take, 0, 0)
  from = {:ok, {:source, :from}, expr, taken}

  {right, right_fields, _from} = collect_fields(right, [], from, query, take, keep_literals?)
  from = {:ok, {:merge, {:source, :from}, right}, expr, taken ++ Enum.reverse(right_fields)}

  {{:source, :from}, fields, from}
end

# First reference to binding 0: records it as the from entry.
defp collect_fields({:&, _, [0]}, fields, :none, query, take, _keep_literals?) do
  {expr, taken} = source_take!(:select, query, take, 0, 0)
  {{:source, :from}, fields, {:ok, {:source, :from}, expr, taken}}
end

# Subsequent references to binding 0 reuse the already-recorded from
# (except when from tracking is disabled via :never).
defp collect_fields({:&, _, [0]}, fields, from, _query, _take, _keep_literals?)
     when from != :never do
  {{:source, :from}, fields, from}
end

# Any other binding: expand its taken fields inline.
defp collect_fields({:&, _, [ix]}, fields, from, query, take, _keep_literals?) do
  {expr, taken} = source_take!(:select, query, take, ix, ix)
  {expr, Enum.reverse(taken, fields), from}
end
# Expression handling
@aggs ~w(count avg min max sum row_number rank dense_rank percent_rank cume_dist ntile lag lead first_value last_value nth_value)a

# Aggregates/window functions over a field: counting-style functions are
# integers, the rest keep (or upcast to) the field's own type.
defp collect_fields({agg, _, [{{:., dot_meta, [{:&, _, [_]}, _]}, _, []} | _]} = expr,
                    fields, from, _query, _take, _keep_literals?)
     when agg in @aggs do
  type =
    case agg do
      :count -> :integer
      :row_number -> :integer
      :rank -> :integer
      :dense_rank -> :integer
      :ntile -> :integer
      # If it is possible to upcast, we do it, otherwise keep the DB value.
      # For example, an average of integers will return a decimal, which can't be cast
      # as an integer. But an average of "moneys" should be upcast.
      _ -> {:maybe, Keyword.fetch!(dot_meta, :type)}
    end

  {{:value, type}, [expr | fields], from}
end
# filter/2 inherits the type of the wrapped aggregate call.
defp collect_fields({:filter, _, [call, _]} = expr, fields, from, query, take, keep_literals?) do
  {type, _, _} = collect_fields(call, fields, from, query, take, keep_literals?)
  {type, [expr | fields], from}
end

# coalesce/2 keeps the common type when both sides agree, otherwise :any.
defp collect_fields({:coalesce, _, [left, right]} = expr, fields, from, query, take, _keep_literals?) do
  {left_type, _, _} = collect_fields(left, fields, from, query, take, true)
  {right_type, _, _} = collect_fields(right, fields, from, query, take, true)

  type = if left_type == right_type, do: left_type, else: {:value, :any}
  {type, [expr | fields], from}
end

# over/2 validates that a named window exists and inherits the call type.
defp collect_fields({:over, _, [call, window]} = expr, fields, from, query, take, keep_literals?) do
  if is_atom(window) and not Keyword.has_key?(query.windows, window) do
    error!(query, "unknown window #{inspect window} given to over/2")
  end

  {type, _, _} = collect_fields(call, fields, from, query, take, keep_literals?)
  {type, [expr | fields], from}
end
defp collect_fields({{:., dot_meta, [{:&, _, [_]}, _]}, _, []} = expr,
fields, from, _query, _take, _keep_literals?) do
{{:value, Keyword.fetch!(dot_meta, :type)}, [expr | fields], from}
end
# Two-element tuples have their own literal AST shape; normalize them to
# the general `{:{}, _, args}` representation and handle both in one place.
defp collect_fields({left, right}, fields, from, query, take, keep_literals?) do
  collect_fields({:{}, [], [left, right]}, fields, from, query, take, keep_literals?)
end

defp collect_fields({:{}, _, elements}, fields, from, query, take, keep_literals?) do
  {collected, fields, from} =
    collect_args(elements, fields, from, query, take, keep_literals?, [])

  {{:tuple, collected}, fields, from}
end
# Map update syntax: `%{data | key: value, ...}`.
defp collect_fields({:%{}, _, [{:|, _, [data, args]}]}, fields, from, query, take, keep_literals?) do
  {data, fields, from} = collect_fields(data, fields, from, query, take, keep_literals?)
  {args, fields, from} = collect_kv(args, fields, from, query, take, keep_literals?, [])
  {{:map, data, args}, fields, from}
end

# Map literal: `%{key: value, ...}`.
defp collect_fields({:%{}, _, args}, fields, from, query, take, keep_literals?) do
  {args, fields, from} = collect_kv(args, fields, from, query, take, keep_literals?, [])
  {{:map, args}, fields, from}
end

# Struct update syntax: `%Name{data | key: value, ...}`. `struct!/2` is
# called for its side effect only — its return is discarded, so it appears
# intended to raise at planning time on keys unknown to the struct.
defp collect_fields({:%, _, [name, {:%{}, _, [{:|, _, [data, args]}]}]},
                    fields, from, query, take, keep_literals?) do
  {data, fields, from} = collect_fields(data, fields, from, query, take, keep_literals?)
  {args, fields, from} = collect_kv(args, fields, from, query, take, keep_literals?, [])
  struct!(name, args)
  {{:struct, name, data, args}, fields, from}
end

# Struct literal: `%Name{key: value, ...}`.
defp collect_fields({:%, _, [name, {:%{}, _, args}]}, fields, from, query, take, keep_literals?) do
  {args, fields, from} = collect_kv(args, fields, from, query, take, keep_literals?, [])
  struct!(name, args)
  {{:struct, name, args}, fields, from}
end
# merge/2 combines two map/struct expressions into a merge node.
defp collect_fields({:merge, _, args}, fields, from, query, take, keep_literals?) do
  {[left, right], fields, from} = collect_args(args, fields, from, query, take, keep_literals?, [])
  {{:merge, left, right}, fields, from}
end

# date_add/3: a :date result unless the first argument resolves to a more
# specific type of its own.
defp collect_fields({:date_add, _, [arg | _]} = expr, fields, from, query, take, keep_literals?) do
  case collect_fields(arg, fields, from, query, take, keep_literals?) do
    {{:value, :any}, _, _} -> {{:value, :date}, [expr | fields], from}
    {type, _, _} -> {type, [expr | fields], from}
  end
end

# datetime_add/3: like date_add/3 but defaults to :naive_datetime.
defp collect_fields({:datetime_add, _, [arg | _]} = expr, fields, from, query, take, keep_literals?) do
  case collect_fields(arg, fields, from, query, take, keep_literals?) do
    {{:value, :any}, _, _} -> {{:value, :naive_datetime}, [expr | fields], from}
    {type, _, _} -> {type, [expr | fields], from}
  end
end
# List literals collect each element in turn.
defp collect_fields(args, fields, from, query, take, keep_literals?) when is_list(args) do
  {args, fields, from} = collect_args(args, fields, from, query, take, keep_literals?, [])
  {{:list, args}, fields, from}
end

# When literals must be kept (keep_literals? is true, e.g. for coalesce
# arguments), basic literals get a concrete type and join the field list.
defp collect_fields(expr, fields, from, _query, _take, true) when is_binary(expr) do
  {{:value, :binary}, [expr | fields], from}
end

defp collect_fields(expr, fields, from, _query, _take, true) when is_integer(expr) do
  {{:value, :integer}, [expr | fields], from}
end

defp collect_fields(expr, fields, from, _query, _take, true) when is_float(expr) do
  {{:value, :float}, [expr | fields], from}
end

defp collect_fields(expr, fields, from, _query, _take, true) when is_boolean(expr) do
  {{:value, :boolean}, [expr | fields], from}
end

# Atoms pass through untouched and add no fields. Note booleans were
# already handled above when literals are kept.
defp collect_fields(expr, fields, from, _query, _take, _keep_literals?) when is_atom(expr) do
  {expr, fields, from}
end

# Binaries and numbers pass through when literals are not kept.
defp collect_fields(expr, fields, from, _query, _take, false)
     when is_binary(expr) or is_number(expr) do
  {expr, fields, from}
end

# Tagged expressions carry their own type tag.
defp collect_fields(%Ecto.Query.Tagged{tag: tag} = expr, fields, from, _query, _take, _keep_literals?) do
  {{:value, tag}, [expr | fields], from}
end

# Unary and binary boolean operators always yield booleans.
defp collect_fields({op, _, [_]} = expr, fields, from, _query, _take, _keep_literals?)
     when op in ~w(not is_nil)a do
  {{:value, :boolean}, [expr | fields], from}
end

defp collect_fields({op, _, [_, _]} = expr, fields, from, _query, _take, _keep_literals?)
     when op in ~w(< > <= >= == != and or like ilike)a do
  {{:value, :boolean}, [expr | fields], from}
end

# Fallback: any other expression has a dynamic value type.
defp collect_fields(expr, fields, from, _query, _take, _keep_literals?) do
  {{:value, :any}, [expr | fields], from}
end
# Collects the key and value of each pair in a map/struct literal,
# threading the accumulated field list and `from` info through both.
# Pairs come back in input order.
defp collect_kv([{key, value} | elems], fields, from, query, take, keep_literals?, acc) do
  {key, fields, from} = collect_fields(key, fields, from, query, take, keep_literals?)
  {value, fields, from} = collect_fields(value, fields, from, query, take, keep_literals?)
  collect_kv(elems, fields, from, query, take, keep_literals?, [{key, value} | acc])
end

defp collect_kv([], fields, from, _query, _take, _keep_literals?, acc) do
  {Enum.reverse(acc), fields, from}
end
# Collects every expression in a list, threading the accumulated fields
# and `from` info through each step. Results come back in input order.
defp collect_args([head | rest], fields, from, query, take, keep_literals?, acc) do
  {collected, fields, from} = collect_fields(head, fields, from, query, take, keep_literals?)
  collect_args(rest, fields, from, query, take, keep_literals?, [collected | acc])
end

defp collect_args([], fields, from, _query, _take, _keep_literals?, acc),
  do: {Enum.reverse(acc), fields, from}
# Merges duplicated association entries pointing at the same binding into
# a single tree; raises if the same association is set to two different
# bindings at once.
defp merge_assocs(assocs, query) do
  assocs
  |> Enum.reduce(%{}, fn {field, {index, children}}, acc ->
    children = merge_assocs(children, query)

    Map.update(acc, field, {index, children}, fn
      {^index, current_children} ->
        # NOTE(review): `children` was already merged above and is merged
        # again here together with the existing entries — appears redundant
        # but harmless; confirm before simplifying.
        {index, merge_assocs(children ++ current_children, query)}

      {other_index, _} ->
        error! query, "association `#{field}` is being set to binding at position #{index} " <>
                      "and at position #{other_index} at the same time"
    end)
  end)
  |> Map.to_list()
end
# Walks the preload association tree, taking the fields of each
# associated source and accumulating select expressions and fields.
# Children are visited before siblings.
defp collect_assocs(exprs, fields, query, tag, take, [{assoc, {ix, children}}|tail]) do
  to_take = get_preload_source!(query, ix)
  {fetch, take_children} = fetch_assoc(tag, take, assoc)
  {expr, taken} = take!(to_take, query, fetch, assoc, ix)
  exprs = [expr | exprs]
  fields = Enum.reverse(taken, fields)
  {exprs, fields} = collect_assocs(exprs, fields, query, tag, take_children, children)
  {exprs, fields} = collect_assocs(exprs, fields, query, tag, take, tail)
  {exprs, fields}
end

defp collect_assocs(exprs, fields, _query, _tag, _take, []) do
  {exprs, fields}
end
# Looks up the take information for an association. Returns both the
# (tagged) fetch result for the association itself and the take list to
# apply to its children (empty when the association was not taken).
defp fetch_assoc(tag, take, assoc) do
  with {:ok, taken} <- Access.fetch(take, assoc) do
    {{:ok, {tag, taken}}, taken}
  else
    :error -> {:error, []}
  end
end
# Takes the fields of the source at binding `ix`, honoring any take
# information registered under `field`.
defp source_take!(kind, query, take, field, ix) do
  source = get_source!(kind, query, ix)
  take!(source, query, Access.fetch(take, field), field, ix)
end
# Computes the selected expression and fields for a single source, based
# on what was (optionally) taken for it via struct/2, map/2 or a take list.
defp take!(source, query, fetched, field, ix) do
  case {fetched, source} do
    # struct/2 requires a schema, so fragments and subqueries are rejected.
    {{:ok, {:struct, _}}, {:fragment, _, _}} ->
      error! query, "it is not possible to return a struct subset of a fragment"

    {{:ok, {:struct, _}}, %Ecto.SubQuery{}} ->
      error! query, "it is not possible to return a struct subset of a subquery"

    {{:ok, {_, []}}, {_, _, _}} ->
      error! query, "at least one field must be selected for binding `#{field}`, got an empty list"

    {{:ok, {:struct, _}}, {_, nil, _}} ->
      error! query, "struct/2 in select expects a source with a schema"

    # Taking from a regular source: dump the requested fields through the
    # schema dumper; map takes drop the schema from the result.
    {{:ok, {kind, fields}}, {source, schema, prefix}} when is_binary(source) ->
      dumper = if schema, do: schema.__schema__(:dump), else: %{}
      schema = if kind == :map, do: nil, else: schema
      {types, fields} = select_dump(List.wrap(fields), dumper, ix)
      {{:source, {source, schema}, prefix || query.prefix, types}, fields}

    # Taking from any other source yields a map of dynamic values.
    {{:ok, {_, fields}}, _} ->
      {{:map, Enum.map(fields, &{&1, {:value, :any}})}, Enum.map(fields, &select_field(&1, ix))}

    # Nothing taken: fragments and schemaless sources return the whole
    # binding as a map value...
    {:error, {:fragment, _, _}} ->
      {{:value, :map}, [{:&, [], [ix]}]}

    {:error, {_, nil, _}} ->
      {{:value, :map}, [{:&, [], [ix]}]}

    # ...schema sources return all of the schema's query fields...
    {:error, {source, schema, prefix}} ->
      {types, fields} = select_dump(schema.__schema__(:query_fields), schema.__schema__(:dump), ix)
      {{:source, {source, schema}, prefix || query.prefix, types}, fields}

    # ...and subqueries reuse their own select shape.
    {:error, %Ecto.SubQuery{select: select}} ->
      {_, fields} = subquery_struct_and_fields(select)
      {select, Enum.map(fields, &select_field(&1, ix))}
  end
end
# Maps each atom field through the schema dumper, producing the typed
# field list and the matching select expressions. Non-atom entries are
# skipped; input order is preserved.
defp select_dump(fields, dumper, ix) do
  pairs =
    for field <- fields, is_atom(field) do
      {source, type} = Map.get(dumper, field, {field, :any})
      {{field, type}, select_field(source, ix)}
    end

  Enum.unzip(pairs)
end
# Builds the AST for accessing `field` on the source bound at index `ix`,
# i.e. the quoted form of `&ix.field`.
defp select_field(field, ix) do
  binding = {:&, [], [ix]}
  {{:., [], [binding, field]}, [], []}
end
# Resolves a binding expression to its index, also returning the
# normalized binding AST and the query that index refers to.

# A positional binding already carries its index.
defp get_ix!({:&, _, [ix]} = expr, _kind, query) do
  {ix, expr, query}
end

# A named binding is looked up in the current query's aliases.
defp get_ix!({:as, meta, [as]}, _kind, query) do
  case query.aliases do
    %{^as => ix} -> {ix, {:&, meta, [ix]}, query}
    %{} -> error!(query, "could not find named binding `as(#{inspect(as)})`")
  end
end

# A parent binding is looked up in the parent query stored under
# @parent_as. Note `query` is rebound to the parent query on success, so
# the returned index refers to the parent's sources.
defp get_ix!({:parent_as, meta, [as]}, kind, query) do
  case query.aliases[@parent_as] do
    %{aliases: %{^as => ix}, sources: sources} = query ->
      # In a subquery select used as a join, only bindings already present
      # in the parent's sources tuple may be referenced.
      if kind == :select and not (ix < tuple_size(sources)) do
        error!(query, "the parent_as in a subquery select used as a join can only access the `from` binding")
      else
        {ix, {:parent_as, [], [{:&, meta, [ix]}]}, query}
      end

    %{} ->
      error!(query, "could not find named binding `parent_as(#{inspect(as)})`")

    nil ->
      error!(query, "`parent_as(#{inspect(as)})` can only be used in subqueries")
  end
end
# Fetches the source tuple at binding `ix`, raising a descriptive query
# error when the binding does not exist.
#
# The first parameter was previously named `where` even though every
# caller passes the expression kind (e.g. :select, :preload) and it is
# interpolated into the error message as such; renamed to `kind` for
# consistency with source_take!/5 and type!/5. Positional call sites are
# unaffected.
defp get_source!(kind, %{sources: sources} = query, ix) do
  elem(sources, ix)
rescue
  ArgumentError ->
    error! query, "invalid query has specified more bindings than bindings available " <>
                  "in `#{kind}` (look for `unknown_binding!` in the printed query below)"
end
# Like get_source!/3 but restricted to preloads: only regular sources
# with a schema can be preloaded into.
defp get_preload_source!(query, ix) do
  case get_source!(:preload, query, ix) do
    {source, schema, _} = all when is_binary(source) and schema != nil ->
      all
    _ ->
      error! query, "can only preload sources with a schema " <>
                    "(fragments, binary and subqueries are not supported)"
  end
end
## Helpers

# Expression kinds traversed for each operation, as {kind, query_key}
# pairs in traversal order.
@all_exprs [with_cte: :with_ctes, distinct: :distinct, select: :select, from: :from, join: :joins,
            where: :wheres, group_by: :group_bys, having: :havings, windows: :windows,
            combination: :combinations, order_by: :order_bys, limit: :limit, offset: :offset]

@update_all_exprs [with_cte: :with_ctes, update: :updates, from: :from,
                   join: :joins, where: :wheres, select: :select]

@delete_all_exprs [with_cte: :with_ctes, from: :from, join: :joins,
                   where: :wheres, select: :select]

# Traverses all query components that hold expressions, per the lists
# above. Preloads, assocs and lock are not traversed.
defp traverse_exprs(query, operation, acc, fun) do
  exprs =
    case operation do
      :all -> @all_exprs
      :update_all -> @update_all_exprs
      :delete_all -> @delete_all_exprs
    end

  Enum.reduce exprs, {query, acc}, fn {kind, key}, {query, acc} ->
    {traversed, acc} = fun.(kind, query, Map.fetch!(query, key), acc)
    {%{query | key => traversed}, acc}
  end
end
# Resolves the type of a field reference, unwrapping composite wrappers
# (e.g. {:array, inner}) and delegating binding lookups to type!/5.
# Anything that is not an indexed field reference is already a type.
defp field_type!(kind, query, expr, {composite, {ix, field}}) when is_integer(ix),
  do: {composite, type!(kind, query, expr, ix, field)}

defp field_type!(kind, query, expr, {ix, field}) when is_integer(ix),
  do: type!(kind, query, expr, ix, field)

defp field_type!(_kind, _query, _expr, type), do: type
# Resolves the type of `field` either on the source at index `ix` or on
# the given schema module. A nil source has no type information.
defp type!(_kind, _query, _expr, nil, _field), do: :any

defp type!(kind, query, expr, ix, field) when is_integer(ix) do
  case get_source!(kind, query, ix) do
    # Fragments carry no type information.
    {:fragment, _, _} ->
      :any

    # Regular sources delegate to the schema clause below (schema may be
    # nil, which yields :any).
    {_, schema, _} ->
      type!(kind, query, expr, schema, field)

    # Subqueries resolve the field against their own select shape.
    %Ecto.SubQuery{select: select} ->
      case subquery_type_for(select, field) do
        {:ok, type} -> type
        :error -> error!(query, expr, "field `#{field}` does not exist in subquery")
      end
  end
end

defp type!(kind, query, expr, schema, field) when is_atom(schema) do
  cond do
    # Known schema field: use its declared type.
    type = schema.__schema__(:type, field) ->
      type

    # On the struct but not the schema: it is a virtual field.
    Map.has_key?(schema.__struct__(), field) ->
      error! query, expr, "field `#{field}` in `#{kind}` is a virtual field in schema #{inspect schema}"

    true ->
      error! query, expr, "field `#{field}` in `#{kind}` does not exist in schema #{inspect schema}"
  end
end
# Normalizes a parameter's matched type. `{:out, type}` parameters come
# from array membership and must carry an array (or :any) type; anything
# else passes through unchanged.
defp normalize_param(_kind, {:out, {:array, inner}}, _value), do: {:ok, inner}
defp normalize_param(_kind, {:out, :any}, _value), do: {:ok, :any}

defp normalize_param(kind, {:out, other}, value) do
  message =
    "value `#{inspect value}` in `#{kind}` expected to be part of an array " <>
      "but matched type is #{inspect other}"

  {:error, message}
end

defp normalize_param(_kind, type, _value), do: {:ok, type}
# Casts a parameter value to `type`, returning a descriptive error tuple
# when the cast is not possible.
defp cast_param(kind, type, value) do
  case Ecto.Type.cast(type, value) do
    {:ok, cast} ->
      {:ok, cast}

    _other ->
      {:error, "value `#{inspect value}` in `#{kind}` cannot be cast to type #{inspect type}"}
  end
end
# Dumps an already-cast value through the adapter, returning an error
# tuple when the value cannot be represented in the given type.
defp dump_param(adapter, type, value) do
  case Ecto.Type.adapter_dump(adapter, type, value) do
    {:ok, dumped} ->
      {:ok, dumped}

    :error ->
      {:error, "value `#{inspect value}` cannot be dumped to type #{inspect type}"}
  end
end
# Maps a schema field to its database source name. If the field is not
# found we return the field itself, which will be checked and raise later.
# Non-schema sources always keep the field as-is.
defp field_source({source, schema, _prefix}, field) when is_binary(source) and schema != nil do
  schema.__schema__(:field_source, field) || field
end

defp field_source(_source, field), do: field
# Validates an update_all query: every updated field may appear only
# once across all update expressions, and at least one field must be
# updated. Raises an Ecto.QueryError otherwise.
defp assert_update!(%Ecto.Query{updates: updates} = query, operation) do
  changes =
    Enum.reduce(updates, %{}, fn update, acc ->
      Enum.reduce(update.expr, acc, fn {_op, kw}, acc ->
        Enum.reduce(kw, acc, fn {k, v}, acc ->
          # Map.update's update-fn only runs on a duplicate key.
          Map.update(acc, k, v, fn _ ->
            error! query, "duplicate field `#{k}` for `#{operation}`"
          end)
        end)
      end)
    end)

  if changes == %{} do
    error! query, "`#{operation}` requires at least one field to be updated"
  end
end
# Raises when the query carries update expressions, which `operation`
# does not permit; returns the query untouched otherwise.
defp assert_no_update!(%Ecto.Query{updates: []} = query, _operation), do: query

defp assert_no_update!(query, operation),
  do: error!(query, "`#{operation}` does not allow `update` expressions")
# Ensures the query carries only filter-style expressions (with_cte,
# where and join); any other populated component is rejected for this
# operation with a hint about Ecto.Query.exclude/2.
defp assert_only_filter_expressions!(query, operation) do
  case query do
    %Ecto.Query{order_bys: [], limit: nil, offset: nil, group_bys: [],
                havings: [], preloads: [], assocs: [], distinct: nil, lock: nil,
                windows: [], combinations: []} ->
      query
    _ ->
      error! query, "`#{operation}` allows only `with_cte`, `where` and `join` expressions. " <>
                    "You can exclude unwanted expressions from a query by using " <>
                    "Ecto.Query.exclude/2. Error found"
  end
end
# Re-raises `exception` with this module's frames stripped from the
# stacktrace, so planner internals do not show up in user-facing errors.
defp filter_and_reraise(exception, stacktrace) do
  cleaned = Enum.reject(stacktrace, &match?({__MODULE__, _, _, _}, &1))
  reraise exception, cleaned
end
# Raises an Ecto.QueryError with the given message. The expr variants
# attach the file/line of the offending query expression, and optionally
# a hint shown alongside the error.
defp error!(query, message) do
  raise Ecto.QueryError, message: message, query: query
end

defp error!(query, expr, message) do
  raise Ecto.QueryError, message: message, query: query, file: expr.file, line: expr.line
end

defp error!(query, expr, message, hint) do
  raise Ecto.QueryError, message: message, query: query, file: expr.file, line: expr.line, hint: hint
end
end
| 36.979547 | 147 | 0.630381 |
73c3262fb1f1d9fe7e6b4ec6d2cbf1b78cf41fd0 | 28,896 | exs | Elixir | lib/iex/test/iex/helpers_test.exs | tmbb/exdocs_makedown_demo | 6a0039c54d2fa10d79c080efcef8d70d359678f8 | [
"Apache-2.0"
] | null | null | null | lib/iex/test/iex/helpers_test.exs | tmbb/exdocs_makedown_demo | 6a0039c54d2fa10d79c080efcef8d70d359678f8 | [
"Apache-2.0"
] | null | null | null | lib/iex/test/iex/helpers_test.exs | tmbb/exdocs_makedown_demo | 6a0039c54d2fa10d79c080efcef8d70d359678f8 | [
"Apache-2.0"
] | null | null | null | Code.require_file "../test_helper.exs", __DIR__
defmodule IEx.HelpersTest do
use IEx.Case
import IEx.Helpers
describe "whereami" do
test "is disabled by default" do
assert capture_iex("whereami()") =~
"Pry session is not currently enabled"
end
test "shows current location for custom envs" do
whereami = capture_iex("whereami()", [], env: %{__ENV__ | line: 3})
assert whereami =~ "test/iex/helpers_test.exs:3"
assert whereami =~ "3: defmodule IEx.HelpersTest do"
end
test "prints message when location is not available" do
whereami = capture_iex("whereami()", [], env: %{__ENV__ | line: 30000})
assert whereami =~ "test/iex/helpers_test.exs:30000"
assert whereami =~ "Could not extract source snippet. Location is not available."
whereami = capture_iex("whereami()", [], env: %{__ENV__ | file: "nofile", line: 1})
assert whereami =~ "nofile:1"
assert whereami =~ "Could not extract source snippet. Location is not available."
end
end
if :erlang.system_info(:otp_release) >= '20' do
describe "breakpoints" do
setup do
on_exit fn -> IEx.Pry.remove_breaks() end
end
test "sets up a breakpoint with macro syntax" do
assert break!(URI.decode_query/2) == 1
assert IEx.Pry.breaks() == [{1, URI, {:decode_query, 2}, 1}]
end
test "sets up a breakpoint on the given module" do
assert break!(URI, :decode_query, 2) == 1
assert IEx.Pry.breaks() == [{1, URI, {:decode_query, 2}, 1}]
end
test "resets breaks on the given id" do
assert break!(URI, :decode_query, 2) == 1
assert reset_break(1) == :ok
assert IEx.Pry.breaks() == [{1, URI, {:decode_query, 2}, 0}]
end
test "resets breaks on the given module" do
assert break!(URI, :decode_query, 2) == 1
assert reset_break(URI, :decode_query, 2) == :ok
assert IEx.Pry.breaks() == [{1, URI, {:decode_query, 2}, 0}]
end
test "removes breaks in the given module" do
assert break!(URI.decode_query/2) == 1
assert remove_breaks(URI) == :ok
assert IEx.Pry.breaks() == []
end
test "removes breaks on all modules" do
assert break!(URI.decode_query/2) == 1
assert remove_breaks() == :ok
assert IEx.Pry.breaks() == []
end
test "errors when setting up a break with no beam" do
assert_raise RuntimeError,
"could not set breakpoint, could not find .beam file for IEx.HelpersTest",
fn -> break!(__MODULE__, :setup, 1) end
end
test "errors when setting up a break for unknown function" do
assert_raise RuntimeError,
"could not set breakpoint, unknown function/macro URI.unknown/2",
fn -> break!(URI, :unknown, 2) end
end
test "errors for non elixir modules" do
assert_raise RuntimeError,
"could not set breakpoint, module :elixir was not written in Elixir",
fn -> break!(:elixir, :unknown, 2) end
end
test "prints table with breaks" do
break!(URI, :decode_query, 2)
assert capture_io(fn -> breaks() end) == """
ID Module.function/arity Pending stops
---- ----------------------- ---------------
1 URI.decode_query/2 1
"""
assert capture_io(fn -> URI.decode_query("foo=bar", %{}) end) != ""
assert capture_io(fn -> breaks() end) == """
ID Module.function/arity Pending stops
---- ----------------------- ---------------
1 URI.decode_query/2 0
"""
assert capture_io(fn -> URI.decode_query("foo=bar", %{}) end) == ""
assert capture_io(fn -> breaks() end) == """
ID Module.function/arity Pending stops
---- ----------------------- ---------------
1 URI.decode_query/2 0
"""
end
test "does not print table when there are no breaks" do
assert capture_io(fn -> breaks() end) ==
"No breakpoints set\n"
end
end
end
describe "open" do
@iex_helpers Path.expand("../../lib/iex/helpers.ex", __DIR__)
@elixir_erl Path.expand("../../../elixir/src/elixir.erl", __DIR__)
@editor System.get_env("ELIXIR_EDITOR")
test "opens __FILE__ and __LINE__" do
System.put_env("ELIXIR_EDITOR", "echo __LINE__:__FILE__")
assert capture_iex("open({#{inspect __ENV__.file}, 3})") |> maybe_trim_quotes() ==
"3:#{__ENV__.file}"
after
System.put_env("ELIXIR_EDITOR", @editor)
end
test "opens Elixir module" do
assert capture_iex("open(IEx.Helpers)") |> maybe_trim_quotes() =~
~r/#{@iex_helpers}:1/
end
test "opens function" do
assert capture_iex("open(h)") |> maybe_trim_quotes() =~
~r/#{@iex_helpers}:\d+/
end
test "opens function/arity" do
assert capture_iex("open(b/1)") |> maybe_trim_quotes() =~
~r/#{@iex_helpers}:\d+/
assert capture_iex("open(h/0)") |> maybe_trim_quotes() =~
~r/#{@iex_helpers}:\d+/
end
test "opens module.function" do
assert capture_iex("open(IEx.Helpers.b)") |> maybe_trim_quotes() =~
~r/#{@iex_helpers}:\d+/
assert capture_iex("open(IEx.Helpers.h)") |> maybe_trim_quotes() =~
~r/#{@iex_helpers}:\d+/
end
test "opens module.function/arity" do
assert capture_iex("open(IEx.Helpers.b/1)") |> maybe_trim_quotes() =~
~r/#{@iex_helpers}:\d+/
assert capture_iex("open(IEx.Helpers.h/0)") |> maybe_trim_quotes() =~
~r/#{@iex_helpers}:\d+/
end
test "opens Erlang module" do
assert capture_iex("open(:elixir)") |> maybe_trim_quotes() =~
~r/#{@elixir_erl}:\d+/
end
test "opens Erlang module.function" do
assert capture_iex("open(:elixir.start)") |> maybe_trim_quotes() =~
~r/#{@elixir_erl}:\d+/
end
test "opens Erlang module.function/arity" do
assert capture_iex("open(:elixir.start/2)") |> maybe_trim_quotes() =~
~r/#{@elixir_erl}:\d+/
end
test "errors if module is not available" do
assert capture_iex("open(:unknown)") ==
"Could not open: :unknown. Module is not available."
end
test "errors if module.function is not available" do
assert capture_iex("open(:unknown.unknown)") ==
"Could not open: :unknown.unknown. Module is not available."
assert capture_iex("open(:elixir.unknown)") ==
"Could not open: :elixir.unknown. Function/macro is not available."
end
test "errors if module.function/arity is not available" do
assert capture_iex("open(:unknown.start/10)") ==
"Could not open: :unknown.start/10. Module is not available."
assert capture_iex("open(:elixir.start/10)") ==
"Could not open: :elixir.start/10. Function/macro is not available."
end
test "opens the current pry location" do
assert capture_iex("open()", [], env: %{__ENV__ | line: 3}) |> maybe_trim_quotes() ==
"#{__ENV__.file}:3"
end
test "errors if prying is not available" do
assert capture_iex("open()") == "Pry session is not currently enabled"
end
test "opens given {file, line}" do
assert capture_iex("open({#{inspect __ENV__.file}, 3})") |> maybe_trim_quotes() ==
"#{__ENV__.file}:3"
end
test "errors when given {file, line} is not available" do
assert capture_iex("open({~s[foo], 3})") ==
"Could not open: \"foo\". File is not available."
end
defp maybe_trim_quotes(string) do
case :os.type do
{:win32, _} -> String.replace(string, "\"", "")
_ -> string
end
end
end
describe "clear" do
test "clear the screen with ansi" do
Application.put_env(:elixir, :ansi_enabled, true)
assert capture_iex("clear()") == "\e[H\e[2J"
Application.put_env(:elixir, :ansi_enabled, false)
assert capture_iex("clear()") =~ "Cannot clear the screen because ANSI escape codes are not enabled on this shell"
after
Application.delete_env(:elixir, :ansi_enabled)
end
end
describe "runtime_info" do
test "shows vm information" do
assert "\n## System and architecture" <> _ =
capture_io(fn -> runtime_info() end)
end
end
describe "h" do
test "shows help" do
assert "* IEx.Helpers\n\nWelcome to Interactive Elixir" <> _
= capture_iex("h()")
end
test "prints module documentation" do
assert "* IEx.Helpers\n\nWelcome to Interactive Elixir" <> _ =
capture_io(fn -> h IEx.Helpers end)
assert capture_io(fn -> h :whatever end) ==
"Could not load module :whatever, got: nofile\n"
assert capture_io(fn -> h :lists end) ==
":lists is an Erlang module and, as such, it does not have Elixir-style docs\n"
end
test "prints function documentation" do
pwd_h = "* def pwd()\n\nPrints the current working directory.\n\n"
c_h = "* def c(files, path \\\\ :in_memory)\n\nCompiles the given files."
eq_h = "* def ==(left, right)\n\nReturns `true` if the two items are equal.\n\n"
assert capture_io(fn -> h IEx.Helpers.pwd/0 end) =~ pwd_h
assert capture_io(fn -> h IEx.Helpers.c/2 end) =~ c_h
assert capture_io(fn -> h ==/2 end) =~ eq_h
assert capture_io(fn -> h IEx.Helpers.c/1 end) =~ c_h
assert capture_io(fn -> h pwd end) =~ pwd_h
end
test "prints __info__ documentation" do
h_output_module = capture_io(fn -> h Module.__info__ end)
assert capture_io(fn -> h Module.UnlikelyTo.Exist.__info__ end) == h_output_module
assert capture_io(fn -> h Module.UnlikelyTo.Exist.__info__/1 end) == h_output_module
assert capture_io(fn -> h __info__ end) == "No documentation for Kernel.__info__ was found\n"
end
test "considers underscored functions without docs by default" do
content = """
defmodule Sample do
def __foo__(), do: 0
@doc "Bar doc"
def __bar__(), do: 1
end
"""
filename = "sample.ex"
with_file filename, content, fn ->
assert c(filename, ".") == [Sample]
assert capture_io(fn -> h Sample.__foo__ end) == "No documentation for Sample.__foo__ was found\n"
assert capture_io(fn -> h Sample.__bar__ end) == "* def __bar__()\n\nBar doc\n"
assert capture_io(fn -> h Sample.__foo__/0 end) == "No documentation for Sample.__foo__/0 was found\n"
assert capture_io(fn -> h Sample.__bar__/0 end) == "* def __bar__()\n\nBar doc\n"
end
after
cleanup_modules([Sample])
end
test "prints callback documentation when function docs are not available" do
behaviour = """
defmodule MyBehaviour do
@doc "Docs for MyBehaviour.first"
@callback first(integer) :: integer
@callback second(integer) :: integer
@callback second(integer, integer) :: integer
end
"""
impl = """
defmodule Impl do
@behaviour MyBehaviour
def first(0), do: 0
@doc "Docs for Impl.second/1"
def second(0), do: 0
@doc "Docs for Impl.second/2"
def second(0, 0), do: 0
end
"""
files = ["my_behaviour.ex", "impl.ex"]
with_file files, [behaviour, impl], fn ->
assert c(files, ".") |> Enum.sort == [Impl, MyBehaviour]
assert capture_io(fn -> h Impl.first/1 end) == "* @callback first(integer()) :: integer()\n\nDocs for MyBehaviour.first\n"
assert capture_io(fn -> h Impl.second/1 end) == "* def second(int)\n\nDocs for Impl.second/1\n"
assert capture_io(fn -> h Impl.second/2 end) == "* def second(int1, int2)\n\nDocs for Impl.second/2\n"
assert capture_io(fn -> h Impl.first end) == "* @callback first(integer()) :: integer()\n\nDocs for MyBehaviour.first\n"
assert capture_io(fn -> h Impl.second end) == "* def second(int)\n\nDocs for Impl.second/1\n* def second(int1, int2)\n\nDocs for Impl.second/2\n"
assert capture_io(fn -> h MyBehaviour.first end) == """
No documentation for function MyBehaviour.first was found, but there is a callback with the same name.
You can view callback documentations with the b/1 helper.\n
"""
assert capture_io(fn -> h MyBehaviour.second/2 end) == """
No documentation for function MyBehaviour.second/2 was found, but there is a callback with the same name.
You can view callback documentations with the b/1 helper.\n
"""
assert capture_io(fn -> h MyBehaviour.second/3 end) == "No documentation for MyBehaviour.second/3 was found\n"
end
after
cleanup_modules([Impl, MyBehaviour])
end
test "prints documentation for delegates" do
filename = "delegate.ex"
content = """
defmodule Delegator do
defdelegate func1, to: Delegated
@doc "Delegator func2 doc"
defdelegate func2, to: Delegated
end
defmodule Delegated do
def func1, do: 1
def func2, do: 2
end
"""
with_file filename, content, fn ->
assert c(filename, ".") |> Enum.sort == [Delegated, Delegator]
assert capture_io(fn -> h Delegator.func1 end) == "* def func1()\n\nSee `Delegated.func1/0`.\n"
assert capture_io(fn -> h Delegator.func2 end) == "* def func2()\n\nDelegator func2 doc\n"
end
after
cleanup_modules([Delegated, Delegator])
end
end
describe "b" do
test "lists all callbacks for a module" do
assert capture_io(fn -> b Mix end) == "No callbacks for Mix were found\n"
assert capture_io(fn -> b NoMix end) == "Could not load module NoMix, got: nofile\n"
assert capture_io(fn -> b Mix.SCM end) =~ """
@callback accepts_options(app :: atom(), opts()) :: opts() | nil
@callback checked_out?(opts()) :: boolean()
"""
end
test "prints callback documentation" do
assert capture_io(fn -> b Mix.Task.stop end) == "No documentation for Mix.Task.stop was found\n"
assert capture_io(fn -> b Mix.Task.run end) =~ "* @callback run(command_line_args :: [binary()]) :: any()\n\nA task needs to implement `run`"
assert capture_io(fn -> b NoMix.run end) == "Could not load module NoMix, got: nofile\n"
assert capture_io(fn -> b Exception.message/1 end) == "* @callback message(t()) :: String.t()\n\n\n"
end
end
# Covers the `t/1` helper: printing @type/@opaque definitions and @typedoc docs.
describe "t" do
test "prints when there is no type information" do
assert capture_io(fn -> t IEx end) == "No type information for IEx was found\n"
end
test "prints all types in module" do
# Test that it shows at least two types
assert Enum.count(capture_io(fn -> t Enum end) |> String.split("\n"), fn line ->
String.starts_with? line, "@type"
end) >= 2
end
# `t Mod.type` and `t Mod.type/0` must render identically for 0-arity types.
test "prints type information" do
assert "@type t() :: " <> _ = capture_io(fn -> t Enum.t end)
assert capture_io(fn -> t Enum.t end) == capture_io(fn -> t Enum.t/0 end)
assert "@opaque t(value)\n@type t() :: t(term())\n" = capture_io(fn -> t MapSet.t end)
assert capture_io(fn -> t URI.t end) == capture_io(fn -> t URI.t/0 end)
end
# Compiles a throwaway module with a @typedoc and checks the doc is shown.
test "prints type documentation" do
content = """
defmodule TypeSample do
@typedoc "An id with description."
@type id_with_desc :: {number, String.t}
end
"""
filename = "typesample.ex"
with_file filename, content, fn ->
assert c(filename, ".") == [TypeSample]
assert capture_io(fn -> t TypeSample.id_with_desc/0 end) == """
An id with description.
@type id_with_desc() :: {number(), String.t()}
"""
assert capture_io(fn -> t TypeSample.id_with_desc end) == """
An id with description.
@type id_with_desc() :: {number(), String.t()}
"""
end
after
cleanup_modules([TypeSample])
end
end
# Covers the `s/1` helper: printing @spec declarations.
describe "s" do
test "prints when there is no spec information" do
assert capture_io(fn -> s IEx.Remsh end) == "No specification for IEx.Remsh was found\n"
end
test "prints all specs in module" do
# Test that it shows at least two specs
assert Enum.count(capture_io(fn -> s Process end) |> String.split("\n"), fn line ->
String.starts_with? line, "@spec"
end) >= 2
end
# A name without arity prints every matching spec; with arity, exactly one.
test "prints specs" do
assert Enum.count(capture_io(fn -> s Process.flag end) |> String.split("\n"), fn line ->
String.starts_with? line, "@spec"
end) >= 2
assert capture_io(fn -> s Process.register/2 end) ==
"@spec register(pid() | port(), atom()) :: true\n"
assert capture_io(fn -> s struct end) ==
"@spec struct(module() | struct(), Enum.t()) :: struct()\n"
end
end
# Covers `v/0,1`: positive indices, negative (relative) indices, and the
# no-argument form which returns the most recent result.
describe "v" do
test "returns history" do
assert "** (RuntimeError) v(0) is out of bounds" <> _ = capture_iex("v(0)")
assert "** (RuntimeError) v(1) is out of bounds" <> _ = capture_iex("v(1)")
assert "** (RuntimeError) v(-1) is out of bounds" <> _ = capture_iex("v(-1)")
assert capture_iex("1\n2\nv(2)") == "1\n2\n2"
assert capture_iex("1\n2\nv(2)") == capture_iex("1\n2\nv(-1)")
assert capture_iex("1\n2\nv(2)") == capture_iex("1\n2\nv()")
end
end
# `flush/0` drains and inspect-prints the current process mailbox.
describe "flush" do
test "flushes messages" do
assert capture_io(fn -> send self(), :hello; flush() end) == ":hello\n"
end
end
# `pwd/0` prints the current working directory; the regex tolerates both
# forward and back slashes so the test passes on Windows too.
describe "pwd" do
test "prints the working directory" do
File.cd! iex_path(), fn ->
assert capture_io(fn -> pwd() end) =~ ~r"lib[\\/]iex\n$"
end
end
end
# `ls/0,1` lists a directory; "~" must expand to the user's home directory.
describe "ls" do
test "lists the current directory" do
File.cd! iex_path(), fn ->
paths = capture_io(fn -> ls() end)
|> String.split
|> Enum.map(&String.trim/1)
assert "ebin" in paths
assert "mix.exs" in paths
end
end
test "lists the given directory" do
assert capture_io(fn -> ls "~" end) ==
capture_io(fn -> ls System.user_home end)
end
end
# `exports/1` prints the module's exported functions as "name/arity" columns
# (note the trailing space before the newline in the expected output).
describe "exports" do
test "prints module exports" do
exports = capture_io(fn -> exports(IEx.Autocomplete) end)
assert exports == "expand/1 expand/2 exports/1 \n"
end
end
# `import_file/1` evaluates a file into the IEx session: bindings and imports
# defined in the file (and in files it imports in turn) become available.
describe "import_file" do
test "imports a file" do
with_file "dot-iex", "variable = :hello\nimport IO", fn ->
capture_io(:stderr, fn ->
assert "** (CompileError) iex:1: undefined function variable/0" <> _ = capture_iex("variable")
end)
assert "** (CompileError) iex:1: undefined function puts/1" <> _ = capture_iex("puts \"hi\"")
assert capture_iex("import_file \"dot-iex\"\nvariable\nputs \"hi\"") ==
"IO\n:hello\nhi\n:ok"
end
end
test "imports a file that imports another file" do
dot = "parent = true\nimport_file \"dot-iex-1\""
dot_1 = "variable = :hello\nimport IO"
with_file ["dot-iex", "dot-iex-1"], [dot, dot_1], fn ->
capture_io(:stderr, fn ->
assert "** (CompileError) iex:1: undefined function parent/0" <> _ = capture_iex("parent")
end)
assert "** (CompileError) iex:1: undefined function puts/1" <> _ = capture_iex("puts \"hi\"")
assert capture_iex("import_file \"dot-iex\"\nvariable\nputs \"hi\"\nparent") ==
"IO\n:hello\nhi\n:ok\ntrue"
end
end
test "raises if file is missing" do
failing = capture_iex("import_file \"nonexistent\"")
assert "** (File.Error) could not read file" <> _ = failing
assert failing =~ "no such file or directory"
end
test "does not raise if file is missing and using import_file_if_available" do
assert "nil" == capture_iex("import_file_if_available \"nonexistent\"")
end
end
# `import_if_available/1,2` imports a module when it exists and is a no-op
# (returning nil) when it does not.
describe "import_if_available" do
test "imports a module only if available" do
assert "nil" == capture_iex("import_if_available NoSuchModule")
assert "[1, 2, 3]" == capture_iex("import_if_available Integer; digits 123")
assert "[1, 2, 3]" == capture_iex("import_if_available Integer, only: [digits: 1]; digits 123")
end
end
# Covers the `c/1,2` compile helper for Elixir and Erlang sources. Each test
# first asserts the module is NOT loaded, compiles the fixture written by
# with_file/3, then cleans up the loaded modules in the `after` clause.
describe "c" do
# c/1 (no output path) compiles in memory: no .beam file may be written.
test "compiles a file" do
assert_raise UndefinedFunctionError, ~r"function Sample\.run/0 is undefined", fn ->
Sample.run
end
filename = "sample.ex"
with_file filename, test_module_code(), fn ->
assert c(Path.expand(filename)) == [Sample]
refute File.exists?("Elixir.Sample.beam")
assert Sample.run == :run
end
after
cleanup_modules([Sample])
end
test "handles errors" do
ExUnit.CaptureIO.capture_io fn ->
with_file "sample.ex", "raise \"oops\"", fn ->
assert_raise CompileError, fn -> c("sample.ex") end
end
end
end
test "compiles a file with multiple modules " do
assert_raise UndefinedFunctionError, ~r"function Sample.run/0 is undefined", fn ->
Sample.run
end
filename = "sample.ex"
with_file filename, test_module_code() <> "\n" <> another_test_module(), fn ->
assert c(filename) |> Enum.sort == [Sample, Sample2]
assert Sample.run == :run
assert Sample2.hello == :world
end
after
cleanup_modules([Sample, Sample2])
end
test "compiles multiple modules" do
assert_raise UndefinedFunctionError, ~r"function Sample.run/0 is undefined", fn ->
Sample.run
end
filenames = ["sample1.ex", "sample2.ex"]
with_file filenames, [test_module_code(), another_test_module()], fn ->
assert c(filenames) |> Enum.sort == [Sample, Sample2]
assert Sample.run == :run
assert Sample2.hello == :world
end
after
cleanup_modules([Sample, Sample2])
end
test "compiles Erlang modules" do
assert_raise UndefinedFunctionError, ~r"function :sample.hello/0 is undefined", fn ->
:sample.hello
end
filename = "sample.erl"
with_file filename, erlang_module_code(), fn ->
assert c(filename) == [:sample]
assert :sample.hello == :world
refute File.exists?("sample.beam")
end
after
cleanup_modules([:sample])
end
# Files with unknown extensions are silently ignored by c/1.
test "skips unknown files" do
assert_raise UndefinedFunctionError, ~r"function :sample.hello/0 is undefined", fn ->
:sample.hello
end
filenames = ["sample.erl", "not_found.ex", "sample2.ex"]
with_file filenames, [erlang_module_code(), "", another_test_module()], fn ->
assert c(filenames) |> Enum.sort == [Sample2, :sample]
assert :sample.hello == :world
assert Sample2.hello == :world
end
after
cleanup_modules([:sample, Sample2])
end
# c/2 with an output path must write the .beam artifact to disk.
test "compiles file in path" do
assert_raise UndefinedFunctionError, ~r"function Sample\.run/0 is undefined", fn ->
Sample.run
end
filename = "sample.ex"
with_file filename, test_module_code(), fn ->
assert c(filename, ".") == [Sample]
assert File.exists?("Elixir.Sample.beam")
assert Sample.run == :run
end
after
cleanup_modules([Sample])
end
end
# `l/1` (re)loads a module's current .beam from disk, replacing the in-memory
# version compiled earlier in the test.
describe "l" do
test "loads a given module" do
assert_raise UndefinedFunctionError, ~r"function Sample.run/0 is undefined", fn ->
Sample.run
end
assert l(:non_existent_module) == {:error, :nofile}
filename = "sample.ex"
with_file filename, test_module_code(), fn ->
assert c(filename, ".") == [Sample]
assert Sample.run == :run
File.write! filename, "defmodule Sample do end"
elixirc ["sample.ex"]
assert l(Sample) == {:module, Sample}
assert_raise UndefinedFunctionError, "function Sample.run/0 is undefined or private", fn ->
Sample.run
end
end
after
# Clean up the old version left over after l()
cleanup_modules([Sample])
end
end
# `nl/1,2` distributes a module's bytecode to a list of nodes; each node
# yields a {node, status, info} entry, including rpc failures and sticky dirs.
describe "nl" do
test "loads a given module on the given nodes" do
assert nl(:non_existent_module) == {:error, :nofile}
assert nl([node()], Enum) == {:ok, [{:nonode@nohost, :loaded, Enum}]}
assert nl([:nosuchnode@badhost], Enum) == {:ok, [{:nosuchnode@badhost, :badrpc, :nodedown}]}
capture_log fn ->
assert nl([node()], :lists) == {:ok, [{:nonode@nohost, :error, :sticky_directory}]}
end
end
end
# `r/1` recompiles a module from its original source file and reloads it.
describe "r" do
test "raises when reloading a non existent module" do
assert_raise ArgumentError, "could not load nor find module: :non_existent_module", fn ->
r :non_existent_module
end
end
# Rewrites the source between compile and r/1; the reloaded version must win
# and the "redefining module" warning must appear on stderr.
test "reloads elixir modules" do
assert_raise UndefinedFunctionError, ~r"function Sample.run/0 is undefined \(module Sample is not available\)", fn ->
Sample.run
end
filename = "sample.ex"
with_file filename, test_module_code(), fn ->
assert capture_io(:stderr, fn ->
assert c(filename, ".") == [Sample]
assert Sample.run == :run
File.write! filename, "defmodule Sample do end"
assert {:reloaded, Sample, [Sample]} = r(Sample)
assert_raise UndefinedFunctionError, "function Sample.run/0 is undefined or private", fn ->
Sample.run
end
end) =~ "redefining module Sample (current version loaded from Elixir.Sample.beam)"
end
after
# Clean up old version produced by the r helper
cleanup_modules([Sample])
end
test "reloads Erlang modules" do
assert_raise UndefinedFunctionError, ~r"function :sample.hello/0 is undefined", fn ->
:sample.hello
end
filename = "sample.erl"
with_file filename, erlang_module_code(), fn ->
assert c(filename, ".") == [:sample]
assert :sample.hello == :world
File.write!(filename, other_erlang_module_code())
assert {:reloaded, :sample, [:sample]} = r(:sample)
assert :sample.hello == :bye
end
after
cleanup_modules([:sample])
end
end
# `pid/1` parses "a.b.c" strings and `pid/3` takes the three integers directly;
# negative components are rejected (ArgumentError vs. guard clause failure).
describe "pid" do
test "builds a pid from string" do
assert inspect(pid("0.32767.3276")) == "#PID<0.32767.3276>"
assert inspect(pid("0.5.6")) == "#PID<0.5.6>"
assert_raise ArgumentError, fn ->
pid("0.6.-6")
end
end
test "builds a pid from integers" do
assert inspect(pid(0, 32767, 3276)) == "#PID<0.32767.3276>"
assert inspect(pid(0, 5, 6)) == "#PID<0.5.6>"
assert_raise FunctionClauseError, fn ->
pid(0, 6, -6)
end
end
end
# `i/1` prints a structured report (Term / Data type / ...) about any value.
describe "i" do
test "prints information about the data type" do
output = capture_io fn -> i(:ok) end
assert output =~ String.trim_trailing("""
Term
:ok
Data type
Atom
Reference modules
Atom
""")
end
# IEx.dont_display_result/0 returns a sentinel atom; i/1 must describe it too.
test "handles functions that don't display result" do
output = capture_io fn -> i(IEx.dont_display_result()) end
assert output =~ String.trim_trailing("""
Term
:"do not show this result in output"
Data type
Atom
Description
This atom is returned by IEx when a function that should not print its
return value on screen is executed.
""")
end
end
# Source for a throwaway Sample module used by the c/l/r fixtures above.
defp test_module_code do
"""
defmodule Sample do
def run do
:run
end
end
"""
end
# Source for a second throwaway module, for multi-module compile tests.
defp another_test_module do
"""
defmodule Sample2 do
def hello do
:world
end
end
"""
end
# Erlang source fixture; hello/0 returns the atom world.
defp erlang_module_code do
"""
-module(sample).
-export([hello/0]).
hello() -> world.
"""
end
# Alternate Erlang fixture used by the reload tests; hello/0 now returns bye.
defp other_erlang_module_code do
"""
-module(sample).
-export([hello/0]).
hello() -> bye.
"""
end
# Deletes the compiled .beam artifact and fully unloads every module in `mods`
# (purge removes old code, delete unloads the current version).
defp cleanup_modules(mods) do
  Enum.each(mods, fn module ->
    File.rm("#{module}.beam")
    :code.purge(module)
    true = :code.delete(module)
  end)
end
# Writes each {name, code} pair to disk, runs `fun`, and always removes the
# files afterwards, even if `fun` raises.
defp with_file(names, codes, fun) when is_list(names) and is_list(codes) do
  names
  |> Enum.zip(codes)
  |> Enum.each(fn {name, code} -> File.write!(name, code) end)

  try do
    fun.()
  after
    Enum.each(names, &File.rm/1)
  end
end
# Normalizes single name/code arguments to lists and delegates to the
# list-based clause above.
defp with_file(name, code, fun),
  do: with_file(List.wrap(name), List.wrap(code), fun)
# Shells out to the repo-bundled elixirc binary (with the platform-specific
# extension), folding stderr into stdout.
defp elixirc(args) do
  executable = Path.expand("../../../../bin/elixirc", __DIR__) <> executable_extension()
  System.cmd(executable, args, stderr_to_stdout: true)
end
# Absolute path of the iex application root, two levels above this test file.
defp iex_path do
  __DIR__ |> Path.join("../..") |> Path.expand()
end
# Windows invokes elixirc through a .bat shim. This branch is evaluated at
# compile time, so the extension is fixed for the host that compiled the tests.
if match? {:win32, _}, :os.type do
defp executable_extension, do: ".bat"
else
defp executable_extension, do: ""
end
end
| 32.986301 | 153 | 0.596899 |
73c365daa1f1f150404674b12e10af8913541705 | 114 | exs | Elixir | test/breadboard/switch_test.exs | henrythebuilder/breadboard | f8e1378de6ad3984c7101e40bd41c81285b345c0 | [
"Apache-2.0"
] | null | null | null | test/breadboard/switch_test.exs | henrythebuilder/breadboard | f8e1378de6ad3984c7101e40bd41c81285b345c0 | [
"Apache-2.0"
] | null | null | null | test/breadboard/switch_test.exs | henrythebuilder/breadboard | f8e1378de6ad3984c7101e40bd41c81285b345c0 | [
"Apache-2.0"
] | null | null | null | defmodule SwitchTest do
use ExUnit.Case
# Runs the doctests embedded in Breadboard.Switch's @doc examples.
doctest Breadboard.Switch
end
# SPDX-License-Identifier: Apache-2.0
| 14.25 | 37 | 0.780702 |
73c3737ca64969621cfd0fb2ee18864d69717439 | 3,878 | exs | Elixir | test/credo/code/parameters_test.exs | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | null | null | null | test/credo/code/parameters_test.exs | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | null | null | null | test/credo/code/parameters_test.exs | codeclimate-community/credo | b960a25d604b4499a2577321f9d61b39dc4b0437 | [
"MIT"
] | 1 | 2020-09-25T11:48:49.000Z | 2020-09-25T11:48:49.000Z | defmodule Credo.Code.ParametersTest do
use Credo.TestHelper
alias Credo.Code.Parameters
#
# names
#
# Parameters.names/1 should return plain atoms for simple parameters, nested
# lists for keyword-list and binary-pattern parameters, and :_ for wildcards.
test "returns the correct parameter names" do
{:ok, ast} =
"""
def some_function(p1, p2, p3, p4, p5), do: :ok
"""
|> Code.string_to_quoted()
assert [:p1, :p2, :p3, :p4, :p5] == Parameters.names(ast)
{:ok, ast} =
"""
def foobar(parameter1, parameter2 \\\\ false) do
:ok
end
"""
|> Code.string_to_quoted()
assert [:parameter1, :parameter2] == Parameters.names(ast)
{:ok, ast} =
"""
def foobar(parameter2 \\\\ false, line: line, column: column) do
:ok
end
"""
|> Code.string_to_quoted()
assert [:parameter2, [:line, :column]] == Parameters.names(ast)
{:ok, ast} =
"""
defp foobar(<<h, t :: binary>>, prev) when h in ?A..?Z and not(prev in ?A..?Z) do
:ok
end
"""
|> Code.string_to_quoted()
assert [[:h, :t], :prev] == Parameters.names(ast)
{:ok, ast} =
"""
defp foobar(<<?-, t :: binary>>, _) do
:ok
end
"""
|> Code.string_to_quoted()
assert [[:t], :_] == Parameters.names(ast)
# {:ok, ast} = """
# fn(a, b) ->
# :ok
# end
# """ |> Code.string_to_quoted
# assert [:a, :b] == Parameters.names(ast)
end
# Struct patterns bound with `=` yield [matched_keys, binding_name] pairs;
# an empty %{} pattern contributes an empty key list.
test "returns the correct parameter names for pattern matches with structs" do
{:ok, ast} =
"""
def foobar(%{} = source_file, %Issue{line: line, column: column} = issue) do
:ok
end
"""
|> Code.string_to_quoted()
assert 2 == Parameters.count(ast)
assert [[[], :source_file], [[:line, :column], :issue]] == Parameters.names(ast)
end
# Same as above, but the bare map pattern now destructures a key of its own.
test "returns the correct parameter names for pattern matches with structs 2" do
{:ok, ast} =
"""
def foobar(%{ast: my_ast} = source_file, %Issue{line: line, column: column} = issue) do
:ok
end
"""
|> Code.string_to_quoted()
assert 2 == Parameters.count(ast)
assert [[[:my_ast], :source_file], [[:line, :column], :issue]] == Parameters.names(ast)
end
#
# count
#
# Parameters.count/1 counts formal parameters regardless of defaults,
# keyword-list tails, binary patterns, or wildcards.
test "returns the correct parameter counts" do
{:ok, ast} =
"""
def some_function(p1, p2, p3, p4, p5), do: :ok
"""
|> Code.string_to_quoted()
assert 5 == Parameters.count(ast)
{:ok, ast} =
"""
def foobar(parameter1, parameter2 \\\\ false) do
:ok
end
"""
|> Code.string_to_quoted()
assert 2 == Parameters.count(ast)
{:ok, ast} =
"""
def foobar(parameter2 \\\\ false, line: line) do
:ok
end
"""
|> Code.string_to_quoted()
assert 2 == Parameters.count(ast)
{:ok, ast} =
"""
defp foobar(<<h, t :: binary>>, prev) when h in ?A..?Z and not(prev in ?A..?Z) do
:ok
end
"""
|> Code.string_to_quoted()
assert 2 == Parameters.count(ast)
{:ok, ast} =
"""
defp foobar(<<?-, t :: binary>>, _) do
:ok
end
"""
|> Code.string_to_quoted()
assert 2 == Parameters.count(ast)
end
# count/1 must also accept a hand-built quoted AST (not only parsed source).
test "returns the correct parameter counts for ASTs" do
ast =
{:def, [line: 2],
[
{:some_function, [line: 2],
[
{:p1, [line: 2], nil},
{:p2, [line: 2], nil},
{:p3, [line: 2], nil},
{:p4, [line: 2], nil},
{:p5, [line: 2], nil}
]},
[
do:
{:=, [line: 3],
[
{:some_value, [line: 3], nil},
{:+, [line: 3],
[
{:parameter1, [line: 3], nil},
{:parameter2, [line: 3], nil}
]}
]}
]
]}
assert 5 == Parameters.count(ast)
end
end
| 21.786517 | 93 | 0.477824 |
73c380a157c218fbd389f3f2be2e87e6d33e8968 | 12,187 | ex | Elixir | lib/openpgp_tools/packet.ex | marcobellaccini/openpgp_tools | dd3c6559c142f1c2fe028242cc60527e1ff24974 | [
"Apache-2.0"
] | 4 | 2019-06-13T22:04:57.000Z | 2020-03-06T10:22:59.000Z | lib/openpgp_tools/packet.ex | marcobellaccini/openpgp_tools | dd3c6559c142f1c2fe028242cc60527e1ff24974 | [
"Apache-2.0"
] | null | null | null | lib/openpgp_tools/packet.ex | marcobellaccini/openpgp_tools | dd3c6559c142f1c2fe028242cc60527e1ff24974 | [
"Apache-2.0"
] | null | null | null | defmodule OpenpgpTools.Packet do
use Private
use Bitwise
# Packet types map attribute.
# See: https://tools.ietf.org/html/rfc4880#section-4.3
# Tags absent from this map (15-16, 20-59) are unassigned by the RFC and are
# rejected by parse_type/1 below.
@ptypes %{
0 => :reserved, # Reserved
1 => :pkesk, # Public-Key Encrypted Session Key Packet
2 => :sig, # Signature Packet
3 => :skesk, # Symmetric-Key Encrypted Session Key Packet
4 => :opsig, # One-Pass Signature Packet
5 => :seck, # Secret-Key Packet
6 => :pubk, # Public-Key Packet
7 => :secsubk, # Secret-Subkey Packet
8 => :cdata, # Compressed Data Packet
9 => :sedata, # Symmetrically Encrypted Data Packet
10 => :marker, # Marker Packet
11 => :ldata, # Literal Data Packet
12 => :trust, # Trust Packet
13 => :uid, # User ID Packet
14 => :pubsubk, # Public-Subkey Packet
17 => :uattr, # User Attribute Packet
18 => :seipdata, # Sym. Encrypted and Integrity Protected Data Packet
19 => :moddetcode, # Modification Detection Code Packet
60 => :private, # Private or Experimental Values
61 => :private, # Private or Experimental Values
62 => :private, # Private or Experimental Values
63 => :private # Private or Experimental Values
}
# parse packet type function: maps a numeric tag to its atom via @ptypes,
# raising for tags not present in the map.
private do
defp parse_type(x), do: @ptypes |> Map.get(x) |> _parse_type
defp _parse_type(nil), do: raise "Unknown packet type"
defp _parse_type(x), do: x
end
@doc """
Given raw binary OpenPGP data stream, return a map containing:
- format type
- packet type
- packet header raw data
- packet body raw data (in the partial body lengths case,
body is purged from all the intermediate body length fields)
- trailing (residual) raw data
Possible values for format type are:
`:old`, `:new`
See: https://tools.ietf.org/html/rfc4880#section-4.2
Possible values for packet type are:
`:reserved`, `:pkesk`, `:sig`, `:skesk`, `:opsig`, `:seck`, `:pubk`,
`:secsubk`, `:cdata`, `:sedata`, `:marker`, `:ldata`, `:trust`, `:uid`,
`:pubsubk`, `:uattr`, `:seipdata`, `:moddetcode`, `:private`
See: https://tools.ietf.org/html/rfc4880#section-4.3
## Example
iex> pkt1 = File.stream!("test/fixtures/gpg2_1/bin/encr/test.txt.gpg") |>
...> OpenpgpTools.Packet.parse_packet
iex> Map.keys(pkt1)
[:body, :format, :header, :residual, :type]
iex> pkt1.format
:old
iex> pkt1.type
:pkesk
iex> pkt1.header
[<<133>>, <<1>>, "\f" ]
iex> pkt1.body |> Enum.take(4)
[<<3>>, <<166>>, "b", "1" ]
iex> pkt1.residual |> Enum.take(4)
[<<210>>, "V", <<1>>, "c" ]
iex> pkt2 = pkt1.residual |> OpenpgpTools.Packet.parse_packet
iex> pkt2.format
:new
iex> pkt2.type
:seipdata
"""
def parse_packet([]), do: raise ("Error: no data to parse")
def parse_packet(stream) do
# get list of single-byte elements
# (materializes the entire stream; every element is a 1-byte binary, which
# is also the representation of the returned header/body/residual data)
data = stream
|> Enum.map(&(:binary.bin_to_list(&1)))
|> List.flatten
|> Enum.map(&(:binary.encode_unsigned(&1)))
# get packet tag octet and first octet of packet body length
[ptag, bl1] = data |> Enum.take(2)
# parse packet tag
{pformat, ptype, plt} = parse_ptag(ptag)
# get packet header length
hlen = get_hlen(plt, bl1)
# get packet header
phead = data |> Enum.take(hlen)
# get body length information
bli = get_blen(data)
# get packet length
pack_len = get_pack_len(hlen, bli)
# get packet and residual raw data
{packet, resdata} = data |> Enum.split(pack_len)
# get packet body
pbody = get_pbody(packet, bli)
# NOTE: remember that, for the partial body lengths case,
# packet header ++ packet body != packet
# return map with results
%{ format: pformat, type: ptype, header: phead, body: pbody,
residual: resdata }
end
# Parse a Packet Tag octet (i.e. a byte), returning the packet format, type and,
# for the old format, the body length type.
#
# Possible values for format are:
#
# `:old`, `:new`
#
# See: https://tools.ietf.org/html/rfc4880#section-4.2
#
# Possible values for type are:
#
# `:reserved`, `:pkesk`, `:sig`, `:skesk`, `:opsig`, `:seck`, `:pubk`,
# `:secsubk`, `:cdata`, `:sedata`, `:marker`, `:ldata`, `:trust`, `:uid`,
# `:pubsubk`, `:uattr`, `:seipdata`, `:moddetcode`, `:private`
#
# See: https://tools.ietf.org/html/rfc4880#section-4.3
#
# Possible values for the body length type are:
#
# `:oneoct`, `:twooct`, `:fouroct`, `:indet`, `:none`
# (the latter is returned for the new format)
#
# See: https://tools.ietf.org/html/rfc4880#section-4.2
#
private do
# handle old format packets: bit 7 set, bit 6 clear, 4-bit tag, 2-bit length type
# one-octet length-type
defp parse_ptag(<< 1::size(1), 0::size(1), ptag::size(4), 0::size(2) >>),
do: {:old, parse_type(ptag), :oneoct}
# two-octet length-type
defp parse_ptag(<< 1::size(1), 0::size(1), ptag::size(4), 1::size(2) >>),
do: {:old, parse_type(ptag), :twooct}
# four-octet length-type
defp parse_ptag(<< 1::size(1), 0::size(1), ptag::size(4), 2::size(2) >>),
do: {:old, parse_type(ptag), :fouroct}
# indeterminate length-type
defp parse_ptag(<< 1::size(1), 0::size(1), ptag::size(4), 3::size(2) >>),
do: {:old, parse_type(ptag), :indet}
# handle new format packets: bits 7 and 6 set, 6-bit tag
defp parse_ptag(<< 1::size(1), 1::size(1), ptag::size(6) >>),
do: {:new, parse_type(ptag), :none}
# bit 7 must be 1
defp parse_ptag(_ptagb) do
raise "Bad packet tag: bit 7 must be one"
end
end
# Turns a list of 1-byte binaries into the corresponding list of integers.
private do
  defp binl_to_intl(binary_list) do
    Enum.map(binary_list, fn <<byte>> -> byte end)
  end
end
# get header length
# need packet length type and first byte of packet body length as arguments
# NOTE: the new-format guards compare 1-byte binaries (e.g. << 192 >>), which
# orders the same as the underlying integer for single bytes.
private do
# old format
# one-octet packet length type
defp get_hlen(:oneoct, _blen1), do: 2
# two-octet packet length type
defp get_hlen(:twooct, _blen1), do: 3
# four-octet packet length type
defp get_hlen(:fouroct, _blen1), do: 5
# indeterminate packet length type
defp get_hlen(:indet, _blen1), do: 1
# new format
# one-octet packet length
defp get_hlen(:none, blen1) when blen1 < << 192 >>, do: 2
# two-octet packet length
defp get_hlen(:none, blen1) when blen1 < << 224 >>, do: 3
# partial length
defp get_hlen(:none, blen1) when blen1 < << 255 >>, do: 2
# five-octet packet length
defp get_hlen(:none, blen1) when blen1 == << 255 >>, do: 6
end
# Given a packet, return a list with information about its Body Length.
#
# In most cases, the returned value is a single-element list containing
# the packet body length in bytes.
#
# However, when the packet makes use of Partial Body Lengths, the returned value
# is a list of partial body lengths, in reversed order.
#
# Ref: https://tools.ietf.org/html/rfc4880#section-4.2
#
private do
defp get_blen(packet) do
{format, _tag, blt} = packet |> Enum.at(0) |> parse_ptag
_get_blen(packet, format, blt, Enum.at(packet, 1), [])
end
# private, wrapped functions need some additional arguments:
# - packet format
# - body length type (this is for the old format, for the new one
# pass :none)
# - 1st octet of body length
# - a body-length accumulator list (passed to perform tail-call-optimization
# in the partial body lengths case - for the new format)
## old packet format, one-octet length
defp _get_blen(packet, :old, :oneoct, _blen1, bla) do
octets = packet |> Enum.slice(1, 1) |> binl_to_intl
[ Enum.at(octets, 0) | bla ]
end
## old packet format, two-octet length
defp _get_blen(packet, :old, :twooct, _blen1, bla) do
octets = packet |> Enum.slice(1, 2) |> binl_to_intl
[ (Enum.at(octets, 0) <<< 8) + Enum.at(octets, 1) | bla ]
end
## old packet format, four-octet length
defp _get_blen(packet, :old, :fouroct, _blen1, bla) do
octets = packet |> Enum.slice(1, 4) |> binl_to_intl
[ (Enum.at(octets, 0) <<< 24) + (Enum.at(octets, 1) <<< 16) +
(Enum.at(octets, 2) <<< 8) + Enum.at(octets, 3)
| bla ]
end
## old packet format, indeterminate length
# this will assume that the packet spans to the end of the stream
defp _get_blen(packet, :old, :indet, _blen1, bla) do
[ packet |> Enum.drop(1) |> length
| bla ]
end
## new packet format
# new packet format, one-octet lengths
defp _get_blen(packet, :new, :none, blen1, bla) when blen1 < << 192 >> do
octets = packet |> Enum.slice(1, 1) |> binl_to_intl
[ Enum.at(octets, 0) | bla ]
end
# new packet format, two-octet lengths
defp _get_blen(packet, :new, :none, blen1, bla) when blen1 < << 224 >> do
octets = packet |> Enum.slice(1, 2) |> binl_to_intl
[ ((Enum.at(octets, 0) - 192) <<< 8) + (Enum.at(octets, 1)) + 192 | bla ]
end
# new packet format, partial body lengths
# https://tools.ietf.org/html/rfc4880#section-4.2.2.4
defp _get_blen(packet, :new, :none, blen1, bla) when blen1 < << 255 >> do
octets = packet |> Enum.slice(1, 1) |> binl_to_intl
len_of_part = 1 <<< (Enum.at(octets, 0) &&& 0x1F)
# drop one byte fewer than tag + length octet + this part, so the recursive
# call reads the next length octet at index 1, mirroring the initial call
residual = packet |> Enum.drop(2 + len_of_part - 1)
new_bla = [len_of_part | bla]
_get_blen(residual, :new, :none, Enum.at(residual, 1), new_bla)
end
# new packet format, five-octet lengths
defp _get_blen(packet, :new, :none, blen1, bla) when blen1 == << 255 >> do
octets = packet |> Enum.slice(1, 5) |> binl_to_intl
[ (Enum.at(octets, 1) <<< 24) ||| (Enum.at(octets, 2) <<< 16) |||
(Enum.at(octets, 3) <<< 8) ||| Enum.at(octets, 4)
| bla ]
end
end
# get length of packet length field for new format
# this is for the non-partial lengths case
# need body length as argument
# (thresholds follow RFC 4880 section 4.2.2: 1, 2, or 5 length octets)
private do
defp get_lpl_new_np(blen) when blen < 192, do: 1
defp get_lpl_new_np(blen) when blen < 8384, do: 2
defp get_lpl_new_np(blen) when blen < 4_294_967_296, do: 5
defp get_lpl_new_np(_blen), do:
raise "Cannot infer length of packet length field"
end
# given header length and a body length info list, return packet length
# (header and body)
private do
# non-partial lengths case
# gross body length is:
# body length + header_length
defp get_pack_len(hl, [bl]), do: bl + hl
# partial lengths case
# gross body length is:
# length of the last partial body length field +
# + one byte for each remaining partial body length field +
# + net body length (i.e.: sum of all partial body lengths)
# + 1 (packet tag byte)
# (the header length argument is unused here: partial lengths imply the
# new format, whose tag+first-length bytes are accounted for explicitly)
defp get_pack_len(_hl, bli = [last_pbl | oth_bl]) do
get_lpl_new_np(last_pbl) + length(oth_bl) + Enum.sum(bli) + 1
end
end
# get packet body
# need ONE raw packet (NOT raw data containing multiple packets!)
# and body length info as arguments
# In the partial-lengths case, the chain is walked from the END of the packet
# backwards (bli is already reversed), stitching the parts together while
# skipping the interleaved length octets.
private do
# wrapper function
defp get_pbody(data, bli) do
# call wrapped function
_get_pbody(data, bli)
end
# handle all cases except partial body lengths
defp _get_pbody(data, [blen]), do: data |> Enum.take(-blen)
# partial body lengths (match body lengths lists with more than one element)
defp _get_pbody(data, bli = [_last_blen | _oth]) do
_get_pbody_pbl_tail(data, bli, [])
end
# match the end of a chain of partial body lengths
defp _get_pbody_pbl_tail(data, [last_blen | oth_blen], part_body) do
part_body_new = Enum.take(data, -last_blen) ++ part_body
_get_pbody_pbl(Enum.drop(data, - last_blen - get_lpl_new_np(last_blen)),
oth_blen, part_body_new)
end
# match the beginning of a chain of partial body lengths
defp _get_pbody_pbl(data, [first_blen], part_body) do
Enum.take(data, -first_blen) ++ part_body
end
# match other parts of a chain of partial body lengths
# (intermediate partial lengths always occupy a single octet)
defp _get_pbody_pbl(data, [last_blen | oth_blen], part_body) do
part_body_new = Enum.take(data, -last_blen) ++ part_body
_get_pbody_pbl(Enum.drop(data, - last_blen - 1), oth_blen,
part_body_new)
end
end
end
| 35.530612 | 82 | 0.625174 |
73c389229beb7f9b7f53131a896c43f1ef0c6dff | 2,009 | ex | Elixir | apps/ewallet_api/lib/ewallet_api/v1/controllers/pages/verify_email_controller.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet_api/lib/ewallet_api/v1/controllers/pages/verify_email_controller.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet_api/lib/ewallet_api/v1/controllers/pages/verify_email_controller.ex | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletAPI.V1.VerifyEmailController do
@moduledoc """
Handles pages for email verification.
"""
use EWalletAPI, :controller
import EWalletAPI.V1.PageRouter.Helpers, only: [verify_email_path: 2]
alias EWallet.SignupGate
alias EWallet.Web.V1.ErrorHandler
# URL template handed to the mailer; {email}/{token} are placeholders the
# mailer substitutes, not values interpolated here.
def verify_url, do: build_url("/pages/client/v1/verify_email?email={email}&token={token}")
# Default landing page shown after a successful verification.
def success_url, do: build_url("/pages/client/v1/verify_email/success")
# Prefixes `path` with the configured :base_url of the :ewallet_api app.
defp build_url(path) do
Application.get_env(:ewallet_api, :base_url) <> path
end
@doc """
Renders the landing page to start the email verification steps.

Redirects to the default success page when the signup has no custom
success_url, to the custom URL otherwise, and renders a plain-text failure
message (with the known error description when available) on error.
"""
def verify(conn, attrs) do
case SignupGate.verify_email(attrs) do
{:ok, %{success_url: nil}} ->
redirect(conn, to: verify_email_path(conn, :success))
{:ok, %{success_url: success_url}} ->
redirect(conn, external: success_url)
{:error, error} ->
case Map.get(ErrorHandler.errors(), error) do
%{description: description} ->
text(conn, "We were unable to verify your email address. " <> description)
nil ->
# Fixed typo in the user-facing message: "occured" -> "occurred".
text(conn, "We were unable to verify your email address. An unknown error occurred.")
end
end
end
@doc """
Renders the page to show when email verification is successful.
"""
def success(conn, _attrs) do
text(conn, "Your email address has been successfully verified!")
end
end
| 32.934426 | 96 | 0.700348 |
73c39c2e4690762d53651dc4e547445f4f1e62b9 | 2,145 | ex | Elixir | lib/harald/host/l2cap.ex | RicardoTrindade/harald | 3f56003265c29af0780730eb538183b50e55df2f | [
"MIT"
] | null | null | null | lib/harald/host/l2cap.ex | RicardoTrindade/harald | 3f56003265c29af0780730eb538183b50e55df2f | [
"MIT"
] | null | null | null | lib/harald/host/l2cap.ex | RicardoTrindade/harald | 3f56003265c29af0780730eb538183b50e55df2f | [
"MIT"
] | null | null | null | defmodule Harald.Host.L2CAP do
@moduledoc """
Decoding and encoding of the L2CAP basic frame (length, channel id, payload).

Reference: version 5.2, vol 3, part a.
"""
alias Harald.Host.ATT
# All three frame fields are mandatory when building the struct explicitly.
@enforce_keys [
:length,
:channel,
:information_payload
]
# length - payload byte length; channel - %{id:, module:}; information_payload -
# the decoded payload term produced by the channel module.
defstruct [
:length,
:channel,
:information_payload
]
# Decodes an encoded L2CAP frame: little-endian 16-bit length, little-endian
# 16-bit channel id, then exactly `length` payload bytes (the binary-size
# match means trailing bytes would fail to match this clause).
def decode(
<<
length::little-size(16),
channel_id::little-size(16),
encoded_information_payload::binary-size(length)
>> = encoded_bin
) do
with {:ok, channel_module} <- channel_id_to_module(channel_id),
{:ok, decoded_information_payload} <- channel_module.decode(encoded_information_payload) do
decoded_l2cap = %__MODULE__{
length: length,
channel: %{id: channel_id, module: channel_module},
information_payload: decoded_information_payload
}
{:ok, decoded_l2cap}
else
# NOTE(review): this clause matches a 3-tuple inside :error, but
# channel_id_to_module/1 returns {:error, {:not_implemented, 2-tuple}} -
# an unknown channel id would therefore raise WithClauseError here.
# Verify against the channel modules' error contract.
{:error, {:not_implemented, error, _bin}} ->
{:error, {:not_implemented, error, encoded_bin}}
end
end
# Maps an L2CAP channel id to its codec module; only ATT (0x04) is supported.
def channel_id_to_module(0x04), do: {:ok, ATT}
def channel_id_to_module(channel_id), do: {:error, {:not_implemented, {__MODULE__, channel_id}}}
# Encodes a decoded L2CAP struct back to binary. When `length` is nil it is
# derived from the encoded payload; otherwise the stored length is trusted
# (and the binary-size segment will raise if it disagrees with the payload).
def encode(%__MODULE__{
length: length,
channel: %{id: channel_id, module: channel_module},
information_payload: decoded_information_payload
}) do
{:ok, encoded_information_payload} = channel_module.encode(decoded_information_payload)
length =
case length do
nil -> byte_size(encoded_information_payload)
length -> length
end
encoded_l2cap = <<
length::little-size(16),
channel_id::little-size(16),
encoded_information_payload::binary-size(length)
>>
{:ok, encoded_l2cap}
end
# Builds a decoded L2CAP struct for `decoded_information_payload`, computing
# `length` from the channel module's encoded form of the payload.
def new(channel_module, decoded_information_payload) do
  {:ok, encoded_information_payload} = channel_module.encode(decoded_information_payload)

  decoded_l2cap = %__MODULE__{
    length: byte_size(encoded_information_payload),
    channel: %{id: channel_module.id, module: channel_module},
    information_payload: decoded_information_payload
  }

  {:ok, decoded_l2cap}
end
end
| 27.151899 | 100 | 0.664802 |
73c39ff0d21e10e1b169db6be92ad56282df1ee8 | 1,605 | exs | Elixir | test/movement/builders/revision_merge_test.exs | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 806 | 2018-04-07T20:40:33.000Z | 2022-03-30T01:39:57.000Z | test/movement/builders/revision_merge_test.exs | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 194 | 2018-04-07T13:49:37.000Z | 2022-03-30T19:58:45.000Z | test/movement/builders/revision_merge_test.exs | isshindev/accent | ae4c13139b0a0dfd64ff536b94c940a4e2862150 | [
"BSD-3-Clause"
] | 89 | 2018-04-09T13:55:49.000Z | 2022-03-24T07:09:31.000Z | defmodule AccentTest.Movement.Builders.RevisionMerge do
use Accent.RepoCase
alias Accent.{
Document,
Language,
ProjectCreator,
Repo,
Translation,
User
}
alias Movement.Builders.RevisionMerge, as: RevisionMergeBuilder
alias Movement.Context
@user %User{email: "[email protected]"}
test "builder fetch translations and use comparer" do
  # Seed a user, a language and a project (which creates a revision).
  user = Repo.insert!(@user)
  language = Repo.insert!(%Language{name: "English", slug: Ecto.UUID.generate()})

  {:ok, project} =
    ProjectCreator.create(
      params: %{main_color: "#f00", name: "My project", language_id: language.id},
      user: user
    )

  preloaded_project = Repo.preload(project, :revisions)
  revision = hd(preloaded_project.revisions)

  document = Repo.insert!(%Document{project_id: project.id, path: "test", format: "json"})

  translation =
    Repo.insert!(%Translation{
      key: "a",
      proposed_text: "A",
      revision_id: revision.id,
      document_id: document.id
    })

  entries = [%Langue.Entry{key: "a", value: "B", value_type: "string"}]

  # The comparer stub tags every entry with a fixed action so we can assert
  # the builder actually invoked it.
  comparer = fn entry, _existing ->
    %Movement.Operation{action: "merge_on_proposed", key: entry.key}
  end

  context =
    %Context{entries: entries}
    |> Context.assign(:comparer, comparer)
    |> Context.assign(:document, document)
    |> Context.assign(:revision, revision)
    |> RevisionMergeBuilder.build()

  translation_ids = Enum.map(context.assigns[:translations], & &1.id)
  operations = Enum.map(context.operations, & &1.action)

  assert translation_ids === [translation.id]
  assert operations === ["merge_on_proposed"]
end
end
| 32.1 | 131 | 0.654829 |
73c3ae6e54dbf40395e5c6d416776981444d2348 | 2,100 | exs | Elixir | test/mix/tasks/ecto.rollback_test.exs | LostKobrakai/ecto_sql | 99f3de33c48d6dee83346d971a448ec7e24cd6bf | [
"Apache-2.0"
] | null | null | null | test/mix/tasks/ecto.rollback_test.exs | LostKobrakai/ecto_sql | 99f3de33c48d6dee83346d971a448ec7e24cd6bf | [
"Apache-2.0"
] | null | null | null | test/mix/tasks/ecto.rollback_test.exs | LostKobrakai/ecto_sql | 99f3de33c48d6dee83346d971a448ec7e24cd6bf | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Ecto.RollbackTest do
use ExUnit.Case
import Mix.Tasks.Ecto.Rollback, only: [run: 2]
import Support.FileHelpers
@migrations_path Path.join([tmp_path(), inspect(Ecto.Migrate), "migrations"])
# Recreate the migrations directory before every test; at least one test
# below deletes it.
setup do
  File.mkdir_p!(@migrations_path)
  :ok
end
# Minimal stand-in for an Ecto repo. `start_link/1` records that it was
# called in the process dictionary and hands back a long-lived task so the
# Mix task sees a normal `{:ok, pid}`.
defmodule Repo do
  def start_link(_) do
    # Mark the start attempt for the assertions in the tests below.
    Process.put(:started, true)

    # Keep a process alive until it receives a normal exit, mimicking a
    # running repo supervision tree.
    Task.start_link fn ->
      Process.flag(:trap_exit, true)

      receive do
        {:EXIT, _, :normal} -> :ok
      end
    end
  end

  def stop() do
    :ok
  end

  # Minimal adapter/config so the Ecto task machinery accepts this repo.
  def __adapter__ do
    EctoSQL.TestAdapter
  end

  def config do
    [priv: "tmp/#{inspect(Ecto.Migrate)}", otp_app: :ecto_sql]
  end
end
# Stand-in repo that reports it is already running. `stop/0` raises because
# the task must not stop a repo it did not start itself.
defmodule StartedRepo do
  def start_link(_) do
    {:error, {:already_started, :whatever}}
  end

  def stop() do
    raise "should not be called"
  end

  def __adapter__ do
    EctoSQL.TestAdapter
  end

  def config do
    [priv: "tmp/#{inspect(Ecto.Migrate)}", otp_app: :ecto_sql]
  end
end
test "runs the migrator after starting repo" do
  # The migrator callback records that it ran; the fake Repo records that it
  # was started.
  migrate = fn _repo, _path, _direction, _opts ->
    Process.put(:migrated, true)
    []
  end

  run(["-r", to_string(Repo), "--no-start"], migrate)

  assert Process.get(:migrated)
  assert Process.get(:started)
end
test "runs the migrator with already started repo" do
  # StartedRepo returns {:error, {:already_started, _}}; the task must still
  # run the migrator.
  migrate = fn _repo, _path, _direction, _opts ->
    Process.put(:migrated, true)
    []
  end

  run(["-r", to_string(StartedRepo), "--no-start"], migrate)

  assert Process.get(:migrated)
end
test "runs the migrator yielding the repository and migrations path" do
  # Inspect the arguments the task hands to the migrator callback.
  migrate = fn repo, path, direction, opts ->
    assert repo == Repo
    refute path =~ ~r/_build/
    assert direction == :down
    assert opts[:step] == 1
    assert opts[:prefix] == "foo"
    []
  end

  run(["-r", to_string(Repo), "--prefix", "foo"], migrate)

  assert Process.get(:started)
end
test "raises when migrations path does not exist" do
  File.rm_rf!(@migrations_path)

  assert_raise Mix.Error, fn ->
    run(["-r", to_string(Repo)], fn _repo, _path, _direction, _opts -> [] end)
  end

  # The repo must not have been started when the path check fails.
  refute Process.get(:started)
end
end
| 22.580645 | 85 | 0.60619 |
73c3fe7c77a7060ee278d10755b6d719881ee2f5 | 197 | exs | Elixir | test/portal_web/controllers/page_controller_test.exs | auth0-samples/auth0-elixir-single-page-app | 05c344e828ecf5ac716c5537b0e5760571f0574f | [
"MIT"
] | 6 | 2018-07-12T20:50:21.000Z | 2021-04-10T19:53:10.000Z | test/portal_web/controllers/page_controller_test.exs | auth0-samples/auth0-elixir-single-page-app | 05c344e828ecf5ac716c5537b0e5760571f0574f | [
"MIT"
] | null | null | null | test/portal_web/controllers/page_controller_test.exs | auth0-samples/auth0-elixir-single-page-app | 05c344e828ecf5ac716c5537b0e5760571f0574f | [
"MIT"
] | 2 | 2018-01-30T22:52:01.000Z | 2018-02-05T12:55:28.000Z | defmodule PortalWeb.PageControllerTest do
use PortalWeb.ConnCase
test "GET /", %{conn: conn} do
  # The landing page should respond with the default Phoenix welcome text.
  response = get(conn, "/")

  assert html_response(response, 200) =~ "Welcome to Phoenix!"
end
end
| 21.888889 | 60 | 0.680203 |
73c41aea35aa9603cbfb06d23723c2f4875a2e4b | 1,099 | ex | Elixir | lib/crit_web/controllers/reports/animal_report_controller.ex | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | 6 | 2019-07-16T19:31:23.000Z | 2021-06-05T19:01:05.000Z | lib/crit_web/controllers/reports/animal_report_controller.ex | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | null | null | null | lib/crit_web/controllers/reports/animal_report_controller.ex | brownt23/crit19 | c45c7b3ae580c193168d83144da0eeb9bc91c8a9 | [
"MIT"
] | 3 | 2020-02-24T23:38:27.000Z | 2020-08-01T23:50:17.000Z | defmodule CritWeb.Reports.AnimalReportController do
use CritWeb, :controller
use CritWeb.Controller.Path, :reports_animal_report_path
import CritWeb.Plugs.Authorize

alias Crit.Servers.Institution
alias Ecto.Timespan
alias Crit.SqlRows.Reservation
alias CritBiz.ViewModels.Reports.Animal

# NOTE(review): this prints at *compile time*, not at runtime — it looks
# like a leftover reminder that should be a TODO comment instead.
IO.puts "need to create new permissions"

# Guards every action in this controller behind the reservations permission;
# per the note above, a dedicated report permission does not exist yet.
plug :must_be_able_to, :make_reservations
# Renders the report-configuration form, pointing it at the
# `use_last_month` action.
def use_form(conn, _params),
  do: render(conn, "use_form.html", path: path(:use_last_month))
# Renders animal-use data for the previous calendar month of the
# institution attached to the connection.
def use_last_month(conn, _params) do
  institution = institution(conn)
  {:ok, previous_month} = last_month_span(institution)

  uses =
    previous_month
    |> Reservation.timespan_uses(institution)
    |> Animal.multi_animal_uses()

  render(conn, "use.html", uses: uses)
end
# Builds a Timespan covering the calendar month preceding the institution's
# current date.
defp last_month_span(institution) do
  {this_year, this_month, _} =
    institution
    |> Institution.today!()
    |> Date.to_erl()

  # Stepping one day back from the first of the current month lands on the
  # last day of the previous month, whose year/month we want.
  {year, month, _} =
    {this_year, this_month, 1}
    |> Date.from_erl!()
    |> Date.add(-1)
    |> Date.to_erl()

  Timespan.month_span(year, month)
end
end
| 25.55814 | 62 | 0.703367 |
73c46b7f4f0ccc0e034da0552df9cdb98d386581 | 88 | ex | Elixir | lib/keycloak_admin/kc_response.ex | ericacheong/keycloak_admin | ec807ae3cb42ded1388c1a8a1bf307481185e798 | [
"MIT"
] | 1 | 2021-08-19T02:44:14.000Z | 2021-08-19T02:44:14.000Z | lib/keycloak_admin/kc_response.ex | ericacheong/keycloak_admin | ec807ae3cb42ded1388c1a8a1bf307481185e798 | [
"MIT"
] | null | null | null | lib/keycloak_admin/kc_response.ex | ericacheong/keycloak_admin | ec807ae3cb42ded1388c1a8a1bf307481185e798 | [
"MIT"
] | 1 | 2021-08-19T06:45:28.000Z | 2021-08-19T06:45:28.000Z | defmodule KeycloakAdmin.KcResponse do
# Result wrapper for Keycloak admin calls:
#   :op       - the operation that was performed
#   :response - the response payload
#   :input    - the input the operation was called with
#   :status   - presumably the HTTP/result status; confirm against callers
defstruct [:op, :response, :input, :status]
end
| 22 | 45 | 0.761364 |
73c49abba28fb2c708ca560ddc5fee6b649be6a3 | 1,618 | ex | Elixir | lib/step_flow/metrics/job_instrumenter.ex | media-io/ex_step_flow | 07b6fe98399836679728547319c55137a1b5c933 | [
"MIT"
] | 4 | 2019-12-07T05:18:26.000Z | 2020-11-06T23:28:43.000Z | lib/step_flow/metrics/job_instrumenter.ex | media-io/ex_step_flow | 07b6fe98399836679728547319c55137a1b5c933 | [
"MIT"
] | 53 | 2020-01-06T11:23:09.000Z | 2021-06-25T15:30:07.000Z | lib/step_flow/metrics/job_instrumenter.ex | media-io/ex_step_flow | 07b6fe98399836679728547319c55137a1b5c933 | [
"MIT"
] | 3 | 2020-01-30T15:37:40.000Z | 2020-10-27T14:10:02.000Z | defmodule StepFlow.Metrics.JobInstrumenter do
@moduledoc """
Prometheus metrics instrumenter to call jobs metric collectors
"""
use Prometheus.Metric
@doc """
Declares the job lifecycle metrics with Prometheus.

Must be called once (at application start) before any of these metrics is
incremented. All metrics are labelled by job name.
"""
def setup do
  Counter.declare(
    name: :step_flow_jobs_created,
    help: "Number of created jobs.",
    labels: [:name]
  )

  Counter.declare(
    name: :step_flow_jobs_error,
    help: "Number of failed jobs.",
    labels: [:name]
  )

  Counter.declare(
    name: :step_flow_jobs_completed,
    help: "Number of completed jobs.",
    labels: [:name]
  )

  # Fix: the previous help text ("Number of sockets checked out from the
  # pool") was copy-pasted from a connection-pool example; this gauge tracks
  # jobs currently being processed (see `inc/2` below).
  Gauge.declare(
    name: :step_flow_jobs_processing,
    help: "Number of jobs currently being processed.",
    labels: [:name]
  )
end
@doc """
Records a job lifecycle event for `job_name`.

Increments the matching lifecycle counter and keeps the
`:step_flow_jobs_processing` gauge in sync: a created job enters the
processing set, while a failed or completed job leaves it. Does nothing
when metrics are disabled in the configuration.
"""
def inc(:step_flow_jobs_created, job_name), do: track(:step_flow_jobs_created, job_name, :enter)
def inc(:step_flow_jobs_error, job_name), do: track(:step_flow_jobs_error, job_name, :leave)
def inc(:step_flow_jobs_completed, job_name), do: track(:step_flow_jobs_completed, job_name, :leave)

# Shared implementation for the three clauses above (they previously
# duplicated the same body): bump `counter` and move the processing gauge
# in `direction`, labelled by job name.
defp track(counter, job_name, direction) do
  if StepFlow.Configuration.metrics_enabled?() do
    Counter.inc(name: counter, labels: [job_name])

    case direction do
      :enter -> Gauge.inc(name: :step_flow_jobs_processing, labels: [job_name])
      :leave -> Gauge.dec(name: :step_flow_jobs_processing, labels: [job_name])
    end
  end
end
end
| 21.573333 | 64 | 0.621755 |
73c49afc51840aa56c4348fc880a8a5db840e77a | 1,549 | ex | Elixir | clients/plus/lib/google_api/plus/v1/model/place_position.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/plus/lib/google_api/plus/v1/model/place_position.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/plus/lib/google_api/plus/v1/model/place_position.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Plus.V1.Model.PlacePosition do
@moduledoc """
The position of the place.
## Attributes
- latitude (float()): The latitude of this position. Defaults to: `null`.
- longitude (float()): The longitude of this position. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:latitude => any(),
:longitude => any()
}
field(:latitude)
field(:longitude)
end
defimpl Poison.Decoder, for: GoogleApi.Plus.V1.Model.PlacePosition do
def decode(value, options) do
GoogleApi.Plus.V1.Model.PlacePosition.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Plus.V1.Model.PlacePosition do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 30.372549 | 77 | 0.726921 |
73c49f67ea3d9a3debccd9b75180c763ef8f0331 | 1,333 | ex | Elixir | lib/curator_timeoutable/plug.ex | curator-ex/curator_timeoutable | 6d3fe59a78d6df50efb0ffe512b2975988d041e1 | [
"MIT"
] | null | null | null | lib/curator_timeoutable/plug.ex | curator-ex/curator_timeoutable | 6d3fe59a78d6df50efb0ffe512b2975988d041e1 | [
"MIT"
] | null | null | null | lib/curator_timeoutable/plug.ex | curator-ex/curator_timeoutable | 6d3fe59a78d6df50efb0ffe512b2975988d041e1 | [
"MIT"
] | null | null | null | defmodule CuratorTimeoutable.Plug do
@moduledoc """
Use this hook to set a last_request_at timestamp, and signout if it's greater
than a configured time
"""
# import Plug.Conn
import CuratorTimeoutable.Keys, only: [timeoutable_key: 1]
def init(opts \\ %{}) do
opts = Enum.into(opts, %{})
%{
key: Map.get(opts, :key, Curator.default_key),
timeout_in: Map.get(opts, :timeout_in, CuratorTimeoutable.Config.timeout_in)
}
end
def call(conn, opts) do
key = Map.get(opts, :key)
timeout_in = Map.get(opts, :timeout_in)
case Curator.PlugHelper.current_resource(conn, key) do
nil -> conn
{:error, _error} -> conn
_current_resource ->
case verify(conn, timeout_in, key) do
true -> set_timeoutable(conn, key)
false -> Curator.PlugHelper.clear_current_resource_with_error(conn, "Session Timeout", key)
end
end
end
defp verify(conn, timeout_in, key) do
last_request_at = Plug.Conn.get_session(conn, timeoutable_key(key))
verify_exp(timeout_in, last_request_at)
end
defp verify_exp(_, nil), do: false
defp verify_exp(timeout_in, last_request_at) do
last_request_at + timeout_in > Curator.Time.timestamp
end
defp set_timeoutable(conn, key) do
CuratorTimeoutable.set_timeoutable(conn, key)
end
end
| 27.770833 | 101 | 0.686422 |
73c4d12e93d962e5feb959ab574d661d56b55d7d | 1,081 | ex | Elixir | lib/bamboo_smtp_sandbox/sender.ex | fewlinesco/bamboo_smtp-playground | 744492672a3975366f22ea98cb9b29b20ee118ad | [
"MIT"
] | null | null | null | lib/bamboo_smtp_sandbox/sender.ex | fewlinesco/bamboo_smtp-playground | 744492672a3975366f22ea98cb9b29b20ee118ad | [
"MIT"
] | null | null | null | lib/bamboo_smtp_sandbox/sender.ex | fewlinesco/bamboo_smtp-playground | 744492672a3975366f22ea98cb9b29b20ee118ad | [
"MIT"
] | null | null | null | defmodule BambooSMTPSandbox.Sender do
@moduledoc """
Functions for delivering emails using BambooSMTP and based on our Email factory.
In development environment, the log level should be set to `:debug`, allowing
you to see the content of the email you try to send.
Feel free to tweaks this module by adding new function(s) reproducing the issue(s)
you want to fix.
"""
@doc """
Deliver an email right away.
The email contains a body and a subject but no attachments.
"""
@spec send_sample_email() :: Bamboo.Email.t()
def send_sample_email do
email = BambooSMTPSandbox.Email.sample_email()
BambooSMTPSandbox.Mailer.deliver_now(email)
end
@doc """
Deliver an email right away.
The email contains a body, a subject and some attachments based on the content
of the folder `priv/attachments`.
"""
@spec send_sample_email_with_attachments() :: Bamboo.Email.t()
def send_sample_email_with_attachments do
email = BambooSMTPSandbox.Email.sample_email_with_attachments()
BambooSMTPSandbox.Mailer.deliver_now(email)
end
end
| 29.216216 | 84 | 0.745606 |
73c4f4caafd40a3c0e5472800e4c4cb4e6f7ea20 | 375 | ex | Elixir | lib/hl7/2.4/datatypes/sps.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/datatypes/sps.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/datatypes/sps.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_4.DataTypes.Sps do
@moduledoc false
alias HL7.V2_4.{DataTypes}
use HL7.DataType,
fields: [
specimen_source_name_or_code: DataTypes.Ce,
additives: nil,
freetext: nil,
body_site: DataTypes.Ce,
site_modifier: DataTypes.Ce,
collection_modifier_method_code: DataTypes.Ce,
specimen_role: DataTypes.Ce
]
end
| 23.4375 | 52 | 0.693333 |
73c4fb86b000b4d7d5db2296a716f700c2757c75 | 1,278 | exs | Elixir | test/auto_api/capabilities/maintenance_capability_test.exs | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | 4 | 2018-01-19T16:11:10.000Z | 2019-12-13T16:35:10.000Z | test/auto_api/capabilities/maintenance_capability_test.exs | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | 5 | 2020-07-16T07:20:21.000Z | 2021-09-22T10:18:04.000Z | test/auto_api/capabilities/maintenance_capability_test.exs | nonninz/auto-api-elixir | 53e11542043285e94bbb5a0a3b8ffff0b1b47167 | [
"MIT"
] | 1 | 2021-02-17T18:36:13.000Z | 2021-02-17T18:36:13.000Z | # AutoAPI
# The MIT License
#
# Copyright (c) 2018- High-Mobility GmbH (https://high-mobility.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
defmodule AutoApi.MaintenanceCapabilityTest do
use ExUnit.Case, async: true
doctest AutoApi.MaintenanceCapability
end
| 47.333333 | 79 | 0.78169 |
73c5139061317facf4f8f5301d7f13e8a76a0d98 | 132 | exs | Elixir | config/config.exs | pulitta/pke | b4227adb4ff992a9a818d280ac033091ccda123e | [
"MIT"
] | null | null | null | config/config.exs | pulitta/pke | b4227adb4ff992a9a818d280ac033091ccda123e | [
"MIT"
] | null | null | null | config/config.exs | pulitta/pke | b4227adb4ff992a9a818d280ac033091ccda123e | [
"MIT"
] | null | null | null | use Mix.Config
config :pke,
key_path: "test/key.pub", # <- Public key path
crypto_module: :crypto_mock # <- Crypto module name
| 18.857143 | 52 | 0.69697 |
73c537acedae15cb83958d0bacaf01e764af4ecf | 475 | ex | Elixir | api/lib/core/filters.ex | lucas-angermann/idai-field-web | 788c9c9505b6fd12d591345b23053e934f1022d1 | [
"Apache-2.0"
] | null | null | null | api/lib/core/filters.ex | lucas-angermann/idai-field-web | 788c9c9505b6fd12d591345b23053e934f1022d1 | [
"Apache-2.0"
] | null | null | null | api/lib/core/filters.ex | lucas-angermann/idai-field-web | 788c9c9505b6fd12d591345b23053e934f1022d1 | [
"Apache-2.0"
] | null | null | null | defmodule Api.Core.Filters do
def get_filters(), do: [
%{
field: "project",
label: %{ de: "Projekt", en: "Project" },
labeled_value: false,
size: 1000
},
%{
field: "resource.category",
label: %{ de: "Kategorie", en: "Category" },
labeled_value: true,
size: 1000
}
]
def get_filter_name(filter) do
if filter.labeled_value do
"#{filter.field}.name"
else
filter.field
end
end
end
| 18.269231 | 50 | 0.551579 |
73c53c68c7b58d5b2a428d4d6388666d2d0866f1 | 3,897 | ex | Elixir | lib/logger_sigil_l.ex | nottewae/logger_sigil_l | f3b9c6ef13c504aebbef626c284854a7993b2662 | [
"MIT"
] | null | null | null | lib/logger_sigil_l.ex | nottewae/logger_sigil_l | f3b9c6ef13c504aebbef626c284854a7993b2662 | [
"MIT"
] | null | null | null | lib/logger_sigil_l.ex | nottewae/logger_sigil_l | f3b9c6ef13c504aebbef626c284854a7993b2662 | [
"MIT"
] | null | null | null | defmodule LoggerSigilL do
@moduledoc """
# LoggerSigilL
```LoggerSigilL``` provides a short syntax for logging while debugging an application, like ```~w(a b c)```
## Installation
Add `logger_sigil_l` to your list of dependencies in `mix.exs`:
```elixir
def deps do
[
{:logger_sigil_l,"~> 0.1.3"}
]
end
```
## Use
```~l"your text here"<<<m>t>b>```
```elixir
~l"text" #default for Logger.debug
~l"text"i #default for Logger.info
~l"text"wr # Logger.warn with red text
~l"text"iry # Logger.info with red text and yellow background
```
- **m** - Logger method ```i``` - info, ```d``` - debug, ```e``` - error, ```w``` - warn
- **t** - text color
- r - red
- b - blue
- g - green
- w - white
- m - magenta
- c - cyan
- d - black
- y - yellow
- **b** - background color - as text color
## Examples
```elixir
#in our module
use LoggerSigilL
def test_logger do
m = "test message"
~l"test message"
~l"test message"dr
~l"test message"er
~l"test message"ir
~l"test message"wr
~l"test message"dg
~l"test message"eg
~l"test message"ig
~l"test message"wg
~l"test message"dc
~l"test message"ec
~l"test message"ic
~l"test message"wc
~l"test message"dm
~l"test message"em
~l"test message"im
~l"test message"wm
~l"test message"dwm
~l"test message"ewm
~l"test message"iwm
~l"test message"wwm
~l"test message"dry
~l"test message"ery
~l"test message"iry
~l"test message"wry
~l"test message"dwb
~l"test message"ewb
~l"test message"iwb
~l"test message"wwb
end
```
"""
defmacro __using__(_) do
first = quote do
require Logger
require LoggerSigilL
import LoggerSigilL
defp find_key(_list, nil, _default), do: nil
defp find_key(list, key, default) when is_binary(key), do: find_key(list, String.to_atom(key), default)
defp find_key(list, key, default), do: (if is_nil(Enum.find(list, fn {k,_} -> key == k end)), do: list[default], else: list[key])
def log_(message, nil, nil), do: Logger.debug(message)
end
funs = ~w(debug info warn error)
outs = Enum.map(funs, fn(fun) ->
quote do
require Logger
import Logger
def unquote(:"log_#{fun}")(message, nil, nil), do: unquote(:"#{fun}")(message)
def unquote(:"log_#{fun}")(message, text, nil), do: unquote(:"#{fun}")(message, ansi_color: [text])
def unquote(:"log_#{fun}")(message, text, back), do: unquote(:"#{fun}")(message, ansi_color: [text, back])
end
end)
sig = quote do
def sigil_l(m, opts) do
table_fun = [d: :debug, i: :info, w: :warn, e: :error ]
text_colors = [
b: :blue, c: :cyan, w: :white, r: :red, g: :green, y: :yellow,
d: :black, m: :magenta
]
background_colors = [
b: :blue_background, c: :cyan_background, w: :white_background,
r: :red_background, g: :green_background, y: :yellow_background,
d: :black_background, m: :magenta_background
]
options = opts
|> Enum.map( &to_string([&1]))
{fun, text_color, background_color} = if length(options) > 0 do
fun = find_key(table_fun, Enum.at(options, 0), :d)
text_color = find_key(text_colors, Enum.at(options, 1), :c)
background_color = find_key(background_colors, Enum.at(options, 2), :d)
{fun, text_color, background_color}
else
{table_fun[:d], nil, nil}
end
apply(__MODULE__, :"log_#{fun}", [m, text_color, background_color])
end
end
[first] ++ outs ++ [sig]
end
end
| 32.206612 | 135 | 0.554786 |
73c5713f9d3a75852d83ef6847439c5fe2f34c71 | 3,203 | ex | Elixir | lib/wow/crawler.ex | DrPandemic/expressive-broker | 66a8da94ede2c101db9e1841e17898b5bae5df49 | [
"Apache-2.0",
"CC-BY-4.0",
"MIT"
] | null | null | null | lib/wow/crawler.ex | DrPandemic/expressive-broker | 66a8da94ede2c101db9e1841e17898b5bae5df49 | [
"Apache-2.0",
"CC-BY-4.0",
"MIT"
] | null | null | null | lib/wow/crawler.ex | DrPandemic/expressive-broker | 66a8da94ede2c101db9e1841e17898b5bae5df49 | [
"Apache-2.0",
"CC-BY-4.0",
"MIT"
] | null | null | null | defmodule Wow.Crawler do
alias Tesla.Multipart
@access_token_url "https://us.battle.net/oauth/token"
@spec get_access_token(String.t, String.t) :: String.t
# Exchanges client credentials (Basic auth) for an OAuth access token.
# Only a 200 response is handled; anything else crashes the caller.
def get_access_token(id, secret) do
  client = Tesla.client(middlewares_basic_auth(id, secret))

  form =
    Multipart.new()
    |> Multipart.add_field("grant_type", "client_credentials")

  case Tesla.post(client, @access_token_url, form) do
    {:ok, %Tesla.Env{status: 200, body: %{"access_token" => token}}} ->
      token
  end
end
@spec get_url(String.t, String.t, String.t) :: %{lastModified: integer, url: String.t}
# Asks the auction API where the latest dump for `realm` lives. The match
# assumes exactly one file entry and crashes on any non-200 response.
def get_url(access_token, region, realm) do
  client = Tesla.client(middlewares())

  endpoint =
    "https://#{region}.api.blizzard.com/wow/auction/data/#{realm}" <>
      "?locale=en_US&access_token=#{access_token}"

  case Tesla.get(client, endpoint) do
    {:ok, %Tesla.Env{status: 200, body: %{"files" => [entry]}}} ->
      string_key_map(entry)
  end
end
# Downloads a full auction dump from the URL obtained via `get_url/3` and
# returns the list under the "auctions" key. Crashes on any non-200
# response.
#
# Fix: the previous spec declared the return type as a map
# (`%{auctions: list(...)}`) although the function returns the extracted
# list itself.
@spec get_dump(String.t) :: list(Wow.AuctionEntry.raw_entry)
def get_dump(url) do
  client = Tesla.client(middlewares())

  case Tesla.get(client, url) do
    {:ok, %Tesla.Env{status: 200, body: %{"auctions" => auctions}}} ->
      auctions
  end
end
@spec get_item(String.t, integer) :: any
# Fetches an item's metadata from the US API. Returns the decoded body on
# success, `:not_found` on a 404, and crashes on any other response.
def get_item(access_token, item_id) do
  client = Tesla.client(middlewares())

  url =
    "https://us.api.blizzard.com/wow/item/#{item_id}" <>
      "?locale=en_US&access_token=#{access_token}"

  case Tesla.get(client, url) do
    {:ok, %Tesla.Env{status: 200, body: body}} -> body
    {:ok, %Tesla.Env{status: 404}} -> :not_found
  end
end
@spec get_character(String.t, [{:character_name, String.t}, {:realm_name, String.t}, {:region, String.t}]) :: any
# Fetches a character profile. Returns the decoded body on success,
# `:not_found` on a 404, and crashes on any other response.
# Note: the options are pattern-matched positionally, so callers must pass
# character_name, realm_name and region in exactly that order.
def get_character(access_token, character_name: character, realm_name: realm, region: region) do
  client = Tesla.client(middlewares())

  url =
    "https://#{region}.api.blizzard.com/wow/character/#{realm}/#{character}" <>
      "?locale=en_US&access_token=#{access_token}"

  case Tesla.get(client, url) do
    {:ok, %Tesla.Env{status: 200, body: body}} -> body
    {:ok, %Tesla.Env{status: 404}} -> :not_found
  end
end
@spec middlewares() :: list
# Tesla middleware stack for unauthenticated requests. Under test only JSON
# decoding is enabled; otherwise gzip compression is requested and handled.
# NOTE(review): `Mix.env` is evaluated at runtime here — Mix is not
# available in OTP releases, so this would crash outside Mix-driven
# environments; consider a compile-time check or application config.
defp middlewares do
  if Mix.env == :test do
    [
      Tesla.Middleware.DecodeJson,
    ]
  else
    [
      Tesla.Middleware.DecodeJson,
      Tesla.Middleware.Compression,
      {Tesla.Middleware.Headers, [{"Accept-Encoding", "gzip"}]}
    ]
  end
end
@spec middlewares_basic_auth(String.t, String.t) :: list
# Same stack as `middlewares/0` but with HTTP Basic auth credentials
# prepended (used for the OAuth token exchange).
# NOTE(review): runtime `Mix.env` check — same release-safety caveat as in
# `middlewares/0`.
defp middlewares_basic_auth(id, secret) do
  if Mix.env == :test do
    [
      {Tesla.Middleware.BasicAuth, %{username: id, password: secret}},
      Tesla.Middleware.DecodeJson,
    ]
  else
    [
      {Tesla.Middleware.BasicAuth, %{username: id, password: secret}},
      Tesla.Middleware.DecodeJson,
      Tesla.Middleware.Compression,
      {Tesla.Middleware.Headers, [{"Accept-Encoding", "gzip"}]}
    ]
  end
end
@spec string_key_map(map) :: map
# Converts a map's string keys to atom keys.
# NOTE(review): `String.to_atom/1` creates atoms at runtime; this is fine
# for the fixed API payload handled here, but unsafe for unbounded external
# input since atoms are never garbage-collected.
defp string_key_map(map) do
  for {key, value} <- map, into: %{}, do: {String.to_atom(key), value}
end
end
| 29.657407 | 124 | 0.61817 |
73c580583262fde74df2bc8dcdf6511a01fc9d85 | 1,868 | ex | Elixir | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_l_diversity_result.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_l_diversity_result.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/dlp/lib/google_api/dlp/v2/model/google_privacy_dlp_v2_l_diversity_result.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2LDiversityResult do
@moduledoc """
Result of the l-diversity computation.
## Attributes
- sensitiveValueFrequencyHistogramBuckets ([GooglePrivacyDlpV2LDiversityHistogramBucket]): Histogram of l-diversity equivalence class sensitive value frequencies. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:sensitiveValueFrequencyHistogramBuckets =>
list(GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2LDiversityHistogramBucket.t())
}
field(
:sensitiveValueFrequencyHistogramBuckets,
as: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2LDiversityHistogramBucket,
type: :list
)
end
defimpl Poison.Decoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2LDiversityResult do
def decode(value, options) do
GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2LDiversityResult.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DLP.V2.Model.GooglePrivacyDlpV2LDiversityResult do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.245283 | 185 | 0.771413 |
73c5ab1f064a92d149588bb0290f59509aeb2ae6 | 8,109 | exs | Elixir | test/simple_markdown_lazy_renderer_html_test.exs | TypedLambda/SimpleMarkdownEx | ce0d77c9a09c07a6f2a305c75b031c453e37ce79 | [
"BSD-2-Clause"
] | null | null | null | test/simple_markdown_lazy_renderer_html_test.exs | TypedLambda/SimpleMarkdownEx | ce0d77c9a09c07a6f2a305c75b031c453e37ce79 | [
"BSD-2-Clause"
] | null | null | null | test/simple_markdown_lazy_renderer_html_test.exs | TypedLambda/SimpleMarkdownEx | ce0d77c9a09c07a6f2a305c75b031c453e37ce79 | [
"BSD-2-Clause"
] | null | null | null | defmodule SimpleMarkdownLazyRendererHTMLTest do
use ExUnit.Case
test "rendering line break" do
  rendered =
    [{:line_break, []}]
    |> SimpleMarkdown.ast_to_structs()
    |> SimpleMarkdown.LazyRenderer.HTML.render()
    |> Enum.join()

  assert rendered == "<br>"
end
# Headers at every level render to the matching <hN> element.
# Fix: the previous version repeated the h1/h2 assertions verbatim at the
# end; this data-driven form covers levels 1..6 exactly once.
test "rendering header" do
  for level <- 1..6 do
    rendered =
      [{:header, ["test"], level}]
      |> SimpleMarkdown.ast_to_structs()
      |> SimpleMarkdown.LazyRenderer.HTML.render()
      |> Enum.join()

    assert rendered == "<h#{level}>test</h#{level}>"
  end
end
test "rendering emphasis" do
  # :regular maps to <em>, :strong to <strong>.
  for {kind, tag} <- [regular: "em", strong: "strong"] do
    rendered =
      [{:emphasis, ["test"], kind}]
      |> SimpleMarkdown.ast_to_structs()
      |> SimpleMarkdown.LazyRenderer.HTML.render()
      |> Enum.join()

    assert rendered == "<#{tag}>test</#{tag}>"
  end
end
test "rendering horizontal rule" do
  rendered =
    [{:horizontal_rule, []}]
    |> SimpleMarkdown.ast_to_structs()
    |> SimpleMarkdown.LazyRenderer.HTML.render()
    |> Enum.join()

  assert rendered == "<hr>"
end
# Tables render a <thead> only when column headers are present; alignment
# metadata (:center/:right/:left) becomes inline text-align styles on the
# body cells, while :default adds no style attribute.
test "rendering table" do
  assert "<table><thead><tr><th>One</th><th>Two</th><th>Three</th><th>Four</th></tr></thead><tbody><tr><td>1</td><td style=\"text-align: center;\">2</td><td style=\"text-align: right;\">3</td><td style=\"text-align: left;\">4</td></tr><tr><td>11</td><td style=\"text-align: center;\">22</td><td style=\"text-align: right;\">33</td><td style=\"text-align: left;\">44</td></tr></tbody></table>" == [{ :table, [row: ["1", "2", "3", "4"], row: ["11", "22", "33", "44"]], [{ "One", :default }, { "Two", :center }, { "Three", :right }, { "Four", :left }] }] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
  assert "<table><tbody><tr><td>1</td><td style=\"text-align: center;\">2</td><td style=\"text-align: right;\">3</td><td style=\"text-align: left;\">4</td></tr><tr><td>11</td><td style=\"text-align: center;\">22</td><td style=\"text-align: right;\">33</td><td style=\"text-align: left;\">44</td></tr></tbody></table>" == [{ :table, [row: ["1", "2", "3", "4"], row: ["11", "22", "33", "44"]], [:default, :center, :right, :left] }] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
end
# Task-list items render as disabled checkboxes; :selected adds the
# `checked` attribute.
test "rendering task list" do
  assert "<ul><li><input type=\"checkbox\" disabled>a</li><li><input type=\"checkbox\" checked disabled>b</li></ul>" == [{ :task_list, [{ :task, ["a"], :deselected }, { :task, ["b"], :selected }] }] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
end
test "rendering list" do
  # :unordered maps to <ul>, :ordered to <ol>; items render as <li>.
  for {type, tag} <- [unordered: "ul", ordered: "ol"] do
    rendered =
      [{:list, [{:item, ["a"]}, {:item, ["b"]}], type}]
      |> SimpleMarkdown.ast_to_structs()
      |> SimpleMarkdown.LazyRenderer.HTML.render()
      |> Enum.join()

    assert rendered == "<#{tag}><li>a</li><li>b</li></#{tag}>"
  end
end
# Custom renderer for the :test preformatted-code syntax used in the test
# below: the option variant renders to an empty string, while the input
# variant renders the inner AST, HTML-encodes it, and wraps it in a
# <pre><code class="test"> element.
defimpl SimpleMarkdown.LazyRenderer.HTML, for: SimpleMarkdown.Attribute.PreformattedCode.Test do
  def render(%{ option: _ }), do: ""
  def render(%{ input: input }), do: "<pre><code class=\"test\">#{SimpleMarkdown.LazyRenderer.HTML.render(input) |> Enum.join |> HtmlEntities.encode}</code></pre>"
end
test "rendering preformatted code" do
assert "<pre><code>test</code></pre>" == [{ :preformatted_code, ["test"] }] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
assert "<pre><code><test></code></pre>" == [{ :preformatted_code, ["<test>"] }] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
assert "<pre><code>test</code></pre>" == [{ :preformatted_code, ["test"], :syntax }] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
assert "<pre><code class=\"test\">test</code></pre>" == [{ :preformatted_code, ["test"], :test }] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
end
test "rendering paragraph" do
assert "<p>test</p>" == [{ :paragraph, ["test"] }] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
end
test "rendering blockquote" do
assert "<blockquote>test</blockquote>" == [{ :blockquote, ["test"] }] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
end
test "rendering link" do
assert "<a href=\"example.com\">test</a>" == [{ :link, ["test"], "example.com" }] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
end
test "rendering image" do
assert "<img src=\"example.com/image.jpg\" alt=\"test\">" == [{ :image, ["test"], "example.com/image.jpg" }] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
end
test "rendering code" do
assert "<code>test</code>" == [{ :code, ["test"] }] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
assert "<code><test></code>" == [{ :code, ["<test>"] }] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
end
test "rendering examples" do
assert "<h1>Heading</h1><h2>Sub-heading</h2><h3>Another deeper heading</h3><p>Paragraphs are separatedby a blank line.</p><p>Two spaces at the end of a line leave a<br>line break.</p><p>Text attributes <em>italic</em>, <strong>bold</strong>, <code>monospace</code>.</p><p>Bullet list:</p><ul><li>apples</li><li>oranges</li><li>pears</li></ul><p>Numbered list:</p><ol><li>apples</li><li>oranges</li><li>pears</li></ol><p>A <a href=\"http://example.com\">link</a>.</p>" == [
{ :header, ["Heading"], 1 },
{ :header, ["Sub-heading"], 2 },
{ :header, ["Another deeper heading"], 3 },
{ :paragraph, ["Paragraphs are separated", "by a blank line."] },
{ :paragraph, ["Two spaces at the end of a line leave a", { :line_break, [] }, "line break."] },
{ :paragraph, ["Text attributes ", { :emphasis, ["italic"], :regular }, ", ", { :emphasis, ["bold"], :strong }, ", ", { :code, ["monospace"] }, "."] },
{ :paragraph, ["Bullet list:"] },
{ :list, [item: ["apples"], item: ["oranges"], item: ["pears"]], :unordered },
{ :paragraph, ["Numbered list:"] },
{ :list, [item: ["apples"], item: ["oranges"], item: ["pears"]], :ordered },
{ :paragraph, ["A ", { :link, ["link"], "http://example.com" }, "."] }
] |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
end
test "rendering stream examples" do
assert "<h1>foo</h1><h2>foo</h2><h3>foo</h3>" == Stream.iterate(1, &(&1 + 1)) |> Stream.map(&({ :header, ["foo"], &1 })) |> Stream.take(3) |> SimpleMarkdown.ast_to_structs |> SimpleMarkdown.LazyRenderer.HTML.render |> Enum.join
end
end
| 85.357895 | 646 | 0.618942 |
73c5c1505e610e4ad59a5ca5e178f278aa0521b9 | 4,392 | ex | Elixir | lib/avrora/resolver.ex | RafaelCamarda/avrora | 0771d18a7082992be035b9aaadfe625808b6de6f | [
"MIT"
] | null | null | null | lib/avrora/resolver.ex | RafaelCamarda/avrora | 0771d18a7082992be035b9aaadfe625808b6de6f | [
"MIT"
] | null | null | null | lib/avrora/resolver.ex | RafaelCamarda/avrora | 0771d18a7082992be035b9aaadfe625808b6de6f | [
"MIT"
defmodule Avrora.Resolver do
  @moduledoc """
  Resolves schema names or global ID's to a specific schema files while keeping
  memory and registry storage up to date.
  """

  require Logger

  alias Avrora.{Config, Name}

  @doc """
  Resolves schema by all given possible identifiers.

  It will return first successful resolution or the last error.

  To resolve schema it uses:

  * Avrora.Resolver.resolve/1 when integer
  * Avrora.Resolver.resolve/1 when binary

  ## Examples

      ...> {:ok, schema} = Avrora.Resolver.resolve_any([1, "io.confluent.Payment"])
      ...> schema.full_name
      "io.confluent.Payment"
  """
  @spec resolve_any(nonempty_list(integer() | String.t())) ::
          {:ok, Avrora.Schema.t()} | {:error, term()}
  def resolve_any(ids) do
    ids = List.wrap(ids)
    total = Enum.count(ids)

    ids
    |> Stream.map(&{&1, resolve(&1)})
    |> Stream.with_index(1)
    |> Enum.find_value(fn {{id, {status, result}}, index} ->
      if status == :error, do: Logger.debug("fail to resolve schema by identifier `#{id}`")
      # Stop on the first success, or return the last error once every id failed.
      if status == :ok || index == total, do: {status, result}
    end)
  end

  @doc """
  Resolves schema by a global ID.

  After schema being resolved it will be stored in memory storage
  with key equal to `global ID`.

  ## Examples

      iex> {:ok, schema} = Avrora.Resolver.resolve(1)
      iex> schema.full_name
      "io.confluent.Payment"
  """
  @spec resolve(integer()) :: {:ok, Avrora.Schema.t()} | {:error, term()}
  def resolve(id) when is_integer(id) do
    # `{:ok, nil}` signals a memory-cache miss, so fall through to the registry
    # and cache whatever it returns.
    with {:ok, nil} <- memory_storage().get(id),
         {:ok, avro} <- registry_storage().get(id) do
      memory_storage().put(id, avro)
    end
  end

  @doc """
  Resolves schema be it's name and optionally version. A version could be provided
  by adding `:` and version number to the name (i.e `io.confluent.Payment:5`).

  In case if confluent schema registry url is configured, resolution will take a
  look there first and in case of failure try to read schema from the configured
  schemas folder.

  After schema being resolved it will be stored in memory storage with
  key equal `name` and `name:version`. Also it will be added to the registry if
  it's configured.

  ## Examples

      ...> {:ok, schema1} = Avrora.Resolver.resolve("io.confluent.Payment")
      ...> {:ok, schema2} = Avrora.Resolver.resolve("io.confluent.Payment:42")
      ...> schema1.version
      42
      ...> schema2.version
      42
      ...> schema1.full_name
      "io.confluent.Payment"
      ...> schema2.full_name
      "io.confluent.Payment"
  """
  @spec resolve(String.t()) :: {:ok, Avrora.Schema.t()} | {:error, term()}
  def resolve(name) when is_binary(name) do
    with {:ok, schema_name} <- Name.parse(name),
         {:ok, nil} <- memory_storage().get(name) do
      case registry_storage().get(name) do
        {:ok, schema} ->
          # Cache under the exact `name:version` key first, then under the
          # plain name (with TTL) and the global ID.
          with {:ok, schema} <-
                 memory_storage().put("#{schema_name.name}:#{schema.version}", schema),
               do: memorize(schema_name.name, schema)

        {:error, :unknown_subject} ->
          # The registry does not know this subject yet: load the schema from
          # disk, register it, then cache it the same way as a registry hit.
          with {:ok, schema} <- file_storage().get(schema_name.name),
               {:ok, schema} <- registry_storage().put(schema_name.name, schema.json),
               do: memorize(schema_name.name, schema)

        {:error, :unconfigured_registry_url} ->
          # No registry configured: resolve from disk, cache in memory only.
          with {:ok, schema} <- file_storage().get(name),
               do: memory_storage().put(schema_name.name, schema)

        {:error, reason} ->
          {:error, reason}
      end
    end
  end

  # Stores `schema` in memory under `name`, applies the configured names TTL
  # and also caches it by its global ID. Shared by both registry branches of
  # `resolve/1` above (the logic was previously duplicated).
  defp memorize(name, schema) do
    with {:ok, schema} <- memory_storage().put(name, schema),
         {:ok, timestamp} <- memory_storage().expire(name, names_ttl()) do
      if timestamp == :infinity,
        do: Logger.debug("schema `#{name}` will be always resolved from memory")

      memory_storage().put(schema.id, schema)
    end
  end

  defp file_storage, do: Config.file_storage()
  defp memory_storage, do: Config.memory_storage()
  defp registry_storage, do: Config.registry_storage()
  defp names_ttl, do: Config.names_cache_ttl()
end
| 34.582677 | 98 | 0.624317 |
73c60c55a4ef92e81b7625372ba60bec8d83eed6 | 3,405 | exs | Elixir | spec/membrane/time_spec.exs | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | spec/membrane/time_spec.exs | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | spec/membrane/time_spec.exs | eboskma/membrane_core | e216994fe1ba99c5d228a4b0959faa5fabb13b1c | [
"Apache-2.0"
] | null | null | null | defmodule Membrane.TimeSpec do
  use ESpec, async: true

  # Each group below checks one unit-constructor of the module under test
  # (`described_module()`): the singular and plural variants of a unit must
  # return the same nanosecond value for the same input.

  # Nanoseconds are the base unit: the value passes through unchanged.
  describe ".nanosecond/1" do
    let :value, do: 123

    it "should return given value" do
      expect(described_module().nanosecond(value())) |> to(eq value())
    end
  end

  describe ".nanoseconds/1" do
    let :value, do: 123

    it "should return given value" do
      expect(described_module().nanoseconds(value())) |> to(eq value())
    end
  end

  # 1 microsecond == 1_000 ns
  describe ".microsecond/1" do
    let :value, do: 123

    it "should return given value multiplied by 1000" do
      expect(described_module().microsecond(value())) |> to(eq value() * 1_000)
    end
  end

  describe ".microseconds/1" do
    let :value, do: 123

    it "should return given value multiplied by 1000" do
      expect(described_module().microseconds(value())) |> to(eq value() * 1_000)
    end
  end

  # 1 millisecond == 1_000_000 ns
  describe ".millisecond/1" do
    let :value, do: 123

    it "should return given value multiplied by 1000000" do
      expect(described_module().millisecond(value())) |> to(eq value() * 1_000_000)
    end
  end

  describe ".milliseconds/1" do
    let :value, do: 123

    it "should return given value multiplied by 1000000" do
      expect(described_module().milliseconds(value())) |> to(eq value() * 1_000_000)
    end
  end

  # 1 second == 1_000_000_000 ns
  describe ".second/1" do
    let :value, do: 123

    it "should return given value multiplied by 1000000000" do
      expect(described_module().second(value())) |> to(eq value() * 1_000_000_000)
    end
  end

  describe ".seconds/1" do
    let :value, do: 123

    it "should return given value multiplied by 1000000000" do
      expect(described_module().seconds(value())) |> to(eq value() * 1_000_000_000)
    end
  end

  # 1 minute == 60_000_000_000 ns
  describe ".minute/1" do
    let :value, do: 123

    it "should return given value multiplied by 60000000000" do
      expect(described_module().minute(value())) |> to(eq value() * 60_000_000_000)
    end
  end

  describe ".minutes/1" do
    let :value, do: 123

    it "should return given value multiplied by 60000000000" do
      expect(described_module().minutes(value())) |> to(eq value() * 60_000_000_000)
    end
  end

  # 1 hour == 3_600_000_000_000 ns
  describe ".hour/1" do
    let :value, do: 123

    it "should return given value multiplied by 3600000000000" do
      expect(described_module().hour(value())) |> to(eq value() * 3_600_000_000_000)
    end
  end

  describe ".hours/1" do
    let :value, do: 123

    it "should return given value multiplied by 3600000000000" do
      expect(described_module().hours(value())) |> to(eq value() * 3_600_000_000_000)
    end
  end

  # 1 day == 86_400_000_000_000 ns
  describe ".day/1" do
    let :value, do: 123

    it "should return given value multiplied by 86400000000000" do
      expect(described_module().day(value())) |> to(eq value() * 86_400_000_000_000)
    end
  end

  describe ".days/1" do
    let :value, do: 123

    it "should return given value multiplied by 86400000000000" do
      expect(described_module().days(value())) |> to(eq value() * 86_400_000_000_000)
    end
  end

  # NOTE(review): the describe label says native_resolution/0 but the example
  # exercises native_unit/1 — confirm which name is intended.
  describe ".native_resolution/0" do
    it "should return 1 second converted into erlang native units" do
      expect(1 |> described_module().second |> described_module().native_unit)
      |> to(eq :erlang.convert_time_unit(1, :seconds, :native))
    end
  end

  describe "monotonic_time/0" do
    it "should return integer" do
      expect(described_module().monotonic_time) |> to(be_integer())
    end
  end
end
| 26.395349 | 85 | 0.661674 |
73c613ae19b0b03b0ce715d62c692af174a8aa47 | 1,099 | ex | Elixir | test/support/channel_case.ex | colindensem/petal-k8s | def9dc653dcae2ef35021d44821e6c69d8744e15 | [
"MIT"
] | 1 | 2021-02-12T09:17:38.000Z | 2021-02-12T09:17:38.000Z | test/support/channel_case.ex | colindensem/petal-k8s | def9dc653dcae2ef35021d44821e6c69d8744e15 | [
"MIT"
] | null | null | null | test/support/channel_case.ex | colindensem/petal-k8s | def9dc653dcae2ef35021d44821e6c69d8744e15 | [
"MIT"
] | null | null | null | defmodule PetalWeb.ChannelCase do
@moduledoc """
This module defines the test case to be used by
channel tests.
Such tests rely on `Phoenix.ChannelTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use PetalWeb.ChannelCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
  # Code injected into every test module that `use`s this case template.
  using do
    quote do
      # Import conveniences for testing with channels
      import Phoenix.ChannelTest
      import PetalWeb.ChannelCase

      # The default endpoint for testing
      @endpoint PetalWeb.Endpoint
    end
  end

  # Checks out a sandboxed database connection for each test. For non-async
  # tests the connection is switched to shared mode so processes spawned by
  # the test (e.g. channel processes) can use the same connection.
  setup tags do
    :ok = Ecto.Adapters.SQL.Sandbox.checkout(Petal.Repo)

    unless tags[:async] do
      Ecto.Adapters.SQL.Sandbox.mode(Petal.Repo, {:shared, self()})
    end

    :ok
  end
end
| 26.804878 | 67 | 0.724295 |
73c615e3d347a3165b69dd155efe25a872bddc50 | 2,211 | ex | Elixir | test/support/fwup.ex | axelson/ssh_subsystem_fwup | f04e45cfb81e74c0ea2c0581c1ea3fcf378f5543 | [
"Apache-2.0"
] | null | null | null | test/support/fwup.ex | axelson/ssh_subsystem_fwup | f04e45cfb81e74c0ea2c0581c1ea3fcf378f5543 | [
"Apache-2.0"
] | null | null | null | test/support/fwup.ex | axelson/ssh_subsystem_fwup | f04e45cfb81e74c0ea2c0581c1ea3fcf378f5543 | [
"Apache-2.0"
] | null | null | null | # Code.compiler_options(ignore_module_conflict: true)
defmodule SSHSubsystemFwup.Support.Fwup do
  @moduledoc """
  This module is intended to help with testing and development
  by allowing for "easy" creation of firmware signing keys, and
  signed/unsigned/corrupted firmware files.

  It is a thin wrapper around `fwup`, and it persists the files in
  `System.tmp_dir()`.

  The files are given the names that are passed to the respective functions, so
  make sure you pass unique names to avoid collisions if necessary. This module
  takes little effort to avoid collisions on its own.
  """

  # @after_compile {__MODULE__, :compiler_options}
  # def compiler_options(_, _), do: Code.compiler_options(ignore_module_conflict: false)

  @doc """
  Create an unsigned firmware image and return its contents as a binary.

  (The previous doc claimed the path was returned; the temporary files are in
  fact deleted before returning and the firmware bytes are handed back.)

  Options:

    * `:task` - the fwup task name to define (defaults to `"upgrade"`)
    * `:message` - contents of the embedded `test.txt` resource
      (defaults to `"Hello, world!"`)

  Raises a `MatchError` if `fwup` exits with a non-zero status.
  """
  def create_firmware(options \\ []) do
    output = tmp_path()
    config = make_conf(options)

    # `fwup -c` compiles the config into a firmware archive; matching on exit
    # status 0 makes a failed invocation raise instead of returning garbage.
    {_, 0} =
      System.cmd("fwup", [
        "-c",
        "-f",
        config,
        "-o",
        output
      ])

    File.rm!(config)

    contents = File.read!(output)
    File.rm!(output)
    contents
  end

  @doc """
  Create a corrupt firmware image by injecting a stray byte 32 bytes in.
  """
  def create_corrupt_firmware(options \\ []) do
    {prefix, suffix} = options |> create_firmware() |> :erlang.split_binary(32)
    prefix <> <<1>> <> suffix
  end

  # Random file path in the system temp dir (used for both configs and the
  # generated images).
  defp tmp_path() do
    Path.join(System.tmp_dir(), random_string() <> ".conf")
  end

  # Writes the generated fwup config to a temp file and returns its path.
  defp make_conf(options) do
    path = tmp_path()
    File.write!(path, build_conf_contents(options))
    path
  end

  # Renders the fwup config with the requested task name and message.
  defp build_conf_contents(options) do
    task = Keyword.get(options, :task, "upgrade")
    message = Keyword.get(options, :message, "Hello, world!")

    """
    meta-product = "Test firmware"
    meta-description = "Try to test ssh_subsystem_fwup"
    meta-version = "0.1.0"
    meta-platform = "rpi3"
    meta-architecture = "arm"
    meta-author = "Me"
    file-resource test.txt {
    contents = "#{message}"
    }
    task #{task} {
    on-resource test.txt { raw_write(0) }
    }
    """
  end

  # Random base-36 token used for temp filenames.
  defp random_string() do
    :rand.uniform(0x100000000) |> Integer.to_string(36) |> String.downcase()
  end
end
| 25.125 | 88 | 0.658073 |
73c6380f583207dcdb0d8b0180d9d74dbff49ddc | 3,192 | exs | Elixir | test/day7/internet_protocol_version_7_test.exs | fboyer/advent_of_code_2016 | ffe02f093298fa60a5547dc6a9391c1acfb2b325 | [
"MIT"
] | null | null | null | test/day7/internet_protocol_version_7_test.exs | fboyer/advent_of_code_2016 | ffe02f093298fa60a5547dc6a9391c1acfb2b325 | [
"MIT"
] | null | null | null | test/day7/internet_protocol_version_7_test.exs | fboyer/advent_of_code_2016 | ffe02f093298fa60a5547dc6a9391c1acfb2b325 | [
"MIT"
] | null | null | null | defmodule AdventOfCode2016.InternetProtocolVersion7Test do
use ExUnit.Case, async: true
doctest AdventOfCode2016.InternetProtocolVersion7
import AdventOfCode2016.InternetProtocolVersion7
describe "day 7 - part 1" do
test "can parse an ipv7" do
assert parse_ipv7("abba[mnop]qrst") == ["abba", "mnop", "qrst"]
assert parse_ipv7("abcd[bddb]xyyx") == ["abcd", "bddb", "xyyx"]
assert parse_ipv7("aaaa[qwer]tyui") == ["aaaa", "qwer", "tyui"]
assert parse_ipv7("ioxxoj[asdfgh]zxcvbn") == ["ioxxoj", "asdfgh", "zxcvbn"]
end
test "can determine if a sub sequence of an ipv7 contains an abba" do
assert contains_an_abba?(["a", "b", "b", "a"]) == true
assert contains_an_abba?(["m", "n", "o", "p"]) == false
assert contains_an_abba?(["q", "r", "s", "t"]) == false
assert contains_an_abba?(["i", "o", "x", "x", "o", "j"]) == true
assert contains_an_abba?(["a", "s", "d", "f", "g", "h"]) == false
assert contains_an_abba?(["z", "x", "c", "v", "b", "n"]) == false
end
test "can determine if an ipv7 supports tls" do
assert is_supporting_tls?("abba[mnop]qrst") == true
assert is_supporting_tls?("abcd[bddb]xyyx") == false
assert is_supporting_tls?("aaaa[qwer]tyui") == false
assert is_supporting_tls?("ioxxoj[asdfgh]zxcvbn") == true
end
test "can count the number of ipv7s supporting tls in a list" do
input = """
abba[mnop]qrst
abcd[bddb]xyyx
aaaa[qwer]tyui
ioxxoj[asdfgh]zxcvbn
"""
assert count_ipv7s_supporting_tls(input) == 2
end
test "can solve the day 7 - part 1" do
answer =
File.read!("test/day7/day7.txt")
|> count_ipv7s_supporting_tls
IO.puts "day 7 - part 1: #{inspect answer}"
end
end
describe "day 7 - part 2" do
test "can extract alternating patterns" do
assert extract_alternating_patterns(["a", "b", "a"], []) == [{"a", "b", "a"}]
assert extract_alternating_patterns(["x", "y", "z"], [{"a", "b", "a"}]) == [{"a", "b", "a"}]
assert extract_alternating_patterns(["a", "a", "a"], []) == []
assert extract_alternating_patterns(["z", "a", "z", "b", "z"], []) == [{"z", "b", "z"}, {"z", "a", "z"}]
end
test "can search for bab patterns" do
assert search_bab_patterns({"a", "b", "a"}, [{"b", "a", "b"}], false) == true
assert search_bab_patterns({"x", "y", "x"}, [{"x", "y", "x"}], false) == false
end
test "can determine if an ipv7 supports ssl" do
assert is_supporting_ssl?("aba[bab]xyz") == true
assert is_supporting_ssl?("xyx[xyx]xyx") == false
assert is_supporting_ssl?("aaa[kek]eke") == true
assert is_supporting_ssl?("zazbz[bzb]cdb") == true
end
test "can count the number of ipv7s supporting ssl in a list" do
input = """
aba[bab]xyz
xyx[xyx]xyx
aaa[kek]eke
zazbz[bzb]cdb
"""
assert count_ipv7s_supporting_ssl(input) == 3
end
test "can solve the day 7 - part 1" do
answer =
File.read!("test/day7/day7.txt")
|> count_ipv7s_supporting_ssl
IO.puts "day 7 - part 2: #{inspect answer}"
end
end
end
| 35.076923 | 110 | 0.587093 |
73c64d5ae35cbce5a04161762ca5823bd95d867f | 759 | exs | Elixir | test/automigrate_test.exs | wojtekmach/automigrate | d582e80e7e0928e70e1314ab4580cc998ffa76de | [
"Apache-2.0"
] | 1 | 2018-06-04T02:10:23.000Z | 2018-06-04T02:10:23.000Z | test/automigrate_test.exs | wojtekmach/automigrate | d582e80e7e0928e70e1314ab4580cc998ffa76de | [
"Apache-2.0"
] | null | null | null | test/automigrate_test.exs | wojtekmach/automigrate | d582e80e7e0928e70e1314ab4580cc998ffa76de | [
"Apache-2.0"
] | null | null | null | defmodule Post1 do
  use Ecto.Schema

  # Initial shape of the "posts" table; Post2 below maps the same table with
  # :bar instead of :foo so AutomigrateTest can exercise alter_table diffs.
  schema "posts" do
    field :title, :string
    field :foo, :integer
  end
end
defmodule Post2 do
  use Ecto.Schema

  # Same "posts" table as Post1 but with :bar (string) replacing :foo
  # (integer) — used to produce an alter_table diff in AutomigrateTest.
  schema "posts" do
    field :title, :string
    field :bar, :string
  end
end
defmodule AutomigrateTest do
  use ExUnit.Case, async: true

  # Start each test from a clean slate: drop generated migration files and
  # the "posts" table created by previous runs.
  setup do
    File.rm_rf!("priv")
    TestRepo.query!("DROP TABLE IF EXISTS posts")
    :ok
  end

  # Full diff/run cycle: a missing table diffs to :create_table, running the
  # migration makes the same schema diff to :noop, and a changed schema
  # (Post2) diffs to an :alter_table with added/removed columns.
  test "automigrate" do
    assert Automigrate.diff(TestRepo, Post1) == {:create_table, "posts", [id: :id, title: :string, foo: :integer]}
    Automigrate.run(TestRepo, Post1)
    assert Automigrate.diff(TestRepo, Post1) == :noop
    assert Automigrate.diff(TestRepo, Post2) == {:alter_table, "posts", add: [bar: :string], remove: [foo: :integer]}
  end
end
| 21.083333 | 117 | 0.667984 |
73c66f6ed1e92e10f056b4befb6ec1dd1fa888c8 | 232 | exs | Elixir | test/reverso_test.exs | areski/echo-server-elixir | b17768ed9de256aad3b2bfe97cc89a17c4ab8490 | [
"MIT"
] | 2 | 2017-03-12T11:55:52.000Z | 2017-03-12T23:25:14.000Z | test/reverso_test.exs | areski/echo-server-elixir | b17768ed9de256aad3b2bfe97cc89a17c4ab8490 | [
"MIT"
] | null | null | null | test/reverso_test.exs | areski/echo-server-elixir | b17768ed9de256aad3b2bfe97cc89a17c4ab8490 | [
"MIT"
] | null | null | null | defmodule ReversoTest do
use ExUnit.Case
alias Echo.Reverso
test "reverse string with reverso method" do
assert Reverso.reverso("reverse\n") == "esrever\n"
assert Reverso.reverso("reverse") == "esrever\n"
end
end | 19.333333 | 54 | 0.706897 |
73c6732b5cd6973d321f0702dc02bb0ece9fa4f9 | 2,597 | ex | Elixir | lib/Pokedex/moves.ex | MiguelERuiz/pokedex | e7faaa54f6486442a0a3768563d3decf8c37f5b1 | [
"MIT"
] | null | null | null | lib/Pokedex/moves.ex | MiguelERuiz/pokedex | e7faaa54f6486442a0a3768563d3decf8c37f5b1 | [
"MIT"
] | null | null | null | lib/Pokedex/moves.ex | MiguelERuiz/pokedex | e7faaa54f6486442a0a3768563d3decf8c37f5b1 | [
"MIT"
defmodule Pokedex.Moves do
  @moduledoc """
  Documentation for https://pokeapi.co/docs/v2.html/#moves-section

  Every resource function accepts a numeric id, an atom name or a string
  name. Atom names are normalized with `Pokedex.atom_to_pokedex_string/1`
  before being appended to the endpoint path.
  """

  @move_endpoint "/move/"
  @move_ailment_endpoint "/move-ailment/"
  @move_battle_style_endpoint "/move-battle-style/"
  @move_category_endpoint "/move-category/"
  @move_damage_class_endpoint "/move-damage-class/"
  @move_learn_method_endpoint "/move-learn-method/"
  @move_target_endpoint "/move-target/"

  @doc "Fetches a move by id or name."
  def move(id_or_name), do: fetch(@move_endpoint, id_or_name)

  @doc "Fetches a move ailment by id or name."
  def move_ailment(id_or_name), do: fetch(@move_ailment_endpoint, id_or_name)

  @doc "Fetches a move battle style by id or name."
  def move_battle_style(id_or_name), do: fetch(@move_battle_style_endpoint, id_or_name)

  @doc "Fetches a move category by id or name."
  def move_category(id_or_name), do: fetch(@move_category_endpoint, id_or_name)

  @doc "Fetches a move damage class by id or name."
  def move_damage_class(id_or_name), do: fetch(@move_damage_class_endpoint, id_or_name)

  @doc "Fetches a move learn method by id or name."
  def move_learn_method(id_or_name), do: fetch(@move_learn_method_endpoint, id_or_name)

  @doc "Fetches a move target by id or name."
  def move_target(id_or_name), do: fetch(@move_target_endpoint, id_or_name)

  # Shared identifier resolution. Previously `move_battle_style/1` and
  # `move_damage_class/1` passed atoms through bare `to_string/1` (skipping
  # `Pokedex.atom_to_pokedex_string/1`) and `move_category/1` had no atom
  # clause at all; routing everything through this helper makes all seven
  # resources behave consistently.
  defp fetch(endpoint, id) when is_number(id), do: Pokedex.get(endpoint <> to_string(id))

  defp fetch(endpoint, name) when is_atom(name),
    do: Pokedex.get(endpoint <> Pokedex.atom_to_pokedex_string(name))

  defp fetch(endpoint, name) when is_bitstring(name), do: Pokedex.get(endpoint <> name)
end
| 32.873418 | 79 | 0.74509 |
73c6abeb0ebfbdb236babd4c40360df17d124ab6 | 1,396 | exs | Elixir | mix.exs | thiamsantos/packages_bot | 4e3e3f942c075876c93fc901b3034355a26bd627 | [
"Apache-2.0"
] | 4 | 2019-07-08T15:46:38.000Z | 2021-03-29T12:05:25.000Z | mix.exs | thiamsantos/packages_bot | 4e3e3f942c075876c93fc901b3034355a26bd627 | [
"Apache-2.0"
] | 13 | 2021-03-14T21:31:28.000Z | 2021-12-20T11:56:45.000Z | mix.exs | thiamsantos/packages_bot | 4e3e3f942c075876c93fc901b3034355a26bd627 | [
"Apache-2.0"
] | 1 | 2019-07-08T18:40:23.000Z | 2019-07-08T18:40:23.000Z | defmodule PackagesBot.MixProject do
use Mix.Project
  # Project definition: Phoenix compiler is prepended, coverage is collected
  # with ExCoveralls, and all coveralls tasks run in the :test environment.
  def project do
    [
      app: :packages_bot,
      version: "0.1.0",
      elixir: "~> 1.8",
      elixirc_paths: elixirc_paths(Mix.env()),
      start_permanent: Mix.env() == :prod,
      deps: deps(),
      aliases: aliases(),
      compilers: [:phoenix] ++ Mix.compilers(),
      test_coverage: [tool: ExCoveralls],
      preferred_cli_env: [
        coveralls: :test,
        "coveralls.detail": :test,
        "coveralls.post": :test,
        "coveralls.html": :test
      ]
    ]
  end

  # Run "mix help compile.app" to learn about applications.
  def application do
    [
      extra_applications: [:logger, :runtime_tools],
      mod: {PackagesBot.Application, []}
    ]
  end

  # Specifies which paths to compile per environment.
  # Test support modules live under test/support and only compile in :test.
  defp elixirc_paths(:test), do: ["lib", "test/support"]
  defp elixirc_paths(_), do: ["lib"]

  # Run "mix help deps" to learn about dependencies.
  defp deps do
    [
      {:credo, "~> 1.5", only: [:dev, :test], runtime: false},
      {:credo_naming, "~> 1.0", only: [:dev, :test], runtime: false},
      {:excoveralls, "~> 0.14", only: :test},
      {:hackney, "~> 1.17"},
      {:heartcheck, "~> 0.4.3"},
      {:jason, "~> 1.2"},
      {:phoenix, "~> 1.5"},
      {:plug_cowboy, "~> 2.4"},
      {:sentry, "~> 8.0"},
      {:tesla, "~> 1.4"}
    ]
  end

  # No aliases defined yet; kept so project/0 can reference it.
  defp aliases do
    []
  end
end
| 24.928571 | 69 | 0.547278 |
73c6c77cbcd22993d0c5efe1927cff54428562d8 | 3,154 | exs | Elixir | apps/artemis_log/test/artemis_log/contexts/event_log/delete_all_event_logs_older_than_test.exs | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | [
"MIT"
] | 10 | 2019-07-05T19:59:20.000Z | 2021-05-23T07:36:11.000Z | apps/artemis_log/test/artemis_log/contexts/event_log/delete_all_event_logs_older_than_test.exs | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | [
"MIT"
] | 7 | 2019-07-12T21:41:01.000Z | 2020-08-17T21:29:22.000Z | apps/artemis_log/test/artemis_log/contexts/event_log/delete_all_event_logs_older_than_test.exs | chrislaskey/atlas_platform | 969aea95814f62d3471f93000ee5ad77edb9d1bf | [
"MIT"
] | 4 | 2019-07-05T20:04:08.000Z | 2021-05-13T16:28:33.000Z | defmodule ArtemisLog.DeleteAllEventLogsOlderThanTest do
use ArtemisLog.DataCase
import ArtemisLog.Factories
alias ArtemisLog.DeleteAllEventLogsOlderThan
  # Every test starts with at least one event log present.
  setup do
    event_log = insert(:event_log)

    {:ok, event_log: event_log}
  end

  describe "call!" do
    test "raises an exception when given invalid arguments" do
      invalid_timestamp = nil

      assert_raise ArtemisLog.Context.Error, fn ->
        DeleteAllEventLogsOlderThan.call!(invalid_timestamp, Mock.system_user())
      end
    end

    test "returns successfully when given a valid timestamp" do
      timestamp = DateTime.utc_now()

      %{total: _} = DeleteAllEventLogsOlderThan.call!(timestamp, Mock.system_user())
    end
  end

  describe "call" do
    test "raises an exception when given invalid arguments" do
      invalid_timestamp = nil

      result = DeleteAllEventLogsOlderThan.call(invalid_timestamp, Mock.system_user())

      assert result == {:error, "Invalid timestamp"}
    end

    test "returns successfully when given a valid timestamp" do
      timestamp = DateTime.utc_now()

      {:ok, _} = DeleteAllEventLogsOlderThan.call(timestamp, Mock.system_user())
    end

    # Only records strictly older than the cutoff are deleted (the cutoff
    # record itself survives: 2 of the 4 inserted rows are removed).
    test "successfully deletes items before specified timestamp" do
      now = Timex.now()
      one_week_ago = Timex.subtract(now, Timex.Duration.from_days(7))
      two_weeks_ago = Timex.subtract(now, Timex.Duration.from_days(14))
      three_weeks_ago = Timex.subtract(now, Timex.Duration.from_days(21))

      insert(:event_log, inserted_at: now)
      insert(:event_log, inserted_at: one_week_ago)
      insert(:event_log, inserted_at: two_weeks_ago)
      insert(:event_log, inserted_at: three_weeks_ago)

      {:ok, result} = DeleteAllEventLogsOlderThan.call(one_week_ago, Mock.system_user())

      assert result.timestamp == one_week_ago
      assert result.total == 2
    end

    test "successfully returns 0 total when none match" do
      now = Timex.now()
      one_week_ago = Timex.subtract(now, Timex.Duration.from_days(7))
      two_weeks_ago = Timex.subtract(now, Timex.Duration.from_days(14))

      insert(:event_log, inserted_at: now)
      insert(:event_log, inserted_at: one_week_ago)

      {:ok, result} = DeleteAllEventLogsOlderThan.call(two_weeks_ago, Mock.system_user())

      assert result.timestamp == two_weeks_ago
      assert result.total == 0
    end
  end

  # NOTE(review): @tag applies to the next test definition (the first test in
  # the describe), not the whole describe — confirm the intent was not
  # @moduletag or a per-test tag placement.
  @tag :pending
  describe "broadcasts" do
    test "publishes event and record" do
      now = Timex.now()
      one_week_ago = Timex.subtract(now, Timex.Duration.from_days(7))
      two_weeks_ago = Timex.subtract(now, Timex.Duration.from_days(14))
      three_weeks_ago = Timex.subtract(now, Timex.Duration.from_days(21))

      insert(:event_log, inserted_at: now)
      insert(:event_log, inserted_at: one_week_ago)
      insert(:event_log, inserted_at: two_weeks_ago)
      insert(:event_log, inserted_at: three_weeks_ago)

      {:ok, result} = DeleteAllEventLogsOlderThan.call(one_week_ago, Mock.system_user())

      assert_received %Phoenix.Socket.Broadcast{
        event: "event-logs:deleted",
        payload: %{
          data: ^result
        }
      }
    end
  end
end
| 31.227723 | 89 | 0.700698 |
73c71bfecf98ea8dd5331c6f334660374722c465 | 501 | ex | Elixir | lib/stellar/accounts.ex | revelrylabs/elixir-stellar-client | 5866fc43fdc86260e0719a4764e8dd9327ef4731 | [
"MIT"
] | 25 | 2018-01-23T13:56:28.000Z | 2021-11-08T08:10:53.000Z | lib/stellar/accounts.ex | revelrylabs/elixir-stellar-client | 5866fc43fdc86260e0719a4764e8dd9327ef4731 | [
"MIT"
] | 91 | 2018-01-30T20:10:44.000Z | 2022-01-12T19:50:24.000Z | lib/stellar/accounts.ex | revelrylabs/elixir-stellar-client | 5866fc43fdc86260e0719a4764e8dd9327ef4731 | [
"MIT"
] | 5 | 2018-04-17T15:08:26.000Z | 2019-08-07T19:08:49.000Z | defmodule Stellar.Accounts do
@moduledoc """
Functions for interacting with Accounts
"""
alias Stellar.Base
@doc """
Gets account details
"""
@spec get(binary) :: {Stellar.status(), map}
def get(accountId) do
Base.get("/accounts/#{accountId}")
end
@doc """
Gets a single data associated with the given account
"""
@spec get_data(binary, binary) :: {Stellar.status(), map}
def get_data(accountId, key) do
Base.get("/accounts/#{accountId}/data/#{key}")
end
end
| 21.782609 | 59 | 0.656687 |
73c71c99572e3d1dc8c2a5e1115d62770802420f | 876 | ex | Elixir | lib/vega_web/controllers/page_controller.ex | Fudoshiki/vega | 0577024afc734933048645976705784512fbc1f4 | [
"MIT"
] | 4 | 2020-03-22T22:12:29.000Z | 2020-07-01T22:32:01.000Z | lib/vega_web/controllers/page_controller.ex | Fudoshiki/vega | 0577024afc734933048645976705784512fbc1f4 | [
"MIT"
] | 3 | 2021-03-10T11:53:41.000Z | 2021-10-17T11:18:54.000Z | lib/vega_web/controllers/page_controller.ex | Fudoshiki/vega | 0577024afc734933048645976705784512fbc1f4 | [
"MIT"
defmodule VegaWeb.PageController do
  use VegaWeb, :controller

  alias Vega.BoardOverview

  @doc """
  Render the overview of the boards connected to the current user
  """
  def index(conn, _params) do
    current_user = fetch_user(conn)
    {personal, visited, starred, closed} = BoardOverview.fetch_all_for_user(current_user)

    conn
    |> merge_assigns(personal: personal, visited: visited, starred: starred, closed: closed)
    |> assign_asserts("welcome")
    |> render("index.html")
  end

  @doc """
  Wipe all cards, issues and boards, then render an empty overview.
  """
  def clear_db(conn, _param) do
    # Drop every document from each collection, in the same order as before.
    Enum.each(["cards", "issues", "boards"], &Mongo.delete_many(:mongo, &1, %{}))

    conn
    |> merge_assigns(personal: [], visited: [], starred: [])
    |> assign_asserts("welcome")
    |> render("index.html")
  end
end
| 25.028571 | 92 | 0.593607 |
73c729b442c2d606ed586cfbb4d844a68d34c6ea | 4,229 | ex | Elixir | apps/janus/lib/janus/stream_state_guard.ex | bornmeyer/janus_signaling | cbab905aaa844a2762d4647f9363370cecd3db22 | [
"Apache-2.0"
] | null | null | null | apps/janus/lib/janus/stream_state_guard.ex | bornmeyer/janus_signaling | cbab905aaa844a2762d4647f9363370cecd3db22 | [
"Apache-2.0"
] | null | null | null | apps/janus/lib/janus/stream_state_guard.ex | bornmeyer/janus_signaling | cbab905aaa844a2762d4647f9363370cecd3db22 | [
"Apache-2.0"
] | null | null | null | defmodule Janus.StreamStateGuard do
use GenStateMachine, callback_mode: [:handle_event_function, :state_enter]
require Logger
def child_spec(stream_id) do
%{
id: "#{stream_id}_guard",
start: {__MODULE__, :start_link, [%{stream_id: stream_id, last_state: nil}]},
type: :worker,
restart: :transient,
shutdown: 500
}
end
  # Start the state machine registered under the stream id (see via_tuple/1).
  # The FSM presumably begins in the :publish state via the {:publish, state}
  # init argument — TODO confirm against GenStateMachine's init contract.
  def start_link(state) do
    GenStateMachine.start_link(__MODULE__, {:publish, state}, name: via_tuple(state.stream_id))
  end
def handle_event(:enter, event, state, data) do
"Transitioning from #{data[:last_state] |> inspect} to #{event |> inspect}" |> Logger.info
{:next_state, state, data |> Map.put(:last_state, event)}
end
  # :publish -> :start_publish — register the publishing stream with the stream
  # and plugin managers, build the Participant, and acknowledge the command.
  def handle_event({:call, from}, {:advance_stream, command, state, _web_socket, [stream_id: stream_id]}, :publish, data) do
    stream = Janus.Stream.get(stream_id)
    Janus.StreamManager.add_stream(stream, state.participant_id)
    {:ok, plugin, _} = Janus.PluginManager.add_stream(stream, state.participant_id)
    participant = %Janus.Participant{id: state.participant_id, publishing_plugin: plugin}
    # Track the stream id and participant on the caller's state (returned in the reply).
    state = Map.put(state, :stream_ids, [stream_id]) |> Map.put(:participant, participant)
    {:next_state, :start_publish, data, [{:reply, from, {state, Janus.ResponseCreator.create_response(command, command["id"], stream_id)}}]}
  end
  # :start_publish -> :play — answer the publisher's SDP offer and store the
  # room id assigned during answer creation on the participant's plugin.
  def handle_event({:call, from}, {:advance_stream, command, state, _web_socket, [sdp: sdp]}, :start_publish, data) do
    {:ok, sdp, _type, room_id} = Janus.Stream.create_answer(data[:stream_id], sdp)
    plugin = Janus.PluginManager.get_plugin(state.participant_id)
    plugin = %{plugin | room_id: room_id}
    Janus.PluginManager.update_plugin(state.participant_id, plugin)
    # Reply carries the answer SDP back to the signaling layer.
    response = Janus.ResponseCreator.create_response(command, command["id"], data[:stream_id], sdp: sdp)
    {:next_state, :play, data, [{:reply, from, {state, response}}]}
  end
  # :play -> :start_play — create a subscriber-side plugin and stream that is
  # attached to the original publishing stream, then send back an SDP offer.
  def handle_event({:call, from}, {:advance_stream, command, state, web_socket, [stream_id: subscriber_stream_id]}, :play, data) do
    {:ok, plugin} = Janus.PluginManager.new(state.participant_id, state.room_id, :subscriber)
    participant = Janus.Participant.update_plugin(state.participant, plugin, :subscribing_plugin)
    publishing_stream = Janus.Stream.get(data[:stream_id])
    Janus.StreamSupervisor.start_child(subscriber_stream_id, participant.id, plugin.room_id, web_socket, plugin.handle_id, :subscriber, publishing_stream)
    {:ok, _, _} = subscriber_stream_id |> add_to_managers(state.participant_id)
    # Prepend the new subscriber stream id; the publisher's id stays in the list.
    state = Map.put(state, :stream_ids, [subscriber_stream_id | state.stream_ids]) |> Map.put(:participant, participant)
    {_, sdp} = Janus.Stream.create_offer(subscriber_stream_id, plugin)
    response = Janus.ResponseCreator.create_response(command, command["id"], data[:stream_id], sdp: sdp)
    {:next_state, :start_play, data, [{:reply, from, {state, response}}]}
  end
  # Register an existing stream with both the stream and plugin managers for
  # the given participant; returns PluginManager.add_stream's result.
  defp add_to_managers(stream_id, participant_id) do
    Janus.Stream.get(stream_id)
    |> Janus.StreamManager.add_stream(participant_id)
    |> Janus.PluginManager.add_stream(participant_id)
  end
  # :start_play -> :stop — apply the subscriber's SDP answer to the stream.
  # The reply body is intentionally empty here.
  def handle_event({:call, from}, {:advance_stream, _command, state, _web_socket, [stream_id: stream_id, sdp: sdp]}, :start_play, data) do
    Janus.Stream.set_remote_description(stream_id, sdp, "answer", state.participant)
    response = []
    {:next_state, :stop, data, [{:reply, from, {state, response}}]}
  end
def handle_event({:call, from}, {:advance_stream, _command, state, _web_socket, [stream_id: stream_id, sdp: sdp]}, :stop, data) do
response = []
{:next_state, :leave, data, [{:reply, from, {state, response}}]}
end
def handle_event({:call, from}, {:advance_stream, command, state, _web_socket, [stream_id: stream_id, sdp: sdp]}, :leave, _data) do
{:keep_state_and_data, [{:reply, from, {state, Janus.ResponseCreator.create_response(command, command["id"], stream_id)}}]}
end
defp via_tuple(id) do
{:via, Registry, {:stream_state_guard_registry, "#{id}_guard"}}
end
  # Public entry point: synchronously drive the stream's FSM one step forward.
  # `additional_args` selects the handle_event clause (e.g. stream_id:, sdp:).
  def advance_stream(stream_id, command, state, web_socket, additional_args \\ []) do
    GenStateMachine.call(via_tuple(stream_id), {:advance_stream, command, state, web_socket, additional_args})
  end
end
| 49.752941 | 154 | 0.718846 |
73c738aca37eea6d9ce7c533dd87b53011eb1dd3 | 4,493 | ex | Elixir | lib/cog/bundle/config.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 1,003 | 2016-02-23T17:21:12.000Z | 2022-02-20T14:39:35.000Z | lib/cog/bundle/config.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 906 | 2016-02-22T22:54:19.000Z | 2022-03-11T15:19:43.000Z | lib/cog/bundle/config.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | [
"Apache-2.0"
] | 95 | 2016-02-23T13:42:31.000Z | 2021-11-30T14:39:55.000Z | defmodule Cog.Bundle.Config do
require Logger
alias Cog.Command.GenCommand
  # Convenience wrapper: child-spec tuples for every command in the config.
  def commands(config),
    do: process_args(config, "commands")
  # TODO: Scope these to avoid conflicts with pre-existing modules
  # TODO: Pass each command process config from the bundle config
  # Map each command's "module" string to {Module, []} start args.
  # Module.safe_concat only resolves already-existing atoms, so untrusted
  # config cannot mint new atoms here.
  def process_args(bundle_config, "commands") do
    for config <- Map.get(bundle_config, "commands", []) do
      case config do
        %{"module" => module_name} ->
          {Module.safe_concat("Elixir", module_name), []}
      end
    end
  end
def modules(config, type) do
for %{"module" => module_name} <- Map.get(config, type, []),
do: Module.safe_concat("Elixir", module_name)
end
# TODO: This entire module is now effectively one-use private code,
# as it is only used to generate the embedded bundle's config. We
# can consider moving this into Cog.Bundle.Embedded, as well as
# tailoring the code toward the embedded bundle. For instance, all
# the arguments for `gen_config` will always be known.
@doc """
Generate a bundle configuration via code introspection. Returns a
map representing the configuration, ready for turning into JSON.
## Arguments
- `name`: the name of the bundle
- `modules`: a list of modules to be included in the bundle
"""
  def gen_config(name, description, version, author, homepage, modules, template_dir) do
    # We create single key/value pair maps for each
    # top-level key in the overall configuration, and then merge all
    # those maps together.
    # The five maps contribute disjoint keys (version marker, bundle metadata,
    # "commands", "permissions", "templates"), so merge order is irrelevant.
    Enum.reduce([%{"cog_bundle_version" => Spanner.Config.current_config_version},
                 gen_bundle(name, description, version, author, homepage),
                 gen_commands(modules),
                 gen_permissions(name, modules),
                 gen_templates(template_dir)],
                &Map.merge/2)
  end
# Generate top-level bundle configuration
defp gen_bundle(name, description, version, author, homepage) do
%{"name" => name,
"description" => description,
"type" => "elixir",
"version" => version,
"author" => author,
"homepage" => homepage}
end
# Generate the union of all permissions required by commands in the
# bundle. Returned permissions are namespaced by the bundle name.
defp gen_permissions(bundle_name, modules) do
permissions = modules
|> only_commands
|> Enum.map(&(GenCommand.Base.permissions(&1)))
|> Enum.map(&Enum.into(&1, HashSet.new))
|> Enum.reduce(HashSet.new, &Set.union/2)
|> Enum.map(&namespace_permission(bundle_name, &1))
|> Enum.sort
%{"permissions" => permissions}
end
defp namespace_permission(bundle_name, permission_name),
do: "#{bundle_name}:#{permission_name}"
  # Templates are read off disk by the Templates repository at config-gen time.
  defp gen_templates(template_dir),
    do: %{"templates" => Cog.Repository.Templates.templates_from_files(template_dir)}
  # Extract all commands from `modules` and generate configuration
  # maps for them (keyed by command name; see command_map/2).
  defp gen_commands(modules) do
    %{"commands" => Enum.reduce(only_commands(modules), %{}, &command_map/2)}
  end
  # Keep only modules that `use` GenCommand.Base (i.e. real commands).
  defp only_commands(modules),
    do: Enum.filter(modules, &GenCommand.Base.used_base?/1)
  # Reducer for gen_commands/1: add `module`'s full command config to `acc`
  # under its command name.
  # NOTE(review): Code.get_docs/2 was removed in newer Elixir releases in favor
  # of Code.fetch_docs/1 — confirm the targeted Elixir version.
  defp command_map(module, acc) do
    command =
      %{"options" => GenCommand.Base.options(module),
        "rules" => GenCommand.Base.rules(module) |> Enum.sort,
        "description" => GenCommand.Base.description(module),
        "long_description" => GenCommand.Base.long_description(module),
        "examples" => GenCommand.Base.examples(module),
        "notes" => GenCommand.Base.notes(module),
        "arguments" => GenCommand.Base.arguments(module),
        "output" => GenCommand.Base.output(module),
        "documentation" => case Code.get_docs(module, :moduledoc) do
                             {_line, doc} ->
                               # If a module doesn't have a module doc,
                               # then it'll return a tuple of `{1, nil}`,
                               # so that works out fine here.
                               doc
                             nil ->
                               # TODO: Transition away from @moduledoc
                               # to our own thing; modules defined in
                               # test scripts apparently can access
                               # @moduledocs
                               nil
                           end,
        "module" => inspect(module)}
    Map.put(acc, GenCommand.Base.command_name(module), command)
  end
end
| 37.756303 | 88 | 0.623414 |
73c73c36a96be7077677f39408ee0a7b7c412e74 | 1,305 | exs | Elixir | config/config.exs | SwiftAusterity/gossip | d79c53acd02fcb9905acb9730e59065efdd5a589 | [
"MIT"
] | null | null | null | config/config.exs | SwiftAusterity/gossip | d79c53acd02fcb9905acb9730e59065efdd5a589 | [
"MIT"
] | null | null | null | config/config.exs | SwiftAusterity/gossip | d79c53acd02fcb9905acb9730e59065efdd5a589 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config

# General application configuration
config :gossip,
  namespace: Web,
  ecto_repos: [Gossip.Repo]

# Configures the endpoint
# Custom Cowboy dispatch: a raw websocket at /socket plus the Phoenix
# transport at /chat/websocket; everything else falls through to the endpoint.
config :gossip, Web.Endpoint,
  url: [host: "localhost"],
  # NOTE(review): secret_key_base is committed to source control — rotate it
  # and load from environment-specific secrets instead.
  secret_key_base: "Pqncs1RkrPq/7DiOEo/7U0DGsm503zjPQMerRQO3YVFUtOXpDq6PKI5xBfwBCWmB",
  render_errors: [view: Web.ErrorView, accepts: ~w(html json)],
  http: [dispatch: [
    {:_, [
      {"/socket", Web.SocketHandler, []},
      {"/chat/websocket", Phoenix.Endpoint.CowboyWebSocket, {Phoenix.Transports.WebSocket, {Web.Endpoint, Web.UserSocket, :websocket}}},
      {:_, Plug.Adapters.Cowboy.Handler, {Web.Endpoint, []}}
    ]}]],
  pubsub: [name: Gossip.PubSub,
           adapter: Phoenix.PubSub.PG2]

# Plain-TCP game socket by default; overridden per environment as needed.
config :gossip, :socket, tls: false

# Configures Elixir's Logger
config :logger, :console,
  format: "$time $metadata[$level] $message\n",
  metadata: [:user_id]

config :distillery, no_warn_missing: [:elixir_make]

# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 33.461538 | 136 | 0.72567 |
73c756c6b5ef472cea9e6fc818083b9a6cc598a3 | 7,902 | exs | Elixir | test/phoenix/router/helpers_test.exs | wojtekmach/phoenix | a6b3bf301c7088b8824a39a165582dc85dfdd2a4 | [
"MIT"
] | null | null | null | test/phoenix/router/helpers_test.exs | wojtekmach/phoenix | a6b3bf301c7088b8824a39a165582dc85dfdd2a4 | [
"MIT"
] | null | null | null | test/phoenix/router/helpers_test.exs | wojtekmach/phoenix | a6b3bf301c7088b8824a39a165582dc85dfdd2a4 | [
"MIT"
] | null | null | null | defmodule Phoenix.Router.HelpersTest do
use ExUnit.Case, async: true
use ConnHelper
alias Phoenix.Router.Helpers
## Unit tests
  # Minimal stand-ins exposing only the config/1 shape Helpers.url/1 reads.
  defmodule HTTPSRouter do
    def config(:https), do: [port: 443]
    def config(:url), do: [host: "example.com"]
  end
  defmodule HTTPRouter do
    def config(:https), do: false
    def config(:http), do: [port: 80]
    def config(:url), do: [host: "example.com"]
  end
  # Explicit :url config (scheme/port) must win over transport defaults.
  defmodule URLRouter do
    def config(:https), do: false
    def config(:http), do: false
    def config(:url), do: [host: "example.com", port: 678, scheme: "random"]
  end
  test "generates url" do
    assert Helpers.url(URLRouter) == "random://example.com:678"
    assert Helpers.url(HTTPRouter) == "http://example.com"
    assert Helpers.url(HTTPSRouter) == "https://example.com"
  end
  # The generated helper code is compared as source text via Macro.to_string,
  # so the heredocs below must match the generator's output exactly.
  test "defhelper with :identifiers" do
    route = build("GET", "/foo/:bar", nil, Hello, :world, "hello_world", [])
    assert extract_defhelper(route, 0) == String.strip """
    def(hello_world_path(:world, bar)) do
      hello_world_path(:world, bar, [])
    end
    """
    assert extract_defhelper(route, 1) == String.strip """
    def(hello_world_path(:world, bar, params)) do
      to_path(("" <> "/foo") <> "/" <> to_string(bar), params, ["bar"])
    end
    """
  end
  # Splat segments (*bar) are joined with "/" instead of stringified.
  test "defhelper with *identifiers" do
    route = build("GET", "/foo/*bar", nil, Hello, :world, "hello_world", [])
    assert extract_defhelper(route, 0) == String.strip """
    def(hello_world_path(:world, bar)) do
      hello_world_path(:world, bar, [])
    end
    """
    assert extract_defhelper(route, 1) == String.strip """
    def(hello_world_path(:world, bar, params)) do
      to_path(("" <> "/foo") <> "/" <> Enum.join(bar, "/"), params, ["bar"])
    end
    """
  end
  # Thin wrapper over the Route struct builder used by the unit tests above.
  defp build(verb, path, host, controller, action, helper, pipe_through) do
    Phoenix.Router.Route.build(verb, path, host, controller, action, helper, pipe_through)
  end
  # Render the pos-th generated helper clause back to source for comparison.
  defp extract_defhelper(route, pos) do
    {:__block__, _, block} = Helpers.defhelper(route)
    Enum.at(block, pos) |> Macro.to_string()
  end
## Integration tests
  # Fixture router exercising named routes, splat/skip options, nested
  # resources, and scopes with :alias / :as overrides.
  defmodule Router do
    use Phoenix.Router
    get "/posts/top", PostController, :top, as: :top
    get "/posts/:id", PostController, :show
    get "/posts/file/*file", PostController, :file
    # as: nil suppresses helper generation for this route.
    get "/posts/skip", PostController, :skip, as: nil
    resources "/users", UserController do
      resources "/comments", CommentController do
        resources "/files", FileController
      end
    end
    resources "/files", FileController
    scope "/admin", alias: Admin do
      resources "/messages", MessageController
    end
    scope "/admin/new", alias: Admin, as: "admin" do
      resources "/messages", MessageController
    end
    get "/", PageController, :root, as: :page
  end
  # Configure and boot the fixture router once for the whole module; torn down
  # via on_exit.
  setup_all do
    Application.put_env(:phoenix, Router, url: [host: "example.com"],
                        http: false, https: false)
    Router.start()
    on_exit &Router.stop/0
    :ok
  end
  alias Router.Helpers
  test "top-level named route" do
    assert Helpers.post_path(:show, 5) == "/posts/5"
    assert Helpers.post_path(:show, 5, []) == "/posts/5"
    # Params matching a path segment are consumed, not appended as a query.
    assert Helpers.post_path(:show, 5, id: 5) == "/posts/5"
    assert Helpers.post_path(:show, 5, %{"id" => 5}) == "/posts/5"
    assert Helpers.post_path(:file, ["foo", "bar"]) == "/posts/file/foo/bar"
    assert Helpers.post_path(:file, ["foo", "bar"], []) == "/posts/file/foo/bar"
    assert Helpers.top_path(:top) == "/posts/top"
    assert Helpers.top_path(:top, id: 5) == "/posts/top?id=5"
    assert Helpers.top_path(:top, %{"id" => 5}) == "/posts/top?id=5"
    assert Helpers.page_path(:root) == "/"
    # as: nil on the route means no helper clause exists for :skip.
    assert_raise UndefinedFunctionError, fn ->
      Helpers.post_path(:skip)
    end
  end
  # Each helper accepts an optional trailing params list; both arities are checked.
  test "resources generates named routes for :index, :edit, :show, :new" do
    assert Helpers.user_path(:index, []) == "/users"
    assert Helpers.user_path(:index) == "/users"
    assert Helpers.user_path(:edit, 123, []) == "/users/123/edit"
    assert Helpers.user_path(:edit, 123) == "/users/123/edit"
    assert Helpers.user_path(:show, 123, []) == "/users/123"
    assert Helpers.user_path(:show, 123) == "/users/123"
    assert Helpers.user_path(:new, []) == "/users/new"
    assert Helpers.user_path(:new) == "/users/new"
  end
  test "resources generates named routes for :create, :update, :delete" do
    assert Helpers.message_path(:create, []) == "/admin/messages"
    assert Helpers.message_path(:create) == "/admin/messages"
    assert Helpers.message_path(:update, 1, []) == "/admin/messages/1"
    assert Helpers.message_path(:update, 1) == "/admin/messages/1"
    assert Helpers.message_path(:destroy, 1, []) == "/admin/messages/1"
    assert Helpers.message_path(:destroy, 1) == "/admin/messages/1"
  end
  # Nested resources prepend one positional id per parent level.
  test "1-Level nested resources generates nested named routes for :index, :edit, :show, :new" do
    assert Helpers.user_comment_path(:index, 99, []) == "/users/99/comments"
    assert Helpers.user_comment_path(:index, 99) == "/users/99/comments"
    assert Helpers.user_comment_path(:edit, 88, 2, []) == "/users/88/comments/2/edit"
    assert Helpers.user_comment_path(:edit, 88, 2) == "/users/88/comments/2/edit"
    assert Helpers.user_comment_path(:show, 123, 2, []) == "/users/123/comments/2"
    assert Helpers.user_comment_path(:show, 123, 2) == "/users/123/comments/2"
    assert Helpers.user_comment_path(:new, 88, []) == "/users/88/comments/new"
    assert Helpers.user_comment_path(:new, 88) == "/users/88/comments/new"
  end
  test "2-Level nested resources generates nested named routes for :index, :edit, :show, :new" do
    assert Helpers.user_comment_file_path(:index, 99, 1, []) ==
      "/users/99/comments/1/files"
    assert Helpers.user_comment_file_path(:index, 99, 1) ==
      "/users/99/comments/1/files"
    assert Helpers.user_comment_file_path(:edit, 88, 1, 2, []) ==
      "/users/88/comments/1/files/2/edit"
    assert Helpers.user_comment_file_path(:edit, 88, 1, 2) ==
      "/users/88/comments/1/files/2/edit"
    assert Helpers.user_comment_file_path(:show, 123, 1, 2, []) ==
      "/users/123/comments/1/files/2"
    assert Helpers.user_comment_file_path(:show, 123, 1, 2) ==
      "/users/123/comments/1/files/2"
    assert Helpers.user_comment_file_path(:new, 88, 1, []) ==
      "/users/88/comments/1/files/new"
    assert Helpers.user_comment_file_path(:new, 88, 1) ==
      "/users/88/comments/1/files/new"
  end
  test "resources without block generates named routes for :index, :edit, :show, :new" do
    assert Helpers.file_path(:index, []) == "/files"
    assert Helpers.file_path(:index) == "/files"
    assert Helpers.file_path(:edit, 123, []) == "/files/123/edit"
    assert Helpers.file_path(:edit, 123) == "/files/123/edit"
    assert Helpers.file_path(:show, 123, []) == "/files/123"
    assert Helpers.file_path(:show, 123) == "/files/123"
    assert Helpers.file_path(:new, []) == "/files/new"
    assert Helpers.file_path(:new) == "/files/new"
  end
  # Scope without :as keeps the bare helper name but prefixes the path.
  test "scoped route helpers generated named routes with :path, and :alias options" do
    assert Helpers.message_path(:index, []) == "/admin/messages"
    assert Helpers.message_path(:index) == "/admin/messages"
    assert Helpers.message_path(:show, 1, []) == "/admin/messages/1"
    assert Helpers.message_path(:show, 1) == "/admin/messages/1"
  end
  # Scope with as: "admin" prefixes the helper name as well.
  test "scoped route helpers generated named routes with :path, :alias, and :helper options" do
    assert Helpers.admin_message_path(:index, []) == "/admin/new/messages"
    assert Helpers.admin_message_path(:index) == "/admin/new/messages"
    assert Helpers.admin_message_path(:show, 1, []) == "/admin/new/messages/1"
    assert Helpers.admin_message_path(:show, 1) == "/admin/new/messages/1"
  end
  test "helpers module generates a url helper" do
    assert Helpers.url("/foo/bar") == "http://example.com/foo/bar"
  end
end
| 36.583333 | 97 | 0.64895 |
73c75a22edd69f9a6756e3a80f5492d11d714e2d | 172 | ex | Elixir | lib/battle_city/tank/armor.ex | clszzyh/battle_city_core | 7a3aca9000b186382f1faf526ebce837a76ef86e | [
"MIT"
] | null | null | null | lib/battle_city/tank/armor.ex | clszzyh/battle_city_core | 7a3aca9000b186382f1faf526ebce837a76ef86e | [
"MIT"
] | 38 | 2020-12-17T12:40:44.000Z | 2022-03-31T02:05:59.000Z | lib/battle_city/tank/armor.ex | clszzyh/battle_city | 7a3aca9000b186382f1faf526ebce837a76ef86e | [
"MIT"
] | null | null | null | defmodule BattleCity.Tank.Armor do
  @moduledoc false
  # Tank preset: 4 hit points, worth 400 points, with the speed/level values
  # below. Option semantics are defined by BattleCity.Tank.Base.
  use BattleCity.Tank.Base,
    points: 400,
    health: 4,
    move_speed: 2,
    bullet_speed: 2,
    level: 4
end
| 15.636364 | 34 | 0.662791 |
73c764ff48fb7602473ce4b4ce834a9910bef52e | 17,180 | ex | Elixir | lib/gnat.ex | santif/nats.ex | 84277ae3ae5a7ea2a7e6e70e624c59683c72d0de | [
"MIT"
] | 102 | 2019-04-17T06:19:48.000Z | 2022-03-31T02:36:11.000Z | lib/gnat.ex | santif/nats.ex | 84277ae3ae5a7ea2a7e6e70e624c59683c72d0de | [
"MIT"
] | 29 | 2019-04-16T03:45:04.000Z | 2022-03-11T12:25:54.000Z | lib/gnat.ex | santif/nats.ex | 84277ae3ae5a7ea2a7e6e70e624c59683c72d0de | [
"MIT"
] | 17 | 2019-05-07T19:33:17.000Z | 2022-03-24T18:27:25.000Z | # State transitions:
# :waiting_for_message => receive PING, send PONG => :waiting_for_message
# :waiting_for_message => receive MSG... -> :waiting_for_message
defmodule Gnat do
use GenServer
require Logger
alias Gnat.{Command, Parsec}
  @type t :: GenServer.server()

  # Header pairs as sent/received on the NATS wire.
  @type headers :: [{binary(), iodata()}]

  # A message received from NATS will be delivered to your process in this form.
  # Please note that the `:reply_to` and `:headers` keys are optional.
  # They will only be present if the message was received from the NATS server with
  # headers or a reply_to topic
  @type message :: %{
    gnat: t(),
    topic: String.t(),
    body: String.t(),
    sid: non_neg_integer(),
    reply_to: String.t(),
    headers: headers()
  }

  @type sent_message :: {:msg, message()}

  # Merged under caller-supplied settings in init/1. :binary mode is required
  # because the parser pattern-matches on binaries.
  @default_connection_settings %{
    host: 'localhost',
    port: 4222,
    tcp_opts: [:binary],
    connection_timeout: 3_000,
    ssl_opts: [],
    tls: false,
  }

  # SID 0 is reserved for the connection-wide wildcard inbox subscription used
  # by request/4 (see create_request_subscription/1 and process_message).
  @request_sid 0
@doc """
Starts a connection to a nats broker
```
{:ok, gnat} = Gnat.start_link(%{host: '127.0.0.1', port: 4222})
# if the server requires TLS you can start a connection with:
{:ok, gnat} = Gnat.start_link(%{host: '127.0.0.1', port: 4222, tls: true})
# if the server requires TLS and a client certificate you can start a connection with:
{:ok, gnat} = Gnat.start_link(%{tls: true, ssl_opts: [certfile: "client-cert.pem", keyfile: "client-key.pem"]})
```
You can also pass arbitrary SSL or TCP options in the `tcp_opts` and `ssl_opts` keys.
If you pass custom TCP options please include `:binary`. Gnat uses binary matching to parse messages.
The final `opts` argument will be passed to the `GenServer.start_link` call so you can pass things like `[name: :gnat_connection]`.
"""
  @spec start_link(map(), keyword()) :: GenServer.on_start
  def start_link(connection_settings \\ %{}, opts \\ []) do
    # `opts` is forwarded verbatim to GenServer.start_link (e.g. name: registration).
    GenServer.start_link(__MODULE__, connection_settings, opts)
  end
@doc """
Gracefully shuts down a connection
```
{:ok, gnat} = Gnat.start_link()
:ok = Gnat.stop(gnat)
```
"""
  @spec stop(t()) :: :ok
  # Synchronous: the server closes its socket before replying (see handle_call(:stop, ...)).
  def stop(pid), do: GenServer.call(pid, :stop)
@doc """
Subscribe to a topic
Supported options:
* queue_group: a string that identifies which queue group you want to join
By default each subscriber will receive a copy of every message on the topic.
When a queue_group is supplied messages will be spread among the subscribers
in the same group. (see [nats queueing](https://nats.io/documentation/concepts/nats-queueing/))
The subscribed process will begin receiving messages with a structure of `t:sent_message/0`
```
{:ok, gnat} = Gnat.start_link()
{:ok, subscription} = Gnat.sub(gnat, self(), "topic")
receive do
{:msg, %{topic: "topic", body: body}} ->
IO.puts "Received: \#\{body\}"
end
```
"""
@spec sub(t(), pid(), String.t, keyword()) :: {:ok, non_neg_integer()} | {:ok, String.t} | {:error, String.t}
def sub(pid, subscriber, topic, opts \\ []) do
start = :erlang.monotonic_time()
result = GenServer.call(pid, {:sub, subscriber, topic, opts})
latency = :erlang.monotonic_time() - start
:telemetry.execute([:gnat, :sub], %{latency: latency}, %{topic: topic})
result
end
@doc """
Publish a message
```
{:ok, gnat} = Gnat.start_link()
:ok = Gnat.pub(gnat, "characters", "Ron Swanson")
```
If you want to provide a reply address to receive a response you can pass it as an option.
[See request-response pattern](http://nats.io/documentation/concepts/nats-req-rep/).
```
{:ok, gnat} = Gnat.start_link()
:ok = Gnat.pub(gnat, "characters", "Star Lord", reply_to: "me")
```
If you want to publish a message with headers you can pass the `:headers` key in the `opts` like this.
```
{:ok, gnat} = Gnat.start_link()
:ok = Gnat.pub(gnat, "listen", "Yo", headers: [{"foo", "bar"}])
```
Headers must be passed as a `t:headers()` value (a list of tuples).
Sending and parsing headers has more overhead than typical nats messages
(see [the Nats 2.2 release notes for details](https://docs.nats.io/whats_new_22#message-headers)),
so only use them when they are really valuable.
"""
@spec pub(t(), String.t, binary(), keyword()) :: :ok
def pub(pid, topic, message, opts \\ []) do
start = :erlang.monotonic_time()
opts = prepare_headers(opts)
result = GenServer.call(pid, {:pub, topic, message, opts})
latency = :erlang.monotonic_time() - start
:telemetry.execute([:gnat, :pub], %{latency: latency} , %{topic: topic})
result
end
@doc """
Send a request and listen for a response synchronously
Following the nats [request-response pattern](http://nats.io/documentation/concepts/nats-req-rep/) this
function generates a one-time topic to receive replies and then sends a message to the provided topic.
Supported options:
* receive_timeout: an integer number of milliseconds to wait for a response. Defaults to 60_000
* headers: a set of headers you want to send with the request (see `Gnat.pub/4`)
```
{:ok, gnat} = Gnat.start_link()
case Gnat.request(gnat, "i_can_haz_cheezburger", "plZZZZ?!?!?") do
{:ok, %{body: delicious_cheezburger}} -> :yum
{:error, :timeout} -> :sad_cat
end
```
"""
  @spec request(t(), String.t, binary(), keyword()) :: {:ok, message} | {:error, :timeout}
  def request(pid, topic, body, opts \\ []) do
    start = :erlang.monotonic_time()
    receive_timeout = Keyword.get(opts, :receive_timeout, 60_000)
    req = %{recipient: self(), body: body, topic: topic}
    # Serialize any caller-supplied headers before handing them to the connection.
    opts = prepare_headers(opts)
    req =
      case Keyword.get(opts, :headers) do
        nil -> req
        headers -> Map.put(req, :headers, headers)
      end
    # The connection replies with the one-time inbox topic it registered for us.
    {:ok, subscription} = GenServer.call(pid, {:request, req})
    response = receive do
      {:msg, %{topic: ^subscription}=msg} -> {:ok, msg}
    after receive_timeout ->
      {:error, :timeout}
    end
    # NOTE(review): if the reply arrives after the timeout it may still land in
    # the caller's mailbox before this unsub takes effect — confirm whether
    # callers need to tolerate stray {:msg, ...} messages.
    :ok = unsub(pid, subscription)
    latency = :erlang.monotonic_time() - start
    :telemetry.execute([:gnat, :request], %{latency: latency}, %{topic: topic})
    response
  end
@doc """
Unsubscribe from a topic
Supported options:
* max_messages: number of messages to be received before automatically unsubscribed
This correlates to the [UNSUB](http://nats.io/documentation/internals/nats-protocol/#UNSUB) command in the nats protocol.
By default the unsubscribe is affected immediately, but an optional `max_messages` value can be provided which will allow
`max_messages` to be received before affecting the unsubscribe.
This is especially useful for [request response](http://nats.io/documentation/concepts/nats-req-rep/) patterns.
```
{:ok, gnat} = Gnat.start_link()
{:ok, subscription} = Gnat.sub(gnat, self(), "my_inbox")
:ok = Gnat.unsub(gnat, subscription)
# OR
:ok = Gnat.unsub(gnat, subscription, max_messages: 2)
```
"""
@spec unsub(t(), non_neg_integer() | String.t, keyword()) :: :ok
def unsub(pid, sid, opts \\ []) do
start = :erlang.monotonic_time()
result = GenServer.call(pid, {:unsub, sid, opts})
:telemetry.execute([:gnat, :unsub], %{latency: :erlang.monotonic_time() - start})
result
end
@doc """
Ping the NATS server
This correlates to the [PING](http://nats.io/documentation/internals/nats-protocol/#PINGPONG) command in the NATS protocol.
If the NATS server responds with a PONG message this function will return `:ok`
```
{:ok, gnat} = Gnat.start_link()
:ok = Gnat.ping(gnat)
```
"""
  @deprecated "Pinging is handled internally by the connection, this functionality will be removed"
  def ping(pid) do
    GenServer.call(pid, {:ping, self()})
    # NOTE(review): this bare `receive` has no correlation tag, so any stray
    # :pong already in the caller's mailbox would satisfy it — acceptable for a
    # deprecated helper, but confirm before reusing this pattern.
    receive do
      :pong -> :ok
    after
      3_000 -> {:error, "No PONG response after 3 sec"}
    end
  end
  @doc "get the number of active subscriptions"
  @spec active_subscriptions(t()) :: {:ok, non_neg_integer()}
  # Counts entries in the connection's receivers map; this includes the
  # internal wildcard request subscription registered under SID 0.
  def active_subscriptions(pid) do
    GenServer.call(pid, :active_subscriptions)
  end
  @impl GenServer
  def init(connection_settings) do
    # Caller-supplied settings override the defaults.
    connection_settings = Map.merge(@default_connection_settings, connection_settings)
    case Gnat.Handshake.connect(connection_settings) do
      {:ok, socket} ->
        parser = Parsec.new
        state = %{socket: socket,
                  connection_settings: connection_settings,
                  next_sid: 1,
                  receivers: %{},
                  parser: parser,
                  request_receivers: %{},
                  request_inbox_prefix: "_INBOX.#{nuid()}."}
        # Subscribe once to "<prefix>*" so request/4 replies can be routed.
        state = create_request_subscription(state)
        {:ok, state}
      {:error, reason} ->
        # Abort startup; the supervisor decides whether to retry.
        {:stop, reason}
    end
  end
  @impl GenServer
  # Incoming TCP data: opportunistically drain up to 10 more queued packets,
  # feed each through the protocol parser, and dispatch parsed messages.
  def handle_info({:tcp, socket, data}, %{socket: socket}=state) do
    data_packets = receive_additional_tcp_data(socket, [data], 10)
    new_state = Enum.reduce(data_packets, state, fn(data, %{parser: parser}=state) ->
      {new_parser, messages} = Parsec.parse(parser, data)
      new_state = %{state | parser: new_parser}
      Enum.reduce(messages, new_state, &process_message/2)
    end)
    {:noreply, new_state}
  end
  # TLS traffic is handled identically to plain TCP.
  def handle_info({:ssl, socket, data}, state) do
    handle_info({:tcp, socket, data}, state)
  end
  # Either transport closing ends the connection process.
  def handle_info({:tcp_closed, _}, state) do
    {:stop, "connection closed", state}
  end
  def handle_info({:ssl_closed, _}, state) do
    {:stop, "connection closed", state}
  end
  def handle_info({:tcp_error, _, reason}, state) do
    {:stop, "tcp transport error #{inspect(reason)}", state}
  end
  # Anything else is logged and ignored so stray messages can't crash us.
  def handle_info(other, state) do
    Logger.error "#{__MODULE__} received unexpected message: #{inspect other}"
    {:noreply, state}
  end
  @impl GenServer
  # Graceful shutdown: close the socket, then stop normally.
  def handle_call(:stop, _from, state) do
    socket_close(state)
    {:stop, :normal, :ok, state}
  end
  # New subscription: send SUB with the next SID, record the receiver, bump SID.
  def handle_call({:sub, receiver, topic, opts}, _from, %{next_sid: sid}=state) do
    sub = Command.build(:sub, topic, sid, opts)
    :ok = socket_write(state, sub)
    next_state = add_subscription_to_state(state, sid, receiver) |> Map.put(:next_sid, sid + 1)
    {:reply, {:ok, sid}, next_state}
  end
  # Publish: batch up to 10 additional queued :pub calls into one socket write,
  # then reply :ok to every batched caller. (receive_additional_pubs is defined
  # later in this file.)
  def handle_call({:pub, topic, message, opts}, from, state) do
    commands = [Command.build(:pub, topic, message, opts)]
    froms = [from]
    {commands, froms} = receive_additional_pubs(commands, froms, 10)
    :ok = socket_write(state, commands)
    Enum.each(froms, fn(from) -> GenServer.reply(from, :ok) end)
    {:noreply, state}
  end
  # Request: allocate a unique inbox topic, remember who gets the reply, and
  # publish with reply_to pointing at that inbox. Replies arrive via the
  # wildcard subscription created in init/1.
  def handle_call({:request, request}, _from, state) do
    inbox = make_new_inbox(state)
    new_state = %{state | request_receivers: Map.put(state.request_receivers, inbox, request.recipient)}
    pub =
      case request do
        %{headers: headers} ->
          Command.build(:pub, request.topic, request.body, headers: headers, reply_to: inbox)
        _ ->
          Command.build(:pub, request.topic, request.body, reply_to: inbox)
      end
    :ok = socket_write(new_state, [pub])
    {:reply, {:ok, inbox}, new_state}
  end
  # When the SID is a string, it's a topic, which is used as a key in the request receiver map.
  # No UNSUB is sent here — the wildcard request subscription stays open.
  def handle_call({:unsub, topic, _opts}, _from, state) when is_binary(topic) do
    if Map.has_key?(state.request_receivers, topic) do
      request_receivers = Map.delete(state.request_receivers, topic)
      new_state = %{state | request_receivers: request_receivers}
      {:reply, :ok, new_state}
    else
      {:reply, :ok, state}
    end
  end
  # Numeric SID: send UNSUB and drop (or cap, via max_messages) the receiver.
  def handle_call({:unsub, sid, opts}, _from, %{receivers: receivers}=state) do
    case Map.has_key?(receivers, sid) do
      false -> {:reply, :ok, state}
      true ->
        command = Command.build(:unsub, sid, opts)
        :ok = socket_write(state, command)
        state = cleanup_subscription_from_state(state, sid, opts)
        {:reply, :ok, state}
    end
  end
  # Deprecated ping support: remember who to notify when PONG arrives.
  def handle_call({:ping, pinger}, _from, state) do
    :ok = socket_write(state, "PING\r\n")
    {:reply, :ok, Map.put(state, :pinger, pinger)}
  end
  def handle_call(:active_subscriptions, _from, state) do
    active_subscriptions = Enum.count(state.receivers)
    {:reply, {:ok, active_subscriptions}, state}
  end
defp create_request_subscription(%{request_inbox_prefix: request_inbox_prefix}=state) do
# Example: "_INBOX.Jhf7AcTGP3x4dAV9.*"
wildcard_inbox_topic = request_inbox_prefix <> "*"
sub = Command.build(:sub, wildcard_inbox_topic, @request_sid, [])
:ok = socket_write(state, [sub])
add_subscription_to_state(state, @request_sid, self())
end
defp make_new_inbox(%{request_inbox_prefix: prefix}), do: prefix <> nuid()
defp nuid(), do: :crypto.strong_rand_bytes(12) |> Base.encode64
defp prepare_headers(opts) do
if Keyword.has_key?(opts, :headers) do
headers = :cow_http.headers(Keyword.get(opts, :headers))
Keyword.put(opts, :headers, headers)
else
opts
end
end
defp socket_close(%{socket: socket, connection_settings: %{tls: true}}), do: :ssl.close(socket)
defp socket_close(%{socket: socket}), do: :gen_tcp.close(socket)
defp socket_write(%{socket: socket, connection_settings: %{tls: true}}, iodata) do
:ssl.send(socket, iodata)
end
defp socket_write(%{socket: socket}, iodata), do: :gen_tcp.send(socket, iodata)
defp add_subscription_to_state(%{receivers: receivers}=state, sid, pid) do
receivers = Map.put(receivers, sid, %{recipient: pid, unsub_after: :infinity})
%{state | receivers: receivers}
end
defp cleanup_subscription_from_state(%{receivers: receivers}=state, sid, []) do
receivers = Map.delete(receivers, sid)
%{state | receivers: receivers}
end
defp cleanup_subscription_from_state(%{receivers: receivers}=state, sid, [max_messages: n]) do
receivers = put_in(receivers, [sid, :unsub_after], n)
%{state | receivers: receivers}
end
defp process_message({:msg, topic, @request_sid, reply_to, body}, state) do
if Map.has_key?(state.request_receivers, topic) do
send state.request_receivers[topic], {:msg, %{topic: topic, body: body, reply_to: reply_to, gnat: self()}}
state
else
Logger.error "#{__MODULE__} got a response for a request, but that is no longer registered"
state
end
end
defp process_message({:msg, topic, sid, reply_to, body}, state) do
unless is_nil(state.receivers[sid]) do
:telemetry.execute([:gnat, :message_received], %{count: 1}, %{topic: topic})
send state.receivers[sid].recipient, {:msg, %{topic: topic, body: body, reply_to: reply_to, sid: sid, gnat: self()}}
update_subscriptions_after_delivering_message(state, sid)
else
Logger.error "#{__MODULE__} got message for sid #{sid}, but that is no longer registered"
state
end
end
defp process_message({:hmsg, topic, @request_sid, reply_to, headers, body}, state) do
if Map.has_key?(state.request_receivers, topic) do
send state.request_receivers[topic], {:msg, %{topic: topic, body: body, reply_to: reply_to, gnat: self(), headers: headers}}
state
else
Logger.error "#{__MODULE__} got a response for a request, but that is no longer registered"
state
end
end
defp process_message({:hmsg, topic, sid, reply_to, headers, body}, state) do
unless is_nil(state.receivers[sid]) do
:telemetry.execute([:gnat, :message_received], %{count: 1}, %{topic: topic})
send state.receivers[sid].recipient, {:msg, %{topic: topic, body: body, reply_to: reply_to, sid: sid, gnat: self(), headers: headers}}
update_subscriptions_after_delivering_message(state, sid)
else
Logger.error "#{__MODULE__} got message for sid #{sid}, but that is no longer registered"
state
end
end
defp process_message(:ping, state) do
socket_write(state, "PONG\r\n")
state
end
defp process_message(:pong, state) do
send state.pinger, :pong
state
end
defp process_message({:error, message}, state) do
:error_logger.error_report([
type: :gnat_error_from_broker,
message: message,
])
state
end
defp receive_additional_pubs(commands, froms, 0), do: {commands, froms}
defp receive_additional_pubs(commands, froms, how_many_more) do
receive do
{:"$gen_call", from, {:pub, topic, message, opts}} ->
commands = [Command.build(:pub, topic, message, opts) | commands]
froms = [from | froms]
receive_additional_pubs(commands, froms, how_many_more - 1)
after
0 -> {commands, froms}
end
end
defp receive_additional_tcp_data(_socket, packets, 0), do: Enum.reverse(packets)
defp receive_additional_tcp_data(socket, packets, n) do
receive do
{:tcp, ^socket, data} ->
receive_additional_tcp_data(socket, [data | packets], n - 1)
after
0 -> Enum.reverse(packets)
end
end
defp update_subscriptions_after_delivering_message(%{receivers: receivers}=state, sid) do
receivers = case get_in(receivers, [sid, :unsub_after]) do
:infinity -> receivers
1 -> Map.delete(receivers, sid)
n -> put_in(receivers, [sid, :unsub_after], n - 1)
end
%{state | receivers: receivers}
end
end
| 36.553191 | 140 | 0.663097 |
73c77aaf7878d965f5263e520b750799aff0417d | 831 | ex | Elixir | apps/tai/lib/tai/orders/submissions/sell_limit_fok.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 276 | 2018-01-16T06:36:06.000Z | 2021-03-20T21:48:01.000Z | apps/tai/lib/tai/orders/submissions/sell_limit_fok.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 73 | 2018-10-05T18:45:06.000Z | 2021-02-08T05:46:33.000Z | apps/tai/lib/tai/orders/submissions/sell_limit_fok.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 43 | 2018-06-09T09:54:51.000Z | 2021-03-07T07:35:17.000Z | defmodule Tai.Orders.Submissions.SellLimitFok do
alias __MODULE__
@type product_type :: Tai.Venues.Product.type()
@type callback :: Tai.Orders.OrderCallback.callback()
@type t :: %SellLimitFok{
venue: String.t(),
credential: String.t(),
venue_product_symbol: String.t(),
product_symbol: String.t(),
product_type: product_type,
price: Decimal.t(),
qty: Decimal.t(),
close: boolean | nil,
order_updated_callback: callback | nil
}
@enforce_keys ~w[
venue
credential
venue_product_symbol
product_symbol
product_type
price
qty
]a
defstruct ~w[
venue
credential
venue_product_symbol
product_symbol
product_type
price
qty
close
order_updated_callback
]a
end
| 21.307692 | 55 | 0.624549 |
73c77d411121fba75e92324eab6bf944b658715d | 418 | exs | Elixir | RAEM/raem/priv/repo/migrations/20180402003623_create_curso.exs | pedromcorreia/Rumo-ao-ensino-superior | be0b9bf417604bdf8a349fde8a8a1c0aaf4c4cdb | [
"MIT"
] | null | null | null | RAEM/raem/priv/repo/migrations/20180402003623_create_curso.exs | pedromcorreia/Rumo-ao-ensino-superior | be0b9bf417604bdf8a349fde8a8a1c0aaf4c4cdb | [
"MIT"
] | null | null | null | RAEM/raem/priv/repo/migrations/20180402003623_create_curso.exs | pedromcorreia/Rumo-ao-ensino-superior | be0b9bf417604bdf8a349fde8a8a1c0aaf4c4cdb | [
"MIT"
] | 2 | 2018-02-24T19:56:21.000Z | 2018-02-26T00:16:41.000Z | defmodule Raem.Repo.Migrations.CreateCurso do
use Ecto.Migration
def change do
create table(:cursos) do
add :cod_curso, :integer
add :cod_area, :integer
add :cod_ies, :integer
add :area_enquadramento, :string
add :modalidade_ensino, :string
add :cod_municipio, :integer
add :municipio_curso, :string
add :sigla_uf, :string
timestamps()
end
end
end
| 22 | 45 | 0.660287 |
73c780f9b1ea9526ede9613bcb5104d4f1296213 | 373 | ex | Elixir | lib/salty/supervisor.ex | benknowles/libsalty | 38a10812865cb855bfa46cf266bb68d51a296f39 | [
"Apache-2.0"
] | 23 | 2017-07-04T19:29:43.000Z | 2021-02-16T19:44:38.000Z | lib/salty/supervisor.ex | benknowles/libsalty | 38a10812865cb855bfa46cf266bb68d51a296f39 | [
"Apache-2.0"
] | 16 | 2017-08-13T15:31:25.000Z | 2019-06-19T14:44:13.000Z | lib/salty/supervisor.ex | benknowles/libsalty | 38a10812865cb855bfa46cf266bb68d51a296f39 | [
"Apache-2.0"
] | 19 | 2017-08-10T19:01:49.000Z | 2021-06-20T01:34:59.000Z | defmodule Salty.Supervisor do
@moduledoc """
Salty.Supervisor is the root supervisor of GenServer processes in libsalty.
"""
use Supervisor
def start_link do
Supervisor.start_link(__MODULE__, [])
end
def init([]) do
children = [
# worker(Salty.Server, [], restart: :transient)
]
supervise(children, strategy: :one_for_one)
end
end
| 18.65 | 77 | 0.675603 |
73c784724d211b93bee63e9c574cf121312cf1a6 | 3,388 | ex | Elixir | lib/parking_tweets/garage_map.ex | paulswartz/parking_tweets | 9f2e0143348a4c4e1a26dc246d02432f7caaed98 | [
"MIT"
] | 1 | 2020-12-17T23:22:03.000Z | 2020-12-17T23:22:03.000Z | lib/parking_tweets/garage_map.ex | paulswartz/parking_tweets | 9f2e0143348a4c4e1a26dc246d02432f7caaed98 | [
"MIT"
] | 27 | 2021-02-02T22:11:05.000Z | 2022-03-21T17:17:16.000Z | lib/parking_tweets/garage_map.ex | paulswartz/parking_tweets | 9f2e0143348a4c4e1a26dc246d02432f7caaed98 | [
"MIT"
] | null | null | null | defmodule ParkingTweets.GarageMap do
@moduledoc """
Responsible for maintaing a map of garages and and their current state.
"""
alias ParkingTweets.{Garage, IdMapSet}
defstruct garages: IdMapSet.new(&Garage.id/1),
alternates: %{},
facility_to_stop_id: %{},
stop_id_to_stop_name: %{}
def new do
%__MODULE__{}
end
def new(opts) do
alternates = build_alternate_map(Keyword.get(opts, :alternates))
%__MODULE__{alternates: alternates}
end
def empty?(%__MODULE__{garages: garages}) do
IdMapSet.size(garages) == 0
end
def update_multiple(%__MODULE__{} = map, events) do
Enum.reduce(events, map, fn event, map -> update(map, event) end)
end
def update(%__MODULE__{} = map, %{event: "reset", data: data}) do
reset_map = %__MODULE__{alternates: map.alternates}
data
|> Jason.decode!()
|> Enum.reduce(reset_map, &put_json(&2, &1))
end
def update(%__MODULE__{} = map, %{event: update, data: data})
when update in ["add", "update"] do
data |> Jason.decode!() |> (&put_json(map, &1)).()
end
def update(%__MODULE__{} = map, %{event: "remove"}) do
map
end
defp build_alternate_map(nil) do
%{}
end
defp build_alternate_map(alternates) do
# `alternates` is a list of lists of garage IDs. In a given list of IDs,
# any of the garages can be substituted with each other.
Enum.reduce(alternates, %{}, fn ids, acc ->
set = MapSet.new(ids)
Enum.reduce(ids, acc, fn id, acc ->
without_current = MapSet.delete(set, id)
Map.update(acc, id, without_current, &MapSet.union(&1, without_current))
end)
end)
end
defp put_json(map, %{"type" => "facility"} = json) do
%{
"id" => facility_id,
"relationships" => %{
"stop" => %{
"data" => %{
"id" => stop_id
}
}
}
} = json
put_in(map.facility_to_stop_id[facility_id], stop_id)
end
defp put_json(map, %{"type" => "stop"} = json) do
%{
"id" => stop_id,
"attributes" => %{
"name" => stop_name
}
} = json
put_in(map.stop_id_to_stop_name[stop_id], stop_name)
end
defp put_json(map, json) do
garage = Garage.from_json_api(json)
stop_id = Map.get(map.facility_to_stop_id, garage.id)
stop_name = Map.get(map.stop_id_to_stop_name, stop_id)
garage = Garage.put_name(garage, stop_name)
put(map, garage)
end
@doc "Insert a garage directly"
def put(%__MODULE__{} = map, %Garage{} = garage) do
%{map | garages: IdMapSet.put(map.garages, garage)}
end
def difference(%__MODULE__{} = garage_map_1, %__MODULE__{} = garage_map_2) do
IdMapSet.difference_by(
garage_map_1.garages,
garage_map_2.garages,
&Garage.utilization_percent_or_status/1
)
end
def with_alternates(%__MODULE__{} = map) do
for garage <- map.garages do
case calculate_alternates(map, garage) do
[] ->
garage
alternates ->
Garage.put_alternates(garage, alternates)
end
end
end
defp calculate_alternates(map, garage) do
for alternate_id <- Map.get(map.alternates, garage.id, []),
%Garage{} = alternate_garage <- [IdMapSet.get(map.garages, alternate_id)],
Garage.utilization_percent(alternate_garage) < 90 do
alternate_garage
end
end
end
| 26.061538 | 82 | 0.622491 |
73c78c1011f31f9adb4840bfbc6fde83169819ab | 1,150 | ex | Elixir | other-options/helloplug.ex | hotpyn/plug-from-bottom-up | e5c08e185f4b4c2c1731d5a0ce2efe80828e1476 | [
"MIT"
] | null | null | null | other-options/helloplug.ex | hotpyn/plug-from-bottom-up | e5c08e185f4b4c2c1731d5a0ce2efe80828e1476 | [
"MIT"
] | null | null | null | other-options/helloplug.ex | hotpyn/plug-from-bottom-up | e5c08e185f4b4c2c1731d5a0ce2efe80828e1476 | [
"MIT"
] | null | null | null | defmodule Helloplug do
def init(default_opts) do
IO.puts "starting up Helloplug..."
default_opts
end
def call(conn, _opts) do
IO.puts "here again"
route(conn.method, conn.path_info, conn)
end
def route("GET", ["hello"], conn) do
# this route is for /hello
conn |> Plug.Conn.send_resp(200, "Hello, world!")
end
def route("GET", ["users", user_id], conn) do
# this route is for /users/<user_id>
conn |> Plug.Conn.send_resp(200, "You requested user #{user_id}")
end
def route("POST", _path, conn) do
{:ok ,body,_} = Plug.Conn.read_body(conn)
body=body |> Plug.Conn.Query.decode
IO.inspect body
value=body["data"]
{:ok, file} = File.open "save", [:append]
IO.puts file, value
File.close file
conn |> Plug.Conn.send_resp(200, "POST #{value}")
end
def route("GET", _path, conn) do
conn=Plug.Conn.fetch_query_params(conn)
value=conn.query_params["data"]
#IO.inspect conn.query_params["data"]
conn |> Plug.Conn.send_resp(200, "GET #{value}")
end
def route(_method, _path, conn) do
# this route is called if no other routes match
conn |> Plug.Conn.send_resp(404, "Couldn't find that page, sorry!")
end
end
| 24.468085 | 69 | 0.683478 |
73c78cf75a8d712333e29c0d78e2c31b50993346 | 2,410 | ex | Elixir | lib/entry.ex | ianatha/ledgerex | 7936bbe3823fb4da5b2d3767cf2a365a00281cfe | [
"Apache-2.0"
] | 4 | 2020-02-17T05:25:36.000Z | 2021-02-26T13:20:46.000Z | lib/entry.ex | ianatha/ledgerex | 7936bbe3823fb4da5b2d3767cf2a365a00281cfe | [
"Apache-2.0"
] | null | null | null | lib/entry.ex | ianatha/ledgerex | 7936bbe3823fb4da5b2d3767cf2a365a00281cfe | [
"Apache-2.0"
] | null | null | null | defmodule Ledger.Entry do
@moduledoc false
defstruct date: nil,
date_alternative: nil,
status: nil,
payee: nil,
tags: [],
entries: []
@line_length 55
def amount_to_str([cur, amt]) do
case cur do
"$" -> "#{cur}#{amt}"
_ -> "#{cur} #{amt}"
end
end
defp items_to_string(items, indent) do
items
|> Enum.map(fn
[{:account_name, account}, {:amount, amount}, [tag_k, tag_v]] ->
pre = "#{indent}#{account}"
padded_amount =
amount |> amount_to_str |> String.pad_leading(@line_length - String.length(pre))
"#{pre}#{padded_amount} ; #{tag_k}: #{tag_v}"
[account_name: account, amount: amount, balance_assertion: balance_assertion] ->
pre = "#{indent}#{account}"
amount_with_assert =
(amount |> amount_to_str) <> " = " <> (balance_assertion |> amount_to_str)
padded_amount = amount_with_assert |> String.pad_leading(@line_length - String.length(pre))
"#{pre}#{padded_amount}"
[account_name: account, amount: amount] ->
pre = "#{indent}#{account}"
padded_amount =
amount |> amount_to_str |> String.pad_leading(@line_length - String.length(pre))
"#{pre}#{padded_amount}"
[account_name: account, balance_assertion: balance_assertion] ->
pre = "#{indent}#{account}"
padded_assert =
(" = " <> (balance_assertion |> amount_to_str)) |> String.pad_leading(@line_length - String.length(pre))
"#{pre}#{padded_assert}"
[account_name: account] ->
"#{indent}#{account}"
end)
|> Enum.join("\n")
end
defp tags_to_string(tags, indent, joiner \\ "\n") do
tags
|> Enum.map(fn [k, v] ->
"#{indent}; #{k}: #{v}"
end)
|> Enum.join(joiner)
end
def to_string(r) do
alt_date = if r.date_alternative != nil do
"#{r.date}=#{r.date_alternative}"
else
"#{r.date}"
end
status = if r.status != nil do
"#{r.status}"
else
""
end
([
[alt_date, status, r.payee]|> Enum.filter(fn x -> x != nil end)|> Enum.filter(fn x -> String.length(x) > 0 end) |> Enum.join(" "),
"#{tags_to_string(r.tags, " ")}",
"#{items_to_string(r.entries, " ")}"
]
|> Enum.filter(fn x -> String.length(x) != 0 end)) ++ [""]
|> Enum.join("\n")
end
end
| 25.638298 | 136 | 0.548548 |
73c7b7fe86067a05b6c043839b17da26aa404582 | 1,660 | ex | Elixir | lib/slack_bot/plugin_worker.ex | mtgto/Elixir-SlackBot | 1598bcfffa062eb691e5b809fadbe58a4678a005 | [
"MIT"
] | 2 | 2015-12-11T18:51:49.000Z | 2016-01-08T20:42:05.000Z | lib/slack_bot/plugin_worker.ex | mtgto/Elixir-SlackBot | 1598bcfffa062eb691e5b809fadbe58a4678a005 | [
"MIT"
] | null | null | null | lib/slack_bot/plugin_worker.ex | mtgto/Elixir-SlackBot | 1598bcfffa062eb691e5b809fadbe58a4678a005 | [
"MIT"
] | null | null | null | defmodule SlackBot.PluginWorker do
@moduledoc """
GenServer which own a plugin.
"""
defmodule State do
defstruct module: nil, state: nil
end
use GenServer
require Logger
def start_link({module, config}) do
:ok = Logger.debug "SlackBot.PluginWorker.start_link"
name = name_for_module(module)
GenServer.start_link(__MODULE__, {module, config}, name: name)
end
@spec init({Atom.t, any}) :: {:ok, State.t}
def init({module, config}) do
Logger.debug "SlackBot.PluginWorker.init(#{inspect module}, #{inspect config})"
{:ok, state} = module.init(config)
{:ok, %State{module: module, state: state}}
end
@spec name_for_module(Atom.t) :: Atom.t
def name_for_module(module) do
String.to_atom("#{__MODULE__}.#{module}")
end
def handle_cast(args, state = %State{module: module, state: module_state}) do
Logger.debug "SlackBot.PluginWorker.handle_cast(#{inspect args}, #{inspect state})"
new_module_state = module.message(args, module_state)
{:noreply, %State{state | state: new_module_state}}
end
def handle_call(args, _from, state = %State{module: module, state: module_state}) do
Logger.debug "SlackBot.PluginWorker.handle_call(#{inspect args}, #{inspect state})"
{reply, new_module_state} = module.message(args, module_state)
{:reply, {:ok, reply}, %State{state | state: new_module_state}}
end
def terminate(reason, state) do
Logger.info "SlackBot.PluginWorker.terminate(#{inspect reason}, #{inspect state})"
end
def handle_info(msg, state) do
Logger.info "SlackBot.PluginWorker.handle_info(#{inspect msg}, #{inspect state})"
{:ok, state}
end
end
| 31.923077 | 87 | 0.696386 |
73c7d218ef065f80f9a4b1c563cecea5f0097178 | 455 | exs | Elixir | test/lucidboard_web/views/error_view_test.exs | borodark/lucidboard | 487a9a54053977ea9704121d4a6a4343012d4421 | [
"MIT"
] | 86 | 2019-01-07T20:49:04.000Z | 2021-10-02T21:15:42.000Z | test/lucidboard_web/views/error_view_test.exs | borodark/lucidboard | 487a9a54053977ea9704121d4a6a4343012d4421 | [
"MIT"
] | 26 | 2019-03-27T12:06:52.000Z | 2020-09-20T05:21:09.000Z | test/lucidboard_web/views/error_view_test.exs | borodark/lucidboard | 487a9a54053977ea9704121d4a6a4343012d4421 | [
"MIT"
] | 19 | 2015-01-06T19:02:49.000Z | 2020-05-25T08:54:00.000Z | defmodule LucidboardWeb.ErrorViewTest do
use LucidboardWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(LucidboardWeb.ErrorView, "404.html", []) ==
"Not Found"
end
test "renders 500.html" do
assert render_to_string(LucidboardWeb.ErrorView, "500.html", []) ==
"Internal Server Error"
end
end
| 26.764706 | 71 | 0.698901 |
73c7d9048811a0828bb56f0b665371807cb3899d | 3,619 | ex | Elixir | clients/vision/lib/google_api/vision/v1/api/locations.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/api/locations.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/api/locations.ex | leandrocp/elixir-google-api | a86e46907f396d40aeff8668c3bd81662f44c71e | [
"Apache-2.0"
] | 1 | 2020-11-10T16:58:27.000Z | 2020-11-10T16:58:27.000Z | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Vision.V1.Api.Locations do
@moduledoc """
API calls for all endpoints tagged `Locations`.
"""
alias GoogleApi.Vision.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@doc """
Gets the latest state of a long-running operation. Clients can use this method to poll the operation result at intervals as recommended by the API service.
## Parameters
- connection (GoogleApi.Vision.V1.Connection): Connection to server
- name (String.t): The name of the operation resource.
- opts (KeywordList): [optional] Optional parameters
- :access_token (String.t): OAuth access token.
- :key (String.t): API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
- :upload_protocol (String.t): Upload protocol for media (e.g. \"raw\", \"multipart\").
- :quotaUser (String.t): Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
- :prettyPrint (boolean()): Returns response with indentations and line breaks.
- :fields (String.t): Selector specifying which fields to include in a partial response.
- :uploadType (String.t): Legacy upload protocol for media (e.g. \"media\", \"multipart\").
- :callback (String.t): JSONP
- :oauth_token (String.t): OAuth 2.0 token for the current user.
- :$.xgafv (String.t): V1 error format.
- :alt (String.t): Data format for response.
- :filter (String.t): The standard list filter.
- :pageToken (String.t): The standard list page token.
- :pageSize (integer()): The standard list page size.
## Returns
{:ok, %GoogleApi.Vision.V1.Model.Operation{}} on success
{:error, info} on failure
"""
@spec vision_locations_operations_get(Tesla.Env.client(), String.t(), keyword()) ::
{:ok, GoogleApi.Vision.V1.Model.Operation.t()} | {:error, Tesla.Env.t()}
def vision_locations_operations_get(connection, name, opts \\ []) do
optional_params = %{
:access_token => :query,
:key => :query,
:upload_protocol => :query,
:quotaUser => :query,
:prettyPrint => :query,
:fields => :query,
:uploadType => :query,
:callback => :query,
:oauth_token => :query,
:"$.xgafv" => :query,
:alt => :query,
:filter => :query,
:pageToken => :query,
:pageSize => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v1/{+name}", %{
"name" => URI.encode_www_form(name)
})
|> Request.add_optional_params(optional_params, opts)
connection
|> Connection.execute(request)
|> Response.decode(struct: %GoogleApi.Vision.V1.Model.Operation{})
end
end
| 41.125 | 179 | 0.682233 |
73c83a2c56055d55ea869554a2b792611361c88f | 63 | ex | Elixir | lib/booking_web/views/layout_view.ex | mattiaslundberg/booking | 469d1469f306b2ab62ce1ee971a825101af6fc7e | [
"MIT"
] | null | null | null | lib/booking_web/views/layout_view.ex | mattiaslundberg/booking | 469d1469f306b2ab62ce1ee971a825101af6fc7e | [
"MIT"
] | 2 | 2021-03-10T16:49:07.000Z | 2021-05-11T12:50:22.000Z | lib/booking_web/views/layout_view.ex | mattiaslundberg/booking | 469d1469f306b2ab62ce1ee971a825101af6fc7e | [
"MIT"
] | null | null | null | defmodule BookingWeb.LayoutView do
use BookingWeb, :view
end
| 15.75 | 34 | 0.809524 |
73c86ee9b7bc52cacfa633f37b5469e1fa1f362c | 153 | exs | Elixir | app/priv/repo/migrations/20200522135342_add_slug_uniques.exs | nathanjohnson320/noodl | 2e449aab15b54fc5a1dc45ebf4b79e7b64b7c967 | [
"MIT"
] | 1 | 2021-01-20T20:00:50.000Z | 2021-01-20T20:00:50.000Z | app/priv/repo/migrations/20200522135342_add_slug_uniques.exs | nathanjohnson320/noodl | 2e449aab15b54fc5a1dc45ebf4b79e7b64b7c967 | [
"MIT"
] | null | null | null | app/priv/repo/migrations/20200522135342_add_slug_uniques.exs | nathanjohnson320/noodl | 2e449aab15b54fc5a1dc45ebf4b79e7b64b7c967 | [
"MIT"
] | null | null | null | defmodule Noodl.Repo.Migrations.AddSlugUniques do
use Ecto.Migration
def change do
create unique_index(:sessions, [:event_id, :slug])
end
end
| 19.125 | 54 | 0.751634 |
73c87a9e07747b185c25ada8752a77a5ae1fa77e | 10,324 | exs | Elixir | apps/re/priv/repo/seeds.exs | caspg/backend | 34df9dc14ab8ed75de4578fefa2e087580c7e867 | [
"MIT"
] | 1 | 2021-01-19T05:01:15.000Z | 2021-01-19T05:01:15.000Z | apps/re/priv/repo/seeds.exs | caspg/backend | 34df9dc14ab8ed75de4578fefa2e087580c7e867 | [
"MIT"
] | null | null | null | apps/re/priv/repo/seeds.exs | caspg/backend | 34df9dc14ab8ed75de4578fefa2e087580c7e867 | [
"MIT"
] | null | null | null | alias Re.{
Address,
Calendars.TourAppointment,
Development,
Favorite,
Image,
Interest,
InterestType,
Interests.ContactRequest,
Listing,
Listings.PriceHistory,
Listings.StatusHistory,
ListingTag,
PriceSuggestions.Request,
Repo,
Statistics.InPersonVisit,
Statistics.ListingVisualization,
Statistics.TourVisualization,
Tag,
Unit,
User
}
Repo.delete_all(Image)
Repo.delete_all(InPersonVisit)
Repo.delete_all(Interest)
Repo.delete_all(Favorite)
Repo.delete_all(ListingTag)
Repo.delete_all(ListingVisualization)
Repo.delete_all(PriceHistory)
Repo.delete_all(StatusHistory)
Repo.delete_all(TourAppointment)
Repo.delete_all(TourVisualization)
Repo.delete_all(Unit)
Repo.delete_all(Listing)
Repo.delete_all(Request)
Repo.delete_all(Address)
Repo.delete_all(ContactRequest)
Repo.delete_all(User)
Repo.delete_all(Development)
Repo.delete_all(Tag)
{:ok, admin1} =
Repo.insert(%User{
name: "Admin 1",
email: "[email protected]",
phone: "11111111111",
role: "admin"
})
{:ok, admin2} =
Repo.insert(%User{
name: "Admin 2",
email: "[email protected]",
phone: "22222222222",
role: "admin"
})
{:ok, user1} =
Repo.insert(%User{
name: "User 1",
email: "[email protected]",
phone: "111111111",
role: "user"
})
{:ok, user2} =
Repo.insert(%User{
name: "User 2",
email: "[email protected]",
phone: "222222222",
role: "user"
})
{:ok, user3} =
Repo.insert(%User{
name: "User 3",
email: "[email protected]",
phone: "333333333",
role: "user"
})
{:ok, user4} =
Repo.insert(%User{
name: "User 4",
email: "[email protected]",
phone: "4444444444",
role: "user"
})
{:ok, address1} =
Repo.insert(%Address{
street: "Test Street 1",
street_number: "1",
neighborhood: "Downtown",
city: "Test City 1",
state: "ST",
postal_code: "11111-111",
lat: -10.101,
lng: -10.101
})
{:ok, address2} =
Repo.insert(%Address{
street: "Test Street 2",
street_number: "2",
neighborhood: "Downtown",
city: "Test City 2",
state: "ST",
postal_code: "22222-222",
lat: -20.20202020202,
lng: -20.20202020202
})
{:ok, address3} =
Repo.insert(%Address{
street: "Test Street 3",
street_number: "3",
neighborhood: "Downtown",
city: "Test City 3",
state: "ST",
postal_code: "33333-333",
lat: -15.101,
lng: -5.101
})
{:ok, address4} =
Repo.insert(%Address{
street: "Test Street 4",
street_number: "4",
neighborhood: "Downtown",
city: "Test City 4",
state: "ST",
postal_code: "44444-444",
lat: -25.20202020202,
lng: -15.20202020202
})
{:ok, address5} =
Repo.insert(%Address{
street: "Test Street 5",
street_number: "5",
neighborhood: "Downtown",
city: "Test City 5",
state: "ST",
postal_code: "55555-555",
lat: -35.101,
lng: -25.101
})
{:ok, address6} =
Repo.insert(%Address{
street: "Test Street 6",
street_number: "6",
neighborhood: "Downtown",
city: "Test City 6",
state: "ST",
postal_code: "66666-666",
lat: -45.20202020202,
lng: -35.20202020202
})
{:ok, image1} =
Repo.insert(%Image{
filename: "axetblju0i3keovz87ab.jpg",
position: 1,
is_active: true
})
{:ok, image2} =
Repo.insert(%Image{
filename: "cz9ytkthhdmd0f9mt2wy.jpg",
position: 2,
is_active: true
})
{:ok, image3} =
Repo.insert(%Image{
filename: "u6fy4vpnjqff7jjxcg27.jp",
position: 3,
is_active: true
})
{:ok, image4} =
Repo.insert(%Image{
filename: "u6fy4vpnjqff7jjxcg27.jp",
position: 3,
is_active: true
})
{:ok, listing1} =
Repo.insert(%Listing{
uuid: UUID.uuid4(),
type: "Apartamento",
description: "A description about the listing.",
floor: "1",
price: 1_000_000,
area: 100,
rooms: 2,
bathrooms: 2,
garage_spots: 1,
score: 3,
images: [image1],
user: admin1,
address: address1,
status: "active"
})
{:ok, tag1} =
Repo.insert(%Tag{
uuid: UUID.uuid4(),
name: "Piscina",
name_slug: "piscina"
})
[tag1]
|> Enum.map(fn tag -> %ListingTag{listing_uuid: listing1.uuid, tag_uuid: tag.uuid} end)
|> Enum.map(&Repo.insert/1)
{:ok, listing2} =
Repo.insert(%Listing{
type: "Casa",
description: "A description about the listing.",
floor: "2",
price: 2_000_000,
area: 200,
rooms: 3,
bathrooms: 3,
garage_spots: 2,
score: 4,
images: [image2],
user: admin2,
address: address2,
status: "active"
})
{:ok, listing3} =
Repo.insert(%Listing{
type: "Casa",
description: "A description about the listing.",
floor: "3",
price: 3_000_000,
area: 300,
rooms: 5,
bathrooms: 2,
garage_spots: 1,
score: 4,
images: [image3],
user: user1,
address: address3,
status: "active"
})
{:ok, listing4} =
Repo.insert(%Listing{
type: "Casa",
description: "A description about the listing.",
floor: "4",
price: 4_000_000,
area: 400,
rooms: 3,
bathrooms: 3,
garage_spots: 2,
score: 4,
images: [image4],
user: user1,
address: address4,
status: "active"
})
{:ok, listing5} =
Repo.insert(%Listing{
type: "Cobertura",
description: "A description about the listing.",
floor: "2",
price: 2_000_000,
area: 200,
rooms: 3,
bathrooms: 3,
garage_spots: 2,
score: 4,
user: user2,
address: address5,
status: "inactive"
})
{:ok, listing6} =
Repo.insert(%Listing{
type: "Cobertura",
description: "A description about the listing.",
floor: "2",
price: 2_000_000,
area: 200,
rooms: 3,
bathrooms: 3,
garage_spots: 2,
score: 4,
user: user2,
address: address6,
status: "inactive"
})
{:ok, listing7} =
Repo.insert(%Listing{
type: "Casa",
description: "A description about the listing.",
floor: "2",
price: 2_000_000,
area: 200,
rooms: 3,
bathrooms: 3,
garage_spots: 2,
score: 4,
user: user2,
address: address1,
status: "active"
})
{:ok, listing8} =
Repo.insert(%Listing{
type: "Apartamento",
description: "A description about the listing.",
floor: "2",
price: 2_000_000,
area: 200,
rooms: 3,
bathrooms: 3,
garage_spots: 2,
score: 4,
user: user2,
address: address2,
status: "active"
})
{:ok, development1} =
Repo.insert(%Development{
uuid: UUID.uuid4(),
name: "EmCasa condominium",
phase: "building",
builder: "EmCasa",
description: "I awesome place to live in."
})
{:ok, listing9} =
Repo.insert(%Listing{
type: "Apartamento",
description: "A description about the listing.",
floor: "2",
price: 2_000_000,
area: 200,
rooms: 3,
bathrooms: 3,
garage_spots: 2,
score: 4,
user: user2,
address: address2,
development: development1,
status: "active"
})
{:ok, _} =
Repo.insert(%PriceHistory{
price: 1_900_000,
listing: listing1
})
{:ok, _} =
Repo.insert(%PriceHistory{
price: 2_100_000,
listing: listing1
})
{:ok, _} =
Repo.insert(%PriceHistory{
price: 1_900_000,
listing: listing2
})
{:ok, _} =
Repo.insert(%PriceHistory{
price: 1_900_000,
listing: listing3
})
{:ok, interest_type1} = Re.Repo.insert(%InterestType{name: "Me ligue dentro de 5 minutos"})
{:ok, interest_type2} = Re.Repo.insert(%InterestType{name: "Me ligue em um horário específico"})
{:ok, interest_type3} = Re.Repo.insert(%InterestType{name: "Agendamento por e-mail"})
{:ok, interest_type4} = Re.Repo.insert(%InterestType{name: "Agendamento por Whatsapp"})
{:ok, interest_type5} = Re.Repo.insert(%InterestType{name: "Agendamento online"})
{:ok, _} =
Repo.insert(%Interest{
name: "Interested Person 1",
email: "[email protected]",
phone: "123212321",
message: "Looks like an awesome listing",
listing: listing2,
interest_type: interest_type1
})
{:ok, _} =
Repo.insert(%Interest{
name: "Interested Person 2",
email: "[email protected]",
phone: "321232123",
message: "Looks like an awesome listing",
listing: listing2,
interest_type: interest_type2
})
{:ok, _} =
Repo.insert(%Interest{
name: "Interested Person 2",
email: "[email protected]",
phone: "321232123",
message: "Looks like an awesome listing",
listing: listing3,
interest_type: interest_type3
})
{:ok, _} =
Repo.insert(%Interest{
name: "Interested Person 3",
email: "[email protected]",
phone: "432112344321",
message: "Looks like an awesome listing",
listing: listing4,
interest_type: interest_type4
})
{:ok, _} =
Repo.insert(%Interest{
name: "Interested Person 3",
email: "[email protected]",
phone: "432112344321",
message: "Looks like an awesome listing",
listing: listing4,
interest_type: interest_type5
})
{:ok, _} =
Repo.insert(%Favorite{
user: user1,
listing: listing2
})
{:ok, _} =
Repo.insert(%Favorite{
user: user2,
listing: listing2
})
{:ok, _} =
Repo.insert(%Favorite{
user: user3,
listing: listing3
})
{:ok, _} =
Repo.insert(%Favorite{
user: user4,
listing: listing4
})
{:ok, _} =
Repo.insert(%Favorite{
user: user1,
listing: listing5
})
{:ok, _} =
Repo.insert(%Favorite{
user: user2,
listing: listing6
})
{:ok, _} =
Repo.insert(%Favorite{
user: user3,
listing: listing7
})
{:ok, _} =
Repo.insert(%Favorite{
user: user4,
listing: listing8
})
{:ok, _} =
Repo.insert(%Unit{
listing: listing9,
uuid: UUID.uuid4(),
price: 500_000,
property_tax: 1_500.00,
maintenance_fee: 1_000.00,
floor: "1",
rooms: 1,
bathrooms: 1,
restrooms: 1,
area: 100,
garage_spots: 1,
garage_type: "contract",
suites: 0,
dependencies: 0,
balconies: 0
})
{:ok, _} =
Repo.insert(%Unit{
listing: listing9,
uuid: UUID.uuid4(),
price: 500_000,
property_tax: 1_500.00,
maintenance_fee: 1_000.00,
floor: "1",
rooms: 1,
bathrooms: 1,
restrooms: 1,
area: 100,
garage_spots: 1,
garage_type: "contract",
suites: 0,
dependencies: 0,
balconies: 0
})
| 19.627376 | 96 | 0.611003 |
73c8afc2c04d14c039fbe54dcdd1f466ca58c0fe | 20,002 | exs | Elixir | apps/core/test/services/repositories_test.exs | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 1 | 2021-12-13T09:43:15.000Z | 2021-12-13T09:43:15.000Z | apps/core/test/services/repositories_test.exs | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | null | null | null | apps/core/test/services/repositories_test.exs | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | null | null | null | defmodule Core.Services.RepositoriesTest do
use Core.SchemaCase, async: true
use Mimic
alias Core.PubSub
alias Core.Services.Repositories
alias Piazza.Crypto.RSA
describe "#create_repository" do
test "It will create a repository for the user's publisher" do
%{owner: user} = insert(:publisher)
{:ok, repo} = Repositories.create_repository(%{name: "piazza"}, user)
assert repo.name == "piazza"
assert is_binary(repo.public_key)
assert is_binary(repo.private_key)
assert_receive {:event, %PubSub.RepositoryCreated{item: ^repo, actor: ^user}}
end
test "It can create an associated integration resource definition" do
%{owner: user} = insert(:publisher)
{:ok, repo} = Repositories.create_repository(%{
name: "piazza",
integration_resource_definition: %{
name: "piazza",
spec: [%{type: :int, name: "int"}, %{type: :string, name: "str"}]
}
}, user)
assert repo.integration_resource_definition.name == "piazza"
[%{type: :int, name: "int"}, %{type: :string, name: "str"}] = repo.integration_resource_definition.spec
end
end
describe "#upsert_repository" do
test "It will create a repository for the user's publisher" do
%{owner: user} = pub = insert(:publisher)
{:ok, repo} = Repositories.upsert_repository(%{}, "piazza", pub.id, user)
assert repo.name == "piazza"
assert is_binary(repo.public_key)
assert is_binary(repo.private_key)
assert_receive {:event, %PubSub.RepositoryCreated{item: ^repo, actor: ^user}}
end
end
describe "#update_repository" do
test "Users can update their repositories" do
%{owner: user} = publisher = insert(:publisher)
repo = insert(:repository, publisher: publisher)
{:ok, updated} = Repositories.update_repository(%{name: "piazza"}, repo.id, user)
assert updated.name == "piazza"
assert_receive {:event, %PubSub.RepositoryUpdated{item: ^updated, actor: ^user}}
end
test "It can update integration resource definitions" do
%{owner: user} = publisher = insert(:publisher)
repo = insert(:repository, publisher: publisher)
{:ok, updated} = Repositories.update_repository(%{
name: "piazza",
integration_resource_definition: %{
name: "piazza",
spec: [%{type: :int, name: "int"}, %{type: :string, name: "str"}]
}
}, repo.id, user)
assert updated.id == repo.id
assert updated.integration_resource_definition.name == "piazza"
[%{type: :int, name: "int"}, %{type: :string, name: "str"}] = updated.integration_resource_definition.spec
end
test "It can update dashboards" do
%{owner: user} = publisher = insert(:publisher)
repo = insert(:repository, publisher: publisher)
{:ok, updated} = Repositories.update_repository(%{
name: "piazza",
dashboards: [%{name: "postgres", uid: "piazza-postgres"}]
}, repo.id, user)
assert updated.id == repo.id
[%{name: "postgres", uid: "piazza-postgres", repository_id: repo_id}] = updated.dashboards
assert repo_id == repo.id
end
test "Nonpublishers cannot update other's repositories" do
user = insert(:user)
{:ok, %{user: user}} = Core.Services.Accounts.create_account(user)
repo = insert(:repository)
{:error, _} = Repositories.update_repository(%{name: "piazza"}, repo.id, user)
end
end
describe "#create_installation" do
setup [:setup_root_user]
test "Users can install other repositories", %{user: user} do
repo = insert(:repository)
{:ok, installation} = Repositories.create_installation(%{}, repo.id, user)
assert installation.auto_upgrade
assert installation.user_id == user.id
assert installation.repository_id == repo.id
assert installation.license_key
assert is_map(installation.context)
assert_receive {:event, %PubSub.InstallationCreated{item: ^installation, actor: ^user}}
end
end
describe "update_installation" do
test "Users can update their installations" do
%{user: user} = inst = insert(:installation)
{:ok, updated} = Repositories.update_installation(%{context: %{some: "value"}}, inst.id, user)
assert_receive {:event, %PubSub.InstallationUpdated{item: ^updated}}
assert updated.context.some == "value"
end
test "Other users cannot update" do
user = insert(:user)
inst = insert(:installation)
{:error, _} = Repositories.update_installation(%{context: %{some: "val"}}, inst.id, user)
end
end
describe "#delete_installation" do
test "Users can delete their installations" do
%{user: user} = inst = insert(:installation)
{:ok, deleted} = Repositories.delete_installation(inst.id, user)
assert deleted.id == inst.id
refute refetch(deleted)
assert_receive {:event, %PubSub.InstallationDeleted{item: ^deleted, actor: ^user}}
end
test "It will cancel associated subscriptions when present" do
user = insert(:user)
repo = insert(:repository, publisher: build(:publisher, billing_account_id: "acct_id"))
inst = insert(:installation, repository: repo, user: user)
sub = insert(:subscription, installation: inst, external_id: "sub_id")
expect(Stripe.Subscription, :delete, fn "sub_id", [connect_account: "acct_id"] -> {:ok, %{}} end)
{:ok, _deleted} = Repositories.delete_installation(inst.id, user)
refute refetch(inst)
refute refetch(sub)
end
test "Other users cannot delete" do
inst = insert(:installation)
{:error, _} = Repositories.delete_installation(inst.id, insert(:user))
end
end
describe "#delete_repository" do
test "Publishers can delete repos" do
%{owner: user} = pub = insert(:publisher)
repo = insert(:repository, publisher: pub)
{:ok, repo} = Repositories.delete_repository(repo.id, user)
refute refetch(repo)
end
test "Non publishers cannot delete" do
repo = insert(:repository)
{:error, _} = Repositories.delete_repository(repo.id, insert(:user))
end
end
describe "#upsert_integration/3" do
test "A publisher can upsert an integrations" do
%{owner: user} = pub = insert(:publisher)
repo = insert(:repository,
publisher: pub,
integration_resource_definition: build(:resource_definition,
spec: [
build(:specification, type: :string, name: "str")
]
)
)
{:ok, integration} = Repositories.upsert_integration(%{
name: "github",
spec: %{"str" => "a value"},
tags: [%{tag: "some"}, %{tag: "tag"}]
}, repo.id, user)
assert integration.name == "github"
assert integration.publisher_id == pub.id
assert integration.spec["str"] == "a value"
integration = refetch(integration) |> Core.Repo.preload([:tags])
assert Enum.map(integration.tags, & &1.tag)
|> Enum.sort() == ["some", "tag"]
{:ok, integration} = Repositories.upsert_integration(%{
name: "github",
spec: %{"str" => "a different value"},
tags: [%{tag: "another"}, %{tag: "tag"}]
}, repo.id, user)
assert integration.name == "github"
assert integration.spec["str"] == "a different value"
integration = refetch(integration) |> Core.Repo.preload([:tags])
assert Enum.map(integration.tags, & &1.tag)
|> Enum.sort() == ["another", "tag"]
end
test "Non publishers cannot add integrations" do
repo = insert(:repository,
integration_resource_definition: build(:resource_definition,
spec: [
build(:specification, type: :string, name: "str")
]
)
)
{:error, _} = Repositories.upsert_integration(%{
name: "github",
spec: %{"str" => "a value"}
}, repo.id, insert(:user))
end
test "It will enforce resource definitions" do
%{owner: user} = pub = insert(:publisher)
repo = insert(:repository,
publisher: pub,
integration_resource_definition: build(:resource_definition,
spec: [
build(:specification, type: :string, name: "str")
]
)
)
{:error, %Ecto.Changeset{}} = Repositories.upsert_integration(%{
name: "github",
spec: %{"str" => 1}
}, repo.id, user)
end
end
describe "#update_docker_repository/3" do
test "a publisher can update a dkr repository" do
%{owner: user} = pub = insert(:publisher)
dkr = insert(:docker_repository, repository: build(:repository, publisher: pub))
{:ok, updated} = Repositories.update_docker_repository(%{public: true}, dkr.id, user)
assert updated.public
assert_receive {:event, %PubSub.DockerRepositoryUpdated{item: ^updated}}
end
end
describe "#authorize_docker/2" do
test "A repo owner can push/pull" do
%{owner: user} = pub = insert(:publisher)
repo = insert(:repository, publisher: pub)
allowed = Repositories.authorize_docker(repo.name, "some/image", user)
assert [:pull, :push] == Enum.sort(allowed)
end
test "An installer can pull" do
repo = insert(:repository)
%{user: user} = insert(:installation, repository: repo)
[:pull] = Repositories.authorize_docker(repo.name, "some/image", user)
end
test "public repositories can authorize unauthenticated users" do
%{repository: repo} = registry = insert(:docker_repository, public: true)
[:pull] = Repositories.authorize_docker(repo.name, registry.name, nil)
end
test "Arbitrary users have no access" do
repo = insert(:repository)
[] = Repositories.authorize_docker(repo.name, "some/image", insert(:user))
end
end
describe "#generate_license/1" do
test "It can generate an ecrypted license for an installation" do
publisher = insert(:publisher)
{:ok, repo} = Repositories.create_repository(%{
name: "my repo",
secrets: %{"token" => "a"}
}, publisher.owner)
installation = insert(:installation, repository: repo)
{:ok, license} = Repositories.generate_license(installation)
{:ok, decoded} = RSA.decrypt(license, ExPublicKey.loads!(repo.public_key))
%{"refresh_token" => token, "expires_at" => expiry, "secrets" => secrets} = Jason.decode!(decoded)
assert secrets["token"] == "a"
{:ok, _} = Timex.parse(expiry, "{ISO:Extended}")
{:ok, license} = Repositories.refresh_license(token)
{:ok, decoded} = RSA.decrypt(license, ExPublicKey.loads!(repo.public_key))
%{"refresh_token" => _} = Jason.decode!(decoded)
end
test "It can generate licenses for payed plans" do
publisher = insert(:publisher)
{:ok, repo} = Repositories.create_repository(%{name: "my repo"}, publisher.owner)
installation = insert(:installation, repository: repo)
plan = insert(:plan,
repository: repo,
line_items: %{
included: [%{dimension: "user", quantity: 1}, %{dimension: "storage", quantity: 0}],
items: [
%{dimension: "user", name: "Users", cost: 500},
%{dimension: "storage", name: "Users", cost: 500}
]
},
metadata: %{
features: [%{name: "sso", description: "does sso"}]
}
)
insert(:subscription,
installation: installation,
plan: plan,
line_items: %{
items: [%{dimension: "user", quantity: 1}, %{dimension: "storage", quantity: 3}]
}
)
{:ok, license} = Repositories.generate_license(installation)
{:ok, decoded} = RSA.decrypt(license, ExPublicKey.loads!(repo.public_key))
%{"policy" => %{"limits" => limits, "features" => [%{"name" => "sso"}]}} = Jason.decode!(decoded)
assert limits["storage"] == 3
assert limits["user"] == 2
end
test "It will not generate licenses if there is no subscription for a non-free repo" do
publisher = insert(:publisher)
{:ok, repo} = Repositories.create_repository(%{name: "my repo"}, publisher.owner)
installation = insert(:installation, repository: repo)
insert(:plan, repository: repo)
{:ok, nil} = Repositories.generate_license(installation)
end
end
describe "#create_docker_image/3" do
test "It can upsert a new docker repo/image" do
repository = insert(:repository)
user = insert(:user)
repo_name = "#{repository.name}/dkr_repo"
{:ok, %{repo: repo, image: image}} = Repositories.create_docker_image(repo_name, "latest", "some_digest", user)
assert repo.name == "dkr_repo"
assert repo.repository_id == repository.id
assert image.tag == "latest"
assert image.docker_repository_id == repo.id
assert image.digest == "some_digest"
assert_receive {:event, %PubSub.DockerImageCreated{item: found, actor: ^user}}
assert found.id == image.id
end
end
describe "#add_vulnerabilities/2" do
test "it will add vulnerabilities to an image and grade it afterwards" do
image = insert(:docker_image)
vuln = Application.get_env(:core, :vulnerability) |> Jason.decode!()
vuln = Core.Docker.TrivySource.to_vulnerability(vuln)
{:ok, %{vulnerabilities: [vuln]} = image} = Repositories.add_vulnerabilities([vuln], image)
assert vuln.image_id == image.id
assert image.scanned_at
assert image.grade == :c
end
end
describe "#create_artifact/3" do
test "Publishers can create artifacts" do
%{publisher: %{owner: user}} = repo = insert(:repository)
{:ok, artifact} = Repositories.create_artifact(%{
name: "artifact",
readme: "empty",
type: :cli,
platform: :mac
}, repo.id, user)
assert artifact.name == "artifact"
assert artifact.readme == "empty"
assert artifact.type == :cli
assert artifact.platform == :mac
end
test "non publishers cannot create artifacts" do
repo = insert(:repository)
{:error, _} = Repositories.create_artifact(%{
name: "artifact",
readme: "empty",
type: :cli,
platform: :mac
}, repo.id, insert(:user))
end
end
describe "#create_oidc_provider/3" do
test "a user can create a provider for their installation" do
account = insert(:account)
installation = insert(:installation, user: build(:user, account: account))
group = insert(:group, account: account)
expect(HTTPoison, :post, fn _, _, _ ->
{:ok, %{status_code: 200, body: Jason.encode!(%{client_id: "123", client_secret: "secret"})}}
end)
{:ok, oidc} = Repositories.create_oidc_provider(%{
redirect_uris: ["https://example.com"],
auth_method: :basic,
bindings: [%{user_id: installation.user_id}, %{group_id: group.id}]
}, installation.id, installation.user)
assert oidc.client_id == "123"
assert oidc.client_secret == "secret"
assert oidc.redirect_uris == ["https://example.com"]
[first, second] = oidc.bindings
assert first.user_id == installation.user_id
assert second.group_id == group.id
assert_receive {:event, %PubSub.OIDCProviderCreated{item: ^oidc}}
end
end
describe "#update_oidc_provider/3" do
test "it can update an oidc provider's attributes" do
installation = insert(:installation)
oidc = insert(:oidc_provider, installation: installation)
expect(HTTPoison, :put, fn _, _, _ ->
{:ok, %{status_code: 200, body: Jason.encode!(%{client_id: "123", client_secret: "secret"})}}
end)
{:ok, updated} = Repositories.update_oidc_provider(%{
redirect_uris: ["https://example.com"],
auth_method: :basic
}, installation.id, installation.user)
assert updated.id == oidc.id
assert updated.auth_method == :basic
assert_receive {:event, %PubSub.OIDCProviderUpdated{item: ^updated}}
end
end
describe "#upsert_oidc_provider/3" do
test "a user can create a provider for their installation" do
account = insert(:account)
installation = insert(:installation, user: build(:user, account: account))
group = insert(:group, account: account)
expect(HTTPoison, :post, fn _, _, _ ->
{:ok, %{status_code: 200, body: Jason.encode!(%{client_id: "123", client_secret: "secret"})}}
end)
{:ok, oidc} = Repositories.upsert_oidc_provider(%{
redirect_uris: ["https://example.com"],
auth_method: :basic,
bindings: [%{user_id: installation.user_id}, %{group_id: group.id}]
}, installation.id, installation.user)
assert oidc.client_id == "123"
assert oidc.client_secret == "secret"
assert oidc.redirect_uris == ["https://example.com"]
[first, second] = oidc.bindings
assert first.user_id == installation.user_id
assert second.group_id == group.id
assert_receive {:event, %PubSub.OIDCProviderCreated{item: ^oidc}}
end
test "it can update an oidc provider's attributes" do
installation = insert(:installation)
oidc = insert(:oidc_provider, installation: installation)
expect(HTTPoison, :put, fn _, _, _ ->
{:ok, %{status_code: 200, body: Jason.encode!(%{client_id: "123", client_secret: "secret"})}}
end)
{:ok, updated} = Repositories.upsert_oidc_provider(%{
redirect_uris: ["https://example.com"],
auth_method: :basic
}, installation.id, installation.user)
assert updated.id == oidc.id
assert updated.auth_method == :basic
assert_receive {:event, %PubSub.OIDCProviderUpdated{item: ^updated}}
end
end
describe "#delete_oidc_provider/2" do
test "it can delete an oidc provider for an installation" do
installation = insert(:installation)
oidc = insert(:oidc_provider, installation: installation)
expect(HTTPoison, :delete, fn _, _ -> {:ok, %{status_code: 204, body: ""}} end)
{:ok, deleted} = Repositories.delete_oidc_provider(installation.id, installation.user)
assert deleted.id == oidc.id
refute refetch(deleted)
end
end
describe "#acquire_apply_lock/2" do
test "A user can create an apply lock if they have repo edit permission" do
%{owner: user} = pub = insert(:publisher)
repo = insert(:repository, publisher: pub)
{:ok, lock} = Repositories.acquire_apply_lock(repo.id, user)
assert lock.owner_id == user.id
assert lock.repository_id == repo.id
end
test "A user can create an existing apply lock if there is no owner" do
%{owner: user} = pub = insert(:publisher)
repo = insert(:repository, publisher: pub)
lock = insert(:apply_lock, repository: repo)
{:ok, acquired} = Repositories.acquire_apply_lock(repo.id, user)
assert acquired.id == lock.id
assert acquired.owner_id == user.id
end
test "a lock with an owner cannot be acquired" do
%{owner: user} = pub = insert(:publisher)
repo = insert(:repository, publisher: pub)
insert(:apply_lock, repository: repo, owner: build(:user))
{:error, _} = Repositories.acquire_apply_lock(repo.id, user)
end
end
describe "#release_apply_lock/3" do
test "A lock owner can save a lock and release ownership" do
lock = insert(:apply_lock, owner: build(:user))
{:ok, release} = Repositories.release_apply_lock(
%{lock: "test"},
lock.repository_id,
lock.owner
)
assert release.id == lock.id
assert release.lock == "test"
refute release.owner_id
end
test "non-owners cannot release locks" do
lock = insert(:apply_lock, owner: build(:user))
{:error, _} = Repositories.release_apply_lock(
%{lock: "test"},
lock.repository_id,
insert(:user)
)
end
end
end
| 33.673401 | 117 | 0.633287 |
73c8b4d5ddf569dcfedffe537321bcd9bf090fd9 | 426 | ex | Elixir | lib/flux/websocket/handler.ex | cjfreeze/Flux | ea1eb29932db806c06fcd6cdcc625df04b83f8bc | [
"MIT"
] | 1 | 2021-10-05T02:22:22.000Z | 2021-10-05T02:22:22.000Z | lib/flux/websocket/handler.ex | cjfreeze/Flux | ea1eb29932db806c06fcd6cdcc625df04b83f8bc | [
"MIT"
] | null | null | null | lib/flux/websocket/handler.ex | cjfreeze/Flux | ea1eb29932db806c06fcd6cdcc625df04b83f8bc | [
"MIT"
] | null | null | null | defmodule Flux.Websocket.Handler do
alias Flux.Websocket.{Frame, Conn}
@type context :: {module, any}
@callback init(Flux.Conn.t(), any) :: {:ok, any} | :error
@callback handle_frame(Frame.opcode(), binary, Conn.t(), context) :: {:ok, Conn.t(), context}
@callback handle_info(any, Conn.t(), context) :: {:ok, Conn.t(), context}
@callback handle_terminate(atom, Conn.t(), context) :: {:ok, Conn.t(), context}
end
| 38.727273 | 95 | 0.657277 |
73c8b846b78f47d3bffbb81d4b6eec5d2c4d6b7f | 2,720 | exs | Elixir | test/guardian/permissions/text_encoding_test.exs | x-ji/guardian | 576997d3701306ccfa40b42e18b1bec5a6ac98a7 | [
"MIT"
] | 3,135 | 2015-12-19T05:35:41.000Z | 2022-03-29T00:27:25.000Z | test/guardian/permissions/text_encoding_test.exs | x-ji/guardian | 576997d3701306ccfa40b42e18b1bec5a6ac98a7 | [
"MIT"
] | 536 | 2015-12-28T04:40:31.000Z | 2022-03-30T22:45:59.000Z | test/guardian/permissions/text_encoding_test.exs | x-ji/guardian | 576997d3701306ccfa40b42e18b1bec5a6ac98a7 | [
"MIT"
] | 451 | 2015-12-20T23:43:49.000Z | 2022-03-24T10:36:14.000Z | defmodule Guardian.Permissions.TextEncodingTest do
use ExUnit.Case, async: true
defmodule Impl do
use Guardian,
permissions: %{
user: [:read, :write],
profile: %{read: 0b1, write: 0b10}
}
use Guardian.Permissions, encoding: Guardian.Permissions.TextEncoding
def subject_for_token(resource, _claims), do: {:ok, resource}
def resource_from_claims(claims), do: {:ok, claims["sub"]}
def build_claims(claims, _resource, opts) do
encode_permissions_into_claims!(claims, Keyword.get(opts, :permissions))
end
end
describe "encode_permissions" do
test "it encodes to an empty map when there are no permissions given" do
%{} = result = Impl.encode_permissions!(%{})
assert Enum.empty?(result)
end
test "it encodes when provided with an atom map" do
perms = %{profile: [:read, :write], user: [:read]}
result = Impl.encode_permissions!(perms)
assert result == %{profile: ["write", "read"], user: ["read"]}
end
test "it encodes when provided with a string map" do
perms = %{"profile" => ["read", "write"], "user" => ["read"]}
result = Impl.encode_permissions!(perms)
assert result == %{profile: ["write", "read"], user: ["read"]}
end
test "it encodes when provided with an integer" do
perms = %{profile: [], user: 0b1}
result = Impl.encode_permissions!(perms)
assert result == %{profile: [], user: ["read"]}
end
test "it is ok with using max permissions" do
perms = %{profile: Impl.max(), user: 0b1}
result = Impl.encode_permissions!(perms)
assert result == %{profile: ["read", "write"], user: ["read"]}
end
test "when setting from an integer it does not lose resolution" do
perms = %{profile: Impl.max(), user: 0b111111}
result = Impl.encode_permissions!(perms)
assert result == %{profile: ["read", "write"], user: ["read", "write"]}
end
end
describe "decode_permissions" do
test "it decodes to an empty map when there are no permissions given" do
perms = %{profile: ["read"], user: []}
result = Impl.decode_permissions(perms)
assert result == %{profile: [:read], user: []}
end
test "when setting from an integer it ignores extra resolution" do
perms = %{profile: ["read", "write"], user: ["read", "write"]}
result = Impl.decode_permissions(perms)
assert result == %{profile: [:read, :write], user: [:read, :write]}
end
test "it ignores unknown permission sets" do
perms = %{profile: ["read", "write"], unknown: ["read"]}
result = Impl.decode_permissions(perms)
assert result == %{profile: [:read, :write]}
end
end
end
| 34.871795 | 78 | 0.626471 |
73c8c34c718c5e9289a6bb758061102a7ae51c9b | 6,695 | exs | Elixir | test/exconstructor_test.exs | appcues/exconstructor | a4f5a88cfebba01eca3024ccaf58ce295d83683f | [
"MIT"
] | 264 | 2016-01-19T23:39:30.000Z | 2022-03-16T19:59:49.000Z | test/exconstructor_test.exs | appcues/exconstructor | a4f5a88cfebba01eca3024ccaf58ce295d83683f | [
"MIT"
] | 29 | 2016-02-04T02:18:36.000Z | 2022-02-22T20:02:13.000Z | test/exconstructor_test.exs | appcues/exconstructor | a4f5a88cfebba01eca3024ccaf58ce295d83683f | [
"MIT"
] | 19 | 2016-02-02T16:21:17.000Z | 2021-07-18T08:36:29.000Z | defmodule ExConstructorTest do
use ExSpec, async: true
doctest ExConstructor
defmodule TestStruct do
defstruct field_one: 1,
field_two: 2,
field_three: 3,
field_four: 4,
field_five: 5,
Field_Six: 6,
FieldSeven: 7,
FieldEight: 8,
field_nine: 9
use ExConstructor
end
context "populate_struct" do
import ExConstructor
it "handles maps with string-vs-atom, camel-vs-underscore, and literals" do
map = %{
"field_one" => "a",
"fieldTwo" => "b",
:field_three => "c",
:fieldFour => "d",
"Field_Six" => "f",
"field_seven" => 7,
:field_eight => 8,
"FieldNine" => "Nine"
}
struct = %TestStruct{
field_one: "a",
field_two: "b",
field_three: "c",
field_four: "d",
field_five: 5,
Field_Six: "f",
FieldSeven: 7,
FieldEight: 8,
field_nine: "Nine"
}
assert(struct == populate_struct(%TestStruct{}, map, []))
end
it "handles keyword lists" do
kwlist = [{:field_one, "a"}, {"field_two", "b"}]
struct = %TestStruct{
field_one: "a",
field_two: "b",
field_three: 3,
field_four: 4,
field_five: 5,
Field_Six: 6,
FieldSeven: 7,
FieldEight: 8,
field_nine: 9
}
assert(struct == populate_struct(%TestStruct{}, kwlist, []))
end
it "converts opts into %Options{}" do
ts =
populate_struct(
%TestStruct{},
%{"field_one" => 11, :field_two => 22},
strings: false
)
assert(11 != ts.field_one)
assert(22 == ts.field_two)
end
it "defaults to %Options{} when none given" do
ts =
populate_struct(
%TestStruct{},
%{"field_one" => 11, :field_two => 22}
)
assert(11 == ts.field_one)
assert(22 == ts.field_two)
end
it "blows up on bad input" do
ex = assert_raise(RuntimeError, fn -> populate_struct(:omg, %{}, []) end)
assert(String.match?(ex.message, ~r"first argument"))
ex = assert_raise(RuntimeError, fn -> populate_struct(%TestStruct{}, :hi, []) end)
assert(String.match?(ex.message, ~r"^second argument"))
ex = assert_raise(RuntimeError, fn -> populate_struct(%TestStruct{}, %{}, :oof) end)
assert(String.match?(ex.message, ~r"^third argument"))
end
end
context "invocation styles" do
defmodule TestStruct1 do
defstruct field: nil
ExConstructor.define_constructor()
end
defmodule TestStruct2 do
defstruct field: nil
use ExConstructor
end
defmodule TestStruct3 do
defstruct field: nil
use ExConstructor, :make
end
defmodule TestStruct4 do
defstruct field: nil
use ExConstructor, name: :build
end
defmodule TestStruct5 do
defstruct field: nil
ExConstructor.__using__()
end
context "ExConstructor.define_constructor" do
it "uses the default constructor name" do
assert(nil != TestStruct1.new(%{}))
end
end
context "use ExConstructor" do
it "uses the default constructor name" do
assert(nil != TestStruct2.new(%{}))
end
end
context "use ExConstructor, :constructor_name" do
it "uses the given constructor name" do
assert(nil != TestStruct3.make(%{}))
end
end
context "use ExConstructor, name: :constructor_name" do
it "uses the given constructor name" do
assert(nil != TestStruct4.build(%{}))
end
end
context "ExConstructor.__using__" do
it "uses the default constructor name" do
assert(nil != TestStruct5.new(%{}))
end
end
it "raises exception on bad invocation" do
ex =
assert_raise(RuntimeError, fn ->
defmodule TestStruct6 do
defstruct field: nil
ExConstructor.__using__(22)
end
end)
assert(String.match?(ex.message, ~r"^argument must be"))
end
it "does not crash if @enforce_keys exists" do
defmodule TestStruct7 do
@enforce_keys :field
defstruct field: 1
use ExConstructor
end
end
end
context "options" do
defmodule TestStructNoStrings do
defstruct foo: 1
use ExConstructor, strings: false
end
defmodule TestStructNoAtoms do
defstruct foo: 1
use ExConstructor, atoms: false
end
defmodule TestStructNoCamel do
defstruct foo_bar: 1
use ExConstructor, camelcase: false
end
defmodule TestStructNoUpperCamel do
defstruct foo_bar: 1
use ExConstructor, uppercamelcase: false
end
defmodule TestStructNoUnder do
defstruct fooBar: 1
use ExConstructor, underscore: false
end
it "supports strings: false" do
ts_map = TestStructNoStrings.new(%{"foo" => 2})
assert(1 == ts_map.foo)
ts_kwlist = TestStructNoStrings.new([{"foo", 2}])
assert(1 == ts_kwlist.foo)
end
it "supports atoms: false" do
ts_map = TestStructNoAtoms.new(%{:foo => 2})
assert(1 == ts_map.foo)
ts_kwlist = TestStructNoAtoms.new([{:foo, 2}])
assert(1 == ts_kwlist.foo)
end
it "supports camelcase: false" do
ts_map = TestStructNoCamel.new(%{:fooBar => 2})
assert(1 == ts_map.foo_bar)
ts_kwlist = TestStructNoCamel.new([{"fooBar", 2}])
assert(1 == ts_kwlist.foo_bar)
end
it "supports uppercamelcase: false" do
ts_map = TestStructNoUpperCamel.new(%{:FooBar => 2})
assert(1 == ts_map.foo_bar)
ts_kwlist = TestStructNoUpperCamel.new([{"FooBar", 2}])
assert(1 == ts_kwlist.foo_bar)
end
it "supports underscore: false" do
ts_map = TestStructNoUnder.new(%{:foo_bar => 2})
assert(1 == ts_map.fooBar)
ts_kwlist = TestStructNoUnder.new([{"foo_bar", 2}])
assert(1 == ts_kwlist.fooBar)
end
it "supports overrides" do
ts_map = TestStructNoStrings.new(%{"foo" => 2})
assert(1 == ts_map.foo)
ts_map = TestStructNoStrings.new(%{"foo" => 2}, strings: true)
assert(2 == ts_map.foo)
end
end
context "overriding" do
defmodule TestStructOverrideNew do
defstruct [:name]
use ExConstructor
def new(data, args \\ []) do
res = super(data, args)
%{res | name: String.capitalize(res.name)}
end
end
it "can override new and call super" do
ts_map = TestStructOverrideNew.new(%{"name" => "jim"})
assert("Jim" == ts_map.name)
end
end
end
| 25.264151 | 90 | 0.588648 |