Dataset columns:
hexsha: string (length 40)
size: int64 (2 to 991k)
ext: string (2 classes)
lang: string (1 class)
max_stars_repo_path: string (length 4 to 208)
max_stars_repo_name: string (length 6 to 106)
max_stars_repo_head_hexsha: string (length 40)
max_stars_repo_licenses: sequence
max_stars_count: int64 (1 to 33.5k, nullable)
max_stars_repo_stars_event_min_datetime: string (length 24, nullable)
max_stars_repo_stars_event_max_datetime: string (length 24, nullable)
max_issues_repo_path: string (length 4 to 208)
max_issues_repo_name: string (length 6 to 106)
max_issues_repo_head_hexsha: string (length 40)
max_issues_repo_licenses: sequence
max_issues_count: int64 (1 to 16.3k, nullable)
max_issues_repo_issues_event_min_datetime: string (length 24, nullable)
max_issues_repo_issues_event_max_datetime: string (length 24, nullable)
max_forks_repo_path: string (length 4 to 208)
max_forks_repo_name: string (length 6 to 106)
max_forks_repo_head_hexsha: string (length 40)
max_forks_repo_licenses: sequence
max_forks_count: int64 (1 to 6.91k, nullable)
max_forks_repo_forks_event_min_datetime: string (length 24, nullable)
max_forks_repo_forks_event_max_datetime: string (length 24, nullable)
content: string (length 2 to 991k)
avg_line_length: float64 (1 to 36k)
max_line_length: int64 (1 to 977k)
alphanum_fraction: float64 (0 to 1)
f79a18f4689a22172a016078c7ede5c3611787be | 482 | ex | Elixir | lib/wabanex_web/schema/types/user.ex | pouthergust/wabanex | 5c665eddf2937388e28c820c4c1c50b2f812ea29 | ["MIT"] | null | null | null | lib/wabanex_web/schema/types/user.ex | pouthergust/wabanex | 5c665eddf2937388e28c820c4c1c50b2f812ea29 | ["MIT"] | null | null | null | lib/wabanex_web/schema/types/user.ex | pouthergust/wabanex | 5c665eddf2937388e28c820c4c1c50b2f812ea29 | ["MIT"] | null | null | null |
defmodule WabanexWeb.Schema.Types.User do
use Absinthe.Schema.Notation
@desc "Logic user representation"
object :user do
field :id, non_null(:uuid4)
field :name, non_null(:string)
field :email, non_null(:string)
end
input_object :create_user_input do
field :name, non_null(:string), description: "User's name"
field :email, non_null(:string), description: "User's email"
field :password, non_null(:string), description: "User's password"
end
end
| 28.352941 | 69 | 0.717842 |
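# Illustrative sketch, not part of the wabanex source above: a GraphQL document a client
# might send against the :user object and :create_user_input defined in this schema. The
# createUser mutation name and its argument name are assumptions; the actual query and
# mutation fields are defined elsewhere in the schema.
example_create_user = """
mutation {
  createUser(input: {name: "Jane Doe", email: "jane@example.com", password: "secret123"}) {
    id
    name
    email
  }
}
"""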
f79a1bc945d156bf53c9e393b262f10f39e0298f | 6,201 | ex | Elixir | lib/ueberauth/strategies/helpers.ex | nelyj/ueberauth | 81864768d6b24775e2defd774403714679c35a6a | ["MIT"] | 2 | 2018-02-27T02:11:05.000Z | 2018-09-09T01:52:01.000Z | lib/ueberauth/strategies/helpers.ex | nelyj/ueberauth | 81864768d6b24775e2defd774403714679c35a6a | ["MIT"] | null | null | null | lib/ueberauth/strategies/helpers.ex | nelyj/ueberauth | 81864768d6b24775e2defd774403714679c35a6a | ["MIT"] | null | null | null |
defmodule Ueberauth.Strategy.Helpers do
@moduledoc """
Provides helper methods for use within your strategy.
These helpers are provided as a convenience for accessing the options passed
to the specific pipelined strategy, considering the pipelined options and
falling back to defaults.
"""
import Plug.Conn
alias Ueberauth.Failure
alias Ueberauth.Failure.Error
@doc """
Provides the name of the strategy or provider name.
This is defined in your configuration as the provider name.
"""
@spec strategy_name(Plug.Conn.t) :: String.t
def strategy_name(conn), do: from_private(conn, :strategy_name)
@doc """
The strategy module that is being used for the request.
"""
@spec strategy(Plug.Conn.t) :: module
def strategy(conn), do: from_private(conn, :strategy)
@doc """
The request path for the strategy to hit.
Requests to this path will trigger the `request_phase` of the strategy.
"""
@spec request_path(Plug.Conn.t) :: String.t
def request_path(conn), do: from_private(conn, :request_path)
@doc """
The callback path for the request's strategy.
When a client hits this path, the callback phase will be triggered for the strategy.
"""
@spec callback_path(Plug.Conn.t) :: String.t
def callback_path(conn), do: from_private(conn, :callback_path)
@doc """
The full URL for the request phase for the request's strategy.
The URL is based on the current requests host and scheme. The options will be
encoded as query params.
"""
@spec request_url(Plug.Conn.t) :: String.t
def request_url(conn, opts \\ []), do: full_url(conn, request_path(conn), opts)
@doc """
The full URL for the callback phase for the request's strategy.
The URL is based on the current requests host and scheme. The options will be
encoded as query params.
"""
@spec callback_url(Plug.Conn.t) :: String.t
def callback_url(conn, opts \\ []) do
from_private(conn, :callback_url) ||
full_url(conn, callback_path(conn), callback_params(conn, opts))
end
@doc """
Builds the params for the callback.
This function filters conn.params, keeping only the params whitelisted in the :callback_params setting.
"""
@spec callback_params(Plug.Conn.t, Keyword.t) :: Keyword.t
def callback_params(conn, opts \\ []) do
callback_params = from_private(conn, :callback_params) || []
callback_params = callback_params
|> Enum.map(fn(k) -> {String.to_atom(k), conn.params[k]} end)
|> Enum.filter(fn {_, v} -> v != nil end)
|> Enum.filter(fn {k, _} -> k != "provider" end)
Keyword.merge(opts, callback_params)
end
@doc """
The configured allowed callback http methods.
This will use any supplied options from the configuration, but fallback to the
default options
"""
@spec allowed_callback_methods(Plug.Conn.t) :: list(String.t)
def allowed_callback_methods(conn), do: from_private(conn, :callback_methods)
@doc """
Is the current request http method one of the allowed callback methods?
"""
@spec allowed_callback_method?(Plug.Conn.t) :: boolean
def allowed_callback_method?(%{method: method} = conn) do
callback_method =
method
|> to_string
|> String.upcase
conn
|> allowed_callback_methods
|> Enum.member?(callback_method)
end
@doc """
The full list of options passed to the strategy in the configuration.
"""
@spec options(Plug.Conn.t) :: Keyword.t
def options(conn), do: from_private(conn, :options)
@doc """
A helper for constructing error entries on failure.
The `message_key` is intended for use by machines for translations etc.
The message is a human readable error message.
#### Example
error("something_bad", "Something really bad happened")
"""
@spec error(String.t, String.t) :: Error.t
def error(key, message), do: struct(Error, message_key: key, message: message)
@doc """
Sets a failure onto the connection containing a List of errors.
During your callback phase, this should be called to 'fail' the authentication
request and include a collection of errors outlining what the problem is.
Note this changes the conn object and should be part of your returned
connection of the `callback_phase!`.
"""
@spec set_errors!(Plug.Conn.t, list(Error.t)) :: Plug.Conn.t
def set_errors!(conn, errors) do
failure = struct(
Failure,
provider: strategy_name(conn),
strategy: strategy(conn),
errors: map_errors(errors)
)
Plug.Conn.assign(conn, :ueberauth_failure, failure)
end
@doc """
Redirects to a url and halts the plug pipeline.
"""
@spec redirect!(Plug.Conn.t, String.t) :: Plug.Conn.t
def redirect!(conn, url) do
html = Plug.HTML.html_escape(url)
body = "<html><body>You are being <a href=\"#{html}\">redirected</a>.</body></html>"
conn
|> put_resp_header("location", url)
|> send_resp(conn.status || 302, body)
|> halt
end
defp from_private(conn, key) do
opts = conn.private[:ueberauth_request_options]
if opts, do: opts[key], else: nil
end
defp full_url(conn, path, opts) do
scheme = conn
|> forwarded_proto
|> coalesce(conn.scheme)
|> normalize_scheme
%URI{
host: conn.host,
port: normalize_port(scheme, conn.port),
path: path,
query: encode_query(opts),
scheme: to_string(scheme),
}
|> to_string
end
defp forwarded_proto(conn) do
conn
|> Plug.Conn.get_req_header("x-forwarded-proto")
|> List.first
end
defp normalize_scheme("https"), do: :https
defp normalize_scheme("http"), do: :http
defp normalize_scheme(scheme), do: scheme
defp coalesce(nil, second), do: second
defp coalesce(first, _), do: first
defp normalize_port(:https, 80), do: 443
defp normalize_port(_, port), do: port
defp encode_query([]), do: nil
defp encode_query(opts), do: URI.encode_query(opts)
defp map_errors(nil), do: []
defp map_errors([]), do: []
defp map_errors(%Error{} = error), do: [error]
defp map_errors(errors), do: Enum.map(errors, &p_error/1)
defp p_error(%Error{} = error), do: error
defp p_error(%{} = error), do: struct(Error, error)
defp p_error(error) when is_list(error), do: struct(Error, error)
end
| 29.956522 | 92 | 0.688115 |
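# Illustrative sketch, not part of the ueberauth source above: a minimal strategy module
# showing how the helpers are typically used in the request and callback phases. The
# provider URL, parameter names, and error key are assumptions.
defmodule MyApp.Strategy.Example do
  use Ueberauth.Strategy
  import Ueberauth.Strategy.Helpers

  def handle_request!(conn) do
    # Send the user to the provider, telling it where to come back to.
    redirect!(conn, "https://provider.example.com/authorize?redirect_uri=" <> callback_url(conn))
  end

  # Provider returned an authorization code: let the pipeline continue.
  def handle_callback!(%Plug.Conn{params: %{"code" => _code}} = conn), do: conn

  # No code returned: fail the request with a structured error.
  def handle_callback!(conn) do
    set_errors!(conn, [error("missing_code", "No authorization code received")])
  end
end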
f79a482348fec71ba6a9ef03c25c098abbf9dfdb | 282 | exs | Elixir | priv/repo/migrations/20190330144510_create_lots.exs | rkalz/dragonhacks-backend | 377ed0383f403c720af30bc8ef4f3fdaf89b3e38 | ["MIT"] | 1 | 2019-04-03T01:48:59.000Z | 2019-04-03T01:48:59.000Z | priv/repo/migrations/20190330144510_create_lots.exs | rkalz/dragonhacks-backend | 377ed0383f403c720af30bc8ef4f3fdaf89b3e38 | ["MIT"] | null | null | null | priv/repo/migrations/20190330144510_create_lots.exs | rkalz/dragonhacks-backend | 377ed0383f403c720af30bc8ef4f3fdaf89b3e38 | ["MIT"] | 1 | 2019-03-31T21:21:47.000Z | 2019-03-31T21:21:47.000Z |
defmodule Dragonhacks.Repo.Migrations.CreateLots do
use Ecto.Migration
def change do
create table(:lots) do
add :name, :string
add :address, :string
add :lat, :float
add :lng, :float
add :status, :string
timestamps()
end
end
end
| 16.588235 | 51 | 0.620567 |
f79a85006b29fd65c18b21e1c22410f2bcc05c67 | 1,869 | exs | Elixir | test/locations_web/features/edit_location_test.exs | antp/locations | fe765fe78b896fe02e11b92e8d21e97f00744384 | ["MIT"] | 14 | 2020-09-16T14:10:35.000Z | 2021-10-30T22:05:48.000Z | test/locations_web/features/edit_location_test.exs | antp/locations | fe765fe78b896fe02e11b92e8d21e97f00744384 | ["MIT"] | null | null | null | test/locations_web/features/edit_location_test.exs | antp/locations | fe765fe78b896fe02e11b92e8d21e97f00744384 | ["MIT"] | 1 | 2021-03-11T23:27:28.000Z | 2022-03-06T10:20:51.000Z |
defmodule LocationsWeb.Features.EditLocationTest do
use LocationsWeb.ConnCase
import Phoenix.LiveViewTest
import Locations.AssertLiveviewHelpers
import Locations.Factories
@invalid_attrs %{
"addr1" => "",
"city" => "",
"postcode" => "",
"country" => ""
}
setup _tags do
# :ok = Ecto.Adapters.SQL.Sandbox.checkout(Locations.Repo)
# unless tags[:async] do
# Ecto.Adapters.SQL.Sandbox.mode(Locations.Repo, {:shared, self()})
# end
location = insert(:location)
{:ok, conn: Phoenix.ConnTest.build_conn(), location: location}
end
@tag :unit
test "will update location modal", %{conn: conn, location: location} do
{:ok, view, _html} = live(conn, "/")
edit_id = "#edit-#{location.id}"
assert_id_is_on_the_page(view, edit_id)
element(view, edit_id)
|> render_click()
params = string_params_for(:location)
view
|> form("#locations-form",
location: params
)
|> render_submit()
html = render(view)
assert_text_in_page(html, params)
end
@tag :unit
test "will not update a location - displays errors", %{conn: conn, location: location} do
{:ok, view, _html} = live(conn, "/")
edit_id = "#edit-#{location.id}"
assert_id_is_on_the_page(view, edit_id)
element(view, edit_id)
|> render_click()
params = @invalid_attrs
view
|> form("#locations-form",
location: params
)
|> render_submit()
assert_form_errors(view, "locations-form", params)
end
@tag :unit
test "can cancel the modal", %{conn: conn} do
{:ok, view, _html} = live(conn, "/")
element(view, "#btn-add-location")
|> render_click()
assert_id_is_on_the_page(view, "#location-cancel")
element(view, "#location-cancel")
|> render_click()
refute_id_is_on_the_page(view, "#add-location-modal")
end
end
| 21.732558 | 91 | 0.637774 |
f79abdc420a49529abad30ea996dcdd9a9c5d85b | 676 | ex | Elixir | lib/excron/allocation.ex | kchrismucheke/excron | b08e2d4e8281f5ace14350e92399b194d6ddcb2c | ["MIT"] | null | null | null | lib/excron/allocation.ex | kchrismucheke/excron | b08e2d4e8281f5ace14350e92399b194d6ddcb2c | ["MIT"] | null | null | null | lib/excron/allocation.ex | kchrismucheke/excron | b08e2d4e8281f5ace14350e92399b194d6ddcb2c | ["MIT"] | null | null | null |
defmodule Excron.Allocation do
@moduledoc false
def is_job_allocated_to_current_node(jobs, job_index, nodes \\ [Node.self() | Node.list()]) do
nodes = Enum.sort(nodes)
current_node_index = Enum.find_index(nodes, &(&1 == Node.self()))
jobs_to_nodes = allocate_jobs_to_nodes(jobs, nodes)
jobs_to_nodes[job_index] == current_node_index
end
defp allocate_jobs_to_nodes(jobs, nodes) do
nodes_length = length(nodes)
assigned_nodes =
jobs
|> Enum.with_index()
|> Enum.map(fn {_, i} -> rem(i, nodes_length) end)
jobs_indexes = 0..(length(jobs) - 1)
jobs_indexes
|> Enum.zip(assigned_nodes)
|> Map.new()
end
end
| 26 | 96 | 0.671598 |
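# Illustrative walkthrough, not part of the excron source above. Jobs are assigned to
# nodes round-robin by index (rem(index, number_of_nodes)), and a job runs locally only
# when its assigned node index matches the current node's position in the sorted node
# list. The job and node names below are made up; assume the current node is :"a@host".
#
#   jobs  = [:backup, :report, :cleanup]
#   nodes = [:"a@host", :"b@host"]
#
#   Excron.Allocation.is_job_allocated_to_current_node(jobs, 0, nodes) #=> true  (rem(0, 2) == 0)
#   Excron.Allocation.is_job_allocated_to_current_node(jobs, 1, nodes) #=> false (rem(1, 2) == 1)
#   Excron.Allocation.is_job_allocated_to_current_node(jobs, 2, nodes) #=> true  (rem(2, 2) == 0)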
f79b0a2d0ca74a43580869449698494fe3f72b0d | 1,037 | ex | Elixir | backend/lib/backend.ex | tykowale/faketwitter | 8cb2f15bbc285d079daec153311b7f6fda167dd2 | ["MIT"] | 1 | 2016-03-20T21:38:39.000Z | 2016-03-20T21:38:39.000Z | backend/lib/backend.ex | tykowale/faketwitter | 8cb2f15bbc285d079daec153311b7f6fda167dd2 | ["MIT"] | null | null | null | backend/lib/backend.ex | tykowale/faketwitter | 8cb2f15bbc285d079daec153311b7f6fda167dd2 | ["MIT"] | null | null | null |
defmodule Backend do
use Application
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec
# Define workers and child supervisors to be supervised
children = [
# Start the Ecto repository
supervisor(Backend.Repo, []),
# Start the endpoint when the application starts
supervisor(Backend.Endpoint, []),
# Start your own worker by calling: Backend.Worker.start_link(arg1, arg2, arg3)
# worker(Backend.Worker, [arg1, arg2, arg3]),
]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Backend.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
Backend.Endpoint.config_change(changed, removed)
:ok
end
end
| 32.40625 | 85 | 0.712633 |
f79b39246785b7e8f4b7a72e3be2135ab59a45d6 | 381 | exs | Elixir | apps/re/test/mix/tags.add_new_test.exs | ruby2elixir/emcasa-backend | 70d7f4f233555417941ffa6ada84cf8740c21dd2 | ["MIT"] | 4 | 2019-11-01T16:29:31.000Z | 2020-10-10T21:20:12.000Z | apps/re/test/mix/tags.add_new_test.exs | eduardomartines/emcasa-backend | 70d7f4f233555417941ffa6ada84cf8740c21dd2 | ["MIT"] | null | null | null | apps/re/test/mix/tags.add_new_test.exs | eduardomartines/emcasa-backend | 70d7f4f233555417941ffa6ada84cf8740c21dd2 | ["MIT"] | 5 | 2019-11-04T21:25:45.000Z | 2020-02-13T23:49:36.000Z |
defmodule Mix.Tasks.Re.Tags.AddNewTest do
use Re.ModelCase
alias Mix.Tasks.Re.Tags
alias Re.{
Repo,
Tag
}
setup do
Mix.shell(Mix.Shell.Process)
on_exit(fn ->
Mix.shell(Mix.Shell.IO)
end)
:ok
end
describe "run/1" do
test "insert at least one tag" do
Tags.AddNew.run([])
refute [] == Repo.all(Tag)
end
end
end
| 13.137931 | 41 | 0.587927 |
f79b3a2a63fd8142abc189574272c95ca4fddd90 | 933 | exs | Elixir | config/prod.secret.exs | fcapovilla/paires | 72a9453d02d7afee11f9457cf342215d075b0c97 | ["BSD-3-Clause"] | null | null | null | config/prod.secret.exs | fcapovilla/paires | 72a9453d02d7afee11f9457cf342215d075b0c97 | ["BSD-3-Clause"] | null | null | null | config/prod.secret.exs | fcapovilla/paires | 72a9453d02d7afee11f9457cf342215d075b0c97 | ["BSD-3-Clause"] | null | null | null |
# In this file, we load production configuration and secrets
# from environment variables. You can also hardcode secrets,
# although such is generally not recommended and you have to
# remember to add this file to your .gitignore.
use Mix.Config
secret_key_base =
System.get_env("SECRET_KEY_BASE") ||
raise """
environment variable SECRET_KEY_BASE is missing.
You can generate one by calling: mix phx.gen.secret
"""
config :paires, PairesWeb.Endpoint,
http: [
port: String.to_integer(System.get_env("PORT") || "4000"),
transport_options: [socket_opts: [:inet6]]
],
secret_key_base: secret_key_base
# ## Using releases (Elixir v1.9+)
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
# config :paires, PairesWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
| 31.1 | 62 | 0.728832 |
f79b3cce0b0b9c64d3d8a89a9034c4aeecf0804c | 63 | exs | Elixir | test/ex_coin_test.exs | esdrasedu/ex_coin | e468977a17295daffef1ce9db0eabf0057b591f1 | ["MIT"] | null | null | null | test/ex_coin_test.exs | esdrasedu/ex_coin | e468977a17295daffef1ce9db0eabf0057b591f1 | ["MIT"] | null | null | null | test/ex_coin_test.exs | esdrasedu/ex_coin | e468977a17295daffef1ce9db0eabf0057b591f1 | ["MIT"] | null | null | null |
defmodule ExCoinTest do
use ExUnit.Case
doctest ExCoin
end
| 12.6 | 23 | 0.793651 |
f79b48bc2887145222d2c8da16fc50442f83f5dd | 3,346 | exs | Elixir | test/publisher/topic_selector_test.exs | sevenmind/KaufmannEx | 44225125946921850316c272db53175bb1658fb7 | ["MIT"] | 84 | 2018-03-20T08:19:10.000Z | 2022-01-30T07:40:56.000Z | test/publisher/topic_selector_test.exs | sevenmind/KaufmannEx | 44225125946921850316c272db53175bb1658fb7 | ["MIT"] | 23 | 2018-03-29T15:15:56.000Z | 2019-12-04T14:53:57.000Z | test/publisher/topic_selector_test.exs | sevenmind/KaufmannEx | 44225125946921850316c272db53175bb1658fb7 | ["MIT"] | 8 | 2018-07-03T18:18:27.000Z | 2022-03-08T14:04:09.000Z |
defmodule KaufmannEx.Publisher.TopicSelectorTest do
@moduledoc false
use ExUnit.Case
doctest KaufmannEx.Publisher.TopicSelector
alias KaufmannEx.Publisher.Request
alias KaufmannEx.Publisher.TopicSelector
alias KaufmannEx.Schemas.Event
import Mock
setup do
Application.put_env(:kaufmann_ex, :default_topic, "default_topic")
:ok
end
describe "resolve_topic/1" do
test "selects default topic when no topic specified" do
assert [
%KaufmannEx.Publisher.Request{
event_name: :whatever,
partition: nil,
topic: "default_topic"
}
] =
TopicSelector.resolve_topic(%Request{
event_name: :whatever,
payload: %{},
context: %{}
})
end
test "selects default topic when :default" do
assert [
%KaufmannEx.Publisher.Request{
context: %{},
encoded: nil,
event_name: :whatever,
format: :default,
metadata: nil,
partition: nil,
payload: %{},
topic: "default_topic"
}
] =
TopicSelector.resolve_topic(%Request{
event_name: :whatever,
payload: %{},
context: %{},
topic: :default
})
end
test "respects passed topic" do
assert [
%KaufmannEx.Publisher.Request{
partition: nil,
topic: "specified_topic"
}
] =
TopicSelector.resolve_topic(%Request{
event_name: :whatever,
topic: "specified_topic"
})
end
test "selects callback topic when :callback" do
assert [
%KaufmannEx.Publisher.Request{
partition: 0,
context: %{},
topic: "test_callback"
}
] =
TopicSelector.resolve_topic(%Request{
event_name: :whatever,
payload: %{},
topic: :callback,
context: %{
callback_topic: %{
topic: "test_callback",
partition: 0
}
}
})
end
test "when topic is :callback but no callback is specified in metadata" do
assert [] =
TopicSelector.resolve_topic(%Request{
event_name: :whatever,
payload: %{},
topic: :callback,
context: %{
callback_topic: nil
}
})
end
test "selects topic specifying topics and format" do
assert [
%KaufmannEx.Publisher.Request{
format: :json,
topic: "specified_topic"
}
] =
TopicSelector.resolve_topic(%Request{
event_name: :whatever,
payload: %{},
topic: %{
topic: "specified_topic",
format: :json
}
})
end
end
end
| 27.883333 | 78 | 0.450389 |
f79b65b9e6da53be210a24e82cdc597904fd8b42 | 801 | ex | Elixir | lib/repo/schema_type.ex | NeoPag/ex_audit | bd2bb4934330f62faa2ce3fd8b48f37f41e16890 | ["MIT"] | null | null | null | lib/repo/schema_type.ex | NeoPag/ex_audit | bd2bb4934330f62faa2ce3fd8b48f37f41e16890 | ["MIT"] | null | null | null | lib/repo/schema_type.ex | NeoPag/ex_audit | bd2bb4934330f62faa2ce3fd8b48f37f41e16890 | ["MIT"] | null | null | null |
defmodule ExAudit.Type.Schema do
@behaviour Ecto.Type
@schemas Application.get_env(:ex_audit, :tracked_schemas, [])
def cast(schema) when is_atom(schema) do
case schema do
schema when schema in @schemas -> {:ok, schema}
_ -> :error
end
end
def cast(schema) when is_binary(schema) do
load(schema)
end
def cast(_), do: :error
def load(schema) do
case get_schema_by_table(schema) do
nil -> :error
schema -> {:ok, schema}
end
end
def dump(schema) do
case schema do
schema when schema in @schemas -> {:ok, schema.__schema__(:source)}
_ -> :error
end
end
defp get_schema_by_table(table) do
Enum.find(@schemas, fn schema ->
schema.__schema__(:source) == table
end)
end
def type, do: :string
end | 20.025 | 73 | 0.636704 |
f79b6b35923ce1a1891f781cb81cf824ac5bd185 | 3,203 | exs | Elixir | mix.exs | adkron/cachex | b972e4ad42d8a6d51cea2831d1d0904291ecab78 | ["MIT"] | null | null | null | mix.exs | adkron/cachex | b972e4ad42d8a6d51cea2831d1d0904291ecab78 | ["MIT"] | null | null | null | mix.exs | adkron/cachex | b972e4ad42d8a6d51cea2831d1d0904291ecab78 | ["MIT"] | null | null | null |
defmodule Cachex.Mixfile do
use Mix.Project
@version "3.1.1"
@url_docs "http://hexdocs.pm/cachex"
@url_github "https://github.com/whitfin/cachex"
def project do
[
app: :cachex,
name: "Cachex",
description: "Powerful in-memory key/value storage for Elixir",
package: %{
files: [
"lib",
"mix.exs",
"LICENSE"
],
licenses: [ "MIT" ],
links: %{
"Docs" => @url_docs,
"GitHub" => @url_github
},
maintainers: [ "Isaac Whitfield" ]
},
version: @version,
elixir: "~> 1.2",
deps: deps(),
docs: [
source_ref: "v#{@version}",
source_url: @url_github,
main: "getting-started",
extra_section: "guides",
extras: [
"docs/features/cache-warming/reactive-warming.md",
"docs/features/cache-warming/proactive-warming.md",
"docs/features/action-blocks.md",
"docs/features/cache-limits.md",
"docs/features/custom-commands.md",
"docs/features/disk-interaction.md",
"docs/features/distributed-caches.md",
"docs/features/execution-hooks.md",
"docs/features/streaming-caches.md",
"docs/features/ttl-implementation.md",
"docs/migrations/migrating-to-v3.md",
"docs/migrations/migrating-to-v2.md",
"docs/getting-started.md"
],
groups_for_extras: [
"Features": Path.wildcard("docs/features/*.md"),
"Cache Warming": Path.wildcard("docs/features/cache-warming/*.md"),
"Migration": Path.wildcard("docs/migrations/*.md")
]
],
test_coverage: [
tool: ExCoveralls
],
preferred_cli_env: [
docs: :docs,
bench: :bench,
credo: :lint,
cachex: :test,
coveralls: :cover,
"coveralls.html": :cover,
"coveralls.travis": :cover
],
aliases: [
bench: "run benchmarks/main.exs"
]
]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[
applications: [:logger, :eternal],
mod: {Cachex.Application, []}
]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[
# Production dependencies
{ :eternal, "~> 1.2" },
{ :jumper, "~> 1.0" },
{ :unsafe, "~> 1.0" },
# Testing dependencies
{ :excoveralls, "~> 0.8", optional: true, only: [ :cover ] },
{ :local_cluster, "~> 1.0", optional: true, only: [ :cover, :test ] },
# Linting dependencies
{ :credo, "~> 0.8", optional: true, only: [ :lint ] },
# Benchmarking dependencies
{ :benchee, "~> 0.13", optional: true, only: [ :bench ] },
{ :benchee_html, "~> 0.5", optional: true, only: [ :bench ] },
# Documentation dependencies
{ :ex_doc, "~> 0.16", optional: true, only: [ :docs ] }
]
end
end
| 28.855856 | 77 | 0.538558 |
f79b7bad11a3873850314fb299d8129f4159993c | 1,185 | exs | Elixir | config/config.exs | lee-dohm/ed-explorer | 879a883a8143531bc657fa74e55f72ed36b3547e | ["MIT"] | 1 | 2020-01-26T18:07:51.000Z | 2020-01-26T18:07:51.000Z | config/config.exs | lee-dohm/ed-explorer | 879a883a8143531bc657fa74e55f72ed36b3547e | ["MIT"] | null | null | null | config/config.exs | lee-dohm/ed-explorer | 879a883a8143531bc657fa74e55f72ed36b3547e | ["MIT"] | null | null | null |
# This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :ed_explorer,
ecto_repos: [EdExplorer.Repo]
# Configures the endpoint
config :ed_explorer, EdExplorer.Web.Endpoint,
url: [host: "localhost"],
secret_key_base: "pbmNa/1/l88hXRrLFFCPPg4lAFEIrqb/PcwDO8Uz4toaAmjJ2nXMyirZK7b2xSH8",
render_errors: [view: EdExplorer.Web.ErrorView, accepts: ~w(html json)],
pubsub: [name: EdExplorer.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
config :phoenix, :template_engines,
slim: PhoenixSlime.Engine,
slime: PhoenixSlime.Engine
config :ed_explorer, :generators,
migration: true,
binary_id: true,
sample_binary_id: "11111111-1111-1111-1111-111111111111"
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 32.027027 | 86 | 0.766245 |
f79b7e531c913611eec7c67bf6ab3c6a6b2d40c2 | 3,500 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p4beta1_product.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p4beta1_product.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p4beta1_product.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1Product do
@moduledoc """
A Product contains ReferenceImages.
## Attributes
* `description` (*type:* `String.t`, *default:* `nil`) - User-provided metadata to be stored with this product. Must be at most 4096
characters long.
* `displayName` (*type:* `String.t`, *default:* `nil`) - The user-provided name for this Product. Must not be empty. Must be at most
4096 characters long.
* `name` (*type:* `String.t`, *default:* `nil`) - The resource name of the product.
Format is:
`projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
This field is ignored when creating a product.
* `productCategory` (*type:* `String.t`, *default:* `nil`) - The category for the product identified by the reference image. This should
be either "homegoods-v2", "apparel-v2", or "toys-v2". The legacy categories
"homegoods", "apparel", and "toys" are still supported, but these should
not be used for new products.
This field is immutable.
* `productLabels` (*type:* `list(GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1ProductKeyValue.t)`, *default:* `nil`) - Key-value pairs that can be attached to a product. At query time,
constraints can be specified based on the product_labels.
Note that integer values can be provided as strings, e.g. "1199". Only
strings with integer values can match a range-based restriction which is
to be supported soon.
Multiple values can be assigned to the same key. One product may have up to
500 product_labels.
Notice that the total number of distinct product_labels over all products
in one ProductSet cannot exceed 1M, otherwise the product search pipeline
will refuse to work for that ProductSet.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:description => String.t(),
:displayName => String.t(),
:name => String.t(),
:productCategory => String.t(),
:productLabels =>
list(GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1ProductKeyValue.t())
}
field(:description)
field(:displayName)
field(:name)
field(:productCategory)
field(
:productLabels,
as: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1ProductKeyValue,
type: :list
)
end
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1Product do
def decode(value, options) do
GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1Product.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1Product do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.325843 | 195 | 0.715714 |
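# Illustrative sketch, not part of the generated google-api client above: building the
# Product model struct directly. Field values are made up; productCategory must be one of
# the categories listed in the attribute docs.
product = %GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p4beta1Product{
  displayName: "Red running shoe",
  description: "Lightweight trail running shoe",
  productCategory: "apparel-v2"
}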
f79b8d5babde258ac04145d73c0e03e9daf99596 | 2,020 | ex | Elixir | clients/artifact_registry/lib/google_api/artifact_registry/v1beta1/model/yum_artifact.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/artifact_registry/lib/google_api/artifact_registry/v1beta1/model/yum_artifact.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/artifact_registry/lib/google_api/artifact_registry/v1beta1/model/yum_artifact.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ArtifactRegistry.V1beta1.Model.YumArtifact do
@moduledoc """
A detailed representation of a Yum artifact.
## Attributes
* `architecture` (*type:* `String.t`, *default:* `nil`) - Output only. Operating system architecture of the artifact.
* `name` (*type:* `String.t`, *default:* `nil`) - Output only. The Artifact Registry resource name of the artifact.
* `packageName` (*type:* `String.t`, *default:* `nil`) - Output only. The yum package name of the artifact.
* `packageType` (*type:* `String.t`, *default:* `nil`) - Output only. An artifact is a binary or source package.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:architecture => String.t() | nil,
:name => String.t() | nil,
:packageName => String.t() | nil,
:packageType => String.t() | nil
}
field(:architecture)
field(:name)
field(:packageName)
field(:packageType)
end
defimpl Poison.Decoder, for: GoogleApi.ArtifactRegistry.V1beta1.Model.YumArtifact do
def decode(value, options) do
GoogleApi.ArtifactRegistry.V1beta1.Model.YumArtifact.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ArtifactRegistry.V1beta1.Model.YumArtifact do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.071429 | 121 | 0.709406 |
f79bcf62a58d7dc9bea36a925a539f4d44e2f173 | 168 | ex | Elixir | web/views/current_user_view.ex | KazuCocoa/ex_torello | 187d814094f79a3d99bca2746683804333d40dfe | ["MIT"] | null | null | null | web/views/current_user_view.ex | KazuCocoa/ex_torello | 187d814094f79a3d99bca2746683804333d40dfe | ["MIT"] | null | null | null | web/views/current_user_view.ex | KazuCocoa/ex_torello | 187d814094f79a3d99bca2746683804333d40dfe | ["MIT"] | null | null | null |
defmodule ExTrello.CurrentUserView do
use ExTrello.Web, :view
def render("show.json", %{user: user}) do
user
end
def render("error.json", _) do
end
end
| 15.272727 | 43 | 0.672619 |
f79bd382c087baaf5c9375dab40ac776b9c4ffac | 271 | ex | Elixir | lib/groupher_server/billing/billing.ex | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | ["Apache-2.0"] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | lib/groupher_server/billing/billing.ex | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | ["Apache-2.0"] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | lib/groupher_server/billing/billing.ex | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | ["Apache-2.0"] | 22 | 2019-01-27T11:47:56.000Z | 2021-02-28T13:17:52.000Z |
defmodule GroupherServer.Billing do
@moduledoc false
alias GroupherServer.Billing.Delegate.CURD
defdelegate create_record(user, attrs), to: CURD
defdelegate paged_records(user, filter), to: CURD
defdelegate update_record_state(record_id, state), to: CURD
end
| 27.1 | 61 | 0.797048 |
f79bdc3cc293e83f563da253b82e944f2c461aa0 | 54 | exs | Elixir | config/test.exs | hqoss/pool_lad | 12c4db0296f113551e7cd79b05926b80b7aca858 | ["MIT"] | 2 | 2020-05-21T21:33:18.000Z | 2020-10-14T19:21:32.000Z | config/prod.exs | hqoss/pool_lad | 12c4db0296f113551e7cd79b05926b80b7aca858 | ["MIT"] | null | null | null | config/prod.exs | hqoss/pool_lad | 12c4db0296f113551e7cd79b05926b80b7aca858 | ["MIT"] | 1 | 2020-05-24T12:01:53.000Z | 2020-05-24T12:01:53.000Z |
import Config
config :logger, :console, level: :warn
| 13.5 | 38 | 0.740741 |
f79bde38d4efc9da43029e6e5f4273bf5d1ea9a9 | 394 | ex | Elixir | lib/farmbot/bot_state/transport/utils.ex | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | ["MIT"] | 2 | 2018-08-01T23:07:52.000Z | 2018-10-17T12:49:21.000Z | lib/farmbot/bot_state/transport/utils.ex | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | ["MIT"] | null | null | null | lib/farmbot/bot_state/transport/utils.ex | pdgonzalez872/farmbot_os | a444248f05ee8f4fe57f6a4865b942131960f76c | ["MIT"] | 1 | 2017-07-22T21:51:14.000Z | 2017-07-22T21:51:14.000Z |
defmodule Farmbot.BotState.Utils do
@moduledoc "Utility functions for handling bot_state data"
def should_log?(module, verbosity)
def should_log?(nil, verbosity) when verbosity <= 3, do: true
def should_log?(nil, _), do: false
def should_log?(module, verbosity) when verbosity <= 3 do
List.first(Module.split(module)) == "Farmbot"
end
def should_log?(_, _), do: false
end
| 28.142857 | 63 | 0.715736 |
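# Illustrative examples, not part of the farmbot_os source above, showing how
# should_log?/2 behaves: nil modules log at verbosity <= 3, and named modules must
# additionally live under the Farmbot namespace. Module names here are made up.
#
#   Farmbot.BotState.Utils.should_log?(nil, 1)               #=> true
#   Farmbot.BotState.Utils.should_log?(nil, 4)               #=> false
#   Farmbot.BotState.Utils.should_log?(Farmbot.BotState, 2)  #=> true  (top namespace is "Farmbot")
#   Farmbot.BotState.Utils.should_log?(SomeOther.Module, 2)  #=> false
#   Farmbot.BotState.Utils.should_log?(Farmbot.BotState, 9)  #=> false (verbosity > 3)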
f79be64595d5a7862547f10672afd9506f256a7f | 2,994 | ex | Elixir | lib/google_api/storage/v1/api/channels.ex | jamesvl/gcs_elixir_api | fd7e0f4a7e2e07f40e01822b4c9e09cfd4922d52 | ["Apache-2.0"] | null | null | null | lib/google_api/storage/v1/api/channels.ex | jamesvl/gcs_elixir_api | fd7e0f4a7e2e07f40e01822b4c9e09cfd4922d52 | ["Apache-2.0"] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | lib/google_api/storage/v1/api/channels.ex | jamesvl/gcs_elixir_api | fd7e0f4a7e2e07f40e01822b4c9e09cfd4922d52 | ["Apache-2.0"] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Storage.V1.Api.Channels do
@moduledoc """
API calls for all endpoints tagged `Channels`.
"""
alias GoogleApi.Storage.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Stop watching resources through this channel
## Parameters
* `connection` (*type:* `GoogleApi.Storage.V1.Connection.t`) - Connection to server
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:alt` (*type:* `String.t`) - Data format for the response.
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - An opaque string that represents a user for quota purposes. Must not exceed 40 characters.
* `:userIp` (*type:* `String.t`) - Deprecated. Please use quotaUser instead.
* `:resource` (*type:* `GoogleApi.Storage.V1.Model.Channel.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %{}}` on success
* `{:error, info}` on failure
"""
@spec storage_channels_stop(Tesla.Env.client(), keyword(), keyword()) ::
{:ok, nil} | {:ok, Tesla.Env.t()} | {:error, any()}
def storage_channels_stop(connection, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:alt => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:userIp => :query,
:resource => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/storage/v1/channels/stop", %{})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [decode: false])
end
end
| 39.394737 | 187 | 0.658317 |
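# Illustrative call sketch, not part of the gcs_elixir_api source above. The connection
# setup and the channel identifiers are assumptions; the call stops a previously created
# watch channel.
conn = GoogleApi.Storage.V1.Connection.new("ya29.example-oauth2-token")

{:ok, _response} =
  GoogleApi.Storage.V1.Api.Channels.storage_channels_stop(conn,
    resource: %GoogleApi.Storage.V1.Model.Channel{
      id: "my-watch-channel-id",
      resourceId: "opaque-resource-id"
    }
  )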
f79c037dad0fc4c295d7d2fd2bcbc101d6a48ff1 | 15,035 | ex | Elixir | lib/aws/generated/chime_sdk_identity.ex | andrewhr/aws-elixir | 861dc2fafca50a2b2f83badba4cdcb44b5b0c171 | ["Apache-2.0"] | null | null | null | lib/aws/generated/chime_sdk_identity.ex | andrewhr/aws-elixir | 861dc2fafca50a2b2f83badba4cdcb44b5b0c171 | ["Apache-2.0"] | null | null | null | lib/aws/generated/chime_sdk_identity.ex | andrewhr/aws-elixir | 861dc2fafca50a2b2f83badba4cdcb44b5b0c171 | ["Apache-2.0"] | null | null | null |
# WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.ChimeSDKIdentity do
@moduledoc """
The Amazon Chime SDK Identity APIs in this section allow software developers to
create and manage unique instances of their messaging applications.
These APIs provide the overarching framework for creating and sending messages.
For more information about the identity APIs, refer to [Amazon Chime SDK identity](https://docs.aws.amazon.com/chime/latest/APIReference/API_Operations_Amazon_Chime_SDK_Identity.html).
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: nil,
api_version: "2021-04-20",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "identity-chime",
global?: false,
protocol: "rest-json",
service_id: "Chime SDK Identity",
signature_version: "v4",
signing_name: "chime",
target_prefix: nil
}
end
@doc """
Creates an Amazon Chime SDK messaging `AppInstance` under an AWS account.
Only SDK messaging customers use this API. `CreateAppInstance` supports
idempotency behavior as described in the AWS API Standard.
identity
"""
def create_app_instance(%Client{} = client, input, options \\ []) do
url_path = "/app-instances"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Promotes an `AppInstanceUser` to an `AppInstanceAdmin`.
The promoted user can perform the following actions.
* `ChannelModerator` actions across all channels in the
`AppInstance`.
* `DeleteChannelMessage` actions.
Only an `AppInstanceUser` can be promoted to an `AppInstanceAdmin` role.
"""
def create_app_instance_admin(%Client{} = client, app_instance_arn, input, options \\ []) do
url_path = "/app-instances/#{AWS.Util.encode_uri(app_instance_arn)}/admins"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Creates a user under an Amazon Chime `AppInstance`.
The request consists of a unique `appInstanceUserId` and `Name` for that user.
"""
def create_app_instance_user(%Client{} = client, input, options \\ []) do
url_path = "/app-instance-users"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Deletes an `AppInstance` and all associated data asynchronously.
"""
def delete_app_instance(%Client{} = client, app_instance_arn, input, options \\ []) do
url_path = "/app-instances/#{AWS.Util.encode_uri(app_instance_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Demotes an `AppInstanceAdmin` to an `AppInstanceUser`.
This action does not delete the user.
"""
def delete_app_instance_admin(
%Client{} = client,
app_instance_admin_arn,
app_instance_arn,
input,
options \\ []
) do
url_path =
"/app-instances/#{AWS.Util.encode_uri(app_instance_arn)}/admins/#{AWS.Util.encode_uri(app_instance_admin_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Deletes an `AppInstanceUser`.
"""
def delete_app_instance_user(%Client{} = client, app_instance_user_arn, input, options \\ []) do
url_path = "/app-instance-users/#{AWS.Util.encode_uri(app_instance_user_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Deregisters an `AppInstanceUserEndpoint`.
"""
def deregister_app_instance_user_endpoint(
%Client{} = client,
app_instance_user_arn,
endpoint_id,
input,
options \\ []
) do
url_path =
"/app-instance-users/#{AWS.Util.encode_uri(app_instance_user_arn)}/endpoints/#{AWS.Util.encode_uri(endpoint_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:delete,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Returns the full details of an `AppInstance`.
"""
def describe_app_instance(%Client{} = client, app_instance_arn, options \\ []) do
url_path = "/app-instances/#{AWS.Util.encode_uri(app_instance_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns the full details of an `AppInstanceAdmin`.
"""
def describe_app_instance_admin(
%Client{} = client,
app_instance_admin_arn,
app_instance_arn,
options \\ []
) do
url_path =
"/app-instances/#{AWS.Util.encode_uri(app_instance_arn)}/admins/#{AWS.Util.encode_uri(app_instance_admin_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns the full details of an `AppInstanceUser`.
"""
def describe_app_instance_user(%Client{} = client, app_instance_user_arn, options \\ []) do
url_path = "/app-instance-users/#{AWS.Util.encode_uri(app_instance_user_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Returns the full details of an `AppInstanceUserEndpoint`.
"""
def describe_app_instance_user_endpoint(
%Client{} = client,
app_instance_user_arn,
endpoint_id,
options \\ []
) do
url_path =
"/app-instance-users/#{AWS.Util.encode_uri(app_instance_user_arn)}/endpoints/#{AWS.Util.encode_uri(endpoint_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Gets the retention settings for an `AppInstance`.
"""
def get_app_instance_retention_settings(%Client{} = client, app_instance_arn, options \\ []) do
url_path = "/app-instances/#{AWS.Util.encode_uri(app_instance_arn)}/retention-settings"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Returns a list of the administrators in the `AppInstance`.
"""
def list_app_instance_admins(
%Client{} = client,
app_instance_arn,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/app-instances/#{AWS.Util.encode_uri(app_instance_arn)}/admins"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"next-token", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"max-results", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Lists all the `AppInstanceUserEndpoints` created under a single
`AppInstanceUser`.
"""
def list_app_instance_user_endpoints(
%Client{} = client,
app_instance_user_arn,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/app-instance-users/#{AWS.Util.encode_uri(app_instance_user_arn)}/endpoints"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"next-token", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"max-results", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
List all `AppInstanceUsers` created under a single `AppInstance`.
"""
def list_app_instance_users(
%Client{} = client,
app_instance_arn,
max_results \\ nil,
next_token \\ nil,
options \\ []
) do
url_path = "/app-instance-users"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"next-token", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"max-results", max_results} | query_params]
else
query_params
end
query_params =
if !is_nil(app_instance_arn) do
[{"app-instance-arn", app_instance_arn} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists all Amazon Chime `AppInstance`s created under a single AWS account.
"""
def list_app_instances(%Client{} = client, max_results \\ nil, next_token \\ nil, options \\ []) do
url_path = "/app-instances"
headers = []
query_params = []
query_params =
if !is_nil(next_token) do
[{"next-token", next_token} | query_params]
else
query_params
end
query_params =
if !is_nil(max_results) do
[{"max-results", max_results} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
nil
)
end
@doc """
Lists the tags applied to an Amazon Chime SDK identity resource.
"""
def list_tags_for_resource(%Client{} = client, resource_arn, options \\ []) do
url_path = "/tags"
headers = []
query_params = []
query_params =
if !is_nil(resource_arn) do
[{"arn", resource_arn} | query_params]
else
query_params
end
Request.request_rest(
client,
metadata(),
:get,
url_path,
query_params,
headers,
nil,
options,
200
)
end
@doc """
Sets the amount of time in days that a given `AppInstance` retains data.
"""
def put_app_instance_retention_settings(
%Client{} = client,
app_instance_arn,
input,
options \\ []
) do
url_path = "/app-instances/#{AWS.Util.encode_uri(app_instance_arn)}/retention-settings"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Registers an endpoint under an Amazon Chime `AppInstanceUser`.
The endpoint receives messages for a user. For push notifications, the endpoint
is a mobile device used to receive mobile push notifications for a user.
"""
def register_app_instance_user_endpoint(
%Client{} = client,
app_instance_user_arn,
input,
options \\ []
) do
url_path = "/app-instance-users/#{AWS.Util.encode_uri(app_instance_user_arn)}/endpoints"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
201
)
end
@doc """
Applies the specified tags to the specified Amazon Chime SDK identity resource.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
url_path = "/tags?operation=tag-resource"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Removes the specified tags from the specified Amazon Chime SDK identity
resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
url_path = "/tags?operation=untag-resource"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:post,
url_path,
query_params,
headers,
input,
options,
204
)
end
@doc """
Updates `AppInstance` metadata.
"""
def update_app_instance(%Client{} = client, app_instance_arn, input, options \\ []) do
url_path = "/app-instances/#{AWS.Util.encode_uri(app_instance_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Updates the details of an `AppInstanceUser`.
You can update names and metadata.
"""
def update_app_instance_user(%Client{} = client, app_instance_user_arn, input, options \\ []) do
url_path = "/app-instance-users/#{AWS.Util.encode_uri(app_instance_user_arn)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
@doc """
Updates the details of an `AppInstanceUserEndpoint`.
You can update the name and `AllowMessage` values.
"""
def update_app_instance_user_endpoint(
%Client{} = client,
app_instance_user_arn,
endpoint_id,
input,
options \\ []
) do
url_path =
"/app-instance-users/#{AWS.Util.encode_uri(app_instance_user_arn)}/endpoints/#{AWS.Util.encode_uri(endpoint_id)}"
headers = []
query_params = []
Request.request_rest(
client,
metadata(),
:put,
url_path,
query_params,
headers,
input,
options,
200
)
end
end
| 21.509299 | 186 | 0.597406 |
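# Illustrative call sketch, not part of the aws-elixir source above. Credential setup and
# field values are assumptions; aws-elixir functions generally return
# {:ok, parsed_body, raw_response} on success.
client = AWS.Client.create("AKIAEXAMPLE", "example-secret-key", "us-east-1")

{:ok, _app_instance, _raw_response} =
  AWS.ChimeSDKIdentity.create_app_instance(client, %{
    "Name" => "my-messaging-app",
    "ClientRequestToken" => "1a2b3c4d-0000-4000-8000-000000000000"
  })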
f79c1dc8e317d8a545a9ed26f1f69a430c4d031c | 1,190 | ex | Elixir | lib/config.ex | esl/buildex_jobs | 928d36541c25fa0ad998278cc7fe609644562140 | ["Apache-2.0"] | 2 | 2020-05-28T12:23:00.000Z | 2021-03-11T23:23:57.000Z | lib/config.ex | esl/buildex_jobs | 928d36541c25fa0ad998278cc7fe609644562140 | ["Apache-2.0"] | null | null | null | lib/config.ex | esl/buildex_jobs | 928d36541c25fa0ad998278cc7fe609644562140 | ["Apache-2.0"] | 2 | 2021-03-11T23:27:28.000Z | 2022-03-06T10:20:51.000Z |
defmodule RepoJobs.Config do
def get_github_access_token do
Application.get_env(:buildex_jobs, :github_auth) || System.get_env("GITHUB_AUTH")
end
def get_consumers do
Application.get_env(:buildex_jobs, :consumers)
end
def get_connection_pool_config do
Application.get_env(:buildex_jobs, :rabbitmq_conn_pool, [])
end
def get_connection_pool_id do
{_, pool_id} =
get_connection_pool_config()
|> Keyword.fetch!(:name)
pool_id
end
def get_rabbitmq_config do
Application.get_env(:buildex_jobs, :rabbitmq_config, [])
end
def get_rabbitmq_queue do
Application.fetch_env!(:buildex_jobs, :queue)
end
def get_rabbitmq_exchange do
Application.fetch_env!(:buildex_jobs, :exchange)
end
def get_rabbitmq_client do
get_rabbitmq_config()
|> Keyword.get(:adapter, ExRabbitPool.RabbitMQ)
end
def get_rabbitmq_reconnection_interval do
get_rabbitmq_config()
|> Keyword.get(:reconnect, 5000)
end
def temp_dir do
Application.get_env(:buildex_jobs, :tmp_dir, System.tmp_dir!())
end
def get_database do
Application.get_env(:buildex_jobs, :database, Buildex.Common.Services.Database)
end
end
| 22.884615 | 85 | 0.736975 |
f79c3cecca34f6c8d1cc0cd39b3aee8421463741 | 950 | ex | Elixir | lib/posthog.ex | whitepaperclip/posthog | 695f2f39fa0d424fbd3e8f7adfc42db054c0d6e4 | ["MIT"] | 8 | 2020-06-06T20:33:53.000Z | 2022-02-28T11:31:26.000Z | lib/posthog.ex | timgl/posthog-1 | 695f2f39fa0d424fbd3e8f7adfc42db054c0d6e4 | ["MIT"] | null | null | null | lib/posthog.ex | timgl/posthog-1 | 695f2f39fa0d424fbd3e8f7adfc42db054c0d6e4 | ["MIT"] | 2 | 2020-06-08T10:20:33.000Z | 2020-06-08T10:28:01.000Z |
defmodule Posthog do
@moduledoc """
This module provides an Elixir HTTP client for Posthog.
Example config:
config :posthog,
api_url: "http://posthog.example.com",
api_key: "..."
Optionally, you can pass in a `:json_library` key. The default JSON parser
is Jason.
"""
@doc """
Sends a capture event. `distinct_id` is the only required parameter.
## Examples
iex> Posthog.capture("login", distinct_id: user.id)
:ok
iex> Posthog.capture("login", [distinct_id: user.id], DateTime.utc_now())
:ok
"""
@typep result() :: {:ok, term()} | {:error, term()}
@typep timestamp() :: DateTime.t() | NaiveDateTime.t() | String.t() | nil
@spec capture(atom() | String.t(), keyword() | map(), timestamp()) :: result()
defdelegate capture(event, params, timestamp \\ nil), to: Posthog.Client
@spec batch(list(tuple())) :: result()
defdelegate batch(events), to: Posthog.Client
end
| 27.142857 | 80 | 0.633684 |
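# Illustrative sketch, not part of the posthog source above: a batch call whose tuples are
# assumed to mirror capture/3's arguments (event, params, timestamp). Event names and ids
# are made up.
Posthog.batch([
  {"user signed up", [distinct_id: "user-123", plan: "free"], nil},
  {"project created", [distinct_id: "user-123"], DateTime.utc_now()}
])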
f79c75df7d79b85759026623046532a779de58c2 | 470 | exs | Elixir | src/auth_service/test/auth_service_web/views/error_view_test.exs | wbpascal/statuswebsite | 7a81e530a9176c53abeab0582cb710113101b716 | ["MIT"] | 1 | 2021-04-18T20:21:03.000Z | 2021-04-18T20:21:03.000Z | src/auth_service/test/auth_service_web/views/error_view_test.exs | wbpascal/statuswebsite | 7a81e530a9176c53abeab0582cb710113101b716 | ["MIT"] | null | null | null | src/auth_service/test/auth_service_web/views/error_view_test.exs | wbpascal/statuswebsite | 7a81e530a9176c53abeab0582cb710113101b716 | ["MIT"] | 1 | 2021-04-18T20:21:08.000Z | 2021-04-18T20:21:08.000Z |
defmodule AuthServiceWeb.ErrorViewTest do
use AuthServiceWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.json" do
assert render(AuthServiceWeb.ErrorView, "404.json", []) == %{errors: %{detail: "Not Found"}}
end
test "renders 500.json" do
assert render(AuthServiceWeb.ErrorView, "500.json", []) ==
%{errors: %{detail: "Internal Server Error"}}
end
end
| 29.375 | 96 | 0.693617 |
f79ca5f4962ee0ec449cb917543f20381455615f | 1,776 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/model/platform_type.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/model/platform_type.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v28/model/platform_type.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null |
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.DFAReporting.V28.Model.PlatformType do
@moduledoc """
Contains information about a platform type that can be targeted by ads.
## Attributes
- id (String.t): ID of this platform type. Defaults to: `null`.
- kind (String.t): Identifies what kind of resource this is. Value: the fixed string \"dfareporting#platformType\". Defaults to: `null`.
- name (String.t): Name of this platform type. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:id => any(),
:kind => any(),
:name => any()
}
field(:id)
field(:kind)
field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V28.Model.PlatformType do
def decode(value, options) do
GoogleApi.DFAReporting.V28.Model.PlatformType.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V28.Model.PlatformType do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.888889 | 148 | 0.722973 |
f79cd3f790203678339bc8cf3d2f78c20fd5f007 | 257 | ex | Elixir | lib/clecodes_ex.ex | treetopdevs/distillery-aws-example | aa09b96490dd2d1052d1e580f02e3328b1b366bc | [
"Apache-2.0"
] | null | null | null | lib/clecodes_ex.ex | treetopdevs/distillery-aws-example | aa09b96490dd2d1052d1e580f02e3328b1b366bc | [
"Apache-2.0"
] | null | null | null | lib/clecodes_ex.ex | treetopdevs/distillery-aws-example | aa09b96490dd2d1052d1e580f02e3328b1b366bc | [
"Apache-2.0"
] | null | null | null | defmodule ClecodesEx do
@moduledoc """
ClecodesEx keeps the contexts that define your domain
and business logic.
Contexts are also responsible for managing your data, regardless
if it comes from the database, an external API or others.
"""
end
| 25.7 | 66 | 0.758755 |
f79d0a329e42b8adf6f4bd9cdbbff3f8dbf6d9ca | 347 | exs | Elixir | test/entry_test.exs | stephancom/xkcd_287-elixir | e7cd1b3630430c409c93a6b329d843da507f0019 | [
"MIT"
] | null | null | null | test/entry_test.exs | stephancom/xkcd_287-elixir | e7cd1b3630430c409c93a6b329d843da507f0019 | [
"MIT"
] | null | null | null | test/entry_test.exs | stephancom/xkcd_287-elixir | e7cd1b3630430c409c93a6b329d843da507f0019 | [
"MIT"
] | null | null | null | defmodule EntryTest do
use ExUnit.Case
doctest Entry
test "forbids zero quantity" do
assert_raise FunctionClauseError, fn ->
Entry.new(Item.parse("caviar,$99.99"), 0)
end
end
test "forbids negative quantity" do
assert_raise FunctionClauseError, fn ->
Entry.new(Item.parse("caviar,$99.99"), -2)
end
end
end
| 21.6875 | 48 | 0.680115 |
f79d155cef948a12107a0983243923c783a09d1d | 8,358 | ex | Elixir | lib/elixir/lib/kernel/lexical_tracker.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 2 | 2018-11-15T06:38:14.000Z | 2018-11-17T18:03:14.000Z | lib/elixir/lib/kernel/lexical_tracker.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:36:45.000Z | 2018-09-10T23:36:45.000Z | lib/elixir/lib/kernel/lexical_tracker.ex | kenichi/elixir | 8c27da88c70623cbe516d5310c885943395a82a2 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:32:56.000Z | 2018-09-10T23:32:56.000Z | # This is an Elixir module responsible for tracking references
# to modules, remote dispatches, and the usage of
# aliases/imports/requires in the Elixir scope.
#
# Note that since this is required for bootstrap, we can't use
# any of the `GenServer.Behaviour` conveniences.
defmodule Kernel.LexicalTracker do
@moduledoc false
@timeout 30000
@behaviour :gen_server
@doc """
Returns all remotes referenced in this lexical scope.
"""
def remote_references(arg) do
:gen_server.call(to_pid(arg), :remote_references, @timeout)
end
@doc """
Returns all remote dispatches in this lexical scope.
"""
def remote_dispatches(arg) do
:gen_server.call(to_pid(arg), :remote_dispatches, @timeout)
end
defp to_pid(pid) when is_pid(pid), do: pid
defp to_pid(mod) when is_atom(mod) do
{set, _} = :elixir_module.data_tables(mod)
:ets.lookup_element(set, {:elixir, :lexical_tracker}, 2)
end
# Internal API
# Starts the tracker and returns its PID.
@doc false
def start_link() do
:gen_server.start_link(__MODULE__, :ok, [])
end
@doc false
def stop(pid) do
:gen_server.cast(pid, :stop)
end
@doc false
def add_import(pid, module, fas, line, warn) when is_atom(module) do
:gen_server.cast(pid, {:add_import, module, fas, line, warn})
end
@doc false
def add_alias(pid, module, line, warn) when is_atom(module) do
:gen_server.cast(pid, {:add_alias, module, line, warn})
end
@doc false
def remote_reference(pid, module, mode) when is_atom(module) do
:gen_server.cast(pid, {:remote_reference, module, mode})
end
@doc false
def remote_dispatch(pid, module, fa, line, mode) when is_atom(module) do
:gen_server.cast(pid, {:remote_dispatch, module, fa, line, mode})
end
@doc false
def remote_struct(pid, module, line) when is_atom(module) do
:gen_server.cast(pid, {:remote_struct, module, line})
end
@doc false
def import_dispatch(pid, module, fa, line, mode) when is_atom(module) do
:gen_server.cast(pid, {:import_dispatch, module, fa, line, mode})
end
@doc false
def alias_dispatch(pid, module) when is_atom(module) do
:gen_server.cast(pid, {:alias_dispatch, module})
end
@doc false
def set_file(pid, file) do
:gen_server.cast(pid, {:set_file, file})
end
@doc false
def reset_file(pid) do
:gen_server.cast(pid, :reset_file)
end
@doc false
def write_cache(pid, value) do
key = :erlang.unique_integer()
:gen_server.cast(pid, {:write_cache, key, value})
key
end
@doc false
def read_cache(pid, key) do
:gen_server.call(pid, {:read_cache, key}, @timeout)
end
@doc false
def collect_unused_imports(pid) do
unused(pid, :import)
end
@doc false
def collect_unused_aliases(pid) do
unused(pid, :alias)
end
defp unused(pid, tag) do
:gen_server.call(pid, {:unused, tag}, @timeout)
end
# Callbacks
def init(:ok) do
state = %{
directives: %{},
references: %{},
compile: %{},
runtime: %{},
structs: %{},
cache: %{},
file: nil
}
{:ok, state}
end
@doc false
def handle_call({:unused, tag}, _from, state) do
directives =
for {{^tag, module_or_mfa}, marker} <- state.directives, is_integer(marker) do
{module_or_mfa, marker}
end
{:reply, Enum.sort(directives), state}
end
def handle_call(:remote_references, _from, state) do
{compile, runtime} = partition(:maps.to_list(state.references), [], [])
{:reply, {compile, :maps.keys(state.structs), runtime}, state}
end
def handle_call(:remote_dispatches, _from, state) do
{:reply, {state.compile, state.runtime}, state}
end
def handle_call({:read_cache, key}, _from, %{cache: cache} = state) do
{:reply, :maps.get(key, cache), state}
end
def handle_cast({:write_cache, key, value}, %{cache: cache} = state) do
{:noreply, %{state | cache: :maps.put(key, value, cache)}}
end
def handle_cast({:remote_reference, module, mode}, state) do
{:noreply, %{state | references: add_reference(state.references, module, mode)}}
end
def handle_cast({:remote_struct, module, line}, state) do
state = add_remote_dispatch(state, module, {:__struct__, 0}, line, :compile)
structs = :maps.put(module, true, state.structs)
{:noreply, %{state | structs: structs}}
end
def handle_cast({:remote_dispatch, module, fa, line, mode}, state) do
references = add_reference(state.references, module, mode)
state = add_remote_dispatch(state, module, fa, line, mode)
{:noreply, %{state | references: references}}
end
def handle_cast({:import_dispatch, module, {function, arity} = fa, line, mode}, state) do
state =
state
|> add_import_dispatch(module, function, arity)
|> add_remote_dispatch(module, fa, line, mode)
{:noreply, state}
end
def handle_cast({:alias_dispatch, module}, state) do
{:noreply, %{state | directives: add_dispatch(state.directives, module, :alias)}}
end
def handle_cast({:set_file, file}, state) do
{:noreply, %{state | file: file}}
end
def handle_cast(:reset_file, state) do
{:noreply, %{state | file: nil}}
end
def handle_cast({:add_import, module, fas, line, warn}, state) do
directives =
state.directives
|> Enum.reject(&match?({{:import, {^module, _, _}}, _}, &1))
|> :maps.from_list()
|> add_directive(module, line, warn, :import)
directives =
Enum.reduce(fas, directives, fn {function, arity}, directives ->
add_directive(directives, {module, function, arity}, line, warn, :import)
end)
{:noreply, %{state | directives: directives}}
end
def handle_cast({:add_alias, module, line, warn}, state) do
{:noreply, %{state | directives: add_directive(state.directives, module, line, warn, :alias)}}
end
def handle_cast(:stop, state) do
{:stop, :normal, state}
end
@doc false
def handle_info(_msg, state) do
{:noreply, state}
end
@doc false
def terminate(_reason, _state) do
:ok
end
@doc false
def code_change(_old, state, _extra) do
{:ok, state}
end
defp partition([{remote, :compile} | t], compile, runtime),
do: partition(t, [remote | compile], runtime)
defp partition([{remote, :runtime} | t], compile, runtime),
do: partition(t, compile, [remote | runtime])
defp partition([], compile, runtime), do: {compile, runtime}
# Callbacks helpers
defp add_reference(references, module, :compile) when is_atom(module),
do: :maps.put(module, :compile, references)
defp add_reference(references, module, :runtime) when is_atom(module) do
case :maps.find(module, references) do
{:ok, _} -> references
:error -> :maps.put(module, :runtime, references)
end
end
defp add_remote_dispatch(state, module, fa, line, mode) when is_atom(module) do
location = location(state.file, line)
map_update(mode, %{module => %{fa => [location]}}, state, fn mode_dispatches ->
map_update(module, %{fa => [location]}, mode_dispatches, fn module_dispatches ->
map_update(fa, [location], module_dispatches, &[location | List.delete(&1, location)])
end)
end)
end
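  # The resulting `compile`/`runtime` maps in the state have the shape
  # %{module => %{{function, arity} => [location, ...]}}, where each location
  # is `{file, line}` when a file was set via set_file/2 and just the line
  # otherwise (see location/2 below).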
defp location(nil, line), do: line
defp location(file, line), do: {file, line}
defp add_import_dispatch(state, module, function, arity) do
directives =
add_dispatch(state.directives, module, :import)
|> add_dispatch({module, function, arity}, :import)
# Always compile time because we depend
# on the module at compile time
references = add_reference(state.references, module, :compile)
%{state | directives: directives, references: references}
end
# In the map we keep imports and aliases.
# If the value is a line, it was imported/aliased and has a pending warning
# If the value is true, it was imported/aliased and used
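  # For example, an unused `alias Foo.Bar` on line 3 (with warnings enabled) is
  # stored as %{{:alias, Foo.Bar} => 3}; once the alias is actually dispatched,
  # the entry is overwritten with true and no warning is reported for it.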
defp add_directive(directives, module_or_mfa, line, warn, tag) do
marker = if warn, do: line, else: true
:maps.put({tag, module_or_mfa}, marker, directives)
end
defp add_dispatch(directives, module_or_mfa, tag) do
:maps.put({tag, module_or_mfa}, true, directives)
end
defp map_update(key, initial, map, fun) do
case :maps.find(key, map) do
{:ok, val} -> :maps.put(key, fun.(val), map)
:error -> :maps.put(key, initial, map)
end
end
end
| 27.953177 | 98 | 0.666427 |
f79d1c64535de0a36d7d8869a6940c73fcca40a1 | 2,910 | ex | Elixir | lib/hive/mq/node_agent.ex | taran96/HiveNode | 3bac148e138fd5dda0b23985020524d807c59710 | [
"MIT"
] | null | null | null | lib/hive/mq/node_agent.ex | taran96/HiveNode | 3bac148e138fd5dda0b23985020524d807c59710 | [
"MIT"
] | null | null | null | lib/hive/mq/node_agent.ex | taran96/HiveNode | 3bac148e138fd5dda0b23985020524d807c59710 | [
"MIT"
] | null | null | null | defmodule Hive.MQ.NodeAgent do
use Agent
require Logger
@moduledoc """
This agent is responsible to keep track of all nodes that are connected
to the network.
"""
@doc """
Starts the agent with an empty map
"""
def start_link(opts \\ []) do
Agent.start_link(fn -> %{} end, opts)
end
@doc """
This endpoint is used to add a node to the agent. This function only
accepts a `%Hive.MQ.Message.Greet{}`, otherwise it returns
`{:error, :wrngmsg}`. If the entry of the same hostname exists then
the existing entry is updated. It returns `:ok` on successful additions.
"""
def add(pid, greet_msg) do
if greet_msg.__struct__ == Hive.MQ.Message.Greet do
%{hostname: hostname} = greet_msg
node_info = greet_msg
|> Map.from_struct
|> Map.delete(:reply)
Logger.info "Adding #{hostname} to Node Agent: " <> inspect pid
Agent.update(pid, &Map.put(&1, hostname, node_info))
else
{:error, :wrngmsg}
end
end
@doc """
This endpoint gets the requested node based on the node's hostname. A map
containing similar fields to the `Hive.MQ.Message.Greet` struct is returned
if found. If the hostname is not found then `:notfound` is returned.
"""
def get(pid, key) do
getter = fn map ->
case Map.fetch(map, key) do
{:ok, value} -> value
:error -> :notfound
end
end
    Agent.get(pid, getter)
end
defp getIPAddress() do
interface = Application.get_env(:hive, :interface, "lo")
{:ok, lst} = :inet.getifaddrs
getIPAddress(interface, lst)
end
defp getIPAddress(interface, [head | tail]) do
case head do
{^interface, lst} ->
{a, b, c, d} = Keyword.get(lst, :addr)
"#{a}.#{b}.#{c}.#{d}"
_ -> getIPAddress(interface, tail)
end
end
defp getIPAddress(_interface, []) do
:notfound
end
@doc """
This endpoint just registers the current node in the agent's state. It takes
parameters based on the RabbitMQ connection and is supposed to be called
when the connection to RabbitMQ is established.
"""
def registerSelf(pid, exchange, queue, routing_key) do
os_version = case :os.version do
{maj, min, _} -> "#{maj}.#{min}"
version -> inspect version
end
os = case :os.type do
{:unix, type} -> "#{type}"
type -> inspect type
end
{:ok, hostname} = :inet.gethostname
greet = %Hive.MQ.Message.Greet{
routing_key: routing_key,
hostname: hostname,
ip_address: getIPAddress(),
exchange: exchange,
queue: queue,
os: os,
os_version: os_version,
      purpose: Application.get_env(:hive, :purpose, "UNKNOWN")
}
add(pid, greet)
end
@doc """
This endpoint is a simple get of the current node
"""
def getSelf(pid) do
{:ok, hostname} = :inet.gethostname
get(pid, hostname)
end
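  # Usage sketch (illustrative values; in practice the agent runs under a
  # supervisor and registerSelf/4 is invoked once the RabbitMQ connection is
  # established):
  #
  #     {:ok, agent} = Hive.MQ.NodeAgent.start_link()
  #     Hive.MQ.NodeAgent.registerSelf(agent, "hive.exchange", "hive.node1", "node1")
  #     Hive.MQ.NodeAgent.getSelf(agent)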
end
| 26.216216 | 78 | 0.623024 |
f79d256405ba6bbbd47f124f7b5fbafe698c48de | 5,903 | ex | Elixir | lib/mix/lib/mix/tasks/deps.compile.ex | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/deps.compile.ex | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | lib/mix/lib/mix/tasks/deps.compile.ex | Tica2/elixir | 6cf1dcbfe4572fc75619f05e40c10fd0844083ef | [
"Apache-2.0"
] | null | null | null | defmodule Mix.Tasks.Deps.Compile do
use Mix.Task
@shortdoc "Compile dependencies"
@moduledoc """
Compile dependencies.
By default, compile all dependencies. A list of dependencies can
be given to force the compilation of specific dependencies.
This task attempts to detect if the project contains one of
the following files and act accordingly:
* `mix.exs` - invokes `mix compile`
* `rebar.config` - invokes `rebar compile`
* `Makefile.win` - invokes `nmake /F Makefile.win` (only on Windows)
* `Makefile` - invokes `make` (except on Windows)
The compilation can be customized by passing a `compile` option
in the dependency:
{:some_dependency, "0.1.0", compile: "command to compile"}
"""
import Mix.Dep, only: [loaded: 1, available?: 1, loaded_by_name: 2,
format_dep: 1, make?: 1, mix?: 1, rebar?: 1]
@spec run(OptionParser.argv) :: :ok
def run(args) do
Mix.Project.get!
case OptionParser.parse(args) do
{_, [], _} ->
# Because this command is invoked explicitly with
# deps.compile, we simply try to compile any available
# dependency.
compile(Enum.filter(loaded(env: Mix.env), &available?/1))
{_, tail, _} ->
compile(loaded_by_name(tail, env: Mix.env))
end
end
@doc false
def compile(deps) do
shell = Mix.shell
config = Mix.Project.deps_config
Mix.Task.run "deps.loadpaths"
compiled =
Enum.map(deps, fn %Mix.Dep{app: app, status: status, opts: opts, scm: scm} = dep ->
check_unavailable!(app, status)
compiled = cond do
not is_nil(opts[:compile]) ->
do_compile dep, config
mix?(dep) ->
do_mix dep, config
rebar?(dep) ->
do_rebar dep, config
make?(dep) ->
do_make dep, config
true ->
shell.error "Could not compile #{app}, no mix.exs, rebar.config or Makefile " <>
"(pass :compile as an option to customize compilation, set it to false to do nothing)"
end
unless mix?(dep), do: build_structure(dep, config)
touch_fetchable(scm, opts[:build])
compiled
end)
if Enum.any?(compiled), do: Mix.Dep.Lock.touch, else: :ok
end
defp touch_fetchable(scm, path) do
if scm.fetchable? do
File.mkdir_p!(path)
File.touch!(Path.join(path, ".compile.fetch"))
end
end
defp check_unavailable!(app, {:unavailable, _}) do
Mix.raise "Cannot compile dependency #{app} because " <>
"it isn't available, run `mix deps.get` first"
end
defp check_unavailable!(_, _) do
:ok
end
defp do_mix(dep, _config) do
Mix.Dep.in_dependency dep, fn _ ->
if req = old_elixir_req(Mix.Project.config) do
Mix.shell.error "warning: the dependency #{dep.app} requires Elixir #{inspect req} " <>
"but you are running on v#{System.version}"
end
try do
res = Mix.Task.run("compile", ["--no-deps", "--no-elixir-version-check"])
:ok in List.wrap(res)
catch
kind, reason ->
stacktrace = System.stacktrace
app = dep.app
Mix.shell.error "could not compile dependency #{app}, mix compile failed. " <>
"You can recompile this dependency with `mix deps.compile #{app}` or " <>
"update it with `mix deps.update #{app}`"
:erlang.raise(kind, reason, stacktrace)
end
end
end
defp do_rebar(%Mix.Dep{app: app} = dep, config) do
lib_path = Path.join(config[:build_path], "lib")
do_command dep, config, rebar_cmd(app), false,
"compile skip_deps=true deps_dir=#{inspect lib_path}"
end
defp rebar_cmd(app) do
Mix.Rebar.rebar_cmd || handle_rebar_not_found(app)
end
defp handle_rebar_not_found(app) do
shell = Mix.shell
shell.info "Could not find rebar, which is needed to build dependency #{inspect app}"
shell.info "I can install a local copy which is just used by mix"
unless shell.yes?("Shall I install rebar?") do
Mix.raise "Could not find rebar to compile " <>
"dependency #{app}, please ensure rebar is available"
end
(Mix.Tasks.Local.Rebar.run([]) && Mix.Rebar.local_rebar_cmd) ||
Mix.raise "rebar installation failed"
end
defp do_make(dep, config) do
command = if match?({:win32, _}, :os.type) and File.regular?("Makefile.win") do
"nmake /F Makefile.win"
else
"make"
end
do_command(dep, config, command, true)
end
defp do_compile(%Mix.Dep{opts: opts} = dep, config) do
if command = opts[:compile] do
do_command(dep, config, command, true)
else
false
end
end
defp do_command(%Mix.Dep{app: app} = dep, config, command, print_app?, extra \\ "") do
Mix.Dep.in_dependency dep, fn _ ->
env = [{"ERL_LIBS", Path.join(config[:build_path], "lib")}]
if Mix.shell.cmd("#{command} #{extra}", print_app: print_app?, env: env) != 0 do
Mix.raise "Could not compile dependency #{app}, #{command} command failed. " <>
"If you want to recompile this dependency, please run: mix deps.compile #{app}"
end
end
true
end
defp build_structure(%Mix.Dep{opts: opts} = dep, config) do
build_path = Path.dirname(opts[:build])
Enum.each Mix.Dep.source_paths(dep), fn source ->
app = Path.join(build_path, Path.basename(source))
build_structure(source, app, config)
Code.prepend_path(Path.join(app, "ebin"))
end
end
defp build_structure(dest, build, config) do
File.cd! dest, fn ->
config = Keyword.put(config, :app_path, build)
Mix.Project.build_structure(config, symlink_ebin: true)
end
end
defp old_elixir_req(config) do
req = config[:elixir]
if req && not Version.match?(System.version, req) do
req
end
end
end
| 30.905759 | 100 | 0.622734 |
f79d35f146e8ff0331e5917c28459c0d1e016f9e | 8,104 | ex | Elixir | lib/trento/application/integration/discovery/policies/sap_system_policy.ex | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 1 | 2022-03-22T16:59:34.000Z | 2022-03-22T16:59:34.000Z | lib/trento/application/integration/discovery/policies/sap_system_policy.ex | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 24 | 2022-03-22T16:45:25.000Z | 2022-03-31T13:00:02.000Z | lib/trento/application/integration/discovery/policies/sap_system_policy.ex | trento-project/web | 3260b30c781bffbbb0e5205cd650966c4026b9ac | [
"Apache-2.0"
] | 1 | 2022-03-30T14:16:16.000Z | 2022-03-30T14:16:16.000Z | defmodule Trento.Integration.Discovery.SapSystemPolicy do
@moduledoc """
  This module contains functions to transform SAP system related integration events into commands.
"""
alias Trento.Domain.Commands.{
RegisterApplicationInstance,
RegisterDatabaseInstance
}
@uuid_namespace Application.compile_env!(:trento, :uuid_namespace)
@database_type 1
@application_type 2
@spec handle(map) ::
{:ok, [RegisterApplicationInstance.t() | RegisterDatabaseInstance.t()]} | {:error, any}
def handle(%{
"discovery_type" => "sap_system_discovery",
"agent_id" => agent_id,
"payload" => payload
}) do
payload
|> Enum.flat_map(fn sap_system -> parse_sap_system(sap_system, agent_id) end)
|> Enum.reduce_while(
{:ok, []},
fn
{:ok, command}, {:ok, commands} -> {:cont, {:ok, commands ++ [command]}}
{:error, _} = error, _ -> {:halt, error}
end
)
end
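  # A minimal accepted event looks roughly like the following (values are
  # illustrative; real payloads carry many more keys than are matched on):
  #
  #     %{
  #       "discovery_type" => "sap_system_discovery",
  #       "agent_id" => "an-agent-uuid",
  #       "payload" => [
  #         %{
  #           "Type" => 2,
  #           "SID" => "NWP",
  #           "DBAddress" => "10.0.0.5",
  #           "Instances" => [],
  #           "Profile" => %{"dbs/hdb/dbname" => "HN1"}
  #         }
  #       ]
  #     }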
@spec parse_sap_system(map, String.t()) :: [
{:ok, RegisterDatabaseInstance.t()}
| {:ok, RegisterApplicationInstance.t()}
| {:error, any}
]
defp parse_sap_system(
%{
"Type" => @database_type,
"Id" => id,
"SID" => sid,
"Databases" => databases,
"Instances" => instances
},
host_id
) do
Enum.flat_map(databases, fn %{"Database" => tenant} ->
Enum.map(instances, fn instance ->
instance_number = parse_instance_number(instance)
instance_hostname = parse_instance_hostname(instance)
RegisterDatabaseInstance.new(%{
sap_system_id: UUID.uuid5(@uuid_namespace, id),
sid: sid,
tenant: tenant,
host_id: host_id,
instance_number: instance_number,
instance_hostname: instance_hostname,
features: parse_features(instance, instance_number, instance_hostname),
http_port: parse_http_port(instance, instance_number, instance_hostname),
https_port: parse_https_port(instance, instance_number, instance_hostname),
start_priority: parse_start_priority(instance, instance_number, instance_hostname),
system_replication: parse_system_replication(instance),
system_replication_status: parse_system_replication_status(instance),
health: parse_instance_health(instance, instance_number, instance_hostname)
})
end)
end)
end
defp parse_sap_system(
%{
"Type" => @application_type,
"DBAddress" => db_host,
"SID" => sid,
"Instances" => instances,
"Profile" => %{
"dbs/hdb/dbname" => tenant
}
},
host_id
) do
Enum.map(instances, fn instance ->
instance_number = parse_instance_number(instance)
instance_hostname = parse_instance_hostname(instance)
RegisterApplicationInstance.new(%{
sid: sid,
tenant: tenant,
db_host: db_host,
instance_number: instance_number,
instance_hostname: instance_hostname,
features: parse_features(instance, instance_number, instance_hostname),
http_port: parse_http_port(instance, instance_number, instance_hostname),
https_port: parse_https_port(instance, instance_number, instance_hostname),
start_priority: parse_start_priority(instance, instance_number, instance_hostname),
host_id: host_id,
health: parse_instance_health(instance, instance_number, instance_hostname)
})
end)
end
defp parse_sap_system(
%{
"Type" => _
},
_
),
do: []
@spec parse_http_port(map, String.t(), String.t()) :: integer() | nil
defp parse_http_port(%{"SAPControl" => sap_control}, instance_number, instance_hostname) do
case extract_sap_control_instance_data(
sap_control,
instance_number,
instance_hostname,
"httpPort"
) do
{:ok, instance_http_port} ->
instance_http_port
_ ->
nil
end
end
@spec parse_https_port(map, String.t(), String.t()) :: integer() | nil
defp parse_https_port(%{"SAPControl" => sap_control}, instance_number, instance_hostname) do
case extract_sap_control_instance_data(
sap_control,
instance_number,
instance_hostname,
"httpsPort"
) do
{:ok, instance_https_port} ->
instance_https_port
_ ->
nil
end
end
@spec parse_start_priority(map, String.t(), String.t()) :: String.t() | nil
defp parse_start_priority(%{"SAPControl" => sap_control}, instance_number, instance_hostname) do
case extract_sap_control_instance_data(
sap_control,
instance_number,
instance_hostname,
"startPriority"
) do
{:ok, start_priority} ->
start_priority
_ ->
nil
end
end
@spec parse_features(map, String.t(), String.t()) :: String.t()
defp parse_features(%{"SAPControl" => sap_control}, instance_number, instance_hostname) do
case extract_sap_control_instance_data(
sap_control,
instance_number,
instance_hostname,
"features"
) do
{:ok, features} ->
features
_ ->
""
end
end
@spec parse_instance_number(map) :: String.t() | nil
defp parse_instance_number(%{
"SAPControl" => %{"Properties" => properties}
}) do
properties
|> Enum.find_value(fn
%{"property" => "SAPSYSTEM", "value" => value} -> value
_ -> nil
end)
end
@spec parse_instance_hostname(map) :: String.t() | nil
defp parse_instance_hostname(%{
"SAPControl" => %{"Properties" => properties}
}) do
properties
|> Enum.find_value(fn
%{"property" => "SAPLOCALHOST", "value" => value} -> value
_ -> nil
end)
end
@spec parse_instance_health(map, String.t(), String.t()) ::
:passing | :warning | :critical | :unknown
defp parse_instance_health(%{"SAPControl" => sap_control}, instance_number, instance_hostname) do
case extract_sap_control_instance_data(
sap_control,
instance_number,
instance_hostname,
"dispstatus"
) do
{:ok, dispstatus} ->
parse_dispstatus(dispstatus)
_ ->
:unknown
end
end
defp parse_dispstatus("SAPControl-GREEN"), do: :passing
defp parse_dispstatus("SAPControl-YELLOW"), do: :warning
defp parse_dispstatus("SAPControl-RED"), do: :critical
defp parse_dispstatus(_), do: :unknown
@spec extract_sap_control_instance_data(map, String.t(), String.t(), String.t()) ::
{:ok, String.t()} | {:error, :key_not_found}
defp extract_sap_control_instance_data(
%{"Instances" => instances},
instance_number,
instance_hostname,
key
) do
instances
|> Enum.find(fn
%{"instanceNr" => number, "hostname" => hostname} ->
number
|> Integer.to_string()
|> String.pad_leading(2, "0") == instance_number && hostname == instance_hostname
_ ->
nil
end)
|> case do
%{^key => value} ->
{:ok, value}
_ ->
{:error, :key_not_found}
end
end
defp parse_system_replication(%{
"SystemReplication" => %{"local_site_id" => local_site_id} = system_replication
}) do
case Map.get(system_replication, "site/#{local_site_id}/REPLICATION_MODE") do
"PRIMARY" ->
"Primary"
mode when mode in ["SYNC", "SYNCMEM", "ASYNC", "UNKNOWN"] ->
"Secondary"
_ ->
""
end
end
# Find status information at:
# https://help.sap.com/viewer/4e9b18c116aa42fc84c7dbfd02111aba/2.0.04/en-US/aefc55a27003440792e34ece2125dc89.html
defp parse_system_replication_status(%{
"SystemReplication" => %{"overall_replication_status" => status}
}),
do: status
defp parse_system_replication_status(_), do: ""
end
| 29.794118 | 115 | 0.61155 |
f79d55d60cfe290517f4a0936b6ba59e05bc4885 | 2,762 | ex | Elixir | lib/ace/governor.ex | ashlyrclark/Ace | 73a9aa620923f64d6955c23a3bec6afe353e05b9 | [
"MIT"
] | 1 | 2021-11-22T10:30:44.000Z | 2021-11-22T10:30:44.000Z | lib/ace/governor.ex | ashlyrclark/Ace | 73a9aa620923f64d6955c23a3bec6afe353e05b9 | [
"MIT"
] | null | null | null | lib/ace/governor.ex | ashlyrclark/Ace | 73a9aa620923f64d6955c23a3bec6afe353e05b9 | [
"MIT"
] | 1 | 2021-11-22T10:31:48.000Z | 2021-11-22T10:31:48.000Z | defmodule Ace.Governor do
@moduledoc """
A governor maintains servers ready to handle clients.
A governor process starts with a reference to supervision that can start servers.
It will then wait until the server has accepted a connection.
Once it's server has accepted a connection the governor will start a new server.
"""
use GenServer
@enforce_keys [:server_supervisor, :listen_socket, :server, :monitor]
defstruct @enforce_keys
def start_link(server_supervisor, listen_socket) when is_pid(server_supervisor) do
initial_state = %{
server_supervisor: server_supervisor,
listen_socket: listen_socket,
server: nil,
monitor: nil
}
GenServer.start_link(__MODULE__, initial_state)
end
def child_spec({endpoint_supervisor, listen_socket}) do
%{
id: __MODULE__,
start: {__MODULE__, :start_link, [endpoint_supervisor, listen_socket]},
type: :worker,
restart: :transient,
shutdown: 500
}
end
@impl GenServer
def init(initial_state) do
new_state = start_server(initial_state)
{:ok, new_state}
end
@impl GenServer
  # DEBT should match on the response, which is `{:ok, server}` or `{:error, reason}`
def handle_info({monitor, _response}, state = %{monitor: monitor, server: server}) do
true = Process.unlink(server)
true = Process.demonitor(monitor)
new_state = start_server(%{state | monitor: nil, server: nil})
{:noreply, new_state}
end
def handle_info({:DOWN, monitor, :process, _server, :normal}, state = %{monitor: monitor}) do
# Server process has terminated so existing references are irrelevant
new_state = start_server(%{state | monitor: nil, server: nil})
{:noreply, new_state}
end
  # Messages from a previously monitored process can arrive when the connection responds quickly and exits normally.
def handle_info({:DOWN, _, :process, _, :normal}, state) do
{:noreply, state}
end
# function head ensures that only one server is being monitored at a time
defp start_server(state = %{server: nil, monitor: nil}) do
    # Starting a server process must always succeed; before accepting a connection it has no external influences.
{:ok, server} = Supervisor.start_child(state.server_supervisor, [])
    # The behaviour of a server is to always be alive after creation, therefore linking should always succeed.
true = Process.link(server)
# Creates a unique reference that we can also use to correlate the call to accept on a given socket.
monitor = Process.monitor(server)
# Simulate a `GenServer.call` but without blocking on a receive loop.
send(server, {:"$gen_call", {self(), monitor}, {:accept, state.listen_socket}})
%{state | monitor: monitor, server: server}
end
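  # The `:"$gen_call"` message above is what
  # `GenServer.call(server, {:accept, listen_socket}, :infinity)` would send,
  # except the governor keeps processing other messages instead of blocking in
  # a receive. The server answers via `GenServer.reply/2`, which arrives as the
  # `{monitor, response}` message matched in `handle_info/2` above.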
end
| 34.525 | 116 | 0.711441 |
f79d711f10da85f343a041ed3d59d23d6cfd73f0 | 8,505 | exs | Elixir | test/sanbase/project/project_list_test.exs | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | null | null | null | test/sanbase/project/project_list_test.exs | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | 1 | 2021-07-24T16:26:03.000Z | 2021-07-24T16:26:03.000Z | test/sanbase/project/project_list_test.exs | sitedata/sanbase2 | 8da5e44a343288fbc41b68668c6c80ae8547d557 | [
"MIT"
] | null | null | null | defmodule Sanbase.Model.ProjectListTest do
use Sanbase.DataCase, async: false
alias Sanbase.Model.Project
import Sanbase.Factory
import ExUnit.CaptureLog
describe "ordering with and without min_volume" do
test "does not join twice when min_volume is provided" do
# This test only checks if the query has more than one join.
# It's done by capturing the debug output and searching it for strings.
# The test can be broken by adding a new join with a new table that does
# not break the things we're testing here.
# NOTE: This can be improved when Ecto is updated to 3.x with the named
# bindings
Logger.configure(level: :debug)
log = capture_log(fn -> Project.List.projects_page(1, 2, min_volume: 100) end)
parts = String.split(log, " JOIN ")
assert length(parts) == 2
end
test "equivalent results with and without min_volume when all projects have cmc data" do
insert(:random_project) |> update_latest_cmc_data(%{rank: 1, volume_usd: 500})
insert(:random_project) |> update_latest_cmc_data(%{rank: 2, volume_usd: 1100})
insert(:random_project) |> update_latest_cmc_data(%{rank: 3, volume_usd: 1200})
insert(:random_project) |> update_latest_cmc_data(%{rank: 4, volume_usd: 2000})
insert(:random_project) |> update_latest_cmc_data(%{rank: 5, volume_usd: 200})
insert(:random_project) |> update_latest_cmc_data(%{rank: 6, volume_usd: 10000})
assert Project.List.projects_page(1, 2) == Project.List.projects_page(1, 2, min_volume: 0)
assert Project.List.projects_page(2, 2) == Project.List.projects_page(2, 2, min_volume: 0)
assert Project.List.projects_page(3, 2) == Project.List.projects_page(3, 2, min_volume: 0)
assert length(Project.List.projects_page(1, 6, min_volume: 5000)) == 1
assert length(Project.List.projects_page(2, 1, min_volume: 5000)) == 0
end
end
describe "no projects" do
test "currently trending projects" do
Sanbase.Mock.prepare_mock2(
&Sanbase.SocialData.TrendingWords.get_currently_trending_words/0,
{:ok, [%{word: "btc"}]}
)
|> Sanbase.Mock.run_with_mocks(fn ->
assert Project.List.currently_trending_projects() == []
end)
end
test "all projects" do
assert Project.List.projects() == []
end
test "all erc20 projects" do
assert Project.List.erc20_projects() == []
end
test "all currency projects" do
assert Project.List.currency_projects() == []
end
test "all projects by ticker" do
assert Project.List.projects_by_ticker("BTC") == []
end
test "projects with min_volume above 1000" do
assert Project.List.projects(min_volume: 1000) == []
end
test "without hidden projects" do
assert Project.List.projects(show_hidden_projects?: false) == []
end
test "with hidden projects" do
assert Project.List.projects(show_hidden_projects?: true) == []
end
test "all projects page" do
assert Project.List.projects_page(1, 10) == []
end
test "all erc20 projects page" do
assert Project.List.erc20_projects_page(1, 10) == []
end
test "all currency projects page" do
assert Project.List.currency_projects_page(1, 10) == []
end
test "with source" do
assert Project.List.projects_with_source("coinmarketcap") == []
end
end
describe "with projects" do
setup do
p1 =
insert(:random_erc20_project, ticker: "BTC")
|> update_latest_cmc_data(%{rank: 2, volume_usd: 500})
p2 =
insert(:random_erc20_project, ticker: "BTC")
|> update_latest_cmc_data(%{rank: 3, volume_usd: 1100})
p3 =
insert(:random_erc20_project)
|> update_latest_cmc_data(%{rank: 4, volume_usd: 2500})
p4 = insert(:random_project, source_slug_mappings: [])
p5 = insert(:random_project) |> update_latest_cmc_data(%{rank: 5, volume_usd: 100})
p6 = insert(:random_project) |> update_latest_cmc_data(%{rank: 6})
p7 =
insert(:random_project, is_hidden: true)
|> update_latest_cmc_data(%{rank: 1, volume_usd: 5000})
p8 =
insert(:random_erc20_project, is_hidden: true)
|> update_latest_cmc_data(%{rank: 11, volume_usd: 5000})
hidden_projects = [p7: p7, p8: p8]
erc20_projects = [p1: p1, p2: p2, p3: p3]
currency_projects = [p4: p4, p5: p5, p6: p6]
projects = erc20_projects ++ currency_projects ++ hidden_projects
[
total_count: length(projects) - length(hidden_projects),
total_erc20_count: length(erc20_projects),
total_currency_count: length(currency_projects),
total_hidden_count: length(hidden_projects),
hidden_projects: hidden_projects,
non_hidden_projects: erc20_projects ++ currency_projects
] ++ projects
end
test "all projects", context do
assert Project.List.projects() |> length == context.total_count
end
test "all erc20 projects", context do
assert Project.List.erc20_projects() |> length == context.total_erc20_count
end
test "all currency projects", context do
assert Project.List.currency_projects() |> length == context.total_currency_count
end
test "all projects by ticker", context do
projects = Project.List.projects_by_ticker("BTC")
assert length(projects) == 2
assert context.p1.id in Enum.map(projects, & &1.id)
assert context.p2.id in Enum.map(projects, & &1.id)
end
test "projects with min_volume above 1000", context do
projects = Project.List.projects(min_volume: 1000)
assert length(projects) == 2
assert context.p2.id in Enum.map(projects, & &1.id)
assert context.p3.id in Enum.map(projects, & &1.id)
end
test "without hidden projects", context do
projects = Project.List.projects(include_hidden_projects?: false)
assert length(projects) == context.total_count
assert context.p7.id not in Enum.map(projects, & &1.id)
assert context.p8.id not in Enum.map(projects, & &1.id)
end
test "with hidden projects", context do
projects = Project.List.projects(include_hidden_projects?: true)
assert length(projects) == context.total_count + context.total_hidden_count
assert context.p7.id in Enum.map(projects, & &1.id)
assert context.p8.id in Enum.map(projects, & &1.id)
end
test "all projects page", context do
projects = Project.List.projects_page(2, 2)
assert length(projects) == 2
assert context.p3.id in Enum.map(projects, & &1.id)
assert context.p5.id in Enum.map(projects, & &1.id)
end
test "all erc20 projects page", context do
projects = Project.List.erc20_projects_page(1, 2)
assert length(projects) == 2
assert context.p1.id in Enum.map(projects, & &1.id)
assert context.p2.id in Enum.map(projects, & &1.id)
end
test "all currency projects page", context do
projects = Project.List.currency_projects_page(1, 2)
assert length(projects) == 2
assert context.p5.id in Enum.map(projects, & &1.id)
assert context.p6.id in Enum.map(projects, & &1.id)
end
test "with source", context do
projects = Project.List.projects_with_source("coinmarketcap")
assert length(projects) == 5
assert context.p4.id not in Enum.map(projects, & &1.id)
end
test "currently trending projects", context do
%{hidden_projects: [{_, hidden_project} | _], non_hidden_projects: [{_, project} | _]} =
context
Sanbase.Mock.prepare_mock2(
&Sanbase.SocialData.TrendingWords.get_currently_trending_words/0,
{:ok,
[
%{word: hidden_project.slug},
%{word: project.slug}
]}
)
|> Sanbase.Mock.run_with_mocks(fn ->
currently_trending_projects = Project.List.currently_trending_projects()
assert currently_trending_projects |> length() == 1
assert currently_trending_projects |> hd |> Map.get(:slug) == project.slug
end)
end
end
defp update_latest_cmc_data(project, args) do
%Sanbase.Model.LatestCoinmarketcapData{}
|> Sanbase.Model.LatestCoinmarketcapData.changeset(
%{
coinmarketcap_id: project.slug,
update_time: Timex.now()
}
|> Map.merge(args)
)
|> Sanbase.Repo.insert_or_update()
Sanbase.Repo.get!(Sanbase.Model.Project, project.id)
end
end
| 34.856557 | 96 | 0.663492 |
f79d7e89f64e1098ef79a967e1d9cfb40a8fdb5c | 1,596 | ex | Elixir | lib/books_web/endpoint.ex | nickagliano/books | eec595ed4add9d678278785d9ab10106e1e426d1 | [
"MIT"
] | null | null | null | lib/books_web/endpoint.ex | nickagliano/books | eec595ed4add9d678278785d9ab10106e1e426d1 | [
"MIT"
] | null | null | null | lib/books_web/endpoint.ex | nickagliano/books | eec595ed4add9d678278785d9ab10106e1e426d1 | [
"MIT"
] | null | null | null | defmodule BooksWeb.Endpoint do
use Phoenix.Endpoint, otp_app: :books
# The session will be stored in the cookie and signed,
# this means its contents can be read but not tampered with.
# Set :encryption_salt if you would also like to encrypt it.
@session_options [
store: :cookie,
key: "_books_key",
signing_salt: "+9pQLhnS"
]
socket "/socket", BooksWeb.UserSocket,
websocket: true,
longpoll: false
socket "/live", Phoenix.LiveView.Socket, websocket: [connect_info: [session: @session_options]]
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phx.digest
# when deploying your static files in production.
plug Plug.Static,
at: "/",
from: :books,
gzip: false,
only: ~w(css fonts images js favicon.ico robots.txt)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket "/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket
plug Phoenix.LiveReloader
plug Phoenix.CodeReloader
plug Phoenix.Ecto.CheckRepoStatus, otp_app: :books
end
plug Phoenix.LiveDashboard.RequestLogger,
param_key: "request_logger",
cookie_key: "request_logger"
plug Plug.RequestId
plug Plug.Telemetry, event_prefix: [:phoenix, :endpoint]
plug Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
plug Plug.MethodOverride
plug Plug.Head
plug Plug.Session, @session_options
plug BooksWeb.Router
end
| 29.018182 | 97 | 0.715539 |
f79d9f272c0ed2612b7044516b64a6b72b77ca82 | 1,868 | exs | Elixir | broadway_test/test/imageprocessingmodel_test.exs | kujua/blog-kujuahub-articles-code | 1a65b1a481cbf5269a3f8846a5ac353937d45a2d | [
"Apache-2.0"
] | 2 | 2019-05-25T13:25:33.000Z | 2019-05-29T15:36:17.000Z | broadway_test/test/imageprocessingmodel_test.exs | kujua/blog-kujuahub-articles-code | 1a65b1a481cbf5269a3f8846a5ac353937d45a2d | [
"Apache-2.0"
] | null | null | null | broadway_test/test/imageprocessingmodel_test.exs | kujua/blog-kujuahub-articles-code | 1a65b1a481cbf5269a3f8846a5ac353937d45a2d | [
"Apache-2.0"
] | null | null | null | defmodule ImageProcessingModelTest do
use ExUnit.Case
# alias ImageProcessingModel
setup do
model_valid_no_path = %ImageProcessingModel{
file_name: "warthog.jpg",
customer_id: "FE51EF",
image_type: "jpg",
path_from: nil,
path_to: nil,
destination_type: "png",
file_name_destination: "processed-01",
status: :ok
}
model_valid_with_path = %ImageProcessingModel{
file_name: "warthog.jpg",
customer_id: "FE51EF",
image_type: "jpg",
path_from: "assets/",
path_to: "assets/",
destination_type: "png",
file_name_destination: "processed-01",
status: :ok
}
{:ok, valid: model_valid_no_path, valid_path: model_valid_with_path}
end
# filename from
@tag :model
test "create from name with path", %{valid_path: model} = _context do
ret = ImageProcessingModel.create_from_name(model)
assert(ret == "assets/warthog.jpg")
end
# filename resize
@tag :model
test "create resize name with path", %{valid_path: model} = _context do
ret = ImageProcessingModel.create_resize_name(model)
assert(ret == "assets/FE51EF_warthog-resized.png")
end
# filename watermark
@tag :model
test "create waternark name without path", %{valid: model} = _context do
ret = ImageProcessingModel.create_watermark_name(model)
assert(ret == "FE51EF_warthog-watermark.png")
end
@tag :model
test "create watermark name with path", %{valid_path: model} = _context do
ret = ImageProcessingModel.create_watermark_name(model)
assert(ret == "assets/FE51EF_warthog-watermark.png")
end
# filename destination
@tag :model
test "create destination name with path", %{valid_path: model} = _context do
ret = ImageProcessingModel.create_destination_name(model)
assert(ret == "assets/FE51EF/FE51EF_processed-01.png")
end
end
| 27.880597 | 78 | 0.695396 |
f79dba8adfe8a024bf163d6d241829b7d5bfea69 | 1,274 | exs | Elixir | config/config.exs | kianmeng/chaperon | 6161aaace38d86f54a25e2dba68e27747c134d60 | [
"MIT"
] | 97 | 2017-09-20T09:09:11.000Z | 2022-01-13T18:26:22.000Z | config/config.exs | kianmeng/chaperon | 6161aaace38d86f54a25e2dba68e27747c134d60 | [
"MIT"
] | 13 | 2017-10-25T08:47:33.000Z | 2022-03-24T16:39:49.000Z | config/config.exs | kianmeng/chaperon | 6161aaace38d86f54a25e2dba68e27747c134d60 | [
"MIT"
] | 18 | 2018-02-02T19:19:01.000Z | 2022-03-23T16:59:06.000Z | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
use Mix.Config
config :logger, :console,
level: :info,
format: "$date $time [$level] $metadata$message\n",
metadata: [:user_id]
config :chaperon, Chaperon.Export.InfluxDB,
database: System.get_env("CHAPERON_INFLUX_DB") || "chaperon",
host: System.get_env("CHAPERON_INFLUX_HOST") || "localhost",
http_opts: [insecure: false],
pool: [max_overflow: 10, size: 5],
port: System.get_env("CHAPERON_INFLUX_PORT") || 8086,
scheme: "https",
auth: [
method: :basic,
username: System.get_env("CHAPERON_INFLUX_USER") || "chaperon",
password: System.get_env("CHAPERON_INFLUX_PW")
],
writer: Instream.Writer.Line
config :chaperon, Chaperon.API.HTTP,
username: "chaperon",
password: {:system, "CHAPERON_API_TOKEN"},
realm: "Chaperon load test API",
option_parser: Chaperon.API.OptionParser.Default
config :ex_aws,
access_key_id: [{:system, "AWS_ACCESS_KEY_ID"}, :instance_role],
secret_access_key: [{:system, "AWS_SECRET_ACCESS_KEY"}, :instance_role]
# Custom per env config files override values defined here (if they exist)
if File.exists?("#{__DIR__}/#{Mix.env()}.exs") do
import_config "#{Mix.env()}.exs"
end
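# For example (illustrative), a config/dev.exs override pointing metrics at a
# local, non-TLS InfluxDB only needs to re-declare the keys that change:
#
#     config :chaperon, Chaperon.Export.InfluxDB, scheme: "http", host: "localhost"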
| 33.526316 | 74 | 0.71978 |
f79ddb48848c2248121440044b88b5702c35d021 | 3,151 | ex | Elixir | clients/analytics/lib/google_api/analytics/v3/model/linked_foreign_account.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/analytics/lib/google_api/analytics/v3/model/linked_foreign_account.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | clients/analytics/lib/google_api/analytics/v3/model/linked_foreign_account.ex | linjunpop/elixir-google-api | 444cb2b2fb02726894535461a474beddd8b86db4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.Analytics.V3.Model.LinkedForeignAccount do
@moduledoc """
JSON template for an Analytics Remarketing Audience Foreign Link.
## Attributes
- accountId (String.t): Account ID to which this linked foreign account belongs. Defaults to: `null`.
- eligibleForSearch (boolean()): Boolean indicating whether this is eligible for search. Defaults to: `null`.
- id (String.t): Entity ad account link ID. Defaults to: `null`.
- internalWebPropertyId (String.t): Internal ID for the web property to which this linked foreign account belongs. Defaults to: `null`.
- kind (String.t): Resource type for linked foreign account. Defaults to: `null`.
  - linkedAccountId (String.t): The foreign account ID. For example, a Google Ads `linkedAccountId` has the following format: XXX-XXX-XXXX. Defaults to: `null`.
- remarketingAudienceId (String.t): Remarketing audience ID to which this linked foreign account belongs. Defaults to: `null`.
- status (String.t): The status of this foreign account link. Defaults to: `null`.
- type (String.t): The type of the foreign account. For example, `ADWORDS_LINKS`, `DBM_LINKS`, `MCC_LINKS` or `OPTIMIZE`. Defaults to: `null`.
- webPropertyId (String.t): Web property ID of the form UA-XXXXX-YY to which this linked foreign account belongs. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:accountId => any(),
:eligibleForSearch => any(),
:id => any(),
:internalWebPropertyId => any(),
:kind => any(),
:linkedAccountId => any(),
:remarketingAudienceId => any(),
:status => any(),
:type => any(),
:webPropertyId => any()
}
field(:accountId)
field(:eligibleForSearch)
field(:id)
field(:internalWebPropertyId)
field(:kind)
field(:linkedAccountId)
field(:remarketingAudienceId)
field(:status)
field(:type)
field(:webPropertyId)
end
defimpl Poison.Decoder, for: GoogleApi.Analytics.V3.Model.LinkedForeignAccount do
def decode(value, options) do
GoogleApi.Analytics.V3.Model.LinkedForeignAccount.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Analytics.V3.Model.LinkedForeignAccount do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 42.013333 | 184 | 0.714376 |
f79defae61178a646e5c46cbfa9a029b191cc7ee | 19,291 | ex | Elixir | clients/logging/lib/google_api/logging/v2/api/exclusions.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/logging/lib/google_api/logging/v2/api/exclusions.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/logging/lib/google_api/logging/v2/api/exclusions.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Logging.V2.Api.Exclusions do
@moduledoc """
API calls for all endpoints tagged `Exclusions`.
"""
alias GoogleApi.Logging.V2.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Creates a new exclusion in a specified parent resource. Only log entries belonging to that resource can be excluded. You can have up to 10 exclusions in a resource.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `v2_id` (*type:* `String.t`) - Part of `parent`. Required. The parent resource in which to create the exclusion: "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]" Examples: "projects/my-logging-project", "organizations/123456789".
* `v2_id1` (*type:* `String.t`) - Part of `parent`. See documentation of `v2Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.Logging.V2.Model.LogExclusion.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogExclusion{}}` on success
* `{:error, info}` on failure
"""
@spec logging_exclusions_create(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogExclusion.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def logging_exclusions_create(connection, v2_id, v2_id1, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/v2/{v2Id}/{v2Id1}/exclusions", %{
"v2Id" => URI.encode(v2_id, &URI.char_unreserved?/1),
"v2Id1" => URI.encode(v2_id1, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogExclusion{}])
end
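  # Usage sketch (illustrative project and filter; obtaining the OAuth token is
  # up to the calling application):
  #
  #     conn = GoogleApi.Logging.V2.Connection.new(token)
  #
  #     {:ok, exclusion} =
  #       GoogleApi.Logging.V2.Api.Exclusions.logging_exclusions_create(
  #         conn,
  #         "projects",
  #         "my-project",
  #         body: %GoogleApi.Logging.V2.Model.LogExclusion{
  #           name: "low-severity-gcs",
  #           filter: "resource.type=gcs_bucket severity<ERROR"
  #         }
  #       )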
@doc """
Deletes an exclusion.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `v2_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of an existing exclusion to delete: "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" Example: "projects/my-project-id/exclusions/my-exclusion-id".
* `v2_id1` (*type:* `String.t`) - Part of `name`. See documentation of `v2Id`.
* `exclusions_id` (*type:* `String.t`) - Part of `name`. See documentation of `v2Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.Empty{}}` on success
* `{:error, info}` on failure
"""
@spec logging_exclusions_delete(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) :: {:ok, GoogleApi.Logging.V2.Model.Empty.t()} | {:ok, Tesla.Env.t()} | {:error, any()}
def logging_exclusions_delete(
connection,
v2_id,
v2_id1,
exclusions_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:delete)
|> Request.url("/v2/{v2Id}/{v2Id1}/exclusions/{exclusionsId}", %{
"v2Id" => URI.encode(v2_id, &URI.char_unreserved?/1),
"v2Id1" => URI.encode(v2_id1, &URI.char_unreserved?/1),
"exclusionsId" => URI.encode(exclusions_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.Empty{}])
end
@doc """
Gets the description of an exclusion.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `v2_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of an existing exclusion: "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" Example: "projects/my-project-id/exclusions/my-exclusion-id".
* `v2_id1` (*type:* `String.t`) - Part of `name`. See documentation of `v2Id`.
* `exclusions_id` (*type:* `String.t`) - Part of `name`. See documentation of `v2Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogExclusion{}}` on success
* `{:error, info}` on failure
"""
@spec logging_exclusions_get(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogExclusion.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def logging_exclusions_get(
connection,
v2_id,
v2_id1,
exclusions_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/{v2Id}/{v2Id1}/exclusions/{exclusionsId}", %{
"v2Id" => URI.encode(v2_id, &URI.char_unreserved?/1),
"v2Id1" => URI.encode(v2_id1, &URI.char_unreserved?/1),
"exclusionsId" => URI.encode(exclusions_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogExclusion{}])
end
@doc """
Lists all the exclusions in a parent resource.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `v2_id` (*type:* `String.t`) - Part of `parent`. Required. The parent resource whose exclusions are to be listed. "projects/[PROJECT_ID]" "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]"
* `v2_id1` (*type:* `String.t`) - Part of `parent`. See documentation of `v2Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:pageSize` (*type:* `integer()`) - Optional. The maximum number of results to return from this request. Non-positive values are ignored. The presence of nextPageToken in the response indicates that more results might be available.
* `:pageToken` (*type:* `String.t`) - Optional. If present, then retrieve the next batch of results from the preceding call to this method. pageToken must be the value of nextPageToken from the previous response. The values of other method parameters should be identical to those in the previous call.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.ListExclusionsResponse{}}` on success
* `{:error, info}` on failure
"""
@spec logging_exclusions_list(Tesla.Env.client(), String.t(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.Logging.V2.Model.ListExclusionsResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def logging_exclusions_list(connection, v2_id, v2_id1, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:pageSize => :query,
:pageToken => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/v2/{v2Id}/{v2Id1}/exclusions", %{
"v2Id" => URI.encode(v2_id, &URI.char_unreserved?/1),
"v2Id1" => URI.encode(v2_id1, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.ListExclusionsResponse{}])
end
@doc """
Changes one or more properties of an existing exclusion.
## Parameters
* `connection` (*type:* `GoogleApi.Logging.V2.Connection.t`) - Connection to server
* `v2_id` (*type:* `String.t`) - Part of `name`. Required. The resource name of the exclusion to update: "projects/[PROJECT_ID]/exclusions/[EXCLUSION_ID]" "organizations/[ORGANIZATION_ID]/exclusions/[EXCLUSION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]/exclusions/[EXCLUSION_ID]" "folders/[FOLDER_ID]/exclusions/[EXCLUSION_ID]" Example: "projects/my-project-id/exclusions/my-exclusion-id".
* `v2_id1` (*type:* `String.t`) - Part of `name`. See documentation of `v2Id`.
* `exclusions_id` (*type:* `String.t`) - Part of `name`. See documentation of `v2Id`.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:updateMask` (*type:* `String.t`) - Required. A non-empty list of fields to change in the existing exclusion. New values for the fields are taken from the corresponding fields in the LogExclusion included in this request. Fields not mentioned in update_mask are not changed and are ignored in the request.For example, to change the filter and description of an exclusion, specify an update_mask of "filter,description".
* `:body` (*type:* `GoogleApi.Logging.V2.Model.LogExclusion.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.Logging.V2.Model.LogExclusion{}}` on success
* `{:error, info}` on failure
"""
@spec logging_exclusions_patch(
Tesla.Env.client(),
String.t(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.Logging.V2.Model.LogExclusion.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def logging_exclusions_patch(
connection,
v2_id,
v2_id1,
exclusions_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:updateMask => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/v2/{v2Id}/{v2Id1}/exclusions/{exclusionsId}", %{
"v2Id" => URI.encode(v2_id, &URI.char_unreserved?/1),
"v2Id1" => URI.encode(v2_id1, &URI.char_unreserved?/1),
"exclusionsId" => URI.encode(exclusions_id, &(URI.char_unreserved?(&1) || &1 == ?/))
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.Logging.V2.Model.LogExclusion{}])
end
end
| 48.961929 | 430 | 0.62594 |
f79df28c192c6efeff46bc31e5cd37cf0d2de347 | 1,556 | ex | Elixir | server/test/support/data_case.ex | brianjaustin/melody-match | 5200fd347f7ae636ec782398896c782d80c17f59 | [
"Apache-1.1"
] | null | null | null | server/test/support/data_case.ex | brianjaustin/melody-match | 5200fd347f7ae636ec782398896c782d80c17f59 | [
"Apache-1.1"
] | 4 | 2021-03-28T03:09:37.000Z | 2021-04-10T17:45:10.000Z | server/test/support/data_case.ex | brianjaustin/melody-match | 5200fd347f7ae636ec782398896c782d80c17f59 | [
"Apache-1.1"
] | null | null | null | defmodule MelodyMatch.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use MelodyMatch.DataCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
alias MelodyMatch.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import MelodyMatch.DataCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(MelodyMatch.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(MelodyMatch.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Regex.replace(~r"%{(\w+)}", message, fn _, key ->
opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
end)
end)
end
end
| 27.785714 | 77 | 0.693445 |
f79e2551b4f2af98d7f7bd6d703d6481d266103c | 5,413 | ex | Elixir | clients/drive/lib/google_api/drive/v3/model/permission.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/drive/lib/google_api/drive/v3/model/permission.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/drive/lib/google_api/drive/v3/model/permission.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Drive.V3.Model.Permission do
@moduledoc """
A permission for a file. A permission grants a user, group, domain or the world access to a file or a folder hierarchy.
## Attributes
* `allowFileDiscovery` (*type:* `boolean()`, *default:* `nil`) - Whether the permission allows the file to be discovered through search. This is only applicable for permissions of type domain or anyone.
* `deleted` (*type:* `boolean()`, *default:* `nil`) - Whether the account associated with this permission has been deleted. This field only pertains to user and group permissions.
* `displayName` (*type:* `String.t`, *default:* `nil`) - The "pretty" name of the value of the permission. The following is a list of examples for each type of permission:
- user - User's full name, as defined for their Google account, such as "Joe Smith."
- group - Name of the Google Group, such as "The Company Administrators."
- domain - String domain name, such as "thecompany.com."
- anyone - No displayName is present.
* `domain` (*type:* `String.t`, *default:* `nil`) - The domain to which this permission refers.
* `emailAddress` (*type:* `String.t`, *default:* `nil`) - The email address of the user or group to which this permission refers.
* `expirationTime` (*type:* `DateTime.t`, *default:* `nil`) - The time at which this permission will expire (RFC 3339 date-time). Expiration times have the following restrictions:
- They can only be set on user and group permissions
- The time must be in the future
- The time cannot be more than a year in the future
* `id` (*type:* `String.t`, *default:* `nil`) - The ID of this permission. This is a unique identifier for the grantee, and is published in User resources as permissionId. IDs should be treated as opaque values.
* `kind` (*type:* `String.t`, *default:* `drive#permission`) - Identifies what kind of resource this is. Value: the fixed string "drive#permission".
* `permissionDetails` (*type:* `list(GoogleApi.Drive.V3.Model.PermissionPermissionDetails.t)`, *default:* `nil`) - Details of whether the permissions on this shared drive item are inherited or directly on this item. This is an output-only field which is present only for shared drive items.
* `photoLink` (*type:* `String.t`, *default:* `nil`) - A link to the user's profile photo, if available.
* `role` (*type:* `String.t`, *default:* `nil`) - The role granted by this permission. While new values may be supported in the future, the following are currently allowed:
- owner
- organizer
- fileOrganizer
- writer
- commenter
- reader
* `teamDrivePermissionDetails` (*type:* `list(GoogleApi.Drive.V3.Model.PermissionTeamDrivePermissionDetails.t)`, *default:* `nil`) - Deprecated - use permissionDetails instead.
* `type` (*type:* `String.t`, *default:* `nil`) - The type of the grantee. Valid values are:
- user
- group
- domain
- anyone When creating a permission, if type is user or group, you must provide an emailAddress for the user or group. When type is domain, you must provide a domain. There isn't extra information required for an anyone type.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:allowFileDiscovery => boolean(),
:deleted => boolean(),
:displayName => String.t(),
:domain => String.t(),
:emailAddress => String.t(),
:expirationTime => DateTime.t(),
:id => String.t(),
:kind => String.t(),
:permissionDetails => list(GoogleApi.Drive.V3.Model.PermissionPermissionDetails.t()),
:photoLink => String.t(),
:role => String.t(),
:teamDrivePermissionDetails =>
list(GoogleApi.Drive.V3.Model.PermissionTeamDrivePermissionDetails.t()),
:type => String.t()
}
field(:allowFileDiscovery)
field(:deleted)
field(:displayName)
field(:domain)
field(:emailAddress)
field(:expirationTime, as: DateTime)
field(:id)
field(:kind)
field(:permissionDetails, as: GoogleApi.Drive.V3.Model.PermissionPermissionDetails, type: :list)
field(:photoLink)
field(:role)
field(:teamDrivePermissionDetails,
as: GoogleApi.Drive.V3.Model.PermissionTeamDrivePermissionDetails,
type: :list
)
field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.Drive.V3.Model.Permission do
def decode(value, options) do
GoogleApi.Drive.V3.Model.Permission.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Drive.V3.Model.Permission do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 51.066038 | 294 | 0.69056 |
f79e2988f2223b9768ee80b5c0c0d6ccd590bc57 | 1,670 | ex | Elixir | clients/people/lib/google_api/people/v1/model/locale.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/people/lib/google_api/people/v1/model/locale.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/people/lib/google_api/people/v1/model/locale.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.People.V1.Model.Locale do
@moduledoc """
A person's locale preference.
## Attributes
* `metadata` (*type:* `GoogleApi.People.V1.Model.FieldMetadata.t`, *default:* `nil`) - Metadata about the locale.
* `value` (*type:* `String.t`, *default:* `nil`) - The well-formed [IETF BCP 47](https://tools.ietf.org/html/bcp47) language tag representing the locale.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:metadata => GoogleApi.People.V1.Model.FieldMetadata.t() | nil,
:value => String.t() | nil
}
field(:metadata, as: GoogleApi.People.V1.Model.FieldMetadata)
field(:value)
end
defimpl Poison.Decoder, for: GoogleApi.People.V1.Model.Locale do
def decode(value, options) do
GoogleApi.People.V1.Model.Locale.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.People.V1.Model.Locale do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.4 | 157 | 0.71976 |
f79e3ad90b0d5b87504612fa29ed97e40b3d155a | 625 | exs | Elixir | test/romulus_web/views/error_view_test.exs | haighis/romulus_app | d1c85ecb72d935eb9dd314617448bd56b53789e9 | [
"MIT"
] | null | null | null | test/romulus_web/views/error_view_test.exs | haighis/romulus_app | d1c85ecb72d935eb9dd314617448bd56b53789e9 | [
"MIT"
] | null | null | null | test/romulus_web/views/error_view_test.exs | haighis/romulus_app | d1c85ecb72d935eb9dd314617448bd56b53789e9 | [
"MIT"
] | null | null | null | defmodule RomulusWeb.ErrorViewTest do
use RomulusWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.json" do
assert render(RomulusWeb.ErrorView, "404.json", []) ==
%{errors: %{detail: "Page not found"}}
end
test "render 500.json" do
assert render(RomulusWeb.ErrorView, "500.json", []) ==
%{errors: %{detail: "Internal server error"}}
end
test "render any other" do
assert render(RomulusWeb.ErrorView, "505.json", []) ==
%{errors: %{detail: "Internal server error"}}
end
end
| 28.409091 | 66 | 0.648 |
f79e559a198c1d774ee7d4fa48c6d2841ee12ce1 | 1,678 | exs | Elixir | test/lru_test.exs | diodechain/diode_server | 1692788bd92cc17654965878abd059d13b5e236c | [
"Apache-2.0"
] | 8 | 2021-03-12T15:35:09.000Z | 2022-03-06T06:37:49.000Z | test/lru_test.exs | diodechain/diode_server_ex | 5cf47e5253a0caafd335d0af4dba711d4dcad42d | [
"Apache-2.0"
] | 15 | 2019-09-06T07:58:01.000Z | 2021-03-06T17:04:46.000Z | test/lru_test.exs | diodechain/diode_server | 1692788bd92cc17654965878abd059d13b5e236c | [
"Apache-2.0"
] | 5 | 2021-10-01T12:52:28.000Z | 2022-02-02T19:29:56.000Z | # Diode Server
# Copyright 2021 Diode
# Licensed under the Diode License, Version 1.1
defmodule LruTest do
use ExUnit.Case
test "base" do
lru = Lru.new(10)
assert Lru.size(lru) == 0
lru = Lru.insert(lru, "key", "value")
assert Lru.size(lru) == 1
assert Lru.get(lru, "key") == "value"
# Lru should not cache nil return values
assert Lru.fetch(lru, "nothing", fn -> nil end) == {lru, nil}
{lru, "yay"} = Lru.fetch(lru, "nothing", fn -> "yay" end)
assert Lru.get(lru, "nothing") == "yay"
end
test "limit" do
lru = Lru.new(3)
assert Lru.size(lru) == 0
lru = Lru.insert(lru, "a", "avalue")
lru = Lru.insert(lru, "b", "bvalue")
lru = Lru.insert(lru, "c", "cvalue")
assert Lru.size(lru) == 3
assert Lru.get(lru, "a") == "avalue"
assert Lru.get(lru, "b") == "bvalue"
assert Lru.get(lru, "c") == "cvalue"
lru = Lru.insert(lru, "d", "dvalue")
assert Lru.size(lru) == 3
assert Lru.get(lru, "a") == nil
assert Lru.get(lru, "b") == "bvalue"
assert Lru.get(lru, "c") == "cvalue"
assert Lru.get(lru, "d") == "dvalue"
end
test "repeat" do
lru = Lru.new(3)
assert Lru.size(lru) == 0
lru = Lru.insert(lru, "a", "avalue")
lru = Lru.insert(lru, "b", "bvalue")
lru = Lru.insert(lru, "c", "cvalue")
assert Lru.size(lru) == 3
assert Lru.get(lru, "a") == "avalue"
assert Lru.get(lru, "b") == "bvalue"
assert Lru.get(lru, "c") == "cvalue"
lru = Lru.insert(lru, "a", "avalue2")
assert Lru.size(lru) == 3
assert Lru.get(lru, "a") == "avalue2"
assert Lru.get(lru, "b") == "bvalue"
assert Lru.get(lru, "c") == "cvalue"
end
end
| 25.815385 | 65 | 0.561979 |
f79e648a4ab5977928cd04d4b58f3a3b83102e4f | 22,734 | ex | Elixir | lib/gradient/ast_specifier.ex | rodrigues/gradient | c09245b42d8d01b2624ca07648c41cc1a2dee49c | [
"Apache-2.0"
] | null | null | null | lib/gradient/ast_specifier.ex | rodrigues/gradient | c09245b42d8d01b2624ca07648c41cc1a2dee49c | [
"Apache-2.0"
] | null | null | null | lib/gradient/ast_specifier.ex | rodrigues/gradient | c09245b42d8d01b2624ca07648c41cc1a2dee49c | [
"Apache-2.0"
] | null | null | null | defmodule Gradient.AstSpecifier do
@moduledoc """
This module adds missing location information to Erlang abstract code produced
from the Elixir AST. Moreover, it can be used to catch certain AST patterns and
replace them with forms that cannot be produced from Elixir directly.
FIXME Optimize token searching. Find out why some tokens are dropped.
NOTE Mapper implements:
- function [x]
- fun [x]
- fun @spec [x]
- clause [x]
- case [x]
- block [X]
- pipe [x]
- call [x] (remote [X])
- match [x]
- op [x]
- integer [x]
- float [x]
- string [x]
- charlist [x]
- tuple [X]
- var [X]
- list [X]
- keyword [X]
- binary [X]
- map [X]
- try [x]
- receive [X]
- record [X] Elixir doesn't use record_field, record_index, record_pattern, record
- named_fun [ ] is named_fun used by Elixir?
NOTE Elixir expressions to handle or test:
- list comprehension [X]
- binary [X]
- maps [X]
- struct [X]
- pipe [ ] TODO decide how to search for line in reversed form order
- range [X]
- receive [X]
- record [X]
- guards [X]
"""
import Gradient.Tokens
require Logger
alias Gradient.Types
@type token :: Types.token()
@type tokens :: Types.tokens()
@type form :: Types.form()
@type forms :: Types.forms()
@type options :: Types.options()
# Api
@doc """
Reads and tokenizes the code file. Then runs mappers on the given AST (with the obtained
tokens) to specify missing locations or replace parts that match a pattern.
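  For example (an illustrative sketch; assumes the module was compiled with `:debug_info`
  and that the source path recorded in the forms still exists):
      {:ok, {_mod, [abstract_code: {:raw_abstract_v1, forms}]}} =
        :beam_lib.chunks('Elixir.MyModule.beam', [:abstract_code])
      specified_forms = Gradient.AstSpecifier.specify(forms)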
"""
@spec specify(nonempty_list(:erl_parse.abstract_form())) :: [:erl_parse.abstract_form()]
def specify(forms) do
with {:attribute, line, :file, {path, _}} <- hd(forms),
path <- to_string(path),
{:ok, code} <- File.read(path),
{:ok, tokens} <- :elixir.string_to_tokens(String.to_charlist(code), line, line, path, []) do
run_mappers(forms, tokens)
else
error ->
IO.puts("Error occured when specifying forms : #{inspect(error)}")
forms
end
end
@doc """
Takes forms and traverses them in order to specify locations or modify forms
matching a pattern. The tokens are required to obtain the missing locations
as precisely as possible.
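  A minimal usage sketch (the path is a placeholder); the tokens are produced the same
  way `specify/1` does it:
      {:ok, code} = File.read("lib/my_module.ex")
      {:ok, tokens} =
        :elixir.string_to_tokens(String.to_charlist(code), 1, 1, "lib/my_module.ex", [])
      specified_forms = run_mappers(forms, tokens)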
"""
@spec run_mappers([:erl_parse.abstract_form()], tokens()) :: [
:erl_parse.abstract_form()
]
def run_mappers(forms, tokens) do
opts = [end_line: -1]
{forms, _} =
forms
|> prepare_forms_order()
|> context_mapper_fold(tokens, opts)
forms
end
# Mappers
@doc """
Map over the forms using the mapper and attach a context, i.e. the end line.
"""
@spec context_mapper_map(forms(), tokens(), options()) :: forms()
def context_mapper_map(forms, tokens, opts, mapper \\ &mapper/3)
def context_mapper_map([], _, _, _), do: []
def context_mapper_map([form | forms], tokens, opts, mapper) do
cur_opts = set_form_end_line(opts, form, forms)
{form, _} = mapper.(form, tokens, cur_opts)
[form | context_mapper_map(forms, tokens, opts, mapper)]
end
@doc """
Fold over the forms using the mapper and attach a context, i.e. the end line.
"""
@spec context_mapper_fold(forms(), tokens(), options()) :: {forms(), tokens()}
def context_mapper_fold(forms, tokens, opts, mapper \\ &mapper/3)
def context_mapper_fold([], tokens, _, _), do: {[], tokens}
def context_mapper_fold([form | forms], tokens, opts, mapper) do
cur_opts = set_form_end_line(opts, form, forms)
{form, new_tokens} = mapper.(form, tokens, cur_opts)
{forms, res_tokens} = context_mapper_fold(forms, new_tokens, opts, mapper)
{[form | forms], res_tokens}
end
@doc """
The main mapper function traverses the AST and specifies missing locations
or replaces parts that match the pattern.
"""
@spec mapper(form(), [token()], options()) :: {form(), [token()]}
def mapper(form, tokens, opts)
def mapper({:attribute, anno, :spec, {name_arity, specs}}, tokens, opts) do
new_specs = context_mapper_map(specs, [], opts, &spec_mapper/3)
{:attribute, anno, :spec, {name_arity, new_specs}}
|> pass_tokens(tokens)
end
def mapper({:function, _line, :__info__, _arity, _children} = form, tokens, _opts) do
# skip analysis for __info__ functions
pass_tokens(form, tokens)
end
def mapper({:function, anno, name, arity, clauses}, tokens, opts) do
# anno has line
{clauses, tokens} = context_mapper_fold(clauses, tokens, opts)
{:function, anno, name, arity, clauses}
|> pass_tokens(tokens)
end
def mapper({:fun, anno, {:clauses, clauses}}, tokens, opts) do
# anno has line
{clauses, tokens} = context_mapper_fold(clauses, tokens, opts)
{:fun, anno, {:clauses, clauses}}
|> pass_tokens(tokens)
end
def mapper({:case, anno, condition, clauses}, tokens, opts) do
# anno has line
# NOTE In Elixir `if`, `case` and `cond` statements are represented
# as a `case` in abstract code.
{:ok, line, anno, opts, _} = get_line(anno, opts)
opts =
case get_conditional(tokens, line, opts) do
{type, _} when type in [:case, :with] ->
Keyword.put(opts, :case_type, :case)
{type, _} when type in [:cond, :if, :unless] ->
Keyword.put(opts, :case_type, :gen)
:undefined ->
Keyword.put(opts, :case_type, :gen)
end
{new_condition, tokens} = mapper(condition, tokens, opts)
# NOTE use map because generated clauses can be in wrong order
clauses = context_mapper_map(clauses, tokens, opts)
{:case, anno, new_condition, clauses}
|> pass_tokens(tokens)
end
def mapper({:clause, anno, args, guards, children}, tokens, opts) do
# anno has line
# FIXME Handle generated clauses. Right now the literals inherit lines
# from the parents without checking them with tokens
{:ok, line, anno, opts, _} = get_line(anno, opts)
case_type = Keyword.get(opts, :case_type, :case)
tokens = drop_tokens_to_line(tokens, line)
if case_type == :case do
{guards, tokens} = guards_mapper(guards, tokens, opts)
{args, tokens} =
if not :erl_anno.generated(anno) do
context_mapper_fold(args, tokens, opts)
else
{args, tokens}
end
{children, tokens} = children |> context_mapper_fold(tokens, opts)
{:clause, anno, args, guards, children}
|> pass_tokens(tokens)
else
{children, tokens} = children |> context_mapper_fold(tokens, opts)
{:clause, anno, args, guards, children}
|> pass_tokens(tokens)
end
end
def mapper({:block, anno, body}, tokens, opts) do
# TODO check if anno has line
{:ok, _line, anno, opts, _} = get_line(anno, opts)
{body, tokens} = context_mapper_fold(body, tokens, opts)
{:block, anno, body}
|> pass_tokens(tokens)
end
def mapper({:match, anno, left, right}, tokens, opts) do
{:ok, _, anno, opts, _} = get_line(anno, opts)
{left, tokens} = mapper(left, tokens, opts)
{right, tokens} = mapper(right, tokens, opts)
{:match, anno, left, right}
|> pass_tokens(tokens)
end
def mapper({:map, anno, pairs}, tokens, opts) do
# anno has correct line
{:ok, _, anno, opts, _} = get_line(anno, opts)
{pairs, tokens} = context_mapper_fold(pairs, tokens, opts, &map_element_mapper/3)
{:map, anno, pairs}
|> pass_tokens(tokens)
end
# update map pattern
def mapper({:map, anno, map, pairs}, tokens, opts) do
# anno has correct line
{:ok, _, anno, opts, _} = get_line(anno, opts)
{map, tokens} = mapper(map, tokens, opts)
{pairs, tokens} = context_mapper_fold(pairs, tokens, opts, &map_element_mapper/3)
{:map, anno, map, pairs}
|> pass_tokens(tokens)
end
def mapper({:cons, anno, value, more} = cons, tokens, opts) do
# anno could be 0
{:ok, line, anno, opts, _} = get_line(anno, opts)
tokens = drop_tokens_to_line(tokens, line)
case get_list(tokens, opts) do
{:list, tokens} ->
cons_mapper(cons, tokens, opts)
{:keyword, tokens} ->
cons_mapper(cons, tokens, opts)
{:charlist, tokens} ->
{:cons, anno, value, more}
|> specify_line(tokens, opts)
:undefined ->
{form, _} = cons_mapper(cons, [], opts)
pass_tokens(form, tokens)
end
end
def mapper({:tuple, anno, elements}, tokens, opts) do
# anno could be 0
{:ok, line, anno, opts, has_line?} = get_line(anno, opts)
tokens
|> drop_tokens_to_line(line)
|> get_tuple(opts)
|> case do
{:tuple, tokens} ->
{anno, opts} = update_line_from_tokens(tokens, anno, opts, has_line?)
{elements, tokens} = context_mapper_fold(elements, tokens, opts)
{:tuple, anno, elements}
|> pass_tokens(tokens)
:undefined ->
elements = context_mapper_map(elements, [], opts)
{:tuple, anno, elements}
|> pass_tokens(tokens)
end
end
def mapper({:receive, anno, clauses}, tokens, opts) do
# anno has correct line
{:ok, _, anno, opts, _} = get_line(anno, opts)
{clauses, tokens} = context_mapper_fold(clauses, tokens, opts)
{:receive, anno, clauses}
|> pass_tokens(tokens)
end
# receive with timeout
def mapper({:receive, anno, clauses, after_val, after_block}, tokens, opts) do
# anno has correct line
{:ok, _, anno, opts, _} = get_line(anno, opts)
{clauses, tokens} = context_mapper_fold(clauses, tokens, opts)
{after_val, tokens} = mapper(after_val, tokens, opts)
{after_block, tokens} = context_mapper_fold(after_block, tokens, opts)
{:receive, anno, clauses, after_val, after_block}
|> pass_tokens(tokens)
end
def mapper({:try, anno, body, else_block, catchers, after_block}, tokens, opts) do
# anno has correct line
{:ok, _, anno, opts, _} = get_line(anno, opts)
{body, tokens} = context_mapper_fold(body, tokens, opts)
{catchers, tokens} = context_mapper_fold(catchers, tokens, opts)
{else_block, tokens} = context_mapper_fold(else_block, tokens, opts)
{after_block, tokens} = context_mapper_fold(after_block, tokens, opts)
{:try, anno, body, else_block, catchers, after_block}
|> pass_tokens(tokens)
end
def mapper(
{:call, anno, {:atom, _, name_atom} = name,
[expr, {:bin, _, [{:bin_element, _, {:string, _, _} = val, :default, :default}]}]},
tokens,
_opts
)
when name_atom in [:"::", :":::"] do
# unwrap string from binary for correct type annotation matching
{:call, anno, name, [expr, val]}
|> pass_tokens(tokens)
end
def mapper({:call, anno, name, args}, tokens, opts) do
# anno has correct line
{:ok, _, anno, opts, _} = get_line(anno, opts)
name = remote_mapper(name)
{args, tokens} = context_mapper_fold(args, tokens, opts)
{:call, anno, name, args}
|> pass_tokens(tokens)
end
def mapper({:op, anno, op, left, right}, tokens, opts) do
# anno has correct line
{:ok, _, anno, opts, _} = get_line(anno, opts)
{left, tokens} = mapper(left, tokens, opts)
{right, tokens} = mapper(right, tokens, opts)
{:op, anno, op, left, right}
|> pass_tokens(tokens)
end
def mapper({:op, anno, op, right}, tokens, opts) do
# anno has correct line
{:ok, _, anno, opts, _} = get_line(anno, opts)
{right, tokens} = mapper(right, tokens, opts)
{:op, anno, op, right}
|> pass_tokens(tokens)
end
def mapper({:bin, anno, elements}, tokens, opts) do
# anno could be 0
{:ok, line, anno, opts, _} = get_line(anno, opts)
    # TODO find a way to merge these cases
case elements do
[{:bin_element, _, {:string, _, _}, :default, :default}] = e ->
{:bin, anno, e}
|> specify_line(tokens, opts)
_ ->
{bin_tokens, other_tokens} = cut_tokens_to_bin(tokens, line)
bin_tokens = flatten_tokens(bin_tokens)
{elements, _} = context_mapper_fold(elements, bin_tokens, opts, &bin_element_mapper/3)
{:bin, anno, elements}
|> pass_tokens(other_tokens)
end
end
def mapper({type, 0, value}, tokens, opts)
when type in [:atom, :char, :float, :integer, :string, :bin] do
    # TODO check what happens for :string
{:ok, line} = Keyword.fetch(opts, :line)
{type, line, value}
|> specify_line(tokens, opts)
end
def mapper(skip, tokens, _opts)
when elem(skip, 0) in [
:fun,
:attribute,
:var,
nil,
:atom,
:char,
:float,
:integer,
:string,
:bin
] do
    # NOTE fun - checking &name/arity and &module.name/arity is skipped here
    # skip forms that don't need analysis and do not display a warning
pass_tokens(skip, tokens)
end
def mapper(form, tokens, _opts) do
Logger.warn("Not found mapper for #{inspect(form)}")
pass_tokens(form, tokens)
end
@doc """
Adds missing location to the function specification.
"""
@spec spec_mapper(form(), tokens(), options()) :: {form(), tokens()}
def spec_mapper({:type, anno, :tuple, :any}, tokens, _opts) do
{:type, anno, :tuple, :any}
|> pass_tokens(tokens)
end
def spec_mapper({:type, anno, :map, :any}, tokens, _opts) do
{:type, anno, :map, :any}
|> pass_tokens(tokens)
end
def spec_mapper({:type, anno, :any}, tokens, _opts) do
{:type, anno, :any}
|> pass_tokens(tokens)
end
def spec_mapper({:type, anno, type_name, args}, tokens, opts) do
{:ok, _line, anno, opts, _} = get_line(anno, opts)
new_args = context_mapper_map(args, tokens, opts, &spec_mapper/3)
{:type, anno, type_name, new_args}
|> pass_tokens(tokens)
end
def spec_mapper({:remote_type, anno, [mod, type, args]}, tokens, opts) do
{:ok, _line, anno, opts, _} = get_line(anno, opts)
{new_mod, _} = spec_mapper(mod, tokens, opts)
{new_type, _} = spec_mapper(type, tokens, opts)
new_args = context_mapper_map(args, tokens, opts, &spec_mapper/3)
{:remote_type, anno, [new_mod, new_type, new_args]}
|> pass_tokens(tokens)
end
def spec_mapper({:user_type, anno, name, args}, tokens, opts) do
new_args = context_mapper_map(args, tokens, opts, &spec_mapper/3)
{:user_type, anno, name, new_args}
|> pass_tokens(tokens)
end
def spec_mapper({:ann_type, anno, attrs}, tokens, opts) do
new_attrs = context_mapper_map(attrs, tokens, opts, &spec_mapper/3)
{:ann_type, anno, new_attrs}
|> pass_tokens(tokens)
end
def spec_mapper(type, tokens, opts) do
mapper(type, tokens, opts)
end
@doc """
Adds missing location to the module literal
"""
def remote_mapper({:remote, line, {:atom, 0, mod}, fun}) do
{:remote, line, {:atom, line, mod}, fun}
end
def remote_mapper(name), do: name
@doc """
Adds missing location to the literals in the guards
"""
@spec guards_mapper([form()], [token()], options()) :: {[form()], [token()]}
def guards_mapper([], tokens, _opts), do: {[], tokens}
def guards_mapper(guards, tokens, opts) do
List.foldl(guards, {[], tokens}, fn
[guard], {gs, tokens} ->
{g, ts} = mapper(guard, tokens, opts)
{[[g] | gs], ts}
gs, {ags, ts} ->
Logger.error("Unsupported guards format #{inspect(gs)}")
{gs ++ ags, ts}
end)
end
@doc """
Run mapper on map value and key.
"""
@spec map_element_mapper(tuple(), tokens(), options()) :: {tuple(), tokens()}
def map_element_mapper({field, anno, key, value}, tokens, opts)
when field in [:map_field_assoc, :map_field_exact] do
line = :erl_anno.line(anno)
opts = Keyword.put(opts, :line, line)
{key, tokens} = mapper(key, tokens, opts)
{value, tokens} = mapper(value, tokens, opts)
{field, anno, key, value}
|> pass_tokens(tokens)
end
@doc """
Run mapper on bin element value.
"""
@spec bin_element_mapper(tuple(), tokens(), options()) :: {tuple(), tokens()}
def bin_element_mapper({:bin_element, anno, value, size, tsl}, tokens, opts) do
{:ok, _line, anno, opts, _} = get_line(anno, opts)
{value, tokens} = mapper(value, tokens, opts)
{:bin_element, anno, value, size, tsl}
|> pass_tokens(tokens)
end
@doc """
Iterates over a list in abstract code format and runs the mapper on each element.
"""
@spec cons_mapper(form(), [token()], options()) :: {form(), tokens()}
def cons_mapper({:cons, anno, value, tail}, tokens, opts) do
{:ok, _, anno, opts, has_line?} = get_line(anno, opts)
{anno, opts} = update_line_from_tokens(tokens, anno, opts, has_line?)
{new_value, tokens} = mapper(value, tokens, opts)
{tail, tokens} = cons_mapper(tail, tokens, opts)
{:cons, anno, new_value, tail}
|> pass_tokens(tokens)
end
def cons_mapper(other, tokens, opts), do: mapper(other, tokens, opts)
@doc """
Update form anno with location taken from the corresponding token, if found.
Otherwise return form unchanged.
"""
@spec specify_line(form(), [token()], options()) :: {form(), [token()]}
def specify_line(form, tokens, opts) do
if not :erl_anno.generated(elem(form, 1)) do
{:ok, end_line} = Keyword.fetch(opts, :end_line)
res = drop_tokens_while(tokens, end_line, &(!match_token_to_form(&1, form)))
case res do
[token | tokens] ->
{take_loc_from_token(token, form), tokens}
[] ->
{form, tokens}
end
else
{form, tokens}
end
end
# Private Helpers
@spec match_token_to_form(token(), form()) :: boolean()
defp match_token_to_form({:int, {l1, _, v1}, _}, {:integer, l2, v2}) do
l2 = :erl_anno.line(l2)
l2 <= l1 && v1 == v2
end
defp match_token_to_form({:char, {l1, _, _}, v1}, {:integer, l2, v2}) do
l2 = :erl_anno.line(l2)
l2 <= l1 && v1 == v2
end
defp match_token_to_form({:flt, {l1, _, v1}, _}, {:float, l2, v2}) do
l2 = :erl_anno.line(l2)
l2 <= l1 && v1 == v2
end
defp match_token_to_form({:atom, {l1, _, _}, v1}, {:atom, l2, v2}) do
l2 = :erl_anno.line(l2)
l2 <= l1 && v1 == v2
end
defp match_token_to_form({:alias, {l1, _, _}, v1}, {:atom, l2, v2}) do
l2 = :erl_anno.line(l2)
l2 <= l1 && v1 == v2
end
defp match_token_to_form({:kw_identifier, {l1, _, _}, v1}, {:atom, l2, v2}) do
l2 = :erl_anno.line(l2)
l2 <= l1 && v1 == v2
end
defp match_token_to_form({:list_string, {l1, _, _}, [v1]}, {:cons, l2, _, _} = cons) do
v2 = cons_to_charlist(cons)
l2 <= l1 && to_charlist(v1) == v2
end
# BINARY
defp match_token_to_form(
{:bin_string, {l1, _, _}, [v1]},
{:bin, l2, [{:bin_element, _, {:string, _, v2}, :default, :default}]}
) do
# string
l2 <= l1 && :binary.bin_to_list(v1) == v2
end
defp match_token_to_form({:str, _, v}, {:string, _, v1}) do
to_charlist(v) == v1
end
defp match_token_to_form({true, {l1, _, _}}, {:atom, l2, true}) do
l2 <= l1
end
defp match_token_to_form({false, {l1, _, _}}, {:atom, l2, false}) do
l2 <= l1
end
defp match_token_to_form(_, _) do
false
end
@spec take_loc_from_token(token(), form()) :: form()
defp take_loc_from_token({:int, {line, _, _}, _}, {:integer, _, value}) do
{:integer, line, value}
end
defp take_loc_from_token({:char, {line, _, _}, _}, {:integer, _, value}) do
{:integer, line, value}
end
defp take_loc_from_token({:flt, {line, _, _}, _}, {:float, _, value}) do
{:float, line, value}
end
defp take_loc_from_token({:atom, {line, _, _}, _}, {:atom, _, value}) do
{:atom, line, value}
end
defp take_loc_from_token({:alias, {line, _, _}, _}, {:atom, _, value}) do
{:atom, line, value}
end
defp take_loc_from_token({:kw_identifier, {line, _, _}, _}, {:atom, _, value}) do
{:atom, line, value}
end
defp take_loc_from_token({:list_string, {l1, _, _}, _}, {:cons, _, _, _} = charlist) do
charlist_set_loc(charlist, l1)
end
defp take_loc_from_token(
{:bin_string, {l1, _, _}, _},
{:bin, _, [{:bin_element, _, {:string, _, v2}, :default, :default}]}
) do
{:bin, l1, [{:bin_element, l1, {:string, l1, v2}, :default, :default}]}
end
defp take_loc_from_token({:str, _, _}, {:string, loc, v2}) do
{:string, loc, v2}
end
defp take_loc_from_token({true, {line, _, _}}, {:atom, _, true}) do
{:atom, line, true}
end
defp take_loc_from_token({false, {line, _, _}}, {:atom, _, false}) do
{:atom, line, false}
end
defp take_loc_from_token(_, _), do: nil
def cons_to_charlist({nil, _}), do: []
def cons_to_charlist({:cons, _, {:integer, _, value}, tail}) do
[value | cons_to_charlist(tail)]
end
def charlist_set_loc({:cons, _, {:integer, _, value}, tail}, loc) do
{:cons, loc, {:integer, loc, value}, charlist_set_loc(tail, loc)}
end
def charlist_set_loc({nil, loc}, _), do: {nil, loc}
def put_line(anno, opts, line) do
{:erl_anno.set_line(line, anno), Keyword.put(opts, :line, line)}
end
def update_line_from_tokens([token | _], anno, opts, false) do
line = get_line_from_token(token)
put_line(anno, opts, line)
end
def update_line_from_tokens(_, anno, opts, _) do
{anno, opts}
end
defp get_line(anno, opts) do
case :erl_anno.line(anno) do
0 ->
case Keyword.fetch(opts, :line) do
{:ok, line} ->
anno = :erl_anno.set_line(line, anno)
{:ok, line, anno, opts, false}
err ->
err
end
line ->
opts = Keyword.put(opts, :line, line)
{:ok, line, anno, opts, true}
end
end
@spec prepare_forms_order(forms()) :: forms()
defp prepare_forms_order(forms) do
forms
|> Enum.sort(fn l, r -> elem(l, 0) == elem(r, 0) and elem(l, 1) > elem(r, 1) end)
|> Enum.reverse()
end
defp set_form_end_line(opts, form, forms) do
case Enum.find(forms, fn f ->
anno = elem(f, 1)
# Maybe should try to go deeper when generated and try to obtain
# the line from the first child. It should work for sure for clauses,
# but it has to be in the right order (e.g. if clauses are reversed)
:erl_anno.line(anno) > 0 and not :erl_anno.generated(anno)
end) do
nil ->
opts
next_form ->
current_line = :erl_anno.line(elem(form, 1))
next_line = :erl_anno.line(elem(next_form, 1))
if current_line == next_line do
Keyword.put(opts, :end_line, next_line + 1)
else
Keyword.put(opts, :end_line, next_line)
end
end
end
@spec pass_tokens(any(), tokens()) :: {any(), tokens()}
defp pass_tokens(form, tokens) do
{form, tokens}
end
end
| 28.704545 | 101 | 0.613882 |
f79e7ab484d1b16fd57575289332cd2468388fba | 30 | ex | Elixir | apps/ut_monitor_lib/lib/ut_monitor_lib.ex | usertesting/nerves_status_monitors | 18ccc7a3f3787d847a09328cea6c49b51605338a | [
"MIT"
] | 1 | 2017-02-10T17:41:02.000Z | 2017-02-10T17:41:02.000Z | apps/ut_monitor_lib/lib/ut_monitor_lib.ex | usertesting/nerves_status_monitors | 18ccc7a3f3787d847a09328cea6c49b51605338a | [
"MIT"
] | null | null | null | apps/ut_monitor_lib/lib/ut_monitor_lib.ex | usertesting/nerves_status_monitors | 18ccc7a3f3787d847a09328cea6c49b51605338a | [
"MIT"
] | null | null | null | defmodule UtMonitorLib do
end
| 10 | 25 | 0.866667 |
f79e8c12e57f4d44343437ec53db8cc794426dcd | 866 | ex | Elixir | lib/movekr_web/schema.ex | devandcoffee/movekr-be | bff1690e22daec9a545ca7b69495cb928d68c0a5 | [
"MIT"
] | null | null | null | lib/movekr_web/schema.ex | devandcoffee/movekr-be | bff1690e22daec9a545ca7b69495cb928d68c0a5 | [
"MIT"
] | null | null | null | lib/movekr_web/schema.ex | devandcoffee/movekr-be | bff1690e22daec9a545ca7b69495cb928d68c0a5 | [
"MIT"
] | null | null | null | defmodule MovekrWeb.Schema do
use Absinthe.Schema
alias MovekrWeb.AccountsResolver
alias MovekrWeb.ProjectsResolver
import_types(Movekr.Schema.DataTypes)
query do
@desc "Get all users"
field :all_users, non_null(list_of(non_null(:user))) do
resolve(&AccountsResolver.all_users/3)
end
end
mutation do
@desc "Create a new user"
field :create_user, :user do
arg(:user, non_null(:input_user))
resolve(&AccountsResolver.create_user/3)
end
@desc "Create a new project"
field :create_project, :project do
arg(:project, non_null(:input_project))
resolve(&ProjectsResolver.create_project/3)
end
@desc "Create a new columns"
field :create_columns, list_of(:column) do
arg(:columns, non_null(:input_columns))
resolve(&ProjectsResolver.create_columns/3)
end
end
end
| 24.055556 | 59 | 0.700924 |
f79e8d26ff6ec2551ded18e3d376895f270b8e2a | 3,276 | ex | Elixir | plugins/one_dialer/lib/one_dialer.ex | smpallen99/ucx_ucc | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 11 | 2017-05-15T18:35:05.000Z | 2018-02-05T18:27:40.000Z | plugins/one_dialer/lib/one_dialer.ex | anndream/infinity_one | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 15 | 2017-11-27T10:38:05.000Z | 2018-02-09T20:42:08.000Z | plugins/one_dialer/lib/one_dialer.ex | anndream/infinity_one | 47225f205a6ac4aacdb9bb4f7512dcf4092576ad | [
"MIT"
] | 4 | 2017-09-13T11:34:16.000Z | 2018-02-26T13:37:06.000Z | defmodule OneDialer do
@moduledoc """
Click to call implementation for OneChat.
An interface for external PBX or calling service for handling calling
phone numbers using a third party phone calling service like a PBX or
web service.
This module requires an adapter implementation which is configured with
config :infinity_one, :dialer_adapter, SomeModule
The adapter must implement the `dial/4` function.
The interface also supports digit translation, which is done here, before
the call to the adapter. The translator uses a pattern matching approach
taken from the Asterisk open source PBX, with the following definitions:
`N` - matches digits 2-9
`Z` - matches digits 1-9
`X` - matches digits 0-9
For example, to match a 10 digit number and insert a leading 1, use the
following `"1, NXXNXXXXXX"`, where the initial `1` is the inserted digit and
the trailing pattern is the matching specification.
iex> OneDialer.translate_digits("5555555555", "1, NXXNXXXXXX")
"15555555555"
iex> OneDialer.translate_digits("15555555555", "1, NXXNXXXXXX")
"15555555555"
iex> OneDialer.translate_digits("1234", "1, NXXNXXXXXX")
"1234"
Multiple translations can be defined by using a `,` to separate each. For
example, to add a second rule that prefixes 7 digit numbers with 613, use the
following `"1, NXXNXXXXXX, 613, NXXXXXX"`
iex> OneDialer.translate_digits("2234567", "1, NXXNXXXXXX, 613, NXXXXXX")
"6132234567"
iex> OneDialer.translate_digits("7322608", "1613, 73XXXXX")
"16137322608"
"""
require Logger
@adapter Application.get_env(:infinity_one, :dialer_adapter, nil)
@doc """
Calls a number by running the `dial/4` function on the configured adapter.
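  A minimal usage sketch (`MyPbxAdapter` is a hypothetical adapter module implementing
  `dial/4`; the number is a placeholder):
      OneDialer.dial(user, caller, "6137322608", adapter: MyPbxAdapter)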
"""
def dial(user, caller, number, opts), do: dial({user, caller}, number, opts)
def dial({_user, nil}, _number, _opts), do: nil
def dial({user, caller}, number, opts) do
# Logger.warn "dial number: #{inspect number}"
adapter = opts[:adapter] || @adapter
if adapter do
adapter.dial(user, caller, translate_digits(number), opts)
else
Logger.error """
      OneDialer attempted to dial number #{number} without a configured adapter.
      Please configure an adapter with:
config :one_dialer, :dialer_adapter, DialerModule
"""
end
end
@doc """
Apply configured digit translation rules to the called number.
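  For example, assuming the application is configured with
  `config :one_dialer, :dial_translation, "1, NXXNXXXXXX"`:
      OneDialer.translate_digits("5555555555")
      #=> "15555555555"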
"""
def translate_digits(digits, translation \\ nil) do
translation = translation || Application.get_env(:one_dialer, :dial_translation, "")
translation
|> String.replace(" ", "")
|> String.split(",")
|> Enum.chunk(2)
|> process_patterns(digits)
end
defp process_patterns(list, digits) do
Enum.find_value(list, digits, fn([insert_digits, pattern]) ->
("^" <> pattern)
|> String.replace("N", "[2-9]")
|> String.replace("Z", "[1-9]")
|> String.replace("X", "[0-9]")
|> Regex.compile!
|> find_and_replace(digits, "#{insert_digits}\\0")
end)
end
defp find_and_replace(regex, digits, replace_str) do
if Regex.run(regex, digits),
do: Regex.replace(regex, digits, replace_str)
end
end
| 32.76 | 88 | 0.68254 |
f79eaed711d1f4e6b673e091b452ce8efb33c279 | 111 | ex | Elixir | harbor/lib/harbor/permissions.ex | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | harbor/lib/harbor/permissions.ex | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | harbor/lib/harbor/permissions.ex | miapolis/port7 | 7df1223f83d055eeb6ce8f61f4af8b4f2cf33e74 | [
"MIT"
] | null | null | null | defmodule Harbor.Permissions do
def can_manage_members?(roles) do
Enum.member?(roles, :leader)
end
end
| 18.5 | 35 | 0.747748 |
f79eaf34379de61c41b90e2232d3b341c6ffeef4 | 2,187 | exs | Elixir | persistent_tree/test/persistent_tree_test/day13_test.exs | alex-dukhno/elixir-tdd-katas | 57e25fc275c4274c889f2b3760276cc8a393de9e | [
"MIT"
] | null | null | null | persistent_tree/test/persistent_tree_test/day13_test.exs | alex-dukhno/elixir-tdd-katas | 57e25fc275c4274c889f2b3760276cc8a393de9e | [
"MIT"
] | null | null | null | persistent_tree/test/persistent_tree_test/day13_test.exs | alex-dukhno/elixir-tdd-katas | 57e25fc275c4274c889f2b3760276cc8a393de9e | [
"MIT"
] | null | null | null | defmodule PersistentTreeTest.Day13 do
use ExUnit.Case
alias PersistentTree.Day13, as: Tree
alias PersistentTree.Day13.{Leaf, Node}
defp empty(), do: %Leaf{}
defp bottom_node(val),
do: %Node{item: val, left: empty(), right: empty()}
defp node_with_left(val, []),
do: bottom_node(val)
defp node_with_left(val, [left | rest]),
do: %Node{item: val, left: node_with_left(left, rest), right: empty()}
defp node_with_right(val, []), do: bottom_node(val)
defp node_with_right(val, [right | rest]),
do: %Node{item: val, left: empty(), right: node_with_right(right, rest)}
defp into_tree(list),
do: empty()
|> into_tree(list)
defp into_tree(tree, []), do: tree
defp into_tree(tree, [head | tail]),
do: tree
|> Tree.add(head)
|> into_tree(tail)
test "create an empty tree", do:
assert Tree.new() == %Leaf{}
test "add item to a tree", do:
assert Tree.new()
|> Tree.add(20) == bottom_node(20)
test "add item to a left tree", do:
assert Tree.new()
|> Tree.add(20)
|> Tree.add(10) == node_with_left(20, [10])
test "add item to a right tree", do:
assert Tree.new()
|> Tree.add(20)
|> Tree.add(30) == node_with_right(20, [30])
test "add items to a left tree", do:
assert Tree.new()
|> Tree.add(20)
|> Tree.add(15)
|> Tree.add(10) == node_with_left(20, [15, 10])
test "add items to a right tree", do:
assert Tree.new()
|> Tree.add(20)
|> Tree.add(25)
|> Tree.add(30) == node_with_right(20, [25, 30])
test "preorder traversal", do:
assert [5, 2, 1, 4, 7, 6, 8]
|> into_tree()
|> Tree.preorder()
|> Enum.to_list() == [5, 2, 1, 4, 7, 6, 8]
test "postorder traversal", do:
assert [5, 2, 1, 4, 7, 6, 8]
|> into_tree()
|> Tree.postorder()
|> Enum.to_list() == [1, 4, 2, 6, 8, 7, 5]
test "in order traversal", do:
assert [5, 2, 1, 4, 7, 6, 8]
|> into_tree()
|> Tree.in_order()
|> Enum.to_list() == [1, 2, 4, 5, 6, 7, 8]
end
| 28.402597 | 79 | 0.534065 |
f79eba579bee49fde18585f5079f70f554e13503 | 1,781 | ex | Elixir | clients/run/lib/google_api/run/v1/model/resource_record.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/run/lib/google_api/run/v1/model/resource_record.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/run/lib/google_api/run/v1/model/resource_record.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Run.V1.Model.ResourceRecord do
@moduledoc """
A DNS resource record.
## Attributes
* `name` (*type:* `String.t`, *default:* `nil`) - Relative name of the object affected by this record. Only applicable for `CNAME` records. Example: 'www'.
* `rrdata` (*type:* `String.t`, *default:* `nil`) - Data for this record. Values vary by record type, as defined in RFC 1035 (section 5) and RFC 1034 (section 3.6.1).
* `type` (*type:* `String.t`, *default:* `nil`) - Resource record type. Example: `AAAA`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:name => String.t(),
:rrdata => String.t(),
:type => String.t()
}
field(:name)
field(:rrdata)
field(:type)
end
defimpl Poison.Decoder, for: GoogleApi.Run.V1.Model.ResourceRecord do
def decode(value, options) do
GoogleApi.Run.V1.Model.ResourceRecord.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Run.V1.Model.ResourceRecord do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.603774 | 170 | 0.699045 |
f79f23e0aa9864f5d75654304f376f4d2a452057 | 388 | exs | Elixir | programming_elixir/Link2.exs | enilsen16/elixir | b4d1d45858a25e4beb39e07de8685f3d93d6a520 | [
"MIT"
] | null | null | null | programming_elixir/Link2.exs | enilsen16/elixir | b4d1d45858a25e4beb39e07de8685f3d93d6a520 | [
"MIT"
] | null | null | null | programming_elixir/Link2.exs | enilsen16/elixir | b4d1d45858a25e4beb39e07de8685f3d93d6a520 | [
"MIT"
] | null | null | null | defmodule Link2 do
import(:timer, only: [sleep: 1])
def sad_function do
sleep 500
exit(:boom)
end
def run do
res = spawn_monitor(Link2, :sad_function, [])
IO.puts inspect res
receive do
msg ->
IO.puts "MESSAGE RECIVED: #{inspect msg}"
after 1000 ->
IO.puts "Nothing happened as far as I am concerned"
end
end
end
Link2.run
| 17.636364 | 59 | 0.613402 |
f79f36d37e43a73a3c8841c0dcd1f57d6f81febe | 67 | ex | Elixir | lib/conductor_web/views/layout_view.ex | meltingice/conductor | 630440adc1081a0991d3dba17ced775a9dd05055 | [
"MIT"
] | null | null | null | lib/conductor_web/views/layout_view.ex | meltingice/conductor | 630440adc1081a0991d3dba17ced775a9dd05055 | [
"MIT"
] | 2 | 2021-03-10T20:23:26.000Z | 2021-05-11T15:56:49.000Z | lib/conductor_web/views/layout_view.ex | meltingice/conductor | 630440adc1081a0991d3dba17ced775a9dd05055 | [
"MIT"
] | 1 | 2020-06-05T02:34:58.000Z | 2020-06-05T02:34:58.000Z | defmodule ConductorWeb.LayoutView do
use ConductorWeb, :view
end
| 16.75 | 36 | 0.820896 |
f79f65dd7cc14c19865ec44301728c1a87ca4d7f | 1,474 | exs | Elixir | apps/omg_watcher/test/omg_watcher/integration/test_server_test.exs | kendricktan/elixir-omg | 834c103fd5c4b9e063c1d32b9b4e5728abb64009 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/test/omg_watcher/integration/test_server_test.exs | kendricktan/elixir-omg | 834c103fd5c4b9e063c1d32b9b4e5728abb64009 | [
"Apache-2.0"
] | null | null | null | apps/omg_watcher/test/omg_watcher/integration/test_server_test.exs | kendricktan/elixir-omg | 834c103fd5c4b9e063c1d32b9b4e5728abb64009 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.Watcher.Integration.TestServerTest do
use ExUnitFixtures
use ExUnit.Case, async: false
alias OMG.Utils.HttpRPC.Encoding
alias OMG.Watcher.HttpRPC.Client
alias OMG.Watcher.Integration.TestServer
@expected_block_hash <<0::256>>
describe "/block.get -" do
@response TestServer.make_response(%{
blknum: 123_000,
hash: Encoding.to_hex(@expected_block_hash),
transactions: []
})
@tag fixtures: [:test_server]
test "successful response is parsed to expected map", %{test_server: context} do
TestServer.with_route(context, "/block.get", @response)
assert {:ok,
%{
transactions: [],
number: 123_000,
hash: @expected_block_hash
}} == Client.get_block(@expected_block_hash, context.fake_addr)
end
end
end
| 33.5 | 84 | 0.676391 |
f79f8a6d14b1a63932eadfc9259e0414f10ef4e5 | 3,760 | exs | Elixir | banking_challenge_stone_umbrella/apps/banking_challenge_web/test/banking_challenge_web/controllers/transaction_controller_test.exs | douxsalemk/banking_challenge_stone | 59c2790a0f14b68c2b25851c806ebc553227316d | [
"Apache-2.0"
] | null | null | null | banking_challenge_stone_umbrella/apps/banking_challenge_web/test/banking_challenge_web/controllers/transaction_controller_test.exs | douxsalemk/banking_challenge_stone | 59c2790a0f14b68c2b25851c806ebc553227316d | [
"Apache-2.0"
] | null | null | null | banking_challenge_stone_umbrella/apps/banking_challenge_web/test/banking_challenge_web/controllers/transaction_controller_test.exs | douxsalemk/banking_challenge_stone | 59c2790a0f14b68c2b25851c806ebc553227316d | [
"Apache-2.0"
] | null | null | null | defmodule BankingChallengeWeb.TransactionControllerTest do
use BankingChallengeWeb.ConnCase
alias BankingChallenge.Accounts.Schemas.Account
describe "send/2" do
setup %{conn: conn} do
params1 = %{"number_account" => 12123}
params2 = %{"number_account" => 12133}
{:ok, %Account{id: from_account_id}} = BankingChallenge.Accounts.Account.create_new_account(params1)
{:ok, %Account{id: to_account_id}} = BankingChallenge.Accounts.Account.create_new_account(params2)
{:ok, conn: conn, from_account_id: from_account_id, to_account_id: to_account_id}
end
test "when all params are valid, make de transfer", %{conn: conn, from_account_id: from_account_id, to_account_id: to_account_id} do
params = %{
"amount" => 1,
"from_account_id" => from_account_id,
"to_account_id" => to_account_id
}
response =
conn
|> post(Routes.transaction_path(conn, :send), params)
|> json_response(:ok)
assert %{
"message" => "Transaction successfully",
"transaction" => %{"source_acc" => %{"balance" => 999, "id" => id},
"target_acc" => %{"balance" => 1001, "id" => id2},
"transaction" => %{"amount" => 1, "from_account_id" => id, "to_account_id" => id2}}
} = response
end
#test "when all params are invalid, returns an error", %{conn: conn, from_account_id: from_account_id, to_account_id: to_account_id} do
# params = %{
# "amount" => "abc",
# "from_account_id" => from_account_id,
# "to_account_id" => to_account_id
# }
#
# response =
# conn
# |> post(Routes.transaction_path(conn, :send), params |> IO.inspect(label: "Response conn"))
# |> json_response(:bad_request)
#
# assert %{
# "message" => "Transaction successfully",
# "transaction" => %{"source_acc" => %{"balance" => 999, "id" => id},
# "target_acc" => %{"balance" => 1001, "id" => id2},
# "transaction" => %{"amount" => 1, "from_account_id" => id, "to_account_id" => id2}}
# } = response
#
#end
end
describe "withdraw/2" do
setup %{conn: conn} do
params = %{"number_account" => 12125}
{:ok, %Account{id: from_account_id}} = BankingChallenge.Accounts.Account.create_new_account(params)
{:ok, conn: conn, from_account_id: from_account_id}
end
test "when all params are valid, make the withdraw", %{conn: conn, from_account_id: from_account_id} do
params = %{
"amount" => 1,
"from_account_id" => from_account_id
}
response =
conn
|> post(Routes.transaction_path(conn, :withdraw), params)
|> json_response(:ok)
assert %{
"message" => "Withdraw successfully",
"transaction" => %{"source_acc" => %{"balance" => 999, "id" => id3},
"withdraw" => %{"amount" => 1, "from_account_id" => id3}}
} = response
end
#test "when all params are invalid, returns an error", %{conn: conn, from_account_id: from_account_id} do
#
# params = %{
# "amount" => "abc",
# "from_account_id" => from_account_id
# }
#
# response =
# conn
# |> post(Routes.transaction_path(conn, :withdraw), params)
# |> json_response(:bad_request)
#
# assert %{
# "message" => "Withdraw successfully",
# "transaction" => %{"source_acc" => %{"balance" => 999, "id" => id3},
# "withdraw" => %{"amount" => 1, "from_account_id" => id3}}
# } = response
#
#end
end
end
| 33.274336 | 139 | 0.557979 |
f79fa9c2836d877630fe62bf34a5babf94d6243e | 2,390 | exs | Elixir | test/plug/adapters/cowboy_test.exs | outstand/plug | e75d542b3028b5c1f348ac9d128306c46a6b6e70 | [
"Apache-2.0"
] | 1,218 | 2017-07-14T15:13:32.000Z | 2022-03-30T16:42:42.000Z | test/plug/adapters/cowboy_test.exs | outstand/plug | e75d542b3028b5c1f348ac9d128306c46a6b6e70 | [
"Apache-2.0"
] | 502 | 2017-07-19T15:36:44.000Z | 2022-03-31T06:47:36.000Z | test/plug/adapters/cowboy_test.exs | outstand/plug | e75d542b3028b5c1f348ac9d128306c46a6b6e70 | [
"Apache-2.0"
] | 376 | 2017-07-17T15:47:55.000Z | 2022-03-23T19:24:30.000Z | defmodule Plug.Adapters.CowboyTest do
use ExUnit.Case
@raise_message "plug_cowboy dependency missing"
@missing_warning "{:plug_cowboy, \"~> 1.0\"}"
@plug_cowboy_path Path.expand("../../fixtures/plug_cowboy.exs", __DIR__)
setup do
Code.require_file(@plug_cowboy_path)
:ok
end
import ExUnit.CaptureIO
describe "http/3" do
test "raises and warns if the plug_cowboy is missing" do
test_raise(fn -> Plug.Adapters.Cowboy.http(__MODULE__, [], port: 8003) end)
end
test "proxies if Plug.Cowboy is defined" do
assert {:ok, :http} == Plug.Adapters.Cowboy.http(__MODULE__, [], port: 8003)
end
end
describe "https/3" do
test "raises and warns if the plug_cowboy is missing" do
test_raise(fn -> Plug.Adapters.Cowboy.https(__MODULE__, [], port: 8003) end)
end
test "proxies if Plug.Cowboy is defined" do
assert {:ok, :https} == Plug.Adapters.Cowboy.https(__MODULE__, [], port: 8003)
end
end
describe "shutdown/1" do
test "raises and warns if the plug_cowboy is missing" do
test_raise(fn -> Plug.Adapters.Cowboy.shutdown(:ref) end)
end
test "proxies if Plug.Cowboy is defined" do
assert {:ok, :shutdown} == Plug.Adapters.Cowboy.shutdown(:ref)
end
end
describe "child_spec/4" do
test "raises and warns if the plug_cowboy is missing" do
test_raise(fn -> Plug.Adapters.Cowboy.child_spec(:http, __MODULE__, [], []) end)
end
test "proxies if Plug.Cowboy is defined" do
assert {:ok, :child_spec} == Plug.Adapters.Cowboy.child_spec([])
{:ok, :child_spec} = Plug.Adapters.Cowboy.child_spec(:http, __MODULE__, [], [])
end
end
describe "child_spec/1" do
test "raises and warns if the plug_cowboy is missing" do
test_raise(fn -> Plug.Adapters.Cowboy.child_spec([]) end)
end
test "proxies if Plug.Cowboy is defined" do
assert {:ok, :child_spec} == Plug.Adapters.Cowboy.child_spec([])
end
end
defp test_raise(fun) do
unload_plug_cowboy()
output =
capture_io(:stderr, fn ->
Process.flag(:trap_exit, true)
pid = spawn_link(fun)
assert_receive({:EXIT, ^pid, @raise_message})
end)
assert output =~ @missing_warning
end
defp unload_plug_cowboy() do
:code.delete(Plug.Cowboy)
:code.purge(Plug.Cowboy)
Code.unrequire_files([@plug_cowboy_path])
end
end
| 28.117647 | 86 | 0.66569 |
f79fdb5ed2c7e25c7e13437d7fec3592e2d9585e | 862 | exs | Elixir | mix.exs | RobertDober/md0 | b2337e69a9bea6b2b17951b75daa15fe7fe8cfad | [
"Apache-2.0"
] | null | null | null | mix.exs | RobertDober/md0 | b2337e69a9bea6b2b17951b75daa15fe7fe8cfad | [
"Apache-2.0"
] | null | null | null | mix.exs | RobertDober/md0 | b2337e69a9bea6b2b17951b75daa15fe7fe8cfad | [
"Apache-2.0"
] | null | null | null | defmodule Md0.MixProject do
use Mix.Project
def project do
[
app: :md0,
version: "0.1.0",
elixir: "~> 1.7",
      elixirc_paths: elixirc_paths(Mix.env()),
escript: escript_config(),
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:dialyxir, "1.0.0-rc.4", only: [:dev], runtime: false},
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"},
]
end
defp escript_config do
[main_module: Md0.Runner]
end
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
end
| 22.102564 | 88 | 0.591647 |
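# Note (not part of the original file): with the escript configuration above,
# the command-line tool can be built and run with standard Mix tooling:
#
#   mix escript.build
#   ./md0
#
# The escript binary name defaults to the app name (:md0); Md0.Runner.main/1
# is the entry point declared via :main_module.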
f79fdd5686dc31c8b918bdaf640443c7fe9e26f3 | 879 | ex | Elixir | lib/fx/rates.ex | kelvintaywl/fx | 1bb29b790e49a79309ca22788cafd432e1edfd92 | [
"MIT"
] | null | null | null | lib/fx/rates.ex | kelvintaywl/fx | 1bb29b790e49a79309ca22788cafd432e1edfd92 | [
"MIT"
] | null | null | null | lib/fx/rates.ex | kelvintaywl/fx | 1bb29b790e49a79309ca22788cafd432e1edfd92 | [
"MIT"
] | null | null | null | defmodule Fx.Rates do
def from(amount) when amount >= 0.0 do
from(amount, :USD)
end
def from(amount, currency) when amount >= 0.0 do
rates = Fx.API.latest(currency)
{:ok, %{amount: amount, currency: currency, rates: rates}}
end
def to({:ok, %{amount: amount, currency: currency, rates: rates}}) when amount >= 0.0 do
to({:ok, %{amount: amount, currency: currency, rates: rates}}, :USD)
end
def to({:ok, %{amount: amount, currency: source_currency, rates: _rates}}, target_currency) when amount >= 0.0 and source_currency == target_currency do
amount
end
def to({:ok, %{amount: amount, currency: _source_currency, rates: rates}}, target_currency) when amount >= 0.0 do
    if rates == [] do
      throw "No rates available"
    end
amount * rates[target_currency]
end
def to({:error, _res}, _currency) do
throw "Not found"
end
end
| 27.46875 | 154 | 0.649602 |
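# Usage sketch for Fx.Rates above (not part of the original file). It assumes
# Fx.API.latest/1 performs the rate lookup and returns a keyword list keyed by
# currency atoms (e.g. [EUR: 0.92]); the numbers here are placeholders.
100.0
|> Fx.Rates.from(:USD)
|> Fx.Rates.to(:EUR)
# => 92.0 with the example rate above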
f79ff43add99b12cc73ee0601408ed7aaaa75a6d | 3,374 | ex | Elixir | lib/la_famiglia/conversation.ex | cruessler/lafamiglia | 084915a2d44a5e69fb6ad9321eac08ced0e3016a | [
"MIT"
] | 5 | 2016-10-20T10:00:59.000Z | 2017-11-19T08:14:18.000Z | lib/la_famiglia/conversation.ex | cruessler/lafamiglia | 084915a2d44a5e69fb6ad9321eac08ced0e3016a | [
"MIT"
] | 39 | 2020-04-22T05:27:32.000Z | 2022-03-13T17:22:26.000Z | lib/la_famiglia/conversation.ex | cruessler/lafamiglia | 084915a2d44a5e69fb6ad9321eac08ced0e3016a | [
"MIT"
] | null | null | null | defmodule LaFamiglia.Conversation do
use LaFamiglia.Web, :model
alias LaFamiglia.Repo
alias LaFamiglia.Player
alias LaFamiglia.Conversation
alias LaFamiglia.Message
alias LaFamiglia.ConversationStatus
alias __MODULE__
schema "conversations" do
field :last_message_sent_at, :utc_datetime
field :new_messages, :boolean, virtual: true
has_many :messages, Message
has_many :conversation_statuses, ConversationStatus
many_to_many :participants, Player, join_through: ConversationStatus
    timestamps()
end
def create(params) do
participants =
for p <- params.participants,
do: change(p, %{unread_conversations: p.unread_conversations + 1})
%Conversation{}
|> cast(params, [:last_message_sent_at])
|> put_assoc(:participants, participants)
end
@doc """
Finds a conversation by its set of participants. Expects `participants` to
contain only unique values.
"""
def find_by_participants(participants) do
# Find all conversations whose set of participants contains the players in
# `participants`.
conversation_ids =
Enum.reduce(participants, from(c in Conversation, select: c.id), fn p, query ->
from(c in query,
join: s in ConversationStatus,
on:
s.conversation_id == c.id and
s.player_id == ^p.id
)
end)
|> Repo.all()
if length(conversation_ids) > 0 do
# Find the right conversation by the number of participants. This assumes
# that, at any given time, every conversation is unique with respect to
# its set of participants.
conversation =
from(c in Conversation,
join: s in assoc(c, :conversation_statuses),
group_by: c.id,
select: %{id: c.id, participant_count: count(s.id)},
where: c.id in ^conversation_ids
)
|> Repo.all()
|> Enum.find(fn c -> c.participant_count == length(participants) end)
unless is_nil(conversation), do: {:ok, Repo.get(Conversation, conversation.id)}
end
# Return nil if no conversation was found.
end
defp unread_conversations(statuses, last_message_sent_at),
do: unread_conversations(statuses, last_message_sent_at, 0)
defp unread_conversations([], _, acc), do: acc
defp unread_conversations([first | rest], last_message_sent_at, acc) do
read_until = get_field(first, :read_until)
case DateTime.compare(read_until, last_message_sent_at) do
      :lt -> unread_conversations(rest, last_message_sent_at, acc + 1)
      _ -> unread_conversations(rest, last_message_sent_at, acc)
end
end
@doc """
Updates the associated `ConversationStatus` of a given `conversation` as well
as the `player`’s `unread_conversations`.
Returns a changeset for `player`.
"""
def update_read_until_for(%{id: player_id} = player, conversation) do
player = Repo.preload(player, conversation_statuses: :conversation)
new_statuses =
for s <- player.conversation_statuses do
case s.player_id do
^player_id -> change(s, %{read_until: conversation.last_message_sent_at})
_ -> change(s)
end
end
unread_conversations = unread_conversations(new_statuses, conversation.last_message_sent_at)
change(player, %{unread_conversations: unread_conversations})
|> put_assoc(:conversation_statuses, new_statuses)
end
end
| 30.672727 | 96 | 0.686722 |
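# Illustrative call sequence for LaFamiglia.Conversation above. The players are
# hypothetical and assumed to be loaded already (e.g. via LaFamiglia.Repo); the
# happy-path match assumes a conversation between exactly these players exists.
{:ok, conversation} = LaFamiglia.Conversation.find_by_participants([alice, bob])

# Mark the conversation as read for one participant and persist the change.
alice
|> LaFamiglia.Conversation.update_read_until_for(conversation)
|> LaFamiglia.Repo.update()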
f7a035a5097c62cb1d84e72ce96f883c9aac963b | 2,566 | ex | Elixir | lib/tz_world/backend/memory.ex | LostKobrakai/tz_world | 65e138301adc0c9e6d6135897d2607ef6c361bca | [
"MIT"
] | null | null | null | lib/tz_world/backend/memory.ex | LostKobrakai/tz_world | 65e138301adc0c9e6d6135897d2607ef6c361bca | [
"MIT"
] | null | null | null | lib/tz_world/backend/memory.ex | LostKobrakai/tz_world | 65e138301adc0c9e6d6135897d2607ef6c361bca | [
"MIT"
] | null | null | null | defmodule TzWorld.Backend.Memory do
@behaviour TzWorld.Backend
use GenServer
alias TzWorld.GeoData
alias Geo.Point
@timeout 10_000
@doc false
def start_link(options \\ []) do
GenServer.start_link(__MODULE__, options, name: __MODULE__)
end
def init(_options) do
{:ok, [], {:continue, :load_data}}
end
def version do
GenServer.call(__MODULE__, :version, @timeout)
end
@spec timezone_at(Geo.Point.t()) :: {:ok, String.t()} | {:error, atom}
def timezone_at(%Point{} = point) do
GenServer.call(__MODULE__, {:timezone_at, point}, @timeout)
end
@spec all_timezones_at(Geo.Point.t()) :: {:ok, [String.t()]} | {:error, atom}
def all_timezones_at(%Point{} = point) do
GenServer.call(__MODULE__, {:all_timezones_at, point}, @timeout)
end
@spec reload_timezone_data :: {:ok, term}
def reload_timezone_data do
GenServer.call(__MODULE__, :reload_data, @timeout)
end
# --- Server callback implementation
def handle_continue(:load_data, _state) do
{:noreply, GeoData.load_compressed_data()}
end
def handle_call(:reload_data, _from, _state) do
case GeoData.load_compressed_data() do
{:ok, _data} = return -> {:reply, {:ok, :loaded}, return}
other -> {:reply, other, other}
end
end
def handle_call(:version, _from, state) do
case state do
{:ok, [version | _tz_data]} -> {:reply, {:ok, version}, state}
other -> {:reply, other, state}
end
end
def handle_call({:timezone_at, %Geo.Point{} = point}, _from, state) do
timezone =
with {:ok, [_version | tz_data]} <- state do
tz_data
|> Enum.filter(&TzWorld.contains?(&1.properties.bounding_box, point))
|> Enum.find(&TzWorld.contains?(&1, point))
|> case do
%Geo.MultiPolygon{properties: %{tzid: tzid}} -> {:ok, tzid}
%Geo.Polygon{properties: %{tzid: tzid}} -> {:ok, tzid}
nil -> {:error, :time_zone_not_found}
end
end
{:reply, timezone, state}
end
  def handle_call({:all_timezones_at, %Geo.Point{} = point}, _from, state) do
    timezones =
      with {:ok, [_version | tz_data]} <- state do
        tzids =
          tz_data
          |> Enum.filter(&TzWorld.contains?(&1.properties.bounding_box, point))
          |> Enum.filter(&TzWorld.contains?(&1, point))
          |> Enum.map(& &1.properties.tzid)

        {:ok, tzids}
      end

    {:reply, timezones, state}
  end
end
| 28.197802 | 79 | 0.6212 |
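# Example lookup against the in-memory backend above, assuming the process has
# been started (normally under a supervisor) and the compressed geo data is
# available on disk. Coordinates are {longitude, latitude}; Berlin is used here
# purely as an illustration.
{:ok, _pid} = TzWorld.Backend.Memory.start_link([])

point = %Geo.Point{coordinates: {13.4050, 52.5200}}
{:ok, time_zone} = TzWorld.Backend.Memory.timezone_at(point)
# time_zone is expected to be "Europe/Berlin" with current tz data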
f7a06b430811b308e5373bb6c1de6704af7b9c2e | 77 | exs | Elixir | apps/admin/test/test_helper.exs | votiakov/petal | ec03551da6dadc0c3482b25a5f5dcd400c36db43 | [
"MIT"
] | null | null | null | apps/admin/test/test_helper.exs | votiakov/petal | ec03551da6dadc0c3482b25a5f5dcd400c36db43 | [
"MIT"
] | null | null | null | apps/admin/test/test_helper.exs | votiakov/petal | ec03551da6dadc0c3482b25a5f5dcd400c36db43 | [
"MIT"
] | null | null | null | ExUnit.start()
Ecto.Adapters.SQL.Sandbox.mode(Legendary.Admin.Repo, :manual)
| 25.666667 | 61 | 0.792208 |
f7a087694c599eb5a80b6d0e43bb5954ef30a718 | 484 | ex | Elixir | test/support/fake_sub_data.ex | StephaneRob/borsh_ex | 279cb1f55effd4a29c1d571f78d4ed9d4ec44aaa | [
"BSD-2-Clause"
] | 1 | 2022-01-23T16:28:34.000Z | 2022-01-23T16:28:34.000Z | test/support/fake_sub_data.ex | StephaneRob/borsh_ex | 279cb1f55effd4a29c1d571f78d4ed9d4ec44aaa | [
"BSD-2-Clause"
] | null | null | null | test/support/fake_sub_data.ex | StephaneRob/borsh_ex | 279cb1f55effd4a29c1d571f78d4ed9d4ec44aaa | [
"BSD-2-Clause"
] | null | null | null | defmodule BorshEx.FakeSubData do
use BorshEx.Schema
defstruct a: nil, b: nil, c: nil, d: nil, e: nil, f: nil, g: nil, h: nil, i: nil, j: nil, k: nil
borsh_schema do
field :a, "string"
field :b, "u64"
field :c, "u8"
field :d, {"array", "u8"}
field :e, {"array", {"u16", 10}}
field :f, "boolean"
field :g, "boolean"
field :h, {"option", "u8"}
field :i, {"option", "string"}
field :j, {"array", {"string", 3}}
field :k, "i8"
end
end
| 24.2 | 98 | 0.53719 |
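# A value matching the Borsh schema above, for illustration only. The field
# values are made up but follow the declared types (u64, u8, fixed-size arrays,
# options, i8); how the struct is actually (de)serialized depends on the
# BorshEx.Schema macros, which are not shown here.
%BorshEx.FakeSubData{
  a: "hello",
  b: 1_000_000,
  c: 255,
  d: [1, 2, 3],
  e: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9],
  f: true,
  g: false,
  h: nil,
  i: "optional text",
  j: ["one", "two", "three"],
  k: -5
}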
f7a089c992e2b9136ec6fad6990e991b0b79b4b6 | 1,854 | ex | Elixir | clients/content/lib/google_api/content/v2/model/shippingsettings_get_supported_carriers_response.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/shippingsettings_get_supported_carriers_response.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/content/lib/google_api/content/v2/model/shippingsettings_get_supported_carriers_response.ex | mcrumm/elixir-google-api | 544f22797cec52b3a23dfb6e39117f0018448610 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.ShippingsettingsGetSupportedCarriersResponse do
@moduledoc """
## Attributes
* `carriers` (*type:* `list(GoogleApi.Content.V2.Model.CarriersCarrier.t)`, *default:* `nil`) - A list of supported carriers. May be empty.
* `kind` (*type:* `String.t`, *default:* `nil`) - Identifies what kind of resource this is. Value: the fixed string "content#shippingsettingsGetSupportedCarriersResponse".
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:carriers => list(GoogleApi.Content.V2.Model.CarriersCarrier.t()),
:kind => String.t()
}
field(:carriers, as: GoogleApi.Content.V2.Model.CarriersCarrier, type: :list)
field(:kind)
end
defimpl Poison.Decoder,
for: GoogleApi.Content.V2.Model.ShippingsettingsGetSupportedCarriersResponse do
def decode(value, options) do
GoogleApi.Content.V2.Model.ShippingsettingsGetSupportedCarriersResponse.decode(value, options)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Content.V2.Model.ShippingsettingsGetSupportedCarriersResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 35.653846 | 175 | 0.747573 |
f7a0e3b277f18e4aee2c8e247e510dd06d00c2cf | 13,846 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/floodlight_configurations.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/floodlight_configurations.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/api/floodlight_configurations.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V33.Api.FloodlightConfigurations do
@moduledoc """
API calls for all endpoints tagged `FloodlightConfigurations`.
"""
alias GoogleApi.DFAReporting.V33.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Gets one floodlight configuration by ID.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `id` (*type:* `String.t`) - Floodlight configuration ID.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V33.Model.FloodlightConfiguration{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_floodlight_configurations_get(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.DFAReporting.V33.Model.FloodlightConfiguration.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def dfareporting_floodlight_configurations_get(
connection,
profile_id,
id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url(
"/dfareporting/v3.3/userprofiles/{profileId}/floodlightConfigurations/{id}",
%{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1),
"id" => URI.encode(id, &(URI.char_unreserved?(&1) || &1 == ?/))
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.FloodlightConfiguration{}]
)
end
@doc """
Retrieves a list of floodlight configurations, possibly filtered.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:ids` (*type:* `list(String.t)`) - Set of IDs of floodlight configurations to retrieve. Required field; otherwise an empty list will be returned.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V33.Model.FloodlightConfigurationsListResponse{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_floodlight_configurations_list(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.DFAReporting.V33.Model.FloodlightConfigurationsListResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def dfareporting_floodlight_configurations_list(
connection,
profile_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:ids => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/floodlightConfigurations", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.FloodlightConfigurationsListResponse{}]
)
end
@doc """
Updates an existing floodlight configuration. This method supports patch semantics.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `id` (*type:* `String.t`) - FloodlightConfiguration ID.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.FloodlightConfiguration.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V33.Model.FloodlightConfiguration{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_floodlight_configurations_patch(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.DFAReporting.V33.Model.FloodlightConfiguration.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def dfareporting_floodlight_configurations_patch(
connection,
profile_id,
id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:patch)
|> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/floodlightConfigurations", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_param(:query, :id, id)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.FloodlightConfiguration{}]
)
end
@doc """
Updates an existing floodlight configuration.
## Parameters
* `connection` (*type:* `GoogleApi.DFAReporting.V33.Connection.t`) - Connection to server
* `profile_id` (*type:* `String.t`) - User profile ID associated with this request.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.DFAReporting.V33.Model.FloodlightConfiguration.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.DFAReporting.V33.Model.FloodlightConfiguration{}}` on success
* `{:error, info}` on failure
"""
@spec dfareporting_floodlight_configurations_update(
Tesla.Env.client(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.DFAReporting.V33.Model.FloodlightConfiguration.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def dfareporting_floodlight_configurations_update(
connection,
profile_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:put)
|> Request.url("/dfareporting/v3.3/userprofiles/{profileId}/floodlightConfigurations", %{
"profileId" => URI.encode(profile_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.DFAReporting.V33.Model.FloodlightConfiguration{}]
)
end
end
| 42.085106 | 196 | 0.620179 |
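# Hypothetical call against the generated API module above. The profile and
# configuration IDs are placeholders, `token` is assumed to be a valid OAuth 2.0
# access token, and the generated Connection module is assumed to expose new/1.
conn = GoogleApi.DFAReporting.V33.Connection.new(token)

{:ok, %GoogleApi.DFAReporting.V33.Model.FloodlightConfiguration{} = config} =
  GoogleApi.DFAReporting.V33.Api.FloodlightConfigurations.dfareporting_floodlight_configurations_get(
    conn,
    "1234567",
    "8901234"
  )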
f7a0ef645ea71761df3d1d5119e739b6eeb06973 | 1,324 | exs | Elixir | config/releases.exs | ethervoid/postoffice | 5cb275ed5e18ec8ff91523013c3b4215fe41a53f | [
"Apache-2.0"
] | null | null | null | config/releases.exs | ethervoid/postoffice | 5cb275ed5e18ec8ff91523013c3b4215fe41a53f | [
"Apache-2.0"
] | null | null | null | config/releases.exs | ethervoid/postoffice | 5cb275ed5e18ec8ff91523013c3b4215fe41a53f | [
"Apache-2.0"
] | null | null | null | import Config
config :postoffice, PostofficeWeb.Endpoint,
secret_key_base: {:system, "SECRET_KEY_BASE", default: "12121212"}
config :postoffice, pubsub_project_name: {:system, "GCLOUD_PUBSUB_PROJECT_ID", default: "test"}
config :postoffice, Postoffice.Repo,
username: {:system, "DB_USERNAME", default: "postgres"},
hostname: {:system, "DB_HOST", default: "db"},
password: {:system, "DB_PASSWORD", default: "postgres"},
database: {:system, "DB_NAME", default: "myapp"},
port: {:system, "DB_PORT", type: :integer, default: 5432},
pool_size: {:system, "DB_POOL_SIZE", type: :integer, default: 20},
queue_target: {:system, "DB_QUEUE_TARGET", type: :integer, default: 3000},
show_sensitive_data_on_connection_error: false
{:ok, current_directory} = File.cwd()
dummy_credentials_file = current_directory <> "/secrets/dummy-credentials.json"
config :goth,
json: System.get_env("GCLOUD_PUBSUB_CREDENTIALS_PATH", dummy_credentials_file) |> File.read!()
config :libcluster,
topologies: [
k8s: [
strategy: Elixir.Cluster.Strategy.Kubernetes,
config: [
mode: :ip,
kubernetes_node_basename: "postoffice",
kubernetes_selector: "service=postoffice",
kubernetes_namespace: System.get_env("NAMESPACE", "staging"),
polling_interval: 10_000
]
]
]
| 35.783784 | 96 | 0.705438 |
f7a122958b1ca93c7134580a1a2620cf24fa5478 | 1,045 | exs | Elixir | mix.exs | Logflare/turnio-prometheus-parser | 588e72e3779fb78621a008a81ee5426909145f89 | [
"Apache-2.0"
] | null | null | null | mix.exs | Logflare/turnio-prometheus-parser | 588e72e3779fb78621a008a81ee5426909145f89 | [
"Apache-2.0"
] | null | null | null | mix.exs | Logflare/turnio-prometheus-parser | 588e72e3779fb78621a008a81ee5426909145f89 | [
"Apache-2.0"
] | null | null | null | defmodule PrometheusParser.MixProject do
use Mix.Project
def project do
[
app: :prometheus_parser,
version: "0.1.5",
elixir: "~> 1.11",
start_permanent: Mix.env() == :prod,
deps: deps(),
package: package(),
description: description()
]
end
defp package() do
[
licenses: ["Apache-2.0"],
links: %{"GitHub" => "https://github.com/turnhub/turnio-prometheus-parser"}
]
end
defp description() do
"A nimble_parsec parser for parsing the Prometheus text format."
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false},
{:nimble_parsec, "~> 1.2.3"},
{:mix_test_watch, "~> 1.1.0", only: :dev}
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 23.222222 | 87 | 0.585646 |
f7a134f60e495b9be05eba07d509f2e58247223e | 864 | ex | Elixir | lib/backend/application.ex | damonvjanis/messenger-react | 018ea500ec73a69dea57a6ea1e708a202a7cb9c1 | [
"MIT"
] | 1 | 2021-02-12T17:40:54.000Z | 2021-02-12T17:40:54.000Z | lib/backend/application.ex | damonvjanis/messenger-react | 018ea500ec73a69dea57a6ea1e708a202a7cb9c1 | [
"MIT"
] | 3 | 2022-02-13T11:17:47.000Z | 2022-02-27T03:19:11.000Z | lib/backend/application.ex | damonvjanis/messenger-react | 018ea500ec73a69dea57a6ea1e708a202a7cb9c1 | [
"MIT"
] | 1 | 2021-08-22T14:45:41.000Z | 2021-08-22T14:45:41.000Z | defmodule Backend.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
# List all child processes to be supervised
children = [
Backend.Repo,
BackendWeb.Endpoint,
{Absinthe.Subscription, [BackendWeb.Endpoint]},
Backend.Chron.StayAlive,
Backend.Chron.MissedMessages
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Backend.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
BackendWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 27.870968 | 61 | 0.721065 |
f7a1355f96fd31e89be50efcb7989237fa263519 | 405 | ex | Elixir | lib/etdpay_web/controllers/users_controller.ex | epsilveira/etdpay | 8460bd6bf128ae93d04895494220bff121778f2f | [
"MIT"
] | null | null | null | lib/etdpay_web/controllers/users_controller.ex | epsilveira/etdpay | 8460bd6bf128ae93d04895494220bff121778f2f | [
"MIT"
] | null | null | null | lib/etdpay_web/controllers/users_controller.ex | epsilveira/etdpay | 8460bd6bf128ae93d04895494220bff121778f2f | [
"MIT"
] | null | null | null | defmodule EtdpayWeb.UsersController do
use EtdpayWeb, :controller
alias Etdpay.User
action_fallback EtdpayWeb.FallbackController
def create(conn, params) do
with {:ok, %User{} = user} <- Etdpay.create_user(params) do
conn
|> put_status(:created)
|> render("create.json", user: user)
end
end
# defp handle_response({:error, _result} = error, _conn), do: error
end
| 22.5 | 69 | 0.681481 |
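# A router entry matching the controller above might look like this; the scope,
# path and pipeline are assumptions for illustration, not taken from the project.
scope "/api", EtdpayWeb do
  pipe_through :api

  post "/users", UsersController, :create
end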
f7a148efc12de2d00ffbea062cff1dbc4be3aab3 | 2,954 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/resource_policy_list.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/resource_policy_list.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/compute/lib/google_api/compute/v1/model/resource_policy_list.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Compute.V1.Model.ResourcePolicyList do
@moduledoc """
## Attributes
* `etag` (*type:* `String.t`, *default:* `nil`) -
* `id` (*type:* `String.t`, *default:* `nil`) - [Output Only] The unique identifier for the resource. This identifier is defined by the server.
* `items` (*type:* `list(GoogleApi.Compute.V1.Model.ResourcePolicy.t)`, *default:* `nil`) - [Output Only] A list of ResourcePolicy resources.
  *   `kind` (*type:* `String.t`, *default:* `compute#resourcePolicyList`) - [Output Only] Type of resource. Always compute#resourcePoliciesList for lists of resourcePolicies
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - [Output Only] This token allows you to get the next page of results for list requests. If the number of results is larger than maxResults, use the nextPageToken as a value for the query parameter pageToken in the next list request. Subsequent list requests will have their own nextPageToken to continue paging through the results.
* `selfLink` (*type:* `String.t`, *default:* `nil`) - [Output Only] Server-defined URL for this resource.
* `warning` (*type:* `GoogleApi.Compute.V1.Model.ResourcePolicyListWarning.t`, *default:* `nil`) - [Output Only] Informational warning message.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:etag => String.t(),
:id => String.t(),
:items => list(GoogleApi.Compute.V1.Model.ResourcePolicy.t()),
:kind => String.t(),
:nextPageToken => String.t(),
:selfLink => String.t(),
:warning => GoogleApi.Compute.V1.Model.ResourcePolicyListWarning.t()
}
field(:etag)
field(:id)
field(:items, as: GoogleApi.Compute.V1.Model.ResourcePolicy, type: :list)
field(:kind)
field(:nextPageToken)
field(:selfLink)
field(:warning, as: GoogleApi.Compute.V1.Model.ResourcePolicyListWarning)
end
defimpl Poison.Decoder, for: GoogleApi.Compute.V1.Model.ResourcePolicyList do
def decode(value, options) do
GoogleApi.Compute.V1.Model.ResourcePolicyList.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Compute.V1.Model.ResourcePolicyList do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 45.446154 | 393 | 0.711916 |
f7a14f35fa8303818622bb1f170d288806ad726f | 730 | ex | Elixir | lib/history/lending_rates/lending_rate_history_chunk.ex | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 20 | 2021-08-06T01:09:48.000Z | 2022-03-28T18:44:56.000Z | lib/history/lending_rates/lending_rate_history_chunk.ex | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 13 | 2021-08-21T21:17:02.000Z | 2022-03-27T06:33:51.000Z | lib/history/lending_rates/lending_rate_history_chunk.ex | fremantle-industries/history | a8a33744279ff4ca62620785f9a2e9c0c99e4de7 | [
"MIT"
] | 2 | 2021-09-23T11:31:59.000Z | 2022-01-09T16:19:35.000Z | defmodule History.LendingRates.LendingRateHistoryChunk do
use Ecto.Schema
import Ecto.Changeset
alias History.LendingRates
schema "lending_rate_history_chunks" do
field(:venue, :string)
field(:token, :string)
field(:start_at, :utc_datetime)
field(:end_at, :utc_datetime)
field(:status, History.ChunkStatusType)
belongs_to(:job, LendingRates.LendingRateHistoryJob)
timestamps()
end
@doc false
def changeset(chunk, attrs) do
chunk
|> cast(attrs, [:venue, :token, :start_at, :end_at, :status])
|> validate_required([:venue, :token, :start_at, :end_at, :status])
|> assoc_constraint(:job)
|> unique_constraint([:venue, :product, :job_id, :start_at, :end_at])
end
end
| 28.076923 | 73 | 0.70274 |
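# Building a changeset for the schema above (illustrative only). The attribute
# values are made up, the job association would normally come from an existing
# LendingRateHistoryJob, and :complete is a guess at a value accepted by
# History.ChunkStatusType.
changeset =
  History.LendingRates.LendingRateHistoryChunk.changeset(
    %History.LendingRates.LendingRateHistoryChunk{},
    %{
      venue: "ftx",
      token: "BTC",
      start_at: ~U[2021-08-01 00:00:00Z],
      end_at: ~U[2021-08-02 00:00:00Z],
      status: :complete
    }
  )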
f7a160adc443b4e795dc0ccd504cd575e870759d | 731 | ex | Elixir | lib/plug/verify_request.ex | erikvullings/json-service | c920346e503e05d98e20b17283d43037c6202d7f | [
"MIT"
] | null | null | null | lib/plug/verify_request.ex | erikvullings/json-service | c920346e503e05d98e20b17283d43037c6202d7f | [
"MIT"
] | null | null | null | lib/plug/verify_request.ex | erikvullings/json-service | c920346e503e05d98e20b17283d43037c6202d7f | [
"MIT"
] | null | null | null | defmodule JsonService.Plug.VerifyRequest do
defmodule IncompleteRequestError do
@moduledoc """
Error raised when a required field is missing.
"""
defexception message: "", plug_status: 400
end
def init(options), do: options
def call(%Plug.Conn{request_path: path} = conn, opts) do
if path in opts[:paths], do: verify_request!(conn.body_params, opts[:fields])
conn
end
defp verify_request!(body_params, fields) do
verified = body_params
|> Map.keys
|> contains_fields?(fields)
unless verified, do: raise IncompleteRequestError
end
defp contains_fields?(keys, fields), do: Enum.all?(fields, &(&1 in keys))
end
| 26.107143 | 83 | 0.640219 |
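# Wiring the plug above into a Plug.Router or Phoenix endpoint (the paths and
# fields are made-up examples): requests to a listed path that are missing any
# of the required body fields raise IncompleteRequestError, returning HTTP 400.
plug JsonService.Plug.VerifyRequest,
  fields: ["content", "mimetype"],
  paths: ["/upload"]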
f7a18323e508d52de1a8b2115bc6cdc48da3c96c | 1,598 | exs | Elixir | lib/eex/test/eex/smart_engine_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 19,291 | 2015-01-01T02:42:49.000Z | 2022-03-31T21:01:40.000Z | lib/eex/test/eex/smart_engine_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 8,082 | 2015-01-01T04:16:23.000Z | 2022-03-31T22:08:02.000Z | lib/eex/test/eex/smart_engine_test.exs | doughsay/elixir | 7356a47047d0b54517bd6886603f09b1121dde2b | [
"Apache-2.0"
] | 3,472 | 2015-01-03T04:11:56.000Z | 2022-03-29T02:07:30.000Z | Code.require_file("../test_helper.exs", __DIR__)
defmodule EEx.SmartEngineTest do
use ExUnit.Case, async: true
test "evaluates simple string" do
assert_eval("foo bar", "foo bar")
end
test "evaluates with assigns as keywords" do
assert_eval("1", "<%= @foo %>", assigns: [foo: 1])
end
test "evaluates with assigns as a map" do
assert_eval("1", "<%= @foo %>", assigns: %{foo: 1})
end
test "error with missing assigns" do
stderr =
ExUnit.CaptureIO.capture_io(:stderr, fn ->
assert_eval("", "<%= @foo %>", assigns: %{})
end)
assert stderr =~ "assign @foo not available in EEx template"
end
test "evaluates with loops" do
assert_eval("1\n2\n3\n", "<%= for x <- [1, 2, 3] do %><%= x %>\n<% end %>")
end
test "preserves line numbers in assignments" do
result = EEx.compile_string("foo\n<%= @hello %>", engine: EEx.SmartEngine)
Macro.prewalk(result, fn
{_left, meta, [_, :hello]} ->
assert Keyword.get(meta, :line) == 2
send(self(), :found)
node ->
node
end)
assert_received :found
end
test "error with unused \"do\" block without \"<%=\" modifier" do
stderr =
ExUnit.CaptureIO.capture_io(:stderr, fn ->
assert_eval("", "<% if true do %>I'm invisible!<% end %>", assigns: %{})
end)
assert stderr =~ "the contents of this expression won't be output"
end
defp assert_eval(expected, actual, binding \\ []) do
result = EEx.eval_string(actual, binding, file: __ENV__.file, engine: EEx.SmartEngine)
assert result == expected
end
end
| 26.633333 | 90 | 0.610138 |
f7a184ae0b068b0b8ee86841b81f897ecb1b6c55 | 2,235 | exs | Elixir | apps/flv/test/flv/video_data_test.exs | tiensonqin/elixir-media-libs | 87f17e2b23bf8380e785423652910bfa7d3bb47c | [
"MIT"
] | null | null | null | apps/flv/test/flv/video_data_test.exs | tiensonqin/elixir-media-libs | 87f17e2b23bf8380e785423652910bfa7d3bb47c | [
"MIT"
] | null | null | null | apps/flv/test/flv/video_data_test.exs | tiensonqin/elixir-media-libs | 87f17e2b23bf8380e785423652910bfa7d3bb47c | [
"MIT"
] | null | null | null | defmodule Flv.VideoDataTest do
use ExUnit.Case, async: true
test "Can parse avc keyframe with sequence header packet type" do
binary =
Base.decode16!(
"17000000000164001FFFE1001B6764001FACD9405005BA6A021A0280000003008000001E478C18CB01000468EFBCB0"
)
expected_data =
Base.decode16!(
"0000000164001FFFE1001B6764001FACD9405005BA6A021A0280000003008000001E478C18CB01000468EFBCB0"
)
assert {:ok,
%Flv.VideoData{
frame_type: :keyframe,
codec_id: :avc,
avc_packet_type: :sequence_header,
composition_time: 0,
data: ^expected_data
}} = Flv.VideoData.parse(binary)
end
test "Can parse avc keyframe with nalu packet type" do
binary =
Base.decode16!(
"1701000042000002F30605FFFFEFDC45E9BDE6D948B7962CD820D923EEEF78323634202D20636F7265203134362072323533382031323133393663202D20482E3236342F4D5045472D342041564320636F646563202D20436F70796C6566742032303033"
)
expected_data =
Base.decode16!(
"000002F30605FFFFEFDC45E9BDE6D948B7962CD820D923EEEF78323634202D20636F7265203134362072323533382031323133393663202D20482E3236342F4D5045472D342041564320636F646563202D20436F70796C6566742032303033"
)
assert {:ok,
%Flv.VideoData{
frame_type: :keyframe,
codec_id: :avc,
avc_packet_type: :nalu,
composition_time: 66,
data: ^expected_data
}} = Flv.VideoData.parse(binary)
end
test "Can parse avc interframe with nalu packet type" do
binary = Base.decode16!("270100004300000366419A211888FFDAC9C56643D3F25D669E7653")
expected_data = Base.decode16!("00000366419A211888FFDAC9C56643D3F25D669E7653")
assert {:ok,
%Flv.VideoData{
frame_type: :interframe,
codec_id: :avc,
avc_packet_type: :nalu,
composition_time: 67,
data: ^expected_data
}} = Flv.VideoData.parse(binary)
end
test "Error when invalid video packet" do
binary = Base.decode16!("FF4300000366419A211888FFDAC9C56643D3F25D669E")
assert :error = Flv.VideoData.parse(binary)
end
end
| 33.863636 | 210 | 0.682327 |
f7a18a19cee4645137a8f6f6d49f753ba5336b54 | 2,307 | ex | Elixir | clients/service_management/lib/google_api/service_management/v1/model/metric_descriptor_metadata.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/service_management/lib/google_api/service_management/v1/model/metric_descriptor_metadata.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/service_management/lib/google_api/service_management/v1/model/metric_descriptor_metadata.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.ServiceManagement.V1.Model.MetricDescriptorMetadata do
@moduledoc """
Additional annotations that can be used to guide the usage of a metric.
## Attributes
* `ingestDelay` (*type:* `String.t`, *default:* `nil`) - The delay of data points caused by ingestion. Data points older than this
age are guaranteed to be ingested and available to be read, excluding
data loss due to errors.
* `launchStage` (*type:* `String.t`, *default:* `nil`) - Deprecated. Please use the MetricDescriptor.launch_stage instead.
The launch stage of the metric definition.
* `samplePeriod` (*type:* `String.t`, *default:* `nil`) - The sampling period of metric data points. For metrics which are written
periodically, consecutive data points are stored at this time interval,
excluding data loss due to errors. Metrics with a higher granularity have
a smaller sampling period.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:ingestDelay => String.t(),
:launchStage => String.t(),
:samplePeriod => String.t()
}
field(:ingestDelay)
field(:launchStage)
field(:samplePeriod)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceManagement.V1.Model.MetricDescriptorMetadata do
def decode(value, options) do
GoogleApi.ServiceManagement.V1.Model.MetricDescriptorMetadata.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ServiceManagement.V1.Model.MetricDescriptorMetadata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.101695 | 134 | 0.733853 |
f7a1ca26ff8d9f200f5d62e80504eefb38bbbd63 | 2,377 | ex | Elixir | lib/hello_elixir_web.ex | lubien/hello_elixir | 67305875a817376216b793711426aae3ff0d6bd2 | [
"Apache-2.0"
] | 14 | 2021-05-02T14:11:03.000Z | 2021-10-19T14:03:39.000Z | lib/hello_elixir_web.ex | fly-apps/hello_elixir_sqlite | f4484624a25c47c3034b8249aeb8aeecd321bf17 | [
"Apache-2.0"
] | 2 | 2021-11-03T17:27:28.000Z | 2021-12-03T20:34:12.000Z | lib/hello_elixir_web.ex | fly-apps/hello_elixir_sqlite | f4484624a25c47c3034b8249aeb8aeecd321bf17 | [
"Apache-2.0"
] | 6 | 2021-11-08T13:52:39.000Z | 2022-01-13T19:24:49.000Z | defmodule HelloElixirWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use HelloElixirWeb, :controller
use HelloElixirWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: HelloElixirWeb
import Plug.Conn
import HelloElixirWeb.Gettext
alias HelloElixirWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/hello_elixir_web/templates",
namespace: HelloElixirWeb
# Import convenience functions from controllers
import Phoenix.Controller,
only: [get_flash: 1, get_flash: 2, view_module: 1, view_template: 1]
# Include shared imports and aliases for views
unquote(view_helpers())
end
end
def live_view do
quote do
use Phoenix.LiveView,
layout: {HelloElixirWeb.LayoutView, "live.html"}
unquote(view_helpers())
end
end
def live_component do
quote do
use Phoenix.LiveComponent
unquote(view_helpers())
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
import Phoenix.LiveView.Router
end
end
def channel do
quote do
use Phoenix.Channel
import HelloElixirWeb.Gettext
end
end
defp view_helpers do
quote do
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
# Import LiveView and .heex helpers (live_render, live_patch, <.form>, etc)
import Phoenix.LiveView.Helpers
# Import basic rendering functionality (render, render_layout, etc)
import Phoenix.View
import HelloElixirWeb.ErrorHelpers
import HelloElixirWeb.Gettext
alias HelloElixirWeb.Router.Helpers, as: Routes
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 23.07767 | 81 | 0.687 |
f7a20b7129d275980c968dc6aae30d2b21e64393 | 1,866 | exs | Elixir | test/grizzly/smart_start/meta_extension/bootstrapping_mode_test.exs | ryanwinchester/grizzly | 86002e01debe63c18f85270ddc948e3875f25043 | [
"Apache-2.0"
] | null | null | null | test/grizzly/smart_start/meta_extension/bootstrapping_mode_test.exs | ryanwinchester/grizzly | 86002e01debe63c18f85270ddc948e3875f25043 | [
"Apache-2.0"
] | null | null | null | test/grizzly/smart_start/meta_extension/bootstrapping_mode_test.exs | ryanwinchester/grizzly | 86002e01debe63c18f85270ddc948e3875f25043 | [
"Apache-2.0"
] | null | null | null | defmodule Grizzly.SmartStart.MetaExtension.BootstrappingModeTest do
use ExUnit.Case, async: true
alias Grizzly.SmartStart.MetaExtension.BootstrappingMode
describe "create a BootstrappingMode.t()" do
test "when mode is :security_2" do
expected_mode = %BootstrappingMode{mode: :security_2}
assert {:ok, expected_mode} == BootstrappingMode.new(:security_2)
end
test "when mode is :smart_start" do
expected_mode = %BootstrappingMode{mode: :smart_start}
assert {:ok, expected_mode} == BootstrappingMode.new(:smart_start)
end
test "when mode is invalid" do
assert {:error, :invalid_mode} == BootstrappingMode.new(:asdf)
end
end
describe "encoding BootstrappingMode.t()" do
test "when mode is :security_2" do
{:ok, mode} = BootstrappingMode.new(:security_2)
expected_binary = <<0x6D, 0x01, 0x00>>
assert {:ok, expected_binary} == BootstrappingMode.to_binary(mode)
end
test "when mode is :smart_start" do
{:ok, mode} = BootstrappingMode.new(:smart_start)
expected_binary = <<0x6D, 0x01, 0x01>>
assert {:ok, expected_binary} == BootstrappingMode.to_binary(mode)
end
end
describe "decoding BootstrappingMode.t()" do
test "when mode is :security_2" do
{:ok, expected_mode} = BootstrappingMode.new(:security_2)
binary = <<0x6D, 0x01, 0x00>>
assert {:ok, expected_mode} == BootstrappingMode.from_binary(binary)
end
test "when mode is :smart_start" do
{:ok, expected_mode} = BootstrappingMode.new(:smart_start)
binary = <<0x6D, 0x01, 0x01>>
assert {:ok, expected_mode} == BootstrappingMode.from_binary(binary)
end
test "ensure critical bit is set" do
binary = <<0x6C, 0x01, 0x00>>
assert {:error, :critical_bit_not_set} == BootstrappingMode.from_binary(binary)
end
end
end
| 31.627119 | 85 | 0.686495 |
f7a22dca523b2cb2b729af02e8ba18fd25b2742b | 675 | ex | Elixir | 3. pacman rules/rules.ex | gabrielclaudino/exercism-elixir | ec5a14f6f8c70f60a201a222a75c1fe5ce14725a | [
"MIT"
] | null | null | null | 3. pacman rules/rules.ex | gabrielclaudino/exercism-elixir | ec5a14f6f8c70f60a201a222a75c1fe5ce14725a | [
"MIT"
] | null | null | null | 3. pacman rules/rules.ex | gabrielclaudino/exercism-elixir | ec5a14f6f8c70f60a201a222a75c1fe5ce14725a | [
"MIT"
] | null | null | null | defmodule Rules do
@spec eat_ghost?(boolean, boolean) :: boolean
def eat_ghost?(power_pellet_active, touching_ghost) do
power_pellet_active and touching_ghost
end
@spec score?(boolean, boolean) :: boolean
def score?(touching_power_pellet, touching_dot) do
touching_power_pellet or touching_dot
end
@spec lose?(boolean, boolean) :: boolean
def lose?(power_pellet_active, touching_ghost) do
touching_ghost and not power_pellet_active
end
@spec win?(boolean, boolean, boolean) :: boolean
def win?(has_eaten_all_dots, power_pellet_active, touching_ghost) do
has_eaten_all_dots and not lose?(power_pellet_active, touching_ghost)
end
end
| 30.681818 | 73 | 0.771852 |
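# Quick illustration of the rules above; results follow directly from the
# boolean logic in the module.
Rules.eat_ghost?(true, true)   # => true  (power pellet active and touching a ghost)
Rules.lose?(false, true)       # => true  (touching a ghost without a power pellet)
Rules.win?(true, false, false) # => true  (all dots eaten without losing)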
f7a24aefa1b1e347147ad7d8a624d7eba29e0399 | 17,038 | ex | Elixir | lib/commanded/process_managers/process_router.ex | edwardzhou/commanded | f104cbf5ff3a37a6e9b637bc07ccde1d79c0725d | [
"MIT"
] | null | null | null | lib/commanded/process_managers/process_router.ex | edwardzhou/commanded | f104cbf5ff3a37a6e9b637bc07ccde1d79c0725d | [
"MIT"
] | null | null | null | lib/commanded/process_managers/process_router.ex | edwardzhou/commanded | f104cbf5ff3a37a6e9b637bc07ccde1d79c0725d | [
"MIT"
] | null | null | null | defmodule Commanded.ProcessManagers.ProcessRouter do
@moduledoc false
use GenServer
use Commanded.Registration
require Logger
alias Commanded.Event.Upcast
alias Commanded.EventStore
alias Commanded.EventStore.RecordedEvent
alias Commanded.ProcessManagers.FailureContext
alias Commanded.ProcessManagers.ProcessManagerInstance
alias Commanded.ProcessManagers.ProcessRouter
alias Commanded.ProcessManagers.Supervisor
alias Commanded.Subscriptions
defmodule State do
@moduledoc false
defstruct [
:command_dispatcher,
:consistency,
:event_timeout,
:process_manager_name,
:process_manager_module,
:subscribe_from,
:supervisor,
:subscription,
:subscription_ref,
:last_seen_event,
:process_event_timer,
process_managers: %{},
pending_acks: %{},
pending_events: []
]
end
def start_link(process_manager_name, process_manager_module, command_dispatcher, opts \\ []) do
name = {ProcessRouter, process_manager_name}
state = %State{
process_manager_name: process_manager_name,
process_manager_module: process_manager_module,
command_dispatcher: command_dispatcher,
consistency: opts[:consistency] || :eventual,
subscribe_from: opts[:start_from] || :origin,
event_timeout: opts[:event_timeout]
}
Registration.start_link(name, __MODULE__, state)
end
def init(%State{} = state) do
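    # Register this subscription with its configured consistency, then subscribe to
    # the event store asynchronously via the :subscribe_to_events cast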
:ok = register_subscription(state)
:ok = GenServer.cast(self(), :subscribe_to_events)
{:ok, state}
end
@doc """
Acknowledge successful handling of the given event by a process manager instance
"""
def ack_event(process_router, %RecordedEvent{} = event, instance) do
GenServer.cast(process_router, {:ack_event, event, instance})
end
@doc false
def process_instance(process_router, process_uuid) do
GenServer.call(process_router, {:process_instance, process_uuid})
end
@doc false
def process_instances(process_router) do
GenServer.call(process_router, :process_instances)
end
@doc false
def handle_call(:process_instances, _from, %State{} = state) do
%State{process_managers: process_managers} = state
reply = Enum.map(process_managers, fn {process_uuid, pid} -> {process_uuid, pid} end)
{:reply, reply, state}
end
@doc false
def handle_call({:process_instance, process_uuid}, _from, %State{} = state) do
%State{process_managers: process_managers} = state
reply =
case Map.get(process_managers, process_uuid) do
nil -> {:error, :process_manager_not_found}
process_manager -> process_manager
end
{:reply, reply, state}
end
@doc false
def handle_cast({:ack_event, event, instance}, %State{} = state) do
%State{pending_acks: pending_acks} = state
%RecordedEvent{event_number: event_number} = event
state =
case pending_acks |> Map.get(event_number, []) |> List.delete(instance) do
[] ->
# Enqueue a message to continue processing any pending events
GenServer.cast(self(), :process_pending_events)
state = %State{state | pending_acks: Map.delete(pending_acks, event_number)}
# no pending acks so confirm receipt of event
confirm_receipt(event, state)
pending ->
# pending acks, don't ack event but wait for outstanding instances
%State{state | pending_acks: Map.put(pending_acks, event_number, pending)}
end
{:noreply, state}
end
@doc false
def handle_cast(:subscribe_to_events, %State{} = state) do
{:noreply, subscribe_to_all_streams(state)}
end
@doc false
def handle_cast(:process_pending_events, %State{pending_events: []} = state),
do: {:noreply, state}
@doc false
def handle_cast(:process_pending_events, %State{} = state) do
%State{pending_events: [event | pending_events]} = state
case length(pending_events) do
0 ->
:ok
1 ->
Logger.debug(fn -> describe(state) <> " has 1 pending event to process" end)
count ->
Logger.debug(fn -> describe(state) <> " has #{count} pending events to process" end)
end
case handle_event(event, state) do
%State{} = state -> {:noreply, %State{state | pending_events: pending_events}}
reply -> reply
end
end
@doc false
# Subscription to event store has successfully subscribed, init process router
def handle_info({:subscribed, subscription}, %State{subscription: subscription} = state) do
Logger.debug(fn -> describe(state) <> " has successfully subscribed to event store" end)
%State{command_dispatcher: command_dispatcher} = state
{:ok, supervisor} = Supervisor.start_link(command_dispatcher, self())
{:noreply, %State{state | supervisor: supervisor}}
end
@doc false
def handle_info({:events, events}, %State{} = state) do
Logger.debug(fn -> describe(state) <> " received #{length(events)} event(s)" end)
%State{pending_events: pending_events} = state
unseen_events =
events
|> Enum.reject(&event_already_seen?(&1, state))
|> Upcast.upcast_event_stream()
state =
case {pending_events, unseen_events} do
{[], []} ->
# no pending or unseen events, so state is unmodified
state
{[], _} ->
# no pending events, but some unseen events so start processing them
GenServer.cast(self(), :process_pending_events)
%State{state | pending_events: unseen_events}
{_, _} ->
# already processing pending events, append the unseen events so they are processed afterwards
%State{state | pending_events: pending_events ++ unseen_events}
end
{:noreply, state}
end
@doc false
# Shutdown process manager when processing an event has taken too long.
def handle_info({:event_timeout, event_number}, %State{} = state) do
%State{pending_acks: pending_acks, event_timeout: event_timeout} = state
case Map.get(pending_acks, event_number, []) do
[] ->
{:noreply, state}
_pending ->
Logger.error(fn ->
describe(state) <>
" has taken longer than " <>
inspect(event_timeout) <>
"ms to process event #" <> inspect(event_number) <> " and is now stopping"
end)
{:stop, :event_timeout, state}
end
end
@doc false
# Stop process manager when event store subscription process terminates.
def handle_info(
{:DOWN, ref, :process, pid, reason},
%State{subscription_ref: ref, subscription: pid} = state
) do
Logger.debug(fn -> describe(state) <> " subscription DOWN due to: #{inspect(reason)}" end)
{:stop, reason, state}
end
@doc false
# Remove a process manager instance that has stopped with a normal exit reason.
def handle_info({:DOWN, _ref, :process, pid, :normal}, %State{} = state) do
%State{process_managers: process_managers} = state
state = %State{state | process_managers: remove_process_manager(process_managers, pid)}
{:noreply, state}
end
@doc false
# Stop process router when a process manager instance terminates abnormally.
def handle_info({:DOWN, _ref, :process, _pid, reason}, %State{} = state) do
Logger.warn(fn -> describe(state) <> " is stopping due to: #{inspect(reason)}" end)
{:stop, reason, state}
end
# Register this process manager as a subscription with the given consistency
defp register_subscription(%State{} = state) do
%State{consistency: consistency, process_manager_name: name} = state
Subscriptions.register(name, consistency)
end
defp subscribe_to_all_streams(%State{} = state) do
%State{process_manager_name: process_manager_name, subscribe_from: subscribe_from} = state
{:ok, subscription} =
EventStore.subscribe_to(:all, process_manager_name, self(), subscribe_from)
subscription_ref = Process.monitor(subscription)
%State{state | subscription: subscription, subscription_ref: subscription_ref}
end
# Ignore already seen event
defp event_already_seen?(
%RecordedEvent{event_number: event_number},
%State{last_seen_event: last_seen_event}
) do
not is_nil(last_seen_event) and event_number <= last_seen_event
end
defp handle_event(%RecordedEvent{} = event, %State{} = state) do
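    # Route the event based on the process manager's `interested?/1` callback:
    #   {:start, uuid}     - start or continue instance(s) and delegate the event
    #   {:start!, uuid}    - like :start, but errors if an instance already exists
    #   {:continue, uuid}  - start or continue instance(s) and delegate the event
    #   {:continue!, uuid} - like :continue, but errors if the instance was not started
    #   {:stop, uuid}      - stop the instance(s) and ack the event
    #   false              - not interested, just ack the event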
%RecordedEvent{data: data} = event
%State{process_manager_module: process_manager_module} = state
try do
case process_manager_module.interested?(data) do
{:start, process_uuid} ->
Logger.debug(fn -> describe(state) <> " is interested in event " <> describe(event) end)
process_uuid
|> List.wrap()
|> Enum.reduce(state, fn process_uuid, state ->
{process_instance, state} = start_or_continue_process_manager(process_uuid, state)
delegate_event(process_instance, event, state)
end)
{:start!, process_uuid} ->
Logger.debug(fn -> describe(state) <> " is interested in event " <> describe(event) end)
{state, process_instances} =
process_uuid
|> List.wrap()
|> Enum.reduce({state, []}, fn process_uuid, {state, process_instances} ->
{process_instance, state} = start_or_continue_process_manager(process_uuid, state)
if ProcessManagerInstance.new?(process_instance) do
{state, [process_instance | process_instances]}
else
throw(
handle_routing_error(
{:error, {:start!, :process_already_started}},
event,
state
)
)
end
end)
process_instances
|> Enum.reverse()
|> Enum.reduce(state, &delegate_event(&1, event, &2))
{:continue, process_uuid} ->
Logger.debug(fn -> describe(state) <> " is interested in event " <> describe(event) end)
process_uuid
|> List.wrap()
|> Enum.reduce(state, fn process_uuid, state ->
{process_instance, state} = start_or_continue_process_manager(process_uuid, state)
delegate_event(process_instance, event, state)
end)
{:continue!, process_uuid} ->
Logger.debug(fn -> describe(state) <> " is interested in event " <> describe(event) end)
{state, process_instances} =
process_uuid
|> List.wrap()
|> Enum.reduce({state, []}, fn process_uuid, {state, process_instances} ->
{process_instance, state} = start_or_continue_process_manager(process_uuid, state)
if ProcessManagerInstance.new?(process_instance) do
throw(
handle_routing_error(
{:error, {:continue!, :process_not_started}},
event,
state
)
)
else
{state, [process_instance | process_instances]}
end
end)
process_instances
|> Enum.reverse()
|> Enum.reduce(state, &delegate_event(&1, event, &2))
{:stop, process_uuid} ->
Logger.debug(fn ->
describe(state) <> " has been stopped by event " <> describe(event)
end)
state =
process_uuid
|> List.wrap()
|> Enum.reduce(state, &stop_process_manager/2)
ack_and_continue(event, state)
false ->
Logger.debug(fn ->
describe(state) <> " is not interested in event " <> describe(event)
end)
ack_and_continue(event, state)
end
rescue
e -> handle_routing_error({:error, e}, event, state)
catch
reply -> reply
end
end
defp handle_routing_error(error, %RecordedEvent{} = failed_event, %State{} = state) do
%RecordedEvent{data: data} = failed_event
%State{process_manager_module: process_manager_module} = state
failure_context = %FailureContext{last_event: failed_event}
case process_manager_module.error(error, data, failure_context) do
:skip ->
# Skip the problematic event by confirming receipt
Logger.info(fn -> describe(state) <> " is skipping event" end)
ack_and_continue(failed_event, state)
{:stop, reason} ->
Logger.warn(fn -> describe(state) <> " has requested to stop: #{inspect(error)}" end)
{:stop, reason, state}
invalid ->
Logger.warn(fn ->
describe(state) <> " returned an invalid error response: #{inspect(invalid)}"
end)
{:stop, error, state}
end
end
# Continue processing any pending events and confirm receipt of the given event id
defp ack_and_continue(%RecordedEvent{} = event, %State{} = state) do
GenServer.cast(self(), :process_pending_events)
confirm_receipt(event, state)
end
# Confirm receipt of given event
defp confirm_receipt(%RecordedEvent{event_number: event_number} = event, %State{} = state) do
Logger.debug(fn ->
describe(state) <> " confirming receipt of event: #{inspect(event_number)}"
end)
do_ack_event(event, state)
%State{state | last_seen_event: event_number}
end
defp start_or_continue_process_manager(process_uuid, %State{} = state) do
%State{process_managers: process_managers} = state
case Map.get(process_managers, process_uuid) do
process_manager when is_pid(process_manager) ->
{process_manager, state}
nil ->
start_process_manager(process_uuid, state)
end
end
defp start_process_manager(process_uuid, %State{} = state) do
%State{
process_managers: process_managers,
process_manager_name: process_manager_name,
process_manager_module: process_manager_module,
supervisor: supervisor
} = state
{:ok, process_manager} =
Supervisor.start_process_manager(
supervisor,
process_manager_name,
process_manager_module,
process_uuid
)
Process.monitor(process_manager)
state = %State{
state
| process_managers: Map.put(process_managers, process_uuid, process_manager)
}
{process_manager, state}
end
defp stop_process_manager(process_uuid, %State{} = state) do
%State{process_managers: process_managers} = state
case Map.get(process_managers, process_uuid) do
nil ->
state
process_manager ->
:ok = ProcessManagerInstance.stop(process_manager)
%State{state | process_managers: Map.delete(process_managers, process_uuid)}
end
end
defp remove_process_manager(process_managers, pid) do
Enum.reduce(process_managers, process_managers, fn
{process_uuid, process_manager_pid}, acc when process_manager_pid == pid ->
Map.delete(acc, process_uuid)
_, acc ->
acc
end)
end
defp do_ack_event(event, %State{} = state) do
%State{consistency: consistency, process_manager_name: name, subscription: subscription} =
state
:ok = EventStore.ack_event(subscription, event)
:ok = Subscriptions.ack_event(name, consistency, event)
end
# Delegate event to process instance who will ack event processing on success
defp delegate_event(process_instance, %RecordedEvent{} = event, %State{} = state) do
%State{pending_acks: pending_acks} = state
%RecordedEvent{event_number: event_number} = event
:ok = ProcessManagerInstance.process_event(process_instance, event)
pending_acks =
Map.update(pending_acks, event_number, [process_instance], fn
pending -> [process_instance | pending]
end)
state = %State{state | pending_acks: pending_acks}
start_event_timer(event_number, state)
end
# Event timeout not configured
defp start_event_timer(_event_number, %State{event_timeout: nil} = state), do: state
defp start_event_timer(event_number, %State{process_event_timer: process_event_timer} = state)
when is_reference(process_event_timer) do
Process.cancel_timer(process_event_timer)
state = %State{state | process_event_timer: nil}
start_event_timer(event_number, state)
end
defp start_event_timer(event_number, %State{event_timeout: event_timeout} = state)
when is_integer(event_timeout) do
%State{event_timeout: event_timeout} = state
process_event_timer =
Process.send_after(self(), {:event_timeout, event_number}, event_timeout)
%State{state | process_event_timer: process_event_timer}
end
defp describe(%State{process_manager_module: process_manager_module}),
do: inspect(process_manager_module)
defp describe(%RecordedEvent{} = event) do
%RecordedEvent{
event_number: event_number,
stream_id: stream_id,
stream_version: stream_version
} = event
"#{inspect(event_number)} (#{inspect(stream_id)}@#{inspect(stream_version)})"
end
end
| 31.205128 | 104 | 0.658411 |
f7a28def5f0197da6ccbf387eb79c6be498af232 | 7,788 | exs | Elixir | apps/andi/test/integration/andi_web/live/data_dictionary_tree_test.exs | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 18 | 2020-11-13T15:38:24.000Z | 2021-05-26T00:40:08.000Z | apps/andi/test/integration/andi_web/live/data_dictionary_tree_test.exs | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 365 | 2020-09-21T12:31:40.000Z | 2021-09-25T14:54:21.000Z | apps/andi/test/integration/andi_web/live/data_dictionary_tree_test.exs | calebcarroll1/smartcitiesdata | b0f03496f6c592c82ba14aebf6c5996311cf3cd0 | [
"Apache-2.0"
] | 3 | 2020-10-06T16:17:49.000Z | 2021-09-03T17:11:41.000Z | defmodule AndiWeb.EditLiveView.DataDictionaryTreeTest do
use ExUnit.Case
use AndiWeb.Test.PublicAccessCase
use Andi.DataCase
use AndiWeb.Test.AuthConnCase.IntegrationCase
import Phoenix.LiveViewTest
import SmartCity.TestHelper
@moduletag shared_data_connection: true
alias Andi.InputSchemas.Datasets
alias SmartCity.TestDataGenerator, as: TDG
import FlokiHelpers,
only: [
get_select: 2,
get_attributes: 3
]
@endpoint AndiWeb.Endpoint
@url_path "/submissions/"
describe "expand/collapse and check/uncheck" do
setup %{conn: conn} do
dataset =
TDG.create_dataset(%{
technical: %{
schema: [
%{
name: "one",
type: "list",
itemType: "map",
subSchema: [
%{
name: "one-one",
type: "string"
}
]
},
%{
name: "two",
type: "map",
subSchema: [
%{
name: "two-one",
type: "integer"
}
]
}
]
}
})
{:ok, andi_dataset} = Datasets.update(dataset)
assert {:ok, view, html} = live(conn, @url_path <> andi_dataset.id)
data_dictionary_view = find_live_child(view, "data_dictionary_form_editor")
[expandable_one_id, expandable_two_id] =
get_attributes(html, ".data-dictionary-tree-field__action[phx-click='toggle_expanded']", "phx-value-field-id")
[expandable_one_target, expandable_two_target] =
get_attributes(html, ".data-dictionary-tree-field__action[phx-click='toggle_expanded']", "phx-target")
[checkable_one_id, _, _, checkable_two_id] =
get_attributes(html, ".data-dictionary-tree-field__text[phx-click='toggle_selected']", "phx-value-field-id")
[checkable_one_target, _, _, checkable_two_target] =
get_attributes(html, ".data-dictionary-tree-field__text[phx-click='toggle_selected']", "phx-target")
[
view: data_dictionary_view,
html: html,
expandable_one: %{id: expandable_one_id, target: expandable_one_target, name: "one"},
expandable_two: %{id: expandable_two_id, target: expandable_two_target},
checkable_one: %{id: checkable_one_id, target: checkable_one_target, name: "one", type: "list"},
checkable_two: %{id: checkable_two_id, target: checkable_two_target}
]
end
test "first field is selected by default", %{html: html, expandable_one: expandable_one} do
one_id = expandable_one.id
assert [^one_id] = get_action_field_ids(html, "selected")
end
test "first field values are displayed in the editor by default", %{html: html, expandable_one: expandable_one} do
assert [expandable_one.name] == get_attributes(html, ".data-dictionary-field-editor__name", "value")
end
test "initially expandable fields are expanded", %{html: html, expandable_one: expandable_one, expandable_two: expandable_two} do
one_id = expandable_one.id
two_id = expandable_two.id
assert [^one_id, ^two_id] = get_action_field_ids(html, "expanded")
end
test "clicking an expandable field once collapses it", %{view: view, expandable_one: expandable_one} do
one_id = expandable_one.id
expandable = element(view, ".data-dictionary-tree-field__action[phx-value-field-id='#{one_id}']")
html = render_click(expandable)
assert [^one_id] = get_action_field_ids(html, "collapsed")
end
test "clicking an expandable field twice toggles it", %{view: view, expandable_one: expandable_one} do
one_id = expandable_one.id
expandable = element(view, ".data-dictionary-tree-field__action[phx-value-field-id='#{one_id}']")
_html = render_click(expandable)
html = render_click(expandable)
assert [^one_id | _] = get_action_field_ids(html, "expanded")
end
test "clicking an expandable field does not affect another field", %{
view: view,
html: html,
expandable_one: expandable_one,
expandable_two: expandable_two
} do
one_id = expandable_one.id
two_id = expandable_two.id
assert [^one_id, ^two_id] = get_action_field_ids(html, "expanded")
expandable = element(view, ".data-dictionary-tree-field__action[phx-value-field-id='#{two_id}']")
html = render_click(expandable)
assert [^one_id] = get_action_field_ids(html, "expanded")
assert [^two_id] = get_action_field_ids(html, "collapsed")
end
test "clicking a selectable and expandable field once selects it but leaves it expanded", %{view: view, checkable_one: checkable_one} do
one_id = checkable_one.id
selectable = element(view, ".data-dictionary-tree-field__text[phx-value-field-id='#{one_id}']")
html = render_click(selectable)
assert [^one_id] = get_action_field_ids(html, "selected")
assert [^one_id | _] = get_action_field_ids(html, "expanded")
end
test "clicking a selectable and checkable field once selects and checks it", %{view: view, checkable_two: checkable_two} do
two_id = checkable_two.id
selectable = element(view, ".data-dictionary-tree-field__text[phx-value-field-id='#{two_id}']")
html = render_click(selectable)
assert [^two_id] = get_action_field_ids(html, "selected")
assert [^two_id | _] = get_action_field_ids(html, "checked")
end
test "clicking a checkable field twice does not unselect it", %{view: view, checkable_one: checkable_one} do
one_id = checkable_one.id
selectable = element(view, ".data-dictionary-tree-field__text[phx-value-field-id='#{one_id}']")
_html = render_click(selectable)
html = render_click(selectable)
assert [^one_id] = get_action_field_ids(html, "selected")
end
test "clicking a selectable field unselects other field (only one checked at a time)", %{
view: view,
html: html,
checkable_one: checkable_one,
checkable_two: checkable_two
} do
one_id = checkable_one.id
two_id = checkable_two.id
assert one_id in get_action_field_ids(html, "selected")
assert two_id in get_action_field_ids(html, "unselected")
selectable_one = element(view, ".data-dictionary-tree-field__text[phx-value-field-id='#{one_id}']")
selectable_two = element(view, ".data-dictionary-tree-field__text[phx-value-field-id='#{two_id}']")
html = render_click(selectable_two)
assert one_id in get_action_field_ids(html, "unselected")
assert two_id in get_action_field_ids(html, "selected")
html = render_click(selectable_one)
assert one_id in get_action_field_ids(html, "selected")
assert two_id in get_action_field_ids(html, "unselected")
end
test "clicking a checkable field fills the field editor with its corresponding values", %{view: view, checkable_one: checkable_one} do
one_id = checkable_one.id
one_name = checkable_one.name
one_type = checkable_one.type
selectable = element(view, ".data-dictionary-tree-field__text[phx-value-field-id='#{one_id}']")
_html = render_click(selectable)
eventually(fn ->
html = render(view)
assert [one_name] == get_attributes(html, ".data-dictionary-field-editor__name", "value")
assert {one_type, Macro.camelize(one_type)} == get_select(html, ".data-dictionary-field-editor__type")
end)
end
end
def get_action_field_ids(html, action) do
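    # Field ids of all tree fields currently rendered in the given state, e.g. "expanded", "selected" or "checked"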
get_attributes(html, ".data-dictionary-tree__field--#{action} > .data-dictionary-tree-field__action", "phx-value-field-id")
end
end
| 36.392523 | 140 | 0.659219 |
f7a2bbb3478ea23d8afece640778a4510f6fb5cb | 4,723 | exs | Elixir | .credo.exs | beadsland/elixirscript | cb9698ad96075fcbe87b3933009d7ab2a2c939de | [
"MIT"
] | 854 | 2017-02-19T01:50:45.000Z | 2022-03-14T18:55:38.000Z | .credo.exs | beadsland/elixirscript | cb9698ad96075fcbe87b3933009d7ab2a2c939de | [
"MIT"
] | 210 | 2017-02-20T17:44:39.000Z | 2020-08-01T10:18:07.000Z | .credo.exs | beadsland/elixirscript | cb9698ad96075fcbe87b3933009d7ab2a2c939de | [
"MIT"
] | 56 | 2017-02-19T14:50:05.000Z | 2022-02-25T17:25:30.000Z | # This file contains the configuration for Credo and you are probably reading
# this after creating it with `mix credo.gen.config`.
#
# If you find anything wrong or unclear in this file, please report an
# issue on GitHub: https://github.com/rrrene/credo/issues
#
%{
#
# You can have as many configs as you like in the `configs:` field.
configs: [
%{
#
# Run any config using `mix credo -C <name>`. If no config name is given
# "default" is used.
name: "default",
#
# These are the files included in the analysis:
files: %{
#
# You can give explicit globs or simply directories.
# In the latter case `**/*.{ex,exs}` will be used.
included: ["lib/", "src/", "web/", "apps/"],
excluded: [~r"/_build/", ~r"/deps/"]
},
#
# If you create your own checks, you must specify the source files for
# them here, so they can be loaded by Credo before running the analysis.
requires: [],
#
# Credo automatically checks for updates, like e.g. Hex does.
# You can disable this behaviour below:
check_for_updates: true,
#
# If you want to enforce a style guide and need a more traditional linting
# experience, you can change `strict` to `true` below:
strict: false,
#
# If you want to use uncolored output by default, you can change `color`
# to `false` below:
color: true,
#
# You can customize the parameters of any check by adding a second element
# to the tuple.
#
# To disable a check put `false` as second element:
#
# {Credo.Check.Design.DuplicatedCode, false}
#
checks: [
{Credo.Check.Consistency.ExceptionNames},
{Credo.Check.Consistency.LineEndings},
{Credo.Check.Consistency.MultiAliasImportRequireUse},
{Credo.Check.Consistency.ParameterPatternMatching},
{Credo.Check.Consistency.SpaceAroundOperators},
{Credo.Check.Consistency.SpaceInParentheses, false},
{Credo.Check.Consistency.TabsOrSpaces},
{Credo.Check.Design.DuplicatedCode, excluded_macros: []},
{Credo.Check.Design.TagTODO, exit_status: 2},
{Credo.Check.Design.TagFIXME},
{Credo.Check.Design.AliasUsage, false},
{Credo.Check.Readability.FunctionNames},
{Credo.Check.Readability.LargeNumbers},
{Credo.Check.Readability.MaxLineLength,
priority: :low, max_length: 80, ignore_specs: true
},
{Credo.Check.Readability.ModuleAttributeNames},
{Credo.Check.Readability.ModuleDoc},
{Credo.Check.Readability.ModuleNames},
{Credo.Check.Readability.ParenthesesOnZeroArityDefs},
{Credo.Check.Readability.ParenthesesInCondition},
{Credo.Check.Readability.PredicateFunctionNames},
{Credo.Check.Readability.PreferImplicitTry},
{Credo.Check.Readability.RedundantBlankLines},
{Credo.Check.Readability.Specs, false},
{Credo.Check.Readability.StringSigils},
{Credo.Check.Readability.TrailingBlankLine},
{Credo.Check.Readability.TrailingWhiteSpace},
{Credo.Check.Readability.VariableNames},
{Credo.Check.Refactor.DoubleBooleanNegation},
{Credo.Check.Refactor.ABCSize},
{Credo.Check.Refactor.CondStatements},
{Credo.Check.Refactor.CyclomaticComplexity},
{Credo.Check.Refactor.FunctionArity},
{Credo.Check.Refactor.MatchInCondition},
{Credo.Check.Refactor.NegatedConditionsInUnless},
{Credo.Check.Refactor.NegatedConditionsWithElse},
{Credo.Check.Refactor.Nesting},
{Credo.Check.Refactor.PipeChainStart, false},
{Credo.Check.Refactor.UnlessWithElse},
{Credo.Check.Refactor.VariableRebinding, false},
{Credo.Check.Warning.BoolOperationOnSameValues},
{Credo.Check.Warning.IExPry},
{Credo.Check.Warning.IoInspect},
{Credo.Check.Warning.NameRedeclarationByAssignment},
{Credo.Check.Warning.NameRedeclarationByCase},
{Credo.Check.Warning.NameRedeclarationByDef},
{Credo.Check.Warning.NameRedeclarationByFn},
{Credo.Check.Warning.OperationOnSameValues},
{Credo.Check.Warning.OperationWithConstantResult},
{Credo.Check.Warning.UnusedEnumOperation},
{Credo.Check.Warning.UnusedFileOperation},
{Credo.Check.Warning.UnusedKeywordOperation},
{Credo.Check.Warning.UnusedListOperation},
{Credo.Check.Warning.UnusedPathOperation},
{Credo.Check.Warning.UnusedRegexOperation},
{Credo.Check.Warning.UnusedStringOperation},
{Credo.Check.Warning.UnusedTupleOperation},
]
}
]
}
| 41.069565 | 80 | 0.663138 |
f7a3122457263a3ee6753200016543847efb4771 | 5,460 | ex | Elixir | lib/titeenipeli_web/channels/game_channel.ex | Cadiac/titeenit-backend | 51db7a1f93dc78a769bb309b94b1b893cefdcdc9 | [
"MIT"
] | null | null | null | lib/titeenipeli_web/channels/game_channel.ex | Cadiac/titeenit-backend | 51db7a1f93dc78a769bb309b94b1b893cefdcdc9 | [
"MIT"
] | null | null | null | lib/titeenipeli_web/channels/game_channel.ex | Cadiac/titeenit-backend | 51db7a1f93dc78a769bb309b94b1b893cefdcdc9 | [
"MIT"
] | null | null | null | defmodule TiteenipeliWeb.GameChannel do
use TiteenipeliWeb, :channel
require Logger
alias Titeenipeli.Game
alias Titeenipeli.Core
alias Titeenipeli.Model.User
alias TiteenipeliWeb.GameView
@ratelimit_period 10_000
@ratelimit_count 100
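  # Together these allow at most 100 rate-limited calls (join/begin_cast) per player every 10 seconds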
def join("game:" <> game_id, _payload, socket) do
player_id = socket.assigns.player_id
Logger.debug("#{player_id} is joining Game channel #{game_id}")
case Hammer.check_rate("game_channel:#{player_id}", @ratelimit_period, @ratelimit_count) do
{:allow, _count} ->
with [game_guild_id, _game_zone_id] <- String.split(game_id, ":") do
# Make sure player guild matches game_id
%User{guild_id: user_guild_id, is_banned: is_banned} = Core.get_user!(player_id)
cond do
is_banned ->
{:error, %{reason: "Banned."}}
"#{user_guild_id}" != game_guild_id ->
{:error, %{reason: "Refused connection, not your guild"}}
true ->
case Game.player_connected(game_id, player_id, socket.channel_pid) do
{:ok, pid} ->
Process.monitor(pid)
status = Game.get_status(game_id, player_id)
{:ok, GameView.render("game_status.json", status, player_id),
assign(socket, :game_id, game_id)}
{:error, reason} ->
{:error, %{reason: reason}}
end
end
else
_err ->
{:error, %{reason: "Invalid game_id"}}
end
{:deny, _limit} ->
Logger.warn("#{player_id} game_channel ratelimit hit at join!")
{:error, GameView.render("error.json", %{reason: "Slow down, rate limit exceeded. (10req/s)"})}
end
end
def terminate(reason, socket) do
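    # Notify the Game server that this player disconnected; nothing to do if the socket never fully joined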
Logger.debug("Terminating GameChannel #{socket.assigns.player_id} - #{inspect(reason)}")
with %{player_id: player_id, game_id: game_id} <- socket.assigns do
Game.player_disconnected(game_id, player_id)
:ok
else
_err ->
:ok
end
end
# Commands
def handle_in("game:begin_cast", %{"spell_id" => spell_id}, socket) when is_number(spell_id) do
player_id = socket.assigns.player_id
game_id = socket.assigns.game_id
case Hammer.check_rate("game_channel:#{player_id}", @ratelimit_period, @ratelimit_count) do
{:allow, _count} ->
case Game.begin_cast(game_id, player_id, spell_id) do
{:ok, result} ->
{:reply, {:ok, GameView.render("begin_cast.json", result)}, socket}
{:error, message} ->
{:reply, {:error, GameView.render("error.json", %{reason: message})}, socket}
end
{:deny, _limit} ->
Logger.warn("#{player_id} game_channel ratelimit hit at cast!")
{:stop,
{:error,
GameView.render("error.json", %{reason: "Slow down, rate limit exceeded. (10req/s)"})},
socket}
end
end
# Broadcast
def broadcast_stop(game_id) do
Logger.debug("Broadcasting game:stopped from GameChannel #{game_id}")
TiteenipeliWeb.Endpoint.broadcast("game:#{game_id}", "game:stopped", %{})
end
def broadcast_player_connected(game_id, players, player) do
Logger.debug("Broadcasting game:player_connected GameChannel #{game_id}")
TiteenipeliWeb.Endpoint.broadcast(
"game:#{game_id}",
"game:player_connected",
GameView.render("player_connected.json", players, player)
)
end
def broadcast_player_disconnected(game_id, players, player) do
Logger.debug("Broadcasting game:player_disconnected GameChannel #{game_id}")
TiteenipeliWeb.Endpoint.broadcast(
"game:#{game_id}",
"game:player_disconnected",
GameView.render("player_disconnected.json", players, player)
)
end
def broadcast_npc_updated(game_id, npc) do
TiteenipeliWeb.Endpoint.broadcast(
"game:#{game_id}",
"game:npc_updated",
GameView.render("npc_updated.json", %{npc: npc})
)
end
def broadcast_npc_defeated(game_id, game) do
Logger.debug("Broadcasting game:npc_defeated GameChannel #{game_id}")
TiteenipeliWeb.Endpoint.broadcast(
"game:#{game_id}",
"game:npc_defeated",
GameView.render("npc_defeated.json", game)
)
end
def broadcast_server_message(game_id, message) do
Logger.debug("Broadcasting game:server_message GameChannel #{game_id}")
TiteenipeliWeb.Endpoint.broadcast(
"game:#{game_id}",
"game:server_message",
GameView.render("server_message.json", message)
)
end
def broadcast_buff_gained(game_id, buff) do
Logger.debug("Broadcasting game:buff_gained GameChannel #{game_id}")
TiteenipeliWeb.Endpoint.broadcast(
"game:#{game_id}",
"game:buff_gained",
GameView.render("buff.json", buff)
)
end
def broadcast_buff_faded(game_id, buff) do
Logger.debug("Broadcasting game:buff_faded GameChannel #{game_id}")
TiteenipeliWeb.Endpoint.broadcast(
"game:#{game_id}",
"game:buff_faded",
GameView.render("buff.json", buff)
)
end
def broadcast_npc_damaged(game_id, damage, npc, spell, player, effect) do
Logger.debug("Broadcasting game:damage_npc GameChannel #{game_id}")
TiteenipeliWeb.Endpoint.broadcast(
"game:#{game_id}",
"game:damage_npc",
GameView.render("damage.json", %{damage: damage, npc: npc, spell: spell, from: player, effect: effect})
)
end
end
| 30.674157 | 109 | 0.642308 |
f7a31a2e2650b0fccd54b8a63ac24985cc1e2101 | 328 | exs | Elixir | test/lilac/module_test.exs | Stratus3D/lilac | 0d3cc5a1c416d8b94b5744b083046295f1e832ba | [
"MIT"
] | 2 | 2015-02-16T15:22:15.000Z | 2016-06-10T23:38:08.000Z | test/lilac/module_test.exs | Stratus3D/lilac | 0d3cc5a1c416d8b94b5744b083046295f1e832ba | [
"MIT"
] | null | null | null | test/lilac/module_test.exs | Stratus3D/lilac | 0d3cc5a1c416d8b94b5744b083046295f1e832ba | [
"MIT"
] | null | null | null | defmodule Lilac.ModuleTest do
use ExUnit.Case
test "All functions in :teal_behaviours are available" do
lilac_exports = length(Lilac.Module.module_info :exports) - 1 # Elixir modules have an extra function
teal_exports = length(:teal_modules.module_info :exports)
assert lilac_exports == teal_exports
end
end
| 32.8 | 105 | 0.768293 |
f7a32c0aedec4b4672d16fd26d123d4cecb00d87 | 5,050 | ex | Elixir | lib/elixir/lib/module/checker.ex | Javyre/elixir | 4da31098a84268d040e569590515744c02efb9cc | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module/checker.ex | Javyre/elixir | 4da31098a84268d040e569590515744c02efb9cc | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module/checker.ex | Javyre/elixir | 4da31098a84268d040e569590515744c02efb9cc | [
"Apache-2.0"
] | null | null | null | defmodule Module.Checker do
def verify(module_map, _binary) do
state = %{
file: module_map.file,
module: module_map.module,
compile_opts: module_map.compile_opts,
function: nil
}
module_map.definitions
|> check_definitions(state)
|> List.flatten()
|> merge_warnings()
|> sort_warnings()
|> emit_warnings()
end
defp check_definitions(definitions, state) do
Enum.map(definitions, &check_definition(&1, state))
end
defp check_definition({function, _kind, meta, clauses}, state) do
state = put_file_meta(%{state | function: function}, meta)
Enum.map(clauses, &check_clause(&1, state))
end
defp put_file_meta(state, meta) do
case Keyword.fetch(meta, :file) do
{:ok, {file, _}} -> %{state | file: file}
:error -> state
end
end
defp check_clause({_meta, _args, _guards, body}, state) do
check_body(body, state)
end
# &Mod.fun/arity
defp check_body({:&, meta, [{:/, _, [{{:., dot_meta, [module, fun]}, _, []}, arity]}]}, state)
when is_atom(module) and is_atom(fun) do
check_remote(module, fun, arity, meta ++ dot_meta, state)
end
# Mod.fun(...)
defp check_body({{:., meta, [module, fun]}, _, args}, state)
when is_atom(module) and is_atom(fun) do
check_remote(module, fun, length(args), meta, state)
end
# Function call
defp check_body({left, _meta, right}, state) when is_list(right) do
[check_body(right, state), check_body(left, state)]
end
# {x, y}
defp check_body({left, right}, state) do
[check_body(right, state), check_body(left, state)]
end
# [...]
defp check_body(list, state) when is_list(list) do
Enum.map(list, &check_body(&1, state))
end
defp check_body(_other, _state) do
[]
end
defp check_remote(module, fun, arity, meta, state) do
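    # Warn when the target module cannot be loaded or does not export fun/arity,
    # unless the call is excluded via the :no_warn_undefined compile option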
cond do
not warn_if_undefined?(module, fun, arity, state) ->
[]
# TODO: In the future we may want to warn for modules defined
# in the local context
Keyword.get(meta, :context_module, false) and state.module != module ->
[]
# TODO: Add no_autoload
not Code.ensure_loaded?(module) ->
warn(meta, state, {:undefined_module, module, fun, arity})
not function_exported?(module, fun, arity) ->
exports = exports_for(module)
warn(meta, state, {:undefined_function, module, fun, arity, exports})
true ->
[]
end
end
# TODO: Do not warn inside guards
# TODO: Properly handle protocols
defp warn_if_undefined?(_module, :__impl__, 1, _state), do: false
defp warn_if_undefined?(:erlang, :orelse, 2, _state), do: false
defp warn_if_undefined?(:erlang, :andalso, 2, _state), do: false
defp warn_if_undefined?(module, fun, arity, state) do
for(
{:no_warn_undefined, values} <- state.compile_opts,
value <- List.wrap(values),
value == module or value == {module, fun, arity},
do: :skip
) == []
end
defp exports_for(module) do
try do
module.__info__(:macros) ++ module.__info__(:functions)
rescue
_ -> module.module_info(:exports)
end
end
defp warn(meta, %{file: file, module: module, function: {fun, arity}}, warning) do
{warning, {file, meta[:line], {module, fun, arity}}}
end
defp merge_warnings(warnings) do
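    # Deduplicate identical warnings, collecting the set of locations where each occurred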
Enum.reduce(warnings, %{}, fn {warning, location}, acc ->
locations = MapSet.new([location])
Map.update(acc, warning, locations, &MapSet.put(&1, location))
end)
end
defp sort_warnings(warnings) do
warnings
|> Enum.map(fn {warning, locations} -> {warning, Enum.sort(locations)} end)
|> Enum.sort()
end
defp emit_warnings(warnings) do
Enum.flat_map(warnings, fn {warning, locations} ->
message = format_warning(warning)
print_warning([message, ?\n, format_locations(locations)])
Enum.map(locations, fn {file, line, _mfa} ->
{file, line, message}
end)
end)
end
defp format_warning({:undefined_module, module, fun, arity}) do
[
Exception.format_mfa(module, fun, arity),
" is undefined (module ",
inspect(module),
" is not available or is yet to be defined)"
]
end
defp format_warning({:undefined_function, module, fun, arity, exports}) do
[
Exception.format_mfa(module, fun, arity),
" is undefined or private",
UndefinedFunctionError.hint_for_loaded_module(module, fun, arity, exports)
]
end
defp format_locations([location]) do
format_location(location)
end
defp format_locations(locations) do
[
"Found at #{length(locations)} locations:\n",
Enum.map(locations, &format_location/1)
]
end
defp format_location({file, line, {module, fun, arity}}) do
file = Path.relative_to_cwd(file)
line = Integer.to_string(line)
mfa = Exception.format_mfa(module, fun, arity)
[" ", file, ?:, line, ": ", mfa, ?\n]
end
defp print_warning(message) do
IO.puts(:stderr, [:elixir_errors.warning_prefix(), message])
end
end
| 27.595628 | 96 | 0.640198 |
f7a35be903b733095de356f20aee4b1c27b85a4a | 16,528 | ex | Elixir | lib/mix/tasks/docs.ex | devtayls/ex_doc | 0319ace1782c9a6cd4ade57a5df6f8649d9b2683 | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | lib/mix/tasks/docs.ex | devtayls/ex_doc | 0319ace1782c9a6cd4ade57a5df6f8649d9b2683 | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | lib/mix/tasks/docs.ex | devtayls/ex_doc | 0319ace1782c9a6cd4ade57a5df6f8649d9b2683 | [
"Apache-2.0",
"CC-BY-4.0"
] | null | null | null | defmodule Mix.Tasks.Docs do
use Mix.Task
@shortdoc "Generate documentation for the project"
@moduledoc ~S"""
Uses ExDoc to generate a static web page from the project documentation.
## Command line options
* `--canonical`, `-n` - Indicate the preferred URL with
rel="canonical" link element, defaults to no canonical path
* `--formatter`, `-f` - Which formatters to use, "html" or
"epub". This option can be given more than once. By default,
both html and epub are generated.
* `--output`, `-o` - Output directory for the generated
docs, default: `"doc"`
    * `--language` - Specifies the language to annotate the
      EPUB output in; must be a valid [BCP 47](https://tools.ietf.org/html/bcp47) language tag
The command line options have higher precedence than the options
specified in your `mix.exs` file below.
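  For example, a purely illustrative invocation that builds only HTML output into
  a custom directory:

      mix docs --formatter html --output public/docs
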
## Configuration
ExDoc will automatically pull in information from your project,
like the application and version. However, you may want to set
`:name`, `:source_url` and `:homepage_url` to have a nicer output
from ExDoc, for example:
def project do
[app: :my_app,
version: "0.1.0-dev",
deps: deps(),
# Docs
name: "My App",
source_url: "https://github.com/USER/PROJECT",
homepage_url: "http://YOUR_PROJECT_HOMEPAGE",
docs: [main: "MyApp", # The main page in the docs
logo: "path/to/logo.png",
extras: ["README.md"]]]
end
ExDoc also allows configuration specific to the documentation to
be set. The following options should be put under the `:docs` key
in your project's main configuration. The `:docs` options should
be a keyword list or a function returning a keyword list that will
be lazily executed.
* `:api_reference` - Whether to generate `api-reference.html`; default: `true`.
If this is set to false, `:main` must also be set.
* `:assets` - Path to a directory that will be copied as is to the "assets"
directory in the output path. Its entries may be referenced in your docs
under "assets/ASSET.EXTENSION"; defaults to no assets directory.
* `:before_closing_body_tag` - a function that takes as argument an atom specifying
the formatter being used (`:html` or `:epub`) and returns a literal HTML string
to be included just before the closing body tag (`</body>`).
The atom given as argument can be used to include different content in both formats.
Useful to inject custom assets, such as Javascript.
* `:before_closing_head_tag` - a function that takes as argument an atom specifying
the formatter being used (`:html` or `:epub`) and returns a literal HTML string
to be included just before the closing head tag (`</head>`).
The atom given as argument can be used to include different content in both formats.
Useful to inject custom assets, such as CSS stylesheets.
* `:canonical` - String that defines the preferred URL with the rel="canonical"
element; defaults to no canonical path.
* `:deps` - A keyword list application names and their documentation URL.
ExDoc will by default include all dependencies and assume they are hosted on
HexDocs. This can be overridden by your own values. Example: `[plug: "https://myserver/plug/"]`
* `:extra_section` - String that defines the section title of the additional
Markdown and plain text pages; default: "PAGES". Example: "GUIDES"
* `:extras` - List of keywords, each key must indicate the path to additional
Markdown or plain text pages, the value for each keyword (optional) gives you more control
about the PATH and the title of the output files; default: `[]`. Example:
`["README.md", "LICENSE", "CONTRIBUTING.md": [filename: "contributing", title: "Contributing"]]`
* `:filter_prefix` - Include only modules that match the given prefix in
the generated documentation. Example: "MyApp.Core". If you set this option,
      remember to also set the `:main` option to a module that will be included,
      for example `main: "MyApp.Core.Inner"`.
* `:formatters` - Formatter to use; default: ["html", "epub"], options: "html", "epub".
* `:groups_for_extras`, `:groups_for_modules`, `:groups_for_functions` - See the "Groups" section
* `:javascript_config_path` - Path of an additional JavaScript file to be included on all pages
to provide up-to-date data for features like the version dropdown - See the "Additional
JavaScript config" section. Example: `"../versions.js"`
* `:nest_modules_by_prefix` - See the "Nesting" section
* `:language` - Identify the primary language of the documents, its value must be
a valid [BCP 47](https://tools.ietf.org/html/bcp47) language tag; default: "en"
* `:logo` - Path to the image logo of the project (only PNG or JPEG accepted)
The image size will be 64x64. When specified, the logo will be placed under
the "assets" directory in the output path under the name "logo" and the
appropriate extension.
* `:cover` - Path to the epub cover image (only PNG or JPEG accepted)
The image size should be around 1600x2400. When specified, the cover will be placed under
the "assets" directory in the output path under the name "cover" and the
appropriate extension. This option has no effect when using the "html" formatter.
* `:authors` - List of authors for the generated docs or epub.
* `:main` - Main page of the documentation. It may be a module or a
generated page, like "Plug" or "api-reference"; default: "api-reference".
* `:markdown_processor` - The markdown processor to use,
      either `module()` or `{module(), keyword()}` to provide configuration options.
* `:source_beam` - Path to the beam directory; default: mix's compile path.
* `:source_ref` - The branch/commit/tag used for source link inference;
default: "master".
* `:source_url_pattern` - Public URL of the project for source links. This is derived
automatically from the project's `:source_url` and `:source_ref` when using one of
the supported public hosting services (currently GitHub, GitLab, or Bitbucket). If
you are using one of those services with their default public hostname, you do not
need to set this configuration.
However, if using a different solution, or self-hosting, you will need to set this
configuration variable to a pattern for source code links. The value must be a string
of the full URI to use for links with the following variables available for interpolation:
* `%{path}`: the path of a file in the repo
* `%{line}`: the line number in the file
For GitLab/GitHub:
```text
https://mydomain.org/user_or_team/repo_name/blob/master/%{path}#L%{line}
```
For Bitbucket:
```text
https://mydomain.org/user_or_team/repo_name/src/master/%{path}#cl-%{line}
```
* `:output` - Output directory for the generated docs; default: "doc".
May be overridden by command line argument.
* `:ignore_apps` - Apps to be ignored when generating documentation in an umbrella project.
Receives a list of atoms. Example: `[:first_app, :second_app]`.
* `:skip_undefined_reference_warnings_on` - ExDoc warns when it can't create a `Mod.fun/arity`
reference in the current project docs e.g. because of a typo. This list controls where to
skip the warnings, for a given module/function/callback/type (e.g.: `["Foo", "Bar.baz/0"]`)
or on a given file (e.g.: `["pages/deprecations.md"]`); default: `[]`.
## Groups
ExDoc content can be organized in groups. This is done via the `:groups_for_extras`
and `:groups_for_modules`. For example, imagine you are storing extra guides in
your documentation which are organized per directory. In the extras section you
have:
extras: [
"guides/introduction/foo.md",
"guides/introduction/bar.md",
...
"guides/advanced/baz.md",
"guides/advanced/bat.md"
]
You can have those grouped as follows:
groups_for_extras: [
"Introduction": Path.wildcard("guides/introduction/*.md"),
"Advanced": Path.wildcard("guides/advanced/*.md")
]
Or via a regex:
groups_for_extras: [
"Introduction": ~r"/introduction/",
"Advanced": ~r"/advanced/"
]
Similar can be done for modules:
groups_for_modules: [
"Data types": [Atom, Regex, URI],
"Collections": [Enum, MapSet, Stream]
]
A regex or the string name of the module is also supported.
### Grouping functions
Functions inside a module can also be organized in groups. This is done via
the `:groups_for_functions` configuration which is a keyword list of group
titles and filtering functions that receive the documentation metadata of
functions as argument.
For example, imagine that you have an API client library with a large surface
area for all the API endpoints you need to support. It would be helpful to
group the functions with similar responsibilities together. In this case in
your module you might have:
defmodule APIClient do
@doc section: :auth
def refresh_token(params \\ [])
@doc subject: :object
def update_status(id, new_status)
@doc permission: :grant
def grant_privilege(resource, privilege)
end
And then in the configuration you can group these with:
groups_for_functions: [
Authentication: & &1[:section] == :auth,
Resource: & &1[:subject] == :object,
Admin: & &1[:permission] in [:grant, :write]
]
A function can belong to a single group only. If multiple group filters match,
the first will take precedence. Functions that don't have a custom group will
be listed under the default "Functions" group.
## Additional JavaScript config
Since version `0.20.0` ExDoc includes a way to enrich the documentation
with new information without having to re-generate it, through a JavaScript
file that can be shared across documentation for multiple versions of the
package. If `:javascript_config_path` is set when building the documentation,
this script will be referenced in each page's `<head>` using a `<script>` tag.
The script should define data in global JavaScript variables that will be
interpreted by `ex_doc` when viewing the documentation.
Currently supported variables:
### `versionNodes`
This global JavaScript variable should be providing an array of objects that
define all versions of this Mix package which should appear in the package
versions dropdown in the documentation sidebar. The versions dropdown allows
for switching between package versions' documentation.
Example:
```javascript
var versionNodes = [
{
version: "v0.0.0", // version number or name (required)
url: "https://hexdocs.pm/ex_doc/0.19.3/" // documentation URL (required)
}
]
```
## Nesting
ExDoc also allows module names in the sidebar to appear nested under a given
prefix. The `:nest_modules_by_prefix` expects a list of module names, such as
`[Foo.Bar, Bar.Baz]`. In this case, a module named `Foo.Bar.Baz` will appear
nested within `Foo.Bar` and only the name `Baz` will be shown in the sidebar.
Note the `Foo.Bar` module itself is not affected.
This option is mainly intended to improve the display of long module names in
the sidebar, particularly when they are too long for the sidebar or when many
modules share a long prefix. If you mean to group modules logically or call
attention to them in the docs, you should probably use `:groups_for_modules`
(which can be used in conjunction with `:nest_modules_by_prefix`).
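  A minimal illustrative snippet (the module name here is only an example):

      docs: [
        nest_modules_by_prefix: [Foo.Bar]
      ]
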
## Umbrella project
ExDoc can be used in an umbrella project and generates a single documentation
for all child apps. You can use the `:ignore_apps` configuration to exclude
certain projects in the umbrella from documentation.
Generating documentation per each child app can be achieved by running:
mix cmd mix docs
See `mix help cmd` for more information.
"""
@switches [
canonical: :string,
formatter: :keep,
language: :string,
output: :string
]
@aliases [n: :canonical, f: :formatter, o: :output]
@doc false
def run(args, config \\ Mix.Project.config(), generator \\ &ExDoc.generate_docs/3) do
Mix.Task.run("compile")
{:ok, _} = Application.ensure_all_started(:ex_doc)
unless Code.ensure_loaded?(ExDoc.Config) do
Mix.raise(
"Could not load ExDoc configuration. Please make sure you are running the " <>
"docs task in the same Mix environment it is listed in your deps"
)
end
{cli_opts, args, _} = OptionParser.parse(args, aliases: @aliases, switches: @switches)
if args != [] do
Mix.raise("Extraneous arguments on the command line")
end
project =
to_string(
config[:name] || config[:app] ||
raise("expected :name or :app to be found in the project definition in mix.exs")
)
version = config[:version] || "dev"
options =
config
|> get_docs_opts()
|> Keyword.merge(cli_opts)
# accepted at root level config
|> normalize_source_url(config)
# accepted at root level config
|> normalize_homepage_url(config)
|> normalize_source_beam(config)
|> normalize_apps(config)
|> normalize_main()
|> normalize_deps()
|> put_package(config)
Mix.shell().info("Generating docs...")
for formatter <- get_formatters(options) do
index = generator.(project, version, Keyword.put(options, :formatter, formatter))
Mix.shell().info([:green, "View #{inspect(formatter)} docs at #{inspect(index)}"])
index
end
end
defp get_formatters(options) do
case Keyword.get_values(options, :formatter) do
[] -> options[:formatters] || ["html", "epub"]
values -> values
end
end
defp get_docs_opts(config) do
docs = config[:docs]
cond do
is_function(docs, 0) -> docs.()
is_nil(docs) -> []
true -> docs
end
end
defp normalize_source_url(options, config) do
if source_url = config[:source_url] do
Keyword.put(options, :source_url, source_url)
else
options
end
end
defp normalize_homepage_url(options, config) do
if homepage_url = config[:homepage_url] do
Keyword.put(options, :homepage_url, homepage_url)
else
options
end
end
defp normalize_source_beam(options, config) do
compile_path =
if Mix.Project.umbrella?(config) do
umbrella_compile_paths(Keyword.get(options, :ignore_apps, []))
else
Mix.Project.compile_path()
end
Keyword.put_new(options, :source_beam, compile_path)
end
defp umbrella_compile_paths(ignored_apps) do
build = Mix.Project.build_path()
for {app, _} <- Mix.Project.apps_paths(),
app not in ignored_apps do
Path.join([build, "lib", Atom.to_string(app), "ebin"])
end
end
defp normalize_apps(options, config) do
if Mix.Project.umbrella?(config) do
ignore = Keyword.get(options, :ignore_apps, [])
apps =
for {app, _} <- Mix.Project.apps_paths(), app not in ignore do
app
end
Keyword.put(options, :apps, apps)
else
Keyword.put(options, :apps, List.wrap(config[:app]))
end
end
defp normalize_main(options) do
main = options[:main]
cond do
is_nil(main) ->
Keyword.delete(options, :main)
is_atom(main) ->
Keyword.put(options, :main, inspect(main))
is_binary(main) ->
options
end
end
defp normalize_deps(options) do
user_deps = Keyword.get(options, :deps, [])
deps =
for {app, doc} <- Keyword.merge(get_deps(), user_deps),
lib_dir = :code.lib_dir(app),
is_list(lib_dir),
do: {app, doc}
Keyword.put(options, :deps, deps)
end
defp get_deps do
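    # Default every loaded dependency to the HexDocs URL for its resolved version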
for {key, _} <- Mix.Project.deps_paths(),
_ = Application.load(key),
vsn = Application.spec(key, :vsn) do
{key, "https://hexdocs.pm/#{key}/#{vsn}/"}
end
end
defp put_package(options, config) do
if package = config[:package] do
Keyword.put(options, :package, package[:name] || config[:app])
else
options
end
end
end
| 35.467811 | 102 | 0.674613 |
f7a35cd65e82ae3997f03bf899628d2a1a54d414 | 3,981 | ex | Elixir | lib/elixir/lib/collectable.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/collectable.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/collectable.ex | chulkilee/elixir | 699231dcad52916a76f38856cbd7cf7c7bdadc51 | [
"Apache-2.0"
] | null | null | null | defprotocol Collectable do
@moduledoc """
A protocol to traverse data structures.
The `Enum.into/2` function uses this protocol to insert an
enumerable into a collection:
iex> Enum.into([a: 1, b: 2], %{})
%{a: 1, b: 2}
## Why Collectable?
The `Enumerable` protocol is useful to take values out of a collection.
In order to support a wide range of values, the functions provided by
the `Enumerable` protocol do not keep shape. For example, passing a
map to `Enum.map/2` always returns a list.
This design is intentional. `Enumerable` was designed to support infinite
collections, resources and other structures with fixed shape. For example,
it doesn't make sense to insert values into a range, as it has a fixed
shape where just the range limits are stored.
The `Collectable` module was designed to fill the gap left by the
`Enumerable` protocol. `into/1` can be seen as the opposite of
`Enumerable.reduce/3`. If `Enumerable` is about taking values out,
`Collectable.into/1` is about collecting those values into a structure.
## Examples
To show how to manually use the `Collectable` protocol, let's play with its
implementation for `MapSet`.
iex> {initial_acc, collector_fun} = Collectable.into(MapSet.new())
iex> updated_acc = Enum.reduce([1, 2, 3], initial_acc, fn elem, acc ->
...> collector_fun.(acc, {:cont, elem})
...> end)
iex> collector_fun.(updated_acc, :done)
#MapSet<[1, 2, 3]>
To show how the protocol can be implemented, we can take again a look at the
implementation for `MapSet`. In this implementation "collecting" elements
simply means inserting them in the set through `MapSet.put/2`.
defimpl Collectable do
def into(original) do
collector_fun = fn
set, {:cont, elem} -> MapSet.put(set, elem)
set, :done -> set
_set, :halt -> :ok
end
{original, collector_fun}
end
end
"""
@type command :: {:cont, term} | :done | :halt
@doc """
Returns an initial accumulator and a "collector" function.
The returned function receives a term and a command and injects the term into
the collectable on every `{:cont, term}` command.
`:done` is passed as a command when no further values will be injected. This
is useful when there's a need to close resources or normalizing values. A
collectable must be returned when the command is `:done`.
If injection is suddenly interrupted, `:halt` is passed and the function
can return any value as it won't be used.
For examples on how to use the `Collectable` protocol and `into/1` see the
module documentation.
"""
@spec into(t) :: {term, (term, command -> t | term)}
def into(collectable)
end
defimpl Collectable, for: List do
def into(original) do
fun = fn
list, {:cont, x} -> [x | list]
list, :done -> original ++ :lists.reverse(list)
_, :halt -> :ok
end
{[], fun}
end
end
defimpl Collectable, for: BitString do
def into(original) when is_binary(original) do
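    # Chunks are accumulated as iodata and only flattened into a binary when a
    # non-binary bitstring is appended or when collection is done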
fun = fn
acc, {:cont, x} when is_binary(x) and is_list(acc) ->
[acc | x]
acc, {:cont, x} when is_bitstring(x) and is_bitstring(acc) ->
<<acc::bitstring, x::bitstring>>
acc, {:cont, x} when is_bitstring(x) ->
<<IO.iodata_to_binary(acc)::bitstring, x::bitstring>>
acc, :done ->
IO.iodata_to_binary(acc)
_, :halt ->
:ok
end
{[original], fun}
end
def into(original) when is_bitstring(original) do
fun = fn
acc, {:cont, x} when is_bitstring(x) ->
<<acc::bitstring, x::bitstring>>
acc, :done ->
acc
_, :halt ->
:ok
end
{original, fun}
end
end
defimpl Collectable, for: Map do
def into(original) do
fun = fn
map, {:cont, {k, v}} -> :maps.put(k, v, map)
map, :done -> map
_, :halt -> :ok
end
{original, fun}
end
end
| 28.234043 | 79 | 0.639287 |
f7a374cc245d3dbab452449f8d0316fc24276d04 | 71 | exs | Elixir | 2016/day09/priv/day09.exs | matt-thomson/advent-of-code | feff903151284240a9d3f0c84cdfe52d8d11ef06 | [
"MIT"
] | null | null | null | 2016/day09/priv/day09.exs | matt-thomson/advent-of-code | feff903151284240a9d3f0c84cdfe52d8d11ef06 | [
"MIT"
] | null | null | null | 2016/day09/priv/day09.exs | matt-thomson/advent-of-code | feff903151284240a9d3f0c84cdfe52d8d11ef06 | [
"MIT"
] | null | null | null | filename = System.argv |> Enum.at(0)
IO.puts Day09.part_one(filename)
| 17.75 | 36 | 0.732394 |
f7a37db01f7b2564c371d6095566aea2cca10f94 | 253 | exs | Elixir | .formatter.exs | thepeoplesbourgeois/phoenix_live_controller | 04286b79697e9a7a62abb37efd7725dd9124a6c3 | [
"MIT"
] | 35 | 2020-04-15T17:15:36.000Z | 2021-11-08T08:50:50.000Z | .formatter.exs | thepeoplesbourgeois/phoenix_live_controller | 04286b79697e9a7a62abb37efd7725dd9124a6c3 | [
"MIT"
] | 3 | 2020-04-15T14:07:28.000Z | 2022-03-25T23:08:23.000Z | .formatter.exs | thepeoplesbourgeois/phoenix_live_controller | 04286b79697e9a7a62abb37efd7725dd9124a6c3 | [
"MIT"
] | 3 | 2020-11-08T15:37:31.000Z | 2022-03-23T05:36:54.000Z | export_locals_without_parens = [plug: 1, plug: 2]
[
inputs: ["{mix,.formatter}.exs", "{config,lib,test}/**/*.{ex,exs}"],
locals_without_parens: export_locals_without_parens,
export: [
locals_without_parens: export_locals_without_parens
]
]
| 25.3 | 70 | 0.715415 |
f7a37e8a8236c5fdd6756d77205d835a04d37137 | 12,857 | ex | Elixir | apps/nerves_hub_web_core/lib/nerves_hub_web_core/firmwares.ex | acrogenesis/nerves_hub_web | 27f651dd64b40a034254b50805884f4efd679957 | ["Apache-2.0"] | 1 | 2019-10-13T10:56:28.000Z | 2019-10-13T10:56:28.000Z | apps/nerves_hub_web_core/lib/nerves_hub_web_core/firmwares.ex | Eaftos/nerves_hub_web | ac03bd044b97265bf3ba3edd8da249d300fa3668 | ["Apache-2.0"] | null | null | null | apps/nerves_hub_web_core/lib/nerves_hub_web_core/firmwares.ex | Eaftos/nerves_hub_web | ac03bd044b97265bf3ba3edd8da249d300fa3668 | ["Apache-2.0"] | null | null | null |
defmodule NervesHubWebCore.Firmwares do
import Ecto.Query
alias Ecto.Changeset
alias NervesHubWebCore.Accounts
alias NervesHubWebCore.Accounts.{OrgKey, Org}
alias NervesHubWebCore.Firmwares.{Firmware, FirmwareMetadata, FirmwareTransfer}
alias NervesHubWebCore.Products
alias NervesHubWebCore.Products.Product
alias NervesHubWebCore.Repo
require Logger
@type upload_file_2 :: (filepath :: String.t(), filename :: String.t() -> :ok | {:error, any()})
@uploader Application.fetch_env!(:nerves_hub_web_core, :firmware_upload)
@spec get_firmwares_by_product(integer()) :: [Firmware.t()]
def get_firmwares_by_product(product_id) do
from(
f in Firmware,
where: f.product_id == ^product_id
)
|> Firmware.with_product()
|> Repo.all()
end
@spec get_firmware(Org.t(), integer()) ::
{:ok, Firmware.t()}
| {:error, :not_found}
def get_firmware(%Org{id: org_id}, id) do
from(
f in Firmware,
where: f.id == ^id,
join: p in assoc(f, :product),
where: p.org_id == ^org_id
)
|> Firmware.with_product()
|> Repo.one()
|> case do
nil -> {:error, :not_found}
firmware -> {:ok, firmware}
end
end
def get_firmware!(firmware_id), do: Repo.get!(Firmware, firmware_id)
@spec get_firmware_by_org_id(non_neg_integer()) :: [Firmware.t()]
def get_firmware_by_org_id(org_id) do
q =
from(
f in Firmware,
join: p in assoc(f, :product),
where: p.org_id == ^org_id
)
Repo.all(q)
end
@spec get_firmware_by_product_and_version(Org.t(), String.t(), String.t()) ::
{:ok, Firmware.t()}
| {:error, :not_found}
def get_firmware_by_product_and_version(%Org{} = org, product, version) do
Firmware
|> Repo.get_by(org_id: org.id, product: product, version: version)
|> case do
nil -> {:error, :not_found}
firmware -> {:ok, firmware}
end
end
@spec get_firmware_by_uuid(String.t()) :: [Firmware.t()]
def get_firmware_by_uuid(uuid) do
from(
f in Firmware,
where: f.uuid == ^uuid
)
|> Repo.all()
end
@spec get_firmware_by_product_and_uuid(Product.t(), String.t()) ::
{:ok, Firmware.t()}
| {:error, :not_found}
def get_firmware_by_product_and_uuid(%Product{id: product_id}, uuid) do
from(
f in Firmware,
where: f.uuid == ^uuid,
join: p in assoc(f, :product),
preload: [product: p],
where: p.id == ^product_id
)
|> Repo.one()
|> case do
nil -> {:error, :not_found}
firmware -> {:ok, firmware}
end
end
  @spec create_firmware(Org.t(), String.t(), map(), opts :: [{:upload_file_2, upload_file_2()}]) ::
{:ok, Firmware.t()}
| {:error, Changeset.t() | :no_public_keys | :invalid_signature | any}
def create_firmware(org, filepath, params \\ %{}, opts \\ []) do
upload_file_2 = opts[:upload_file_2] || (&@uploader.upload_file/2)
Repo.transaction(
fn ->
with {:ok, params} <- build_firmware_params(org, filepath, params),
params <- set_ttl(org, params),
{:ok, firmware} <- insert_firmware(params),
:ok <- upload_file_2.(filepath, firmware.upload_metadata) do
firmware
else
{:error, error} ->
Logger.error(fn -> "Error while publishing firmware: #{inspect(error)}" end)
Repo.rollback(error)
end
end,
timeout: 30_000
)
end
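  # Illustrative call (the `org` struct, the firmware path and the TTL value
  # are assumptions for the example, not taken from this project's tests):
  #
  #     {:ok, firmware} =
  #       NervesHubWebCore.Firmwares.create_firmware(org, "/tmp/app.fw", %{ttl: 3600})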
def delete_firmware(%Firmware{} = firmware) do
Repo.transaction(fn ->
with {:ok, _} <- firmware |> Firmware.delete_changeset(%{}) |> Repo.delete(),
:ok <- @uploader.delete_file(firmware) do
:ok
else
{:error, error} -> Repo.rollback(error)
end
end)
|> case do
{:ok, _} -> :ok
ret -> ret
end
end
@spec verify_signature(String.t(), [OrgKey.t()]) ::
{:ok, OrgKey.t()}
| {:error, :invalid_signature}
| {:error, :no_public_keys}
def verify_signature(_filepath, []), do: {:error, :no_public_keys}
def verify_signature(filepath, keys) when is_binary(filepath) do
keys
|> Enum.find(fn %{key: key} ->
case System.cmd("fwup", ["--verify", "--public-key", key, "-i", filepath]) do
{_, 0} ->
true
_ ->
false
end
end)
|> case do
%OrgKey{} = key ->
{:ok, key}
nil ->
{:error, :invalid_signature}
end
end
def update_firmware_ttl(nil), do: :ok
def update_firmware_ttl(firmware_id) do
q =
from(f in NervesHubWebCore.Firmwares.Firmware,
left_join: d in NervesHubWebCore.Deployments.Deployment,
on: d.firmware_id == f.id,
where:
f.id == ^firmware_id and
not is_nil(d.firmware_id),
limit: 1
)
case Repo.one(q) do
# Firmware has no associations. Set ttl.
nil ->
case NervesHubWebCore.Repo.get(Firmware, firmware_id) do
%Firmware{ttl_until: nil, ttl: ttl} = firmware ->
ttl_until = DateTime.utc_now() |> Timex.shift(seconds: ttl)
firmware
|> Firmware.update_changeset(%{ttl_until: ttl_until})
|> Repo.update()
:set
_ ->
:noop
end
# Firmware has associations and no ttl has been set.
%Firmware{ttl_until: nil} ->
:noop
# Firmware has associations and is marked for ttl. Unset ttl.
%Firmware{} = firmware ->
firmware
|> Firmware.update_changeset(%{ttl_until: nil})
|> Repo.update()
:unset
end
end
def get_firmware_by_expired_ttl() do
from(
f in Firmware,
where: f.ttl_until < ^DateTime.utc_now()
)
|> Repo.all()
end
def metadata_from_conn(%Plug.Conn{} = conn) do
params = %{
uuid: get_metadata_req_header(conn, "uuid"),
architecture: get_metadata_req_header(conn, "architecture"),
platform: get_metadata_req_header(conn, "platform"),
product: get_metadata_req_header(conn, "product"),
version: get_metadata_req_header(conn, "version"),
author: get_metadata_req_header(conn, "author"),
description: get_metadata_req_header(conn, "description"),
vcs_identifier: get_metadata_req_header(conn, "vcs-identifier"),
misc: get_metadata_req_header(conn, "misc")
}
metadata_or_firmware(params)
end
def metadata_from_firmware(%Firmware{} = firmware) do
firmware = Repo.preload(firmware, [:product])
metadata = %{
uuid: firmware.uuid,
architecture: firmware.architecture,
platform: firmware.platform,
product: firmware.product.name,
version: firmware.version,
author: firmware.author,
description: firmware.description,
vcs_identifier: firmware.vcs_identifier,
misc: firmware.misc
}
{:ok, metadata}
end
def metadata_from_fwup(firmware_file) do
with {:ok, fwup_metadata} <- get_fwup_metadata(firmware_file),
{:ok, uuid} <- fetch_fwup_metadata_item(fwup_metadata, "meta-uuid"),
{:ok, architecture} <- fetch_fwup_metadata_item(fwup_metadata, "meta-architecture"),
{:ok, platform} <- fetch_fwup_metadata_item(fwup_metadata, "meta-platform"),
{:ok, product} <- fetch_fwup_metadata_item(fwup_metadata, "meta-product"),
{:ok, version} <- fetch_fwup_metadata_item(fwup_metadata, "meta-version"),
author <- get_fwup_metadata_item(fwup_metadata, "meta-author"),
description <- get_fwup_metadata_item(fwup_metadata, "meta-description"),
misc <- get_fwup_metadata_item(fwup_metadata, "meta-misc"),
vcs_identifier <- get_fwup_metadata_item(fwup_metadata, "meta-vcs-identifier") do
metadata = %{
uuid: uuid,
architecture: architecture,
platform: platform,
product: product,
version: version,
author: author,
description: description,
vcs_identifier: vcs_identifier,
misc: misc
}
{:ok, metadata}
end
end
def metadata_from_device(metadata) do
params = %{
uuid: Map.get(metadata, "nerves_fw_uuid"),
architecture: Map.get(metadata, "nerves_fw_architecture"),
platform: Map.get(metadata, "nerves_fw_platform"),
product: Map.get(metadata, "nerves_fw_product"),
version: Map.get(metadata, "nerves_fw_version"),
author: Map.get(metadata, "nerves_fw_author"),
description: Map.get(metadata, "nerves_fw_description"),
vcs_identifier: Map.get(metadata, "nerves_fw_vcs_identifier"),
misc: Map.get(metadata, "nerves_fw_misc")
}
metadata_or_firmware(params)
end
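  # Shape of the device-reported metadata consumed above (the keys come from
  # the code; the values here are purely illustrative):
  #
  #     %{
  #       "nerves_fw_uuid" => "00000000-0000-0000-0000-000000000000",
  #       "nerves_fw_product" => "example_app",
  #       "nerves_fw_version" => "0.1.0"
  #     }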
def create_firmware_transfer(params) do
%FirmwareTransfer{}
|> FirmwareTransfer.changeset(params)
|> Repo.insert()
end
def get_firmware_transfers_by_org_id_between_dates(org_id, from_datetime, to_datetime) do
q =
from(
ft in FirmwareTransfer,
where:
ft.org_id == ^org_id and
ft.timestamp >= ^from_datetime and
ft.timestamp <= ^to_datetime
)
Repo.all(q)
end
# Private functions
defp insert_firmware(params) do
%Firmware{}
|> Firmware.create_changeset(params)
|> Repo.insert()
end
defp build_firmware_params(%{id: org_id} = org, filepath, params) do
org = NervesHubWebCore.Repo.preload(org, :org_keys)
with {:ok, %{id: org_key_id}} <- verify_signature(filepath, org.org_keys),
{:ok, metadata} <- metadata_from_fwup(filepath) do
filename = metadata.uuid <> ".fw"
params =
resolve_product(%{
architecture: metadata.architecture,
author: metadata.author,
description: metadata.description,
filename: filename,
filepath: filepath,
misc: metadata.misc,
org_id: org_id,
org_key_id: org_key_id,
platform: metadata.platform,
product_name: metadata.product,
upload_metadata: @uploader.metadata(org_id, filename),
size: :filelib.file_size(filepath),
ttl: Map.get(params, :ttl),
uuid: metadata.uuid,
vcs_identifier: metadata.vcs_identifier,
version: metadata.version
})
{:ok, params}
end
end
defp set_ttl(%{id: org_id}, params) do
ttl =
case Map.get(params, :ttl) do
ttl when ttl == nil or ttl == "" ->
org_id
|> Accounts.get_org_limit_by_org_id()
|> Map.get(:firmware_ttl_seconds_default)
ttl when is_binary(ttl) ->
String.to_integer(ttl)
ttl ->
ttl
end
ttl_until = DateTime.utc_now() |> Timex.shift(seconds: ttl)
params
|> Map.put(:ttl, ttl)
|> Map.put(:ttl_until, ttl_until)
end
  defp resolve_product(params) do
    with {:ok, product} <-
           Products.get_product_by_org_id_and_name(params.org_id, params.product_name) do
      Map.put(params, :product_id, product.id)
    else
      _ -> params
    end
  end
def metadata_or_firmware(metadata) do
case FirmwareMetadata.changeset(%FirmwareMetadata{}, metadata).valid? do
true ->
{:ok, metadata}
false ->
case Map.get(metadata, :uuid) do
nil ->
{:ok, nil}
uuid ->
case get_firmware_by_uuid(uuid) do
[firmware | _] -> metadata_from_firmware(firmware)
[] -> {:ok, nil}
end
end
end
end
defp get_fwup_metadata(filepath) do
case System.cmd("fwup", ["-m", "-i", filepath]) do
{metadata, 0} ->
{:ok, metadata}
_error ->
{:error}
end
end
@spec fetch_fwup_metadata_item(String.t(), String.t()) ::
{:ok, String.t()} | {:error, {String.t(), :not_found}}
defp fetch_fwup_metadata_item(metadata, key) when is_binary(key) do
{:ok, regex} = "#{key}=\"(?<item>[^\n]+)\"" |> Regex.compile()
case Regex.named_captures(regex, metadata) do
%{"item" => item} -> {:ok, item}
_ -> {:error, {key, :not_found}}
end
end
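  # For reference, the regex above matches `fwup -m` output lines shaped like
  # the following (the values shown are illustrative):
  #
  #     meta-product="example_app"
  #     meta-version="0.1.0"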
@spec get_fwup_metadata_item(String.t(), String.t(), String.t() | nil) :: String.t() | nil
defp get_fwup_metadata_item(metadata, key, default \\ nil) when is_binary(key) do
case fetch_fwup_metadata_item(metadata, key) do
{:ok, metadata_item} -> metadata_item
{:error, {_, :not_found}} -> default
end
end
defp get_metadata_req_header(conn, header) do
case Plug.Conn.get_req_header(conn, "x-nerveshub-#{header}") do
[] -> nil
["" | _] -> nil
[value | _] -> value
end
end
end
| 28.957207 | 98 | 0.608307 |
f7a38b4e6247f4c0c0f367abc9a9e4e8ad16f928 | 409 | exs | Elixir | services/fc_goods/config/dev.exs | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | ["BSD-3-Clause"] | 46 | 2018-10-13T23:18:13.000Z | 2021-08-07T07:46:51.000Z | services/fc_goods/config/dev.exs | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | ["BSD-3-Clause"] | 25 | 2018-10-14T00:56:07.000Z | 2019-12-23T19:41:02.000Z | services/fc_goods/config/dev.exs | fleadope/freshcom | 8d5944befaa6eea8d31e5f5995939be2a1a44262 | ["BSD-3-Clause"] | 5 | 2018-12-16T04:39:51.000Z | 2020-10-01T12:17:03.000Z |
use Mix.Config
config :eventstore, EventStore.Storage,
serializer: FCBase.EventSerializer,
username: System.get_env("EVENTSTORE_DB_USERNAME"),
password: System.get_env("EVENTSTORE_DB_PASSWORD"),
database: "fc_goods_eventstore_dev",
hostname: "localhost",
pool_size: 10
config :fc_state_storage, :adapter, FCStateStorage.RedisAdapter
config :fc_state_storage, :redis, System.get_env("REDIS_URL")
| 31.461538 | 63 | 0.794621 |
f7a38bc17cd936f148fc7e0f1682bc5fb9b951eb | 2,709 | ex | Elixir | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/authentication_rule.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/authentication_rule.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | clients/service_consumer_management/lib/google_api/service_consumer_management/v1/model/authentication_rule.ex | mocknen/elixir-google-api | dac4877b5da2694eca6a0b07b3bd0e179e5f3b70 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule GoogleApi.ServiceConsumerManagement.V1.Model.AuthenticationRule do
@moduledoc """
Authentication rules for the service. By default, if a method has any authentication requirements, every request must include a valid credential matching one of the requirements. It's an error to include more than one kind of credential in a single request. If a method doesn't have any auth requirements, request credentials will be ignored.
## Attributes
- allowWithoutCredential (boolean()): If true, the service accepts API keys without any other credential. Defaults to: `null`.
- oauth (OAuthRequirements): The requirements for OAuth credentials. Defaults to: `null`.
- requirements ([AuthRequirement]): Requirements for additional authentication providers. Defaults to: `null`.
- selector (String.t): Selects the methods to which this rule applies. Refer to selector for syntax details. Defaults to: `null`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:allowWithoutCredential => any(),
:oauth => GoogleApi.ServiceConsumerManagement.V1.Model.OAuthRequirements.t(),
:requirements => list(GoogleApi.ServiceConsumerManagement.V1.Model.AuthRequirement.t()),
:selector => any()
}
field(:allowWithoutCredential)
field(:oauth, as: GoogleApi.ServiceConsumerManagement.V1.Model.OAuthRequirements)
field(
:requirements,
as: GoogleApi.ServiceConsumerManagement.V1.Model.AuthRequirement,
type: :list
)
field(:selector)
end
defimpl Poison.Decoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.AuthenticationRule do
def decode(value, options) do
GoogleApi.ServiceConsumerManagement.V1.Model.AuthenticationRule.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ServiceConsumerManagement.V1.Model.AuthenticationRule do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 43 | 354 | 0.760059 |
f7a3a9b44ea56c086f50d63c2d51e6a62f22771c | 1,471 | ex | Elixir | lib/telegex/marked/rules/italic_rule.ex | Hentioe/telegex_mark | 5c289404cff2c3f8ff4ef1157196886f5b81398c | [
"MIT"
] | 1 | 2020-07-13T00:07:19.000Z | 2020-07-13T00:07:19.000Z | lib/telegex/marked/rules/italic_rule.ex | Hentioe/telegex_mark | 5c289404cff2c3f8ff4ef1157196886f5b81398c | [
"MIT"
] | 12 | 2020-07-10T21:20:35.000Z | 2020-07-26T02:19:51.000Z | lib/telegex/marked/rules/italic_rule.ex | Hentioe/telegex_mark | 5c289404cff2c3f8ff4ef1157196886f5b81398c | [
"MIT"
] | null | null | null | defmodule Telegex.Marked.ItalicRule do
@moduledoc false
# Matching and parsing of italic nodes.
use Telegex.Marked.Rule
@markup "_"
@ntype :italic
@impl true
def match(state) do
%{line: %{src: src, len: len}, pos: pos} = state
prev_char = String.at(src, pos - 1)
next_char = String.at(src, pos + 1)
if ignore_begin?(@markup, String.at(src, pos), prev_char, next_char) do
{:nomatch, state}
else
chars = String.graphemes(String.slice(src, pos + 1, len))
equals_markup_fun = fn {char, index} ->
if char == @markup do
Enum.at(chars, index + 1) != @markup && !escapes_char?(Enum.at(chars, index - 1))
else
false
end
end
end_index =
chars
|> Enum.with_index()
|> Enum.filter(equals_markup_fun)
|> Enum.find(fn {_, index} ->
          # Skip markers that belong to an underline token.
          # If the previous char is also @markup but the one before it is not, do not match (only exactly two @markup chars in a row).
[before_2, before_1] = Enum.slice(chars, (index - 2)..(index - 1))
!(before_1 == @markup && before_2 != @markup)
end)
|> elem_or_nil(1)
|> calculate_end_index(pos)
if end_index do
state = %{state | pos: end_index}
state =
State.push_node(state, %Node{
type: @ntype,
children: children_text(src, pos, end_index)
})
{:match, state}
else
{:nomatch, state}
end
end
end
end
| 24.932203 | 91 | 0.552005 |
f7a3e2121976c35df10b309efe1882bc9db564fb | 2,464 | ex | Elixir | clients/tool_results/lib/google_api/tool_results/v1beta3/model/android_instrumentation_test.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/tool_results/lib/google_api/tool_results/v1beta3/model/android_instrumentation_test.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/tool_results/lib/google_api/tool_results/v1beta3/model/android_instrumentation_test.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.ToolResults.V1beta3.Model.AndroidInstrumentationTest do
@moduledoc """
A test of an Android application that can control an Android component independently of its normal lifecycle.
See for more information on types of Android tests.
## Attributes
* `testPackageId` (*type:* `String.t`, *default:* `nil`) - The java package for the test to be executed. Required
* `testRunnerClass` (*type:* `String.t`, *default:* `nil`) - The InstrumentationTestRunner class. Required
* `testTargets` (*type:* `list(String.t)`, *default:* `nil`) - Each target must be fully qualified with the package name or class name, in one of these formats: - "package package_name" - "class package_name.class_name" - "class package_name.class_name#method_name"
If empty, all targets in the module will be run.
* `useOrchestrator` (*type:* `boolean()`, *default:* `nil`) - The flag indicates whether Android Test Orchestrator will be used to run test or not.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:testPackageId => String.t(),
:testRunnerClass => String.t(),
:testTargets => list(String.t()),
:useOrchestrator => boolean()
}
field(:testPackageId)
field(:testRunnerClass)
field(:testTargets, type: :list)
field(:useOrchestrator)
end
defimpl Poison.Decoder, for: GoogleApi.ToolResults.V1beta3.Model.AndroidInstrumentationTest do
def decode(value, options) do
GoogleApi.ToolResults.V1beta3.Model.AndroidInstrumentationTest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ToolResults.V1beta3.Model.AndroidInstrumentationTest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.066667 | 269 | 0.732549 |
f7a3e462838455bfb9ca84ad04092610bc16b750 | 1,922 | ex | Elixir | clients/storage/lib/google_api/storage/v1/model/buckets.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/storage/lib/google_api/storage/v1/model/buckets.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null | clients/storage/lib/google_api/storage/v1/model/buckets.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Storage.V1.Model.Buckets do
@moduledoc """
A list of buckets.
## Attributes
* `items` (*type:* `list(GoogleApi.Storage.V1.Model.Bucket.t)`, *default:* `nil`) - The list of items.
* `kind` (*type:* `String.t`, *default:* `storage#buckets`) - The kind of item this is. For lists of buckets, this is always storage#buckets.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - The continuation token, used to page through large result sets. Provide this value in a subsequent request to return the next page of results.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:items => list(GoogleApi.Storage.V1.Model.Bucket.t()),
:kind => String.t(),
:nextPageToken => String.t()
}
field(:items, as: GoogleApi.Storage.V1.Model.Bucket, type: :list)
field(:kind)
field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.Storage.V1.Model.Buckets do
def decode(value, options) do
GoogleApi.Storage.V1.Model.Buckets.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Storage.V1.Model.Buckets do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.264151 | 205 | 0.712799 |
f7a3f0542796c70a8b7602181f637880946b1474 | 14,834 | ex | Elixir | lib/surface/api.ex | mazz-seven/surface | 2e2b027ffd8432478dd7cd9b83d4e6e32153fcbd | ["MIT"] | null | null | null | lib/surface/api.ex | mazz-seven/surface | 2e2b027ffd8432478dd7cd9b83d4e6e32153fcbd | ["MIT"] | null | null | null | lib/surface/api.ex | mazz-seven/surface | 2e2b027ffd8432478dd7cd9b83d4e6e32153fcbd | ["MIT"] | null | null | null |
defmodule Surface.API do
@moduledoc false
alias Surface.IOHelper
@types [
:any,
:css_class,
:list,
:event,
:boolean,
:string,
:time,
:date,
:datetime,
:naive_datetime,
:number,
:integer,
:decimal,
:map,
:fun,
:atom,
:module,
:changeset,
:form,
:keyword,
:struct,
:tuple,
:pid,
:port,
:reference,
:bitstring,
:range,
:mapset,
:regex,
:uri,
:path,
# Private
:generator,
:context_put,
:context_get
]
defmacro __using__(include: include) do
arities = %{
prop: [2, 3],
slot: [1, 2],
data: [2, 3]
}
functions = for func <- include, arity <- arities[func], into: [], do: {func, arity}
quote do
import unquote(__MODULE__), only: unquote(functions)
@before_compile unquote(__MODULE__)
@after_compile unquote(__MODULE__)
Module.register_attribute(__MODULE__, :assigns, accumulate: false)
# Any caller component can hold other components with slots
Module.register_attribute(__MODULE__, :assigned_slots_by_parent, accumulate: false)
Module.put_attribute(__MODULE__, :use_context?, false)
for func <- unquote(include) do
Module.register_attribute(__MODULE__, func, accumulate: true)
end
end
end
defmacro __before_compile__(env) do
generate_docs(env)
[
quoted_prop_funcs(env),
quoted_slot_funcs(env),
quoted_data_funcs(env),
quoted_context_funcs(env)
]
end
def __after_compile__(env, _) do
if function_exported?(env.module, :__slots__, 0) do
validate_slot_props_bindings!(env)
end
end
@doc "Defines a property for the component"
defmacro prop(name_ast, type_ast, opts_ast \\ []) do
build_assign_ast(:prop, name_ast, type_ast, opts_ast, __CALLER__)
end
@doc "Defines a slot for the component"
defmacro slot(name_ast, opts_ast \\ []) do
build_assign_ast(:slot, name_ast, :any, opts_ast, __CALLER__)
end
@doc "Defines a data assign for the component"
defmacro data(name_ast, type_ast, opts_ast \\ []) do
build_assign_ast(:data, name_ast, type_ast, opts_ast, __CALLER__)
end
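  # Illustrative use of these macros inside a component module (the names and
  # defaults are examples only):
  #
  #     prop label, :string, required: true
  #     slot default
  #     data expanded?, :boolean, default: false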
@doc false
def put_assign!(caller, func, name, type, opts, opts_ast, line) do
Surface.API.validate!(func, name, type, opts, caller)
assign = %{
func: func,
name: name,
type: type,
doc: pop_doc(caller.module),
opts: opts,
opts_ast: opts_ast,
line: line
}
assigns = Module.get_attribute(caller.module, :assigns) || %{}
name = Keyword.get(assign.opts, :as, assign.name)
existing_assign = assigns[name]
if existing_assign do
component_type = Module.get_attribute(caller.module, :component_type)
builtin_assign? = name in Surface.Compiler.Helpers.builtin_assigns_by_type(component_type)
details = existing_assign_details_message(builtin_assign?, existing_assign)
message = ~s(cannot use name "#{assign.name}". #{details}.)
IOHelper.compile_error(message, caller.file, assign.line)
else
assigns = Map.put(assigns, name, assign)
Module.put_attribute(caller.module, :assigns, assigns)
end
Module.put_attribute(caller.module, assign.func, assign)
end
defp existing_assign_details_message(true = _builtin?, %{func: func}) do
"There's already a built-in #{func} assign with the same name"
end
defp existing_assign_details_message(false = _builtin?, %{func: func, line: line})
when func == :slot do
"""
There's already a #{func} assign with the same name at line #{line}.
You could use the optional ':as' option in slot macro to name the related assigns.
"""
end
defp existing_assign_details_message(false = _builtin?, %{func: func, line: line}) do
"There's already a #{func} assign with the same name at line #{line}"
end
@doc false
def get_assigns(module) do
if Module.open?(module) do
module
|> Module.get_attribute(:assigns)
|> Kernel.||(%{})
|> Enum.map(fn {name, %{line: line}} -> {name, line} end)
else
data = if function_exported?(module, :__data__, 0), do: module.__data__(), else: []
props = if function_exported?(module, :__props__, 0), do: module.__props__(), else: []
slots = if function_exported?(module, :__slots__, 0), do: module.__slots__(), else: []
Enum.map(data ++ props ++ slots, fn %{name: name, line: line} -> {name, line} end)
end
end
@doc false
def get_slots(module) do
Module.get_attribute(module, :slot) || []
end
@doc false
def get_props(module) do
Module.get_attribute(module, :prop) || []
end
@doc false
def get_data(module) do
Module.get_attribute(module, :data) || []
end
@doc false
def get_defaults(module) do
for %{name: name, opts: opts} <- get_data(module), Keyword.has_key?(opts, :default) do
{name, opts[:default]}
end
end
defp quoted_data_funcs(env) do
data = get_data(env.module)
quote do
@doc false
def __data__() do
unquote(Macro.escape(data))
end
end
end
defp quoted_prop_funcs(env) do
props =
env.module
|> get_props()
|> sort_props()
props_names = for p <- props, do: p.name
props_by_name = for p <- props, into: %{}, do: {p.name, p}
required_props_names = for %{name: name, opts: opts} <- props, opts[:required], do: name
quote do
@doc false
def __props__() do
unquote(Macro.escape(props))
end
@doc false
def __validate_prop__(prop) do
prop in unquote(props_names)
end
@doc false
def __get_prop__(name) do
Map.get(unquote(Macro.escape(props_by_name)), name)
end
@doc false
def __required_props_names__() do
unquote(Macro.escape(required_props_names))
end
end
end
@doc false
def sort_props(props) when is_list(props) do
Enum.sort_by(props, &{&1.name != :id, !&1.opts[:required], &1.line})
end
defp quoted_slot_funcs(env) do
slots = env.module |> get_slots() |> Enum.uniq_by(& &1.name)
slots_names = Enum.map(slots, fn slot -> slot.name end)
slots_by_name = for p <- slots, into: %{}, do: {p.name, p}
required_slots_names =
for %{name: name, opts: opts} <- slots, opts[:required] do
name
end
assigned_slots_by_parent = Module.get_attribute(env.module, :assigned_slots_by_parent) || %{}
quote do
@doc false
def __slots__() do
unquote(Macro.escape(slots))
end
@doc false
def __validate_slot__(prop) do
prop in unquote(slots_names)
end
@doc false
def __get_slot__(name) do
Map.get(unquote(Macro.escape(slots_by_name)), name)
end
@doc false
def __assigned_slots_by_parent__() do
unquote(Macro.escape(assigned_slots_by_parent))
end
@doc false
def __required_slots_names__() do
unquote(Macro.escape(required_slots_names))
end
end
end
defp quoted_context_funcs(env) do
use_context? = Module.get_attribute(env.module, :use_context?)
quote do
@doc false
def __use_context__?() do
unquote(use_context?)
end
end
end
def validate!(func, name, type, opts, caller) do
with :ok <- validate_type(func, name, type),
:ok <- validate_opts_keys(func, name, type, opts),
:ok <- validate_opts(func, type, opts) do
maybe_warn_mutually_exclusive_opts(func, type, opts, caller)
:ok
else
{:error, message} ->
file = Path.relative_to_cwd(caller.file)
IOHelper.compile_error(message, file, caller.line)
end
end
defp validate_name_ast!(_func, {name, meta, context}, _caller)
when is_atom(name) and is_list(meta) and is_atom(context) do
name
end
defp validate_name_ast!(func, name_ast, caller) do
message = """
invalid #{func} name. Expected a variable name, got: #{Macro.to_string(name_ast)}\
"""
IOHelper.compile_error(message, caller.file, caller.line)
end
defp validate_type(_func, _name, type) when type in @types do
:ok
end
defp validate_type(func, name, type) do
message = """
invalid type #{Macro.to_string(type)} for #{func} #{name}.
Expected one of #{inspect(@types)}.
Hint: Use :any if the type is not listed.\
"""
{:error, message}
end
defp validate_opts_keys(func, name, type, opts) do
with true <- Keyword.keyword?(opts),
keys <- Keyword.keys(opts),
valid_opts <- get_valid_opts(func, type, opts),
[] <- keys -- valid_opts do
:ok
else
false ->
{:error,
"invalid options for #{func} #{name}. " <>
"Expected a keyword list of options, got: #{inspect(opts)}"}
unknown_options ->
valid_opts = get_valid_opts(func, type, opts)
{:error, unknown_options_message(valid_opts, unknown_options)}
end
end
defp validate_opts_ast!(func, opts, caller) when is_list(opts) do
if Keyword.keyword?(opts) do
for {key, value} <- opts do
{key, validate_opt_ast!(func, key, value, caller)}
end
else
opts
end
end
defp validate_opts_ast!(_func, opts, _caller) do
opts
end
defp validate_opts(func, type, opts) do
Enum.reduce_while(opts, :ok, fn {key, value}, _acc ->
case validate_opt(func, type, key, value) do
:ok ->
{:cont, :ok}
error ->
{:halt, error}
end
end)
end
defp maybe_warn_mutually_exclusive_opts(:prop, _, opts, caller) do
if Keyword.get(opts, :required, false) and Keyword.has_key?(opts, :default) do
IOHelper.warn(
"setting a default value on a required prop has no effect. Either set the default value or set the prop as required, but not both.",
caller,
fn _ -> caller.line end
)
end
end
defp maybe_warn_mutually_exclusive_opts(_, _, _, _), do: nil
defp get_valid_opts(:prop, _type, _opts) do
[:required, :default, :values, :accumulate]
end
defp get_valid_opts(:data, _type, _opts) do
[:default, :values]
end
defp get_valid_opts(:slot, _type, _opts) do
[:required, :props, :as]
end
defp validate_opt_ast!(:slot, :props, args_ast, caller) do
Enum.map(args_ast, fn
{name, {:^, _, [{generator, _, context}]}} when context in [Elixir, nil] ->
Macro.escape(%{name: name, generator: generator})
name when is_atom(name) ->
Macro.escape(%{name: name, generator: nil})
ast ->
message =
"invalid slot prop #{Macro.to_string(ast)}. " <>
"Expected an atom or a binding to a generator as `key: ^property_name`"
IOHelper.compile_error(message, caller.file, caller.line)
end)
end
defp validate_opt_ast!(_func, _key, value, _caller) do
value
end
defp validate_opt(_func, _type, :required, value) when not is_boolean(value) do
{:error, "invalid value for option :required. Expected a boolean, got: #{inspect(value)}"}
end
defp validate_opt(_func, _type, :values, value) when not is_list(value) do
{:error,
"invalid value for option :values. Expected a list of values, got: #{inspect(value)}"}
end
defp validate_opt(:prop, _type, :accumulate, value) when not is_boolean(value) do
{:error, "invalid value for option :accumulate. Expected a boolean, got: #{inspect(value)}"}
end
defp validate_opt(:slot, _type, :as, value) when not is_atom(value) do
{:error, "invalid value for option :as in slot. Expected an atom, got: #{inspect(value)}"}
end
defp validate_opt(_func, _type, _key, _value) do
:ok
end
defp unknown_options_message(valid_opts, unknown_options) do
{plural, unknown_items} =
case unknown_options do
[option] ->
{"", option}
_ ->
{"s", unknown_options}
end
"""
unknown option#{plural} #{inspect(unknown_items)}. \
Available options: #{inspect(valid_opts)}\
"""
end
defp format_opts(opts_ast) do
opts_ast
|> Macro.to_string()
|> String.slice(1..-2)
end
defp generate_docs(env) do
case Module.get_attribute(env.module, :moduledoc) do
{_line, false} ->
:ok
nil ->
props_doc = generate_props_docs(env.module)
Module.put_attribute(env.module, :moduledoc, {env.line, props_doc})
{line, doc} ->
props_doc = generate_props_docs(env.module)
Module.put_attribute(env.module, :moduledoc, {line, doc <> "\n" <> props_doc})
end
end
defp generate_props_docs(module) do
docs =
for prop <- get_props(module) do
doc = if prop.doc, do: " - #{prop.doc}.", else: ""
opts = if prop.opts == [], do: "", else: ", #{format_opts(prop.opts_ast)}"
"* **#{prop.name}** *#{inspect(prop.type)}#{opts}*#{doc}"
end
|> Enum.reverse()
|> Enum.join("\n")
"""
### Properties
#{docs}
"""
end
defp validate_slot_props_bindings!(env) do
for slot <- env.module.__slots__(),
slot_props = Keyword.get(slot.opts, :props, []),
%{name: name, generator: generator} <- slot_props,
generator != nil do
case env.module.__get_prop__(generator) do
nil ->
existing_properties_names = env.module.__props__() |> Enum.map(& &1.name)
message = """
cannot bind slot prop `#{name}` to property `#{generator}`. \
Expected an existing property after `^`, \
got: an undefined property `#{generator}`.
Hint: Available properties are #{inspect(existing_properties_names)}\
"""
IOHelper.compile_error(message, env.file, slot.line)
%{type: type} when type != :list ->
message = """
cannot bind slot prop `#{name}` to property `#{generator}`. \
Expected a property of type :list after `^`, \
got: a property of type #{inspect(type)}\
"""
IOHelper.compile_error(message, env.file, slot.line)
_ ->
:ok
end
end
:ok
end
defp pop_doc(module) do
doc =
case Module.get_attribute(module, :doc) do
{_, doc} -> doc
_ -> nil
end
Module.delete_attribute(module, :doc)
doc
end
defp build_assign_ast(func, name_ast, type_ast, opts_ast, caller) do
quote bind_quoted: [
func: func,
name: validate_name_ast!(func, name_ast, caller),
type: type_ast,
opts: validate_opts_ast!(func, opts_ast, caller),
opts_ast: Macro.escape(opts_ast),
line: caller.line
] do
Surface.API.put_assign!(__ENV__, func, name, type, opts, opts_ast, line)
end
end
end
| 26.776173 | 140 | 0.621613 |
f7a4204ae2fc604027ee08085f730d1026a79ddb | 141 | exs | Elixir | .formatter.exs | seantanly/elixir-combination | 58756ca10f8f30bd3b3c923e8f893b0e424b4715 | ["MIT"] | 23 | 2016-01-16T20:45:39.000Z | 2021-11-26T15:33:00.000Z | .formatter.exs | seantanly/elixir-combination | 58756ca10f8f30bd3b3c923e8f893b0e424b4715 | ["MIT"] | null | null | null | .formatter.exs | seantanly/elixir-combination | 58756ca10f8f30bd3b3c923e8f893b0e424b4715 | ["MIT"] | 2 | 2016-01-15T10:43:07.000Z | 2021-06-24T22:27:44.000Z |
[
inputs: [
"mix.exs",
"{config,lib,test}/**/*.{ex,exs}",
"apps/*/mix.exs",
"apps/*/{config,lib,test}/**/*.{ex,exs}"
]
]
| 15.666667 | 44 | 0.439716 |
f7a42b6f479ee2fd71fb8c13bd0c6bb5e663adc7 | 761 | exs | Elixir | priv/repo/migrations/20201220184905_create_users_auth_tables.exs | AminArria/sponsorly | fa78ead63076a54cb1cb1f9d4f4c5fd7a4a78fac | ["MIT"] | null | null | null | priv/repo/migrations/20201220184905_create_users_auth_tables.exs | AminArria/sponsorly | fa78ead63076a54cb1cb1f9d4f4c5fd7a4a78fac | ["MIT"] | null | null | null | priv/repo/migrations/20201220184905_create_users_auth_tables.exs | AminArria/sponsorly | fa78ead63076a54cb1cb1f9d4f4c5fd7a4a78fac | ["MIT"] | null | null | null |
defmodule Sponsorly.Repo.Migrations.CreateUsersAuthTables do
use Ecto.Migration
def change do
execute "CREATE EXTENSION IF NOT EXISTS citext", ""
create table(:users) do
add :email, :citext, null: false
add :hashed_password, :string, null: false
add :confirmed_at, :naive_datetime
timestamps()
end
create unique_index(:users, [:email])
create table(:users_tokens) do
add :user_id, references(:users, on_delete: :delete_all), null: false
add :token, :binary, null: false
add :context, :string, null: false
add :sent_to, :string
timestamps(updated_at: false)
end
create index(:users_tokens, [:user_id])
create unique_index(:users_tokens, [:context, :token])
end
end
| 27.178571 | 75 | 0.674113 |
f7a4737204bec1073ee5f419c216d441ab6e1259 | 644 | exs | Elixir | mix.exs | TheDragonProject/webhooks | 5e0fc3fd53e439321ab65a99f06b31596ddce8ed | ["MIT"] | null | null | null | mix.exs | TheDragonProject/webhooks | 5e0fc3fd53e439321ab65a99f06b31596ddce8ed | ["MIT"] | null | null | null | mix.exs | TheDragonProject/webhooks | 5e0fc3fd53e439321ab65a99f06b31596ddce8ed | ["MIT"] | null | null | null |
defmodule Webhooks.MixProject do
use Mix.Project
def project do
[
app: :webhooks,
version: "0.1.0",
elixir: "~> 1.6",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger],
mod: {Webhooks.Application, []}
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:cowboy, "~> 1.1"},
{:plug, "~> 1.4.5"},
{:poison, "~> 3.1.0"},
{:redix, "~> 0.7.0"},
{:distillery, "~> 1.5.2", runtime: false}
]
end
end
| 19.515152 | 59 | 0.526398 |
f7a47a30ef65ff8b655e252121ff72e68e324158 | 1,223 | ex | Elixir | web/views/error_helpers.ex | selfup/unigen_visualizer | 8c80290e71ff485f815929a0fe7269034345fea4 | ["MIT"] | null | null | null | web/views/error_helpers.ex | selfup/unigen_visualizer | 8c80290e71ff485f815929a0fe7269034345fea4 | ["MIT"] | null | null | null | web/views/error_helpers.ex | selfup/unigen_visualizer | 8c80290e71ff485f815929a0fe7269034345fea4 | ["MIT"] | null | null | null |
defmodule UniApi.ErrorHelpers do
@moduledoc """
Conveniences for translating and building error messages.
"""
use Phoenix.HTML
@doc """
Generates tag for inlined form input errors.
"""
def error_tag(form, field) do
if error = form.errors[field] do
content_tag :span, translate_error(error), class: "help-block"
end
end
@doc """
Translates an error message using gettext.
"""
def translate_error({msg, opts}) do
# Because error messages were defined within Ecto, we must
# call the Gettext module passing our Gettext backend. We
# also use the "errors" domain as translations are placed
# in the errors.po file.
# Ecto will pass the :count keyword if the error message is
# meant to be pluralized.
# On your own code and templates, depending on whether you
# need the message to be pluralized or not, this could be
# written simply as:
#
# dngettext "errors", "1 file", "%{count} files", count
# dgettext "errors", "is invalid"
#
if count = opts[:count] do
Gettext.dngettext(UniApi.Gettext, "errors", msg, msg, count, opts)
else
Gettext.dgettext(UniApi.Gettext, "errors", msg, opts)
end
end
end
| 29.829268 | 72 | 0.666394 |
f7a4b8e689612118d0dd3925c7bcfe830851449c | 16,985 | exs | Elixir | lib/elixir/test/elixir/inspect_test.exs | lytedev/elixir | dc25bb8e1484e2328eef819402d268dec7bb908a | ["Apache-2.0"] | 1 | 2018-08-08T12:15:48.000Z | 2018-08-08T12:15:48.000Z | lib/elixir/test/elixir/inspect_test.exs | lytedev/elixir | dc25bb8e1484e2328eef819402d268dec7bb908a | ["Apache-2.0"] | null | null | null | lib/elixir/test/elixir/inspect_test.exs | lytedev/elixir | dc25bb8e1484e2328eef819402d268dec7bb908a | ["Apache-2.0"] | null | null | null |
Code.require_file("test_helper.exs", __DIR__)
defmodule Inspect.AtomTest do
use ExUnit.Case, async: true
doctest Inspect
test "basic" do
assert inspect(:foo) == ":foo"
end
test "empty" do
assert inspect(:"") == ":\"\""
end
test "true, false, nil" do
assert inspect(false) == "false"
assert inspect(true) == "true"
assert inspect(nil) == "nil"
end
test "with uppercase letters" do
assert inspect(:fOO) == ":fOO"
assert inspect(:FOO) == ":FOO"
end
test "aliases" do
assert inspect(Foo) == "Foo"
assert inspect(Foo.Bar) == "Foo.Bar"
assert inspect(Elixir) == "Elixir"
assert inspect(Elixir.Foo) == "Foo"
assert inspect(Elixir.Elixir) == "Elixir.Elixir"
assert inspect(Elixir.Elixir.Foo) == "Elixir.Elixir.Foo"
end
test "with integers" do
assert inspect(User1) == "User1"
assert inspect(:user1) == ":user1"
end
test "with trailing ? or !" do
assert inspect(:foo?) == ":foo?"
assert inspect(:bar!) == ":bar!"
assert inspect(:Foo?) == ":Foo?"
end
test "operators" do
assert inspect(:+) == ":+"
assert inspect(:<~) == ":<~"
assert inspect(:~>) == ":~>"
assert inspect(:&&&) == ":&&&"
assert inspect(:~~~) == ":~~~"
assert inspect(:<<~) == ":<<~"
assert inspect(:~>>) == ":~>>"
assert inspect(:<~>) == ":<~>"
assert inspect(:<|>) == ":<|>"
end
test "with @" do
assert inspect(:@) == ":@"
assert inspect(:foo@bar) == ":foo@bar"
assert inspect(:foo@bar@) == ":foo@bar@"
assert inspect(:foo@bar@baz) == ":foo@bar@baz"
end
test "others" do
assert inspect(:...) == ":..."
assert inspect(:<<>>) == ":<<>>"
assert inspect(:{}) == ":{}"
assert inspect(:%{}) == ":%{}"
assert inspect(:%) == ":%"
assert inspect(:->) == ":->"
end
test "escaping" do
assert inspect(:"hy-phen") == ~s(:"hy-phen")
assert inspect(:"@hello") == ~s(:"@hello")
assert inspect(:"Wat!?") == ~s(:"Wat!?")
assert inspect(:"'quotes' and \"double quotes\"") == ~S(:"'quotes' and \"double quotes\"")
end
test "colors" do
opts = [syntax_colors: [atom: :red]]
assert inspect(:hello, opts) == "\e[31m:hello\e[0m"
opts = [syntax_colors: [reset: :cyan]]
assert inspect(:hello, opts) == ":hello"
end
test "unicode" do
assert inspect(:olá) == ":olá"
assert inspect(:Olá) == ":Olá"
assert inspect(:Ólá) == ":Ólá"
assert inspect(:こんにちは世界) == ":こんにちは世界"
nfd = :unicode.characters_to_nfd_binary("olá")
assert inspect(String.to_atom(nfd)) == ":\"#{nfd}\""
end
end
defmodule Inspect.BitStringTest do
use ExUnit.Case, async: true
test "bitstring" do
assert inspect(<<1::12-integer-signed>>) == "<<0, 1::size(4)>>"
assert inspect(<<1, 2, 3, 4, 5>>, pretty: true, width: 10) == "<<1, 2, 3,\n 4, 5>>"
end
test "binary" do
assert inspect("foo") == "\"foo\""
assert inspect(<<?a, ?b, ?c>>) == "\"abc\""
end
test "escaping" do
assert inspect("f\no") == "\"f\\no\""
assert inspect("f\\o") == "\"f\\\\o\""
assert inspect("f\ao") == "\"f\\ao\""
assert inspect("\a\b\d\e\f\n\r\s\t\v") == "\"\\a\\b\\d\\e\\f\\n\\r \\t\\v\""
end
test "UTF-8" do
assert inspect(" ゆんゆん") == "\" ゆんゆん\""
# BOM
assert inspect("\uFEFFhello world") == "\"\\uFEFFhello world\""
end
test "infer" do
assert inspect(<<"john", 193, "doe">>, binaries: :infer) ==
~s(<<106, 111, 104, 110, 193, 100, 111, 101>>)
assert inspect(<<"john">>, binaries: :infer) == ~s("john")
assert inspect(<<193>>, binaries: :infer) == ~s(<<193>>)
end
test "as strings" do
assert inspect(<<"john", 193, "doe">>, binaries: :as_strings) == ~s("john\\xC1doe")
assert inspect(<<"john">>, binaries: :as_strings) == ~s("john")
assert inspect(<<193>>, binaries: :as_strings) == ~s("\\xC1")
end
test "as binaries" do
assert inspect(<<"john", 193, "doe">>, binaries: :as_binaries) ==
"<<106, 111, 104, 110, 193, 100, 111, 101>>"
assert inspect(<<"john">>, binaries: :as_binaries) == "<<106, 111, 104, 110>>"
assert inspect(<<193>>, binaries: :as_binaries) == "<<193>>"
# Any base other than :decimal implies "binaries: :as_binaries"
assert inspect("abc", base: :hex) == "<<0x61, 0x62, 0x63>>"
assert inspect("abc", base: :octal) == "<<0o141, 0o142, 0o143>>"
# Size is still represented as decimal
assert inspect(<<10, 11, 12::4>>, base: :hex) == "<<0xA, 0xB, 0xC::size(4)>>"
end
test "unprintable with limit" do
assert inspect(<<193, 193, 193, 193>>, limit: 3) == "<<193, 193, 193, ...>>"
end
test "printable limit" do
assert inspect("hello world", printable_limit: 4) == ~s("hell" <> ...)
# Non-printable characters after the limit don't matter
assert inspect("hello world" <> <<0>>, printable_limit: 4) == ~s("hell" <> ...)
# Non printable strings aren't affected by printable limit
assert inspect(<<0, 1, 2, 3, 4>>, printable_limit: 3) == ~s(<<0, 1, 2, 3, 4>>)
end
end
defmodule Inspect.NumberTest do
use ExUnit.Case, async: true
test "integer" do
assert inspect(100) == "100"
end
test "decimal" do
assert inspect(100, base: :decimal) == "100"
end
test "hex" do
assert inspect(100, base: :hex) == "0x64"
assert inspect(-100, base: :hex) == "-0x64"
end
test "octal" do
assert inspect(100, base: :octal) == "0o144"
assert inspect(-100, base: :octal) == "-0o144"
end
test "binary" do
assert inspect(86, base: :binary) == "0b1010110"
assert inspect(-86, base: :binary) == "-0b1010110"
end
test "float" do
assert inspect(1.0) == "1.0"
assert inspect(1.0e10) == "1.0e10"
assert inspect(1.0e10) == "1.0e10"
assert inspect(1.0e-10) == "1.0e-10"
end
test "integer colors" do
opts = [syntax_colors: [number: :red]]
assert inspect(123, opts) == "\e[31m123\e[0m"
opts = [syntax_colors: [reset: :cyan]]
assert inspect(123, opts) == "123"
end
test "float colors" do
opts = [syntax_colors: [number: :red]]
assert inspect(1.3, opts) == "\e[31m1.3\e[0m"
opts = [syntax_colors: [reset: :cyan]]
assert inspect(1.3, opts) == "1.3"
end
end
defmodule Inspect.TupleTest do
use ExUnit.Case
test "basic" do
assert inspect({1, "b", 3}) == "{1, \"b\", 3}"
assert inspect({1, "b", 3}, pretty: true, width: 1) == "{1,\n \"b\",\n 3}"
assert inspect({1, "b", 3}, pretty: true, width: 10) == "{1, \"b\",\n 3}"
end
test "empty" do
assert inspect({}) == "{}"
end
test "with limit" do
assert inspect({1, 2, 3, 4}, limit: 3) == "{1, 2, 3, ...}"
end
test "colors" do
opts = [syntax_colors: []]
assert inspect({}, opts) == "{}"
opts = [syntax_colors: [reset: :cyan]]
assert inspect({}, opts) == "{}"
assert inspect({:x, :y}, opts) == "{:x, :y}"
opts = [syntax_colors: [reset: :cyan, atom: :red]]
assert inspect({}, opts) == "{}"
assert inspect({:x, :y}, opts) == "{\e[31m:x\e[36m, \e[31m:y\e[36m}"
opts = [syntax_colors: [tuple: :green, reset: :cyan, atom: :red]]
assert inspect({}, opts) == "\e[32m{\e[36m\e[32m}\e[36m"
assert inspect({:x, :y}, opts) ==
"\e[32m{\e[36m\e[31m:x\e[36m\e[32m,\e[36m \e[31m:y\e[36m\e[32m}\e[36m"
end
end
defmodule Inspect.ListTest do
use ExUnit.Case, async: true
test "basic" do
assert inspect([1, "b", 3]) == "[1, \"b\", 3]"
assert inspect([1, "b", 3], pretty: true, width: 1) == "[1,\n \"b\",\n 3]"
end
test "printable" do
assert inspect('abc') == "'abc'"
end
test "printable limit" do
assert inspect('hello world', printable_limit: 4) == ~s('hell' ++ ...)
# Non printable characters after the limit don't matter
assert inspect('hello world' ++ [0], printable_limit: 4) == ~s('hell' ++ ...)
# Non printable strings aren't affected by printable limit
assert inspect([0, 1, 2, 3, 4], printable_limit: 3) == ~s([0, 1, 2, 3, 4])
end
test "keyword" do
assert inspect(a: 1) == "[a: 1]"
assert inspect(a: 1, b: 2) == "[a: 1, b: 2]"
assert inspect(a: 1, a: 2, b: 2) == "[a: 1, a: 2, b: 2]"
assert inspect("123": 1) == ~s(["123": 1])
assert inspect([foo: [1, 2, 3], baz: [4, 5, 6]], pretty: true, width: 20) ==
"[\n foo: [1, 2, 3],\n baz: [4, 5, 6]\n]"
end
test "opt infer" do
assert inspect('john' ++ [0] ++ 'doe', charlists: :infer) ==
"[106, 111, 104, 110, 0, 100, 111, 101]"
assert inspect('john', charlists: :infer) == "'john'"
assert inspect([0], charlists: :infer) == "[0]"
end
test "opt as strings" do
assert inspect('john' ++ [0] ++ 'doe', charlists: :as_charlists) == "'john\\0doe'"
assert inspect('john', charlists: :as_charlists) == "'john'"
assert inspect([0], charlists: :as_charlists) == "'\\0'"
end
test "opt as lists" do
assert inspect('john' ++ [0] ++ 'doe', charlists: :as_lists) ==
"[106, 111, 104, 110, 0, 100, 111, 101]"
assert inspect('john', charlists: :as_lists) == "[106, 111, 104, 110]"
assert inspect([0], charlists: :as_lists) == "[0]"
end
test "non printable" do
assert inspect([{:b, 1}, {:a, 1}]) == "[b: 1, a: 1]"
end
test "improper" do
assert inspect([:foo | :bar]) == "[:foo | :bar]"
assert inspect([1, 2, 3, 4, 5 | 42], pretty: true, width: 1) ==
"[1,\n 2,\n 3,\n 4,\n 5 |\n 42]"
end
test "nested" do
assert inspect(Enum.reduce(1..100, [0], &[&2, Integer.to_string(&1)]), limit: 5) ==
"[[[[[[...], ...], \"97\"], \"98\"], \"99\"], \"100\"]"
assert inspect(Enum.reduce(1..100, [0], &[&2 | Integer.to_string(&1)]), limit: 5) ==
"[[[[[[...] | \"96\"] | \"97\"] | \"98\"] | \"99\"] | \"100\"]"
end
test "codepoints" do
assert inspect('é') == "[233]"
end
test "empty" do
assert inspect([]) == "[]"
end
test "with limit" do
assert inspect([1, 2, 3, 4], limit: 3) == "[1, 2, 3, ...]"
end
test "colors" do
opts = [syntax_colors: []]
assert inspect([], opts) == "[]"
opts = [syntax_colors: [reset: :cyan]]
assert inspect([], opts) == "[]"
assert inspect([:x, :y], opts) == "[:x, :y]"
opts = [syntax_colors: [reset: :cyan, atom: :red]]
assert inspect([], opts) == "[]"
assert inspect([:x, :y], opts) == "[\e[31m:x\e[36m, \e[31m:y\e[36m]"
opts = [syntax_colors: [reset: :cyan, atom: :red, list: :green]]
assert inspect([], opts) == "\e[32m[]\e[36m"
assert inspect([:x, :y], opts) ==
"\e[32m[\e[36m\e[31m:x\e[36m\e[32m,\e[36m \e[31m:y\e[36m\e[32m]\e[36m"
end
test "keyword with colors" do
opts = [syntax_colors: [reset: :cyan, list: :green, number: :blue]]
assert inspect([], opts) == "\e[32m[]\e[36m"
assert inspect([a: 9999], opts) == "\e[32m[\e[36ma: \e[34m9999\e[36m\e[32m]\e[36m"
opts = [syntax_colors: [reset: :cyan, atom: :red, list: :green, number: :blue]]
assert inspect([], opts) == "\e[32m[]\e[36m"
assert inspect([a: 9999], opts) == "\e[32m[\e[36m\e[31ma:\e[36m \e[34m9999\e[36m\e[32m]\e[36m"
end
test "limit with colors" do
opts = [limit: 1, syntax_colors: [reset: :cyan, list: :green, atom: :red]]
assert inspect([], opts) == "\e[32m[]\e[36m"
assert inspect([:x, :y], opts) == "\e[32m[\e[36m\e[31m:x\e[36m\e[32m,\e[36m ...\e[32m]\e[36m"
end
end
defmodule Inspect.MapTest do
use ExUnit.Case
test "basic" do
assert inspect(%{1 => "b"}) == "%{1 => \"b\"}"
assert inspect(%{1 => "b", 2 => "c"}, pretty: true, width: 1) ==
"%{\n 1 => \"b\",\n 2 => \"c\"\n}"
end
test "keyword" do
assert inspect(%{a: 1}) == "%{a: 1}"
assert inspect(%{a: 1, b: 2}) == "%{a: 1, b: 2}"
assert inspect(%{a: 1, b: 2, c: 3}) == "%{a: 1, b: 2, c: 3}"
end
test "with limit" do
assert inspect(%{1 => 1, 2 => 2, 3 => 3, 4 => 4}, limit: 3) ==
"%{1 => 1, 2 => 2, 3 => 3, ...}"
end
defmodule Public do
defstruct key: 0
end
defmodule Private do
end
test "public struct" do
assert inspect(%Public{key: 1}) == "%Inspect.MapTest.Public{key: 1}"
end
test "public modified struct" do
public = %Public{key: 1}
assert inspect(Map.put(public, :foo, :bar)) ==
"%{__struct__: Inspect.MapTest.Public, foo: :bar, key: 1}"
end
test "private struct" do
assert inspect(%{__struct__: Private, key: 1}) ==
"%{__struct__: Inspect.MapTest.Private, key: 1}"
end
defmodule Failing do
defstruct key: 0
defimpl Inspect do
def inspect(struct, _) do
struct.unknown
end
end
end
test "bad implementation unsafe" do
msg =
"got KeyError with message \"key :unknown not found in: " <>
"%{__struct__: Inspect.MapTest.Failing, key: 0}\" while " <>
"inspecting %{__struct__: Inspect.MapTest.Failing, key: 0}"
try do
inspect(%Failing{}, safe: false)
rescue
e in Inspect.Error ->
assert Exception.message(e) =~ msg
assert [{Inspect.Inspect.MapTest.Failing, :inspect, 2, _} | _] = __STACKTRACE__
else
_ -> flunk("expected failure")
end
end
test "bad implementation safe" do
msg =
"got KeyError with message \"key :unknown not found in: " <>
"%{__struct__: Inspect.MapTest.Failing, key: 0}\" while " <>
"inspecting %{__struct__: Inspect.MapTest.Failing, key: 0}"
assert inspect(%Failing{}) == inspect(%Inspect.Error{message: "#{msg}"})
end
test "bad implementation safe disables colors" do
msg =
"got KeyError with message \\\"key :unknown not found in: " <>
"%{__struct__: Inspect.MapTest.Failing, key: 0}\\\" while " <>
"inspecting %{__struct__: Inspect.MapTest.Failing, key: 0}"
assert inspect(%Failing{}, syntax_colors: [atom: [:green]]) =~ msg
end
test "exception" do
assert inspect(%RuntimeError{message: "runtime error"}) ==
"%RuntimeError{message: \"runtime error\"}"
end
test "colors" do
opts = [syntax_colors: [reset: :cyan, atom: :red, number: :magenta]]
assert inspect(%{1 => 2}, opts) == "%{\e[35m1\e[36m => \e[35m2\e[36m}"
assert inspect(%{a: 1}, opts) == "%{\e[31ma:\e[36m \e[35m1\e[36m}"
assert inspect(%Public{key: 1}, opts) ==
"%Inspect.MapTest.Public{\e[31mkey:\e[36m \e[35m1\e[36m}"
opts = [syntax_colors: [reset: :cyan, atom: :red, map: :green, number: :blue]]
assert inspect(%{a: 9999}, opts) ==
"\e[32m%{\e[36m" <> "\e[31ma:\e[36m " <> "\e[34m9999\e[36m" <> "\e[32m}\e[36m"
end
end
defmodule Inspect.OthersTest do
use ExUnit.Case, async: true
def fun() do
fn -> :ok end
end
def unquote(:"weirdly named/fun-")() do
fn -> :ok end
end
test "external Elixir funs" do
bin = inspect(&Enum.map/2)
assert bin == "&Enum.map/2"
assert inspect(&__MODULE__."weirdly named/fun-"/0) ==
~s(&Inspect.OthersTest."weirdly named/fun-"/0)
end
test "external Erlang funs" do
bin = inspect(&:lists.map/2)
assert bin == "&:lists.map/2"
end
test "outdated functions" do
defmodule V do
def fun do
fn -> 1 end
end
end
Application.put_env(:elixir, :anony, V.fun())
Application.put_env(:elixir, :named, &V.fun/0)
:code.delete(V)
:code.purge(V)
anony = Application.get_env(:elixir, :anony)
named = Application.get_env(:elixir, :named)
assert inspect(anony) =~ ~r"#Function<0.\d+/0 in Inspect.OthersTest.V>"
assert inspect(named) =~ ~r"&Inspect.OthersTest.V.fun/0"
after
Application.delete_env(:elixir, :anony)
Application.delete_env(:elixir, :named)
end
test "other funs" do
assert "#Function<" <> _ = inspect(fn x -> x + 1 end)
assert "#Function<" <> _ = inspect(fun())
opts = [syntax_colors: []]
assert "#Function<" <> _ = inspect(fun(), opts)
opts = [syntax_colors: [reset: :red]]
assert "#Function<" <> rest = inspect(fun(), opts)
assert String.ends_with?(rest, ">")
inspected = inspect(__MODULE__."weirdly named/fun-"())
assert inspected =~ ~r(#Function<\d+\.\d+/0 in Inspect\.OthersTest\."weirdly named/fun-"/0>)
end
test "map set" do
assert "#MapSet<" <> _ = inspect(MapSet.new())
end
test "PIDs" do
assert "#PID<" <> _ = inspect(self())
opts = [syntax_colors: []]
assert "#PID<" <> _ = inspect(self(), opts)
opts = [syntax_colors: [reset: :cyan]]
assert "#PID<" <> rest = inspect(self(), opts)
assert String.ends_with?(rest, ">")
end
test "references" do
assert "#Reference<" <> _ = inspect(make_ref())
end
test "regex" do
assert inspect(~r(foo)m) == "~r/foo/m"
assert inspect(Regex.compile!("\a\b\d\e\f\n\r\s\t\v/")) ==
"~r/\\a\\x08\\x7F\\x1B\\f\\n\\r \\t\\v\\//"
assert inspect(~r<\a\b\d\e\f\n\r\s\t\v/>) == "~r/\\a\\b\\d\\e\\f\\n\\r\\s\\t\\v\\//"
assert inspect(~r" \\/ ") == "~r/ \\\\\\/ /"
assert inspect(~r/hi/, syntax_colors: [regex: :red]) == "\e[31m~r/hi/\e[0m"
end
end
| 29.487847 | 98 | 0.554784 |
f7a4e34cb5b6794a05e64e5c1265bc16666332fe | 37 | ex | Elixir | apps/panacea_beacon/lib/panacea_beacon/util/beacon.ex | timjp87/panacea | 5edddfa12a8f18b040248b9b186479b9ec8aed51 | [
"MIT"
] | null | null | null | apps/panacea_beacon/lib/panacea_beacon/util/beacon.ex | timjp87/panacea | 5edddfa12a8f18b040248b9b186479b9ec8aed51 | [
"MIT"
] | null | null | null | apps/panacea_beacon/lib/panacea_beacon/util/beacon.ex | timjp87/panacea | 5edddfa12a8f18b040248b9b186479b9ec8aed51 | [
"MIT"
] | null | null | null | defmodule Beacon.Util.Beacon do
end
| 9.25 | 31 | 0.810811 |
f7a4f2a953369ae028cdecdd1ab5e2413d580eb6 | 3,049 | ex | Elixir | lib/unicode/script.ex | elixir-cldr/unicode | 43f7b53a0df4e178098f8e507f596387a37d1e13 | [
"Apache-2.0"
] | 21 | 2019-11-22T02:29:31.000Z | 2021-12-14T01:30:36.000Z | lib/unicode/script.ex | kipcole9/cldr_unicode | 43f7b53a0df4e178098f8e507f596387a37d1e13 | [
"Apache-2.0"
] | 3 | 2020-10-10T22:02:03.000Z | 2021-05-24T18:15:46.000Z | lib/unicode/script.ex | kipcole9/cldr_unicode | 43f7b53a0df4e178098f8e507f596387a37d1e13 | [
"Apache-2.0"
] | 2 | 2020-10-10T14:50:04.000Z | 2021-05-24T16:35:52.000Z | defmodule Unicode.Script do
@moduledoc """
Functions to introspect Unicode
scripts for binaries
(Strings) and codepoints.
"""
@behaviour Unicode.Property.Behaviour
alias Unicode.Utils
@scripts Utils.scripts()
|> Utils.remove_annotations()
@doc """
Returns the map of Unicode
scripts.
The script name is the map
key and a list of codepoint
ranges as tuples as the value.
"""
def scripts do
@scripts
end
@doc """
Returns a list of known Unicode
script names.
This function does not return the
names of any script aliases.
"""
@known_scripts Map.keys(@scripts)
def known_scripts do
@known_scripts
end
@script_alias Utils.property_value_alias()
|> Map.get("sc")
|> Utils.invert_map()
|> Utils.atomize_values()
|> Utils.downcase_keys_and_remove_whitespace()
|> Utils.add_canonical_alias()
@doc """
Returns a map of aliases for
Unicode scripts.
An alias is an alternative name
for referring to a script. Aliases
are resolved by the `fetch/1` and
`get/1` functions.
"""
@impl Unicode.Property.Behaviour
def aliases do
@script_alias
end
@doc """
Returns the Unicode ranges for
a given script as a list of
ranges as 2-tuples.
Aliases are resolved by this function.
Returns either `{:ok, range_list}` or
`:error`.
"""
@impl Unicode.Property.Behaviour
def fetch(script) when is_atom(script) do
Map.fetch(scripts(), script)
end
def fetch(script) do
script = Utils.downcase_and_remove_whitespace(script)
script = Map.get(aliases(), script, script)
Map.fetch(scripts(), script)
end
@doc """
Returns the Unicode ranges for
a given script as a list of
ranges as 2-tuples.
Aliases are resolved by this function.
Returns either `range_list` or
`nil`.
"""
@impl Unicode.Property.Behaviour
def get(script) do
case fetch(script) do
{:ok, script} -> script
_ -> nil
end
end
@doc """
Returns the count of the number of characters
for a given script.
## Example
iex> Unicode.Script.count("mongolian")
168
"""
@impl Unicode.Property.Behaviour
def count(script) do
with {:ok, script} <- fetch(script) do
Enum.reduce(script, 0, fn {from, to}, acc -> acc + to - from + 1 end)
end
end
@doc """
Returns the script name(s) for the
given binary or codepoint.
In the case of a codepoint, a single
script name is returned.
For a binary a list of distinct script
names represented by the graphemes in
the binary is returned.
"""
def script(string) when is_binary(string) do
string
|> String.to_charlist()
|> Enum.map(&script/1)
|> Enum.uniq()
end
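  # Illustrative calls (the exact script identifiers are whatever keys the
  # bundled Unicode data provides via `scripts/0`):
  #
  #     Unicode.Script.script(?a) == Unicode.Script.script(?z)
  #     #=> true
  #
  #     Unicode.Script.script("ab")
  #     #=> a one-element list, since both graphemes share the same script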
for {script, ranges} <- @scripts do
def script(codepoint) when unquote(Utils.ranges_to_guard_clause(ranges)) do
unquote(script)
end
end
def script(codepoint) when is_integer(codepoint) and codepoint in 0..0x10FFFF do
:unknown
end
end
| 20.059211 | 82 | 0.655297 |