# A collection of Elixir (.ex / .exs) source files from public repositories.
# The original path, repository, and license of each file is noted in a comment
# immediately before it.

# -----------------------------------------------------------------------------
# bench/dateformat_bench.exs
# repo: chungwong/timex (license: MIT)
# -----------------------------------------------------------------------------

defmodule Timex.Timex.Bench do
  use Benchfella
  use Timex

  alias Timex.Parse.DateTime.Tokenizers.Strftime
  alias Timex.Parse.DateTime.Tokenizers.Default

  @datetime "2014-07-22T12:30:05Z"
  @datetime_zoned "2014-07-22T12:30:05+02:00"

  setup_all do
    Application.ensure_all_started(:tzdata)
    {:ok, nil}
  end

  bench "(default) parse ISO 8601 datetime" do
    datetime = Timex.parse(@datetime, "{ISO:Extended}")
    datetime_zoned = Timex.parse(@datetime_zoned, "{ISO:Extended}")
    {:ok, _} = datetime
    {:ok, _} = datetime_zoned
  end

  bench "(strftime) parse ISO 8601 datetime" do
    datetime = Timex.parse(@datetime, "%FT%TZ", :strftime)
    datetime_zoned = Timex.parse(@datetime_zoned, "%FT%T%:z", :strftime)
    {:ok, _} = datetime
    {:ok, _} = datetime_zoned
  end

  bench "(default) format ISO 8601 datetime" do
    date = Timex.epoch
    {:ok, _} = Timex.format(date, "{ISO:Extended:Z}")
    {:ok, _} = Timex.format(date, "{ISO:Extended}")
  end

  bench "(strftime) format ISO 8601 datetime" do
    date = Timex.epoch
    {:ok, _} = Timex.format(date, "%FT%TZ", :strftime)
    {:ok, _} = Timex.format(date, "%FT%Tz", :strftime)
  end

  bench "(strftime) tokenize ISO 8601" do
    {:ok, _} = Strftime.tokenize("%FT%TZ")
    {:ok, _} = Strftime.tokenize("%FT%T%z")
  end

  bench "(default) tokenize ISO 8601" do
    {:ok, _} = Default.tokenize("{YYYY}-{M}-{D}T{h24}:{m}:{s}Z")
    {:ok, _} = Default.tokenize("{YYYY}-{M}-{D}T{h24}:{m}:{s}{Z}")
  end

  bench "Timex.local" do
    _ = Timex.local
    :ok
  end
end

# -----------------------------------------------------------------------------
# lib/mix_tasks_phoenix_gen_scaffold.ex
# repo: itsgreggreg/phoenix_generator (license: MIT)
# -----------------------------------------------------------------------------

defmodule Mix.Tasks.Phoenix.Gen.Scaffold do
  use Mix.Task
  import Phoenix.Gen.Utils

  @shortdoc "Generate a Controller/Model/View/Template scaffold"
  @moduledoc """
  Generates a Controller/Model/View/Template scaffold.

      mix phoenix.gen.scaffold resource_name field_name:field_type

  ## Command line options

    * `--repo=RepoName` - the repo to generate a migration for (defaults to `YourApp.Repo`)

  ## Examples

      mix phoenix.gen.scaffold post title:string body:string --repo=MyApp.Repo
  """

  def run(opts) do
    {switches, [resource_name | _fields], _files} = OptionParser.parse opts
    repo = case switches[:repo] do
      nil -> []
      r -> ["--repo", r]
    end
    Mix.Tasks.Phoenix.Gen.Controller.run [resource_name, "--crud"]++repo
    Mix.Tasks.Phoenix.Gen.Ectomodel.run opts
  end
end
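
# A small sketch (not part of the original file) of the programmatic entry
# point: a Mix task's run/1 receives the raw argv list, so the shell example in
# the moduledoc above corresponds to the call below. "MyBlog" is a hypothetical
# application name used only for illustration.
defmodule Mix.Tasks.Phoenix.Gen.ScaffoldUsageSketch do
  def generate_comment_scaffold do
    # Mirrors: mix phoenix.gen.scaffold comment body:string post_id:integer --repo=MyBlog.Repo
    Mix.Tasks.Phoenix.Gen.Scaffold.run(
      ["comment", "body:string", "post_id:integer", "--repo=MyBlog.Repo"]
    )
  end
end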

# -----------------------------------------------------------------------------
# apps/man_api/test/support/fixtures_factory.ex
# repo: edenlabllc/man.api.public (license: MIT)
# -----------------------------------------------------------------------------

defmodule Man.FixturesFactory do
  @moduledoc """
  This module provides a simple factory for generating fixtures in tests.
  """

  alias Man.Templates.API

  def create(:template, attrs \\ %{}) do
    {:ok, template} =
      :template
      |> build(attrs)
      |> API.create_template()

    template
  end

  def build(fixture, attrs \\ %{})

  def build(fixture, attrs) when is_list(attrs), do: build(fixture, Enum.into(attrs, %{}))

  def build(:template, attrs) do
    %{
      body: "some body",
      validation_schema: %{},
      title: "some title"
    }
    |> Map.merge(attrs)
  end
end
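
# A minimal usage sketch (not part of the original repo) showing how the factory
# above is typically called from an ExUnit test. The assertions assume the
# template returned by `Man.Templates.API.create_template/1` carries the `title`
# and `body` attributes that were passed in.
defmodule Man.FixturesFactoryUsageSketch do
  use ExUnit.Case, async: true

  alias Man.FixturesFactory

  test "create/2 persists a template with overridden attributes" do
    template = FixturesFactory.create(:template, title: "custom title")
    assert template.title == "custom title"
  end

  test "build/2 only returns the default attribute map, merged with overrides" do
    attrs = FixturesFactory.build(:template, %{body: "<p>hello</p>"})
    assert attrs.body == "<p>hello</p>"
    assert attrs.title == "some title"
  end
end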

# -----------------------------------------------------------------------------
# lib/worker/branch_deleter.ex
# repo: UnofficialJuliaMirror/bors-ng-bors-ng (license: Apache-2.0)
# -----------------------------------------------------------------------------

defmodule BorsNG.Worker.BranchDeleter do
  @moduledoc """
  BranchDeleter removes merged branches when `delete_merged_branches` is set to
  true in the repository's bors.toml file.

  Normally we wait for the pull request "closed" event with the merged flag set.
  Since event delivery is not guaranteed, we also re-check the pull request with
  an increasing delay (one minute times the attempt number, for up to an hour)
  and delete its head branch once the pull request is merged.
  """
  use GenServer

  alias BorsNG.Worker.Batcher
  alias BorsNG.Database.Patch
  alias BorsNG.Database.Project
  alias BorsNG.Database.Repo
  alias BorsNG.GitHub

  # 1 minute between tries
  @retry_delay 60 * 1000

  # keep trying for one hour
  @retries 60

  # Public API

  def start_link do
    GenServer.start_link(__MODULE__, :ok, name: __MODULE__)
  end

  def delete(%Patch{} = patch) do
    GenServer.cast(__MODULE__, {:delete, patch, 0})
  end

  # Server callbacks

  def init(:ok) do
    {:ok, :ok}
  end

  def handle_cast({:delete, patch, attempt}, state) do
    patch = Repo.preload(patch, :project)
    conn = Project.installation_connection(patch.project.repo_xref, Repo)

    case GitHub.get_pr(conn, patch.pr_xref) do
      {:ok, %{merged: true} = pr} ->
        delete_branch(conn, pr)

      {:ok, %{state: :closed} = pr} ->
        delete_branch(conn, pr)

      {:ok, %{state: :open}} when attempt < @retries ->
        Process.send_after(
          self(),
          {:retry_delete, patch, attempt + 1},
          attempt_delay(attempt)
        )

      _ ->
        nil
    end

    {:noreply, state}
  end

  # Retry messages scheduled with Process.send_after/3 arrive here as info
  # messages and are turned back into :delete casts.
  def handle_info({:retry_delete, patch, attempt}, state) do
    GenServer.cast(__MODULE__, {:delete, patch, attempt})
    {:noreply, state}
  end

  defp delete_branch(conn, pr) do
    pr_in_same_repo = pr.head_repo_id > 0 && pr.head_repo_id == pr.base_repo_id

    toml_result = Batcher.GetBorsToml.get(conn, pr.head_ref)

    delete_merged_branches =
      case toml_result do
        {:ok, toml} -> toml.delete_merged_branches
        _ -> false
      end

    pr_closed = pr.state == :closed
    pr_squash_merged = String.starts_with?(pr.title, "[Merged by Bors] - ")

    if pr_in_same_repo && delete_merged_branches do
      cond do
        pr.merged ->
          GitHub.delete_branch!(conn, pr.head_ref)

        pr_closed && pr_squash_merged ->
          GitHub.delete_branch!(conn, pr.head_ref)

        true ->
          nil
      end
    end
  end

  defp attempt_delay(attempt) do
    @retry_delay * attempt
  end
end
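
# A minimal wiring sketch (not part of the original repo): the worker is started
# once (registered under its module name) and then handed patches after their
# pull requests are merged; `delete/1` casts to the named server, which re-checks
# GitHub with the backoff described in the moduledoc. The module name below is
# hypothetical.
defmodule BorsNG.Worker.BranchDeleterUsageSketch do
  alias BorsNG.Worker.BranchDeleter

  def start do
    # Typically done from a supervision tree; shown directly here for brevity.
    {:ok, _pid} = BranchDeleter.start_link()
  end

  def after_merge(patch) do
    # Fire-and-forget: the cast returns :ok immediately, deletion happens async.
    :ok = BranchDeleter.delete(patch)
  end
end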

# -----------------------------------------------------------------------------
# hangman/lib/hangman/server.ex
# repo: CraigTreptow/elixir-for-programmers (license: MIT)
# -----------------------------------------------------------------------------

defmodule Hangman.Server do
  @moduledoc """
  GenServer that owns a `Hangman.Game`: the game struct is the server state,
  and moves and tallies are requested through `GenServer.call/2`.
  """
  alias Hangman.Game

  use GenServer

  def start_link( ) do
    GenServer.start_link( __MODULE__, nil )
  end

  def init( _ ) do
    # the state of our server is defined by the result of this call
    { :ok, Game.new_game( ) }
  end

  # GenServer.call( pid, { :make_move, "a" } )
  def handle_call( { :make_move, guess }, _from, game ) do
    { game, tally } = Game.make_move( game, guess )
    { :reply, tally, game }
  end

  def handle_call( { :tally }, _from, game ) do
    { :reply, Game.tally( game ), game }
  end
end
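
# A minimal interaction sketch (not part of the original course repo), matching
# the two calls the server above handles: `{:make_move, guess}` replies with the
# tally after applying the guess, and `{:tally}` replies with the current tally.
defmodule Hangman.ServerUsageSketch do
  def play_one_guess do
    { :ok, pid } = Hangman.Server.start_link( )
    tally_after_move = GenServer.call( pid, { :make_move, "a" } )
    _current_tally = GenServer.call( pid, { :tally } )
    tally_after_move
  end
end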

# -----------------------------------------------------------------------------
# golden/test/golden_web/live/user_live_test.exs
# repo: NduatiK/marker (license: Apache-2.0)
# -----------------------------------------------------------------------------

defmodule GoldenWeb.UserLiveTest do
  use GoldenWeb.ConnCase

  import Phoenix.LiveViewTest
  import Golden.AccountsFixtures

  @create_attrs %{age: 42, name: "some name"}
  @update_attrs %{age: 43, name: "some updated name"}
  @invalid_attrs %{age: nil, name: nil}

  defp create_user(_) do
    user = user_fixture()
    %{user: user}
  end

  describe "Index" do
    setup [:create_user]

    test "lists all users", %{conn: conn, user: user} do
      {:ok, _index_live, html} = live(conn, Routes.user_index_path(conn, :index))

      assert html =~ "Listing Users"
      assert html =~ user.name
    end

    test "saves new user", %{conn: conn} do
      {:ok, index_live, _html} = live(conn, Routes.user_index_path(conn, :index))

      assert index_live |> element("a", "New User") |> render_click() =~
               "New User"

      assert_patch(index_live, Routes.user_index_path(conn, :new))

      assert index_live
             |> form("#user-form", user: @invalid_attrs)
             |> render_change() =~ "can't be blank"

      {:ok, _, html} =
        index_live
        |> form("#user-form", user: @create_attrs)
        |> render_submit()
        |> follow_redirect(conn, Routes.user_index_path(conn, :index))

      assert html =~ "User created successfully"
      assert html =~ "some name"
    end

    test "updates user in listing", %{conn: conn, user: user} do
      {:ok, index_live, _html} = live(conn, Routes.user_index_path(conn, :index))

      assert index_live |> element("#user-#{user.id} a", "Edit") |> render_click() =~
               "Edit User"

      assert_patch(index_live, Routes.user_index_path(conn, :edit, user))

      assert index_live
             |> form("#user-form", user: @invalid_attrs)
             |> render_change() =~ "can't be blank"

      {:ok, _, html} =
        index_live
        |> form("#user-form", user: @update_attrs)
        |> render_submit()
        |> follow_redirect(conn, Routes.user_index_path(conn, :index))

      assert html =~ "User updated successfully"
      assert html =~ "some updated name"
    end

    test "deletes user in listing", %{conn: conn, user: user} do
      {:ok, index_live, _html} = live(conn, Routes.user_index_path(conn, :index))

      assert index_live |> element("#user-#{user.id} a", "Delete") |> render_click()
      refute has_element?(index_live, "#user-#{user.id}")
    end
  end

  describe "Show" do
    setup [:create_user]

    test "displays user", %{conn: conn, user: user} do
      {:ok, _show_live, html} = live(conn, Routes.user_show_path(conn, :show, user))

      assert html =~ "Show User"
      assert html =~ user.name
    end

    test "updates user within modal", %{conn: conn, user: user} do
      {:ok, show_live, _html} = live(conn, Routes.user_show_path(conn, :show, user))

      assert show_live |> element("a", "Edit") |> render_click() =~
               "Edit User"

      assert_patch(show_live, Routes.user_show_path(conn, :edit, user))

      assert show_live
             |> form("#user-form", user: @invalid_attrs)
             |> render_change() =~ "can't be blank"

      {:ok, _, html} =
        show_live
        |> form("#user-form", user: @update_attrs)
        |> render_submit()
        |> follow_redirect(conn, Routes.user_show_path(conn, :show, user))

      assert html =~ "User updated successfully"
      assert html =~ "some updated name"
    end
  end
end

# -----------------------------------------------------------------------------
# test/mint/http2/conn_test.exs
# repo: sthagen/mint (license: Apache-2.0)
# -----------------------------------------------------------------------------

defmodule Mint.HTTP2Test do
use ExUnit.Case, async: true
import Mint.HTTP2.Frame
import ExUnit.CaptureLog
alias Mint.{
HTTPError,
HTTP2,
HTTP2.TestServer,
TransportError
}
require Mint.HTTP
@moduletag :capture_log
setup :start_connection
defmacrop assert_recv_frames([]) do
quote do: refute_receive({:ssl, _socket, _data})
end
defmacrop assert_recv_frames(frames) when is_list(frames) do
quote do: unquote(frames) = recv_next_frames(unquote(length(frames)))
end
defmacrop assert_http2_error(error, expected_reason) do
quote do
error = unquote(error)
assert %HTTPError{reason: unquote(expected_reason)} = error
message = Exception.message(error)
refute message =~ "got FunctionClauseError"
assert message != inspect(error.reason)
end
end
defmacrop assert_transport_error(error, expected_reason) do
quote do
error = unquote(error)
assert %TransportError{reason: unquote(expected_reason)} = error
message = Exception.message(error)
refute message =~ "got FunctionClauseError"
assert message != inspect(error.reason)
end
end
# TODO: Remove check once we depend on Elixir 1.10+.
if Version.match?(System.version(), ">= 1.10.0") do
describe "Mint.HTTP.is_mint_message/2" do
test "the guard works with HTTP2 connections", %{conn: conn} do
import Mint.HTTP, only: [is_connection_message: 2]
assert is_connection_message(conn, {:tcp, conn.socket, "foo"}) == true
assert is_connection_message(conn, {:tcp_closed, conn.socket}) == true
assert is_connection_message(conn, {:tcp_error, conn.socket, :nxdomain}) == true
assert is_connection_message(conn, {:tcp, :not_a_socket, "foo"}) == false
assert is_connection_message(conn, {:tcp_closed, :not_a_socket}) == false
assert is_connection_message(_conn = %HTTP2{}, {:tcp, conn.socket, "foo"}) == false
# If the first argument is not a connection struct, we return false.
assert is_connection_message(%{socket: conn.socket}, {:tcp, conn.socket, "foo"}) == false
assert is_connection_message(%URI{}, {:tcp, conn.socket, "foo"}) == false
end
end
end
describe "handling unknown frames from the server" do
test "handle origin frame from the server", %{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
origin_payload =
Base.decode16!("001c68747470733a2f2f6472616e642e636c6f7564666c6172652e636f6d",
case: :lower
)
frame = HTTP2.Frame.encode_raw(12, 0, 0, origin_payload)
{:ok, conn, responses} =
HTTP2.stream(
conn,
{:ssl, conn.socket, IO.iodata_to_binary(frame)}
)
assert responses == []
hbf = server_encode_headers([{":status", "200"}])
assert {:ok, %HTTP2{} = _conn, responses} =
stream_frames(conn, [
headers(
stream_id: stream_id,
hbf: hbf,
flags: set_flags(:headers, [:end_headers, :end_stream])
)
])
assert responses == [
{:status, ref, 200},
{:headers, ref, []},
{:done, ref}
]
assert HTTP2.open?(conn)
end
end
describe "stream/2 with unknown messages or error messages" do
test "unknown message", %{conn: conn} do
assert HTTP2.stream(conn, :unknown_message) == :unknown
end
test "socket error messages are treated as errors", %{conn: conn} do
message = {:ssl_error, conn.socket, :etimeout}
assert {:error, %HTTP2{} = conn, %TransportError{reason: :etimeout}, []} =
HTTP2.stream(conn, message)
refute HTTP2.open?(conn)
end
end
describe "closed streams" do
test "server closes a stream with RST_STREAM", %{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
rst_stream(stream_id: stream_id, error_code: :protocol_error)
])
assert [{:error, ^ref, error}] = responses
assert_http2_error error, {:server_closed_request, :protocol_error}
assert HTTP2.open?(conn)
end
test "when server sends frames after sending RST_STREAM, they are ignored",
%{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
rst_stream(stream_id: stream_id, error_code: :cancel),
{:headers, stream_id, [{":status", "200"}], [:end_headers, :end_stream]}
])
assert [{:error, ^ref, error}] = responses
assert_http2_error error, {:server_closed_request, :cancel}
assert HTTP2.open?(conn)
end
test "client closes a stream with cancel_request/2", %{conn: conn} do
{conn, ref} = open_request(conn)
{:ok, conn} = HTTP2.cancel_request(conn, ref)
assert_recv_frames [
headers(stream_id: stream_id),
rst_stream(stream_id: stream_id, error_code: :cancel)
]
# If the server replies next, we ignore the replies.
assert {:ok, %HTTP2{} = conn, []} =
stream_frames(conn, [
{:headers, stream_id, [{":status", "200"}], [:end_headers]},
data(stream_id: stream_id, data: "hello", flags: set_flags(:data, [:end_stream]))
])
assert HTTP2.open?(conn)
end
test "client closes a non-existent request with cancel_request/2", %{conn: conn} do
assert {:ok, ^conn} = HTTP2.cancel_request(conn, make_ref())
end
test "receiving a RST_STREAM on a closed stream is ignored", %{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
{:headers, stream_id, [{":status", "200"}], [:end_headers, :end_stream]}
])
assert [{:status, ^ref, 200}, {:headers, ^ref, []}, {:done, ^ref}] = responses
assert_recv_frames [rst_stream(stream_id: ^stream_id)]
assert {:ok, %HTTP2{} = conn, []} =
stream_frames(conn, [
rst_stream(stream_id: stream_id, error_code: :no_error),
rst_stream(stream_id: stream_id, error_code: :no_error)
])
assert HTTP2.open?(conn)
end
end
describe "stream state transitions" do
test "if client receives HEADERS after receiving a END_STREAM flag, it ignores it",
%{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
{:headers, stream_id, [{":status", "200"}], [:end_headers, :end_stream]},
{:headers, stream_id, [{":status", "200"}], [:end_headers, :end_stream]}
])
assert [{:status, ^ref, 200}, {:headers, ^ref, []}, {:done, ^ref}] = responses
assert HTTP2.open?(conn)
end
test "if client receives DATA after receiving a END_STREAM flag, it ignores it",
%{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
{:headers, stream_id, [{":status", "200"}], [:end_headers, :end_stream]},
data(stream_id: stream_id, data: "hello", flags: set_flags(:data, [:end_stream]))
])
assert [{:status, ^ref, 200}, {:headers, ^ref, []}, {:done, ^ref}] = responses
assert HTTP2.open?(conn)
end
end
describe "server closes the connection" do
test "with GOAWAY with :protocol_error", %{conn: conn} do
{conn, _ref} = open_request(conn)
{conn, ref1} = open_request(conn)
{conn, ref2} = open_request(conn)
assert_recv_frames [headers(stream_id: first_stream_id), headers(), headers()]
assert {:error, %HTTP2{} = conn, error, responses} =
stream_frames(conn, [
goaway(
last_stream_id: first_stream_id,
error_code: :protocol_error,
debug_data: "debug data"
)
])
assert_http2_error error, {
:server_closed_connection,
:protocol_error,
"debug data"
}
assert [{:error, server_ref1, error1}, {:error, server_ref2, error2}] = responses
assert MapSet.new([server_ref1, server_ref2]) == MapSet.new([ref1, ref2])
assert_http2_error error1, :unprocessed
assert_http2_error error2, :unprocessed
assert HTTP2.open_request_count(conn) == 1
refute HTTP2.open?(conn, :write)
assert HTTP2.open?(conn, :read)
end
test "with GOAWAY with :no_error and responses after the GOAWAY frame", %{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
goaway(last_stream_id: stream_id, error_code: :no_error, debug_data: ""),
headers(
stream_id: stream_id,
hbf: server_encode_headers([{":status", "200"}]),
flags: set_flags(:headers, [:end_headers])
),
data(stream_id: stream_id, data: "hello", flags: set_flags(:data, [:end_stream]))
])
assert [{:status, ^ref, 200}, {:headers, ^ref, []}, {:data, ^ref, "hello"}, {:done, ^ref}] =
responses
# the client would normally send two window_updates and a rst_stream, but since the
# connection is now read-only, it should send nothing
assert_recv_frames []
assert HTTP2.open_request_count(conn) == 0
refute HTTP2.open?(conn, :write)
assert HTTP2.open?(conn, :read)
end
test "with GOAWAY followed by another GOAWAY then the error reason is from the last GOAWAY",
%{conn: conn} do
assert {:error, %HTTP2{} = conn, error, []} =
stream_frames(conn, [
goaway(last_stream_id: 1, error_code: :no_error, debug_data: "1"),
goaway(last_stream_id: 1, error_code: :flow_control_error, debug_data: "2"),
goaway(last_stream_id: 1, error_code: :protocol_error, debug_data: "3")
])
assert_http2_error error, {:server_closed_connection, :protocol_error, "3"}
refute HTTP2.open?(conn, :write)
assert HTTP2.open?(conn, :read)
end
test "with direct socket close and no in-flight requests", %{conn: conn} do
assert {:ok, %HTTP2{} = conn, []} = HTTP2.stream(conn, {:ssl_closed, conn.socket})
refute HTTP2.open?(conn)
end
test "with direct socket close and in-flight requests", %{conn: conn} do
{conn, _ref} = open_request(conn)
assert {:error, %HTTP2{} = conn, error, []} = HTTP2.stream(conn, {:ssl_closed, conn.socket})
assert %TransportError{reason: :closed} = error
refute HTTP2.open?(conn)
end
end
describe "closed connection" do
test "client closes the connection with close/1", %{conn: conn} do
assert {:ok, conn} = HTTP2.close(conn)
assert_recv_frames [goaway(error_code: :no_error)]
refute HTTP2.open?(conn)
end
test "close/1 an already closed connection with default inet_backend does not cause error", %{
conn: conn
} do
assert HTTP2.open?(conn)
# ignore the returned conn, otherwise transport.close/1 will not be called
assert {:ok, _conn} = HTTP2.close(conn)
assert {:ok, conn} = HTTP2.close(conn)
refute HTTP2.open?(conn)
end
test "request/5 returns error if the connection is closed",
%{conn: conn} do
assert {:error, %HTTP2{} = conn, _error, []} =
stream_frames(conn, [
goaway(
stream_id: 0,
last_stream_id: 3,
error_code: :protocol_error,
debug_data: "debug data"
)
])
expected_window_size = HTTP2.get_window_size(conn, :connection)
test_bodies = [nil, :stream, "XX"]
conn =
Enum.reduce(test_bodies, conn, fn body, conn ->
assert {:error, %HTTP2{} = conn, error} = HTTP2.request(conn, "GET", "/", [], body)
assert_http2_error error, :closed_for_writing
assert HTTP2.open_request_count(conn) == 0
assert HTTP2.get_window_size(conn, :connection) == expected_window_size
conn
end)
assert {:ok, conn} = HTTP2.close(conn)
Enum.reduce(test_bodies, conn, fn body, conn ->
assert {:error, %HTTP2{} = conn, error} = HTTP2.request(conn, "GET", "/", [], body)
assert_http2_error error, :closed
assert HTTP2.open_request_count(conn) == 0
assert HTTP2.get_window_size(conn, :connection) == expected_window_size
conn
end)
end
end
describe "client errors" do
@tag server_settings: [max_concurrent_streams: 1]
test "when the client tries to open too many concurrent requests", %{conn: conn} do
{conn, _ref} = open_request(conn)
assert HTTP2.open_request_count(conn) == 1
expected_window_size = HTTP2.get_window_size(conn, :connection)
Enum.reduce([nil, :stream, "XX"], conn, fn body, conn ->
assert {:error, %HTTP2{} = conn, error} = HTTP2.request(conn, "GET", "/", [], body)
assert_http2_error error, :too_many_concurrent_requests
assert HTTP2.open_request_count(conn) == 1
assert HTTP2.open?(conn)
assert HTTP2.get_window_size(conn, :connection) == expected_window_size
conn
end)
end
test "when an ssl timeout is triggered on request", %{conn: conn} do
# force the transport to one that always times out on send
conn = %{conn | transport: Mint.HTTP2.TestTransportSendTimeout}
expected_window_size = HTTP2.get_window_size(conn, :connection)
Enum.reduce([nil, :stream, "XX"], conn, fn body, conn ->
assert {:error, %HTTP2{} = conn, error} = HTTP2.request(conn, "GET", "/", [], body)
assert_transport_error error, :timeout
assert HTTP2.open_request_count(conn) == 0
assert HTTP2.open?(conn)
assert HTTP2.get_window_size(conn, :connection) == expected_window_size
conn
end)
end
test "when an ssl timeout is triggered on stream request body", %{conn: conn} do
# open a streaming request.
{conn, ref} = open_request(conn, :stream)
assert_recv_frames [headers()]
# force the transport to one that always times out on send
conn = %{conn | transport: Mint.HTTP2.TestTransportSendTimeout}
expected_window_size = HTTP2.get_window_size(conn, :connection)
data = :binary.copy(<<0>>, HTTP2.get_window_size(conn, {:request, ref}))
assert {:error, %HTTP2{} = conn, error} = HTTP2.stream_request_body(conn, ref, data)
assert_transport_error error, :timeout
assert HTTP2.open_request_count(conn) == 1
assert HTTP2.open?(conn)
assert HTTP2.get_window_size(conn, :connection) == expected_window_size
end
end
describe "headers and continuation" do
test "server splits headers into multiple CONTINUATION frames", %{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
<<hbf1::1-bytes, hbf2::1-bytes, hbf3::binary>> =
server_encode_headers([{":status", "200"}, {"foo", "bar"}, {"baz", "bong"}])
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
headers(stream_id: stream_id, hbf: hbf1, flags: set_flags(:headers, [])),
continuation(
stream_id: stream_id,
hbf: hbf2,
flags: set_flags(:continuation, [])
),
continuation(
stream_id: stream_id,
hbf: hbf3,
flags: set_flags(:continuation, [:end_headers])
)
])
assert [{:status, ^ref, 200}, {:headers, ^ref, headers}] = responses
assert headers == [{"foo", "bar"}, {"baz", "bong"}]
assert HTTP2.open?(conn)
end
test "server sends a badly encoded header block fragment", %{conn: conn} do
{conn, _ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:error, %HTTP2{} = conn, error, []} =
stream_frames(conn, [
headers(
stream_id: stream_id,
hbf: "not a good hbf",
flags: set_flags(:headers, [:end_headers])
)
])
assert_http2_error error, {:compression_error, debug_data}
assert debug_data =~ "unable to decode headers: :bad_binary_encoding"
assert_recv_frames [goaway(error_code: :compression_error)]
refute HTTP2.open?(conn)
end
test "server sends a CONTINUATION frame outside of headers streaming",
%{conn: conn} do
{conn, _ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:error, %HTTP2{} = conn, error, []} =
stream_frames(conn, [continuation(stream_id: stream_id, hbf: "hbf")])
assert_http2_error error, {:protocol_error, debug_data}
assert debug_data =~ "CONTINUATION received outside of headers streaming"
assert_recv_frames [goaway(error_code: :protocol_error)]
refute HTTP2.open?(conn)
end
test "server sends a non-CONTINUATION frame while streaming headers",
%{conn: conn} do
{conn, _ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:error, %HTTP2{} = conn, error, []} =
stream_frames(conn, [
headers(stream_id: stream_id, hbf: "hbf", flags: set_flags(:headers, [])),
data(stream_id: stream_id, data: "hello")
])
assert_http2_error error, {:protocol_error, debug_data}
assert debug_data =~ "headers are streaming but got a :data frame"
assert_recv_frames [goaway(error_code: :protocol_error)]
refute HTTP2.open?(conn)
end
test "server sends HEADERS with END_STREAM but no END_HEADERS and then sends CONTINUATIONs",
%{conn: conn} do
{conn, ref} = open_request(conn)
<<hbf1::1-bytes, hbf2::1-bytes, hbf3::binary>> =
server_encode_headers([{":status", "200"}, {"foo", "bar"}, {"baz", "bong"}])
assert_recv_frames [headers(stream_id: stream_id)]
{:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
headers(stream_id: stream_id, hbf: hbf1, flags: set_flags(:headers, [:end_stream])),
continuation(stream_id: stream_id, hbf: hbf2, flags: set_flags(:continuation, [])),
continuation(
stream_id: stream_id,
hbf: hbf3,
flags: set_flags(:continuation, [:end_headers])
)
])
assert [{:status, ^ref, 200}, {:headers, ^ref, _headers}, {:done, ^ref}] = responses
assert_recv_frames [rst_stream(error_code: :no_error)]
assert HTTP2.open?(conn)
end
test "server sends a response without a :status header", %{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
{:headers, stream_id, [{"foo", "bar"}, {"baz", "bong"}],
[:end_headers, :end_stream]}
])
assert [{:error, ^ref, error}] = responses
assert_http2_error error, :missing_status_header
assert_recv_frames [rst_stream(error_code: :protocol_error)]
assert HTTP2.open?(conn)
end
test "client has to split headers because of max frame size", %{conn: conn} do
# This is an empirical number of headers so that the minimum max frame size (~16kb) fits
# between 2 and 3 times (so that we can test the behaviour above).
headers = for i <- 1..400, do: {"a#{i}", String.duplicate("a", 100)}
assert {:ok, conn, _ref} = HTTP2.request(conn, "GET", "/", headers, nil)
assert_recv_frames [
headers(stream_id: stream_id, hbf: hbf1, flags: flags1),
continuation(stream_id: stream_id, hbf: hbf2, flags: flags2),
continuation(stream_id: stream_id, hbf: hbf3, flags: flags3)
]
assert flag_set?(flags1, :headers, :end_stream)
refute flag_set?(flags1, :headers, :end_headers)
refute flag_set?(flags2, :continuation, :end_headers)
assert flag_set?(flags3, :continuation, :end_headers)
headers = server_decode_headers(hbf1 <> hbf2 <> hbf3)
assert [{":method", "GET"}, {":path", "/"}, {":scheme", "https"} | _] = headers
assert HTTP2.open?(conn)
end
@tag server_settings: [max_header_list_size: 20]
test "an error is returned if client exceeds SETTINGS_MAX_HEADER_LIST_SIZE", %{conn: conn} do
# With such a low max_header_list_size, even the default :special headers (such as
# :method or :path) exceed the size.
expected_window_size = HTTP2.get_window_size(conn, :connection)
Enum.reduce([nil, :stream, "XX"], conn, fn body, conn ->
assert {:error, %HTTP2{} = conn, error} = HTTP2.request(conn, "GET", "/", [], body)
assert_http2_error error, {:max_header_list_size_exceeded, _, 20}
assert HTTP2.open_request_count(conn) == 0
assert HTTP2.open?(conn)
assert HTTP2.get_window_size(conn, :connection) == expected_window_size
conn
end)
end
test ":authority pseudo-header includes port", %{conn: conn} do
{conn, _ref} = open_request(conn)
assert_recv_frames [headers(hbf: hbf)]
assert {":authority", authority} =
hbf
|> server_decode_headers()
|> List.keyfind(":authority", 0)
assert authority == "#{conn.hostname}:#{conn.port}"
assert HTTP2.open?(conn)
end
test ":authority pseudo-header does not include port if it is the scheme's default",
%{conn: conn} do
default_https_port = URI.default_port("https")
try do
# Override default https port for this test
URI.default_port("https", conn.port)
{conn, _ref} = open_request(conn)
assert_recv_frames [headers(hbf: hbf)]
assert {":authority", authority} =
hbf
|> server_decode_headers()
|> List.keyfind(":authority", 0)
assert authority == conn.hostname
assert HTTP2.open?(conn)
after
URI.default_port("https", default_https_port)
end
end
test "when there's a request body, the content-length header is passed if not present",
%{conn: conn} do
{conn, _ref} = open_request(conn, "hello")
assert_recv_frames [headers(hbf: hbf), data()]
assert hbf
|> server_decode_headers()
|> List.keyfind("content-length", 0) == {"content-length", "5"}
# Let's check that content-length is not overridden if already present.
headers = [{"content-length", "10"}]
assert {:ok, conn, _ref} = HTTP2.request(conn, "GET", "/", headers, "XX")
assert_recv_frames [headers(hbf: hbf), data()]
assert hbf
|> server_decode_headers()
|> List.keyfind("content-length", 0) == {"content-length", "10"}
# Let's make sure content-length isn't added if the body is nil or :stream.
{conn, _ref} = open_request(conn, nil)
assert_recv_frames [headers(hbf: hbf)]
refute hbf
|> server_decode_headers()
|> List.keymember?("content-length", 0)
assert HTTP2.open?(conn)
end
test "the Cookie header is joined into a single value if present multiple times",
%{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
hbf =
server_encode_headers([
{":status", "200"},
{"accept", "text/plain"},
{"cookie", "a=b"},
{"Cookie", "c=d; e=f"},
{"content-type", "application/json"},
{"cookie", "g=h"},
{"x-header", "value"}
])
assert {:ok, %HTTP2{} = _conn, responses} =
stream_frames(conn, [
headers(
stream_id: stream_id,
hbf: hbf,
flags: set_flags(:headers, [:end_headers])
)
])
assert [{:status, ^ref, 200}, {:headers, ^ref, headers}] = responses
assert [{"cookie", cookie}, {"accept", _}, {"content-type", _}, {"x-header", _}] = headers
assert cookie == "a=b; c=d; e=f; g=h"
end
test "a CONNECT request omits :scheme and :path pseudo-headers", %{conn: conn} do
assert {:ok, conn, _ref} = HTTP2.request(conn, "CONNECT", "/", [], nil)
assert_recv_frames [headers(hbf: hbf)]
refute hbf
|> server_decode_headers()
|> List.keymember?(":scheme", 0)
refute hbf
|> server_decode_headers()
|> List.keymember?(":path", 0)
assert HTTP2.open?(conn)
end
test "explicitly passed pseudo-headers are sorted to the front of the headers list", %{
conn: conn
} do
headers = [
{":scheme", conn.scheme},
{":path", "/ws"},
{":protocol", "websocket"}
]
assert {:ok, conn, _ref} = HTTP2.request(conn, "CONNECT", "/", headers, :stream)
assert_recv_frames [headers(hbf: hbf)]
assert [
{":method", "CONNECT"},
{":authority", _},
{":scheme", _},
{":path", "/ws"},
{":protocol", "websocket"},
{"user-agent", _}
] = server_decode_headers(hbf)
assert HTTP2.open?(conn)
end
end
describe "interim responses (1xx)" do
test "multiple before a single HEADERS", %{conn: conn} do
info_status1 = Enum.random(100..199)
info_status2 = Enum.random(100..199)
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
<<info_hbf1_part1::1-bytes, info_hbf1_part2::binary>> =
server_encode_headers([
{":status", Integer.to_string(info_status1)},
{"x-info-header1", "this is an info"}
])
info_hbf2 =
server_encode_headers([
{":status", Integer.to_string(info_status2)},
{"x-info-header2", "this is an info"}
])
hbf =
server_encode_headers([
{":status", "200"},
{"content-type", "application/json"}
])
assert {:ok, %HTTP2{} = _conn, responses} =
stream_frames(conn, [
headers(
stream_id: stream_id,
hbf: info_hbf1_part1,
flags: set_flags(:headers, [])
),
continuation(
stream_id: stream_id,
hbf: info_hbf1_part2,
flags: set_flags(:continuation, [:end_headers])
),
headers(
stream_id: stream_id,
hbf: info_hbf2,
flags: set_flags(:headers, [:end_headers])
),
headers(
stream_id: stream_id,
hbf: hbf,
flags: set_flags(:headers, [:end_headers, :end_stream])
)
])
assert [
{:status, ^ref, ^info_status1},
{:headers, ^ref, [{"x-info-header1", "this is an info"}]},
{:status, ^ref, ^info_status2},
{:headers, ^ref, [{"x-info-header2", "this is an info"}]},
{:status, ^ref, 200},
{:headers, ^ref, [{"content-type", "application/json"}]},
{:done, ^ref}
] = responses
assert HTTP2.open?(conn)
end
test "protocol error if interim response has END_STREAM set", %{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
info_hbf =
server_encode_headers([
{":status", "101"},
{"x-info-header1", "this is an info"}
])
assert {:ok, %HTTP2{} = _conn, responses} =
stream_frames(conn, [
headers(
stream_id: stream_id,
hbf: info_hbf,
flags: set_flags(:headers, [:end_headers, :end_stream])
)
])
assert [{:error, ^ref, error}] = responses
assert_http2_error error, {:protocol_error, debug_data}
assert debug_data =~ "informational response (1xx) must not have the END_STREAM flag set"
assert HTTP2.open?(conn)
end
test "protocol error if interim response HEADERS comes after final HEADERS", %{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
hbf = server_encode_headers([{":status", "200"}])
info_hbf = server_encode_headers([{":status", "101"}])
assert {:ok, %HTTP2{} = _conn, responses} =
stream_frames(conn, [
headers(
stream_id: stream_id,
hbf: hbf,
flags: set_flags(:headers, [:end_headers])
),
headers(
stream_id: stream_id,
hbf: info_hbf,
flags: set_flags(:headers, [:end_headers])
)
])
assert [{:status, ^ref, 200}, {:headers, ^ref, []}, {:error, ^ref, error}] = responses
assert_http2_error error, {:protocol_error, debug_data}
assert debug_data =~
"informational response (1xx) must appear before final response, got a 101 status"
assert HTTP2.open?(conn)
end
end
describe "trailing headers" do
test "sent by the server with a normal response", %{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
hbf = server_encode_headers([{":status", "200"}])
<<trailing_hbf1::1-bytes, trailing_hbf2::binary>> =
server_encode_headers([{"x-trailing", "some value"}])
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
headers(
stream_id: stream_id,
hbf: hbf,
flags: set_flags(:headers, [:end_headers])
),
data(stream_id: stream_id, data: "some data", flags: set_flags(:data, [])),
headers(
stream_id: stream_id,
hbf: trailing_hbf1,
flags: set_flags(:headers, [:end_stream])
),
continuation(
stream_id: stream_id,
hbf: trailing_hbf2,
flags: set_flags(:continuation, [:end_headers])
)
])
assert [
{:status, ^ref, 200},
{:headers, ^ref, []},
{:data, ^ref, "some data"},
{:headers, ^ref, trailing_headers},
{:done, ^ref}
] = responses
assert trailing_headers == [{"x-trailing", "some value"}]
assert HTTP2.open?(conn)
end
test "sent by the server directly after the \"opening\" headers (without data in between)",
%{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
hbf = server_encode_headers([{":status", "200"}])
trailing_hbf = server_encode_headers([{"x-trailing", "some value"}])
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
headers(
stream_id: stream_id,
hbf: hbf,
flags: set_flags(:headers, [:end_headers])
),
headers(
stream_id: stream_id,
hbf: trailing_hbf,
flags: set_flags(:headers, [:end_stream, :end_headers])
)
])
assert [
{:status, ^ref, 200},
{:headers, ^ref, []},
{:headers, ^ref, [{"x-trailing", "some value"}]},
{:done, ^ref}
] = responses
assert HTTP2.open?(conn)
end
test "with a push promise request", %{conn: conn} do
promised_stream_id = 4
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
promised_hbf = server_encode_headers([{":method", "GET"}])
hbf1 = server_encode_headers([{":status", "200"}])
hbf2 = server_encode_headers([{":status", "200"}])
trailing_hbf = server_encode_headers([{"x-trailing", "some value"}])
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
push_promise(
stream_id: stream_id,
hbf: promised_hbf,
promised_stream_id: promised_stream_id,
flags: set_flags(:push_promise, [:end_headers])
),
headers(
stream_id: stream_id,
hbf: hbf1,
flags: set_flags(:headers, [:end_stream, :end_headers])
),
# Promised stream with trailing headers.
headers(
stream_id: promised_stream_id,
hbf: hbf2,
flags: set_flags(:headers, [:end_headers])
),
headers(
stream_id: promised_stream_id,
hbf: trailing_hbf,
flags: set_flags(:headers, [:end_headers, :end_stream])
)
])
assert [
{:push_promise, ^ref, promised_ref, [{":method", "GET"}]},
{:status, ^ref, 200},
{:headers, ^ref, []},
{:done, ^ref},
{:status, promised_ref, 200},
{:headers, promised_ref, []},
{:headers, promised_ref, [{"x-trailing", "some value"}]},
{:done, promised_ref}
] = responses
assert HTTP2.open?(conn)
end
test "protocol error if trailing headers don't have END_STREAM set", %{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
hbf = server_encode_headers([{":status", "200"}])
trailing_hbf = server_encode_headers([{"x-trailing", "some value"}])
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
headers(
stream_id: stream_id,
hbf: hbf,
flags: set_flags(:headers, [:end_headers])
),
data(stream_id: stream_id, data: "some data", flags: set_flags(:data, [])),
headers(
stream_id: stream_id,
hbf: trailing_hbf,
flags: set_flags(:headers, [:end_headers])
)
])
assert [
{:status, ^ref, 200},
{:headers, ^ref, []},
{:data, ^ref, "some data"},
{:error, ^ref, error}
] = responses
assert_http2_error error, {:protocol_error, debug_data}
assert debug_data =~ "trailing headers didn't set the END_STREAM flag"
assert HTTP2.open?(conn)
end
test "unallowed headers are removed", %{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
hbf = server_encode_headers([{":status", "200"}])
# Note that headers are lowercase in HTTP/2 responses because the spec
# says so.
trailing_hbf = server_encode_headers([{"x-trailing", "value"}, {"host", "example.com"}])
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
headers(
stream_id: stream_id,
hbf: hbf,
flags: set_flags(:headers, [:end_headers])
),
headers(
stream_id: stream_id,
hbf: trailing_hbf,
flags: set_flags(:headers, [:end_headers, :end_stream])
)
])
assert [
{:status, ^ref, 200},
{:headers, ^ref, []},
{:headers, ^ref, trailing_headers},
{:done, ^ref}
] = responses
assert trailing_headers == [{"x-trailing", "value"}]
assert HTTP2.open?(conn)
end
end
describe "server pushes" do
test "a PUSH_PROMISE frame and a few CONTINUATION frames are received",
%{conn: conn} do
promised_stream_id = 4
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
# Promised headers.
headers = [{":method", "GET"}, {"foo", "bar"}, {"baz", "bong"}]
<<hbf1::1-bytes, hbf2::1-bytes, hbf3::binary>> = server_encode_headers(headers)
# Normal headers.
hbf = server_encode_headers([{":status", "200"}, {"push", "promise"}])
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
push_promise(
stream_id: stream_id,
hbf: hbf1,
promised_stream_id: promised_stream_id
),
continuation(stream_id: stream_id, hbf: hbf2),
continuation(
stream_id: stream_id,
hbf: hbf3,
flags: set_flags(:continuation, [:end_headers])
),
headers(
stream_id: stream_id,
hbf: hbf,
flags: set_flags(:headers, [:end_stream, :end_headers])
)
])
assert [
{:push_promise, ^ref, promised_ref, headers},
{:status, ^ref, 200},
{:headers, ^ref, [{"push", "promise"}]},
{:done, ^ref}
] = responses
assert is_reference(promised_ref)
assert headers == [{":method", "GET"}, {"foo", "bar"}, {"baz", "bong"}]
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
headers(
stream_id: promised_stream_id,
hbf: hbf,
flags: set_flags(:headers, [:end_headers])
),
data(
stream_id: promised_stream_id,
data: "hello",
flags: set_flags(:data, [:end_stream])
)
])
assert [
{:status, ^promised_ref, 200},
{:headers, ^promised_ref, [{"push", "promise"}]},
{:data, ^promised_ref, "hello"},
{:done, ^promised_ref}
] = responses
assert HTTP2.open?(conn)
end
@tag connect_options: [client_settings: [enable_push: false]]
test "receiving PUSH_PROMISE frame when SETTINGS_ENABLE_PUSH is false causes an error",
%{conn: conn} do
{conn, _ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
hbf = server_encode_headers([{":method", "GET"}])
assert {:error, %HTTP2{} = conn, error, []} =
stream_frames(conn, [
push_promise(
stream_id: stream_id,
hbf: hbf,
promised_stream_id: 4,
flags: set_flags(:push_promise, [:end_headers])
)
])
assert_http2_error error, {:protocol_error, debug_data}
assert debug_data =~ "received PUSH_PROMISE frame when SETTINGS_ENABLE_PUSH was false"
assert_recv_frames [goaway(error_code: :protocol_error)]
refute HTTP2.open?(conn)
end
test "if the server tries to reserve an already existing stream the connection errors",
%{conn: conn} do
{conn, _ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
promised_headers_hbf = server_encode_headers([{":method", "GET"}])
normal_headers_hbf = server_encode_headers([{":status", "200"}])
assert {:error, %HTTP2{} = conn, error, _responses} =
stream_frames(conn, [
push_promise(
stream_id: stream_id,
hbf: promised_headers_hbf,
promised_stream_id: 4,
flags: set_flags(:headers, [:end_headers])
),
push_promise(
stream_id: stream_id,
hbf: promised_headers_hbf,
promised_stream_id: 4,
flags: set_flags(:headers, [:end_headers])
),
headers(
stream_id: stream_id,
hbf: normal_headers_hbf,
flags: set_flags(:headers, [:end_stream, :end_headers])
)
])
assert_http2_error error, {:protocol_error, debug_data}
assert debug_data =~ "stream with ID 4 already exists and can't be reserved by the server"
refute HTTP2.open?(conn)
end
@tag connect_options: [client_settings: [max_concurrent_streams: 1]]
test "if the server reaches the max number of client streams, the client sends an error",
%{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
promised_headers_hbf = server_encode_headers([{":method", "GET"}])
normal_headers_hbf = server_encode_headers([{":status", "200"}])
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
push_promise(
stream_id: stream_id,
hbf: promised_headers_hbf,
promised_stream_id: 4,
flags: set_flags(:headers, [:end_headers])
),
push_promise(
stream_id: stream_id,
hbf: promised_headers_hbf,
promised_stream_id: 6,
flags: set_flags(:headers, [:end_headers])
),
headers(
stream_id: stream_id,
hbf: normal_headers_hbf,
flags: set_flags(:headers, [:end_stream, :end_headers])
)
])
assert [
{:push_promise, ^ref, promised_ref1, _},
{:push_promise, ^ref, _promised_ref2, _},
{:status, ^ref, 200},
{:headers, ^ref, []},
{:done, ^ref}
] = responses
assert_recv_frames [rst_stream(stream_id: ^stream_id, error_code: :no_error)]
# Here we send headers for the two promised streams. Note that neither of the
# header frames have the END_STREAM flag set otherwise we close the streams and
# they don't count towards the open stream count.
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
headers(
stream_id: 4,
hbf: normal_headers_hbf,
flags: set_flags(:headers, [:end_headers])
),
headers(
stream_id: 6,
hbf: normal_headers_hbf,
flags: set_flags(:headers, [:end_headers])
)
])
assert [{:status, ^promised_ref1, 200}, {:headers, ^promised_ref1, []}] = responses
assert_recv_frames [
rst_stream(stream_id: 6, error_code: :refused_stream)
]
assert HTTP2.open?(conn)
end
end
describe "misbehaving server" do
test "sends a frame with the wrong stream id", %{conn: conn} do
{conn, _ref} = open_request(conn)
assert_recv_frames [headers()]
data = IO.iodata_to_binary(encode_raw(_ping = 0x06, 0x00, 3, <<0::64>>))
assert {:error, %HTTP2{} = conn, error, []} = HTTP2.stream(conn, {:ssl, conn.socket, data})
assert_http2_error error, {:protocol_error, debug_data}
assert debug_data =~ "frame :ping only allowed at the connection level"
assert_recv_frames [goaway(error_code: :protocol_error)]
refute HTTP2.open?(conn)
end
test "sends a frame with a bad size", %{conn: conn} do
{conn, _ref} = open_request(conn)
assert_recv_frames [headers()]
# Payload should be 8 bytes long, but is empty here.
data = IO.iodata_to_binary(encode_raw(_ping = 0x06, 0x00, 3, <<>>))
assert {:error, %HTTP2{} = conn, error, []} = HTTP2.stream(conn, {:ssl, conn.socket, data})
assert_http2_error error, {:frame_size_error, debug_data}
assert debug_data =~ "error with size of frame: :ping"
assert_recv_frames [goaway(error_code: :frame_size_error)]
refute HTTP2.open?(conn)
end
test "sends a frame on a stream with a stream ID bigger than client's biggest",
%{conn: conn} do
{conn, _ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
bad_stream_id = stream_id + 10
assert {:error, %HTTP2{} = conn, error, []} =
stream_frames(conn, [
{:headers, bad_stream_id, [{":status", "200"}], [:end_headers]}
])
assert_http2_error error, {:protocol_error, debug_data}
assert debug_data =~ "frame with stream ID #{bad_stream_id} has not been opened yet"
assert_recv_frames [goaway(error_code: :protocol_error)]
refute HTTP2.open?(conn)
end
end
describe "flow control" do
test "client sends data that goes over window size of a stream/connection when streaming",
%{conn: conn} do
# First we decrease the connection size by 5 bytes, so that the connection window
# size is smaller than the stream window size.
{conn, _ref} = open_request(conn, "XXXXX")
assert_recv_frames [headers(), data()]
# Then we open a streaming request.
{conn, ref} = open_request(conn, :stream)
assert_recv_frames [headers()]
data = :binary.copy(<<0>>, HTTP2.get_window_size(conn, {:request, ref}) + 1)
assert {:error, %HTTP2{} = conn, error} = HTTP2.stream_request_body(conn, ref, data)
assert_http2_error error, {:exceeds_window_size, :request, window_size}
assert is_integer(window_size) and window_size >= 0
data = :binary.copy(<<0>>, HTTP2.get_window_size(conn, :connection) + 1)
assert {:error, %HTTP2{} = conn, error} = HTTP2.stream_request_body(conn, ref, data)
assert_http2_error error, {:exceeds_window_size, :connection, window_size}
assert is_integer(window_size) and window_size >= 0
assert HTTP2.open?(conn)
end
@tag server_settings: [initial_window_size: 1]
test "if client's request goes over window size, no HEADER frames are sent", %{conn: conn} do
expected_window_size = HTTP2.get_window_size(conn, :connection)
assert {:error, %HTTP2{} = conn, error} = HTTP2.request(conn, "GET", "/", [], "XX")
assert_http2_error error, {:exceeds_window_size, :request, 1}
assert HTTP2.open?(conn)
assert HTTP2.open_request_count(conn) == 0
assert HTTP2.get_window_size(conn, :connection) == expected_window_size
refute_receive {:ssl, _, _}
end
test "server sends a WINDOW_UPDATE with too big of a size on a stream",
%{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
window_update(
stream_id: stream_id,
window_size_increment: _max_window_size = 2_147_483_647
)
])
assert [{:error, ^ref, error}] = responses
assert_http2_error error, {:flow_control_error, debug_data}
assert debug_data =~ "window size too big"
assert_recv_frames [rst_stream(stream_id: ^stream_id, error_code: :flow_control_error)]
assert HTTP2.open?(conn)
end
test "server sends a WINDOW_UPDATE with too big of a size on the connection level",
%{conn: conn} do
{conn, _ref} = open_request(conn)
assert_recv_frames [headers(stream_id: _stream_id)]
assert {:error, %HTTP2{} = conn, error, []} =
stream_frames(conn, [
window_update(
stream_id: 0,
window_size_increment: _max_window_size = 2_147_483_647
)
])
assert_http2_error error, {:flow_control_error, debug_data}
assert debug_data =~ "window size too big"
assert_recv_frames [goaway(error_code: :flow_control_error)]
refute HTTP2.open?(conn)
end
test "server violates client's max frame size", %{conn: conn} do
{conn, _ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:error, %HTTP2{} = conn, error, []} =
stream_frames(conn, [
data(stream_id: stream_id, data: :binary.copy(<<0>>, 100_000))
])
assert_http2_error error, {:frame_size_error, debug_data}
assert debug_data =~ "frame payload exceeds connection's max frame size"
assert_recv_frames [goaway(error_code: :frame_size_error)]
refute HTTP2.open?(conn)
end
test "client splits data automatically based on server's max frame size",
%{conn: conn} do
max_frame_size = HTTP2.get_server_setting(conn, :max_frame_size)
body = :binary.copy(<<0>>, max_frame_size + 1)
{conn, _ref} = open_request(conn, body)
assert_recv_frames [
headers(stream_id: stream_id),
data(stream_id: stream_id, flags: flags1, data: data1),
data(stream_id: stream_id, flags: flags2, data: data2)
]
assert flags1 == set_flags(:data, [])
assert data1 == :binary.copy(<<0>>, max_frame_size)
assert flags2 == set_flags(:data, [:end_stream])
assert data2 == <<0>>
assert HTTP2.open?(conn)
end
test "window size of the connection and single requests can be read with get_window_size/2",
%{conn: conn} do
{conn, ref} = open_request(conn, :stream)
initial_conn_window_size = HTTP2.get_window_size(conn, :connection)
initial_request_window_size = HTTP2.get_window_size(conn, {:request, ref})
assert is_integer(initial_conn_window_size) and initial_conn_window_size > 0
assert is_integer(initial_request_window_size) and initial_request_window_size > 0
body_chunk = "hello"
{:ok, conn} = HTTP2.stream_request_body(conn, ref, body_chunk)
new_conn_window_size = HTTP2.get_window_size(conn, :connection)
new_request_window_size = HTTP2.get_window_size(conn, {:request, ref})
assert new_conn_window_size == initial_conn_window_size - byte_size(body_chunk)
assert new_request_window_size == initial_request_window_size - byte_size(body_chunk)
end
test "if the server sends an empty DATA frame, we don't send WINDOW_UPDATE back",
%{conn: conn} do
{conn, ref} = open_request(conn, :stream)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:ok, %HTTP2{} = _conn, responses} =
stream_frames(conn, [
data(stream_id: stream_id, data: "", flags: set_flags(:data, [:end_stream]))
])
assert_recv_frames [rst_stream(stream_id: ^stream_id, error_code: :no_error)]
assert responses == [{:data, ref, ""}, {:done, ref}]
end
test "get_window_size/2 raises if the request is not found", %{conn: conn} do
assert_raise ArgumentError, ~r/request with request reference .+ was not found/, fn ->
HTTP2.get_window_size(conn, {:request, make_ref()})
end
end
end
describe "settings" do
test "put_settings/2 can be used to send settings to server", %{conn: conn} do
{:ok, conn} = HTTP2.put_settings(conn, max_concurrent_streams: 123)
assert_recv_frames [settings() = frame]
assert settings(frame, :params) == [max_concurrent_streams: 123]
assert settings(frame, :flags) == set_flags(:settings, [])
assert {:ok, %HTTP2{} = conn, []} =
stream_frames(conn, [
settings(flags: set_flags(:settings, [:ack]), params: [])
])
assert HTTP2.open?(conn)
end
test "put_settings/2 fails with unknown settings", %{conn: conn} do
assert_raise ArgumentError, ":header_table_size must be an integer, got: :oops", fn ->
HTTP2.put_settings(conn, header_table_size: :oops)
end
assert_raise ArgumentError, "unknown setting parameter :oops", fn ->
HTTP2.put_settings(conn, oops: 1)
end
end
test "get_server_setting/2 can be used to read server settings", %{conn: conn} do
assert HTTP2.get_server_setting(conn, :max_concurrent_streams) == 100
assert HTTP2.get_server_setting(conn, :enable_push) == true
assert HTTP2.get_server_setting(conn, :enable_connect_protocol) == false
end
test "get_server_setting/2 fails with unknown settings", %{conn: conn} do
assert_raise ArgumentError, "unknown HTTP/2 setting: :unknown", fn ->
HTTP2.get_server_setting(conn, :unknown)
end
end
test "server can update the initial window size and affect open streams",
%{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers()]
{:ok, %HTTP2{} = conn, []} =
stream_frames(conn, [settings(params: [initial_window_size: 100])])
assert HTTP2.get_server_setting(conn, :initial_window_size) == 100
# This stream is half_closed_local, so there's not point in updating its window size since
# we won't send anything on it anymore.
assert HTTP2.get_window_size(conn, {:request, ref}) == 65535
assert_recv_frames [settings(flags: flags)]
assert flags == set_flags(:settings, [:ack])
end
# TODO: We're skipping this test for now because we need to find a good way
# to assert on the errors that might be returned by HTTP2.connect/4. Right
# now the connect/4 calls happens when setting up the connection to the test
# server and we assert that a successful connection is established in that code.
# An example of an invalid setting is "max_frame_size: 1".
@tag :skip
test "protocol error when server sends an invalid setting"
end
describe "stream_request_body/3" do
test "streaming a request", %{conn: conn} do
{conn, ref} = open_request(conn, :stream)
assert {:ok, conn} = HTTP2.stream_request_body(conn, ref, "foo")
assert {:ok, conn} = HTTP2.stream_request_body(conn, ref, "bar")
assert {:ok, conn} = HTTP2.stream_request_body(conn, ref, :eof)
assert_recv_frames [
headers(stream_id: stream_id) = headers,
data(stream_id: stream_id, data: "foo") = data1,
data(stream_id: stream_id, data: "bar") = data2,
data(stream_id: stream_id, data: "") = data3
]
refute flag_set?(headers(headers, :flags), :headers, :end_stream)
refute flag_set?(data(data1, :flags), :data, :end_stream)
refute flag_set?(data(data2, :flags), :data, :end_stream)
assert flag_set?(data(data3, :flags), :data, :end_stream)
assert HTTP2.open?(conn)
end
test "streaming a request on a request that wasn't opened with :stream errors out",
%{conn: conn} do
{conn, ref} = open_request(conn)
assert HTTP2.open_request_count(conn) == 1
expected_window_size = HTTP2.get_window_size(conn, :connection)
assert {:error, %HTTP2{} = conn, error} = HTTP2.stream_request_body(conn, ref, "foo")
assert_http2_error error, :request_is_not_streaming
assert HTTP2.get_window_size(conn, :connection) == expected_window_size
assert HTTP2.open_request_count(conn) == 1
assert HTTP2.open?(conn)
end
test "streaming to an unknown request returns an error", %{conn: conn} do
assert HTTP2.open_request_count(conn) == 0
expected_window_size = HTTP2.get_window_size(conn, :connection)
assert {:error, %HTTP2{} = conn, error} = HTTP2.stream_request_body(conn, make_ref(), "x")
assert_http2_error error, :unknown_request_to_stream
assert HTTP2.get_window_size(conn, :connection) == expected_window_size
assert HTTP2.open_request_count(conn) == 0
assert HTTP2.open?(conn)
end
test "streaming a request with trailing headers", %{conn: conn} do
{conn, ref} = open_request(conn, :stream)
# Using 1000 headers will go over the default max_frame_size so that the
# HEADERS frame for the trailing headers will also be split into a HEADERS
# plus CONTINUATION frames.
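# (The default SETTINGS_MAX_FRAME_SIZE is 16_384 bytes, so ~1000 encoded header
# fields comfortably exceed what fits in a single frame.)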
trailing_headers = for index <- 1..1000, do: {"my-trailing-#{index}", "value"}
assert {:ok, _conn} = HTTP2.stream_request_body(conn, ref, {:eof, trailing_headers})
assert_recv_frames [
headers(stream_id: stream_id) = headers,
headers(stream_id: stream_id, hbf: trailing_hbf1) = trailing_headers1,
continuation(stream_id: stream_id, hbf: trailing_hbf2) = trailing_headers2
]
assert flag_set?(headers(headers, :flags), :headers, :end_headers)
refute flag_set?(headers(headers, :flags), :headers, :end_stream)
refute flag_set?(headers(trailing_headers1, :flags), :headers, :end_headers)
assert flag_set?(headers(trailing_headers1, :flags), :headers, :end_stream)
assert flag_set?(continuation(trailing_headers2, :flags), :continuation, :end_headers)
assert server_decode_headers(trailing_hbf1 <> trailing_hbf2) == trailing_headers
end
test "unallowed trailing headers cause an error", %{conn: conn} do
{conn, ref} = open_request(conn, :stream)
assert HTTP2.open_request_count(conn) == 1
expected_window_size = HTTP2.get_window_size(conn, :connection)
trailing_headers = [{"x-trailing", "value"}, {"Host", "example.com"}]
assert {:error, %HTTP2{} = _conn, error} =
HTTP2.stream_request_body(conn, ref, {:eof, trailing_headers})
assert_http2_error error, {:unallowed_trailing_header, {"host", "example.com"}}
assert HTTP2.get_window_size(conn, :connection) == expected_window_size
assert HTTP2.open_request_count(conn) == 1
end
test "streaming to a closed connection returns an error", %{conn: conn} do
{conn, ref} = open_request(conn, :stream)
{:ok, closed_conn} = HTTP2.close(conn)
assert {:error, conn, error} = HTTP2.stream_request_body(closed_conn, ref, :eof)
assert_http2_error error, :closed
refute HTTP2.open?(conn)
end
test "streaming to a connection that got GOAWAY returns an error", %{conn: conn} do
{conn, ref} = open_request(conn, :stream)
assert_recv_frames [headers(stream_id: stream_id)]
assert {:error, conn, _goaway_error, _responses} =
stream_frames(conn, [
goaway(
last_stream_id: stream_id,
error_code: :protocol_error,
debug_data: "debug data"
)
])
assert {:error, _conn, error} = HTTP2.stream_request_body(conn, ref, :eof)
assert_http2_error error, :closed_for_writing
end
end
describe "open_request_count/1" do
test "returns the number of client-initiated open streams", %{conn: conn} do
assert HTTP2.open_request_count(conn) == 0
{conn, _ref} = open_request(conn)
assert HTTP2.open_request_count(conn) == 1
{conn, _ref} = open_request(conn)
assert HTTP2.open_request_count(conn) == 2
assert_recv_frames [headers(stream_id: stream_id1), headers()]
assert {:ok, %HTTP2{} = conn, _responses} =
stream_frames(conn, [
headers(
stream_id: stream_id1,
hbf: server_encode_headers([{":status", "200"}]),
flags: set_flags(:headers, [:end_headers, :end_stream])
)
])
assert HTTP2.open_request_count(conn) == 1
end
end
describe "connection modes" do
@tag connect_options: [mode: :passive]
test "starting a connection with :passive mode and using recv/3", %{conn: conn} do
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
data =
server_encode_frames([
headers(
stream_id: stream_id,
hbf: server_encode_headers([{":status", "200"}]),
flags: set_flags(:headers, [:end_headers, :end_stream])
)
])
:ok = :ssl.send(server_get_socket(), data)
assert {:ok, conn, responses} = HTTP2.recv(conn, 0, 100)
assert responses == [
{:status, ref, 200},
{:headers, ref, []},
{:done, ref}
]
assert HTTP2.open?(conn)
end
test "changing the mode of a connection with set_mode/2", %{conn: conn} do
assert_raise ArgumentError, ~r"^can't use recv/3", fn ->
HTTP2.recv(conn, 0, 100)
end
assert {:ok, %HTTP2{} = conn} = HTTP2.set_mode(conn, :passive)
{conn, ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
data =
server_encode_frames([
headers(
stream_id: stream_id,
hbf: server_encode_headers([{":status", "200"}]),
flags: set_flags(:headers, [:end_headers, :end_stream])
)
])
:ok = :ssl.send(server_get_socket(), data)
assert {:ok, conn, responses} = HTTP2.recv(conn, 0, 100)
assert responses == [
{:status, ref, 200},
{:headers, ref, []},
{:done, ref}
]
assert {:ok, %HTTP2{} = conn} = HTTP2.set_mode(conn, :active)
assert_raise ArgumentError, ~r"^can't use recv/3", fn ->
HTTP2.recv(conn, 0, 100)
end
assert HTTP2.open?(conn)
end
end
describe "ping" do
test "if we send a PING we then get a :pong reply", %{conn: conn} do
assert {:ok, conn, ref} = HTTP2.ping(conn)
assert_recv_frames [ping(opaque_data: opaque_data)]
assert {:ok, %HTTP2{} = conn, responses} =
stream_frames(conn, [
ping(flags: set_flags(:ping, [:ack]), opaque_data: opaque_data)
])
assert responses == [{:pong, ref}]
assert HTTP2.open?(conn)
end
test "if the server sends a PING we reply automatically", %{conn: conn} do
opaque_data = :binary.copy(<<0>>, 8)
assert {:ok, %HTTP2{}, []} = stream_frames(conn, [ping(opaque_data: opaque_data)])
assert_recv_frames [ping(opaque_data: ^opaque_data)]
end
test "if the server sends a PING ack but no PING requests are pending we emit a warning",
%{conn: conn} do
opaque_data = :binary.copy(<<0>>, 8)
assert capture_log(fn ->
assert {:ok, %HTTP2{}, []} =
stream_frames(conn, [
ping(opaque_data: opaque_data, flags: set_flags(:ping, [:ack]))
])
end) =~ "Received PING ack but no PING requests are pending"
end
test "if the server sends a PING ack but no PING requests match we emit a warning",
%{conn: conn} do
assert {:ok, conn, _ref} = HTTP2.ping(conn, <<1, 2, 3, 4, 5, 6, 7, 8>>)
opaque_data = <<1, 2, 3, 4, 5, 6, 7, 0>>
assert capture_log(fn ->
assert {:ok, %HTTP2{}, []} =
stream_frames(conn, [
ping(opaque_data: opaque_data, flags: set_flags(:ping, [:ack]))
])
end) =~ "Received PING ack that doesn't match next PING request in the queue"
end
end
describe "stream priority" do
test "PRIORITY frames are ignored", %{conn: conn} do
{conn, _ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
assert capture_log(fn ->
assert {:ok, %HTTP2{} = conn, []} =
stream_frames(conn, [
priority(
stream_id: stream_id,
exclusive?: false,
stream_dependency: 1,
weight: 1
)
])
assert HTTP2.open?(conn)
end) =~ "Ignoring PRIORITY frame"
end
end
describe "controlling process" do
test "changing the controlling process with controlling_process/2", %{conn: conn} do
parent = self()
ref = make_ref()
new_pid =
spawn_link(fn ->
receive do
message ->
send(parent, {ref, message})
Process.sleep(:infinity)
end
end)
{conn, request_ref} = open_request(conn)
assert_recv_frames [headers(stream_id: stream_id)]
data =
server_encode_frames([
headers(
stream_id: stream_id,
hbf: server_encode_headers([{":status", "200"}]),
flags: set_flags(:headers, [:end_headers, :end_stream])
)
])
{:ok, %HTTP2{} = conn} = HTTP2.controlling_process(conn, new_pid)
:ok = :ssl.send(server_get_socket(), data)
assert_receive {^ref, message}
assert {:ok, %HTTP2{} = conn, responses} = HTTP2.stream(conn, message)
assert responses == [
{:status, request_ref, 200},
{:headers, request_ref, []},
{:done, request_ref}
]
assert HTTP2.open?(conn)
end
end
@pdict_key {__MODULE__, :http2_test_server}
defp start_connection(context) do
default_options = [transport_opts: [verify: :verify_none]]
options = Keyword.merge(default_options, context[:connect_options] || [])
{conn, server} = TestServer.connect(options, context[:server_settings] || [])
Process.put(@pdict_key, server)
[conn: conn]
end
defp recv_next_frames(n) do
server = Process.get(@pdict_key)
TestServer.recv_next_frames(server, n)
end
defp stream_frames(conn, frames) do
data = server_encode_frames(frames)
HTTP2.stream(conn, {:ssl, conn.socket, data})
end
defp server_get_socket() do
server = Process.get(@pdict_key)
TestServer.get_socket(server)
end
defp server_encode_frames(frames) do
server = Process.get(@pdict_key)
{server, data} = TestServer.encode_frames(server, frames)
Process.put(@pdict_key, server)
data
end
defp server_encode_headers(headers) do
server = Process.get(@pdict_key)
{server, hbf} = TestServer.encode_headers(server, headers)
Process.put(@pdict_key, server)
hbf
end
defp server_decode_headers(hbf) do
server = Process.get(@pdict_key)
{server, headers} = TestServer.decode_headers(server, hbf)
Process.put(@pdict_key, server)
headers
end
defp open_request(conn, body \\ nil) do
assert {:ok, %HTTP2{} = conn, ref} = HTTP2.request(conn, "GET", "/", [], body)
assert is_reference(ref)
{conn, ref}
end
end
| 34.631712 | 98 | 0.579334 |
73d4d371f8c233eb4996d101828a9610ebf47de0 | 1,901 | exs | Elixir | test/stampscale_test.exs | byjpr/Shopify-Plug | 683ae6670d72acf42a7fc411067bbe20aaae4b81 | [
"MIT"
] | 1 | 2019-03-07T11:30:54.000Z | 2019-03-07T11:30:54.000Z | test/stampscale_test.exs | byjord/Shopify-Plug | 683ae6670d72acf42a7fc411067bbe20aaae4b81 | [
"MIT"
] | 1 | 2018-09-25T18:05:59.000Z | 2018-09-25T18:05:59.000Z | test/stampscale_test.exs | byjord/Shopify-Plug | 683ae6670d72acf42a7fc411067bbe20aaae4b81 | [
"MIT"
] | null | null | null | defmodule ShopifyPlug.StampscaleTest do
use ExUnit.Case
import PlugHelper
test "Expired timestamp" do
%{url: "/stampscale", query: "extra=1&extra=2&shop=shop-name.myshopify.com&path_prefix=%2Fapps%2Fawesome_reviews×tamp=1317327555&signature=a9718877bea71c2484f91608a7eaea1532bdf71f5c56825065fa4ccabe549ef3"}
|> make_request()
|> fetch_all()
|> ShopifyPlug.StampScale.call([])
|> assert_unauthorized()
end
test "No timestamp param" do
%{url: "/stampscale", query: "extra=1&extra=2&shop=shop-name.myshopify.com&path_prefix=%2Fapps%2Fawesome_reviews"}
|> make_request()
|> fetch_all()
|> ShopifyPlug.StampScale.call([])
|> assert_unauthorized()
end
test "No params" do
%{url: "/stampscale", query: ""}
|> make_request()
|> fetch_all()
|> ShopifyPlug.StampScale.call([])
|> assert_unauthorized()
end
test "No call/init options" do
%{url: "/stampscale", query: "extra=1&extra=2&shop=shop-name.myshopify.com&path_prefix=%2Fapps%2Fawesome_reviews×tamp=1317327555&signature=a9718877bea71c2484f91608a7eaea1532bdf71f5c56825065fa4ccabe549ef3"}
|> make_request()
|> fetch_all()
|> ShopifyPlug.StampScale.call()
|> assert_unauthorized()
end
test "No init options" do
init = ShopifyPlug.StampScale.init()
assert init == []
end
test "init options" do
init = ShopifyPlug.StampScale.init([sample: "options"])
assert init == [sample: "options"]
end
test "Time now" do
timestamp = DateTime.utc_now()
|> DateTime.to_unix(:millisecond)
%{url: "/stampscale", query: "extra=1&extra=2&shop=shop-name.myshopify.com&path_prefix=%2Fapps%2Fawesome_reviews×tamp=#{timestamp}&signature=a9718877bea71c2484f91608a7eaea1532bdf71f5c56825065fa4ccabe549ef3"}
|> make_request()
|> fetch_all()
|> ShopifyPlug.StampScale.call([])
|> assert_authorized()
end
end
| 32.775862 | 216 | 0.705418 |
73d52c58e222b1807ba1ea8db9bfef3f8cff96ea | 211 | exs | Elixir | test/honeydew_web/controllers/page_controller_test.exs | elixir-cqrs/honeydew | 888f86c829187eaca28ef1af69a40a337e46630a | [
"MIT"
] | null | null | null | test/honeydew_web/controllers/page_controller_test.exs | elixir-cqrs/honeydew | 888f86c829187eaca28ef1af69a40a337e46630a | [
"MIT"
] | null | null | null | test/honeydew_web/controllers/page_controller_test.exs | elixir-cqrs/honeydew | 888f86c829187eaca28ef1af69a40a337e46630a | [
"MIT"
] | null | null | null | defmodule HoneydewWeb.PageControllerTest do
use HoneydewWeb.ConnCase
test "GET /", %{conn: conn} do
conn = get(conn, "/")
assert html_response(conn, 200) =~ "Honeydew · Phoenix Framework"
end
end
| 23.444444 | 69 | 0.691943 |
73d537d87ea5a8bef440958188b9a4c50b189455 | 410 | ex | Elixir | apps/rtc/lib/rtc/conduit/subscriber.ex | michaeljguarino/forge | 50ee583ecb4aad5dee4ef08fce29a8eaed1a0824 | [
"Apache-2.0"
] | 59 | 2021-09-16T19:29:39.000Z | 2022-03-31T20:44:24.000Z | apps/rtc/lib/rtc/conduit/subscriber.ex | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 111 | 2021-08-15T09:56:37.000Z | 2022-03-31T23:59:32.000Z | apps/rtc/lib/rtc/conduit/subscriber.ex | svilenkov/plural | ac6c6cc15ac4b66a3b5e32ed4a7bee4d46d1f026 | [
"Apache-2.0"
] | 4 | 2021-12-13T09:43:01.000Z | 2022-03-29T18:08:44.000Z | defmodule Rtc.Conduit.Subscriber do
use Conduit.Subscriber
import Conduit.Message
def process(message, _opts) do
case publish_event(message.body) do
:ok -> ack(message)
_ -> nack(message)
end
end
def publish_event(event) do
with {object, topics} <- Rtc.Channels.Negotiator.negotiate(event),
do: Absinthe.Subscription.publish(RtcWeb.Endpoint, object, topics)
end
end
| 24.117647 | 72 | 0.707317 |
73d54b1d8b285db5d32b569a8b89072d68818bc2 | 1,696 | ex | Elixir | v5 - separate VPP server (coupling & cohesion)/lib/virtual_power_plant.ex | s3cur3/genserver_architecture | e33dcc0bd62507132d15744990775178afcc88f4 | [
"MIT"
] | 3 | 2021-10-15T18:19:21.000Z | 2022-02-07T19:02:44.000Z | v5 - separate VPP server (coupling & cohesion)/lib/virtual_power_plant.ex | s3cur3/genserver_architecture | e33dcc0bd62507132d15744990775178afcc88f4 | [
"MIT"
] | null | null | null | v5 - separate VPP server (coupling & cohesion)/lib/virtual_power_plant.ex | s3cur3/genserver_architecture | e33dcc0bd62507132d15744990775178afcc88f4 | [
"MIT"
] | null | null | null | defmodule VirtualPowerPlant do
@moduledoc """
A "virtual power plant" is a collection of grid-connected batteries
that we can control remotely.
We ask these batteries to either pump power onto the grid to meet
increased demand, or we ask them to absorb and store power that was
generated in excess of demand (e.g., by solar installations).
"""
defstruct batteries: []
@doc """
Associates this battery with our virtual power plant, allowing us to
control it to meet grid needs.
"""
def add_battery(%__MODULE__{} = vpp, %Battery{} = battery) do
%{vpp | batteries: [battery | vpp.batteries]}
end
@doc "The collection of battery structs we control"
def batteries(%__MODULE__{} = vpp) do
vpp.batteries
end
@doc """
The total wattage our virtual power plant is contributing to the grid
(positive values) or absorbing off the grid (negative values).
"""
def current_power(%__MODULE__{batteries: battery_collection}) do
battery_collection
|> Enum.map(& &1.current_power_watts)
|> Enum.sum()
end
@doc """
Attempt to change our batteries' state to contribute or absorb this many watts to the grid.
Overrides any previous requests.
"""
def set_power(%__MODULE__{batteries: battery_collection} = vpp, needed_watts) do
{updated_batteries, _unmet_need} =
Enum.reduce(battery_collection, {[], needed_watts}, fn battery, {updated_batteries, need} ->
updated_battery = Battery.update_current_power(battery, need)
updated_need = need - updated_battery.current_power_watts
{[updated_battery | updated_batteries], updated_need}
end)
%{vpp | batteries: updated_batteries}
end
end
| 33.92 | 98 | 0.714033 |
73d580eb187a2196a00d70b305453a12be7e39ad | 1,430 | exs | Elixir | mix.exs | gen1321/simple_graphql_client | 50b8240475d14a838a52734b9f22aa566f533dde | [
"MIT"
] | 15 | 2018-10-14T23:30:56.000Z | 2021-02-20T21:49:42.000Z | mix.exs | gen1321/simple_graphql_client | 50b8240475d14a838a52734b9f22aa566f533dde | [
"MIT"
] | 1 | 2019-04-29T06:29:30.000Z | 2019-08-05T20:26:52.000Z | mix.exs | gen1321/simple_graphql_client | 50b8240475d14a838a52734b9f22aa566f533dde | [
"MIT"
] | 4 | 2019-03-14T23:07:49.000Z | 2019-08-05T18:34:37.000Z | defmodule SimpleGraphqlClient.MixProject do
use Mix.Project
@version "0.2.1"
@github_url "https://github.com/gen1321/simple_graphql_client"
def project do
[
app: :simple_graphql_client,
description: "Elixir graphql client",
start_permanent: Mix.env() == :prod,
version: @version,
elixir: "~> 1.3",
package: package(),
docs: docs(),
source_url: @github_url,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:httpoison, "~> 1.3.1"},
{:poison, "~> 3.1"},
{:websockex, "~> 0.4.0"},
{:dialyxir, "~> 1.0.0-rc.3", only: [:dev], runtime: false},
{:credo, "~> 0.10.0", only: [:dev, :test], runtime: false},
{:elixir_uuid, "~> 1.2"},
{:mock, "~> 0.3.0", only: :test},
{:ex_doc, "~> 0.19", only: :dev, runtime: false}
]
end
defp package do
[
files: [
"lib",
"mix.exs",
"README.md",
"LICENSE"
],
links: %{"github" => @github_url},
maintainers: ["Boris Beginin <[email protected]>"],
licenses: ["MIT"]
]
end
defp docs do
[
source_ref: "v#{@version}",
main: "SimpleGraphqlClient",
extras: ["README.md", "CHANGELOG.md"]
]
end
end
| 22.698413 | 65 | 0.537762 |
73d5830a1fa616075a917a5542a61756785369f8 | 459 | ex | Elixir | lib/sise/mcast/supervisor.ex | aytchell/sise | 69d4a4598a4742a9f5e59fcb101e7db5582d3fbe | [
"Apache-2.0"
] | null | null | null | lib/sise/mcast/supervisor.ex | aytchell/sise | 69d4a4598a4742a9f5e59fcb101e7db5582d3fbe | [
"Apache-2.0"
] | null | null | null | lib/sise/mcast/supervisor.ex | aytchell/sise | 69d4a4598a4742a9f5e59fcb101e7db5582d3fbe | [
"Apache-2.0"
] | null | null | null | defmodule Sise.MCast.Supervisor do
# SPDX-License-Identifier: Apache-2.0
@moduledoc false
use Supervisor
def start_link(opts) do
Supervisor.start_link(__MODULE__, :ok, opts)
end
@impl true
def init(:ok) do
children = [
{Sise.MCast.Listener, name: Sise.MCast.Supervisor},
{Task.Supervisor, name: Sise.MCast.ProcessorSupervisor, strategy: :one_for_one}
]
Supervisor.init(children, strategy: :one_for_one)
end
end
| 21.857143 | 85 | 0.703704 |
73d59900db55112e1daceb59189214387e6909df | 2,653 | ex | Elixir | lib/brando/users/users.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | 1 | 2020-04-26T09:53:02.000Z | 2020-04-26T09:53:02.000Z | lib/brando/users/users.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | 198 | 2019-08-20T16:16:07.000Z | 2020-07-03T15:42:07.000Z | lib/brando/users/users.ex | univers-agency/brando | 69c3c52498a3f64518da3522cd9f27294a52cc68 | [
"Apache-2.0"
] | null | null | null | defmodule Brando.Users do
@moduledoc """
Context for Users.
"""
use BrandoAdmin, :context
use Brando.Query
alias Brando.Users.User
alias Brando.Users.UserToken
alias Brando.Utils
import Ecto.Query
@type user :: User.t()
query :list, User do
fn q -> from(t in q) end
end
filters User do
fn
{:active, active}, q -> from t in q, where: t.active == ^active
{:name, name}, q -> from t in q, where: ilike(t.name, ^"%#{name}%")
{:email, email}, q -> from t in q, where: ilike(t.email, ^"%#{email}%")
end
end
query :single, User do
fn q -> from(t in q) end
end
matches User do
fn
{:id, id}, q -> from t in q, where: t.id == ^id
{:email, email}, q -> from t in q, where: t.email == ^email
{:password, password}, q -> from t in q, where: t.password == ^password
{:active, active}, q -> from t in q, where: t.active == ^active
{field, value}, q -> from t in q, where: field(t, ^field) == ^value
end
end
mutation :create, User
mutation :update, User
mutation :delete, User
@doc """
Bumps `user`'s `last_login` to current time.
"""
@spec set_last_login(user) :: {:ok, user}
def set_last_login(user) do
current_time = NaiveDateTime.truncate(NaiveDateTime.utc_now(), :second)
Utils.Schema.update_field(user, last_login: current_time)
end
@doc """
Set user status
"""
def set_active(user_id, status, user) do
update_user(user_id, %{active: status}, user)
end
@doc """
Checks if `user` has access to admin area.
"""
@spec can_login?(user) :: boolean
def can_login?(user) do
{:ok, role} = Brando.Type.Role.dump(user.role)
(role > 0 && true) || false
end
@doc """
Generates a session token.
"""
def generate_user_session_token(user) do
{token, user_token} = UserToken.build_session_token(user)
Brando.repo().insert!(user_token)
token
end
@doc """
Gets the user with the given signed token.
"""
def get_user_by_session_token(token) do
{:ok, query} = UserToken.verify_session_token_query(token)
query
|> Brando.repo().one()
|> Brando.repo().preload(:avatar)
end
@doc """
Deletes the signed token with the given context.
"""
def delete_session_token(token) do
Brando.repo().delete_all(UserToken.token_and_context_query(token, "session"))
:ok
end
def build_token(id) do
Phoenix.Token.sign(Brando.endpoint(), "user_token", id)
end
def verify_token(token) do
Phoenix.Token.verify(Brando.endpoint(), "user_token", token, max_age: 86_400)
end
def reset_user_password(_user, _attrs) do
raise "TODO"
end
end
| 24.33945 | 81 | 0.63513 |
73d59f5c39f6132878a371e1baf9809b096d85de | 8,742 | exs | Elixir | test/hexpm_web/controllers/api/package_controller_test.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 691 | 2017-03-08T09:15:45.000Z | 2022-03-23T22:04:47.000Z | test/hexpm_web/controllers/api/package_controller_test.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 491 | 2017-03-07T12:58:42.000Z | 2022-03-29T23:32:54.000Z | test/hexpm_web/controllers/api/package_controller_test.exs | Benjamin-Philip/hexpm | 6f38244f81bbabd234c660f46ea973849ba77a7f | [
"Apache-2.0"
] | 200 | 2017-03-12T23:03:39.000Z | 2022-03-05T17:55:52.000Z | defmodule HexpmWeb.API.PackageControllerTest do
use HexpmWeb.ConnCase, async: true
alias Hexpm.Repository.Packages
setup do
user = insert(:user)
unauthorized_user = insert(:user)
repository = insert(:repository)
package1 =
insert(
:package,
inserted_at: ~N[2030-01-01 00:00:00]
)
package2 = insert(:package, updated_at: ~N[2030-01-01 00:00:00])
package3 = insert(:package, repository_id: repository.id, updated_at: ~N[2030-01-01 00:00:00])
package4 = insert(:package)
insert(:release,
package: package1,
version: "0.0.1",
has_docs: true,
meta: build(:release_metadata, app: package1.name)
)
insert(:release, package: package2, version: "0.0.1", has_docs: true)
insert(:release, package: package3, version: "0.0.1", has_docs: true)
insert(
:release,
package: package4,
version: "0.0.1",
retirement: %{reason: "other", message: "not backward compatible"}
)
insert(:release, package: package4, version: "1.0.0")
insert(:organization_user, organization: repository.organization, user: user)
%{
package1: Packages.preload(package1),
package2: Packages.preload(package2),
package3: Packages.preload(package3),
package4: Packages.preload(package4),
repository: repository,
user: user,
unauthorized_user: unauthorized_user
}
end
describe "GET /api/packages" do
test "multiple packages", %{package1: package1} do
conn = get(build_conn(), "api/packages")
result = json_response(conn, 200)
assert length(result) == 3
releases = List.first(result)["releases"]
for release <- releases do
assert length(Map.keys(release)) == 4
assert Map.has_key?(release, "url")
assert Map.has_key?(release, "version")
assert Map.has_key?(release, "has_docs")
end
conn = get(build_conn(), "api/packages?search=#{package1.name}")
[package] = json_response(conn, 200)
[release] = package["releases"]
assert release["has_docs"] == true
conn = get(build_conn(), "api/packages?search=name%3A#{package1.name}*")
assert [_] = json_response(conn, 200)
conn = get(build_conn(), "api/packages?page=1")
assert [_, _, _] = json_response(conn, 200)
conn = get(build_conn(), "api/packages?page=2")
assert [] = json_response(conn, 200)
end
test "sort order", %{package1: package1, package2: package2} do
conn = get(build_conn(), "api/packages?sort=updated_at")
result = json_response(conn, 200)
assert hd(result)["name"] == package2.name
conn = get(build_conn(), "api/packages?sort=inserted_at")
result = json_response(conn, 200)
assert hd(result)["name"] == package1.name
end
test "show private packages", %{user: user, package3: package3} do
result =
build_conn()
# TODO: change to web_login/api_login helper
|> put_req_header("authorization", key_for(user))
|> get("api/packages")
|> json_response(200)
assert length(result) == 4
assert package3.name in Enum.map(result, & &1["name"])
end
test "show private packages in organization", %{
user: user,
repository: repository,
package3: package3
} do
result =
build_conn()
# TODO: change to web_login/api_login helper
|> put_req_header("authorization", key_for(user))
|> get("api/repos/#{repository.name}/packages")
|> json_response(200)
assert length(result) == 1
assert package3.name in Enum.map(result, & &1["name"])
end
test "show private packages in organization with service account", %{
repository: repository,
package3: package3
} do
user = insert(:user, service: true)
result =
build_conn()
# TODO: change to web_login/api_login helper
|> put_req_header("authorization", key_for(user))
|> get("api/repos/#{repository.name}/packages")
|> json_response(200)
assert length(result) == 1
assert package3.name in Enum.map(result, & &1["name"])
end
test "show private packages in organization authorizes", %{
repository: repository,
unauthorized_user: unauthorized_user
} do
build_conn()
|> get("api/repos/#{repository.name}/packages")
|> json_response(403)
build_conn()
# TODO: change to web_login/api_login helper
|> put_req_header("authorization", key_for(unauthorized_user))
|> get("api/repos/#{repository.name}/packages")
|> json_response(403)
end
end
describe "GET /api/packages/:name" do
test "get package", %{package1: package1} do
conn = get(build_conn(), "api/packages/#{package1.name}")
result = json_response(conn, 200)
assert result["name"] == package1.name
assert result["inserted_at"] == "2030-01-01T00:00:00.000000Z"
# updated_at ISO8601 datetime string should include a Z to indicate UTC
assert String.slice(result["updated_at"], -1, 1) == "Z"
assert result["url"] == "http://localhost:5000/api/packages/#{package1.name}"
assert result["html_url"] == "http://localhost:5000/packages/#{package1.name}"
assert result["docs_html_url"] == "http://localhost:5002/#{package1.name}/"
assert result["latest_version"] == "0.0.1"
assert result["latest_stable_version"] == "0.0.1"
assert result["configs"]["mix.exs"] == ~s({:#{package1.name}, "~> 0.0.1"})
release = List.first(result["releases"])
assert release["url"] ==
"http://localhost:5000/api/packages/#{package1.name}/releases/0.0.1"
assert release["version"] == "0.0.1"
end
test "get package for non namespaced private organization", %{user: user, package3: package3} do
build_conn()
|> put_req_header("authorization", key_for(user))
|> get("api/packages/#{package3.name}")
|> json_response(404)
end
test "get package for unauthenticated private organization", %{
repository: repository,
package3: package3
} do
build_conn()
|> get("api/repos/#{repository.name}/packages/#{package3.name}")
|> json_response(403)
end
test "get package returns 403 for unknown organization", %{package1: package1} do
build_conn()
|> get("api/repos/UNKNOWN_REPOSITORY/packages/#{package1.name}")
|> json_response(403)
end
test "get package returns 403 for unknown package if you are not authorized", %{
repository: repository
} do
build_conn()
|> get("api/repos/#{repository.name}/packages/UNKNOWN_PACKAGE")
|> json_response(403)
end
test "get package returns 404 for unknown package if you are authorized", %{
user: user,
repository: repository
} do
build_conn()
|> put_req_header("authorization", key_for(user))
|> get("api/repos/#{repository.name}/packages/UNKNOWN_PACKAGE")
|> json_response(404)
end
test "get package for authenticated private organization", %{
user: user,
repository: repository,
package3: package3
} do
result =
build_conn()
|> put_req_header("authorization", key_for(user))
|> get("api/repos/#{repository.name}/packages/#{package3.name}")
|> json_response(200)
assert result["name"] == package3.name
assert result["repository"] == repository.name
assert result["url"] ==
"http://localhost:5000/api/repos/#{repository.name}/packages/#{package3.name}"
assert result["html_url"] ==
"http://localhost:5000/packages/#{repository.name}/#{package3.name}"
assert result["docs_html_url"] ==
"http://#{repository.name}.localhost:5002/#{package3.name}/"
end
test "get package with retired versions", %{package4: package4} do
conn = get(build_conn(), "api/packages/#{package4.name}")
result = json_response(conn, 200)
assert result["retirements"] == %{
"0.0.1" => %{"message" => "not backward compatible", "reason" => "other"}
}
end
end
describe "GET /api/packages/:name/audit_logs" do
test "returns the first page of audit_logs related to this package when params page is not specified",
%{package1: package} do
insert(:audit_log,
action: "test.package.audit_logs",
params: %{package: %{id: package.id}}
)
conn =
build_conn()
|> get("/api/packages/#{package.name}/audit_logs")
assert [%{"action" => "test.package.audit_logs"}] = json_response(conn, :ok)
end
end
end
| 32.988679 | 106 | 0.624685 |
73d5c70d3835136c124ccd034c9c9532827fe1c2 | 84 | exs | Elixir | test/views/page_view_test.exs | thluiz/quartoElugRJ | 4988fe0fd05ed92a43f92f5d43ebbc2983021c30 | [
"MIT"
] | 1 | 2016-05-06T00:00:10.000Z | 2016-05-06T00:00:10.000Z | test/views/page_view_test.exs | thluiz/quartoElugRJ | 4988fe0fd05ed92a43f92f5d43ebbc2983021c30 | [
"MIT"
] | null | null | null | test/views/page_view_test.exs | thluiz/quartoElugRJ | 4988fe0fd05ed92a43f92f5d43ebbc2983021c30 | [
"MIT"
] | null | null | null | defmodule QuartoElugRJ.PageViewTest do
use QuartoElugRJ.ConnCase, async: true
end
| 21 | 40 | 0.833333 |
73d602850fb4c47c63acf8082a927c4e8df28b74 | 2,921 | exs | Elixir | test/pg/ecto_enum_test.exs | jesenko/ecto_enum | 9639bb23fb56bbac8a0c71d6824d724f243780fd | [
"MIT"
] | null | null | null | test/pg/ecto_enum_test.exs | jesenko/ecto_enum | 9639bb23fb56bbac8a0c71d6824d724f243780fd | [
"MIT"
] | null | null | null | test/pg/ecto_enum_test.exs | jesenko/ecto_enum | 9639bb23fb56bbac8a0c71d6824d724f243780fd | [
"MIT"
] | 1 | 2020-02-06T12:59:54.000Z | 2020-02-06T12:59:54.000Z | defmodule EctoEnumTest do
use ExUnit.Case
import EctoEnum
defenum(StatusEnum, registered: 0, active: 1, inactive: 2, archived: 3)
defmodule User do
use Ecto.Schema
schema "users" do
field(:status, StatusEnum)
end
end
alias Ecto.Integration.TestRepo
test "accepts int, atom and string on save" do
user = TestRepo.insert!(%User{status: 0})
user = TestRepo.get(User, user.id)
assert user.status == :registered
user = Ecto.Changeset.change(user, status: :active)
user = TestRepo.update!(user)
assert user.status == :active
user = Ecto.Changeset.change(user, status: "inactive")
user = TestRepo.update!(user)
assert user.status == "inactive"
user = TestRepo.get(User, user.id)
assert user.status == :inactive
TestRepo.insert!(%User{status: :archived})
user = TestRepo.get_by(User, status: :archived)
assert user.status == :archived
end
test "casts int and binary to atom" do
%{changes: changes} = Ecto.Changeset.cast(%User{}, %{"status" => "active"}, [:status])
assert changes.status == :active
%{changes: changes} = Ecto.Changeset.cast(%User{}, %{"status" => 3}, [:status])
assert changes.status == :archived
%{changes: changes} = Ecto.Changeset.cast(%User{}, %{"status" => :inactive}, [:status])
assert changes.status == :inactive
end
test "raises when input is not in the enum map" do
error = {:status, {"is invalid", [type: EctoEnumTest.StatusEnum, validation: :cast]}}
changeset = Ecto.Changeset.cast(%User{}, %{"status" => "retroactive"}, [:status])
assert error in changeset.errors
changeset = Ecto.Changeset.cast(%User{}, %{"status" => :retroactive}, [:status])
assert error in changeset.errors
changeset = Ecto.Changeset.cast(%User{}, %{"status" => 4}, [:status])
assert error in changeset.errors
assert_raise Ecto.ChangeError, error_msg("retroactive"), fn ->
TestRepo.insert!(%User{status: "retroactive"})
end
assert_raise Ecto.ChangeError, error_msg(:retroactive), fn ->
TestRepo.insert!(%User{status: :retroactive})
end
assert_raise Ecto.ChangeError, error_msg(5), fn ->
TestRepo.insert!(%User{status: 5})
end
end
test "reflection" do
assert StatusEnum.__enum_map__() == [registered: 0, active: 1, inactive: 2, archived: 3]
assert StatusEnum.__valid_values__() == [
0,
1,
2,
3,
:registered,
:active,
:inactive,
:archived,
"active",
"archived",
"inactive",
"registered"
]
end
test "defenum/2 can accept variables" do
x = 0
defenum(TestEnum, zero: x)
end
def error_msg(value) do
"value `#{inspect(value)}` for `EctoEnumTest.User.status` in `insert` does not match type EctoEnumTest.StatusEnum"
end
end
| 28.637255 | 118 | 0.624101 |
73d603b531928cdea8cf90ff67da1f7fc8ff27bd | 119 | ex | Elixir | lib/web/views/registration_view.ex | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | 1 | 2021-12-28T20:57:32.000Z | 2021-12-28T20:57:32.000Z | lib/web/views/registration_view.ex | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | null | null | null | lib/web/views/registration_view.ex | christhekeele/ex_venture | 4f4b329f50a133e219969f9823144a4cb9bf738d | [
"MIT"
] | null | null | null | defmodule Web.RegistrationView do
use Web, :view
import Web.Gettext, only: [gettext: 1]
alias Web.FormView
end
| 14.875 | 40 | 0.731092 |
73d6303aee72b92433ea17c80737eb3a7c187abf | 172 | exs | Elixir | test/test_helper.exs | joakimk/exremit | 6c0a5fb32208b98cc1baac11d6a7bd248a1aa3bc | [
"Unlicense",
"MIT"
] | 27 | 2016-09-21T09:11:25.000Z | 2020-12-16T04:04:50.000Z | test/test_helper.exs | barsoom/exremit | 6c0a5fb32208b98cc1baac11d6a7bd248a1aa3bc | [
"Unlicense",
"MIT"
] | 2 | 2016-12-02T08:05:13.000Z | 2020-03-27T08:07:59.000Z | test/test_helper.exs | barsoom/exremit | 6c0a5fb32208b98cc1baac11d6a7bd248a1aa3bc | [
"Unlicense",
"MIT"
] | 4 | 2016-09-25T09:58:17.000Z | 2020-04-27T15:07:36.000Z | {:ok, _} = Application.ensure_all_started(:hound)
{:ok, _} = Application.ensure_all_started(:ex_machina)
ExUnit.start
Ecto.Adapters.SQL.Sandbox.mode(Review.Repo, :manual)
| 28.666667 | 54 | 0.767442 |
73d6581055b3e1e5516bf350397f5adb1710a8f6 | 774 | ex | Elixir | lib/excommerce/addresses/zone.ex | roger120981/planet | a662006551d11427c08cf6cdbacd37d377bcd9c5 | [
"MIT"
] | 1 | 2019-04-01T19:14:16.000Z | 2019-04-01T19:14:16.000Z | lib/excommerce/addresses/zone.ex | roger120981/planet | a662006551d11427c08cf6cdbacd37d377bcd9c5 | [
"MIT"
] | null | null | null | lib/excommerce/addresses/zone.ex | roger120981/planet | a662006551d11427c08cf6cdbacd37d377bcd9c5 | [
"MIT"
] | 1 | 2019-03-24T01:50:48.000Z | 2019-03-24T01:50:48.000Z | defmodule Excommerce.Addresses.Zone do
use Ecto.Schema
import Ecto.Changeset
alias Excommerce.Addresses.{Zone, ZoneMember}
schema "zones" do
field :name, :string
field :description, :string
field :type, :string
has_many :country_zone_members, {"country_zone_members", ZoneMember}, on_replace: :delete
has_many :state_zone_members, {"state_zone_members", ZoneMember}
timestamps()
end
@required_fields ~w(name description type)a
@optional_fields ~w()a
@zone_types ~w(Country State)
def changeset(zone, attrs \\ %{}) do
zone
|> cast(attrs, @required_fields ++ @optional_fields)
|> validate_required(@required_fields)
|> validate_inclusion(:type, @zone_types)
end
def zone_types, do: @zone_types
end
| 22.764706 | 93 | 0.706718 |
73d6693982b5d8f2aad335484ade7dc7bfb0a16c | 374 | ex | Elixir | elixir-match/app/lib/match/find_user.ex | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | [
"Unlicense"
] | null | null | null | elixir-match/app/lib/match/find_user.ex | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | [
"Unlicense"
] | null | null | null | elixir-match/app/lib/match/find_user.ex | jim80net/elixir_tutorial_projects | db19901a9305b297faa90642bebcc08455621b52 | [
"Unlicense"
] | null | null | null | defmodule Match.FindUser do
alias Match.Password
def with_username_and_password(users, username, password) do
case Enum.filter(users, fn {_, {k, _, _}} -> k == username end) do
[{id, {_username, _icon, hash}}] ->
if Password.verify(password, hash) do
id
end
[] ->
Password.dummy_verify()
nil
end
end
end
| 22 | 70 | 0.588235 |
73d697a614406795d1d36e5fe7eff2e0f6e7820f | 1,861 | ex | Elixir | lib/bagheera_web/telemetry.ex | hansjhoffman/bagheera | a816a38a4dd8a4ac841accb0234dfdc15f1aeec2 | [
"MIT"
] | 3 | 2020-02-19T14:57:45.000Z | 2020-02-26T03:38:09.000Z | lib/bagheera_web/telemetry.ex | hansjhoffman/bagheera | a816a38a4dd8a4ac841accb0234dfdc15f1aeec2 | [
"MIT"
] | 1 | 2021-09-30T13:51:51.000Z | 2021-09-30T13:51:51.000Z | lib/bagheera_web/telemetry.ex | hansjhoffman/bagheera | a816a38a4dd8a4ac841accb0234dfdc15f1aeec2 | [
"MIT"
] | null | null | null | defmodule BagheeraWeb.Telemetry do
@moduledoc """
Telemetry
"""
use Supervisor
import Telemetry.Metrics
def start_link(arg) do
Supervisor.start_link(__MODULE__, arg, name: __MODULE__)
end
@impl true
def init(_arg) do
children = [
# Telemetry poller will execute the given period measurements
# every 10_000ms. Learn more here: https://hexdocs.pm/telemetry_metrics
{:telemetry_poller, measurements: periodic_measurements(), period: 10_000}
# Add reporters as children of your supervision tree.
# {Telemetry.Metrics.ConsoleReporter, metrics: metrics()}
]
Supervisor.init(children, strategy: :one_for_one)
end
def metrics do
[
# Phoenix Metrics
summary("phoenix.endpoint.stop.duration",
unit: {:native, :millisecond}
),
summary("phoenix.router_dispatch.stop.duration",
tags: [:route],
unit: {:native, :millisecond}
),
# Database Metrics
summary("bagheera.repo.query.total_time", unit: {:native, :millisecond}),
summary("bagheera.repo.query.decode_time", unit: {:native, :millisecond}),
summary("bagheera.repo.query.query_time", unit: {:native, :millisecond}),
summary("bagheera.repo.query.queue_time", unit: {:native, :millisecond}),
summary("bagheera.repo.query.idle_time", unit: {:native, :millisecond}),
# VM Metrics
summary("vm.memory.total", unit: {:byte, :kilobyte}),
summary("vm.total_run_queue_lengths.total"),
summary("vm.total_run_queue_lengths.cpu"),
summary("vm.total_run_queue_lengths.io")
]
end
defp periodic_measurements do
[
# A module, function and arguments to be invoked periodically.
# This function must call :telemetry.execute/3 and a metric must be added above.
# {BagheeraWeb, :count_users, []}
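# For example (illustrative sketch — `count_users/0` and the event name are
# hypothetical, not part of this app), the entry above would be backed by:
#
#     def count_users do
#       :telemetry.execute([:bagheera, :users], %{total: 0}, %{})
#     end
#
# together with a matching `summary("bagheera.users.total")` metric in metrics/0.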
]
end
end
| 31.542373 | 86 | 0.67007 |
73d6b902825593bdf189916e4e5011a45481d6bc | 3,597 | exs | Elixir | test/socializer_web/resolvers/message_resolver_test.exs | adam-h/socializer | 22bbb34a6e707af9cf608c5eeb6189d1f067a95b | [
"MIT"
] | 311 | 2019-04-21T22:15:08.000Z | 2022-01-23T14:07:03.000Z | test/socializer_web/resolvers/message_resolver_test.exs | adam-h/socializer | 22bbb34a6e707af9cf608c5eeb6189d1f067a95b | [
"MIT"
] | 9 | 2020-09-07T09:38:58.000Z | 2022-02-26T18:07:44.000Z | test/socializer_web/resolvers/message_resolver_test.exs | adam-h/socializer | 22bbb34a6e707af9cf608c5eeb6189d1f067a95b | [
"MIT"
] | 49 | 2019-04-22T01:29:50.000Z | 2022-03-23T04:34:35.000Z | defmodule SocializerWeb.MessageResolverTest do
use SocializerWeb.ConnCase
alias SocializerWeb.Resolvers.MessageResolver
describe "#list" do
it "returns messages for a conversation" do
conversation = insert(:conversation)
user = insert(:user)
insert(:conversation_user, conversation: conversation, user: user)
message_a = insert(:message, conversation: conversation, user: user)
message_b = insert(:message, conversation: conversation, user: user)
{:ok, results} =
MessageResolver.list(nil, %{conversation_id: conversation.id}, %{
context: %{current_user: user}
})
assert length(results) == 2
assert List.first(results).id == message_a.id
assert List.last(results).id == message_b.id
end
it "returns not found when user is not in conversation" do
conversation = insert(:conversation)
user = insert(:user)
insert(:message, conversation: conversation, user: user)
insert(:message, conversation: conversation, user: user)
{:error, error} =
MessageResolver.list(nil, %{conversation_id: conversation.id}, %{
context: %{current_user: user}
})
assert error == "Not found"
end
it "returns not found when conversation does not exist" do
user = insert(:user)
{:error, error} =
MessageResolver.list(nil, %{conversation_id: -1}, %{context: %{current_user: user}})
assert error == "Not found"
end
it "returns unauthenticated with no current user" do
conversation = insert(:conversation)
{:error, error} = MessageResolver.list(nil, %{conversation_id: conversation.id}, nil)
assert error == "Unauthenticated"
end
end
describe "#create" do
it "creates valid message with authenticated user" do
conversation = insert(:conversation)
user = insert(:user)
insert(:conversation_user, conversation: conversation, user: user)
{:ok, message} =
MessageResolver.create(nil, %{conversation_id: conversation.id, body: "Hello"}, %{
context: %{current_user: user}
})
assert message.body == "Hello"
assert message.conversation_id == conversation.id
assert message.user_id == user.id
end
it "returns not found when user is not in conversation" do
conversation = insert(:conversation)
user = insert(:user)
{:error, error} =
MessageResolver.create(nil, %{conversation_id: conversation.id, body: "Hello"}, %{
context: %{current_user: user}
})
assert error == "Not found"
end
it "returns not found when conversation does not exist" do
user = insert(:user)
{:error, error} =
MessageResolver.create(nil, %{conversation_id: -1, body: "Hello"}, %{
context: %{current_user: user}
})
assert error == "Not found"
end
it "returns error for missing params" do
conversation = insert(:conversation)
user = insert(:user)
insert(:conversation_user, conversation: conversation, user: user)
{:error, error} =
MessageResolver.create(nil, %{conversation_id: conversation.id}, %{
context: %{current_user: user}
})
assert error == [[field: :body, message: "Can't be blank"]]
end
it "returns error for unauthenticated user" do
conversation = insert(:conversation)
{:error, error} =
MessageResolver.create(nil, %{conversation_id: conversation.id, body: "Hello"}, nil)
assert error == "Unauthenticated"
end
end
end
| 30.74359 | 92 | 0.640256 |
73d6ed84a33dc4f8d6aa79c9a050c7d012c98176 | 1,136 | exs | Elixir | complete/mix.exs | PJUllrich/e2e-testing-with-cypress | 19806edb347d829eef1fd13079ceb905fc2e4a39 | [
"MIT"
] | 2 | 2019-11-03T20:42:36.000Z | 2019-12-03T14:12:52.000Z | complete/mix.exs | PJUllrich/e2e-testing-with-cypress | 19806edb347d829eef1fd13079ceb905fc2e4a39 | [
"MIT"
] | 1 | 2021-03-09T20:17:35.000Z | 2021-03-09T20:17:35.000Z | incomplete/mix.exs | PJUllrich/e2e-testing-with-cypress | 19806edb347d829eef1fd13079ceb905fc2e4a39 | [
"MIT"
] | null | null | null | defmodule E2eWithCypress.MixProject do
use Mix.Project
def project do
[
app: :e2e_with_cypress,
version: "0.1.0",
elixir: "~> 1.5",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {E2eWithCypress.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.4.9"},
{:phoenix_pubsub, "~> 1.1"},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:phoenix_live_view, "~> 0.3.1"},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"}
]
end
end
| 24.695652 | 57 | 0.581866 |
73d6f231a9e921ae8f666ff7b82f551b5ffc2dda | 854 | ex | Elixir | lib/honest_chat/rooms.ex | noozo/honest_chat | b3c45f72fd9c579404d05ce308eb13a0a2997760 | [
"Apache-2.0"
] | 3 | 2021-11-19T07:18:30.000Z | 2021-12-23T22:19:48.000Z | lib/honest_chat/rooms.ex | noozo/honest_chat | b3c45f72fd9c579404d05ce308eb13a0a2997760 | [
"Apache-2.0"
] | null | null | null | lib/honest_chat/rooms.ex | noozo/honest_chat | b3c45f72fd9c579404d05ce308eb13a0a2997760 | [
"Apache-2.0"
] | null | null | null | defmodule HonestChat.Rooms do
@moduledoc """
The Rooms context.
"""
import Ecto.Query, warn: false
import Ecto.Changeset
alias HonestChat.Accounts
alias HonestChat.Accounts.User
alias HonestChat.Repo
alias HonestChat.Rooms.Room
def get_user_rooms(%User{} = user) do
user
|> Repo.preload(:rooms)
|> Map.get(:rooms)
end
def get_room!(id) do
Repo.get!(Room, id)
end
def get_room_by!(opts) do
Repo.get_by!(Room, opts)
end
def join(user, room) do
room = Repo.preload(room, :members)
room
|> Room.changeset(%{})
|> put_assoc(:members, [user | room.members])
|> Repo.update()
end
def create_room(attrs) when is_map(attrs) do
user = Accounts.get_user!(attrs.user_id)
%Room{}
|> Room.changeset(attrs)
|> put_assoc(:members, [user])
|> Repo.insert()
end
end
| 18.977778 | 49 | 0.642857 |
73d709a757c89f71283c22a3fd84ba115d04efad | 143 | ex | Elixir | text_client/lib/text_client.ex | johncoleman83/elixir-for-programmers | 062d0a761e59b30909377694151655c946f48bb4 | [
"MIT"
] | null | null | null | text_client/lib/text_client.ex | johncoleman83/elixir-for-programmers | 062d0a761e59b30909377694151655c946f48bb4 | [
"MIT"
] | null | null | null | text_client/lib/text_client.ex | johncoleman83/elixir-for-programmers | 062d0a761e59b30909377694151655c946f48bb4 | [
"MIT"
] | null | null | null | defmodule TextClient do
@moduledoc """
Documentation for TextClient
"""
defdelegate start(), to: TextClient.Interact, as: :start
end
| 15.888889 | 58 | 0.713287 |
73d7715811c3f8df423314722bb9ec12845d65ba | 2,395 | ex | Elixir | lib/ex_jenga/send_money/pesalink_to_mobile.ex | beamkenya/ex_jenga | 03a936a04d99614043d120d0e3ee787f1b8a5b8d | [
"AML",
"MIT"
] | 1 | 2021-09-14T09:50:22.000Z | 2021-09-14T09:50:22.000Z | lib/ex_jenga/send_money/pesalink_to_mobile.ex | beamkenya/ex_jenga | 03a936a04d99614043d120d0e3ee787f1b8a5b8d | [
"AML",
"MIT"
] | 15 | 2021-04-23T11:28:49.000Z | 2021-06-23T04:42:35.000Z | lib/ex_jenga/send_money/pesalink_to_mobile.ex | beamkenya/ex_jenga | 03a936a04d99614043d120d0e3ee787f1b8a5b8d | [
"AML",
"MIT"
] | null | null | null | defmodule ExJenga.SendMoney.PesalinkToMobile do
@moduledoc """
This module enables you to Send Money To Other People's mobile phone through PesaLink
"""
import ExJenga.JengaBase
alias ExJenga.Signature
@doc """
Send Money To Other People's mobile phone through PesaLink
## Parameters
attrs: - a map containing:
- `source` - a map containing; `countryCode[string]`, `name[string]` and `accountNumber[string]`
- `destination` - a map containing; `type[string]`, `countryCode[string]`, `name[string]`, `bankCode[string]`, `mobileNumber[string]`
- `transfer` - a map containing; `type[string]`, `amount[string]`, `currencyCode[string]`, `reference[string]`, `date[string]` and `description[string]`
Read More about the parameters' descriptions here: https://developer.jengaapi.io/reference#pesalink2mobile
## Example
iex> ExJenga.SendMoney.PesalinkToMobile.request(%{ source: %{ countryCode: "KE", name: "John Doe", accountNumber: "0770194201783" }, destination: %{ type: "bank", countryCode: "KE", name: "John Doe", bankCode: "07", mobileNumber: "0722000000" }, transfer: %{ type: "PesaLink", amount: "1000", currencyCode: "KES", reference: "692194625821", date: "2020-12-03", description: "some remarks here" } })
{:ok,
%{
"transactionId" => "10000345333355",
"status" => "SUCCESS"
}
}
"""
@spec request(map()) :: {:error, any()} | {:ok, any()}
def request(
%{
source: %{
countryCode: countryCode,
name: _senderName,
accountNumber: accountNumber
},
destination: %{
type: _destType,
countryCode: countryCode,
name: recipientName,
bankCode: _bankCode,
mobileNumber: _mobileNumber
},
transfer: %{
type: "PesaLink",
amount: amount,
currencyCode: currencyCode,
reference: reference,
date: _date,
description: _description
}
} = requestBody
) do
message = "#{amount}#{currencyCode}#{reference}#{recipientName}#{accountNumber}"
make_request("/transaction/v2/remittance#pesalinkmobile", requestBody, [
{"signature", Signature.sign(message)}
])
end
def request(_), do: {:error, "Required Parameters missing, check your request body"}
end
| 37.421875 | 404 | 0.618372 |
73d7a5fd60c9f0cf8b0895f5ca42781700315511 | 822 | ex | Elixir | lib/repo/schema_type.ex | scripbox/ex_audit | 6478d72816e9357c9ba4eb5eeafd0244010133b4 | [
"MIT"
] | null | null | null | lib/repo/schema_type.ex | scripbox/ex_audit | 6478d72816e9357c9ba4eb5eeafd0244010133b4 | [
"MIT"
] | null | null | null | lib/repo/schema_type.ex | scripbox/ex_audit | 6478d72816e9357c9ba4eb5eeafd0244010133b4 | [
"MIT"
] | null | null | null | defmodule ExAudit.Type.Schema do
@behaviour Ecto.Type
def cast(schema) when is_atom(schema) do
case Enum.member?(schemas(), schema) do
true -> {:ok, schema}
_ -> :error
end
end
def cast(schema) when is_binary(schema) do
load(schema)
end
def cast(_), do: :error
def load(schema) do
case get_schema_by_table(schema) do
nil -> :error
schema -> {:ok, schema}
end
end
def dump(schema) do
case Enum.member?(schemas(), schema) do
true -> {:ok, schema.__schema__(:source)}
_ -> :error
end
end
defp get_schema_by_table(table) do
schemas() |> Enum.find(fn schema ->
schema.__schema__(:source) == table
end)
end
def type, do: :string
defp schemas do
Application.get_env(:ex_audit, :tracked_schemas, [])
end
end
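# The schemas this type will accept and emit are driven by application config,
# e.g. (illustrative; the module names are placeholders):
#
#     config :ex_audit, tracked_schemas: [MyApp.Accounts.User, MyApp.Blog.Post]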
| 19.116279 | 56 | 0.621655 |
73d7ad25ffdbc4e4242f38ff44b894b02140addc | 9,885 | ex | Elixir | lib/elixir/lib/module/locals_tracker.ex | davidsulc/elixir | dd4fd6ab742acd75862e34e26dbdb86e0cf6453f | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module/locals_tracker.ex | davidsulc/elixir | dd4fd6ab742acd75862e34e26dbdb86e0cf6453f | [
"Apache-2.0"
] | null | null | null | lib/elixir/lib/module/locals_tracker.ex | davidsulc/elixir | dd4fd6ab742acd75862e34e26dbdb86e0cf6453f | [
"Apache-2.0"
] | null | null | null | # This is an Elixir module responsible for tracking
# calls in order to extract Elixir modules' behaviour
# during compilation time.
#
# ## Implementation
#
# The implementation uses the digraph module to track
# all dependencies. The graph starts with one main vertex:
#
# * `:local` - points to local functions
#
# We can also have the following vertices:
#
# * `Module` - a module that was invoked via an import
# * `{name, arity}` - a local function/arity pair
# * `{:import, name, arity}` - an invoked function/arity import
#
# Each of those vertices can associate to other vertices
# as described below:
#
# * `Module`
# * in neighbours: `{:import, name, arity}`
#
# * `{name, arity}`
# * in neighbours: `:local`, `{name, arity}`
# * out neighbours: `{:import, name, arity}`
#
# * `{:import, name, arity}`
# * in neighbours: `{name, arity}`
# * out neighbours: `Module`
#
# Note that since this is required for bootstrap, we can't use
# any of the `GenServer` conveniences.
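#
# For illustration (a rough sketch based on the description above, not on actual
# tracker output), a module like
#
#     defmodule M do
#       import String
#       def a, do: b()
#       defp b, do: upcase("x")
#     end
#
# would produce edges along the lines of:
#
#     :local -> {:a, 0} -> {:b, 0} -> {:import, :upcase, 1} -> String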
defmodule Module.LocalsTracker do
@moduledoc false
@timeout 30_000
@behaviour :gen_server
@type ref :: pid | module
@type name :: atom
@type name_arity :: {name, arity}
@type local :: {name, arity}
@type import :: {:import, name, arity}
# Public API
@doc """
Returns all imported modules that had the given
`{name, arity}` invoked.
"""
@spec imports_with_dispatch(ref, name_arity) :: [module]
def imports_with_dispatch(ref, {name, arity}) do
d = :gen_server.call(to_pid(ref), :digraph, @timeout)
:digraph.out_neighbours(d, {:import, name, arity})
end
@doc """
Returns all locals that are reachable.
By default, all public functions are reachable.
A private function is only reachable if it is invoked
directly by a public function or by another reachable function.
"""
@spec reachable(ref) :: [local]
def reachable(ref) do
ref
|> to_pid()
|> :gen_server.call(:digraph, @timeout)
|> reachable_from(:local)
|> :sets.to_list()
end
defp reachable_from(d, starting) do
reduce_reachable(d, starting, :sets.new)
end
defp reduce_reachable(d, vertex, vertices) do
neighbours = :digraph.out_neighbours(d, vertex)
neighbours = (for {_, _} = t <- neighbours, do: t) |> :sets.from_list
remaining = :sets.subtract(neighbours, vertices)
vertices = :sets.union(neighbours, vertices)
:sets.fold(&reduce_reachable(d, &1, &2), vertices, remaining)
end
defp to_pid(pid) when is_pid(pid), do: pid
defp to_pid(mod) when is_atom(mod) do
table = :elixir_module.data_table(mod)
:ets.lookup_element(table, {:elixir, :locals_tracker}, 2)
end
# Internal API
# Starts the tracker and returns its PID.
@doc false
def start_link do
:gen_server.start_link(__MODULE__, [], [])
end
# Adds a definition into the tracker. A public
# definition is connected with the :local node
# while a private one is left unreachable until
# a call is made to.
@doc false
def add_definition(pid, kind, tuple) when kind in [:def, :defp, :defmacro, :defmacrop] do
:gen_server.cast(pid, {:add_definition, kind, tuple})
end
# Adds and tracks defaults for a definition into the tracker.
@doc false
def add_defaults(pid, kind, tuple, defaults) when kind in [:def, :defp, :defmacro, :defmacrop] do
:gen_server.cast(pid, {:add_defaults, kind, tuple, defaults})
end
# Adds a local dispatch to the given target.
def add_local(pid, to) when is_tuple(to) do
:gen_server.cast(pid, {:add_local, :local, to})
end
# Adds a local dispatch from-to the given target.
@doc false
def add_local(pid, from, to) when is_tuple(from) and is_tuple(to) do
:gen_server.cast(pid, {:add_local, from, to})
end
# Adds an import dispatch to the given target.
@doc false
def add_import(pid, function, module, target) when is_atom(module) and is_tuple(target) do
:gen_server.cast(pid, {:add_import, function, module, target})
end
# Yanks a local node. Returns its in and out vertices in a tuple.
@doc false
def yank(pid, local) do
:gen_server.call(to_pid(pid), {:yank, local}, @timeout)
end
# Reattach a previously yanked node
@doc false
def reattach(pid, tuple, kind, function, neighbours) do
:gen_server.cast(to_pid(pid), {:reattach, tuple, kind, function, neighbours})
end
# Collecting all conflicting imports with the given functions
@doc false
def collect_imports_conflicts(pid, all_defined) do
d = :gen_server.call(pid, :digraph, @timeout)
for {{name, arity}, _, meta, _} <- all_defined,
:digraph.in_neighbours(d, {:import, name, arity}) != [],
n = :digraph.out_neighbours(d, {:import, name, arity}),
n != [] do
{meta, {n, name, arity}}
end
end
# Collect all unused definitions based on the list of private
# definitions given, also accounting for the expected number of
# default clauses a private function has.
@doc false
def collect_unused_locals(ref, private) do
d = :gen_server.call(to_pid(ref), :digraph, @timeout)
reachable = reachable_from(d, :local)
reattached = :digraph.out_neighbours(d, :reattach)
{unreachable(reachable, reattached, private), collect_warnings(reachable, private)}
end
defp unreachable(reachable, reattached, private) do
for {tuple, kind, _, _} <- private,
not reachable?(tuple, kind, reachable, reattached),
do: tuple
end
defp reachable?(tuple, :defmacrop, reachable, reattached) do
# All private macros are unreachable unless they have been
# reattached and they are reachable.
:lists.member(tuple, reattached) and :sets.is_element(tuple, reachable)
end
defp reachable?(tuple, :defp, reachable, _reattached) do
:sets.is_element(tuple, reachable)
end
defp collect_warnings(reachable, private) do
:lists.foldl(&collect_warnings(&1, &2, reachable), [], private)
end
defp collect_warnings({_, _, false, _}, acc, _reachable) do
acc
end
defp collect_warnings({tuple, kind, meta, 0}, acc, reachable) do
if :sets.is_element(tuple, reachable) do
acc
else
[{meta, {:unused_def, tuple, kind}} | acc]
end
end
defp collect_warnings({tuple, kind, meta, default}, acc, reachable) when default > 0 do
{name, arity} = tuple
min = arity - default
max = arity
case min_reachable_default(max, min, :none, name, reachable) do
:none -> [{meta, {:unused_def, tuple, kind}} | acc]
^min -> acc
^max -> [{meta, {:unused_args, tuple}} | acc]
diff -> [{meta, {:unused_args, tuple, diff}} | acc]
end
end
defp min_reachable_default(max, min, last, name, reachable) when max >= min do
case :sets.is_element({name, max}, reachable) do
true -> min_reachable_default(max - 1, min, max, name, reachable)
false -> min_reachable_default(max - 1, min, last, name, reachable)
end
end
defp min_reachable_default(_max, _min, last, _name, _reachable) do
last
end
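  # Worked example of the default-clause accounting above (the function is
  # hypothetical, not part of this module): for `defp foo(a, b \\ 1, c \\ 2)`
  # the tuple is {:foo, 3} with 2 defaults, so min = 1 and max = 3. If
  # {:foo, 1} is reachable, every default is exercised and no warning is
  # emitted; if only {:foo, 3} is reachable, the defaults are never used and
  # :unused_args is reported; if the lowest reachable arity sits strictly
  # between the two, :unused_args is reported with that arity; if no arity is
  # reachable at all, the definition itself is reported as unused.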
# Stops the gen server
@doc false
def stop(pid) do
:gen_server.cast(pid, :stop)
end
# Callbacks
def init([]) do
d = :digraph.new([:protected])
:digraph.add_vertex(d, :local)
:digraph.add_vertex(d, :reattach)
{:ok, d}
end
def handle_call({:yank, local}, _from, d) do
out_vertices = :digraph.out_neighbours(d, local)
:digraph.del_edges(d, :digraph.out_edges(d, local))
{:reply, {[], out_vertices}, d}
end
def handle_call(:digraph, _from, d) do
{:reply, d, d}
end
@doc false
def handle_info(_msg, d) do
{:noreply, d}
end
def handle_cast({:add_local, from, to}, d) do
handle_add_local(d, from, to)
{:noreply, d}
end
def handle_cast({:add_import, function, module, {name, arity}}, d) do
handle_import(d, function, module, name, arity)
{:noreply, d}
end
def handle_cast({:add_definition, kind, tuple}, d) do
handle_add_definition(d, kind, tuple)
{:noreply, d}
end
def handle_cast({:add_defaults, kind, {name, arity}, defaults}, d) do
for i <- :lists.seq(arity - defaults, arity - 1) do
handle_add_definition(d, kind, {name, i})
handle_add_local(d, {name, i}, {name, arity})
end
{:noreply, d}
end
def handle_cast({:reattach, tuple, kind, function, {in_neigh, out_neigh}}, d) do
# Reattach the old function
for from <- in_neigh do
:digraph.add_vertex(d, from)
replace_edge!(d, from, function)
end
for to <- out_neigh do
:digraph.add_vertex(d, to)
replace_edge!(d, function, to)
end
# Add the new definition
handle_add_definition(d, kind, tuple)
# Make a call from the old function to the new one
if function != tuple do
handle_add_local(d, function, tuple)
end
    # Finally, mark the new one as reattached
replace_edge!(d, :reattach, tuple)
{:noreply, d}
end
def handle_cast(:stop, d) do
{:stop, :normal, d}
end
@doc false
def terminate(_reason, _state) do
:ok
end
@doc false
def code_change(_old, state, _extra) do
{:ok, state}
end
defp handle_import(d, function, module, name, arity) do
:digraph.add_vertex(d, module)
tuple = {:import, name, arity}
:digraph.add_vertex(d, tuple)
replace_edge!(d, tuple, module)
if function != nil do
replace_edge!(d, function, tuple)
end
:ok
end
defp handle_add_local(d, from, to) do
:digraph.add_vertex(d, to)
replace_edge!(d, from, to)
end
defp handle_add_definition(d, public, tuple) when public in [:def, :defmacro] do
:digraph.add_vertex(d, tuple)
replace_edge!(d, :local, tuple)
end
defp handle_add_definition(d, private, tuple) when private in [:defp, :defmacrop] do
:digraph.add_vertex(d, tuple)
end
defp replace_edge!(d, from, to) do
_ = unless :lists.member(to, :digraph.out_neighbours(d, from)) do
[:"$e" | _] = :digraph.add_edge(d, from, to)
end
:ok
end
end
| 28.162393 | 99 | 0.663429 |
73d7d478c3634c210342b10c88712531ef0ed8fc | 1,808 | ex | Elixir | lib/xdr/transactions/operations/path_payment_strict_receive_result.ex | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 3 | 2021-08-17T20:32:45.000Z | 2022-03-13T20:26:02.000Z | lib/xdr/transactions/operations/path_payment_strict_receive_result.ex | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 45 | 2021-08-12T20:19:41.000Z | 2022-03-27T21:00:10.000Z | lib/xdr/transactions/operations/path_payment_strict_receive_result.ex | einerzg/stellar_base | 2d10c5fc3b8159efc5de10b5c7c665e3b57b3d8f | [
"MIT"
] | 2 | 2021-09-22T23:11:13.000Z | 2022-01-23T03:19:11.000Z | defmodule StellarBase.XDR.Operations.PathPaymentStrictReceiveResult do
@moduledoc """
Representation of Stellar `PathPaymentStrictReceiveResult` type.
"""
alias StellarBase.XDR.{Asset, Void}
alias StellarBase.XDR.Operations.{
PathPaymentStrictReceiveResultCode,
PathPaymentStrictResultSuccess
}
@behaviour XDR.Declaration
@arms [
PATH_PAYMENT_STRICT_RECEIVE_SUCCESS: PathPaymentStrictResultSuccess,
PATH_PAYMENT_STRICT_RECEIVE_NO_ISSUER: Asset,
default: Void
]
@type result :: PathPaymentStrictResultSuccess.t() | Asset.t() | any()
@type t :: %__MODULE__{result: result(), code: PathPaymentStrictReceiveResultCode.t()}
defstruct [:result, :code]
@spec new(result :: result(), code :: PathPaymentStrictReceiveResultCode.t()) :: t()
def new(result, %PathPaymentStrictReceiveResultCode{} = code),
do: %__MODULE__{result: result, code: code}
@impl true
def encode_xdr(%__MODULE__{result: result, code: code}) do
code
|> XDR.Union.new(@arms, result)
|> XDR.Union.encode_xdr()
end
@impl true
def encode_xdr!(%__MODULE__{result: result, code: code}) do
code
|> XDR.Union.new(@arms, result)
|> XDR.Union.encode_xdr!()
end
@impl true
def decode_xdr(bytes, spec \\ union_spec())
def decode_xdr(bytes, spec) do
case XDR.Union.decode_xdr(bytes, spec) do
{:ok, {{code, result}, rest}} -> {:ok, {new(result, code), rest}}
error -> error
end
end
@impl true
def decode_xdr!(bytes, spec \\ union_spec())
def decode_xdr!(bytes, spec) do
{{code, result}, rest} = XDR.Union.decode_xdr!(bytes, spec)
{new(result, code), rest}
end
@spec union_spec() :: XDR.Union.t()
defp union_spec do
nil
|> PathPaymentStrictReceiveResultCode.new()
|> XDR.Union.new(@arms)
end
end
| 26.202899 | 88 | 0.688053 |
73d7d879f0c0f72126458e60cd0447c7f8847d7e | 784 | ex | Elixir | lib/pool/orphan.ex | fantypants/elixium_core | 54071b6126af2b20a310bc7beb92efc24e7acabd | [
"MIT"
] | null | null | null | lib/pool/orphan.ex | fantypants/elixium_core | 54071b6126af2b20a310bc7beb92efc24e7acabd | [
"MIT"
] | null | null | null | lib/pool/orphan.ex | fantypants/elixium_core | 54071b6126af2b20a310bc7beb92efc24e7acabd | [
"MIT"
] | null | null | null | defmodule Elixium.Pool.Orphan do
@moduledoc """
  Convenient interface functions for interacting with fork blocks
"""
@pool_name :orphan_pool
def initialize, do: :ets.new(@pool_name, [:bag, :public, :named_table])
  @spec add(Elixium.Block.t()) :: true
def add(block), do: :ets.insert(@pool_name, {block.index, block})
def remove(block) do
exact_object =
block.index
|> blocks_at_height()
|> Enum.find(fn {_i, blk} -> blk.hash == block.hash end)
if exact_object do
:ets.delete_object(@pool_name, exact_object)
end
end
@doc """
Returns a list of all blocks forked at a given height
"""
@spec blocks_at_height(number) :: list
def blocks_at_height(height) when is_number(height), do: :ets.lookup(@pool_name, height)
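  # Minimal usage sketch (assumes `block` is any struct or map with :index and
  # :hash fields, e.g. an %Elixium.Block{}; the names below are illustrative):
  #
  #   Elixium.Pool.Orphan.initialize()
  #   Elixium.Pool.Orphan.add(block)
  #   Elixium.Pool.Orphan.blocks_at_height(block.index)
  #   #=> [{block.index, block}]
  #   Elixium.Pool.Orphan.remove(block)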
end
| 25.290323 | 90 | 0.673469 |
73d81a2d100b10418d05a4ed3662bd0069186a37 | 661 | ex | Elixir | samples/client/petstore/elixir/lib/swagger_petstore/model/order.ex | lob/swagger-codegen | cd4aaa272342b473f940576913d38378f0392991 | [
"Apache-2.0"
] | null | null | null | samples/client/petstore/elixir/lib/swagger_petstore/model/order.ex | lob/swagger-codegen | cd4aaa272342b473f940576913d38378f0392991 | [
"Apache-2.0"
] | 1 | 2022-01-06T22:28:02.000Z | 2022-01-06T22:28:02.000Z | samples/client/petstore/elixir/lib/swagger_petstore/model/order.ex | lob/swagger-codegen | cd4aaa272342b473f940576913d38378f0392991 | [
"Apache-2.0"
] | null | null | null | # NOTE: This class is auto generated by the swagger code generator program.
# https://github.com/swagger-api/swagger-codegen.git
# Do not edit the class manually.
defmodule SwaggerPetstore.Model.Order do
@moduledoc """
"""
@derive [Poison.Encoder]
defstruct [
:id,
:petId,
:quantity,
:shipDate,
:status,
:complete
]
@type t :: %__MODULE__{
:id => integer(),
:petId => integer(),
:quantity => integer(),
:shipDate => DateTime.t,
:status => String.t,
:complete => boolean()
}
end
defimpl Poison.Decoder, for: SwaggerPetstore.Model.Order do
def decode(value, _options) do
value
end
end
| 18.361111 | 75 | 0.635401 |
73d86c46470cf39e210e5d9e40bf2ee0568b4f01 | 1,375 | ex | Elixir | lib/manager/firebase_manager.ex | LuisSas/Le_Etoile_Learning_Manager | 532f8a3d245b116c1235c74e93ac5d57fe061b1b | [
"MIT"
] | null | null | null | lib/manager/firebase_manager.ex | LuisSas/Le_Etoile_Learning_Manager | 532f8a3d245b116c1235c74e93ac5d57fe061b1b | [
"MIT"
] | null | null | null | lib/manager/firebase_manager.ex | LuisSas/Le_Etoile_Learning_Manager | 532f8a3d245b116c1235c74e93ac5d57fe061b1b | [
"MIT"
] | null | null | null | defmodule Etoile.FirebaseManager do
alias Etoile.TaskManager
alias Etoile.Parser
def add_task( title ) do
task =
TaskManager.create_task( title )
|> Poison.encode!
{:ok, _} = HTTPoison.post "https://gameofchats-db1b4.firebaseio.com/tasks.json", task
end
def show_tasks() do
response = HTTPoison.get! "https://gameofchats-db1b4.firebaseio.com/tasks.json"
Poison.decode!( response.body ) |> parser_payload
end
def parser_payload( nil ), do: []
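  # Sketch of the shape handled by the clause below (ids and fields are
  # illustrative only). Firebase returns a map keyed by generated ids, e.g.
  #   %{"-Mabc123" => %{"title" => "Buy milk", "status" => "TODO"}}
  # and it is turned into a list of attribute maps that carry the id:
  #   [%{"firebase_uuid" => "-Mabc123", "status" => "TODO", "title" => "Buy milk"}]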
def parser_payload( payload ) do
for task <- payload do
{ id, attributes } = task
Map.put( attributes, "firebase_uuid", id)
end
end
def update_task( task_id, status ) do
    { uuid, task } = show_tasks() |> TaskManager.find_task( task_id, status )
    update_request( uuid, task )
end
def update_request( uuid, task) do
payload = task |> Poison.encode!
"https://gameofchats-db1b4.firebaseio.com/tasks/#{uuid}.json" |> HTTPoison.put( payload )
Parser.print_with_color "\n TASK #{task["title"]}... was set to: #{task["status"]} 🎉", :color214
end
def delete_task( task_id ) do
    { uuid, task } = show_tasks() |> TaskManager.find_task( task_id, "DELETE" )
"https://gameofchats-db1b4.firebaseio.com/tasks/#{uuid}.json" |> HTTPoison.delete
Parser.print_with_color "\n TASK #{task["title"]}... was DELETED! 🗑 ", :color214
end
end
| 31.25 | 98 | 0.671273 |
73d86de038a7246d6d2e4c2bdd670076da0afac3 | 994 | ex | Elixir | lib/wanon/dispatcher.ex | graffic/wanon-elixir | 65fcde17cbbeb1af3fda5f6423dba112dfa3b9a9 | [
"MIT"
] | 1 | 2018-11-28T07:44:28.000Z | 2018-11-28T07:44:28.000Z | lib/wanon/dispatcher.ex | graffic/wanon-elixir | 65fcde17cbbeb1af3fda5f6423dba112dfa3b9a9 | [
"MIT"
] | 1 | 2018-10-24T20:59:09.000Z | 2018-10-24T20:59:09.000Z | lib/wanon/dispatcher.ex | graffic/wanon-elixir | 65fcde17cbbeb1af3fda5f6423dba112dfa3b9a9 | [
"MIT"
] | null | null | null | defmodule Wanon.Dispatcher do
use GenStage
require Logger
@commands [
Wanon.Cache.Edit,
Wanon.Cache.Add,
Wanon.Quotes.RQuote,
Wanon.Quotes.AddQuote
]
def start_link() do
GenStage.start_link(Wanon.Dispatcher, :ok)
end
def init(:ok) do
{:consumer, :ok,
subscribe_to: [
{
Wanon.Telegram.Updates,
max_demand: 10, selector: &selector/1
}
]}
end
defp selector(%{"message" => msg}), do: filter_chat(msg)
defp selector(%{"edited_message" => msg}), do: filter_chat(msg)
defp selector(_), do: false
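  # filter_chat/1 below only lets events from whitelisted chats through. A
  # hypothetical config sketch (chat ids are invented) matching the
  # Application.get_env/2 + MapSet.member?/2 lookup:
  #
  #   config :wanon, Wanon.Dispatcher, MapSet.new([-1001234567890, 42])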
defp filter_chat(%{"chat" => %{"id" => id}}) do
Application.get_env(:wanon, __MODULE__)
|> MapSet.member?(id)
end
def handle_events([], _from, state) do
{:noreply, [], state}
end
def handle_events([event | tail], from, state) do
@commands
|> Enum.filter(fn c -> c.selector(event) end)
|> Enum.each(fn c -> c.execute(event) end)
handle_events(tail, from, state)
end
end
| 21.148936 | 65 | 0.6167 |
73d8748cc35695aaec4be20c7a6b4d4ad2a6932a | 5,201 | ex | Elixir | clients/deployment_manager/lib/google_api/deployment_manager/v2/model/deployment.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/deployment_manager/lib/google_api/deployment_manager/v2/model/deployment.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/deployment_manager/lib/google_api/deployment_manager/v2/model/deployment.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DeploymentManager.V2.Model.Deployment do
@moduledoc """
## Attributes
* `description` (*type:* `String.t`, *default:* `nil`) - An optional user-provided description of the deployment.
* `fingerprint` (*type:* `String.t`, *default:* `nil`) - Provides a fingerprint to use in requests to modify a deployment, such as update(), stop(), and cancelPreview() requests. A fingerprint is a randomly generated value that must be provided with update(), stop(), and cancelPreview() requests to perform optimistic locking. This ensures optimistic concurrency so that only one request happens at a time.
The fingerprint is initially generated by Deployment Manager and changes after every request to modify data. To get the latest fingerprint value, perform a get() request to a deployment.
* `id` (*type:* `String.t`, *default:* `nil`) -
* `insertTime` (*type:* `String.t`, *default:* `nil`) - Output only. Creation timestamp in RFC3339 text format.
* `labels` (*type:* `list(GoogleApi.DeploymentManager.V2.Model.DeploymentLabelEntry.t)`, *default:* `nil`) - Map of labels; provided by the client when the resource is created or updated. Specifically: Label keys must be between 1 and 63 characters long and must conform to the following regular expression: [a-z]([-a-z0-9]*[a-z0-9])? Label values must be between 0 and 63 characters long and must conform to the regular expression ([a-z]([-a-z0-9]*[a-z0-9])?)?
* `manifest` (*type:* `String.t`, *default:* `nil`) - Output only. URL of the manifest representing the last manifest that was successfully deployed.
* `name` (*type:* `String.t`, *default:* `nil`) - Name of the resource; provided by the client when the resource is created. The name must be 1-63 characters long, and comply with RFC1035. Specifically, the name must be 1-63 characters long and match the regular expression [a-z]([-a-z0-9]*[a-z0-9])? which means the first character must be a lowercase letter, and all following characters must be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
* `operation` (*type:* `GoogleApi.DeploymentManager.V2.Model.Operation.t`, *default:* `nil`) - Output only. The Operation that most recently ran, or is currently running, on this deployment.
* `selfLink` (*type:* `String.t`, *default:* `nil`) - Output only. Server defined URL for the resource.
* `target` (*type:* `GoogleApi.DeploymentManager.V2.Model.TargetConfiguration.t`, *default:* `nil`) - [Input Only] The parameters that define your deployment, including the deployment configuration and relevant templates.
* `update` (*type:* `GoogleApi.DeploymentManager.V2.Model.DeploymentUpdate.t`, *default:* `nil`) - Output only. If Deployment Manager is currently updating or previewing an update to this deployment, the updated configuration appears here.
* `updateTime` (*type:* `String.t`, *default:* `nil`) - Output only. Update timestamp in RFC3339 text format.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:description => String.t(),
:fingerprint => String.t(),
:id => String.t(),
:insertTime => String.t(),
:labels => list(GoogleApi.DeploymentManager.V2.Model.DeploymentLabelEntry.t()),
:manifest => String.t(),
:name => String.t(),
:operation => GoogleApi.DeploymentManager.V2.Model.Operation.t(),
:selfLink => String.t(),
:target => GoogleApi.DeploymentManager.V2.Model.TargetConfiguration.t(),
:update => GoogleApi.DeploymentManager.V2.Model.DeploymentUpdate.t(),
:updateTime => String.t()
}
field(:description)
field(:fingerprint)
field(:id)
field(:insertTime)
field(:labels, as: GoogleApi.DeploymentManager.V2.Model.DeploymentLabelEntry, type: :list)
field(:manifest)
field(:name)
field(:operation, as: GoogleApi.DeploymentManager.V2.Model.Operation)
field(:selfLink)
field(:target, as: GoogleApi.DeploymentManager.V2.Model.TargetConfiguration)
field(:update, as: GoogleApi.DeploymentManager.V2.Model.DeploymentUpdate)
field(:updateTime)
end
defimpl Poison.Decoder, for: GoogleApi.DeploymentManager.V2.Model.Deployment do
def decode(value, options) do
GoogleApi.DeploymentManager.V2.Model.Deployment.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DeploymentManager.V2.Model.Deployment do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 63.426829 | 488 | 0.720631 |
73d875b24467428fcbd444432b4e8d64e9400b25 | 1,582 | ex | Elixir | lib/lou_web.ex | ConnorRigby/lou | 5fd68f99b72c82bd4cdd9a6ddeccbe967f7a74a1 | [
"MIT"
] | 1 | 2019-10-23T12:17:34.000Z | 2019-10-23T12:17:34.000Z | lib/lou_web.ex | ConnorRigby/lou | 5fd68f99b72c82bd4cdd9a6ddeccbe967f7a74a1 | [
"MIT"
] | null | null | null | lib/lou_web.ex | ConnorRigby/lou | 5fd68f99b72c82bd4cdd9a6ddeccbe967f7a74a1 | [
"MIT"
] | null | null | null | defmodule LouWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use LouWeb, :controller
use LouWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: LouWeb
import Plug.Conn
import LouWeb.Gettext
alias LouWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/lou_web/templates",
namespace: LouWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]
# Use all HTML functionality (forms, tags, etc)
use Phoenix.HTML
import LouWeb.ErrorHelpers
import LouWeb.Gettext
alias LouWeb.Router.Helpers, as: Routes
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
use Phoenix.Channel
import LouWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 22.6 | 83 | 0.67952 |
73d8a3b54777e18c41245f7bb1fe964b80121319 | 884 | ex | Elixir | clients/app_engine/lib/google_api/app_engine/v1/metadata.ex | Contractbook/elixir-google-api | 342751041aaf8c2e7f76f9922cf24b9c5895802b | [
"Apache-2.0"
] | 1 | 2021-10-01T09:20:41.000Z | 2021-10-01T09:20:41.000Z | clients/app_engine/lib/google_api/app_engine/v1/metadata.ex | Contractbook/elixir-google-api | 342751041aaf8c2e7f76f9922cf24b9c5895802b | [
"Apache-2.0"
] | null | null | null | clients/app_engine/lib/google_api/app_engine/v1/metadata.ex | Contractbook/elixir-google-api | 342751041aaf8c2e7f76f9922cf24b9c5895802b | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AppEngine.V1 do
@moduledoc """
API client metadata for GoogleApi.AppEngine.V1.
"""
@discovery_revision "20210613"
def discovery_revision(), do: @discovery_revision
end
| 32.740741 | 74 | 0.75905 |
73d8d10a8743649d6d0d547e04611f0805cbe07d | 622 | ex | Elixir | lib/ex_money/accounts_balance_history_worker.ex | van-mronov/ex_money | 39010f02fd822657e3b5694e08b872bd2ab72c26 | [
"0BSD"
] | 184 | 2015-11-23T20:51:50.000Z | 2022-03-30T01:01:39.000Z | lib/ex_money/accounts_balance_history_worker.ex | van-mronov/ex_money | 39010f02fd822657e3b5694e08b872bd2ab72c26 | [
"0BSD"
] | 15 | 2015-11-26T16:00:20.000Z | 2018-05-25T20:13:39.000Z | lib/ex_money/accounts_balance_history_worker.ex | van-mronov/ex_money | 39010f02fd822657e3b5694e08b872bd2ab72c26 | [
"0BSD"
] | 21 | 2015-11-26T21:34:40.000Z | 2022-03-26T02:56:42.000Z | defmodule ExMoney.AccountsBalanceHistoryWorker do
use GenServer
alias ExMoney.{Repo, Account}
alias ExMoney.Accounts.BalanceHistory
def start_link(_opts \\ []) do
GenServer.start_link(__MODULE__, :ok, name: :accounts_balance_history_worker)
end
def handle_call(:store_current_balance, _from, state) do
accounts_state =
Account
|> Repo.all
|> Enum.reduce(%{}, fn(account, acc) ->
Map.put(acc, account.id, account.balance)
end)
%BalanceHistory{}
|> BalanceHistory.changeset(%{state: accounts_state})
|> Repo.insert!
{:reply, :stored, state}
end
end
| 23.923077 | 81 | 0.681672 |
73d8feafd28c3dc2df92978a7ea9286a27568ae8 | 407 | ex | Elixir | lib/validation/rules/credit_card/amex.ex | elixir-validation/validation | 164a62c881e03a307172e4a20413f81260abb2dc | [
"MIT"
] | 60 | 2019-09-13T13:37:01.000Z | 2021-01-06T05:20:32.000Z | lib/validation/rules/credit_card/amex.ex | elixir-validation/validation | 164a62c881e03a307172e4a20413f81260abb2dc | [
"MIT"
] | 1 | 2019-12-16T13:57:22.000Z | 2019-12-16T13:57:22.000Z | lib/validation/rules/credit_card/amex.ex | elixir-validation/validation | 164a62c881e03a307172e4a20413f81260abb2dc | [
"MIT"
] | 5 | 2019-09-13T19:14:24.000Z | 2019-11-26T17:33:08.000Z | defmodule Validation.Rules.CreditCard.Amex do
@moduledoc false
alias Validation.Rules.CreditCard
@spec validate?(String.t) :: boolean
def validate?(input) when is_binary(input) do
# only numbers
input = Regex.replace(~r/\D/, input, "")
CreditCard.validate?(input) and amex_validation(input)
end
defp amex_validation(input) do
String.match?(input, ~r/^3[47]\d{13}$/)
end
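  # Hedged example using a widely published Amex test number (not a real card);
  # non-digits are stripped before the Luhn and prefix/length checks run:
  #
  #   Validation.Rules.CreditCard.Amex.validate?("3782 822463 10005") #=> true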
end
| 23.941176 | 58 | 0.700246 |
73d923d6c80d03c8a87e6d740227f740b76f67ec | 345 | ex | Elixir | lib/beam/ksp/unquote.ex | itsemilano/beam | afa79d7f31d2e2d4ded381656735d08cabd4f9ea | [
"Apache-2.0"
] | null | null | null | lib/beam/ksp/unquote.ex | itsemilano/beam | afa79d7f31d2e2d4ded381656735d08cabd4f9ea | [
"Apache-2.0"
] | null | null | null | lib/beam/ksp/unquote.ex | itsemilano/beam | afa79d7f31d2e2d4ded381656735d08cabd4f9ea | [
"Apache-2.0"
] | null | null | null | defmodule Unquote do
defmacro defkv(kv) do
Enum.map(kv, fn {k, v} ->
quote do
def unquote(k)(), do: unquote(v)
end
end)
end
end
defmodule Unquote2 do
defmacro defkv(kv) do
quote bind_quoted: [kv: kv] do
Enum.each(kv, fn {k, v} ->
def unquote(k)(), do: unquote(v)
end)
end
end
end
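# Hedged usage sketch (module and key names are invented for illustration):
#
#   defmodule KV do
#     require Unquote
#     Unquote.defkv foo: 1, bar: 2
#   end
#
#   KV.foo() #=> 1
#
# Unquote expects a literal keyword list at the call site, because Enum.map/2
# runs on it during macro expansion; Unquote2 defers the iteration with
# bind_quoted, so it should also accept a list bound to a variable in the
# caller's module body.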
| 17.25 | 40 | 0.55942 |
73d92500ef24f8e43d614e065cd77f49798e11cf | 492 | exs | Elixir | config/test.exs | MihailoIsakov/LixLint | 5edba3068b929417d49b2084bb15b21057e9ca0b | [
"MIT"
] | null | null | null | config/test.exs | MihailoIsakov/LixLint | 5edba3068b929417d49b2084bb15b21057e9ca0b | [
"MIT"
] | null | null | null | config/test.exs | MihailoIsakov/LixLint | 5edba3068b929417d49b2084bb15b21057e9ca0b | [
"MIT"
] | null | null | null | use Mix.Config
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :statika, Statika.Endpoint,
http: [port: 4001],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
# Configure your database
config :statika, Statika.Repo,
adapter: Ecto.Adapters.Postgres,
username: "postgres",
password: "postgres",
database: "statika_test",
size: 1 # Use a single connection for transactional tests
| 25.894737 | 59 | 0.737805 |
73d93fc485769da7a953249bc8ec398ddf2a360c | 1,648 | ex | Elixir | channels/lib/im/slack/slack_processor.ex | DeComX/hypertrons | 535e86f02a1ef474d022e0f22b18a4fffd3ff9be | [
"Apache-2.0"
] | null | null | null | channels/lib/im/slack/slack_processor.ex | DeComX/hypertrons | 535e86f02a1ef474d022e0f22b18a4fffd3ff9be | [
"Apache-2.0"
] | null | null | null | channels/lib/im/slack/slack_processor.ex | DeComX/hypertrons | 535e86f02a1ef474d022e0f22b18a4fffd3ff9be | [
"Apache-2.0"
] | null | null | null | defmodule Hypertrons.Channels.SlackProcessor do
alias Hypertrons.Channels.IMMessage.Request, as: MessageRequest
alias Hypertrons.Channels.IMProcessor
@behaviour IMProcessor
@impl IMProcessor
def handle_request(%MessageRequest{
platform: :slack,
ts: nil
} = req) do
Slack.Client.Chat.post_message(
token(),
req.conversation,
optional_params(req)
)
end
@impl IMProcessor
def handle_request(%MessageRequest{
platform: :slack
} = req) do
Slack.Client.Chat.schedule_message(
token(),
req.conversation,
req.ts,
optional_params(req)
)
end
defp token() do
    Application.fetch_env!(:hypertrons_channels, :slack_token)
end
defp optional_params(%MessageRequest{
optional_params: params
} = req) do
check_content(params, req.content)
|> check_parent(req.parent, req.conversation)
|> check_blocks(req.content)
end
defp check_content(params, [format: :slack_blocks, value: value]) do
Map.merge(%{blocks: Jason.encode!(value)}, params)
end
defp check_content(params, [format: _, value: value]) do
Map.merge(%{text: value}, params)
end
defp check_blocks(params, [type: :slack_blocks, value: blocks]) do
Map.merge(%{blocks: Jason.encode!(blocks)}, params)
end
defp check_blocks(params, _) do
params
end
defp check_parent(params, nil, _) do
params
end
defp check_parent(
params,
[
event_type: :im_message,
platform: :slack,
conversation: conversation,
local_id: local_id
],
conversation
) do
Map.merge(%{thread_ts: local_id}, params)
end
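  # Hedged illustration of the clause above (ids are invented): a request whose
  # parent is an :im_message from the same conversation, with local_id
  # "1618033988.000200", gets `thread_ts: "1618033988.000200"` merged into its
  # optional params, so Slack treats the outgoing message as a thread reply.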
end
| 21.684211 | 70 | 0.67415 |
73d9596745c310e2c14a4143a14a0008519aede1 | 335 | ex | Elixir | back/lib/api/auth/team.ex | HugoLefebvre/Epitech_TM_MP03 | 0ed161c956f7a10aec245fe2e17eb5a9b55f6075 | [
"MIT"
] | null | null | null | back/lib/api/auth/team.ex | HugoLefebvre/Epitech_TM_MP03 | 0ed161c956f7a10aec245fe2e17eb5a9b55f6075 | [
"MIT"
] | null | null | null | back/lib/api/auth/team.ex | HugoLefebvre/Epitech_TM_MP03 | 0ed161c956f7a10aec245fe2e17eb5a9b55f6075 | [
"MIT"
] | null | null | null | defmodule Api.Auth.Team do
use Ecto.Schema
import Ecto.Changeset
schema "teams" do
field :name, :string
many_to_many :users, Api.Auth.User, join_through: "teams_users"
timestamps()
end
@doc false
def changeset(team, attrs) do
team
|> cast(attrs, [:name])
|> validate_required([:name])
end
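  # Example (attribute values are illustrative; Api.Repo is assumed to be the
  # application's Ecto repo and is not defined here):
  #
  #   %Api.Auth.Team{}
  #   |> Api.Auth.Team.changeset(%{name: "Backend"})
  #   |> Api.Repo.insert()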
end
| 16.75 | 67 | 0.659701 |
73d9641d0b9fa4ee55e1b60a20f1431c243afebf | 11,990 | ex | Elixir | clients/you_tube/lib/google_api/you_tube/v3/api/comment_threads.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/api/comment_threads.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/you_tube/lib/google_api/you_tube/v3/api/comment_threads.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.YouTube.V3.Api.CommentThreads do
@moduledoc """
API calls for all endpoints tagged `CommentThreads`.
"""
alias GoogleApi.YouTube.V3.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Inserts a new resource into this collection.
## Parameters
* `connection` (*type:* `GoogleApi.YouTube.V3.Connection.t`) - Connection to server
* `part` (*type:* `list(String.t)`) - The <code><strong>part</strong></code> parameter identifies the properties
that the API response will include. Set the parameter value to
<code>snippet</code>. The <code>snippet</code> part has a quota cost of 2
units.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.YouTube.V3.Model.CommentThread.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.YouTube.V3.Model.CommentThread{}}` on success
* `{:error, info}` on failure
"""
@spec youtube_comment_threads_insert(Tesla.Env.client(), list(String.t()), keyword(), keyword()) ::
{:ok, GoogleApi.YouTube.V3.Model.CommentThread.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def youtube_comment_threads_insert(connection, part, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:post)
|> Request.url("/youtube/v3/commentThreads", %{})
|> Request.add_param(:query, :part, part)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.YouTube.V3.Model.CommentThread{}])
end
@doc """
Retrieves a list of resources, possibly filtered.
## Parameters
* `connection` (*type:* `GoogleApi.YouTube.V3.Connection.t`) - Connection to server
* `part` (*type:* `list(String.t)`) - The <code><strong>part</strong></code> parameter specifies a
comma-separated list of one or more <code>commentThread</code> resource
properties that the API response will include.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:allThreadsRelatedToChannelId` (*type:* `String.t`) - Returns the comment threads of all videos of the channel and the
channel comments as well.
* `:channelId` (*type:* `String.t`) - Returns the comment threads for all the channel comments (ie does not
include comments left on videos).
* `:id` (*type:* `list(String.t)`) - Returns the comment threads with the given IDs for Stubby or Apiary.
* `:maxResults` (*type:* `integer()`) - The <code><strong>maxResults</strong></code> parameter specifies the
maximum number of items that should be returned in the result set.
* `:moderationStatus` (*type:* `String.t`) - Limits the returned comment threads to those with the specified
moderation status. Not compatible with the 'id' filter.
Valid values: published, heldForReview, likelySpam.
* `:order` (*type:* `String.t`) -
* `:pageToken` (*type:* `String.t`) - The <code><strong>pageToken</strong></code> parameter identifies a specific
page in the result set that should be returned. In an API response, the
<code>nextPageToken</code> and <code>prevPageToken</code> properties
identify other pages that could be retrieved.
* `:searchTerms` (*type:* `String.t`) - Limits the returned comment threads to those matching the specified
key words. Not compatible with the 'id' filter.
* `:textFormat` (*type:* `String.t`) - The requested text format for the returned comments.
* `:videoId` (*type:* `String.t`) - Returns the comment threads of the specified video.
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.YouTube.V3.Model.CommentThreadListResponse{}}` on success
* `{:error, info}` on failure
"""
@spec youtube_comment_threads_list(Tesla.Env.client(), list(String.t()), keyword(), keyword()) ::
{:ok, GoogleApi.YouTube.V3.Model.CommentThreadListResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def youtube_comment_threads_list(connection, part, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:allThreadsRelatedToChannelId => :query,
:channelId => :query,
:id => :query,
:maxResults => :query,
:moderationStatus => :query,
:order => :query,
:pageToken => :query,
:searchTerms => :query,
:textFormat => :query,
:videoId => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/youtube/v3/commentThreads", %{})
|> Request.add_param(:query, :part, part)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.YouTube.V3.Model.CommentThreadListResponse{}])
end
@doc """
Updates an existing resource.
## Parameters
* `connection` (*type:* `GoogleApi.YouTube.V3.Connection.t`) - Connection to server
* `part` (*type:* `list(String.t)`) - The <code><strong>part</strong></code> parameter specifies a
comma-separated list of <code>commentThread</code> resource properties that
the API response will include. You must at least include the
<code>snippet</code> part in the parameter value since that part contains
all of the properties that the API request can update.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `:body` (*type:* `GoogleApi.YouTube.V3.Model.CommentThread.t`) -
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.YouTube.V3.Model.CommentThread{}}` on success
* `{:error, info}` on failure
"""
@spec youtube_comment_threads_update(Tesla.Env.client(), list(String.t()), keyword(), keyword()) ::
{:ok, GoogleApi.YouTube.V3.Model.CommentThread.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def youtube_comment_threads_update(connection, part, optional_params \\ [], opts \\ []) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query,
:body => :body
}
request =
Request.new()
|> Request.method(:put)
|> Request.url("/youtube/v3/commentThreads", %{})
|> Request.add_param(:query, :part, part)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.YouTube.V3.Model.CommentThread{}])
end
end
| 49.139344 | 196 | 0.635696 |
73d96532962b2eafa138b563a91e6aee4e8d8841 | 103 | exs | Elixir | elixir-primer/v01/ch14/maps1.exs | Atla0903/Study-Elixir | 155fbf30e508e513278425b89262a0a444e0936c | [
"Unlicense"
] | null | null | null | elixir-primer/v01/ch14/maps1.exs | Atla0903/Study-Elixir | 155fbf30e508e513278425b89262a0a444e0936c | [
"Unlicense"
] | null | null | null | elixir-primer/v01/ch14/maps1.exs | Atla0903/Study-Elixir | 155fbf30e508e513278425b89262a0a444e0936c | [
"Unlicense"
] | null | null | null | ages = %{"Alice" => 23, "Bob" => 15, "Charlie" => 48}
IO.inspect ages["Bob"]
IO.inspect ages["David"]
| 20.6 | 53 | 0.572816 |
73d99e6ab970d266c2261d5669dde02951204092 | 931 | exs | Elixir | mix.exs | tensiondriven/phoenix_slime | b8f17de61bebde7d6d8d31645e9fa049e2cb8c9d | [
"MIT"
] | 1 | 2022-03-27T20:20:15.000Z | 2022-03-27T20:20:15.000Z | mix.exs | tensiondriven/phoenix_slime | b8f17de61bebde7d6d8d31645e9fa049e2cb8c9d | [
"MIT"
] | null | null | null | mix.exs | tensiondriven/phoenix_slime | b8f17de61bebde7d6d8d31645e9fa049e2cb8c9d | [
"MIT"
] | 2 | 2022-03-08T09:08:41.000Z | 2022-03-08T09:09:58.000Z | defmodule PhoenixSlime.Mixfile do
use Mix.Project
@version "0.13.0"
def project do
[
app: :phoenix_slime,
deps: deps(),
description: "Phoenix Template Engine for Slim-like templates",
elixir: "~> 1.4",
package: package(),
version: @version
]
end
def application do
[applications: [:phoenix, :slime]]
end
def deps do
[
{:phoenix, "~> 1.6.0"},
{:phoenix_html, "~> 3.0"},
{:phoenix_live_view, "~> 0.17.2"},
{:jason, "~> 1.0", optional: true},
{:slime, github: "tensiondriven/slime"},
{:ex_doc, ">= 0.0.0", only: :dev},
{:earmark, ">= 0.0.0", only: :dev}
]
end
defp package do
[
maintainers: ["Sean Callan", "Alexander Stanko"],
files: ["lib", "priv", "mix.exs", "README*", "LICENSE*"],
licenses: ["MIT"],
links: %{github: "https://github.com/slime-lang/phoenix_slime"}
]
end
end
| 22.166667 | 69 | 0.543502 |
73d99f087239d16b76876305b47de37ad7e17b9c | 180 | exs | Elixir | config/config.exs | mneudert/instream | 9a86c43014fcc490fe4a931026f74cd896f78cc7 | [
"Apache-2.0"
] | 204 | 2015-06-04T06:38:57.000Z | 2022-01-30T08:56:33.000Z | config/config.exs | mneudert/instream | 9a86c43014fcc490fe4a931026f74cd896f78cc7 | [
"Apache-2.0"
] | 71 | 2015-05-24T02:08:28.000Z | 2022-01-10T17:13:27.000Z | config/config.exs | mneudert/instream | 9a86c43014fcc490fe4a931026f74cd896f78cc7 | [
"Apache-2.0"
] | 41 | 2015-09-05T21:24:04.000Z | 2021-11-10T02:52:48.000Z | use Mix.Config
if Mix.env() == :test do
config :logger, :console,
format: "\n$time $metadata[$level] $levelpad$message\n",
metadata: [:query_time, :response_status]
end
| 22.5 | 60 | 0.672222 |
73d9b400455c16f6b52936808d434a7692360b29 | 581 | ex | Elixir | lib/hl7/2.3.1/segments/prd.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.3.1/segments/prd.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.3.1/segments/prd.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_3_1.Segments.PRD do
@moduledoc false
require Logger
alias HL7.V2_3_1.{DataTypes}
use HL7.Segment,
fields: [
segment: nil,
provider_role: DataTypes.Ce,
provider_name: DataTypes.Xpn,
provider_address: DataTypes.Xad,
provider_location: DataTypes.Pl,
provider_communication_information: DataTypes.Xtn,
preferred_method_of_contact: DataTypes.Ce,
provider_identifiers: DataTypes.Pi,
effective_start_date_of_provider_role: DataTypes.Ts,
effective_end_date_of_provider_role: DataTypes.Ts
]
end
| 27.666667 | 58 | 0.73494 |
73d9edb46e61f2b84302bab935a0fc50b0040b73 | 2,210 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/click_tag.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/click_tag.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/click_tag.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.DFAReporting.V33.Model.ClickTag do
@moduledoc """
Creative Click Tag.
## Attributes
* `clickThroughUrl` (*type:* `GoogleApi.DFAReporting.V33.Model.CreativeClickThroughUrl.t`, *default:* `nil`) - Parameter value for the specified click tag. This field contains a click-through url.
* `eventName` (*type:* `String.t`, *default:* `nil`) - Advertiser event name associated with the click tag. This field is used by DISPLAY_IMAGE_GALLERY and HTML5_BANNER creatives. Applicable to DISPLAY when the primary asset type is not HTML_IMAGE.
* `name` (*type:* `String.t`, *default:* `nil`) - Parameter name for the specified click tag. For DISPLAY_IMAGE_GALLERY creative assets, this field must match the value of the creative asset's creativeAssetId.name field.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:clickThroughUrl => GoogleApi.DFAReporting.V33.Model.CreativeClickThroughUrl.t(),
:eventName => String.t(),
:name => String.t()
}
field(:clickThroughUrl, as: GoogleApi.DFAReporting.V33.Model.CreativeClickThroughUrl)
field(:eventName)
field(:name)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V33.Model.ClickTag do
def decode(value, options) do
GoogleApi.DFAReporting.V33.Model.ClickTag.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V33.Model.ClickTag do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 41.698113 | 252 | 0.743891 |
73da299f50baf49027ee0eacdf53738b38fcc09f | 3,769 | exs | Elixir | test/searcher_test.exs | codeflows/jonne | ea23cf6b253972975e85d5f028910638ac2dc5fc | [
"Apache-2.0"
] | null | null | null | test/searcher_test.exs | codeflows/jonne | ea23cf6b253972975e85d5f028910638ac2dc5fc | [
"Apache-2.0"
] | 1 | 2018-03-28T07:55:04.000Z | 2019-08-20T14:15:19.000Z | test/searcher_test.exs | codeflows/jonne | ea23cf6b253972975e85d5f028910638ac2dc5fc | [
"Apache-2.0"
] | null | null | null | defmodule TimeParser do
def parse_time(time_string) do
Timex.parse!(time_string, "{YYYY}-{0M}-{0D}") |> Timex.shift(hours: 5)
end
end
defmodule SearcherTest do
alias Jonne.{Searcher, Elasticsearch.MockClient}
import Mox
use ExUnit.Case, async: false
setup :verify_on_exit!
@current_time TimeParser.parse_time("2016-02-28")
@current_index "my-index-2016.02.28"
test "returns nil initial sort if index has no documents" do
expect_search(payload: initial_search(), result: {:hits, []})
assert Searcher.get_initial_position(@current_time) == %{
sort: nil,
current_index: @current_index
}
end
test "returns initial sort based on the sort order of the latest document" do
expect_search(payload: initial_search(), result: {:hits, [document_with_sort([100])]})
assert Searcher.get_initial_position(@current_time) == %{
sort: [100],
current_index: @current_index
}
end
test "subsequent search is done without search_after if sort is nil" do
expect_search(payload: subsequent_search(), result: {:hits, []})
current_position = %{current_index: @current_index, sort: nil}
assert Searcher.get_new_messages(@current_time, current_position) == %{
position: current_position,
hits: []
}
end
test "subsequent search returns current sort if no new results are found" do
expect_search(payload: subsequent_search(search_after: [1]), result: {:hits, []})
current_position = %{current_index: @current_index, sort: [1]}
assert Searcher.get_new_messages(@current_time, current_position) == %{
position: current_position,
hits: []
}
end
test "subsequent search returns new hits and new sort position if results are found" do
new_document = document_with_sort([2])
expect_search(payload: subsequent_search(search_after: [1]), result: {:hits, [new_document]})
current_position = %{current_index: @current_index, sort: [1]}
result = Searcher.get_new_messages(@current_time, current_position)
assert result.position == %{current_index: @current_index, sort: [2]}
assert result.hits == [new_document]
end
test "search uses two indices when date changes, and returns the new index in the position" do
new_documents = [document_with_sort([2]), document_with_sort([3])]
next_day = TimeParser.parse_time("2016-03-01")
next_day_index = "my-index-2016.03.01"
expect_search(
indices: [@current_index, next_day_index],
payload: subsequent_search(search_after: [1]),
result: {:hits, new_documents}
)
current_position = %{current_index: @current_index, sort: [1]}
result = Searcher.get_new_messages(next_day, current_position)
assert result.position == %{current_index: next_day_index, sort: [3]}
assert result.hits == new_documents
end
defp expect_search(options) do
search = keywords_to_map(options)
payload = search.payload
indices = Map.get(search, :indices, [@current_index])
MockClient |> expect(:search, fn _url, ^indices, ^payload -> search.result end)
end
defp initial_search do
%{
size: 1,
sort: [%{"@timestamp" => "desc"}]
}
end
defp subsequent_search(extra_fields \\ []) do
payload = %{
size: 1000,
sort: ["@timestamp"],
query: %{
query_string: %{
query: "bad hombre"
}
}
}
keywords_to_map(extra_fields) |> Map.merge(payload)
end
defp document_with_sort(sort) do
%{
"sort" => sort,
"_source" => %{
"message" => "Document content"
}
}
end
defp keywords_to_map(keywords) do
Enum.into(keywords, %{})
end
end
| 29.677165 | 97 | 0.659326 |
73da36b5a7aa447800f9dbff678efea65f71e92a | 934 | exs | Elixir | mix.exs | erickgnavar/pokedex-api | ed6dbe831192e684e9605b35844bde80b085ab2e | [
"MIT"
] | null | null | null | mix.exs | erickgnavar/pokedex-api | ed6dbe831192e684e9605b35844bde80b085ab2e | [
"MIT"
] | null | null | null | mix.exs | erickgnavar/pokedex-api | ed6dbe831192e684e9605b35844bde80b085ab2e | [
"MIT"
] | null | null | null | defmodule Pokedex.MixProject do
use Mix.Project
def project do
[
app: :pokedex,
version: "0.1.0",
elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
deps: deps(),
aliases: aliases()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger],
mod: {Pokedex.Application, []}
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:mojito, "~> 0.6.4"},
{:floki, "~> 0.26.0"},
{:ecto, "~> 3.4"},
{:ecto_sql, "~> 3.4"},
{:postgrex, ">= 0.0.0"},
{:plug, "~> 1.10"},
{:plug_cowboy, "~> 2.0"},
{:jason, "~> 1.2"}
]
end
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
end
| 20.755556 | 59 | 0.505353 |
73da45d0c09574b975368939cb23ea4d8cdb7ce4 | 1,207 | ex | Elixir | apps/examples/test/support/e2e/log_spread.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 276 | 2018-01-16T06:36:06.000Z | 2021-03-20T21:48:01.000Z | apps/examples/test/support/e2e/log_spread.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 73 | 2018-10-05T18:45:06.000Z | 2021-02-08T05:46:33.000Z | apps/examples/test/support/e2e/log_spread.ex | ccamateur/tai | 41c4b3e09dafc77987fa3f6b300c15461d981e16 | [
"MIT"
] | 43 | 2018-06-09T09:54:51.000Z | 2021-03-07T07:35:17.000Z | defmodule ExamplesSupport.E2E.LogSpread do
alias Tai.TestSupport.Mocks
import Tai.TestSupport.Mock
@venue :test_exchange_a
@product_symbol :btc_usd
def seed_mock_responses(:log_spread) do
Mocks.Responses.Products.for_venue(
@venue,
[
%{symbol: @product_symbol},
%{symbol: :ltc_usd}
]
)
end
def seed_venues(:log_spread) do
{:ok, _} =
Tai.Venue
|> struct(
id: @venue,
adapter: Tai.VenueAdapters.Mock,
credentials: %{},
accounts: "*",
products: "*",
order_books: "*",
quote_depth: 1,
timeout: 1000
)
|> Tai.Venues.VenueStore.put()
end
def push_stream_market_data({:log_spread, :snapshot, venue_id, product_symbol})
when venue_id == @venue and product_symbol == @product_symbol do
push_market_data_snapshot(
%Tai.Markets.Location{
venue_id: @venue,
product_symbol: @product_symbol
},
%{6500.1 => 1.1},
%{6500.11 => 1.2}
)
end
def fleet_config(:log_spread) do
%{
advisor: Examples.LogSpread.Advisor,
factory: Tai.Advisors.Factories.OnePerProduct,
quotes: "*"
}
end
end
| 22.351852 | 81 | 0.599834 |
73da5eec8c861bd4f023356375b7605d2608883d | 6,985 | exs | Elixir | test/teslamate/mqtt/pubsub/vehicle_subscriber_test.exs | mnadvornik/teslamate | 91af0029bb4272495b94abde6f1d2fb8b2c79fc4 | [
"MIT"
] | 1 | 2021-05-04T18:06:35.000Z | 2021-05-04T18:06:35.000Z | test/teslamate/mqtt/pubsub/vehicle_subscriber_test.exs | mnadvornik/teslamate | 91af0029bb4272495b94abde6f1d2fb8b2c79fc4 | [
"MIT"
] | 171 | 2020-07-08T18:42:57.000Z | 2022-03-23T00:55:30.000Z | test/teslamate/mqtt/pubsub/vehicle_subscriber_test.exs | virtualm2000/teslamate | b2dad66d992b8e04d8213f2657492fa75872ece5 | [
"MIT"
] | null | null | null | defmodule TeslaMate.Mqtt.PubSub.VehicleSubscriberTest do
use TeslaMate.DataCase, async: true
alias TeslaMate.Mqtt.PubSub.VehicleSubscriber
alias TeslaMate.Vehicles.Vehicle.Summary
alias TeslaMate.Locations.GeoFence
defp start_subscriber(name, car_id, namespace \\ nil) do
publisher_name = :"mqtt_publisher_#{name}"
vehicles_name = :"vehicles_#{name}"
{:ok, _pid} = start_supervised({MqttPublisherMock, name: publisher_name, pid: self()})
{:ok, _pid} = start_supervised({VehiclesMock, name: vehicles_name, pid: self()})
start_supervised(
{VehicleSubscriber,
[
name: name,
car_id: car_id,
namespace: namespace,
deps_publisher: {MqttPublisherMock, publisher_name},
deps_vehicles: {VehiclesMock, vehicles_name}
]}
)
end
test "publishes vehicle data", %{test: name} do
{:ok, pid} = start_subscriber(name, 0)
assert_receive {VehiclesMock, {:subscribe_to_summary, 0}}
summary = %Summary{
healthy: true,
display_name: "Foo",
odometer: 42_000,
windows_open: true,
doors_open: true,
shift_state: "D",
state: :online,
since: DateTime.utc_now(),
latitude: 37.889602,
longitude: 41.129182,
speed: 40,
heading: 340,
outside_temp: 15,
inside_temp: 20.0,
locked: true,
sentry_mode: false,
plugged_in: false,
version: "2019.42",
update_available: false,
update_version: "2019.43",
is_preconditioning: true,
is_user_present: false,
is_climate_on: true,
geofence: %GeoFence{id: 0, name: "Home", latitude: 0.0, longitude: 0.0, radius: 20},
model: "S",
trim_badging: "P100D",
exterior_color: "White",
spoiler_type: "None",
wheel_type: "AeroTurbine19",
frunk_open: true,
trunk_open: false,
elevation: 100
}
send(pid, summary)
for {key, val} <- Map.from_struct(summary),
not is_nil(val) and key not in [:since, :geofence] do
topic = "teslamate/cars/0/#{key}"
data = to_string(val)
assert_receive {MqttPublisherMock, {:publish, ^topic, ^data, [retain: true, qos: 1]}}
end
iso_time = DateTime.to_iso8601(summary.since)
assert_receive {MqttPublisherMock,
{:publish, "teslamate/cars/0/since", ^iso_time, [retain: true, qos: 1]}}
assert_receive {MqttPublisherMock,
{:publish, "teslamate/cars/0/geofence", "Home", [retain: true, qos: 1]}}
for key <- [
:charge_energy_added,
:charger_actual_current,
:charger_phases,
:charger_power,
:charger_voltage,
:scheduled_charging_start_time,
:time_to_full_charge
] do
topic = "teslamate/cars/0/#{key}"
assert_receive {MqttPublisherMock, {:publish, ^topic, "", [retain: true, qos: 1]}}
end
refute_receive _
end
test "publishes charging data", %{test: name} do
{:ok, pid} = start_subscriber(name, 0)
assert_receive {VehiclesMock, {:subscribe_to_summary, 0}}
summary = %Summary{
plugged_in: false,
battery_level: 60.0,
usable_battery_level: 59,
charge_energy_added: 25,
charge_limit_soc: 90,
charge_port_door_open: false,
charger_actual_current: 42,
charger_phases: 3,
charger_power: 50,
charger_voltage: 16,
est_battery_range_km: 220.05,
ideal_battery_range_km: 230.52,
rated_battery_range_km: 230.52,
scheduled_charging_start_time: DateTime.utc_now() |> DateTime.add(60 * 60 * 10, :second),
time_to_full_charge: 2.5
}
send(pid, summary)
for {key, val} <- Map.from_struct(summary),
not is_nil(val) and key != :scheduled_charging_start_time do
topic = "teslamate/cars/0/#{key}"
data = to_string(val)
assert_receive {MqttPublisherMock, {:publish, ^topic, ^data, [retain: true, qos: 1]}}
end
# Formated dates
iso_time = DateTime.to_iso8601(summary.scheduled_charging_start_time)
assert_receive {MqttPublisherMock,
{:publish, "teslamate/cars/0/scheduled_charging_start_time", ^iso_time,
[retain: true, qos: 1]}}
# Always published
assert_receive {MqttPublisherMock,
{:publish, "teslamate/cars/0/shift_state", "", [retain: true, qos: 1]}}
assert_receive {MqttPublisherMock,
{:publish, "teslamate/cars/0/geofence", "", [retain: true, qos: 1]}}
assert_receive {MqttPublisherMock,
{:publish, "teslamate/cars/0/trim_badging", "", [retain: true, qos: 1]}}
refute_receive _
end
test "publishes geofence only if it has changed", %{test: name} do
{:ok, pid} = start_subscriber(name, 0)
assert_receive {VehiclesMock, {:subscribe_to_summary, 0}}
geofence = %GeoFence{id: 0, name: "Home", latitude: 0.0, longitude: 0.0, radius: 20}
other_geofence = %GeoFence{id: 0, name: "Work", latitude: 0.0, longitude: 0.0, radius: 20}
# Send geofence
send(pid, %Summary{geofence: geofence, version: "1"})
assert_receive {MqttPublisherMock, {:publish, "teslamate/cars/0/geofence", "Home", _}}
assert_receive {MqttPublisherMock, {:publish, "teslamate/cars/0/version", "1", _}}
# Send geofence again and expect no message
send(pid, %Summary{geofence: geofence, version: "2"})
refute_receive {MqttPublisherMock, {:publish, "teslamate/cars/0/geofence", _, _}}
assert_receive {MqttPublisherMock, {:publish, "teslamate/cars/0/version", "2", _}}
# Send another geofence
send(pid, %Summary{geofence: other_geofence, version: "3"})
assert_receive {MqttPublisherMock, {:publish, "teslamate/cars/0/geofence", "Work", _}}
assert_receive {MqttPublisherMock, {:publish, "teslamate/cars/0/version", "3", _}}
end
test "allows namespaces", %{test: name} do
{:ok, pid} = start_subscriber(name, 0, "account_0")
assert_receive {VehiclesMock, {:subscribe_to_summary, 0}}
summary = %Summary{
display_name: "Foo",
state: :online
}
send(pid, summary)
assert_receive {MqttPublisherMock,
{:publish, "teslamate/account_0/cars/0/display_name", "Foo",
[retain: true, qos: 1]}}
assert_receive {MqttPublisherMock,
{:publish, "teslamate/account_0/cars/0/state", "online",
[retain: true, qos: 1]}}
# Always published
for key <- [
:charge_energy_added,
:charger_actual_current,
:charger_phases,
:charger_power,
:charger_voltage,
:scheduled_charging_start_time,
:time_to_full_charge,
:shift_state,
:geofence,
:trim_badging
] do
topic = "teslamate/account_0/cars/0/#{key}"
assert_receive {MqttPublisherMock, {:publish, ^topic, "", [retain: true, qos: 1]}}
end
refute_receive _
end
end
| 32.337963 | 95 | 0.628203 |
73da8eb78183d96f71e74d119e99611a24f768f8 | 2,673 | ex | Elixir | lib/arcdown/parsers/article_parser.ex | functionhaus/arcdown | 26c9de0d2fadc09e6b74ac2f7855088440845c2f | [
"Apache-2.0"
] | 7 | 2019-01-21T04:17:50.000Z | 2020-03-08T06:40:54.000Z | lib/arcdown/parsers/article_parser.ex | functionhaus/arcdown | 26c9de0d2fadc09e6b74ac2f7855088440845c2f | [
"Apache-2.0"
] | 3 | 2019-09-19T19:13:09.000Z | 2019-09-26T14:39:23.000Z | lib/arcdown/parsers/article_parser.ex | functionhaus/arcdown | 26c9de0d2fadc09e6b74ac2f7855088440845c2f | [
"Apache-2.0"
] | null | null | null | defmodule Arcdown.Parsers.ArticleParser do
@moduledoc """
Module responsible for parsing the entire content and filename of a single
article file and generating an %Article{} struct populated with all
relevant attributes and metadata
"""
alias Arcdown.Article
alias Arcdown.Parsers.HeaderParser
@patterns %{
divider: ~r/\n{2}---\n{2}/,
empty_file: ~r/^$/,
whitespace_only: ~r/^[\n\s]*$/,
divider_only: ~r/^[\n\s]*---[\n\s]*$/,
content_only: ~r/^---\n\n/,
header_only: ~r/^(?<header>[\w\d]+.*)(?!=(\n\n---\n{0,2}))$/,
full_article: ~r/^(?<header>.*)\n{2}---\n{2}(?<content>.*$)/,
ambiguous: ~r/\n{0,2}---\n{0,2}/
}
@doc """
Read a full Arcdown article from a given file path, split the header and
content, construct an Article with content, and parse the header data.
"""
@spec parse_file(binary()) :: {atom(), Article.t()|binary()}
def parse_file path do
{:ok, file_text} = File.read path
case parse_text file_text do
{:ok, parsed_article} -> {:ok, parsed_article}
_ -> {:error, "Failed to parse article text."}
end
end
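# Illustrative usage (a sketch — the path below is hypothetical; any file
# containing an Arcdown document works):
#
#     {:ok, article} = ArticleParser.parse_file("posts/hello_world.ad")
#     article.content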
@doc """
Take in a full Arcdown article as a single string, split the header and
content, construct an Article with content, and parse the header data.
"""
@spec parse_text(binary()) :: {atom(), Article.t()|binary()}
def parse_text text do
{:ok, header, content} = match_parts text
case {header, content} do
{nil, nil} ->
{:ok, %Article{}}
{nil, content} ->
{:ok, %Article{content: content}}
{header, nil} ->
HeaderParser.parse_header header
{header, content} ->
HeaderParser.parse_header header, %Article{content: content}
end
end
@spec match_parts(binary()) :: {atom(), binary()|nil, binary()|nil}
def match_parts text do
cond do
Enum.any?([:empty_file, :whitespace_only, :divider_only], &(Regex.match? @patterns[&1], text)) ->
{:ok, nil, nil}
Regex.match? @patterns[:content_only], text ->
{:ok, nil, Regex.replace(@patterns[:content_only], text, "", global: false)}
Regex.match? @patterns[:header_only], text ->
%{"header" => header} = Regex.named_captures @patterns[:header_only], text
{:ok, header, nil}
Regex.match? @patterns[:divider], text ->
[header, content] = Regex.split @patterns[:divider], text, parts: 2
{:ok, header, content}
Regex.match? @patterns[:ambiguous], text ->
%{"content" => content, "header" => header} = Regex.named_captures @patterns[:full_article], text
{:ok, header, content}
true ->
{:ok, text, nil}
end
end
end
| 31.447059 | 105 | 0.607931 |
73daa46b65f5547b5ffa205002379ca1301f28a8 | 2,645 | ex | Elixir | lib/mars_rover_kata/planet.ex | lucazulian/mars-rover-kata | 1c7e5ebfee09aca74dceb69f84a7e95aae6defce | [
"MIT"
] | 2 | 2021-05-10T09:47:22.000Z | 2022-02-20T10:41:24.000Z | lib/mars_rover_kata/planet.ex | lucazulian/mars-rover-kata | 1c7e5ebfee09aca74dceb69f84a7e95aae6defce | [
"MIT"
] | 2 | 2021-05-08T09:28:41.000Z | 2021-12-23T09:58:53.000Z | lib/mars_rover_kata/planet.ex | lucazulian/mars-rover-kata | 1c7e5ebfee09aca74dceb69f84a7e95aae6defce | [
"MIT"
] | 1 | 2021-05-07T08:54:50.000Z | 2021-05-07T08:54:50.000Z | defmodule MarsRoverKata.Planet do
@moduledoc """
Represents the planet in which the robot moves.
The planet is represented by a max_x * max_y grid on a zero-based coordinate
system. The planet is a sphere: its vertical edges connect to themselves, so
crossing one wraps the position around with inverted coordinates.
"""
alias MarsRoverKata.Point
alias MarsRoverKata.Position
@type t :: %__MODULE__{
max_x: integer(),
max_y: integer(),
obstacles: list(Point.t())
}
defstruct max_x: 0,
max_y: 0,
obstacles: []
@doc ~S"""
Transforms a relative position into an absolute position based on the planet's shape
## Examples
iex> MarsRoverKata.Planet.to_absolute_position(%MarsRoverKata.Position{point: MarsRoverKata.Point.new(-1, -1)}, %MarsRoverKata.Planet{max_x: 5, max_y: 5})
%MarsRoverKata.Position{point: %MarsRoverKata.Point{x: 4, y: 4}}
iex> MarsRoverKata.Planet.to_absolute_position(%MarsRoverKata.Position{point: MarsRoverKata.Point.new(12, 12)}, %MarsRoverKata.Planet{max_x: 5, max_y: 5})
%MarsRoverKata.Position{point: %MarsRoverKata.Point{x: 2, y: 2}}
iex> MarsRoverKata.Planet.to_absolute_position(%MarsRoverKata.Position{point: MarsRoverKata.Point.new(-1, 1)}, %MarsRoverKata.Planet{max_x: 5, max_y: 5})
%MarsRoverKata.Position{point: %MarsRoverKata.Point{x: 4, y: 1}}
iex> MarsRoverKata.Planet.to_absolute_position(%MarsRoverKata.Position{point: MarsRoverKata.Point.new(1, -1)}, %MarsRoverKata.Planet{max_x: 5, max_y: 5})
%MarsRoverKata.Position{point: %MarsRoverKata.Point{x: 4, y: 1}}
"""
@spec to_absolute_position(__MODULE__.t(), Position.t()) :: Position.t()
def to_absolute_position(
%Position{
point: %Point{
x: x,
y: y
},
direction: direction
},
%__MODULE__{max_x: max_x, max_y: max_y}
) do
point =
if crossing_vertical_edges?(max_y, y) do
Point.new(
Integer.mod(y, max_y),
Integer.mod(x, max_x)
)
else
Point.new(
Integer.mod(x, max_x),
Integer.mod(y, max_y)
)
end
%Position{
point: point,
direction: direction
}
end
@spec has_obstacles?(__MODULE__.t(), Point.t()) :: boolean
def has_obstacles?(%__MODULE__{obstacles: obstacles}, point) do
Enum.any?(obstacles, &(&1 == point))
end
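# Sketch of how obstacle detection can be used before moving the rover
# (the coordinates below are made up for illustration):
#
#     planet = %MarsRoverKata.Planet{max_x: 5, max_y: 5, obstacles: [Point.new(2, 3)]}
#     MarsRoverKata.Planet.has_obstacles?(planet, Point.new(2, 3)) # => true
#     MarsRoverKata.Planet.has_obstacles?(planet, Point.new(0, 0)) # => false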
defp crossing_vertical_edges?(max_y, y) do
0 > y || y > max_y
end
end
defimpl String.Chars, for: MarsRoverKata.Planet do
alias MarsRoverKata.Planet
def to_string(%Planet{max_x: max_x, max_y: max_y}) do
"#{max_x}:#{max_y}"
end
end
| 30.056818 | 160 | 0.643856 |
73dacad9c3a7dc3cb83a13eff2eff8ec0783cf09 | 1,156 | ex | Elixir | test/support/conn_case.ex | gleidsonduarte/phoenix-elixir-hello-world | b89f7682ac63df8fb8235b2f426b8a82e4cec826 | [
"MIT"
] | null | null | null | test/support/conn_case.ex | gleidsonduarte/phoenix-elixir-hello-world | b89f7682ac63df8fb8235b2f426b8a82e4cec826 | [
"MIT"
] | null | null | null | test/support/conn_case.ex | gleidsonduarte/phoenix-elixir-hello-world | b89f7682ac63df8fb8235b2f426b8a82e4cec826 | [
"MIT"
] | null | null | null | defmodule PhoenixElixirHelloWorldWeb.ConnCase do
@moduledoc """
This module defines the test case to be used by
tests that require setting up a connection.
Such tests rely on `Phoenix.ConnTest` and also
import other functionality to make it easier
to build common data structures and query the data layer.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use PhoenixElixirHelloWorldWeb.ConnCase, async: true`, although
this option is not recommended for other databases.
"""
use ExUnit.CaseTemplate
using do
quote do
# Import conveniences for testing with connections
import Plug.Conn
import Phoenix.ConnTest
import PhoenixElixirHelloWorldWeb.ConnCase
alias PhoenixElixirHelloWorldWeb.Router.Helpers, as: Routes
# The default endpoint for testing
@endpoint PhoenixElixirHelloWorldWeb.Endpoint
end
end
setup _tags do
{:ok, conn: Phoenix.ConnTest.build_conn()}
end
end
| 30.421053 | 77 | 0.752595 |
73dad39b3a38106fd4935c8712059fd9aee1e5e0 | 510 | ex | Elixir | components_demo/lib/components_demo_web/router.ex | inspired-consulting/gen_components | 47c567fb2d6e918341e2efa07c2f7345038a8a90 | [
"MIT"
] | 2 | 2022-03-28T21:33:39.000Z | 2022-03-30T17:14:34.000Z | components_demo/lib/components_demo_web/router.ex | inspired-consulting/gen_components | 47c567fb2d6e918341e2efa07c2f7345038a8a90 | [
"MIT"
] | null | null | null | components_demo/lib/components_demo_web/router.ex | inspired-consulting/gen_components | 47c567fb2d6e918341e2efa07c2f7345038a8a90 | [
"MIT"
] | null | null | null | defmodule ComponentsDemoWeb.Router do
use ComponentsDemoWeb, :router
pipeline :browser do
plug :accepts, ["html"]
plug :fetch_session
plug :fetch_live_flash
plug :put_root_layout, {ComponentsDemoWeb.LayoutView, :root}
plug :protect_from_forgery
plug :put_secure_browser_headers
end
scope "/", ComponentsDemoWeb do
pipe_through :browser
import ComponentsDemoWeb.ComponentsCatalogueLive
catalogue_routes("/catalogue")
get "/", PageController, :index
end
end
| 23.181818 | 64 | 0.741176 |
73dae42268edef0bb09a368eb17031554a2852ef | 88 | ex | Elixir | elixir/elixir-elm-playground/lib/playground_web/views/coherence/email_view.ex | marcinbiegun/exercises | 36ad942e8d40d6471136326a3f6d09285bbd90aa | [
"MIT"
] | 1 | 2018-12-11T14:09:14.000Z | 2018-12-11T14:09:14.000Z | elixir/elixir-elm-playground/lib/playground_web/views/coherence/email_view.ex | marcinbiegun/exercises | 36ad942e8d40d6471136326a3f6d09285bbd90aa | [
"MIT"
] | null | null | null | elixir/elixir-elm-playground/lib/playground_web/views/coherence/email_view.ex | marcinbiegun/exercises | 36ad942e8d40d6471136326a3f6d09285bbd90aa | [
"MIT"
] | null | null | null | defmodule PlaygroundWeb.Coherence.EmailView do
use PlaygroundWeb.Coherence, :view
end
| 22 | 46 | 0.840909 |
73daea104104b5c0035ea839aab6052c817ada54 | 1,006 | exs | Elixir | mix.exs | lorenzosinisi/woke | 91823d8a5dacd450287887adf3e5a9072388833a | [
"Apache-2.0"
] | null | null | null | mix.exs | lorenzosinisi/woke | 91823d8a5dacd450287887adf3e5a9072388833a | [
"Apache-2.0"
] | null | null | null | mix.exs | lorenzosinisi/woke | 91823d8a5dacd450287887adf3e5a9072388833a | [
"Apache-2.0"
] | null | null | null | defmodule Woke.MixProject do
use Mix.Project
def project do
[
app: :woke,
version: "0.1.1",
elixir: "~> 1.10",
docs: [extras: ["README.md"], main: "README.md", source_ref: "v0.1.1"],
source_url: "https://github.com/lorenzosinisi/woke",
description: description(),
package: package(),
deps: deps()
]
end
defp description do
"""
Watchdog design pattern library for Elixir applications
"""
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
defp package do
[
files: ["lib", "mix.exs", "README.md", "LICENSE"],
maintainers: ["Lorenzo Sinisi"],
licenses: ["Apache 2.0"],
links: %{"GitHub" => "https://github.com/lorenzosinisi/woke"}
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:ex_doc, ">= 0.0.0", only: :dev, runtime: false}
]
end
end
| 21.404255 | 77 | 0.576541 |
73daf04cc481bace152c2ed1447b5b38306ab2b9 | 143 | ex | Elixir | apps/cronitex_web/lib/cronitex_web/controllers/page_controller.ex | alayers2/cronitex | cf8da9553b8e93b9171f5e9a0f0a08a24a324689 | [
"MIT"
] | 1 | 2020-11-05T15:38:53.000Z | 2020-11-05T15:38:53.000Z | apps/cronitex_web/lib/cronitex_web/controllers/page_controller.ex | alayers2/cronitex | cf8da9553b8e93b9171f5e9a0f0a08a24a324689 | [
"MIT"
] | 36 | 2020-10-24T01:28:42.000Z | 2022-02-07T11:11:37.000Z | apps/cronitex_web/lib/cronitex_web/controllers/page_controller.ex | alayers2/cronitex | cf8da9553b8e93b9171f5e9a0f0a08a24a324689 | [
"MIT"
] | null | null | null | defmodule CronitexWeb.PageController do
use CronitexWeb, :controller
def index(conn, _params) do
render(conn, "index.html")
end
end
| 17.875 | 39 | 0.741259 |
73daf753dca4309eb1f19cfce557c2a497675e19 | 946 | ex | Elixir | test/support/contorller_test_helper.ex | kenta-aktsk/media_sample | a0368236bd81c18ac136a54aa7383332a5e3cf36 | [
"MIT"
] | 99 | 2016-04-19T11:11:57.000Z | 2021-12-12T14:38:02.000Z | test/support/contorller_test_helper.ex | ogamw/media_sample | a0368236bd81c18ac136a54aa7383332a5e3cf36 | [
"MIT"
] | 48 | 2016-04-06T02:28:46.000Z | 2016-05-31T06:56:56.000Z | test/support/contorller_test_helper.ex | ogamw/media_sample | a0368236bd81c18ac136a54aa7383332a5e3cf36 | [
"MIT"
] | 32 | 2016-04-19T11:12:00.000Z | 2021-03-25T18:19:52.000Z | defmodule MediaSample.ControllerTestHelper do
defmacro __using__(opts) do
quote location: :keep do
use Plug.Test
@controller unquote(opts)[:controller] || raise ":controller must be given."
def action(conn, action, params \\ %{}) do
conn =
conn
|> put_private(:phoenix_controller, @controller)
|> Phoenix.Controller.put_view(Phoenix.Controller.__view__(@controller))
apply(@controller, action, [conn, params])
end
@signing_opts Plug.Session.init(
store: :cookie,
key: "_app",
encryption_salt: "encrypted cookie salt",
signing_salt: "signing salt"
)
defp with_session_and_flash(conn) do
conn
|> Map.put(:secret_key_base, String.duplicate("abcdefgh", 8))
|> Plug.Session.call(@signing_opts)
|> Plug.Conn.fetch_session
|> Phoenix.ConnTest.fetch_flash
end
end
end
end
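# Illustrative usage in a controller test (a sketch — the module and action
# names are hypothetical; only the helper wiring comes from the code above):
#
#     defmodule MediaSample.SomeControllerTest do
#       use MediaSample.ConnCase
#       use MediaSample.ControllerTestHelper, controller: MediaSample.SomeController
#
#       test "renders index" do
#         conn = build_conn() |> with_session_and_flash() |> action(:index)
#         assert html_response(conn, 200)
#       end
#     end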
| 28.666667 | 82 | 0.622622 |
73db2f54d72c3eeb492033c767ab14227ae5f36a | 693 | ex | Elixir | web/controllers/donation_goal_controller.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | null | null | null | web/controllers/donation_goal_controller.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | null | null | null | web/controllers/donation_goal_controller.ex | superdev999/Phoenix-project | ab13ac9366cdd0aa9581da7faf993b11aaa5344c | [
"MIT"
] | null | null | null | defmodule CodeCorps.DonationGoalController do
use CodeCorps.Web, :controller
use JaResource
import CodeCorps.Helpers.Query, only: [id_filter: 2]
alias CodeCorps.DonationGoal
alias CodeCorps.Services.DonationGoalsService
plug :load_and_authorize_changeset, model: DonationGoal, only: [:create]
plug :load_and_authorize_resource, model: DonationGoal, only: [:update, :delete]
plug JaResource
def filter(_conn, query, "id", id_list), do: id_filter(query, id_list)
def handle_create(_conn, attributes) do
attributes |> DonationGoalsService.create
end
def handle_update(_conn, record, attributes) do
record |> DonationGoalsService.update(attributes)
end
end
| 28.875 | 82 | 0.773449 |
73db49eb27cad8d58bf552466b9bfb6cb8076d8b | 1,071 | ex | Elixir | lib/struct_a.ex | marick/flow_assertions | 50936d03c7e0d482f13f94c96438627d344592c0 | [
"Unlicense"
] | 8 | 2020-09-04T19:38:41.000Z | 2021-06-05T18:59:49.000Z | lib/struct_a.ex | marick/flow_assertions | 50936d03c7e0d482f13f94c96438627d344592c0 | [
"Unlicense"
] | null | null | null | lib/struct_a.ex | marick/flow_assertions | 50936d03c7e0d482f13f94c96438627d344592c0 | [
"Unlicense"
] | null | null | null | defmodule FlowAssertions.StructA do
use FlowAssertions.Define
alias FlowAssertions.Messages
@moduledoc """
Assertions that apply only to structs, not maps.
"""
@doc """
Assert that the value is a particular module's struct.
```
make_user("fred") |> assert_struct_named(User)
```
For structs, this has the same purpose as
`FlowAssertions.MiscA.assert_shape/2`. However, because
it's not a macro, the second argument can be a variable. That makes it useful
for building up larger assertion functions.
"""
defchain assert_struct_named(value_to_check, module_name) do
boom! = fn msg ->
elaborate_flunk(msg, left: value_to_check)
end
cond do
is_struct(value_to_check) ->
actual_name = value_to_check.__struct__
if actual_name != module_name,
do: boom!.(Messages.wrong_struct_name(actual_name, module_name))
is_map(value_to_check) ->
boom!.(Messages.map_not_struct(module_name))
:else ->
boom!.(Messages.very_wrong_struct(module_name))
end
end
end
| 27.461538 | 79 | 0.69281 |
73db7cd7be52109e6e2ead1cae921fb573841189 | 963 | exs | Elixir | config/config.exs | Fabian-Fynn/elixir-socket-test | a292d8397c331bdc988f32b601f85f477a20d58c | [
"MIT"
] | null | null | null | config/config.exs | Fabian-Fynn/elixir-socket-test | a292d8397c331bdc988f32b601f85f477a20d58c | [
"MIT"
] | null | null | null | config/config.exs | Fabian-Fynn/elixir-socket-test | a292d8397c331bdc988f32b601f85f477a20d58c | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# Configures the endpoint
config :socket_test, SocketTest.Endpoint,
url: [host: "localhost"],
root: Path.dirname(__DIR__),
secret_key_base: "qUmsKfHk6y3KVHafyi2jIqq90Cbu1NYxqOm6RBtaZpXQsUOBKnyCa/4YtbjPGjDx",
render_errors: [accepts: ~w(html json)],
pubsub: [name: SocketTest.PubSub,
adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
# Configure phoenix generators
config :phoenix, :generators,
migration: true,
binary_id: false
| 32.1 | 86 | 0.760125 |
73db99b5d9bd1d207c30cdda45af6ed563d96f5e | 509 | ex | Elixir | lib/mix_systemd_deploy_web/views/error_view.ex | cogini/mix-systemd-deploy-example | 7b0720a46a14fd0f7f61f88f7fdc26fdd7f05772 | [
"Apache-2.0"
] | 4 | 2019-03-01T07:26:26.000Z | 2021-11-05T13:53:07.000Z | lib/mix_systemd_deploy_web/views/error_view.ex | cogini/mix-systemd-deploy-example | 7b0720a46a14fd0f7f61f88f7fdc26fdd7f05772 | [
"Apache-2.0"
] | null | null | null | lib/mix_systemd_deploy_web/views/error_view.ex | cogini/mix-systemd-deploy-example | 7b0720a46a14fd0f7f61f88f7fdc26fdd7f05772 | [
"Apache-2.0"
] | null | null | null | defmodule MixSystemdDeployWeb.ErrorView do
use MixSystemdDeployWeb, :view
# If you want to customize a particular status code
# for a certain format, you may uncomment below.
# def render("500.html", _assigns) do
# "Internal Server Error"
# end
# By default, Phoenix returns the status message from
# the template name. For example, "404.html" becomes
# "Not Found".
def template_not_found(template, _assigns) do
Phoenix.Controller.status_message_from_template(template)
end
end
| 29.941176 | 61 | 0.744597 |
73dba717a583abf1b4d6e9bfa2e5e941d3b4f986 | 1,241 | ex | Elixir | clients/search_console/lib/google_api/search_console/v1/connection.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/search_console/lib/google_api/search_console/v1/connection.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/search_console/lib/google_api/search_console/v1/connection.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SearchConsole.V1.Connection do
@moduledoc """
Handle Tesla connections for GoogleApi.SearchConsole.V1.
"""
@type t :: Tesla.Env.client()
use GoogleApi.Gax.Connection,
scopes: [
# View and manage Search Console data for your verified sites
"https://www.googleapis.com/auth/webmasters",
# View Search Console data for your verified sites
"https://www.googleapis.com/auth/webmasters.readonly"
],
otp_app: :google_api_search_console,
base_url: "https://searchconsole.googleapis.com/"
end
| 34.472222 | 74 | 0.73892 |
73dbaa14dbe38973fd4d7c2214942818f0e4e802 | 826 | exs | Elixir | apps/ewallet_db/priv/repo/migrations/20180627142121_add_default_exchange_address_to_api_keys.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 322 | 2018-02-28T07:38:44.000Z | 2020-05-27T23:09:55.000Z | apps/ewallet_db/priv/repo/migrations/20180627142121_add_default_exchange_address_to_api_keys.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 643 | 2018-02-28T12:05:20.000Z | 2020-05-22T08:34:38.000Z | apps/ewallet_db/priv/repo/migrations/20180627142121_add_default_exchange_address_to_api_keys.exs | AndonMitev/EWallet | 898cde38933d6f134734528b3e594eedf5fa50f3 | [
"Apache-2.0"
] | 63 | 2018-02-28T10:57:06.000Z | 2020-05-27T23:10:38.000Z | # Copyright 2018-2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule EWalletDB.Repo.Migrations.AddDefaultExchangeAddressToApiKeys do
use Ecto.Migration
def change do
alter table(:api_key) do
add :exchange_address, references(:wallet, type: :string, column: :address)
end
end
end
| 34.416667 | 81 | 0.75908 |
73dbaf23ce40c68ee30717a93fc746d1a1cb6a22 | 2,659 | ex | Elixir | lib/ex_aws/config/auth_cache.ex | sanchapereira/ex_aws | 1e5a03a939763424302a0511c56294d572a2ebfb | [
"MIT"
] | null | null | null | lib/ex_aws/config/auth_cache.ex | sanchapereira/ex_aws | 1e5a03a939763424302a0511c56294d572a2ebfb | [
"MIT"
] | 2 | 2021-01-29T21:12:46.000Z | 2021-01-30T06:11:16.000Z | lib/ex_aws/config/auth_cache.ex | sanchapereira/ex_aws | 1e5a03a939763424302a0511c56294d572a2ebfb | [
"MIT"
] | null | null | null | defmodule ExAws.Config.AuthCache do
@moduledoc false
use GenServer
# http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html
defmodule AuthConfigAdapter do
@moduledoc false
@doc "Compute the awscli auth information."
@callback adapt_auth_config(auth :: map, profile :: String.t(), expiration :: integer) :: any
end
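# Minimal sketch of an adapter implementation (the module name and pass-through
# strategy are assumptions — any module implementing the callback can serve as
# the configured awscli auth adapter):
#
#     defmodule MyApp.StaticAuthAdapter do
#       @behaviour ExAws.Config.AuthCache.AuthConfigAdapter
#
#       @impl true
#       def adapt_auth_config(auth, _profile, _expiration), do: auth
#     end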
def start_link(opts \\ []) do
GenServer.start_link(__MODULE__, :ok, opts)
end
def get(config) do
case :ets.lookup(__MODULE__, :aws_instance_auth) do
[{:aws_instance_auth, auth_config}] -> auth_config
[] -> GenServer.call(__MODULE__, {:refresh_config, config}, 30_000)
end
end
def get(profile, expiration) do
case :ets.lookup(__MODULE__, :awscli) do
[{:awscli, auth_config}] ->
auth_config
[] ->
GenServer.call(__MODULE__, {:refresh_awscli_config, profile, expiration}, 30_000)
end
end
## Callbacks
def init(:ok) do
ets = :ets.new(__MODULE__, [:named_table, read_concurrency: true])
{:ok, ets}
end
def handle_call({:refresh_config, config}, _from, ets) do
auth = refresh_config(config, ets)
{:reply, auth, ets}
end
def handle_call({:refresh_awscli_config, profile, expiration}, _from, ets) do
auth = refresh_awscli_config(profile, expiration, ets)
{:reply, auth, ets}
end
def handle_info({:refresh_config, config}, ets) do
refresh_config(config, ets)
{:noreply, ets}
end
def handle_info({:refresh_awscli_config, profile, expiration}, ets) do
refresh_awscli_config(profile, expiration, ets)
{:noreply, ets}
end
def refresh_awscli_config(profile, expiration, ets) do
Process.send_after(self(), {:refresh_awscli_config, profile, expiration}, expiration)
auth = ExAws.CredentialsIni.security_credentials(profile)
auth =
case ExAws.Config.awscli_auth_adapter() do
nil ->
auth
adapter ->
adapter.adapt_auth_config(auth, profile, expiration)
end
:ets.insert(ets, {:awscli, auth})
auth
end
def refresh_config(config, ets) do
auth = ExAws.InstanceMeta.security_credentials(config)
:ets.insert(ets, {:aws_instance_auth, auth})
Process.send_after(self(), {:refresh_config, config}, refresh_in(auth[:expiration]))
auth
end
def refresh_in(expiration) do
expiration = expiration |> ExAws.Utils.iso_z_to_secs()
time_to_expiration = expiration - ExAws.Utils.now_in_seconds()
# check five mins prior to expiration
refresh_in = time_to_expiration - 5 * 60
# check now if we should have checked in the past
max(0, refresh_in * 1000)
end
end
| 27.132653 | 97 | 0.68522 |
73dbd9cc38e8671c2362d54ed84b66e9db88ce70 | 293 | exs | Elixir | priv/repo/migrations/20201229170714_add_contacts_table.exs | jfcloutier/freegiving | 2ab3821595996fc295c5b55515d6f60cbce05181 | [
"Unlicense"
] | null | null | null | priv/repo/migrations/20201229170714_add_contacts_table.exs | jfcloutier/freegiving | 2ab3821595996fc295c5b55515d6f60cbce05181 | [
"Unlicense"
] | null | null | null | priv/repo/migrations/20201229170714_add_contacts_table.exs | jfcloutier/freegiving | 2ab3821595996fc295c5b55515d6f60cbce05181 | [
"Unlicense"
] | null | null | null | defmodule Freegiving.Repo.Migrations.AddContactsTable do
use Ecto.Migration
def change do
create table("contacts") do
add :name, :string, null: false
add :email, :string, null: false
add :phone, :string
end
create unique_index("contacts", :email)
end
end
| 20.928571 | 56 | 0.675768 |
73dc1f8ca9abd19446e065cc05222b40e9af1f75 | 1,341 | exs | Elixir | config/config.exs | flyingalex/another-phoenix-trello | c8844c4f4ad64df2f809a3f87637a416561c71b6 | [
"MIT"
] | null | null | null | config/config.exs | flyingalex/another-phoenix-trello | c8844c4f4ad64df2f809a3f87637a416561c71b6 | [
"MIT"
] | null | null | null | config/config.exs | flyingalex/another-phoenix-trello | c8844c4f4ad64df2f809a3f87637a416561c71b6 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
config :phoenix_trello,
namespace: PhoenixTrello,
ecto_repos: [PhoenixTrello.Repo]
# Configures the endpoint
config :phoenix_trello, PhoenixTrello.Endpoint,
url: [host: "localhost"],
root: Path.dirname(__DIR__),
secret_key_base: "hWbd3QwLuaWKwJY5qYOKLGSBboxjnW46c4TzBAa+cMODz26RokgHQIJo6Nej3DGr",
render_errors: [accepts: ~w(html json)],
pubsub_server: PhoenixTrello.PubSub
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:request_id]
# Configure phoenix generators
config :phoenix, :generators,
migration: true,
binary_id: false
# Configure guardian
config :phoenix_trello, Guardian,
issuer: "PhoenixTrello",
ttl: { 3, :days },
verify_issuer: true,
serializer: PhoenixTrello.GuardianSerializer
# Start Hound for PhantomJs
config :hound, driver: "chrome_driver"
# Use Jason for JSON parsing in Phoenix
config :phoenix, :json_library, Jason
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env}.exs"
| 28.531915 | 86 | 0.770321 |
73dc268113d4a28a12ef56f686a676024cac87af | 19,936 | ex | Elixir | deps/phoenix_pubsub/lib/phoenix/tracker/state.ex | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | [
"MIT"
] | null | null | null | deps/phoenix_pubsub/lib/phoenix/tracker/state.ex | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | [
"MIT"
] | null | null | null | deps/phoenix_pubsub/lib/phoenix/tracker/state.ex | rpillar/Top5_Elixir | 9c450d2e9b291108ff1465dc066dfe442dbca822 | [
"MIT"
] | null | null | null | defmodule Phoenix.Tracker.State do
@moduledoc """
Provides an ORSWOT CRDT.
"""
alias Phoenix.Tracker.{State, Clock}
@type name :: term
@type topic :: String.t
@type key :: term
@type meta :: map
@type ets_id :: :ets.tid
@type clock :: pos_integer
@type tag :: {name, clock}
@type cloud :: MapSet.t
@type clouds :: %{name => cloud}
@type context :: %{name => clock}
@type values :: ets_id | :extracted | %{tag => {pid, topic, key, meta}}
@type value :: {{topic, pid, key}, meta, tag}
@type key_meta :: {key, meta}
@type delta :: %State{mode: :delta}
@type pid_lookup :: {pid, topic, key}
@type t :: %State{
replica: name,
context: context,
clouds: clouds,
values: values,
pids: ets_id,
mode: :unset | :delta | :normal,
delta: :unset | delta,
replicas: %{name => :up | :down},
range: {context, context}
}
defstruct replica: nil,
context: %{},
clouds: %{},
values: nil,
pids: nil,
mode: :unset,
delta: :unset,
replicas: %{},
range: {%{}, %{}}
@compile {:inline, tag: 1, clock: 1, put_tag: 2, delete_tag: 2, remove_delta_tag: 2}
@doc """
Creates a new set for the replica.
## Examples
iex> Phoenix.Tracker.State.new(:replica1)
%Phoenix.Tracker.State{...}
"""
@spec new(name, atom) :: t
def new(replica, shard_name) do
reset_delta(%State{
replica: replica,
context: %{replica => 0},
mode: :normal,
values: :ets.new(shard_name, [:named_table, :protected, :ordered_set]),
pids: :ets.new(:pids, [:duplicate_bag]),
replicas: %{replica => :up}})
end
@doc """
Returns the causal context for the set.
"""
@spec clocks(t) :: {name, context}
def clocks(%State{replica: rep, context: ctx}), do: {rep, ctx}
@doc """
Adds a new element to the set.
"""
@spec join(t, pid, topic, key, meta) :: t
def join(%State{} = state, pid, topic, key, meta \\ %{}) do
add(state, pid, topic, key, meta)
end
@doc """
Removes an element from the set.
"""
@spec leave(t, pid, topic, key) :: t
def leave(%State{pids: pids} = state, pid, topic, key) do
pids
|> :ets.match_object({pid, topic, key})
|> case do
[{^pid, ^topic, ^key}] -> remove(state, pid, topic, key)
[] -> state
end
end
@doc """
Removes all elements from the set for the given pid.
"""
@spec leave(t, pid) :: t
def leave(%State{pids: pids} = state, pid) do
pids
|> :ets.lookup(pid)
|> Enum.reduce(state, fn {^pid, topic, key}, acc ->
remove(acc, pid, topic, key)
end)
end
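# Illustrative write-side usage (a sketch — the pid, topic and key are
# arbitrary example values):
#
#     state = State.new(:replica1, :tracker_shard)
#     state = State.join(state, self(), "lobby", "user:1", %{status: "online"})
#     state = State.leave(state, self(), "lobby", "user:1")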
@doc """
Returns a list of elements in the set belonging to an online replica.
"""
@spec online_list(t) :: [value]
def online_list(%State{values: values} = state) do
replicas = down_replicas(state)
:ets.select(values, [{ {:_, :_, {:"$1", :_}},
not_in(:"$1", replicas), [:"$_"]}])
end
@doc """
Returns a list of elements for the topic that belong to an online replica.
"""
@spec get_by_topic(t, topic) :: [key_meta]
def get_by_topic(%State{values: values} = state, topic) do
tracked_values(values, topic, down_replicas(state))
end
@doc """
Returns a list of elements for the topic and key that belong to an online replica.
"""
@spec get_by_key(t, topic, key) :: [key_meta]
def get_by_key(%State{values: values} = state, topic, key) do
case tracked_key(values, topic, key, down_replicas(state)) do
[] -> []
[_|_] = metas -> metas
end
end
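# Read-side sketch (assumes `state` already contains tracked presences):
#
#     State.get_by_topic(state, "lobby")           # => [{key, meta}, ...] from online replicas
#     State.get_by_key(state, "lobby", "user:1")   # => [{pid, meta}, ...] from online replicas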
@doc """
Performs table lookup for tracked elements in the topic.
Filters out those present on downed replicas.
"""
def tracked_values(table, topic, down_replicas) do
:ets.select(table,
[{{{topic, :_, :"$1"}, :"$2", {:"$3", :_}},
not_in(:"$3", down_replicas),
[{{:"$1", :"$2"}}]}])
end
@doc """
Performs table lookup for tracked key in the topic.
Filters out those present on downed replicas.
"""
def tracked_key(table, topic, key, down_replicas) do
:ets.select(table,
[{{{topic, :"$1", key}, :"$2", {:"$3", :_}},
not_in(:"$3", down_replicas),
[{{:"$1", :"$2"}}]}])
end
defp not_in(_pos, []), do: []
defp not_in(pos, replicas), do: [not: ors(pos, replicas)]
defp ors(pos, [rep]), do: {:"=:=", pos, {rep}}
defp ors(pos, [rep | rest]), do: {:or, {:"=:=", pos, {rep}}, ors(pos, rest)}
@doc """
Returns the element matching the pid, topic, and key.
"""
@spec get_by_pid(t, pid, topic, key) :: value | nil
def get_by_pid(%State{values: values}, pid, topic, key) do
case :ets.lookup(values, {topic, pid, key}) do
[] -> nil
[one] -> one
end
end
@doc """
Returns all elements for the pid.
"""
@spec get_by_pid(t, pid) :: [value]
def get_by_pid(%State{pids: pids, values: values}, pid) do
case :ets.lookup(pids, pid) do
[] -> []
matches ->
:ets.select(values, Enum.map(matches, fn {^pid, topic, key} ->
{{{topic, pid, key}, :_, :_}, [], [:"$_"]}
end))
end
end
@doc """
Checks if the set has a non-empty delta.
"""
@spec has_delta?(t) :: boolean
def has_delta?(%State{delta: %State{clouds: clouds}}) do
Enum.find(clouds, fn {_name, cloud} -> MapSet.size(cloud) != 0 end)
end
@doc """
Resets the set's delta.
"""
@spec reset_delta(t) :: t
def reset_delta(%State{context: ctx, replica: replica} = state) do
delta_ctx = Map.take(ctx, [replica])
delta = %State{replica: replica,
values: %{},
range: {delta_ctx, delta_ctx},
mode: :delta}
%State{state | delta: delta}
end
@doc """
Extracts the set's elements from ets into a mergeable list.
Used when merging two sets.
"""
@spec extract(t, remote_ref :: name, context) :: t | {t, values}
def extract(%State{mode: :delta, values: values, clouds: clouds} = state, remote_ref, remote_context) do
{start_ctx, end_ctx} = state.range
known_keys = Map.keys(remote_context)
pruned_clouds = Map.take(clouds, known_keys)
pruned_start = Map.take(start_ctx, known_keys)
pruned_end = Map.take(end_ctx, known_keys)
map = Enum.reduce(values, [], fn
{{^remote_ref, _clock}, _data}, acc -> acc
{{replica, _clock} = tag, data}, acc ->
if Map.has_key?(remote_context, replica) do
[{tag, data} | acc]
else
acc
end
end) |> :maps.from_list()
%State{state | values: map, clouds: pruned_clouds, range: {pruned_start, pruned_end}}
end
def extract(%State{mode: :normal, values: values, clouds: clouds} = state, remote_ref, remote_context) do
known_keys = Map.keys(remote_context)
pruned_clouds = Map.take(clouds, known_keys)
pruned_context = Map.take(state.context, known_keys)
# fn {{topic, pid, key}, meta, {replica, clock}} when replica !== remote_ref ->
# {{replica, clock}, {pid, topic, key, meta}}
# end
ms = [{
{{:"$1", :"$2", :"$3"}, :"$4", {:"$5", :"$6"}},
[{:"=/=", :"$5", {:const, remote_ref}}],
[{{{{:"$5", :"$6"}}, {{:"$2", :"$1", :"$3", :"$4"}}}}]
}]
data =
foldl(values, [], ms, fn {{replica, _} = tag, data}, acc ->
if match?(%{^replica => _}, remote_context) do
[{tag, data} | acc]
else
acc
end
end)
{%State{state |
clouds: pruned_clouds,
context: pruned_context,
pids: nil,
values: nil,
delta: :unset}, Map.new(data)}
end
@doc """
Merges two sets, or a delta into a set.
Returns a 3-tuple of the updated set, and the joined and left elements.
## Examples
iex> {s1, joined, left} =
Phoenix.Tracker.State.merge(s1, Phoenix.Tracker.State.extract(s2))
{%Phoenix.Tracker.State{}, [...], [...]}
"""
@spec merge(local :: t, {remote :: t, values} | delta) :: {new_local :: t, joins :: [value], leaves :: [value]}
def merge(%State{} = local, %State{mode: :delta, values: remote_map} = remote) do
merge(local, remote, remote_map)
end
def merge(%State{} = local, {%State{} = remote, remote_map}) do
merge(local, remote, remote_map)
end
defp merge(local, remote, remote_map) do
{pids, joins} = accumulate_joins(local, remote_map)
{clouds, delta, leaves} = observe_removes(local, remote, remote_map)
true = :ets.insert(local.values, joins)
true = :ets.insert(local.pids, pids)
known_remote_context = Map.take(remote.context, Map.keys(local.context))
ctx = Clock.upperbound(local.context, known_remote_context)
new_state =
%State{local | clouds: clouds, delta: delta}
|> put_context(ctx)
|> compact()
{new_state, joins, leaves}
end
@spec accumulate_joins(t, values) :: joins :: {[pid_lookup], [values]}
defp accumulate_joins(local, remote_map) do
%State{context: context, clouds: clouds} = local
Enum.reduce(remote_map, {[], []}, fn {{replica, _} = tag, {pid, topic, key, meta}}, {pids, adds} ->
if not match?(%{^replica => _}, context) or in?(context, clouds, tag) do
{pids, adds}
else
{[{pid, topic, key} | pids], [{{topic, pid, key}, meta, tag} | adds]}
end
end)
end
@spec observe_removes(t, t, map) :: {clouds, delta, leaves :: [value]}
defp observe_removes(%State{pids: pids, values: values, delta: delta} = local, remote, remote_map) do
unioned_clouds = union_clouds(local, remote)
%State{context: remote_context, clouds: remote_clouds} = remote
init = {unioned_clouds, delta, []}
local_replica = local.replica
# fn {_, _, {replica, _}} = result when replica != local_replica -> result end
ms = [{
{:_, :_, {:"$1", :_}},
[{:"/=", :"$1", {:const, local_replica}}],
[:"$_"]
}]
foldl(values, init, ms, fn {{topic, pid, key} = values_key, _, tag} = el, {clouds, delta, leaves} ->
if not match?(%{^tag => _}, remote_map) and in?(remote_context, remote_clouds, tag) do
:ets.delete(values, values_key)
:ets.match_delete(pids, {pid, topic, key})
{delete_tag(clouds, tag), remove_delta_tag(delta, tag), [el | leaves]}
else
{clouds, delta, leaves}
end
end)
end
defp put_tag(clouds, {name, _clock} = tag) do
case clouds do
%{^name => cloud} -> %{clouds | name => MapSet.put(cloud, tag)}
_ -> Map.put(clouds, name, MapSet.new([tag]))
end
end
defp delete_tag(clouds, {name, _clock} = tag) do
case clouds do
%{^name => cloud} -> %{clouds | name => MapSet.delete(cloud, tag)}
_ -> clouds
end
end
defp union_clouds(%State{mode: :delta} = local, %State{} = remote) do
Enum.reduce(remote.clouds, local.clouds, fn {name, remote_cloud}, acc ->
Map.update(acc, name, remote_cloud, &MapSet.union(&1, remote_cloud))
end)
end
defp union_clouds(%State{mode: :normal, context: local_ctx} = local, %State{} = remote) do
Enum.reduce(remote.clouds, local.clouds, fn {name, remote_cloud}, acc ->
if Map.has_key?(local_ctx, name) do
Map.update(acc, name, remote_cloud, &MapSet.union(&1, remote_cloud))
else
acc
end
end)
end
def merge_deltas(%State{mode: :delta} = local, %State{mode: :delta, values: remote_values} = remote) do
%{values: local_values, range: {local_start, local_end}, context: local_context, clouds: local_clouds} = local
%{range: {remote_start, remote_end}, context: remote_context, clouds: remote_clouds} = remote
if (Clock.dominates_or_equal?(remote_end, local_start) and
Clock.dominates_or_equal?(local_end, remote_start)) or
(Clock.dominates_or_equal?(local_end, remote_start) and
Clock.dominates_or_equal?(remote_end, local_start)) do
new_start = Clock.lowerbound(local_start, remote_start)
new_end = Clock.upperbound(local_end, remote_end)
clouds = union_clouds(local, remote)
filtered_locals = for {tag, value} <- local_values,
match?(%{^tag => _}, remote_values) or not in?(remote_context, remote_clouds, tag),
do: {tag, value}
merged_vals = for {tag, value} <- remote_values,
not match?(%{^tag => _}, local_values) and not in?(local_context, local_clouds, tag),
into: filtered_locals,
do: {tag, value}
{:ok, %State{local | clouds: clouds, values: Map.new(merged_vals), range: {new_start, new_end}}}
else
{:error, :not_contiguous}
end
end
@doc """
Marks a replica as up in the set and returns rejoined users.
"""
@spec replica_up(t, name) :: {t, joins :: [values], leaves :: []}
def replica_up(%State{replicas: replicas, context: ctx} = state, replica) do
{%State{state |
context: Map.put_new(ctx, replica, 0),
replicas: Map.put(replicas, replica, :up)}, replica_users(state, replica), []}
end
@doc """
Marks a replica as down in the set and returns left users.
"""
@spec replica_down(t, name) :: {t, joins :: [], leaves :: [values]}
def replica_down(%State{replicas: replicas} = state, replica) do
{%State{state | replicas: Map.put(replicas, replica, :down)}, [], replica_users(state, replica)}
end
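# Sketch: marking a replica down surfaces its presences as leaves, and marking
# it up again surfaces them as (re)joins — the replica name is illustrative.
#
#     {state, [], leaves} = State.replica_down(state, :replica2)
#     {state, joins, []}  = State.replica_up(state, :replica2)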
@doc """
Removes all elements for replicas that are permanently gone.
"""
@spec remove_down_replicas(t, name) :: t
def remove_down_replicas(%State{mode: :normal, context: ctx, values: values, pids: pids} = state, replica) do
new_ctx = Map.delete(ctx, replica)
# fn {key, _, {^replica, _}} -> key end
ms = [{{:"$1", :_, {replica, :_}}, [], [:"$1"]}]
foldl(values, nil, ms, fn {topic, pid, key} = values_key, _ ->
:ets.delete(values, values_key)
:ets.match_delete(pids, {pid, topic, key})
nil
end)
new_clouds = Map.delete(state.clouds, replica)
new_delta = remove_down_replicas(state.delta, replica)
%State{state | context: new_ctx, clouds: new_clouds, delta: new_delta}
end
def remove_down_replicas(%State{mode: :delta, range: range} = delta, replica) do
{start_ctx, end_ctx} = range
new_start = Map.delete(start_ctx, replica)
new_end = Map.delete(end_ctx, replica)
new_clouds = Map.delete(delta.clouds, replica)
new_vals = Enum.reduce(delta.values, delta.values, fn
{{^replica, _clock} = tag, {_pid, _topic, _key, _meta}}, vals ->
Map.delete(vals, tag)
{{_replica, _clock} = _tag, {_pid, _topic, _key, _meta}}, vals ->
vals
end)
%State{delta | range: {new_start, new_end}, clouds: new_clouds, values: new_vals}
end
@doc """
Returns the dize of the delta.
"""
@spec delta_size(delta) :: pos_integer
def delta_size(%State{mode: :delta, clouds: clouds, values: values}) do
Enum.reduce(clouds, map_size(values), fn {_name, cloud}, sum ->
sum + MapSet.size(cloud)
end)
end
@spec add(t, pid, topic, key, meta) :: t
defp add(%State{} = state, pid, topic, key, meta) do
state
|> bump_clock()
|> do_add(pid, topic, key, meta)
end
defp do_add(%State{delta: delta} = state, pid, topic, key, meta) do
tag = tag(state)
true = :ets.insert(state.values, {{topic, pid, key}, meta, tag})
true = :ets.insert(state.pids, {pid, topic, key})
new_delta = %State{delta | values: Map.put(delta.values, tag, {pid, topic, key, meta})}
%State{state | delta: new_delta}
end
@spec remove(t, pid, topic, key) :: t
defp remove(%State{pids: pids, values: values} = state, pid, topic, key) do
[{{^topic, ^pid, ^key}, _meta, tag}] = :ets.lookup(values, {topic, pid, key})
1 = :ets.select_delete(values, [{{{topic, pid, key}, :_, :_}, [], [true]}])
1 = :ets.select_delete(pids, [{{pid, topic, key}, [], [true]}])
pruned_clouds = delete_tag(state.clouds, tag)
new_delta = remove_delta_tag(state.delta, tag)
bump_clock(%State{state | clouds: pruned_clouds, delta: new_delta})
end
@spec remove_delta_tag(delta, tag) :: delta
defp remove_delta_tag(%{mode: :delta, values: values, clouds: clouds} = delta, tag) do
%{delta | clouds: put_tag(clouds, tag), values: Map.delete(values, tag)}
end
@doc """
Compacts a set's causal history.
Called as needed and after merges.
"""
@spec compact(t) :: t
def compact(%State{context: ctx, clouds: clouds} = state) do
{new_ctx, new_clouds} =
Enum.reduce(clouds, {ctx, clouds}, fn {name, cloud}, {ctx_acc, clouds_acc} ->
{new_ctx, new_cloud} = do_compact(ctx_acc, Enum.sort(MapSet.to_list(cloud)))
{new_ctx, Map.put(clouds_acc, name, MapSet.new(new_cloud))}
end)
put_context(%State{state | clouds: new_clouds}, new_ctx)
end
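# Worked example of compaction (illustrative values): with a context of
# %{a: 1} and a cloud for :a of [{:a, 2}, {:a, 3}], the contiguous tags fold
# into the context, leaving %{a: 3} and an empty cloud. A gap such as a lone
# [{:a, 3}] (with context %{a: 1}) stays in the cloud until {:a, 2} arrives.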
@spec do_compact(context, sorted_cloud_list :: list) :: {context, cloud}
defp do_compact(ctx, cloud) do
Enum.reduce(cloud, {ctx, []}, fn {replica, clock} = tag, {ctx_acc, cloud_acc} ->
case ctx_acc do
%{^replica => ctx_clock} when ctx_clock + 1 == clock ->
{%{ctx_acc | replica => clock}, cloud_acc}
%{^replica => ctx_clock} when ctx_clock >= clock ->
{ctx_acc, cloud_acc}
_ when clock == 1 ->
{Map.put(ctx_acc, replica, clock), cloud_acc}
_ ->
{ctx_acc, [tag | cloud_acc]}
end
end)
end
@compile {:inline, in?: 3, in_ctx?: 3, in_clouds?: 3}
defp in?(context, clouds, {replica, clock} = tag) do
in_ctx?(context, replica, clock) or in_clouds?(clouds, replica, tag)
end
defp in_ctx?(ctx, replica, clock) do
case ctx do
%{^replica => replica_clock} -> replica_clock >= clock
_ -> false
end
end
defp in_clouds?(clouds, replica, tag) do
case clouds do
%{^replica => cloud} -> MapSet.member?(cloud, tag)
_ -> false
end
end
@spec tag(t) :: tag
defp tag(%State{replica: rep} = state), do: {rep, clock(state)}
@spec clock(t) :: clock
defp clock(%State{replica: rep, context: ctx}), do: Map.get(ctx, rep, 0)
@spec bump_clock(t) :: t
defp bump_clock(%State{mode: :normal, replica: rep, clouds: clouds, context: ctx, delta: delta} = state) do
new_clock = clock(state) + 1
new_ctx = Map.put(ctx, rep, new_clock)
%State{state |
clouds: put_tag(clouds, {rep, new_clock}),
delta: %State{delta | clouds: put_tag(delta.clouds, {rep, new_clock})}}
|> put_context(new_ctx)
end
defp put_context(%State{delta: delta, replica: rep} = state, new_ctx) do
{start_clock, end_clock} = delta.range
new_end = Map.put(end_clock, rep, Map.get(new_ctx, rep, 0))
%State{state |
context: new_ctx,
delta: %State{delta | range: {start_clock, new_end}}}
end
@spec down_replicas(t) :: [name]
defp down_replicas(%State{replicas: replicas}) do
for {replica, :down} <- replicas, do: replica
end
@spec replica_users(t, name) :: [value]
defp replica_users(%State{values: values}, replica) do
:ets.match_object(values, {:_, :_, {replica, :_}})
end
@fold_batch_size 1000
defp foldl(table, initial, ms, func) do
foldl(:ets.select(table, ms, @fold_batch_size), initial, func)
end
defp foldl(:"$end_of_table", acc, _func), do: acc
defp foldl({objects, cont}, acc, func) do
foldl(:ets.select(cont), Enum.reduce(objects, acc, func), func)
end
end
| 34.491349 | 115 | 0.582364 |
73dc392c90d6e1e53b17da204f92ba8a995dd79f | 1,374 | exs | Elixir | mix.exs | luizParreira/ecto_cast_to_string | 89254d62c460e11aac0eddbe329c1036d8c97f7e | [
"MIT"
] | 1 | 2022-03-10T16:40:27.000Z | 2022-03-10T16:40:27.000Z | mix.exs | luizParreira/ecto_cast_to_string | 89254d62c460e11aac0eddbe329c1036d8c97f7e | [
"MIT"
] | null | null | null | mix.exs | luizParreira/ecto_cast_to_string | 89254d62c460e11aac0eddbe329c1036d8c97f7e | [
"MIT"
] | 1 | 2021-03-04T07:52:14.000Z | 2021-03-04T07:52:14.000Z | defmodule EctoCastToString.Mixfile do
use Mix.Project
@description "EctoCastToString is a simple library that enables ecto casting from int/float to string."
@version "0.1.1"
def project do
[app: :ecto_cast_to_string,
version: @version,
elixir: "~> 1.3",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
description: @description,
source_url: "https://github.com/luizParreira/ecto_cast_to_string",
docs: [extras: ["README.md"], main: "EctoCastToString"],
deps: deps(),
package: package()]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
# Specify extra applications you'll use from Erlang/Elixir
[extra_applications: [:logger]]
end
def package do
[
maintainers: ["Luiz Parreira"],
licenses: ["MIT"],
files: ["lib", "mix.exs", "README.md", "LICENSE"],
links: %{"GitHub" => "https://github.com/luizParreira/ecto_cast_to_string"}
]
end
# Dependencies can be Hex packages:
#
# {:my_dep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:my_dep, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[{:ecto, "~> 2.1"},
{:ex_doc, "~> 0.14", only: :dev}]
end
end
| 26.941176 | 105 | 0.631732 |
73dc73c43a85318877d0db2ebfb0c1f8c467b7fc | 790 | ex | Elixir | lib/loaded_bike/web/uploaders/photo_uploader.ex | GBH/pedal | a2d68c3561f186ee3017a21b4170127b1625e18d | [
"MIT"
] | 48 | 2017-04-25T16:02:08.000Z | 2021-01-23T01:57:29.000Z | lib/loaded_bike/web/uploaders/photo_uploader.ex | GBH/pedal | a2d68c3561f186ee3017a21b4170127b1625e18d | [
"MIT"
] | 5 | 2018-03-09T20:17:55.000Z | 2018-07-23T16:29:21.000Z | lib/loaded_bike/web/uploaders/photo_uploader.ex | GBH/pedal | a2d68c3561f186ee3017a21b4170127b1625e18d | [
"MIT"
] | 4 | 2017-05-21T14:38:38.000Z | 2017-12-29T11:09:54.000Z | defmodule LoadedBike.Web.PhotoUploader do
use Arc.Definition
use Arc.Ecto.Definition
@versions [:large]
# local storage
def __storage, do: Arc.Storage.Local
# Whitelist file extensions:
def validate({file, _}) do
ext = file.file_name
|> Path.extname
|> String.downcase
Enum.member?(~w(.jpg .jpeg), ext)
end
# Define a thumbnail transformation:
def transform(:large, _) do
{:convert, "-strip -thumbnail 930x -gravity center -extent 930x -format jpg", :jpg}
end
# Override the persisted filenames:
def filename(version, _) do
version
end
# Override the storage directory:
def storage_dir(_version, {_file, photo}) do
"uploads/tours/#{photo.waypoint.tour_id}/waypoints/#{photo.waypoint.id}/photos/#{photo.uuid}/"
end
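# Illustrative attachment flow (a sketch — the schema and field names are
# assumptions; `cast_attachments/3` comes from arc_ecto):
#
#     photo
#     |> cast(attrs, [:uuid])
#     |> cast_attachments(attrs, [:file])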
end
| 23.939394 | 98 | 0.688608 |
73dcbd77902f1ab86ae57a9d6549d6243fc046aa | 176 | ex | Elixir | lib/money/phoenix_html_safe.ex | SafeBoda/money | 249af6078039825b4d332931838dded5e15bdfaf | [
"MIT"
] | 1 | 2020-01-30T22:44:45.000Z | 2020-01-30T22:44:45.000Z | lib/money/phoenix_html_safe.ex | SafeBoda/money | 249af6078039825b4d332931838dded5e15bdfaf | [
"MIT"
] | 1 | 2019-04-16T11:22:26.000Z | 2019-04-16T11:52:48.000Z | lib/money/phoenix_html_safe.ex | SafeBoda/money | 249af6078039825b4d332931838dded5e15bdfaf | [
"MIT"
] | 2 | 2019-03-01T12:05:22.000Z | 2019-03-26T12:11:07.000Z | if Code.ensure_compiled?(Phoenix.HTML.Safe) do
defimpl Phoenix.HTML.Safe, for: Money do
def to_iodata(money), do: Phoenix.HTML.Safe.to_iodata(to_string(money))
end
end
| 29.333333 | 75 | 0.755682 |
73dcc6167f496b788d09952bdc7faac4f3f7c8d4 | 683 | ex | Elixir | solutions/lib/ninety_nine_elixir_problems/solutions/problem06.ex | cgerling/ninety-nine-elixir-problems | 45c1702a654d24e53c4010658c246185d1a04aa0 | [
"Unlicense"
] | 7 | 2019-12-06T13:36:19.000Z | 2022-03-02T19:38:37.000Z | solutions/lib/ninety_nine_elixir_problems/solutions/problem06.ex | cgerling/ninety-nine-elixir-problems | 45c1702a654d24e53c4010658c246185d1a04aa0 | [
"Unlicense"
] | null | null | null | solutions/lib/ninety_nine_elixir_problems/solutions/problem06.ex | cgerling/ninety-nine-elixir-problems | 45c1702a654d24e53c4010658c246185d1a04aa0 | [
"Unlicense"
] | 1 | 2021-06-27T16:53:49.000Z | 2021-06-27T16:53:49.000Z | defmodule NinetyNineElixirProblems.Solutions.Problem06 do
@doc ~S"""
Check if a list or string are palindrome (can be read forward and backward)
## Examples:
iex> Problem06.is_palindrome?([1, 2, 4, 8, 16, 8, 4, 2, 1])
true
iex> Problem06.is_palindrome?("detartrated")
true
iex> Problem06.is_palindrome?([1, 2, 3])
false
iex> Problem06.is_palindrome?("ninetynine")
false
"""
def is_palindrome?(word) when is_binary(word) do
word
|> String.graphemes()
|> is_palindrome?()
end
def is_palindrome?(list) when is_list(list) do
list |> Enum.reverse() |> Enum.zip(list) |> Enum.all?(fn {st, nd} -> st == nd end)
end
end
| 22.766667 | 86 | 0.644217 |
73dcf6f3607b10912ec657865697837b2bc98839 | 702 | ex | Elixir | tags/lib/tags_web/gettext.ex | GustavoSrr/nlw-heat | 7d92dbd94d6db303d1c0d604b85707de2135c2dc | [
"MIT"
] | null | null | null | tags/lib/tags_web/gettext.ex | GustavoSrr/nlw-heat | 7d92dbd94d6db303d1c0d604b85707de2135c2dc | [
"MIT"
] | null | null | null | tags/lib/tags_web/gettext.ex | GustavoSrr/nlw-heat | 7d92dbd94d6db303d1c0d604b85707de2135c2dc | [
"MIT"
] | null | null | null | defmodule TagsWeb.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import TagsWeb.Gettext
# Simple translation
gettext("Here is the string to translate")
# Plural translation
ngettext("Here is the string to translate",
"Here are the strings to translate",
3)
# Domain-based translation
dgettext("errors", "Here is the error message to translate")
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :tags
end
| 28.08 | 72 | 0.673789 |
73dd29823091db6b2e5966fb4886d10ad5f3464b | 2,332 | ex | Elixir | clients/content/lib/google_api/content/v2/model/inventory_custom_batch_request_entry.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/inventory_custom_batch_request_entry.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/content/lib/google_api/content/v2/model/inventory_custom_batch_request_entry.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.InventoryCustomBatchRequestEntry do
@moduledoc """
A batch entry encoding a single non-batch inventory request.
## Attributes
* `batchId` (*type:* `integer()`, *default:* `nil`) - An entry ID, unique within the batch request.
* `inventory` (*type:* `GoogleApi.Content.V2.Model.Inventory.t`, *default:* `nil`) - Price and availability of the product.
* `merchantId` (*type:* `String.t`, *default:* `nil`) - The ID of the managing account.
* `productId` (*type:* `String.t`, *default:* `nil`) - The ID of the product for which to update price and availability.
* `storeCode` (*type:* `String.t`, *default:* `nil`) - The code of the store for which to update price and availability. Use `online` to update price and availability of an online product.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:batchId => integer(),
:inventory => GoogleApi.Content.V2.Model.Inventory.t(),
:merchantId => String.t(),
:productId => String.t(),
:storeCode => String.t()
}
field(:batchId)
field(:inventory, as: GoogleApi.Content.V2.Model.Inventory)
field(:merchantId)
field(:productId)
field(:storeCode)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.InventoryCustomBatchRequestEntry do
def decode(value, options) do
GoogleApi.Content.V2.Model.InventoryCustomBatchRequestEntry.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.InventoryCustomBatchRequestEntry do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 39.525424 | 192 | 0.713551 |
73dd67e752a52e688a7c8b97505d5872706f151b | 1,074 | exs | Elixir | clients/display_video/test/test_helper.exs | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/display_video/test/test_helper.exs | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/display_video/test/test_helper.exs | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
ExUnit.start()
defmodule GoogleApi.DisplayVideo.TestHelper do
defmacro __using__(opts) do
quote do
use ExUnit.Case, unquote(opts)
import GoogleApi.DisplayVideo.TestHelper
end
end
def for_scope(scopes) when is_list(scopes), do: for_scope(Enum.join(scopes, " "))
def for_scope(scope) do
{:ok, token} = Goth.Token.for_scope(scope)
token.token
end
end
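# Illustrative usage (a sketch — assumes valid Google credentials are
# configured for Goth; the scope string is the usual Display & Video 360 scope):
#
#     token = for_scope("https://www.googleapis.com/auth/display-video")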
| 29.833333 | 83 | 0.741155 |
73dd7767616b2d24b4147b6a1de9826155fb359c | 481 | exs | Elixir | test/phoenix_container_example_web/views/error_view_test.exs | wwaldner-amtelco/phoenix_container_example | aeee424b40f444fe6bbfeab4d57b78d201397701 | [
"Apache-2.0"
] | 19 | 2020-07-21T06:03:36.000Z | 2022-03-21T22:35:22.000Z | test/phoenix_container_example_web/views/error_view_test.exs | wwaldner-amtelco/phoenix_container_example | aeee424b40f444fe6bbfeab4d57b78d201397701 | [
"Apache-2.0"
] | 1 | 2022-03-08T10:26:55.000Z | 2022-03-08T10:26:55.000Z | test/phoenix_container_example_web/views/error_view_test.exs | wwaldner-amtelco/phoenix_container_example | aeee424b40f444fe6bbfeab4d57b78d201397701 | [
"Apache-2.0"
] | 1 | 2022-02-09T01:25:09.000Z | 2022-02-09T01:25:09.000Z | defmodule PhoenixContainerExampleWeb.ErrorViewTest do
use PhoenixContainerExampleWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(PhoenixContainerExampleWeb.ErrorView, "404.html", []) == "Not Found"
end
test "renders 500.html" do
assert render_to_string(PhoenixContainerExampleWeb.ErrorView, "500.html", []) == "Internal Server Error"
end
end
| 32.066667 | 108 | 0.769231 |
73dd8fc036cd919d50025d1d6573ab41140adeb3 | 2,257 | ex | Elixir | apps/definition_dictionary/lib/dictionary/access.ex | rucker/hindsight | 876a5d344c5d8eebbea37684ee07e0a91e4430f0 | [
"Apache-2.0"
] | null | null | null | apps/definition_dictionary/lib/dictionary/access.ex | rucker/hindsight | 876a5d344c5d8eebbea37684ee07e0a91e4430f0 | [
"Apache-2.0"
] | null | null | null | apps/definition_dictionary/lib/dictionary/access.ex | rucker/hindsight | 876a5d344c5d8eebbea37684ee07e0a91e4430f0 | [
"Apache-2.0"
] | null | null | null | defmodule Dictionary.Access do
@type access_fun :: Access.access_fun(data :: struct | map, get_value :: term())
@type opts :: [
spread: boolean
]
@spec to_access_path(String.t() | [String.t()], opts) :: [access_fun]
def to_access_path(input, opts \\ []) do
input
|> List.wrap()
|> Enum.map(&key(&1, nil, opts))
end
@spec key(term, term, opts) :: access_fun
def key(key, default \\ nil, opts \\ []) do
&access_fun(key, default, &1, &2, &3, opts)
end
defp access_fun(key, default, :get, %module{} = data, next, _opts) do
case module.fetch(data, key) do
{:ok, value} -> next.(value)
:error -> next.(default)
end
end
defp access_fun(key, default, :get, list, next, opts) when is_list(list) do
Enum.map(list, &access_fun(key, default, :get, &1, next, opts))
end
defp access_fun(key, default, :get, data, next, _opts) do
next.(Map.get(data, key, default))
end
defp access_fun(key, _default, :get_and_update, %module{} = data, next, _opts) do
module.get_and_update(data, key, next)
end
defp access_fun(key, default, :get_and_update, list, next, opts) when is_list(list) do
spread? = Keyword.get(opts, :spread, false)
{gets, updates} =
Enum.with_index(list)
|> Enum.map(fn {entry, index} ->
wrapper = fn value ->
with {get_value, update_value} <- next.(value) do
case is_spreadable?(update_value, spread?) do
true ->
{get_value, Enum.at(update_value, index)}
false ->
{get_value, update_value}
end
end
end
access_fun(key, default, :get_and_update, entry, wrapper, opts)
end)
|> Enum.reduce({[], []}, fn {get, update}, {get_acc, update_acc} ->
{[get | get_acc], [update | update_acc]}
end)
{Enum.reverse(gets), Enum.reverse(updates)}
end
defp access_fun(key, default, :get_and_update, data, next, _opts) do
value = Map.get(data, key, default)
case next.(value) do
{get, update} -> {get, Map.put(data, key, update)}
:pop -> {value, Map.delete(data, key)}
end
end
defp is_spreadable?(value, spread?) do
is_list(value) && spread?
end
end
| 28.56962 | 88 | 0.593265 |
73dd9ccbf250ffde6bafdcf40ab3500a13b11eaf | 2,280 | ex | Elixir | clients/admin/lib/google_api/admin/directory_v1/model/calendar_resources.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/admin/lib/google_api/admin/directory_v1/model/calendar_resources.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/admin/lib/google_api/admin/directory_v1/model/calendar_resources.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Admin.Directory_v1.Model.CalendarResources do
@moduledoc """
Public API: Resources.calendars
## Attributes
* `etag` (*type:* `String.t`, *default:* `nil`) - ETag of the resource.
* `items` (*type:* `list(GoogleApi.Admin.Directory_v1.Model.CalendarResource.t)`, *default:* `nil`) - The CalendarResources in this page of results.
* `kind` (*type:* `String.t`, *default:* `admin#directory#resources#calendars#calendarResourcesList`) - Identifies this as a collection of CalendarResources. This is always admin#directory#resources#calendars#calendarResourcesList.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - The continuation token, used to page through large result sets. Provide this value in a subsequent request to return the next page of results.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:etag => String.t(),
:items => list(GoogleApi.Admin.Directory_v1.Model.CalendarResource.t()),
:kind => String.t(),
:nextPageToken => String.t()
}
field(:etag)
field(:items, as: GoogleApi.Admin.Directory_v1.Model.CalendarResource, type: :list)
field(:kind)
field(:nextPageToken)
end
defimpl Poison.Decoder, for: GoogleApi.Admin.Directory_v1.Model.CalendarResources do
def decode(value, options) do
GoogleApi.Admin.Directory_v1.Model.CalendarResources.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Admin.Directory_v1.Model.CalendarResources do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 40.714286 | 235 | 0.73114 |
73dda217d8e8316945ac96556b443184c1258f7f | 2,184 | ex | Elixir | lib/bitcoin/secp256k1.ex | anthdm/bitcoin-elixir | 5ca9f8bf4e9b2b38527670e80568a85e0aa612c0 | [
"Apache-2.0"
] | 81 | 2017-04-20T17:42:59.000Z | 2022-02-08T03:49:22.000Z | lib/bitcoin/secp256k1.ex | anthdm/bitcoin-elixir | 5ca9f8bf4e9b2b38527670e80568a85e0aa612c0 | [
"Apache-2.0"
] | 2 | 2019-04-22T04:24:39.000Z | 2019-04-26T07:03:59.000Z | lib/bitcoin/secp256k1.ex | anthdm/bitcoin-elixir | 5ca9f8bf4e9b2b38527670e80568a85e0aa612c0 | [
"Apache-2.0"
] | 22 | 2017-08-16T14:19:44.000Z | 2021-12-22T04:36:57.000Z | defmodule Bitcoin.Secp256k1 do
@moduledoc """
ECDSA Secp256k1 curve operations.
  By default, Erlang's `:crypto.verify` is used, which keeps the library easy to depend on
  (no need for gcc when you just want to parse something).

  However, if the :libsecp256k1 NIF is available, it is used instead. To enable it, just
  uncomment the appropriate line in the deps of mix.exs.

  libsecp256k1: https://github.com/bitcoin-core/secp256k1

  If the gcc and git dependencies are not a problem, use the NIF. It is much faster and it
  is the proper way to do it consensus-wise. Do note that, even though it is unlikely, an
  error in the NIF or in libsecp256k1 will bring down the whole Erlang VM (not just the
  calling process).
"""
alias Bitcoin.DERSig
require Logger
@using_nif Code.ensure_loaded? :libsecp256k1
@doc """
  Verifies a signed message.

  Returns `true` if the signature matches the public key.
  `msg` should already be a 32-byte digest.
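
  ## Example

  A minimal sketch; the signature and public key below are placeholders, not real values:

      digest = :crypto.hash(:sha256, "message")
      Bitcoin.Secp256k1.verify(digest, der_signature, public_key)
      #=> true or false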
"""
@spec verify(binary, binary, binary) :: boolean
def verify(msg, sig, pk) do
do_verify(msg, DERSig.normalize(sig), pk)
end
@doc """
Secp256k1 parameters.
http://www.secg.org/sec2-v2.pdf
"""
@spec params :: map
def params do
%{
p: 0xFFFFFFFF_FFFFFFFF_FFFFFFFF_FFFFFFFF_FFFFFFFF_FFFFFFFF_FFFFFFFE_FFFFFC2F,
a: 0x00000000_00000000_00000000_00000000_00000000_00000000_00000000_00000000,
b: 0x00000000_00000000_00000000_00000000_00000000_00000000_00000000_00000007,
G: 0x04_79BE667E_F9DCBBAC_55A06295_CE870B07_029BFCDB_2DCE28D9_59F2815B_16F81798_483ADA77_26A3C465_5DA4FBFC_0E1108A8_FD17B448_A6855419_9C47D08F_FB10D4B8,
n: 0xFFFFFFFF_FFFFFFFF_FFFFFFFF_FFFFFFFE_BAAEDCE6_AF48A03B_BFD25E8C_D0364141,
h: 0x01
}
end
if @using_nif do
Logger.info "Using libsecp256k1 NIF for ECDSA operations."
@spec do_verify(binary, binary, binary) :: boolean
defp do_verify(msg, sig, pk), do: :libsecp256k1.ecdsa_verify(msg, sig, pk) == :ok
else
Logger.info "Using erlang implementation for ECDSA operations."
@spec do_verify(binary, binary, binary) :: boolean
defp do_verify(msg, sig, pk), do: :crypto.verify(:ecdsa, :sha256, {:digest, msg}, sig, [pk, :secp256k1])
end
end
| 31.652174 | 158 | 0.744048 |
73ddcc6d126f0c0237ac98ecdbbb6896ebaba28b | 455 | exs | Elixir | test/support/bart_mock.exs | cdale77/bart_scrape | 8696b303f1111a29cfdbea80f15823da3ea5747d | [
"MIT"
] | null | null | null | test/support/bart_mock.exs | cdale77/bart_scrape | 8696b303f1111a29cfdbea80f15823da3ea5747d | [
"MIT"
] | null | null | null | test/support/bart_mock.exs | cdale77/bart_scrape | 8696b303f1111a29cfdbea80f15823da3ea5747d | [
"MIT"
] | null | null | null | defmodule HTTPoison.Response do
defstruct body: nil, headers: nil, status_code: nil
end
defmodule BartScrape.BartMock do
def fetch_data do
{ :ok, successful_response }
end
defp successful_response do
%HTTPoison.Response{
body: "{\"access_token\":\"a_valid_access_token\",\"token_type\":\"Bearer\",
\"expires_in\":3600,\"a_valid_refresh_token\":\"refresh_token\"}",
headers: [],
status_code: 200
}
end
end
| 23.947368 | 82 | 0.676923 |
73dde2c2aeb852f2cffc234cbae34a3e85c48e88 | 3,683 | exs | Elixir | test/phxcrd_web/live/authority_live_test.exs | spapas/phxcrd | 84877896f56400b0cc8624fe96c4fe4f5fd8053c | [
"MIT"
] | 18 | 2019-06-21T09:55:46.000Z | 2022-02-16T18:44:17.000Z | test/phxcrd_web/live/authority_live_test.exs | spapas/phxcrd | 84877896f56400b0cc8624fe96c4fe4f5fd8053c | [
"MIT"
] | 16 | 2019-05-21T20:19:44.000Z | 2020-05-12T08:30:42.000Z | test/phxcrd_web/live/authority_live_test.exs | spapas/phxcrd | 84877896f56400b0cc8624fe96c4fe4f5fd8053c | [
"MIT"
] | 2 | 2019-09-05T00:35:27.000Z | 2020-10-17T16:36:35.000Z | defmodule PhxcrdWeb.AuthorityLiveTest do
use PhxcrdWeb.ConnCase
import Phoenix.LiveViewTest
alias Phxcrd.Auth
@create_attrs %{}
@update_attrs %{}
@invalid_attrs %{}
defp fixture(:authority) do
{:ok, authority} = Auth.create_authority(@create_attrs)
authority
end
defp create_authority(_) do
authority = fixture(:authority)
%{authority: authority}
end
describe "Index" do
setup [:create_authority]
test "lists all authorities", %{conn: conn, authority: authority} do
{:ok, _index_live, html} = live(conn, AdminRoutes.authority_index_path(conn, :index))
assert html =~ "Listing Authorities"
end
test "saves new authority", %{conn: conn} do
{:ok, index_live, _html} = live(conn, AdminRoutes.authority_index_path(conn, :index))
assert index_live |> element("a", "New Authority") |> render_click() =~
"New Authority"
assert_patch(index_live, AdminRoutes.authority_index_path(conn, :new))
assert index_live
|> form("#authority-form", authority: @invalid_attrs)
|> render_change() =~ "can't be blank"
{:ok, _, html} =
index_live
|> form("#authority-form", authority: @create_attrs)
|> render_submit()
|> follow_redirect(conn, AdminRoutes.authority_index_path(conn, :index))
assert html =~ "Authority created successfully"
end
test "updates authority in listing", %{conn: conn, authority: authority} do
{:ok, index_live, _html} = live(conn, AdminRoutes.authority_index_path(conn, :index))
assert index_live |> element("#authority-#{authority.id} a", "Edit") |> render_click() =~
"Edit Authority"
assert_patch(index_live, AdminRoutes.authority_index_path(conn, :edit, authority))
assert index_live
|> form("#authority-form", authority: @invalid_attrs)
|> render_change() =~ "can't be blank"
{:ok, _, html} =
index_live
|> form("#authority-form", authority: @update_attrs)
|> render_submit()
|> follow_redirect(conn, AdminRoutes.authority_index_path(conn, :index))
assert html =~ "Authority updated successfully"
end
test "deletes authority in listing", %{conn: conn, authority: authority} do
{:ok, index_live, _html} = live(conn, AdminRoutes.authority_index_path(conn, :index))
assert index_live |> element("#authority-#{authority.id} a", "Delete") |> render_click()
refute has_element?(index_live, "#authority-#{authority.id}")
end
end
describe "Show" do
setup [:create_authority]
test "displays authority", %{conn: conn, authority: authority} do
{:ok, _show_live, html} =
live(conn, AdminRoutes.authority_show_path(conn, :show, authority))
assert html =~ "Show Authority"
end
test "updates authority within modal", %{conn: conn, authority: authority} do
{:ok, show_live, _html} =
live(conn, AdminRoutes.authority_show_path(conn, :show, authority))
assert show_live |> element("a", "Edit") |> render_click() =~
"Edit Authority"
assert_patch(show_live, AdminRoutes.authority_show_path(conn, :edit, authority))
assert show_live
|> form("#authority-form", authority: @invalid_attrs)
|> render_change() =~ "can't be blank"
{:ok, _, html} =
show_live
|> form("#authority-form", authority: @update_attrs)
|> render_submit()
|> follow_redirect(conn, AdminRoutes.authority_show_path(conn, :show, authority))
assert html =~ "Authority updated successfully"
end
end
end
| 32.307018 | 95 | 0.645126 |
73de10ce7725ef61863657f8a3d763977bcd6bc7 | 1,835 | ex | Elixir | lib/militerm/systems/groups.ex | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | 6 | 2017-06-16T10:26:35.000Z | 2021-04-07T15:01:00.000Z | lib/militerm/systems/groups.ex | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | 2 | 2020-04-14T02:17:46.000Z | 2021-03-10T11:09:05.000Z | lib/militerm/systems/groups.ex | jgsmith/militerm | c4252d0a93f5620b90750ac2b61baf282e9ef7eb | [
"Apache-2.0"
] | null | null | null | defmodule Militerm.Systems.Groups do
@moduledoc """
  The Groups system provides a way to manage privilege escalation and the use
  of escalated privileges.
Escalated group memberships are ephemeral and disappear when the
character leaves the game.
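
  Example commands (the group name is illustrative):

      su wizards    # activate the "wizards" group, if it has been granted
      su            # list the groups currently active
      unsu wizards  # deactivate the "wizards" group
      unsu          # deactivate every group except "players"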
"""
use Militerm.ECS.System
alias Militerm.Components.EphemeralGroup
alias Militerm.English
alias Militerm.Systems.Entity
defcommand su(bits), for: %{"this" => {:thing, entity_id} = this} = args do
# the user has to have been granted the group membership in order to
# add it to their session
case String.split(bits, ~r{\s+}, trim: true) do
[] ->
# list out the current groups turned on
list =
entity_id
|> EphemeralGroup.get_groups()
Entity.receive_message(
this,
"cmd",
"You have #{English.consolidate(Enum.count(list), "group")} active: #{
English.item_list(list)
}"
)
groups ->
candidates = groups -- EphemeralGroup.get_groups(entity_id)
list =
candidates
|> Enum.filter(&EphemeralGroup.set_value(entity_id, [&1], true))
Entity.receive_message(
this,
"cmd",
"Activated #{English.consolidate(Enum.count(list), "group")}: #{English.item_list(list)}"
)
end
end
defcommand unsu(bits), for: %{"this" => {:thing, entity_id} = this} = args do
list =
case String.split(bits, ~r{\s+}, trim: true) do
[] -> EphemeralGroup.get_groups(entity_id) -- ["players"]
groups -> groups -- ["players"]
end
for group <- list, do: EphemeralGroup.set_value(entity_id, [group], false)
Entity.receive_message(
this,
"cmd",
"Deactivated #{English.consolidate(Enum.count(list), "group")}: #{English.item_list(list)}"
)
end
end
| 28.230769 | 99 | 0.608174 |
73de16c1699a5e25627ab94ee199089cfad47565 | 2,625 | ex | Elixir | lib/protobuf/protoc/generator.ex | zolakeith/protobuf | 2d412b260c48be8f90e05408f8569cef2f6d3ace | [
"MIT"
] | 209 | 2020-11-19T11:17:27.000Z | 2022-03-20T12:47:31.000Z | lib/protobuf/protoc/generator.ex | zolakeith/protobuf | 2d412b260c48be8f90e05408f8569cef2f6d3ace | [
"MIT"
] | 101 | 2020-11-22T20:20:11.000Z | 2022-03-06T16:09:26.000Z | lib/protobuf/protoc/generator.ex | zolakeith/protobuf | 2d412b260c48be8f90e05408f8569cef2f6d3ace | [
"MIT"
] | 35 | 2020-11-26T14:45:13.000Z | 2022-03-22T23:22:00.000Z | defmodule Protobuf.Protoc.Generator do
@moduledoc false
alias Protobuf.Protoc.Context
alias Protobuf.Protoc.Generator
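  # Entry point used by the protoc plugin: takes a `FileDescriptorProto` and returns the
  # `CodeGeneratorResponse.File` structs to emit, either one file per generated module or
  # a single file per .proto input, depending on `ctx.one_file_per_module?`.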
@spec generate(Context.t(), %Google.Protobuf.FileDescriptorProto{}) ::
[Google.Protobuf.Compiler.CodeGeneratorResponse.File.t()]
def generate(%Context{} = ctx, %Google.Protobuf.FileDescriptorProto{} = desc) do
module_definitions =
ctx
|> generate_module_definitions(desc)
|> Enum.reject(&is_nil/1)
if ctx.one_file_per_module? do
Enum.map(module_definitions, fn {mod_name, content} ->
file_name = Macro.underscore(mod_name) <> ".pb.ex"
Google.Protobuf.Compiler.CodeGeneratorResponse.File.new(
name: file_name,
content: content
)
end)
else
# desc.name is the filename, ending in ".proto".
file_name = Path.rootname(desc.name) <> ".pb.ex"
content =
module_definitions
|> Enum.map(fn {_mod_name, contents} -> [contents, ?\n] end)
|> IO.iodata_to_binary()
[
Google.Protobuf.Compiler.CodeGeneratorResponse.File.new(
name: file_name,
content: content
)
]
end
end
defp generate_module_definitions(ctx, %Google.Protobuf.FileDescriptorProto{} = desc) do
ctx =
%Context{
ctx
| syntax: syntax(desc.syntax),
package: desc.package,
dep_type_mapping: get_dep_type_mapping(ctx, desc.dependency, desc.name)
}
|> Protobuf.Protoc.Context.custom_file_options_from_file_desc(desc)
nested_extensions = Generator.Extension.get_nested_extensions(ctx, desc.message_type)
enum_defmodules = Enum.map(desc.enum_type, &Generator.Enum.generate(ctx, &1))
{nested_enum_defmodules, message_defmodules} =
Generator.Message.generate_list(ctx, desc.message_type)
extension_defmodules = Generator.Extension.generate(ctx, desc, nested_extensions)
service_defmodules =
if "grpc" in ctx.plugins do
Enum.map(desc.service, &Generator.Service.generate(ctx, &1))
else
[]
end
List.flatten([
enum_defmodules,
nested_enum_defmodules,
message_defmodules,
service_defmodules,
extension_defmodules
])
end
defp get_dep_type_mapping(%Context{global_type_mapping: global_mapping}, deps, file_name) do
mapping =
Enum.reduce(deps, %{}, fn dep, acc ->
Map.merge(acc, global_mapping[dep])
end)
Map.merge(mapping, global_mapping[file_name])
end
defp syntax("proto3"), do: :proto3
defp syntax("proto2"), do: :proto2
defp syntax(nil), do: :proto2
end
| 29.166667 | 94 | 0.662857 |
73de1ce651f962eb07062e051163e4405b56e65c | 850 | ex | Elixir | apps/omg_child_chain_rpc/lib/omg_child_chain_rpc/web/views/alarm.ex | karmonezz/elixir-omg | 3b26fc072fa553992277e1b9c4bad37b3d61ec6a | [
"Apache-2.0"
] | 1 | 2020-05-01T12:30:09.000Z | 2020-05-01T12:30:09.000Z | apps/omg_child_chain_rpc/lib/omg_child_chain_rpc/web/views/alarm.ex | karmonezz/elixir-omg | 3b26fc072fa553992277e1b9c4bad37b3d61ec6a | [
"Apache-2.0"
] | null | null | null | apps/omg_child_chain_rpc/lib/omg_child_chain_rpc/web/views/alarm.ex | karmonezz/elixir-omg | 3b26fc072fa553992277e1b9c4bad37b3d61ec6a | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 OmiseGO Pte Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
defmodule OMG.ChildChainRPC.Web.View.Alarm do
@moduledoc """
  The status view for rendering JSON.
"""
use OMG.ChildChainRPC.Web, :view
alias OMG.Utils.HttpRPC.Response
def render("alarm.json", %{response: alarms}) do
Response.serialize(alarms)
end
end
| 31.481481 | 74 | 0.748235 |
73de296f0310425b5e5653c0c1bc58f3cef8edf0 | 1,771 | ex | Elixir | lib/dnsapp_web/controllers/record_controller.ex | tashavanes/dnsapp | 92d1069e3ef06df6a990047297d87e91a782a75b | [
"MIT"
] | null | null | null | lib/dnsapp_web/controllers/record_controller.ex | tashavanes/dnsapp | 92d1069e3ef06df6a990047297d87e91a782a75b | [
"MIT"
] | null | null | null | lib/dnsapp_web/controllers/record_controller.ex | tashavanes/dnsapp | 92d1069e3ef06df6a990047297d87e91a782a75b | [
"MIT"
] | null | null | null | defmodule DnsappWeb.RecordController do
use DnsappWeb, :controller
alias Dnsapp.Entries
alias Dnsapp.Entries.Record
def index(conn, _params) do
records = Entries.list_records()
render(conn, "index.html", records: records)
end
def new(conn, _params) do
changeset = Entries.change_record(%Record{})
render(conn, "new.html", changeset: changeset)
end
def create(conn, %{"record" => record_params}) do
case Entries.create_record(record_params) do
{:ok, record} ->
conn
|> put_flash(:info, "Record created successfully.")
|> redirect(to: record_path(conn, :show, record))
{:error, %Ecto.Changeset{} = changeset} ->
render(conn, "new.html", changeset: changeset)
end
end
def show(conn, %{"id" => id}) do
record = Entries.get_record!(id)
render(conn, "show.html", record: record)
end
def edit(conn, %{"id" => id}) do
record = Entries.get_record!(id)
changeset = Entries.change_record(record)
render(conn, "edit.html", record: record, changeset: changeset)
end
def update(conn, %{"id" => id, "record" => record_params}) do
record = Entries.get_record!(id)
case Entries.update_record(record, record_params) do
{:ok, record} ->
conn
|> put_flash(:info, "Record updated successfully.")
|> redirect(to: record_path(conn, :show, record))
{:error, %Ecto.Changeset{} = changeset} ->
render(conn, "edit.html", record: record, changeset: changeset)
end
end
def delete(conn, %{"id" => id}) do
record = Entries.get_record!(id)
{:ok, _record} = Entries.delete_record(record)
conn
|> put_flash(:info, "Record deleted successfully.")
|> redirect(to: record_path(conn, :index))
end
end
| 29.032787 | 71 | 0.640881 |
73de52ef23ddeadd9daf9e842b78cdbfb3dc0105 | 479 | ex | Elixir | lib/open_telemetry/observer.ex | ostera/opentelemetry-erlang-api | c03c64e4f3127365942077b592c9350b15206862 | [
"Apache-2.0"
] | null | null | null | lib/open_telemetry/observer.ex | ostera/opentelemetry-erlang-api | c03c64e4f3127365942077b592c9350b15206862 | [
"Apache-2.0"
] | null | null | null | lib/open_telemetry/observer.ex | ostera/opentelemetry-erlang-api | c03c64e4f3127365942077b592c9350b15206862 | [
"Apache-2.0"
] | null | null | null | defmodule OpenTelemetry.Observer do
@moduledoc """
require OpenTelemetry.Observer
OpenTelemetry.Observer.set_callback(\"some.counter\", fn o -> OpenTelemetry.Observer.observe(o, 33, []))
"""
defmacro set_callback(observer, callback) do
quote do
:ot_meter.set_observer_callback(:opentelemetry.get_meter(__MODULE__), unquote(observer), unquote(callback))
end
end
defdelegate observe(observer_result, number, label_set), to: :ot_observer
end
| 28.176471 | 113 | 0.736952 |
73de692e858fb63304f3393d6cf8e3376b04104d | 67 | ex | Elixir | lib/schema/error.ex | fiodorbaczynski/trivial_csv | 5d80a260f385bf2cbbd296a5ca359c9865510317 | [
"Apache-2.0"
] | 20 | 2019-08-02T08:25:51.000Z | 2021-05-18T21:28:47.000Z | lib/schema/error.ex | fiodorbaczynski/trivial_csv | 5d80a260f385bf2cbbd296a5ca359c9865510317 | [
"Apache-2.0"
] | null | null | null | lib/schema/error.ex | fiodorbaczynski/trivial_csv | 5d80a260f385bf2cbbd296a5ca359c9865510317 | [
"Apache-2.0"
] | null | null | null | defmodule DataQuacker.SchemaError do
defexception [:message]
end
| 16.75 | 36 | 0.820896 |
73de82610c6a32682fc42a80112835f8f01b0ee1 | 74 | exs | Elixir | test/views/layout_view_test.exs | m4ta1l/bors-ng | a4d04fd740e24d03833cd46a76f0f9e5be96f818 | [
"Apache-2.0"
] | 1,223 | 2017-01-11T19:25:08.000Z | 2022-03-21T12:31:28.000Z | test/views/layout_view_test.exs | seanpm2001/bors-ng | 2482df1c2f4680b33534620571f04075b6b4390a | [
"Apache-2.0"
] | 1,428 | 2017-01-09T16:36:18.000Z | 2022-03-28T14:19:40.000Z | test/views/layout_view_test.exs | seanpm2001/bors-ng | 2482df1c2f4680b33534620571f04075b6b4390a | [
"Apache-2.0"
] | 184 | 2017-01-10T18:08:06.000Z | 2022-03-14T18:53:24.000Z | defmodule BorsNG.LayoutViewTest do
use BorsNG.ConnCase, async: true
end
| 18.5 | 34 | 0.810811 |
73ded7fa2239472789ded080af7f8b98fd410005 | 906 | ex | Elixir | clients/big_query_data_transfer/lib/google_api/big_query_data_transfer/v1/metadata.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/big_query_data_transfer/lib/google_api/big_query_data_transfer/v1/metadata.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | clients/big_query_data_transfer/lib/google_api/big_query_data_transfer/v1/metadata.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.BigQueryDataTransfer.V1 do
@moduledoc """
API client metadata for GoogleApi.BigQueryDataTransfer.V1.
"""
@discovery_revision "20210418"
def discovery_revision(), do: @discovery_revision
end
| 33.555556 | 74 | 0.764901 |
73dee0cfb779fbabfe4239bad924dd41f91c2826 | 374 | ex | Elixir | lib/hl7/2.4/datatypes/xtn.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/datatypes/xtn.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | lib/hl7/2.4/datatypes/xtn.ex | calvinb/elixir-hl7 | 5e953fa11f9184857c0ec4dda8662889f35a6bec | [
"Apache-2.0"
] | null | null | null | defmodule HL7.V2_4.DataTypes.Xtn do
@moduledoc false
use HL7.DataType,
fields: [
"999_9999999_x99999c_any_text": nil,
telecommunication_use_code: nil,
telecommunication_equipment_type_id: nil,
email_address: nil,
country_code: nil,
areacity_code: nil,
phone_number: nil,
extension: nil,
any_text: nil
]
end
| 22 | 47 | 0.668449 |
73deffe386fcc175913847009be611cfdfef43c9 | 2,988 | ex | Elixir | lib/mix/tasks/gen.api.ex | theblitzapp/phoenix_config | 984223f874405f4980babb9d377c3b709f10cd3b | [
"MIT"
] | null | null | null | lib/mix/tasks/gen.api.ex | theblitzapp/phoenix_config | 984223f874405f4980babb9d377c3b709f10cd3b | [
"MIT"
] | null | null | null | lib/mix/tasks/gen.api.ex | theblitzapp/phoenix_config | 984223f874405f4980babb9d377c3b709f10cd3b | [
"MIT"
] | null | null | null | defmodule Mix.Tasks.PhoenixConfig.Gen.Api do
use Mix.Task
alias Mix.PhoenixConfigHelpers
alias PhoenixConfig.{AbsintheTypeMerge, AbsintheSchemaBuilder}
@shortdoc "Utilizes all the config files and generates a GraphQL API"
@moduledoc """
Once you have a few resource config files created by
using the `mix phoenix_config.gen.resource` command, you can use
this command to generate all the api files for Absinthe
"""
def run(args) do
PhoenixConfigHelpers.ensure_not_in_umbrella!("phoenix_config.gen.api")
{opts, _extra_args, _} = OptionParser.parse(args,
switches: [
dirname: :string,
file_name: :string,
force: :boolean,
quiet: :boolean
]
)
    opts[:dirname]
    |> PhoenixConfigHelpers.get_phoenix_config_file_path(opts[:file_name])
    |> eval_config_file
    |> expand_crud_types
    |> pre_merge_types
    |> AbsintheSchemaBuilder.generate
    |> run_config_functions
    |> AbsintheTypeMerge.maybe_merge_types
    |> generate_templates
    |> write_generated_templates(Keyword.take(opts, [:force, :quiet]))
end
defp pre_merge_types(generation_items) do
{functions, generation_structs} = Enum.split_with(generation_items, &is_function/1)
AbsintheTypeMerge.maybe_merge_types(generation_structs) ++ functions
end
defp expand_crud_types(generation_items) do
Enum.flat_map(generation_items, fn
%AbsintheGenerator.CrudResource{} = generation_item ->
generation_item |> AbsintheGenerator.CrudResource.run |> Enum.map(&elem(&1, 0))
generation_item -> [generation_item]
end)
end
defp eval_config_file(file_path) do
{resources, _} = Code.eval_file(file_path)
List.flatten(resources)
end
defp run_config_functions(generation_items) do
{config_functions, generation_structs} = Enum.split_with(generation_items, &is_function/1)
Enum.reduce(config_functions, generation_structs, fn func, items_acc ->
func.(items_acc)
end)
end
defp generate_templates(generation_structs) do
generation_structs
|> Enum.reduce([], fn
(generation_item, acc) ->
case AbsintheGenerator.run(generation_item) do
[str | _] = template when is_binary(str) ->
[{generation_item, template} | acc]
generation_item_children -> generation_item_children ++ acc
end
end)
|> Enum.reverse
end
defp write_generated_templates(generation_items, opts) do
Enum.map(generation_items, fn
{_generation_struct, [multi_templates | _] = struct_template_tuples} when is_tuple(multi_templates) ->
Enum.map(struct_template_tuples, fn {generation_struct_item, template} ->
AbsintheGenerator.FileWriter.write(generation_struct_item, template, opts)
end)
{generation_struct, template} ->
AbsintheGenerator.FileWriter.write(generation_struct, template, opts)
end)
end
end
| 31.125 | 108 | 0.703481 |
73df143fa4aec3e68a87110edf30e6482b360365 | 4,291 | ex | Elixir | clients/artifact_registry/lib/google_api/artifact_registry/v1/model/binding.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/artifact_registry/lib/google_api/artifact_registry/v1/model/binding.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | clients/artifact_registry/lib/google_api/artifact_registry/v1/model/binding.ex | MMore/elixir-google-api | 0574ec1439d9bbfe22d63965be1681b0f45a94c9 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.ArtifactRegistry.V1.Model.Binding do
@moduledoc """
Associates `members`, or principals, with a `role`.
## Attributes
* `condition` (*type:* `GoogleApi.ArtifactRegistry.V1.Model.Expr.t`, *default:* `nil`) - The condition that is associated with this binding. If the condition evaluates to `true`, then this binding applies to the current request. If the condition evaluates to `false`, then this binding does not apply to the current request. However, a different role binding might grant the same role to one or more of the principals in this binding. To learn which resources support conditions in their IAM policies, see the [IAM documentation](https://cloud.google.com/iam/help/conditions/resource-policies).
* `members` (*type:* `list(String.t)`, *default:* `nil`) - Specifies the principals requesting access for a Cloud Platform resource. `members` can have the following values: * `allUsers`: A special identifier that represents anyone who is on the internet; with or without a Google account. * `allAuthenticatedUsers`: A special identifier that represents anyone who is authenticated with a Google account or a service account. * `user:{emailid}`: An email address that represents a specific Google account. For example, `[email protected]` . * `serviceAccount:{emailid}`: An email address that represents a service account. For example, `[email protected]`. * `group:{emailid}`: An email address that represents a Google group. For example, `[email protected]`. * `deleted:user:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a user that has been recently deleted. For example, `[email protected]?uid=123456789012345678901`. If the user is recovered, this value reverts to `user:{emailid}` and the recovered user retains the role in the binding. * `deleted:serviceAccount:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a service account that has been recently deleted. For example, `[email protected]?uid=123456789012345678901`. If the service account is undeleted, this value reverts to `serviceAccount:{emailid}` and the undeleted service account retains the role in the binding. * `deleted:group:{emailid}?uid={uniqueid}`: An email address (plus unique identifier) representing a Google group that has been recently deleted. For example, `[email protected]?uid=123456789012345678901`. If the group is recovered, this value reverts to `group:{emailid}` and the recovered group retains the role in the binding. * `domain:{domain}`: The G Suite domain (primary) that represents all the users of that domain. For example, `google.com` or `example.com`.
* `role` (*type:* `String.t`, *default:* `nil`) - Role that is assigned to the list of `members`, or principals. For example, `roles/viewer`, `roles/editor`, or `roles/owner`.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:condition => GoogleApi.ArtifactRegistry.V1.Model.Expr.t() | nil,
:members => list(String.t()) | nil,
:role => String.t() | nil
}
field(:condition, as: GoogleApi.ArtifactRegistry.V1.Model.Expr)
field(:members, type: :list)
field(:role)
end
defimpl Poison.Decoder, for: GoogleApi.ArtifactRegistry.V1.Model.Binding do
def decode(value, options) do
GoogleApi.ArtifactRegistry.V1.Model.Binding.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.ArtifactRegistry.V1.Model.Binding do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 80.962264 | 1,972 | 0.753904 |
73df1f219169d4f561e128c8f39a639b75556a89 | 998 | ex | Elixir | associations_lister.ex | JakTomczak/AssociationsLister | 0556f03f48e77f6eaf029dbce3f5a4909240ad23 | [
"Unlicense"
] | null | null | null | associations_lister.ex | JakTomczak/AssociationsLister | 0556f03f48e77f6eaf029dbce3f5a4909240ad23 | [
"Unlicense"
] | null | null | null | associations_lister.ex | JakTomczak/AssociationsLister | 0556f03f48e77f6eaf029dbce3f5a4909240ad23 | [
"Unlicense"
] | null | null | null | defmodule AssociationsLister do
@spec run(Ecto.Repo.t()) :: :ok
def run(repo) do
file_name = "associations.csv"
file = File.open!(file_name, [:write, :utf8])
    [~w(From To Association_name)] # column names
|> CSV.encode()
|> Enum.each(&IO.write(file, &1))
query = """
SELECT
(tc.table_name, ccu.table_name, tc.constraint_name)
FROM
information_schema.table_constraints AS tc
JOIN information_schema.key_column_usage AS kcu ON
tc.constraint_name = kcu.constraint_name
JOIN information_schema.constraint_column_usage AS ccu ON
ccu.constraint_name = tc.constraint_name
WHERE constraint_type = 'FOREIGN KEY'
"""
{:ok, results} = Ecto.Adapters.SQL.query(repo, query, [])
results.rows
|> Enum.map(fn [{from, to, name}] ->
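      # Strip the "<from_table>_" prefix and the trailing "_id_fkey" suffix from the
      # constraint name (assumes the conventional "<table>_<column>_id_fkey" naming),
      # keeping just the association name.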
name = String.slice(name, (String.length(from)+1)..-9)
[from, to, name]
end)
|> CSV.encode()
|> Enum.each(&IO.write(file, &1))
end
end
| 29.352941 | 63 | 0.624248 |
73df34d23d226fb5de01ba251ed6e77da34702e6 | 689 | exs | Elixir | portut/mix.exs | Redorb/ElixirTestbed | 7673452d21bba2126ebd833dd296f487cf0ae4c7 | [
"MIT"
] | null | null | null | portut/mix.exs | Redorb/ElixirTestbed | 7673452d21bba2126ebd833dd296f487cf0ae4c7 | [
"MIT"
] | null | null | null | portut/mix.exs | Redorb/ElixirTestbed | 7673452d21bba2126ebd833dd296f487cf0ae4c7 | [
"MIT"
] | null | null | null | defmodule Portut.Mixfile do
use Mix.Project
def project do
[app: :portut,
version: "0.0.1",
elixir: "~> 1.2",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Configuration for the OTP application
#
# Type "mix help compile.app" for more information
def application do
[applications: [:logger],
mod: {Portut, []}]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type "mix help deps" for more examples and options
defp deps do
[]
end
end
| 20.264706 | 77 | 0.599419 |
73df38567b984e4a908d497dac642acc12f5ac65 | 328 | ex | Elixir | lib/events_tools_web/accounts/abilities.ex | community-tools/community-tools | 40b0e6cc9234b44593d2ab60bb2303d7224deb30 | [
"Apache-2.0"
] | 2 | 2017-10-06T01:14:35.000Z | 2017-11-18T16:44:44.000Z | lib/events_tools_web/accounts/abilities.ex | community-tools/community-tools | 40b0e6cc9234b44593d2ab60bb2303d7224deb30 | [
"Apache-2.0"
] | 6 | 2017-10-06T00:04:59.000Z | 2017-10-06T00:09:27.000Z | lib/events_tools_web/accounts/abilities.ex | apps-team/community-tools | 40b0e6cc9234b44593d2ab60bb2303d7224deb30 | [
"Apache-2.0"
] | 1 | 2017-10-06T01:17:35.000Z | 2017-10-06T01:17:35.000Z | defmodule CommunityTools.Abilities do
# alias CommunityTools.Accounts.User
#alias CommunityTools.Posts.Post
# defimpl Canada.Can, for: User do
# def can?(%User{ id: user_id }, action, %Post{ user_id: user_id })
# when action in [:show], do: true
# def can?(%User{ id: user_id }, _, _), do: false
# end
end
| 20.5 | 70 | 0.661585 |
73df38bd78246465046674706a2e2a81540063d0 | 7,235 | ex | Elixir | clients/android_enterprise/lib/google_api/android_enterprise/v1/api/grouplicenses.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/api/grouplicenses.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | clients/android_enterprise/lib/google_api/android_enterprise/v1/api/grouplicenses.ex | kolorahl/elixir-google-api | 46bec1e092eb84c6a79d06c72016cb1a13777fa6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AndroidEnterprise.V1.Api.Grouplicenses do
@moduledoc """
API calls for all endpoints tagged `Grouplicenses`.
"""
alias GoogleApi.AndroidEnterprise.V1.Connection
alias GoogleApi.Gax.{Request, Response}
@library_version Mix.Project.config() |> Keyword.get(:version, "")
@doc """
Retrieves details of an enterprise's group license for a product.
## Parameters
* `connection` (*type:* `GoogleApi.AndroidEnterprise.V1.Connection.t`) - Connection to server
* `enterprise_id` (*type:* `String.t`) - The ID of the enterprise.
* `group_license_id` (*type:* `String.t`) - The ID of the product the group license is for, e.g.
"app:com.google.android.gm".
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AndroidEnterprise.V1.Model.GroupLicense{}}` on success
* `{:error, info}` on failure
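
  ## Example

  Illustrative only; the connection and enterprise ID below are placeholders:

      {:ok, license} =
        GoogleApi.AndroidEnterprise.V1.Api.Grouplicenses.androidenterprise_grouplicenses_get(
          connection,
          "C01234567",
          "app:com.google.android.gm"
        )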
"""
@spec androidenterprise_grouplicenses_get(
Tesla.Env.client(),
String.t(),
String.t(),
keyword(),
keyword()
) ::
{:ok, GoogleApi.AndroidEnterprise.V1.Model.GroupLicense.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def androidenterprise_grouplicenses_get(
connection,
enterprise_id,
group_license_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url(
"/androidenterprise/v1/enterprises/{enterpriseId}/groupLicenses/{groupLicenseId}",
%{
"enterpriseId" => URI.encode(enterprise_id, &URI.char_unreserved?/1),
"groupLicenseId" => URI.encode(group_license_id, &URI.char_unreserved?/1)
}
)
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(opts ++ [struct: %GoogleApi.AndroidEnterprise.V1.Model.GroupLicense{}])
end
@doc """
Retrieves IDs of all products for which the enterprise has a group license.
## Parameters
* `connection` (*type:* `GoogleApi.AndroidEnterprise.V1.Connection.t`) - Connection to server
* `enterprise_id` (*type:* `String.t`) - The ID of the enterprise.
* `optional_params` (*type:* `keyword()`) - Optional parameters
* `:"$.xgafv"` (*type:* `String.t`) - V1 error format.
* `:access_token` (*type:* `String.t`) - OAuth access token.
* `:alt` (*type:* `String.t`) - Data format for response.
* `:callback` (*type:* `String.t`) - JSONP
* `:fields` (*type:* `String.t`) - Selector specifying which fields to include in a partial response.
* `:key` (*type:* `String.t`) - API key. Your API key identifies your project and provides you with API access, quota, and reports. Required unless you provide an OAuth 2.0 token.
* `:oauth_token` (*type:* `String.t`) - OAuth 2.0 token for the current user.
* `:prettyPrint` (*type:* `boolean()`) - Returns response with indentations and line breaks.
* `:quotaUser` (*type:* `String.t`) - Available to use for quota purposes for server-side applications. Can be any arbitrary string assigned to a user, but should not exceed 40 characters.
* `:uploadType` (*type:* `String.t`) - Legacy upload protocol for media (e.g. "media", "multipart").
* `:upload_protocol` (*type:* `String.t`) - Upload protocol for media (e.g. "raw", "multipart").
* `opts` (*type:* `keyword()`) - Call options
## Returns
* `{:ok, %GoogleApi.AndroidEnterprise.V1.Model.GroupLicensesListResponse{}}` on success
* `{:error, info}` on failure
"""
@spec androidenterprise_grouplicenses_list(Tesla.Env.client(), String.t(), keyword(), keyword()) ::
{:ok, GoogleApi.AndroidEnterprise.V1.Model.GroupLicensesListResponse.t()}
| {:ok, Tesla.Env.t()}
| {:error, any()}
def androidenterprise_grouplicenses_list(
connection,
enterprise_id,
optional_params \\ [],
opts \\ []
) do
optional_params_config = %{
:"$.xgafv" => :query,
:access_token => :query,
:alt => :query,
:callback => :query,
:fields => :query,
:key => :query,
:oauth_token => :query,
:prettyPrint => :query,
:quotaUser => :query,
:uploadType => :query,
:upload_protocol => :query
}
request =
Request.new()
|> Request.method(:get)
|> Request.url("/androidenterprise/v1/enterprises/{enterpriseId}/groupLicenses", %{
"enterpriseId" => URI.encode(enterprise_id, &URI.char_unreserved?/1)
})
|> Request.add_optional_params(optional_params_config, optional_params)
|> Request.library_version(@library_version)
connection
|> Connection.execute(request)
|> Response.decode(
opts ++ [struct: %GoogleApi.AndroidEnterprise.V1.Model.GroupLicensesListResponse{}]
)
end
end
| 42.309942 | 196 | 0.633863 |
73df4b828f73869a901888f71acef5698f6bfab9 | 926 | exs | Elixir | apps/datapio_k8s/mix.exs | datapio/opencore | 6e766c3b3a2ad8b07295c7fd27cffc0923284197 | [
"Apache-2.0"
] | 5 | 2021-05-14T22:01:08.000Z | 2021-09-21T16:28:09.000Z | apps/datapio_k8s/mix.exs | datapio/opencore | 6e766c3b3a2ad8b07295c7fd27cffc0923284197 | [
"Apache-2.0"
] | 198 | 2019-10-17T12:22:25.000Z | 2022-03-16T02:14:14.000Z | apps/datapio_k8s/mix.exs | datapio/opencore | 6e766c3b3a2ad8b07295c7fd27cffc0923284197 | [
"Apache-2.0"
] | 1 | 2022-03-10T08:54:36.000Z | 2022-03-10T08:54:36.000Z | defmodule Datapio.K8s.MixProject do
use Mix.Project
def project do
[
app: :datapio_k8s,
version: "0.1.0",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.12",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
def application do
[
extra_applications: [:logger]
]
end
defp deps do
[
{
# Kubernetes Client
:k8s, "~> 1.0"
},
{
# JSON Schema validation
:json_xema, "~> 0.6"
},
{
# JSON Encoder/Decoder
:jason, "~> 1.2"
},
{
# extended DateTime library
:calendar, "~> 1.0.0"
},
# Dev Dependencies
{
# Mocking framework
:mock, "~> 0.3",
only: :test,
runtime: false
}
]
end
end
| 17.807692 | 45 | 0.455724 |