hexsha (string, 40 chars) | size (int64, 2 to 991k) | ext (string, 2 classes) | lang (string, 1 class) | max_stars_repo_path (string, 4 to 208 chars) | max_stars_repo_name (string, 6 to 106 chars) | max_stars_repo_head_hexsha (string, 40 chars) | max_stars_repo_licenses (sequence) | max_stars_count (int64, 1 to 33.5k, nullable) | max_stars_repo_stars_event_min_datetime (string, 24 chars, nullable) | max_stars_repo_stars_event_max_datetime (string, 24 chars, nullable) | max_issues_repo_path (string, 4 to 208 chars) | max_issues_repo_name (string, 6 to 106 chars) | max_issues_repo_head_hexsha (string, 40 chars) | max_issues_repo_licenses (sequence) | max_issues_count (int64, 1 to 16.3k, nullable) | max_issues_repo_issues_event_min_datetime (string, 24 chars, nullable) | max_issues_repo_issues_event_max_datetime (string, 24 chars, nullable) | max_forks_repo_path (string, 4 to 208 chars) | max_forks_repo_name (string, 6 to 106 chars) | max_forks_repo_head_hexsha (string, 40 chars) | max_forks_repo_licenses (sequence) | max_forks_count (int64, 1 to 6.91k, nullable) | max_forks_repo_forks_event_min_datetime (string, 24 chars, nullable) | max_forks_repo_forks_event_max_datetime (string, 24 chars, nullable) | content (string, 2 to 991k chars) | avg_line_length (float64, 1 to 36k) | max_line_length (int64, 1 to 977k) | alphanum_fraction (float64, 0 to 1) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7b0bf3de781f7a222ccac2d21dbf1b4bee850be | 776 | exs | Elixir | priv/repo/migrations/20210412174903_create_admins_auth_tables.exs | RatioPBC/epi-contacts | 6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee | ["Apache-2.0"] | null | null | null | priv/repo/migrations/20210412174903_create_admins_auth_tables.exs | RatioPBC/epi-contacts | 6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee | ["Apache-2.0"] | 13 | 2021-06-29T04:35:41.000Z | 2022-02-09T04:25:39.000Z | priv/repo/migrations/20210412174903_create_admins_auth_tables.exs | RatioPBC/epi-contacts | 6c43eea52cbfe2097f48b02e3d0c8fce3b46f1ee | ["Apache-2.0"] | null | null | null |
defmodule EpiContacts.Repo.Migrations.CreateAdminsAuthTables do
use Ecto.Migration
def change do
execute "CREATE EXTENSION IF NOT EXISTS citext", ""
create table(:admins) do
add :email, :citext, null: false
add :hashed_password, :string, null: false
add :totp_secret, :binary, null: false
timestamps()
end
create unique_index(:admins, [:email])
create table(:admins_tokens) do
add :admin_id, references(:admins, on_delete: :delete_all), null: false
add :token, :binary, null: false
add :context, :string, null: false
add :sent_to, :string
timestamps(updated_at: false)
end
create index(:admins_tokens, [:admin_id])
create unique_index(:admins_tokens, [:context, :token])
end
end
| 27.714286 | 77 | 0.675258 |
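# Illustrative sketch (not part of the dataset record above): an Ecto schema
# that the admins table created by this migration could back. The module and
# context names are assumptions; only the column names come from the migration.
defmodule EpiContacts.Accounts.Admin do
  use Ecto.Schema

  schema "admins" do
    field :email, :string
    field :hashed_password, :string, redact: true
    field :totp_secret, :binary, redact: true

    timestamps()
  end
end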
f7b0e838cb71bef89e87c84ea83c4ed89255025d | 1,657 | ex | Elixir | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/flags_list_response.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | ["Apache-2.0"] | 1 | 2021-10-01T09:20:41.000Z | 2021-10-01T09:20:41.000Z | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/flags_list_response.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | ["Apache-2.0"] | null | null | null | clients/sql_admin/lib/google_api/sql_admin/v1beta4/model/flags_list_response.ex | kyleVsteger/elixir-google-api | 3a0dd498af066a4361b5b0fd66ffc04a57539488 | ["Apache-2.0"] | null | null | null |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.SQLAdmin.V1beta4.Model.FlagsListResponse do
@moduledoc """
Flags list response.
## Attributes
* `items` (*type:* `list(GoogleApi.SQLAdmin.V1beta4.Model.Flag.t)`, *default:* `nil`) - List of flags.
* `kind` (*type:* `String.t`, *default:* `nil`) - This is always *sql#flagsList*.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:items => list(GoogleApi.SQLAdmin.V1beta4.Model.Flag.t()) | nil,
:kind => String.t() | nil
}
field(:items, as: GoogleApi.SQLAdmin.V1beta4.Model.Flag, type: :list)
field(:kind)
end
defimpl Poison.Decoder, for: GoogleApi.SQLAdmin.V1beta4.Model.FlagsListResponse do
def decode(value, options) do
GoogleApi.SQLAdmin.V1beta4.Model.FlagsListResponse.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.SQLAdmin.V1beta4.Model.FlagsListResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.14 | 106 | 0.722993 |
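# Illustrative sketch (assumed usage, not taken from the generated client above):
# decoding a JSON response body into the FlagsListResponse model with Poison.
# A fuller decoder would also map each entry of "items" onto the Flag model.
json = ~s({"kind": "sql#flagsList", "items": []})

flags_list =
  Poison.decode!(json, as: %GoogleApi.SQLAdmin.V1beta4.Model.FlagsListResponse{})

flags_list.kind
#=> "sql#flagsList"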
f7b10a314b0575e0c18b76926ac4528e79cd566f | 378 | exs | Elixir | config/dev.exs | TheFirstAvenger/umbrella_streamline_formatter | 59cc735d45b11b5abce5fbb852ab2911836f8483 | ["MIT"] | 1 | 2019-09-27T02:40:01.000Z | 2019-09-27T02:40:01.000Z | config/dev.exs | TheFirstAvenger/umbrella_streamline_formatter | 59cc735d45b11b5abce5fbb852ab2911836f8483 | ["MIT"] | 4 | 2019-06-03T19:50:46.000Z | 2019-06-03T19:50:47.000Z | config/dev.exs | TheFirstAvenger/logic_sim | 07d3bfd4f940870f00f344bc8807cef49fafed31 | ["MIT"
] | null | null | null | use Mix.Config
config :git_hooks,
hooks: [
pre_commit: [
verbose: true,
mix_tasks: [
"format --check-formatted --dry-run --check-equivalent"
]
],
pre_push: [
verbose: true,
mix_tasks: [
"clean",
"compile --warnings-as-errors",
"credo --strict",
"dialyzer --halt-exit-status"
]
]
]
| 18 | 63 | 0.507937 |
f7b11360bb3d00fd340eb7dc647f4fac2f1bb53b | 1,422 | ex | Elixir | apps/gitgud/lib/gitgud/application.ex | EdmondFrank/gitgud | 1952c16130564357aa6f23e35f48f19e3a50d4dd | ["MIT"] | 449 | 2018-03-06T01:05:55.000Z | 2022-03-23T21:03:56.000Z | apps/gitgud/lib/gitgud/application.ex | EdmondFrank/gitgud | 1952c16130564357aa6f23e35f48f19e3a50d4dd | ["MIT"] | 69 | 2018-03-06T09:26:41.000Z | 2022-03-21T22:43:09.000Z | apps/gitgud/lib/gitgud/application.ex | EdmondFrank/gitgud | 1952c16130564357aa6f23e35f48f19e3a50d4dd | ["MIT"] | 41 | 2018-03-06T01:06:07.000Z | 2021-11-21T17:55:04.000Z |
defmodule GitGud.Application do
@moduledoc false
use Application
def start(_type, _args) do
import Supervisor.Spec, warn: false
telemetry_attach_git_agent()
telemetry_attach_git_wire_protocol()
telemetry_attach_graphql()
children = [
{Cluster.Supervisor, [Application.get_env(:libcluster, :topologies, []), [name: GitGud.ClusterSupervisor]]},
{GitGud.DB, []},
{GitGud.RepoSupervisor, []},
{GitGud.SSHServer, []},
]
Supervisor.start_link(children, strategy: :one_for_one, name: GitGud.Supervisor)
end
#
# Helpers
#
defp telemetry_attach_git_agent do
:telemetry.attach_many("git-agent",
[
[:gitrekt, :git_agent, :call],
[:gitrekt, :git_agent, :call_stream],
[:gitrekt, :git_agent, :execute],
[:gitrekt, :git_agent, :stream],
[:gitrekt, :git_agent, :transaction_start]
],
&GitGud.Telemetry.GitLoggerHandler.handle_event/4, %{}
)
end
defp telemetry_attach_git_wire_protocol do
:telemetry.attach_many("git-wire-protocol",
[
[:gitrekt, :wire_protocol, :start],
[:gitrekt, :wire_protocol, :stop]
],
&GitGud.Telemetry.GitLoggerHandler.handle_event/4, %{}
)
end
defp telemetry_attach_graphql do
:telemetry.attach("graphql", [:absinthe, :execute, :operation, :stop], &GitGud.Telemetry.GraphQLLoggerHandler.handle_event/4, %{})
end
end
| 27.346154 | 134 | 0.658228 |
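# Illustrative sketch (assumed shape, not from the gitgud repository): a handler
# module with the handle_event/4 signature that :telemetry.attach_many/4 expects
# in the application module above. The module name is invented.
defmodule MyApp.Telemetry.LoggerHandler do
  require Logger

  def handle_event(event, measurements, metadata, _config) do
    Logger.debug(fn ->
      "telemetry #{inspect(event)}: #{inspect(measurements)} #{inspect(metadata)}"
    end)
  end
end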
f7b132ecb70498ac54117fe24d5585f843a780bb | 3,929 | exs | Elixir | test/blockchain/account_test.exs | poanetwork/blockchain | 408287adeab1b7dbb7d55fd7398dd9320e37b30f | ["MIT"] | null | null | null | test/blockchain/account_test.exs | poanetwork/blockchain | 408287adeab1b7dbb7d55fd7398dd9320e37b30f | ["MIT"] | null | null | null | test/blockchain/account_test.exs | poanetwork/blockchain | 408287adeab1b7dbb7d55fd7398dd9320e37b30f | ["MIT"] | null | null | null |
defmodule Blockchain.AccountTest do
use ExUnit.Case, async: true
doctest Blockchain.Account
alias Blockchain.Account
test "serialize and deserialize" do
acct = %Account{nonce: 5, balance: 10, storage_root: <<0x00, 0x01>>, code_hash: <<0x01, 0x02>>}
assert acct == acct |> Account.serialize |> ExRLP.encode |> ExRLP.decode |> Account.deserialize
end
test "valid empty state_root" do
db = MerklePatriciaTree.Test.random_ets_db()
state = MerklePatriciaTree.Trie.new(db)
assert state.root_hash == <<86, 232, 31, 23, 27, 204, 85, 166, 255, 131, 69, 230, 146, 192, 248, 110, 91, 72, 224, 27, 153, 108, 173, 192, 1, 98, 47, 181, 227, 99, 180, 33>>
end
test "valid state_root with one empty account" do
db = MerklePatriciaTree.Test.random_ets_db()
state = MerklePatriciaTree.Trie.new(db)
state = state
|> Blockchain.Account.put_account(<<0x01::160>>, %Blockchain.Account{
nonce: 0,
balance: 0,
code_hash: <<>> |> BitHelper.kec(),
storage_root: ExRLP.encode(<<>>) |> BitHelper.kec(),
})
assert state.root_hash == <<166, 181, 213, 15, 123, 60, 57, 185, 105, 194, 254, 143, 237, 9, 25, 57, 198, 116, 254, 244, 155, 72, 38, 48, 156, 182, 153, 67, 97, 227, 155, 113>>
end
test "valid state root with an updated storage value" do
db = MerklePatriciaTree.Test.random_ets_db()
address = <<0x01::160>>
state = MerklePatriciaTree.Trie.new(db)
state = state
|> Blockchain.Account.put_account(address, %Blockchain.Account{
nonce: 0,
balance: 0,
code_hash: <<>> |> BitHelper.kec(),
storage_root: ExRLP.encode(<<>>) |> BitHelper.kec(),
})
|> Blockchain.Account.put_storage(address, 1, 1)
assert state.root_hash == <<100, 231, 49, 195, 57, 235, 18, 88, 149, 202, 124, 230, 118, 223, 241, 190, 56, 214, 7, 199, 253, 154, 5, 187, 181, 217, 116, 222, 172, 24, 209, 217>>
end
test "valid state root for an account with code set" do
db = MerklePatriciaTree.Test.random_ets_db()
state = MerklePatriciaTree.Trie.new(db)
address = <<0x01::160>>
state = state
|> Blockchain.Account.put_account(address, %Blockchain.Account{
nonce: 0,
balance: 0,
code_hash: <<>> |> BitHelper.kec(),
storage_root: ExRLP.encode(<<>>) |> BitHelper.kec(),
})
|> Blockchain.Account.put_code(address, <<1, 2, 3>>)
assert state.root_hash == <<57, 201, 95, 169, 186, 185, 65, 138, 89, 184, 108, 249, 63, 187, 179, 237, 59, 248, 230, 221, 33, 72, 223, 183, 87, 146, 198, 9, 43, 48, 48, 168>>
end
test "valid state root after nonce has been incremented" do
db = MerklePatriciaTree.Test.random_ets_db()
state = MerklePatriciaTree.Trie.new(db)
address = <<0x01::160>>
state = state
|> Blockchain.Account.put_account(address, %Blockchain.Account{
nonce: 99,
balance: 0,
code_hash: <<>> |> BitHelper.kec(),
storage_root: ExRLP.encode(<<>>) |> BitHelper.kec(),
})
|> Blockchain.Account.increment_nonce(address)
assert state.root_hash == <<216, 110, 244, 57, 70, 173, 157, 118, 183, 112, 181, 20, 47, 193, 5, 3, 244, 142, 211, 183, 134, 195, 74, 102, 249, 240, 226, 192, 75, 163, 199, 197>>
end
test "valid state root with an account balance set" do
db = MerklePatriciaTree.Test.random_ets_db()
state = MerklePatriciaTree.Trie.new(db)
address = <<0x01::160>>
state = state
|> Blockchain.Account.put_account(address, %Blockchain.Account{
nonce: 0,
balance: 10,
code_hash: <<>> |> BitHelper.kec(),
storage_root: ExRLP.encode(<<>>) |> BitHelper.kec(),
})
|> Blockchain.Account.add_wei(address, 10)
assert state.root_hash == <<192, 238, 234, 193, 139, 21, 7, 152, 194, 188, 80, 192, 211, 109, 186, 215, 229, 222, 21, 222, 121, 230, 139, 179, 23, 132, 217, 128, 6, 17, 167, 54>>
end
end
| 40.505155 | 182 | 0.618987 |
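# Illustrative sketch (uses only functions exercised by the tests above; the
# in-memory trie setup mirrors the test helpers and is an assumption, not an
# excerpt from the library).
db = MerklePatriciaTree.Test.random_ets_db()
state = MerklePatriciaTree.Trie.new(db)

address = <<0x01::160>>

account = %Blockchain.Account{
  nonce: 0,
  balance: 0,
  code_hash: <<>> |> BitHelper.kec(),
  storage_root: ExRLP.encode(<<>>) |> BitHelper.kec()
}

state
|> Blockchain.Account.put_account(address, account)
|> Blockchain.Account.add_wei(address, 100)
|> Blockchain.Account.increment_nonce(address)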
f7b140bbac2c52df89b6fc14c17e33b9566cfb7a | 1,353 | ex | Elixir | lib/utils.ex | henry232323/NewsBot | 2bff23ecd5964913890d117dc87dba45e331f519 | ["MIT"] | 4 | 2018-08-11T01:42:41.000Z | 2021-06-28T10:37:54.000Z | lib/utils.ex | henry232323/NewsBot | 2bff23ecd5964913890d117dc87dba45e331f519 | ["MIT"] | 1 | 2018-07-22T21:03:09.000Z | 2018-07-22T23:00:43.000Z | lib/utils.ex | henry232323/NewsBot | 2bff23ecd5964913890d117dc87dba45e331f519 | ["MIT"] | null | null | null |
defmodule Utils do
alias Nostrum.Api
def parse_name(text, guild_id) do
if String.starts_with?(text, "<@") and String.ends_with?(text, ">") do
id = text
|> String.replace_prefix("<@", "")
|> String.replace_suffix(">", "")
|> String.replace_prefix("!", "")
Api.get_member(guild_id, id)
else
guild = Api.get_guild!(guild_id).members
|> get(name: text)
end
end
def get_id(enum, id) do
enum
|> Enum.find(fn i -> i["id"] == id end)
end
def get(enum, args \\ []) do
[{attribute, value} | _] = args
enum
|> Enum.find(fn i -> Map.from_struct(i)[attribute] == value end)
end
def get_all(enum, args \\ []) do
[{attribute, value} | _] = args
enum
|> Enum.filter(fn i -> Map.from_struct(i)[attribute] == value end)
end
def create_msg(content) do
{:ok, author} = Api.get_user(122739797646245899)
message = %Nostrum.Struct.Message{attachments: [], author: author,
channel_id: 166687679021449216, content: content, edited_timestamp: "",
embeds: [], id: 0, mention_everyone: false, mention_roles: [],
mentions: [], nonce: 0, pinned: false, timestamp: "",
tts: false, type: 0}
ElixirBot.handle_event({:MESSAGE_CREATE, {message}, nil}, nil)
end
end
| 28.1875 | 78 | 0.574279 |
f7b14a6e7a327439d9a4831f9ca9412f95799b29 | 1,355 | ex | Elixir | lib/ex_oauth2_provider/oauth2/authorization/utils.ex | heroinbob/ex_oauth2_provider | 80c21a53bba0955ab3b66f1bd32cc81db0f04f49 | ["MIT"] | null | null | null | lib/ex_oauth2_provider/oauth2/authorization/utils.ex | heroinbob/ex_oauth2_provider | 80c21a53bba0955ab3b66f1bd32cc81db0f04f49 | ["MIT"] | null | null | null | lib/ex_oauth2_provider/oauth2/authorization/utils.ex | heroinbob/ex_oauth2_provider | 80c21a53bba0955ab3b66f1bd32cc81db0f04f49 | ["MIT"] | null | null | null |
defmodule ExOauth2Provider.Authorization.Utils do
@moduledoc false
alias ExOauth2Provider.{Applications, Utils.Error}
alias Ecto.Schema
@doc false
@spec prehandle_request(Schema.t() | nil, map(), keyword()) :: {:ok, map()} | {:error, map()}
def prehandle_request(resource_owner, request, config, opts \\ []) do
resource_owner
|> new_params(request)
|> load_client(config, opts)
|> set_defaults()
end
defp new_params(resource_owner, request) do
{:ok, %{resource_owner: resource_owner, request: request}}
end
defp load_client({:ok, %{request: %{"client_id" => client_id}} = params}, config, opts) do
case Applications.get_application(client_id, config) do
nil -> Error.add_error({:ok, params}, Error.invalid_client(opts))
client -> {:ok, Map.put(params, :client, client)}
end
end
defp load_client({:ok, params}, _config, _opts),
do: Error.add_error({:ok, params}, Error.invalid_request())
defp set_defaults({:error, params}), do: {:error, params}
defp set_defaults({:ok, %{request: request, client: client} = params}) do
[redirect_uri | _rest] = String.split(client.redirect_uri)
request =
Map.new()
|> Map.put("redirect_uri", redirect_uri)
|> Map.put("scope", nil)
|> Map.merge(request)
{:ok, Map.put(params, :request, request)}
end
end
| 30.795455 | 95 | 0.664207 |
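# Illustrative sketch (assumed inputs; the config keyword and request map are
# placeholders, not values from the library): calling prehandle_request/4 as
# defined above. A nil resource owner is allowed by the typespec.
request = %{"client_id" => "abc123", "response_type" => "code"}

case ExOauth2Provider.Authorization.Utils.prehandle_request(nil, request, otp_app: :my_app) do
  {:ok, %{client: client, request: prepared_request}} -> {client, prepared_request}
  {:error, params} -> params
end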
f7b179129951a49d21b1c82926589a27fc1c1abd | 329 | ex | Elixir | lib/Structs/Guild/guild_member.ex | BenAlbin/alchemy | c31bce54e13a692acdba3c3b581ff49090da7604 | ["MIT"] | 1 | 2019-02-24T03:13:22.000Z | 2019-02-24T03:13:22.000Z | lib/Structs/Guild/guild_member.ex | appositum/alchemy | 2e4c06e198fa8c824183782508610815395d7c0e | ["MIT"] | null | null | null | lib/Structs/Guild/guild_member.ex | appositum/alchemy | 2e4c06e198fa8c824183782508610815395d7c0e | ["MIT"] | null | null | null |
defmodule Alchemy.Guild.GuildMember do
alias Alchemy.User
import Alchemy.Structs
@moduledoc false
defstruct [:user,
:nick,
:roles,
:joined_at,
:deaf,
:mute]
def from_map(map) do
map
|> field("user", User)
|> to_struct(__MODULE__)
end
end
| 17.315789 | 38 | 0.547112 |
f7b190afe2d206ed75c0a5bba648f1ea79b9ee63 | 243 | exs | Elixir | catcasts/test/catcasts_web/templates/layout/navigation_test.exs | cashmann/phoenix-tutorial | ea37b9d54a79df9bc1351a948eb8f8400c5e62ff | ["MIT"] | null | null | null | catcasts/test/catcasts_web/templates/layout/navigation_test.exs | cashmann/phoenix-tutorial | ea37b9d54a79df9bc1351a948eb8f8400c5e62ff | ["MIT"] | 3 | 2021-03-09T20:36:45.000Z | 2021-05-10T17:47:02.000Z | catcasts/test/catcasts_web/templates/layout/navigation_test.exs | cashmann/phoenix-tutorial | ea37b9d54a79df9bc1351a948eb8f8400c5e62ff | ["MIT"] | null | null | null |
defmodule CatcastsWeb.NavigationTest do
use CatcastsWeb.ConnCase
test "shows a sign in with Google link when not signed in", %{conn: conn} do
conn = get conn, "/"
assert html_response(conn, 200) =~ "Sign in with Google"
end
end
| 27 | 78 | 0.707819 |
f7b1a53910ee1f1239d50f2add5447fd64c318e3 | 537 | ex | Elixir | todo/lib/todo_web/controllers/pokemon_controller.ex | mpeseke/literate-waffle | 1b8da264d841d9f1d076729936b060c79f93e1aa | ["Apache-2.0"] | null | null | null | todo/lib/todo_web/controllers/pokemon_controller.ex | mpeseke/literate-waffle | 1b8da264d841d9f1d076729936b060c79f93e1aa | ["Apache-2.0"] | null | null | null | todo/lib/todo_web/controllers/pokemon_controller.ex | mpeseke/literate-waffle | 1b8da264d841d9f1d076729936b060c79f93e1aa | ["Apache-2.0"] | null | null | null |
defmodule TodoWeb.PokemonController do
@moduledoc """
Manages incoming pokemon information
"""
use TodoWeb, :controller
@manager Application.get_env(:todo, :pokemon_manager)
def get_pokemon(conn, %{"pokemon_name" => pokemon_name}) do
case @manager.get_info(String.downcase(pokemon_name)) do
{:ok, pokemon} ->
conn
|> put_status(200)
|> render("show.json", pokemon: pokemon)
{:error, error} ->
conn
|> put_status(:not_found)
|> json(error)
end
end
end
| 24.409091 | 61 | 0.627561 |
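# Illustrative sketch (assumption: the :pokemon_manager module configured above
# satisfies a contract along these lines; all names here are invented for
# illustration).
defmodule Todo.PokemonManager do
  @callback get_info(String.t()) :: {:ok, map()} | {:error, map()}
end

defmodule Todo.PokemonManager.Static do
  @behaviour Todo.PokemonManager

  @impl true
  def get_info("pikachu"), do: {:ok, %{name: "pikachu", type: "electric"}}
  def get_info(name), do: {:error, %{message: "unknown pokemon", name: name}}
end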
f7b1b033fa1bd5d85b0c4bb9da1dba3aab624467 | 722 | ex | Elixir | lib/boilerplate/web/gettext.ex | ivanmauricio/widget-spike | a084fb75904b81b360443de2910911cf1f98b1cf | ["MIT"] | 2 | 2017-07-24T09:35:32.000Z | 2018-03-04T05:35:54.000Z | lib/boilerplate/web/gettext.ex | ivanmauricio/phoenix-boilerplate | a084fb75904b81b360443de2910911cf1f98b1cf | ["MIT"] | null | null | null | lib/boilerplate/web/gettext.ex | ivanmauricio/phoenix-boilerplate | a084fb75904b81b360443de2910911cf1f98b1cf | ["MIT"] | null | null | null |
defmodule Boilerplate.Web.Gettext do
@moduledoc """
A module providing Internationalization with a gettext-based API.
By using [Gettext](https://hexdocs.pm/gettext),
your module gains a set of macros for translations, for example:
import Boilerplate.Web.Gettext
# Simple translation
gettext "Here is the string to translate"
# Plural translation
ngettext "Here is the string to translate",
"Here are the strings to translate",
3
# Domain-based translation
dgettext "errors", "Here is the error message to translate"
See the [Gettext Docs](https://hexdocs.pm/gettext) for detailed usage.
"""
use Gettext, otp_app: :boilerplate
end
| 28.88 | 72 | 0.684211 |
f7b1ca76f03a3ba085d6f6e13394a7c7f284f4a7 | 453 | ex | Elixir | lesson_06/demo/user_cats_umbrella/apps/user_cats/lib/user_cats/user_context/user.ex | martijnmeeldijk/ip_major | 867f09975aa8db0b308081216ace639c5677446b | [
"BSD-3-Clause"
] | 1 | 2021-09-22T09:56:35.000Z | 2021-09-22T09:56:35.000Z | lesson_06/demo/user_cats_umbrella/apps/user_cats/lib/user_cats/user_context/user.ex | martijnmeeldijk/ip_major | 867f09975aa8db0b308081216ace639c5677446b | [
"BSD-3-Clause"
] | 7 | 2020-03-14T19:30:29.000Z | 2022-02-27T01:20:40.000Z | lesson_06/demo/user_cats_umbrella/apps/user_cats/lib/user_cats/user_context/user.ex | martijnmeeldijk/ip_major | 867f09975aa8db0b308081216ace639c5677446b | [
"BSD-3-Clause"
] | 11 | 2020-02-13T14:52:45.000Z | 2020-08-03T12:18:56.000Z | defmodule UserCats.UserContext.User do
use Ecto.Schema
import Ecto.Changeset
alias UserCats.CatContext.Cat
schema "users" do
field :date_of_birth, :date
field :first_name, :string
field :last_name, :string
has_many :cats, Cat
end
@doc false
def changeset(user, attrs) do
user
|> cast(attrs, [:first_name, :last_name, :date_of_birth])
|> validate_required([:first_name, :last_name, :date_of_birth])
end
end
| 21.571429 | 67 | 0.699779 |
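# Illustrative sketch (assumption: the Cat schema referenced by has_many/2 above
# could look like this; the field names are invented).
defmodule UserCats.CatContext.Cat do
  use Ecto.Schema

  alias UserCats.UserContext.User

  schema "cats" do
    field :name, :string

    belongs_to :user, User
  end
end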
f7b1f485d41eb9a5fb2ea40353449c0bd7498cfd | 1,067 | ex | Elixir | elixir/lib/homework_web/schema.ex | joenarus/web-homework | 9f5e1436ecc731807f9d28e9ab0bb28a72770bf2 | ["MIT"] | null | null | null | elixir/lib/homework_web/schema.ex | joenarus/web-homework | 9f5e1436ecc731807f9d28e9ab0bb28a72770bf2 | ["MIT"] | null | null | null | elixir/lib/homework_web/schema.ex | joenarus/web-homework | 9f5e1436ecc731807f9d28e9ab0bb28a72770bf2 | ["MIT"] | null | null | null |
defmodule HomeworkWeb.Schema do
@moduledoc """
Defines the graphql schema for this project.
"""
use Absinthe.Schema
alias HomeworkWeb.Resolvers.MerchantsResolver
alias HomeworkWeb.Resolvers.TransactionsResolver
alias HomeworkWeb.Resolvers.UsersResolver
alias HomeworkWeb.Resolvers.CompaniesResolver
import_types(HomeworkWeb.Schemas.Types)
query do
@desc "Get all Transactions"
field(:transactions, list_of(:transaction)) do
resolve(&TransactionsResolver.transactions/3)
end
@desc "Get all Users"
field(:users, list_of(:user)) do
resolve(&UsersResolver.users/3)
end
@desc "Get all Merchants"
field(:merchants, list_of(:merchant)) do
resolve(&MerchantsResolver.merchants/3)
end
@desc "Get all Companies"
field(:companies, list_of(:company)) do
resolve(&CompaniesResolver.companies/3)
end
end
mutation do
import_fields(:transaction_mutations)
import_fields(:user_mutations)
import_fields(:merchant_mutations)
import_fields(:company_mutations)
end
end
| 26.02439 | 51 | 0.733833 |
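# Illustrative sketch (assumed field names on the queried type): running a
# document against the schema above with Absinthe.
{:ok, result} = Absinthe.run("{ merchants { id name } }", HomeworkWeb.Schema)
result.data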
f7b1fb4cc57fd393d9f8ff0f8fef14a633a5d5aa | 180 | ex | Elixir | lib/cog/permissions/eval/var.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | ["Apache-2.0"] | 1,003 | 2016-02-23T17:21:12.000Z | 2022-02-20T14:39:35.000Z | lib/cog/permissions/eval/var.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | ["Apache-2.0"] | 906 | 2016-02-22T22:54:19.000Z | 2022-03-11T15:19:43.000Z | lib/cog/permissions/eval/var.ex | matusf/cog | 71708301c7dc570fb0d3498a50f47a70ef957788 | ["Apache-2.0"] | 95 | 2016-02-23T13:42:31.000Z | 2021-11-30T14:39:55.000Z |
defimpl Cog.Eval, for: Piper.Permissions.Ast.Var do
alias Piper.Permissions.Ast
def value_of(%Ast.Var{name: "command"}, context) do
{context.command, context}
end
end
| 18 | 53 | 0.716667 |
f7b2013a11e5f8b686f4e452f4d4b37c1fcc9860 | 550 | exs | Elixir | programming/elixir/hello-world-elixir-web-app-in-3-variants/variant-1-cowboy/mix.exs | NomikOS/learning | 268f94605214f6861ef476ca7573e68c068ccbe5 | ["Unlicense"] | null | null | null | programming/elixir/hello-world-elixir-web-app-in-3-variants/variant-1-cowboy/mix.exs | NomikOS/learning | 268f94605214f6861ef476ca7573e68c068ccbe5 | ["Unlicense"] | null | null | null | programming/elixir/hello-world-elixir-web-app-in-3-variants/variant-1-cowboy/mix.exs | NomikOS/learning | 268f94605214f6861ef476ca7573e68c068ccbe5 | ["Unlicense"] | null | null | null |
defmodule HelloWorld.Mixfile do
use Mix.Project
def project do
[
app: :hello_world,
version: "0.1.0",
elixir: "~> 1.3",
deps: deps()
]
end
def application do
[
mod: {HelloWorld, []},
applications: applications(Mix.env())
]
end
defp applications(:dev), do: applications(:all) ++ [:remix]
defp applications(_), do: [:cowboy]
defp deps do
[
{:cowboy, tag: "2.0.0-pre.3", git: "https://github.com/ninenines/cowboy"},
{:remix, "~> 0.0.1", only: :dev}
]
end
end
| 18.333333 | 80 | 0.550909 |
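# Illustrative sketch (assumption: the HelloWorld module referenced by
# mod: {HelloWorld, []} in the mix.exs above is an application callback along
# these lines; the cowboy listener setup is omitted because it depends on the
# pinned cowboy 2.0 pre-release API).
defmodule HelloWorld do
  use Application

  def start(_type, _args) do
    children = []
    Supervisor.start_link(children, strategy: :one_for_one, name: HelloWorld.Supervisor)
  end
end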
f7b2151562247a51f3e2ee22a4a96c331db9e0df | 3,131 | ex | Elixir | clients/cloud_build/lib/google_api/cloud_build/v1/model/git_hub_enterprise_secrets.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/cloud_build/lib/google_api/cloud_build/v1/model/git_hub_enterprise_secrets.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/cloud_build/lib/google_api/cloud_build/v1/model/git_hub_enterprise_secrets.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.CloudBuild.V1.Model.GitHubEnterpriseSecrets do
@moduledoc """
GitHubEnterpriseSecrets represents the names of all necessary secrets in Secret Manager for a GitHub Enterprise server. Format is: projects//secrets/.
## Attributes
* `oauthClientIdName` (*type:* `String.t`, *default:* `nil`) - The resource name for the OAuth client ID secret in Secret Manager.
* `oauthClientIdVersionName` (*type:* `String.t`, *default:* `nil`) - The resource name for the OAuth client ID secret version in Secret Manager.
* `oauthSecretName` (*type:* `String.t`, *default:* `nil`) - The resource name for the OAuth secret in Secret Manager.
* `oauthSecretVersionName` (*type:* `String.t`, *default:* `nil`) - The resource name for the OAuth secret secret version in Secret Manager.
* `privateKeyName` (*type:* `String.t`, *default:* `nil`) - The resource name for the private key secret.
* `privateKeyVersionName` (*type:* `String.t`, *default:* `nil`) - The resource name for the private key secret version.
* `webhookSecretName` (*type:* `String.t`, *default:* `nil`) - The resource name for the webhook secret in Secret Manager.
* `webhookSecretVersionName` (*type:* `String.t`, *default:* `nil`) - The resource name for the webhook secret secret version in Secret Manager.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:oauthClientIdName => String.t() | nil,
:oauthClientIdVersionName => String.t() | nil,
:oauthSecretName => String.t() | nil,
:oauthSecretVersionName => String.t() | nil,
:privateKeyName => String.t() | nil,
:privateKeyVersionName => String.t() | nil,
:webhookSecretName => String.t() | nil,
:webhookSecretVersionName => String.t() | nil
}
field(:oauthClientIdName)
field(:oauthClientIdVersionName)
field(:oauthSecretName)
field(:oauthSecretVersionName)
field(:privateKeyName)
field(:privateKeyVersionName)
field(:webhookSecretName)
field(:webhookSecretVersionName)
end
defimpl Poison.Decoder, for: GoogleApi.CloudBuild.V1.Model.GitHubEnterpriseSecrets do
def decode(value, options) do
GoogleApi.CloudBuild.V1.Model.GitHubEnterpriseSecrets.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudBuild.V1.Model.GitHubEnterpriseSecrets do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 46.044118 | 152 | 0.717343 |
f7b2199ed52293bc9e4744671e80ee95a47818bc | 3,510 | ex | Elixir | lib/elixir_boilerplate_web/endpoint.ex | Vincent-Legros/mirego-boilerplate | 20d94568133b6dd67862d864a420296217bd7b81 | [
"BSD-3-Clause"
] | null | null | null | lib/elixir_boilerplate_web/endpoint.ex | Vincent-Legros/mirego-boilerplate | 20d94568133b6dd67862d864a420296217bd7b81 | [
"BSD-3-Clause"
] | null | null | null | lib/elixir_boilerplate_web/endpoint.ex | Vincent-Legros/mirego-boilerplate | 20d94568133b6dd67862d864a420296217bd7b81 | [
"BSD-3-Clause"
] | null | null | null | defmodule ElixirBoilerplateWeb.Endpoint do
use Sentry.PlugCapture
use Phoenix.Endpoint, otp_app: :elixir_boilerplate
alias Plug.Conn
@plug_ssl Plug.SSL.init(rewrite_on: [:x_forwarded_proto])
socket("/socket", ElixirBoilerplateWeb.Socket)
plug(:ping)
plug(:canonical_host)
plug(:force_ssl)
plug(:cors)
plug(:basic_auth)
# Serve at "/" the static files from "priv/static" directory.
#
# You should set gzip to true if you are running phoenix.digest
# when deploying your static files in production.
plug(Plug.Static,
at: "/",
from: :elixir_boilerplate,
gzip: true,
only: ~w(assets fonts images favicon.ico robots.txt)
)
# Code reloading can be explicitly enabled under the
# :code_reloader configuration of your endpoint.
if code_reloading? do
socket("/phoenix/live_reload/socket", Phoenix.LiveReloader.Socket)
plug(Phoenix.LiveReloader)
plug(Phoenix.CodeReloader)
end
plug(Plug.RequestId)
plug(Plug.Telemetry, event_prefix: [:phoenix, :endpoint])
plug(
Plug.Parsers,
parsers: [:urlencoded, :multipart, :json],
pass: ["*/*"],
json_decoder: Phoenix.json_library()
)
plug(Sentry.PlugContext)
plug(Plug.MethodOverride)
plug(Plug.Head)
plug(ElixirBoilerplateHealth.Router)
plug(ElixirBoilerplateGraphQL.Router)
plug(:halt_if_sent)
plug(ElixirBoilerplateWeb.Router)
@doc """
Callback invoked for dynamically configuring the endpoint.
It receives the endpoint configuration and checks if
configuration should be loaded from the system environment.
"""
def init(_key, config) do
if config[:load_from_system_env] do
port = Application.get_env(:elixir_boilerplate, __MODULE__)[:http][:port] || raise "expected the PORT environment variable to be set"
{:ok, Keyword.put(config, :http, [:inet6, port: port])}
else
{:ok, config}
end
end
# sobelow_skip ["XSS.SendResp"]
defp ping(%{request_path: "/ping"} = conn, _opts) do
version = Application.get_env(:elixir_boilerplate, :version)
response = Jason.encode!(%{status: "ok", version: version})
conn
|> Conn.put_resp_header("content-type", "application/json")
|> Conn.send_resp(200, response)
|> Conn.halt()
end
defp ping(conn, _opts), do: conn
defp canonical_host(%{request_path: "/health"} = conn, _opts), do: conn
defp canonical_host(conn, _opts) do
opts = PlugCanonicalHost.init(canonical_host: Application.get_env(:elixir_boilerplate, :canonical_host))
PlugCanonicalHost.call(conn, opts)
end
defp force_ssl(%{request_path: "/health"} = conn, _opts), do: conn
defp force_ssl(conn, _opts) do
if Application.get_env(:elixir_boilerplate, :force_ssl) do
Plug.SSL.call(conn, @plug_ssl)
else
conn
end
end
defp cors(conn, _opts) do
opts = Corsica.init(Application.get_env(:elixir_boilerplate, Corsica))
Corsica.call(conn, opts)
end
defp basic_auth(conn, _opts) do
basic_auth_config = Application.get_env(:elixir_boilerplate, :basic_auth)
if basic_auth_config[:username] do
Plug.BasicAuth.basic_auth(conn, basic_auth_config)
else
conn
end
end
# Splitting routers in separate modules has a negative side effect:
# Phoenix.Router does not check the Plug.Conn state and tries to match the
# route even if it was already handled/sent by another router.
defp halt_if_sent(%{state: :sent, halted: false} = conn, _opts), do: halt(conn)
defp halt_if_sent(conn, _opts), do: conn
end
| 28.306452 | 139 | 0.709402 |
f7b21c82056857d933574f3ce158a7ba71028188 | 176 | ex | Elixir | Chapter01/working_with_collections/multiply_with_tail_recursion.ex | sthagen/Mastering-Elixir | 1b52ee79afe6b2ae80767a5e55c2be51df3c4c1d | [
"MIT"
] | 28 | 2018-08-09T05:05:29.000Z | 2022-03-14T06:59:07.000Z | Chapter01/working_with_collections/multiply_with_tail_recursion.ex | sthagen/Mastering-Elixir | 1b52ee79afe6b2ae80767a5e55c2be51df3c4c1d | [
"MIT"
] | 1 | 2019-02-11T09:11:33.000Z | 2019-05-06T06:40:19.000Z | Chapter01/working_with_collections/multiply_with_tail_recursion.ex | sthagen/Mastering-Elixir | 1b52ee79afe6b2ae80767a5e55c2be51df3c4c1d | [
"MIT"
] | 8 | 2018-08-09T14:53:02.000Z | 2020-12-14T19:31:21.000Z | defmodule Recursion do
def multiply(list, accum \\ 1)
def multiply([], accum), do: accum
def multiply([head | tail], accum) do
multiply(tail, head * accum)
end
end
| 22 | 39 | 0.664773 |
f7b24b49c21124b4933cb636e5eba761c97892e6 | 1,604 | exs | Elixir | test/catalog_api/url_test.exs | mbramson/catalog_api | b7d1351196829b0bd58f4346610609da3cb5756c | [
"MIT"
] | 2 | 2018-03-03T18:23:07.000Z | 2018-05-03T00:36:02.000Z | test/catalog_api/url_test.exs | mbramson/catalog_api | b7d1351196829b0bd58f4346610609da3cb5756c | [
"MIT"
] | 1 | 2018-03-04T17:55:11.000Z | 2018-03-04T17:55:11.000Z | test/catalog_api/url_test.exs | mbramson/catalog_api | b7d1351196829b0bd58f4346610609da3cb5756c | [
"MIT"
] | null | null | null | defmodule CatalogApi.UrlTest do
use ExUnit.Case
doctest CatalogApi.Url, except: [url_for: 2], import: true
alias CatalogApi.Url
import CatalogApi.FormatHelper
describe "url_for/2" do
test "produces correct url with no extra parameters" do
url = Url.url_for("view_cart")
[base_url, params] = url |> String.split("?")
assert base_url == "https://test-user.dev.catalogapi.com/v1/rest/view_cart"
decoded_params = URI.decode_query(params)
assert :ok = decoded_params["creds_checksum"] |> is_valid_checksum
assert :ok = decoded_params["creds_datetime"] |> is_iso8601_datetime_string
assert :ok = decoded_params["creds_uuid"] |> is_valid_uuid
end
test "produces correct url with extra parameters" do
extra_params = %{socket_id: "123", catalog_item_id: "456"}
url = Url.url_for("view_item", extra_params)
[base_url, params] = url |> String.split("?")
assert base_url == "https://test-user.dev.catalogapi.com/v1/rest/view_item"
decoded_params = URI.decode_query(params)
assert :ok = decoded_params["creds_checksum"] |> is_valid_checksum
assert :ok = decoded_params["creds_datetime"] |> is_iso8601_datetime_string
assert :ok = decoded_params["creds_uuid"] |> is_valid_uuid
assert "123" == decoded_params["socket_id"]
assert "456" == decoded_params["catalog_item_id"]
end
end
describe "base_url/0" do
test "compiles using testing application environment" do
assert "https://test-user.dev.catalogapi.com/v1/rest/view_cart" == Url.base_url("view_cart")
end
end
end
| 41.128205 | 98 | 0.699501 |
f7b24c5f1562913f9fd06d05286115db1e1bd3fe | 990 | ex | Elixir | lib/encryption.ex | danielberkompas/phoenix_ecto_encryption_sample | 1fd0f337eb214cb6a5b7348a70ad27e50968d851 | [
"MIT"
] | 4 | 2015-08-11T04:01:14.000Z | 2019-09-17T04:47:02.000Z | lib/encryption.ex | danielberkompas/phoenix_ecto_encryption_sample | 1fd0f337eb214cb6a5b7348a70ad27e50968d851 | [
"MIT"
] | null | null | null | lib/encryption.ex | danielberkompas/phoenix_ecto_encryption_sample | 1fd0f337eb214cb6a5b7348a70ad27e50968d851 | [
"MIT"
] | null | null | null | defmodule Encryption do
use Application
# See http://elixir-lang.org/docs/stable/elixir/Application.html
# for more information on OTP Applications
def start(_type, _args) do
import Supervisor.Spec, warn: false
children = [
# Start the endpoint when the application starts
supervisor(Encryption.Endpoint, []),
# Start the Ecto repository
worker(Encryption.Repo, []),
# Here you could define other workers and supervisors as children
# worker(Encryption.Worker, [arg1, arg2, arg3]),
]
# See http://elixir-lang.org/docs/stable/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: Encryption.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
Encryption.Endpoint.config_change(changed, removed)
:ok
end
end
| 31.935484 | 71 | 0.715152 |
f7b25ac93f9c214715cbc8ddc04fdbc107b75d86 | 578 | exs | Elixir | test/event_bus/managers/topic_test.exs | yknx4/event_bus | 49027b459afc325ebf71a1e5001fb8718b4e7d80 | [
"MIT"
] | 557 | 2018-01-24T13:34:57.000Z | 2022-03-31T20:19:09.000Z | test/event_bus/managers/topic_test.exs | yknx4/event_bus | 49027b459afc325ebf71a1e5001fb8718b4e7d80 | [
"MIT"
] | 62 | 2018-02-05T05:14:11.000Z | 2022-02-26T13:04:24.000Z | test/event_bus/managers/topic_test.exs | yknx4/event_bus | 49027b459afc325ebf71a1e5001fb8718b4e7d80 | [
"MIT"
] | 37 | 2018-02-23T16:49:15.000Z | 2021-09-03T18:11:06.000Z | defmodule EventBus.Manager.TopicTest do
use ExUnit.Case, async: false
alias EventBus.Manager.Topic
doctest Topic
setup do
on_exit(fn ->
topics = [:t1, :t2]
Enum.each(topics, fn topic -> Topic.unregister(topic) end)
end)
:ok
end
test "exist?" do
topic = :metrics_received_1
Topic.register(topic)
assert Topic.exist?(topic)
end
test "register_topic" do
assert :ok == Topic.register(:t1)
end
test "unregister_topic" do
topic = :t2
Topic.register(topic)
assert :ok == Topic.unregister(topic)
end
end
| 17 | 64 | 0.650519 |
f7b27838de04f4c090470f6dcfe47a005612dbb7 | 1,019 | ex | Elixir | lib/insta_api/application.ex | kevinmartiniano/elixir-insta-api | 5025e02f111a4952279839d92f64f960ca08026a | [
"Apache-2.0"
] | 2 | 2021-01-07T23:46:50.000Z | 2021-01-08T12:16:00.000Z | lib/insta_api/application.ex | kevinmartiniano/elixir-insta-api | 5025e02f111a4952279839d92f64f960ca08026a | [
"Apache-2.0"
] | null | null | null | lib/insta_api/application.ex | kevinmartiniano/elixir-insta-api | 5025e02f111a4952279839d92f64f960ca08026a | [
"Apache-2.0"
] | null | null | null | defmodule InstaApi.Application do
# See https://hexdocs.pm/elixir/Application.html
# for more information on OTP Applications
@moduledoc false
use Application
def start(_type, _args) do
children = [
# Start the Ecto repository
InstaApi.Repo,
# Start the Telemetry supervisor
InstaApiWeb.Telemetry,
# Start the PubSub system
{Phoenix.PubSub, name: InstaApi.PubSub},
# Start the Endpoint (http/https)
InstaApiWeb.Endpoint
# Start a worker by calling: InstaApi.Worker.start_link(arg)
# {InstaApi.Worker, arg}
]
# See https://hexdocs.pm/elixir/Supervisor.html
# for other strategies and supported options
opts = [strategy: :one_for_one, name: InstaApi.Supervisor]
Supervisor.start_link(children, opts)
end
# Tell Phoenix to update the endpoint configuration
# whenever the application is updated.
def config_change(changed, _new, removed) do
InstaApiWeb.Endpoint.config_change(changed, removed)
:ok
end
end
| 29.114286 | 66 | 0.704612 |
f7b2b9dac20ba1267bdae55c84a99c8bd36197bf | 853 | ex | Elixir | lib/tw/v1_1/coordinates.ex | en30/tw | 10c705953e9b3bf97abf9b4c221486976eac32d4 | [
"MIT"
] | null | null | null | lib/tw/v1_1/coordinates.ex | en30/tw | 10c705953e9b3bf97abf9b4c221486976eac32d4 | [
"MIT"
] | null | null | null | lib/tw/v1_1/coordinates.ex | en30/tw | 10c705953e9b3bf97abf9b4c221486976eac32d4 | [
"MIT"
] | null | null | null | defmodule Tw.V1_1.Coordinates do
@moduledoc """
Coordinates data structure and related functions.
https://developer.twitter.com/en/docs/twitter-api/v1/data-dictionary/object-model/geo
"""
@enforce_keys [:coordinates, :type]
defstruct([:coordinates, :type])
@typedoc """
> | field | description |
> | - | - |
> | `coordinates` | The longitude and latitude of the Tweet’s location, as a collection in the form [longitude, latitude]. Example: `[-97.51087576,35.46500176] `. |
> | `type` | The type of data encoded in the coordinates property. This will be “Point” for Tweet coordinates fields. Example: `\"Point\" `. |
>
"""
@type t :: %__MODULE__{coordinates: list(float), type: binary}
@spec decode!(map) :: t
@doc """
Decode JSON-decoded map into `t:t/0`
"""
def decode!(json), do: struct(__MODULE__, json)
end
| 35.541667 | 167 | 0.66823 |
f7b2d1a99ac79b94bb8654d4e3a3a20c8eb4fdd6 | 1,796 | ex | Elixir | phoenix0/test/support/model_case.ex | JacobOscarson/elexir-0 | f4e67bb4a68c6a0cba5b410d80427e721ac7826a | [
"MIT"
] | null | null | null | phoenix0/test/support/model_case.ex | JacobOscarson/elexir-0 | f4e67bb4a68c6a0cba5b410d80427e721ac7826a | [
"MIT"
] | null | null | null | phoenix0/test/support/model_case.ex | JacobOscarson/elexir-0 | f4e67bb4a68c6a0cba5b410d80427e721ac7826a | [
"MIT"
] | null | null | null | defmodule Phoenix0.ModelCase do
@moduledoc """
This module defines the test case to be used by
model tests.
You may define functions here to be used as helpers in
your model tests. See `errors_on/2`'s definition as reference.
Finally, if the test case interacts with the database,
it cannot be async. For this reason, every test runs
inside a transaction which is reset at the beginning
of the test unless the test case is marked as async.
"""
use ExUnit.CaseTemplate
using do
quote do
alias Phoenix0.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import Phoenix0.ModelCase
end
end
setup tags do
:ok = Ecto.Adapters.SQL.Sandbox.checkout(Phoenix0.Repo)
unless tags[:async] do
Ecto.Adapters.SQL.Sandbox.mode(Phoenix0.Repo, {:shared, self()})
end
:ok
end
@doc """
Helper for returning list of errors in a struct when given certain data.
## Examples
Given a User schema that lists `:name` as a required field and validates
`:password` to be safe, it would return:
iex> errors_on(%User{}, %{password: "password"})
[password: "is unsafe", name: "is blank"]
You could then write your assertion like:
assert {:password, "is unsafe"} in errors_on(%User{}, %{password: "password"})
You can also create the changeset manually and retrieve the errors
field directly:
iex> changeset = User.changeset(%User{}, password: "password")
iex> {:password, "is unsafe"} in changeset.errors
true
"""
def errors_on(struct, data) do
struct.__struct__.changeset(struct, data)
|> Ecto.Changeset.traverse_errors(&Phoenix0.ErrorHelpers.translate_error/1)
|> Enum.flat_map(fn {key, errors} -> for msg <- errors, do: {key, msg} end)
end
end
| 27.212121 | 84 | 0.685969 |
f7b317ec69735d1f9b6e740bafa4f97ee39be892 | 1,957 | ex | Elixir | lib/credo/check/consistency/unused_variable_names/collector.ex | iagomoreira/credo | 0a2a89442d3633799b457a285aba851dd6b3d04f | [
"MIT"
] | null | null | null | lib/credo/check/consistency/unused_variable_names/collector.ex | iagomoreira/credo | 0a2a89442d3633799b457a285aba851dd6b3d04f | [
"MIT"
] | null | null | null | lib/credo/check/consistency/unused_variable_names/collector.ex | iagomoreira/credo | 0a2a89442d3633799b457a285aba851dd6b3d04f | [
"MIT"
] | 1 | 2020-09-25T11:48:49.000Z | 2020-09-25T11:48:49.000Z | defmodule Credo.Check.Consistency.UnusedVariableNames.Collector do
@moduledoc false
use Credo.Check.Consistency.Collector
alias Credo.Code
def collect_matches(source_file, _params) do
unused_variable_recorder = &record_unused_variable/2
Code.prewalk(source_file, &traverse(unused_variable_recorder, &1, &2), %{})
end
def find_locations_not_matching(expected, source_file) do
location_recorder = &record_not_matching(expected, &1, &2)
source_file
|> Code.prewalk(&traverse(location_recorder, &1, &2), [])
|> Enum.reverse()
end
defp traverse(callback, {:=, _, params} = ast, acc) do
{ast, reduce_unused_variables(params, callback, acc)}
end
defp traverse(callback, {def, _, [{_, _, params} | _]} = ast, acc)
when def in [:def, :defp] do
{ast, reduce_unused_variables(params, callback, acc)}
end
defp traverse(callback, {:->, _, [params | _]} = ast, acc) do
{ast, reduce_unused_variables(params, callback, acc)}
end
defp traverse(_callback, ast, acc), do: {ast, acc}
defp reduce_unused_variables(ast, callback, acc) do
Enum.reduce(ast, acc, &if(unused_variable_name?(&1), do: callback.(&1, &2), else: &2))
end
defp unused_variable_name?({:_, _, _}), do: true
defp unused_variable_name?({name, _, _}) when is_atom(name),
do: String.starts_with?(Atom.to_string(name), "_")
defp unused_variable_name?(_), do: false
defp record_unused_variable({:_, _, _}, acc), do: Map.update(acc, :anonymous, 1, &(&1 + 1))
defp record_unused_variable(_, acc), do: Map.update(acc, :meaningful, 1, &(&1 + 1))
defp record_not_matching(expected, {name, meta, _}, acc) do
case {expected, Atom.to_string(name)} do
{:anonymous, "_" <> rest = trigger} when rest != "" ->
[[line_no: meta[:line], trigger: trigger] | acc]
{:meaningful, "_" = trigger} ->
[[line_no: meta[:line], trigger: trigger] | acc]
_ ->
acc
end
end
end
| 30.578125 | 93 | 0.659172 |
f7b321d5a8288b5405e74c82b6bdb967ce46dccf | 1,928 | ex | Elixir | lib/util.ex | srfsh/pointers | ed1b4d743c410ba192fc5b92385e2fe39c623780 | [
"Apache-2.0"
] | null | null | null | lib/util.ex | srfsh/pointers | ed1b4d743c410ba192fc5b92385e2fe39c623780 | [
"Apache-2.0"
] | null | null | null | lib/util.ex | srfsh/pointers | ed1b4d743c410ba192fc5b92385e2fe39c623780 | [
"Apache-2.0"
] | null | null | null | defmodule Pointers.Util do
@moduledoc false
@bad_source "You must provide a binary :source option."
@bad_otp_app "You must provide a valid atom :otp_app option."
# maps a tuple flip over a list
def flip(list) when is_list(list),
do: Enum.map(list, fn {k, v} -> {v, k} end)
def add_binaries(list) when is_list(list) do
Enum.flat_map(list, fn {k, v} when is_atom(k) and is_atom(v) ->
[{k, v}, {Atom.to_string(k), Atom.to_string(v)}]
end)
end
# renames keys in a map or keyword list
def rename(map, changes) when is_map(map) do
Enum.reduce(changes, map, fn {k, l}, map ->
case map do
%{^k => v} -> Map.put(Map.delete(map, k), l, v)
_ -> map
end
end)
end
def rename(kw, changes) when is_list(kw) do
Enum.reduce(changes, kw, fn {k, l}, kw ->
case Keyword.fetch(kw, k) do
{:ok, v} -> [{l, v} | Keyword.delete(kw, k)]
_ -> kw
end
end)
end
# option processing
def get_source(opts), do: check_source(Keyword.get(opts, :source))
defp check_source(x) when is_binary(x), do: x
defp check_source(_), do: raise ArgumentError, message: @bad_source
def get_otp_app(opts), do: check_otp_app(Keyword.get(opts, :otp_app))
defp check_otp_app(x) when is_atom(x), do: x
defp check_otp_app(_), do: raise ArgumentError, message: @bad_otp_app
# expands to putting the attribute if it does not already exist
def put_new_attribute(module, attribute, value) do
if not Module.has_attribute?(module, attribute) do
quote do
Module.put_attribute(unquote(module), unquote(attribute), unquote(value))
end
end
end
# defaults the foreign key type to ULID
def schema_foreign_key_type(module),
do: put_new_attribute(module, :foreign_key_type, Pointers.ULID)
def pointers_clause(arg, value) do
quote do
def __pointers__(unquote(arg)), do: unquote(value)
end
end
end
| 28.776119 | 81 | 0.658195 |
f7b3434f109ad349b21455e287e98bf3979e8211 | 73 | ex | Elixir | lib/mysimplelist_web/views/layout_view.ex | ScorpionResponse/mysimplelist | 3c792373cc372ab5e196fe109b9dae68b97b7220 | [
"Apache-2.0"
] | null | null | null | lib/mysimplelist_web/views/layout_view.ex | ScorpionResponse/mysimplelist | 3c792373cc372ab5e196fe109b9dae68b97b7220 | [
"Apache-2.0"
] | null | null | null | lib/mysimplelist_web/views/layout_view.ex | ScorpionResponse/mysimplelist | 3c792373cc372ab5e196fe109b9dae68b97b7220 | [
"Apache-2.0"
] | null | null | null | defmodule MysimplelistWeb.LayoutView do
use MysimplelistWeb, :view
end
| 18.25 | 39 | 0.835616 |
f7b357ceeb93fedae1cea14a0c9968e66310c0b8 | 1,956 | ex | Elixir | clients/firestore/lib/google_api/firestore/v1beta1/model/google_firestore_admin_v1beta1_list_indexes_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/firestore/lib/google_api/firestore/v1beta1/model/google_firestore_admin_v1beta1_list_indexes_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/firestore/lib/google_api/firestore/v1beta1/model/google_firestore_admin_v1beta1_list_indexes_response.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1ListIndexesResponse do
@moduledoc """
The response for FirestoreAdmin.ListIndexes.
## Attributes
* `indexes` (*type:* `list(GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1Index.t)`, *default:* `nil`) - The indexes.
* `nextPageToken` (*type:* `String.t`, *default:* `nil`) - The standard List next-page token.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:indexes =>
list(GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1Index.t()),
:nextPageToken => String.t()
}
field(:indexes,
as: GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1Index,
type: :list
)
field(:nextPageToken)
end
defimpl Poison.Decoder,
for: GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1ListIndexesResponse do
def decode(value, options) do
GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1ListIndexesResponse.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for: GoogleApi.Firestore.V1beta1.Model.GoogleFirestoreAdminV1beta1ListIndexesResponse do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 32.6 | 135 | 0.744888 |
f7b37d277bbbe8ee6937a10448f013a50119b47f | 928 | exs | Elixir | config/config.exs | Catharz/fireball | bb9f123c0d2d71b16eda01d3838565e6e762de20 | [
"MIT"
] | null | null | null | config/config.exs | Catharz/fireball | bb9f123c0d2d71b16eda01d3838565e6e762de20 | [
"MIT"
] | null | null | null | config/config.exs | Catharz/fireball | bb9f123c0d2d71b16eda01d3838565e6e762de20 | [
"MIT"
] | null | null | null | # This file is responsible for configuring your application
# and its dependencies with the aid of the Mix.Config module.
#
# This configuration file is loaded before any dependency and
# is restricted to this project.
use Mix.Config
# General application configuration
config :fireball, ecto_repos: [Fireball.Repo]
# Configures the endpoint
config :fireball, FireballWeb.Endpoint,
url: [host: "localhost"],
secret_key_base: "IPNQJwNSIhefI0xlS+gfVRk+rj8zUZ7sDUGq9Vub472hZAj0LTTZopth5QshHSVO",
render_errors: [view: FireballWeb.ErrorView, accepts: ~w(html json)],
pubsub: [name: Fireball.PubSub, adapter: Phoenix.PubSub.PG2]
# Configures Elixir's Logger
config :logger, :console,
format: "$time $metadata[$level] $message\n",
metadata: [:user_id]
# Import environment specific config. This must remain at the bottom
# of this file so it overrides the configuration defined above.
import_config "#{Mix.env()}.exs"
| 35.692308 | 86 | 0.772629 |
f7b37d4ea3f4f5f0bc30cc0a48bf12afe83e5ab6 | 2,020 | ex | Elixir | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p1beta1_color_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p1beta1_color_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/vision/lib/google_api/vision/v1/model/google_cloud_vision_v1p1beta1_color_info.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p1beta1ColorInfo do
@moduledoc """
Color information consists of RGB channels, score, and the fraction of
the image that the color occupies in the image.
## Attributes
* `color` (*type:* `GoogleApi.Vision.V1.Model.Color.t`, *default:* `nil`) - RGB components of the color.
* `pixelFraction` (*type:* `number()`, *default:* `nil`) - The fraction of pixels the color occupies in the image.
Value in range [0, 1].
* `score` (*type:* `number()`, *default:* `nil`) - Image-specific score for this color. Value in range [0, 1].
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:color => GoogleApi.Vision.V1.Model.Color.t(),
:pixelFraction => number(),
:score => number()
}
field(:color, as: GoogleApi.Vision.V1.Model.Color)
field(:pixelFraction)
field(:score)
end
defimpl Poison.Decoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p1beta1ColorInfo do
def decode(value, options) do
GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p1beta1ColorInfo.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Vision.V1.Model.GoogleCloudVisionV1p1beta1ColorInfo do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 36.727273 | 118 | 0.722772 |
f7b3842a3d5e903fe86db8e3ad61d996b4df7469 | 4,820 | ex | Elixir | web/controllers/ref_monitor_controller.ex | crosscloudci/ci_status_repository | 335e8b89bbf59e6cf63e49541ce3ea6b60167e52 | [
"Apache-2.0"
] | 2 | 2019-03-05T16:29:10.000Z | 2020-01-17T14:11:48.000Z | web/controllers/ref_monitor_controller.ex | crosscloudci/ci_status_repository | 335e8b89bbf59e6cf63e49541ce3ea6b60167e52 | [
"Apache-2.0"
] | 3 | 2019-03-18T20:26:48.000Z | 2020-06-25T14:31:13.000Z | web/controllers/ref_monitor_controller.ex | crosscloudci/ci_status_repository | 335e8b89bbf59e6cf63e49541ce3ea6b60167e52 | [
"Apache-2.0"
] | 1 | 2018-06-16T15:32:25.000Z | 2018-06-16T15:32:25.000Z | defmodule CncfDashboardApi.RefMonitorController do
use CncfDashboardApi.Web, :controller
alias CncfDashboardApi.RefMonitor
def index(conn, _params) do
# ref_monitor = Repo.all(RefMonitor)
ref_monitor = CncfDashboardApi.Repo.all(from ref_monitors in CncfDashboardApi.RefMonitor,
left_join: dashboard_badge_statuses in assoc(ref_monitors, :dashboard_badge_statuses),
left_join: cloud in assoc(dashboard_badge_statuses, :cloud),
preload: [dashboard_badge_statuses: dashboard_badge_statuses,
dashboard_badge_statuses: {dashboard_badge_statuses, cloud: cloud },
] )
render(conn, "index.json", ref_monitor: ref_monitor)
end
def create(conn, %{"ref_monitor" => ref_monitor_params}) do
changeset = RefMonitor.changeset(%RefMonitor{}, ref_monitor_params)
case Repo.insert(changeset) do
{:ok, ref_monitor} ->
ref_monitor = CncfDashboardApi.Repo.all(from ref_monitors in CncfDashboardApi.RefMonitor,
left_join: dashboard_badge_statuses in assoc(ref_monitors, :dashboard_badge_statuses),
left_join: cloud in assoc(dashboard_badge_statuses, :cloud),
where: ref_monitors.id == ^ref_monitor.id,
preload: [dashboard_badge_statuses: dashboard_badge_statuses,
dashboard_badge_statuses: {dashboard_badge_statuses, cloud: cloud },
] )
|> List.first
conn
|> put_status(:created)
|> put_resp_header("location", ref_monitor_path(conn, :show, ref_monitor))
|> render("show.json", ref_monitor: ref_monitor)
{:error, changeset} ->
conn
|> put_status(:unprocessable_entity)
|> render(CncfDashboardApi.ChangesetView, "error.json", changeset: changeset)
end
end
def show(conn, %{"id" => id}) do
# ref_monitor = Repo.get!(RefMonitor, id)
ref_monitor = CncfDashboardApi.Repo.all(from ref_monitors in CncfDashboardApi.RefMonitor,
left_join: dashboard_badge_statuses in assoc(ref_monitors, :dashboard_badge_statuses),
left_join: cloud in assoc(dashboard_badge_statuses, :cloud),
where: ref_monitors.id == ^id,
preload: [dashboard_badge_statuses: dashboard_badge_statuses,
dashboard_badge_statuses: {dashboard_badge_statuses, cloud: cloud },
] )
|> List.first
case ref_monitor do
%{} ->
render(conn, "show.json", ref_monitor: ref_monitor)
nil ->
conn
|> put_status(404)
|> render(CncfDashboardApi.ErrorView, "404.json", ref_monitor: ref_monitor)
end
end
def update(conn, %{"id" => id, "ref_monitor" => ref_monitor_params}) do
# ref_monitor = Repo.get!(RefMonitor, id)
ref_monitor = CncfDashboardApi.Repo.all(from ref_monitors in CncfDashboardApi.RefMonitor,
left_join: dashboard_badge_statuses in assoc(ref_monitors, :dashboard_badge_statuses),
left_join: cloud in assoc(dashboard_badge_statuses, :cloud),
where: ref_monitors.id == ^id,
preload: [dashboard_badge_statuses: dashboard_badge_statuses,
dashboard_badge_statuses: {dashboard_badge_statuses, cloud: cloud },
] )
|> List.first
changeset = RefMonitor.changeset(ref_monitor, ref_monitor_params)
case Repo.update(changeset) do
{:ok, ref_monitor} ->
render(conn, "show.json", ref_monitor: ref_monitor)
{:error, changeset} ->
conn
|> put_status(:unprocessable_entity)
|> render(CncfDashboardApi.ChangesetView, "error.json", changeset: changeset)
end
end
def delete(conn, %{"id" => id}) do
ref_monitor = Repo.get!(RefMonitor, id)
# Here we use delete! (with a bang) because we expect
# it to always work (and if it does not, it will raise).
Repo.delete!(ref_monitor)
send_resp(conn, :no_content, "")
end
end
| 51.827957 | 134 | 0.547095 |
f7b3bdaef385c6aad724849480e278a14912aa52 | 254 | exs | Elixir | priv/repo/migrations/20201028045249_update_character_sessions.exs | Bentheburrito/caidata | 31ee94691293b926de77194585714d75c80fea0d | [
"MIT"
] | null | null | null | priv/repo/migrations/20201028045249_update_character_sessions.exs | Bentheburrito/caidata | 31ee94691293b926de77194585714d75c80fea0d | [
"MIT"
] | null | null | null | priv/repo/migrations/20201028045249_update_character_sessions.exs | Bentheburrito/caidata | 31ee94691293b926de77194585714d75c80fea0d | [
"MIT"
] | null | null | null | defmodule CAIData.Repo.Migrations.UpdateCharacterSessions do
use Ecto.Migration
def change do
alter table(:character_sessions) do
add :nanites_destroyed, :integer
add :nanites_lost, :integer
add :br_ups, {:array, :string}
end
end
end
| 21.166667 | 60 | 0.748031 |
f7b3d1634e91830ce471ab57f15cd2fa226a47f2 | 583 | exs | Elixir | test/views/error_view_test.exs | davidgrupp/Option-Calc | 0b0e4607d827412e207d5fc5102aee001e93a821 | [
"Apache-2.0"
] | null | null | null | test/views/error_view_test.exs | davidgrupp/Option-Calc | 0b0e4607d827412e207d5fc5102aee001e93a821 | [
"Apache-2.0"
] | null | null | null | test/views/error_view_test.exs | davidgrupp/Option-Calc | 0b0e4607d827412e207d5fc5102aee001e93a821 | [
"Apache-2.0"
] | null | null | null | defmodule OptionCalc.ErrorViewTest do
use OptionCalc.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(OptionCalc.ErrorView, "404.html", []) ==
"Page not found"
end
test "render 500.html" do
assert render_to_string(OptionCalc.ErrorView, "500.html", []) ==
"Internal server error"
end
test "render any other" do
assert render_to_string(OptionCalc.ErrorView, "505.html", []) ==
"Internal server error"
end
end
| 26.5 | 68 | 0.684391 |
f7b44ab65500564c3b35bb6e4cb1d861440597e0 | 130 | exs | Elixir | test/test_helper.exs | verrchu/gruf | dc70f09c82418aac02d3fe742098ef69b47cc59f | [
"MIT"
] | null | null | null | test/test_helper.exs | verrchu/gruf | dc70f09c82418aac02d3fe742098ef69b47cc59f | [
"MIT"
] | null | null | null | test/test_helper.exs | verrchu/gruf | dc70f09c82418aac02d3fe742098ef69b47cc59f | [
"MIT"
] | null | null | null | {:ok, _apps} = Application.ensure_all_started(:propcheck)
[{_mod, _bin}] = Code.require_file("util.exs", "./test")
ExUnit.start()
| 32.5 | 57 | 0.7 |
f7b4827e06ec05756a63045dfd8083cb6e8f77ce | 1,674 | ex | Elixir | template/$PROJECT_NAME$/test/support/data_case.ex | paridin/gen_template_dino | fa584b063558b3b38b7584fb951a7822a818b28b | [
"Apache-2.0"
] | null | null | null | template/$PROJECT_NAME$/test/support/data_case.ex | paridin/gen_template_dino | fa584b063558b3b38b7584fb951a7822a818b28b | [
"Apache-2.0"
] | null | null | null | template/$PROJECT_NAME$/test/support/data_case.ex | paridin/gen_template_dino | fa584b063558b3b38b7584fb951a7822a818b28b | [
"Apache-2.0"
] | null | null | null | defmodule <%= @project_name_camel_case %>.DataCase do
@moduledoc """
This module defines the setup for tests requiring
access to the application's data layer.
You may define functions here to be used as helpers in
your tests.
Finally, if the test case interacts with the database,
we enable the SQL sandbox, so changes done to the database
are reverted at the end of every test. If you are using
PostgreSQL, you can even run database tests asynchronously
by setting `use <%= @project_name_camel_case %>.DataCase, async: true`, although
this option is not recommended for other databases.
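  A minimal sketch of a test case built on this module (the module and table
  names below are purely illustrative):
      defmodule <%= @project_name_camel_case %>.SomeContextTest do
        use <%= @project_name_camel_case %>.DataCase, async: true
        test "runs inside the SQL sandbox" do
          assert [] == Repo.all(from x in "some_table", select: x.id)
        end
      end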
"""
use ExUnit.CaseTemplate
alias Ecto.Adapters.SQL.Sandbox
using do
quote do
alias <%= @project_name_camel_case %>.Repo
import Ecto
import Ecto.Changeset
import Ecto.Query
import <%= @project_name_camel_case %>.DataCase
end
end
setup tags do
:ok = Sandbox.checkout(<%= @project_name_camel_case %>.Repo)
unless tags[:async] do
Sandbox.mode(<%= @project_name_camel_case %>.Repo, {:shared, self()})
end
:ok
end
@doc """
A helper that transforms changeset errors into a map of messages.
assert {:error, changeset} = Accounts.create_user(%{password: "short"})
assert "password is too short" in errors_on(changeset).password
assert %{password: ["password is too short"]} = errors_on(changeset)
"""
def errors_on(changeset) do
Ecto.Changeset.traverse_errors(changeset, fn {message, opts} ->
Regex.replace(~r"%{(\w+)}", message, fn _, key ->
opts |> Keyword.get(String.to_existing_atom(key), key) |> to_string()
end)
end)
end
end
| 29.368421 | 82 | 0.685783 |
f7b4ba2635b60881f24b563b5129394df6300f5f | 1,248 | exs | Elixir | implementations/elixir/ockam/ockam/test/ockam/transport/tcp/address_test.exs | piotr-cla/ockam | 914cd3ec1620181ac64ad2fb13d843f3eb184464 | [
"Apache-2.0"
] | 1 | 2021-08-25T03:33:52.000Z | 2021-08-25T03:33:52.000Z | implementations/elixir/ockam/ockam/test/ockam/transport/tcp/address_test.exs | piotr-cla/ockam | 914cd3ec1620181ac64ad2fb13d843f3eb184464 | [
"Apache-2.0"
] | 110 | 2021-08-06T17:16:52.000Z | 2022-03-28T17:20:54.000Z | implementations/elixir/ockam/ockam/test/ockam/transport/tcp/address_test.exs | piotr-cla/ockam | 914cd3ec1620181ac64ad2fb13d843f3eb184464 | [
"Apache-2.0"
] | null | null | null | defmodule Ockam.Transport.TCPAddress.Tests do
use ExUnit.Case, async: true
doctest Ockam.Transport.TCPAddress
alias Ockam.Address
alias Ockam.Transport.TCPAddress
@tcp 1
@four_thousand_encoded <<160, 15>>
@localhost_binary <<0, 127, 0, 0, 1>>
describe "Ockam.Transport.TCPAddress" do
test "1 is the TCP address type" do
address = %TCPAddress{ip: {127, 0, 0, 1}, port: 4000}
assert 1 == Address.type(address)
end
test "can be serialized and then deserialized back to the original address" do
address = %TCPAddress{ip: {127, 0, 0, 1}, port: 4000}
serialized = Ockam.Serializable.serialize(address)
deserialized = TCPAddress.deserialize(serialized)
assert address === deserialized
end
test "Serializing an address produces expected binary" do
address = %TCPAddress{ip: {127, 0, 0, 1}, port: 4000}
assert %{type: @tcp, value: <<0, 127, 0, 0, 1, 160, 15>>} ==
Ockam.Serializable.serialize(address)
end
test "Deserializing an address produces expected struct" do
serialized = [@localhost_binary, @four_thousand_encoded]
assert %TCPAddress{ip: {127, 0, 0, 1}, port: 4000} == TCPAddress.deserialize(serialized)
end
end
end
| 32 | 94 | 0.673077 |
f7b4d4287f637a261c24fb153a0242d4b2d52775 | 9,658 | exs | Elixir | test/phoenix_live_view_test.exs | davydog187/phoenix_live_view | c15d2811a7e91f71f448f4d9d1b9a520e9d0bf91 | [
"MIT"
] | null | null | null | test/phoenix_live_view_test.exs | davydog187/phoenix_live_view | c15d2811a7e91f71f448f4d9d1b9a520e9d0bf91 | [
"MIT"
] | null | null | null | test/phoenix_live_view_test.exs | davydog187/phoenix_live_view | c15d2811a7e91f71f448f4d9d1b9a520e9d0bf91 | [
"MIT"
] | null | null | null | defmodule Phoenix.LiveViewUnitTest do
use ExUnit.Case, async: true
import Phoenix.LiveView
alias Phoenix.LiveView.{Utils, Socket}
alias Phoenix.LiveViewTest.Endpoint
@socket Utils.configure_socket(
%Socket{
endpoint: Endpoint,
router: Phoenix.LiveViewTest.Router,
view: Phoenix.LiveViewTest.ParamCounterLive,
root_view: Phoenix.LiveViewTest.ParamCounterLive
},
%{connect_params: %{}, connect_info: %{}},
nil,
%{},
URI.parse("https://www.example.com")
)
describe "flash" do
test "get and put" do
assert put_flash(@socket, :hello, "world").assigns.flash == %{"hello" => "world"}
assert put_flash(@socket, :hello, :world).assigns.flash == %{"hello" => :world}
end
test "clear" do
socket = put_flash(@socket, :hello, "world")
assert clear_flash(socket).assigns.flash == %{}
assert clear_flash(socket, :hello).assigns.flash == %{}
assert clear_flash(socket, "hello").assigns.flash == %{}
assert clear_flash(socket, "other").assigns.flash == %{"hello" => "world"}
end
end
describe "get_connect_params" do
test "raises when not in mounting state and connected" do
socket = Utils.post_mount_prune(%{@socket | connected?: true})
assert_raise RuntimeError, ~r/attempted to read connect_params/, fn ->
get_connect_params(socket)
end
end
test "raises when not in mounting state and disconnected" do
socket = Utils.post_mount_prune(%{@socket | connected?: false})
assert_raise RuntimeError, ~r/attempted to read connect_params/, fn ->
get_connect_params(socket)
end
end
test "returns nil when disconnected" do
socket = %{@socket | connected?: false}
assert get_connect_params(socket) == nil
end
test "returns params connected and mounting" do
socket = %{@socket | connected?: true}
assert get_connect_params(socket) == %{}
end
end
describe "get_connect_info" do
test "raises when not in mounting state and connected" do
socket = Utils.post_mount_prune(%{@socket | connected?: true})
assert_raise RuntimeError, ~r/attempted to read connect_info/, fn ->
get_connect_info(socket)
end
end
test "raises when not in mounting state and disconnected" do
socket = Utils.post_mount_prune(%{@socket | connected?: false})
assert_raise RuntimeError, ~r/attempted to read connect_info/, fn ->
get_connect_info(socket)
end
end
test "returns nil when disconnected" do
socket = %{@socket | connected?: false}
assert get_connect_info(socket) == nil
end
test "returns params connected and mounting" do
socket = %{@socket | connected?: true}
assert get_connect_info(socket) == %{}
end
end
describe "static_changed?" do
test "raises when not in mounting state and connected" do
socket = Utils.post_mount_prune(%{@socket | connected?: true})
assert_raise RuntimeError, ~r/attempted to read static_changed?/, fn ->
static_changed?(socket)
end
end
test "raises when not in mounting state and disconnected" do
socket = Utils.post_mount_prune(%{@socket | connected?: false})
assert_raise RuntimeError, ~r/attempted to read static_changed?/, fn ->
static_changed?(socket)
end
end
test "returns false when disconnected" do
socket = %{@socket | connected?: false}
assert static_changed?(socket) == false
end
test "returns true when connected and static do not match" do
refute static_changed?([], %{})
refute static_changed?(["foo/bar.css"], nil)
assert static_changed?(["foo/bar.css"], %{})
refute static_changed?(["foo/bar.css"], %{"foo/bar.css" => "foo/bar-123456.css"})
refute static_changed?(
["domain.com/foo/bar.css"],
%{"foo/bar.css" => "foo/bar-123456.css"}
)
refute static_changed?(
["//domain.com/foo/bar.css"],
%{"foo/bar.css" => "foo/bar-123456.css"}
)
refute static_changed?(
["//domain.com/foo/bar.css?vsn=d"],
%{"foo/bar.css" => "foo/bar-123456.css"}
)
refute static_changed?(
["//domain.com/foo/bar-123456.css"],
%{"foo/bar.css" => "foo/bar-123456.css"}
)
refute static_changed?(
["//domain.com/foo/bar-123456.css?vsn=d"],
%{"foo/bar.css" => "foo/bar-123456.css"}
)
assert static_changed?(
["//domain.com/foo/bar-654321.css"],
%{"foo/bar.css" => "foo/bar-123456.css"}
)
assert static_changed?(
["foo/bar.css", "baz/bat.js"],
%{"foo/bar.css" => "foo/bar-123456.css"}
)
assert static_changed?(
["foo/bar.css", "baz/bat.js"],
%{"foo/bar.css" => "foo/bar-123456.css", "p/baz/bat.js" => "p/baz/bat-123456.js"}
)
refute static_changed?(
["foo/bar.css", "baz/bat.js"],
%{"foo/bar.css" => "foo/bar-123456.css", "baz/bat.js" => "baz/bat-123456.js"}
)
end
defp static_changed?(client, latest) do
socket = %{@socket | connected?: true}
Process.put(:cache_static_manifest_latest, latest)
socket = put_in(socket.private.connect_params["_cache_static_manifest_latest"], client)
static_changed?(socket)
end
end
describe "assign" do
test "tracks changes" do
socket = assign(@socket, existing: "foo")
assert socket.changed.existing == true
socket = Utils.clear_changed(socket)
assert assign(socket, existing: "foo").changed == %{}
end
test "keeps whole maps in changes" do
socket = assign(@socket, existing: %{foo: :bar})
socket = Utils.clear_changed(socket)
socket = assign(socket, existing: %{foo: :baz})
assert socket.changed.existing == %{foo: :bar}
socket = assign(socket, existing: %{foo: :bat})
assert socket.changed.existing == %{foo: :bar}
end
end
describe "assign_new" do
test "uses socket assigns if no parent assigns are present" do
socket =
@socket
|> assign(existing: "existing")
|> assign_new(:existing, fn -> "new-existing" end)
|> assign_new(:notexisting, fn -> "new-notexisting" end)
assert socket.assigns == %{
existing: "existing",
notexisting: "new-notexisting",
live_module: Phoenix.LiveViewTest.ParamCounterLive,
live_action: nil,
flash: %{}
}
end
test "uses parent assigns when present and falls back to socket assigns" do
socket =
put_in(@socket.private[:assign_new], {%{existing: "existing-parent"}, []})
|> assign(existing2: "existing2")
|> assign_new(:existing, fn -> "new-existing" end)
|> assign_new(:existing2, fn -> "new-existing2" end)
|> assign_new(:notexisting, fn -> "new-notexisting" end)
assert socket.assigns == %{
existing: "existing-parent",
existing2: "existing2",
notexisting: "new-notexisting",
live_module: Phoenix.LiveViewTest.ParamCounterLive,
live_action: nil,
flash: %{}
}
end
end
describe "redirect/2" do
test "requires local path on to" do
assert_raise ArgumentError, ~r"the :to option in redirect/2 expects a path", fn ->
redirect(@socket, to: "http://foo.com")
end
assert_raise ArgumentError, ~r"the :to option in redirect/2 expects a path", fn ->
redirect(@socket, to: "//foo.com")
end
assert redirect(@socket, to: "/foo").redirected == {:redirect, %{to: "/foo"}}
end
test "allows external paths" do
assert redirect(@socket, external: "http://foo.com/bar").redirected ==
{:redirect, %{to: "http://foo.com/bar"}}
end
end
describe "push_redirect/2" do
test "requires local path on to" do
assert_raise ArgumentError, ~r"the :to option in push_redirect/2 expects a path", fn ->
push_redirect(@socket, to: "http://foo.com")
end
assert_raise ArgumentError, ~r"the :to option in push_redirect/2 expects a path", fn ->
push_redirect(@socket, to: "//foo.com")
end
assert push_redirect(@socket, to: "/counter/123").redirected ==
{:live, :redirect, %{kind: :push, to: "/counter/123"}}
end
end
describe "push_patch/2" do
test "requires local path on to pointing to the same LiveView" do
assert_raise ArgumentError, ~r"the :to option in push_patch/2 expects a path", fn ->
push_patch(@socket, to: "http://foo.com")
end
assert_raise ArgumentError, ~r"the :to option in push_patch/2 expects a path", fn ->
push_patch(@socket, to: "//foo.com")
end
assert_raise ArgumentError,
~r"cannot push_patch/2 to \"/counter/123\" because the given path does not point to the current root view",
fn ->
push_patch(%{@socket | root_view: __MODULE__}, to: "/counter/123")
end
socket = %{@socket | view: Phoenix.LiveViewTest.ParamCounterLive}
assert push_patch(socket, to: "/counter/123").redirected ==
{:live, {%{"id" => "123"}, nil}, %{kind: :push, to: "/counter/123"}}
end
end
end
| 33.534722 | 126 | 0.591737 |
f7b4d9efcaf46b78fc58b60792c5104b265cc7a1 | 1,750 | ex | Elixir | clients/books/lib/google_api/books/v1/model/dictlayerdata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/books/lib/google_api/books/v1/model/dictlayerdata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/books/lib/google_api/books/v1/model/dictlayerdata.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Books.V1.Model.Dictlayerdata do
@moduledoc """
## Attributes
* `common` (*type:* `GoogleApi.Books.V1.Model.DictlayerdataCommon.t`, *default:* `nil`) -
* `dict` (*type:* `GoogleApi.Books.V1.Model.DictlayerdataDict.t`, *default:* `nil`) -
* `kind` (*type:* `String.t`, *default:* `nil`) -
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:common => GoogleApi.Books.V1.Model.DictlayerdataCommon.t(),
:dict => GoogleApi.Books.V1.Model.DictlayerdataDict.t(),
:kind => String.t()
}
field(:common, as: GoogleApi.Books.V1.Model.DictlayerdataCommon)
field(:dict, as: GoogleApi.Books.V1.Model.DictlayerdataDict)
field(:kind)
end
defimpl Poison.Decoder, for: GoogleApi.Books.V1.Model.Dictlayerdata do
def decode(value, options) do
GoogleApi.Books.V1.Model.Dictlayerdata.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Books.V1.Model.Dictlayerdata do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.018868 | 94 | 0.713714 |
f7b4f2147a1f11fd9d1de2f8f2a5c2c9aabf1b02 | 656 | exs | Elixir | config/test.exs | jacrdn/web_dev_hw08 | 1727810bb56a9a30733cb93cd77a8db7f5f747ed | [
"MIT"
] | null | null | null | config/test.exs | jacrdn/web_dev_hw08 | 1727810bb56a9a30733cb93cd77a8db7f5f747ed | [
"MIT"
] | null | null | null | config/test.exs | jacrdn/web_dev_hw08 | 1727810bb56a9a30733cb93cd77a8db7f5f747ed | [
"MIT"
] | null | null | null | use Mix.Config
# Configure your database
#
# The MIX_TEST_PARTITION environment variable can be used
# to provide built-in test partitioning in CI environment.
# Run `mix help test` for more information.
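#
# For example, a CI setup might run two partitions like this (illustrative):
#
#     MIX_TEST_PARTITION=1 mix test --partitions 2
#     MIX_TEST_PARTITION=2 mix test --partitions 2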
config :events, Events.Repo,
username: "postgres",
password: "postgres",
database: "events_test#{System.get_env("MIX_TEST_PARTITION")}",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox
# We don't run a server during test. If one is required,
# you can enable the server option below.
config :events, EventsWeb.Endpoint,
http: [port: 4002],
server: false
# Print only warnings and errors during test
config :logger, level: :warn
| 28.521739 | 65 | 0.746951 |
f7b5612fd20781e2e49905d05f809678a1bbe4e2 | 1,669 | ex | Elixir | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/video_content.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/video_content.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/ad_exchange_buyer/lib/google_api/ad_exchange_buyer/v2beta1/model/video_content.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.AdExchangeBuyer.V2beta1.Model.VideoContent do
@moduledoc """
Video content for a creative.
## Attributes
* `videoUrl` (*type:* `String.t`, *default:* `nil`) - The URL to fetch a video ad.
* `videoVastXml` (*type:* `String.t`, *default:* `nil`) - The contents of a VAST document for a video ad. This document should conform to the VAST 2.0 or 3.0 standard.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:videoUrl => String.t() | nil,
:videoVastXml => String.t() | nil
}
field(:videoUrl)
field(:videoVastXml)
end
defimpl Poison.Decoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.VideoContent do
def decode(value, options) do
GoogleApi.AdExchangeBuyer.V2beta1.Model.VideoContent.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.AdExchangeBuyer.V2beta1.Model.VideoContent do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 33.38 | 171 | 0.726183 |
f7b5682e0cd42c9817e3c4b4fc15eb073d4debdf | 25,089 | exs | Elixir | integration_test/cases/repo.exs | rbishop/ecto | a8a3215c9e2e35f7556f54c8d47d78a3670796d8 | [
"Apache-2.0"
] | null | null | null | integration_test/cases/repo.exs | rbishop/ecto | a8a3215c9e2e35f7556f54c8d47d78a3670796d8 | [
"Apache-2.0"
] | null | null | null | integration_test/cases/repo.exs | rbishop/ecto | a8a3215c9e2e35f7556f54c8d47d78a3670796d8 | [
"Apache-2.0"
] | null | null | null | Code.require_file "../support/types.exs", __DIR__
defmodule Ecto.Integration.RepoTest do
use Ecto.Integration.Case
alias Ecto.Integration.TestRepo
import Ecto.Query
alias Ecto.Integration.Post
alias Ecto.Integration.User
alias Ecto.Integration.PostUsecTimestamps
alias Ecto.Integration.Comment
alias Ecto.Integration.Permalink
alias Ecto.Integration.Custom
alias Ecto.Integration.Barebone
test "returns already started for started repos" do
assert {:error, {:already_started, _}} = TestRepo.start_link
end
test "fetch empty" do
assert [] == TestRepo.all(Post)
assert [] == TestRepo.all(from p in Post)
end
test "fetch with in" do
TestRepo.insert!(%Post{title: "hello"})
assert [] = TestRepo.all from p in Post, where: p.title in []
assert [] = TestRepo.all from p in Post, where: p.title in ["1", "2", "3"]
assert [] = TestRepo.all from p in Post, where: p.title in ^[]
assert [_] = TestRepo.all from p in Post, where: not p.title in []
assert [_] = TestRepo.all from p in Post, where: p.title in ["1", "hello", "3"]
assert [_] = TestRepo.all from p in Post, where: p.title in ["1", ^"hello", "3"]
assert [_] = TestRepo.all from p in Post, where: p.title in ^["1", "hello", "3"]
end
test "fetch without model" do
%Post{} = TestRepo.insert!(%Post{title: "title1"})
%Post{} = TestRepo.insert!(%Post{title: "title2"})
assert ["title1", "title2"] =
TestRepo.all(from(p in "posts", order_by: p.title, select: p.title))
assert [_] =
TestRepo.all(from(p in "posts", where: p.title == "title1", select: p.id))
end
test "insert, update and delete" do
post = %Post{title: "create and delete single", text: "fetch empty"}
meta = post.__meta__
deleted_meta = put_in meta.state, :deleted
assert %Post{} = to_be_deleted = TestRepo.insert!(post)
assert %Post{__meta__: ^deleted_meta} = TestRepo.delete!(to_be_deleted)
loaded_meta = put_in meta.state, :loaded
assert %Post{__meta__: ^loaded_meta} = TestRepo.insert!(post)
post = TestRepo.one(Post)
assert post.__meta__.state == :loaded
assert post.inserted_at
post = %{post | text: "coming very soon..."}
post = put_in post.__meta__.state, :built
assert %Post{__meta__: ^loaded_meta} = TestRepo.update!(post)
end
test "insert and update with changeset" do
# On insert we merge the fields and changes
changeset = Ecto.Changeset.cast(%Post{text: "x", title: "wrong"},
%{"title" => "hello", "temp" => "unknown"}, ~w(title temp), ~w())
post = TestRepo.insert!(changeset)
assert %Post{text: "x", title: "hello", temp: "unknown"} = post
assert %Post{text: "x", title: "hello", temp: "temp"} = TestRepo.get!(Post, post.id)
# On update we merge only fields, direct model changes are discarded
changeset = Ecto.Changeset.cast(%{post | text: "y"},
%{"title" => "world", "temp" => "unknown"}, ~w(title temp), ~w())
assert %Post{text: "y", title: "world", temp: "unknown"} = TestRepo.update!(changeset)
assert %Post{text: "x", title: "world", temp: "temp"} = TestRepo.get!(Post, post.id)
end
test "insert and update with empty changeset" do
# On insert we merge the fields and changes
changeset = Ecto.Changeset.cast(%Permalink{}, %{}, ~w(), ~w())
assert %Permalink{} = permalink = TestRepo.insert!(changeset)
# Assert we can update the same value twice,
# without changes, without triggering stale errors.
changeset = Ecto.Changeset.cast(permalink, %{}, ~w(), ~w())
assert TestRepo.update!(changeset) == permalink
assert TestRepo.update!(changeset) == permalink
end
test "insert with no primary key" do
assert %Barebone{num: nil} = TestRepo.insert!(%Barebone{})
assert %Barebone{num: 13} = TestRepo.insert!(%Barebone{num: 13})
end
@tag :read_after_writes
test "insert and update with changeset read after writes" do
defmodule RAW do
use Ecto.Model
schema "posts" do
field :counter, :integer, read_after_writes: true
field :visits, :integer
end
end
changeset = Ecto.Changeset.cast(struct(RAW, %{}), %{}, ~w(), ~w())
# There is no dirty tracking on insert, even with changesets,
# so database defaults never actually kick in.
assert %{id: cid, counter: nil} = raw = TestRepo.insert!(changeset)
# Set the counter to 11, so we can read it soon
TestRepo.update!(%{raw | counter: 11})
    # Now, a combination of dirty tracking with read_after_writes
    # allows us to see the actual counter value.
changeset = Ecto.Changeset.cast(raw, %{"visits" => "0"}, ~w(visits), ~w())
assert %{id: ^cid, counter: 11, visits: 0} = TestRepo.update!(changeset)
end
test "insert autogenerates for custom type" do
post = TestRepo.insert!(%Post{uuid: nil})
assert byte_size(post.uuid) == 36
assert TestRepo.get_by(Post, uuid: post.uuid) == post
end
@tag :id_type
test "insert autogenerates for custom id type" do
defmodule ID do
use Ecto.Model
@primary_key {:id, Elixir.Custom.Permalink, autogenerate: true}
schema "posts" do
end
end
id = TestRepo.insert!(struct(ID, id: nil))
assert id.id
assert TestRepo.get_by(ID, id: "#{id.id}-hello") == id
end
@tag :id_type
@tag :assigns_id_type
test "insert with user-assigned primary key" do
assert %Post{id: 1} = TestRepo.insert!(%Post{id: 1})
end
@tag :id_type
@tag :assigns_id_type
test "insert and update with user-assigned primary key in changeset" do
changeset = Ecto.Changeset.cast(%Post{id: 11}, %{"id" => "13"}, ~w(id), ~w())
assert %Post{id: 13} = post = TestRepo.insert!(changeset)
changeset = Ecto.Changeset.cast(post, %{"id" => "15"}, ~w(id), ~w())
assert %Post{id: 15} = TestRepo.update!(changeset)
end
test "insert autogenerates for binary id type" do
custom = TestRepo.insert!(%Custom{bid: nil})
assert custom.bid
assert TestRepo.get(Custom, custom.bid)
assert TestRepo.delete!(custom)
refute TestRepo.get(Custom, custom.bid)
end
@tag :uses_usec
test "insert and fetch a model with timestamps with usec" do
p1 = TestRepo.insert!(%PostUsecTimestamps{title: "hello"})
assert [p1] == TestRepo.all(PostUsecTimestamps)
end
test "optimistic locking in update/delete operations" do
import Ecto.Changeset, only: [cast: 4]
base_post = TestRepo.insert!(%Comment{})
cs_ok = cast(base_post, %{"text" => "foo.bar"}, ~w(text), ~w())
TestRepo.update!(cs_ok)
cs_stale = cast(base_post, %{"text" => "foo.baz"}, ~w(text), ~w())
assert_raise Ecto.StaleModelError, fn -> TestRepo.update!(cs_stale) end
assert_raise Ecto.StaleModelError, fn -> TestRepo.delete!(base_post) end
end
@tag :unique_constraint
test "unique constraint" do
changeset = Ecto.Changeset.change(%Post{}, uuid: Ecto.UUID.generate())
{:ok, _} = TestRepo.insert(changeset)
exception =
assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert model/, fn ->
changeset
|> TestRepo.insert()
end
assert exception.message =~ "unique: posts_uuid_index"
assert exception.message =~ "The changeset has not defined any constraint."
message = ~r/constraint error when attempting to insert model/
exception =
assert_raise Ecto.ConstraintError, message, fn ->
changeset
|> Ecto.Changeset.unique_constraint(:uuid, name: :posts_email_changeset)
|> TestRepo.insert()
end
assert exception.message =~ "unique: posts_email_changeset"
{:error, changeset} =
changeset
|> Ecto.Changeset.unique_constraint(:uuid)
|> TestRepo.insert()
assert changeset.errors == [uuid: "has already been taken"]
assert changeset.model.__meta__.state == :built
end
@tag :id_type
@tag :unique_constraint
test "unique constraint with binary_id" do
changeset = Ecto.Changeset.change(%Custom{}, uuid: Ecto.UUID.generate())
{:ok, _} = TestRepo.insert(changeset)
{:error, changeset} =
changeset
|> Ecto.Changeset.unique_constraint(:uuid)
|> TestRepo.insert()
assert changeset.errors == [uuid: "has already been taken"]
assert changeset.model.__meta__.state == :built
end
@tag :foreign_key_constraint
test "foreign key constraint" do
changeset = Ecto.Changeset.change(%Comment{post_id: 0})
exception =
assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert model/, fn ->
changeset
|> TestRepo.insert()
end
assert exception.message =~ "foreign_key: comments_post_id_fkey"
assert exception.message =~ "The changeset has not defined any constraint."
message = ~r/constraint error when attempting to insert model/
exception =
assert_raise Ecto.ConstraintError, message, fn ->
changeset
|> Ecto.Changeset.foreign_key_constraint(:post_id, name: :comments_post_id_other)
|> TestRepo.insert()
end
assert exception.message =~ "foreign_key: comments_post_id_other"
{:error, changeset} =
changeset
|> Ecto.Changeset.foreign_key_constraint(:post_id)
|> TestRepo.insert()
assert changeset.errors == [post_id: "does not exist"]
end
@tag :foreign_key_constraint
test "assoc constraint" do
changeset = Ecto.Changeset.change(%Comment{post_id: 0})
exception =
assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to insert model/, fn ->
changeset
|> TestRepo.insert()
end
assert exception.message =~ "foreign_key: comments_post_id_fkey"
assert exception.message =~ "The changeset has not defined any constraint."
message = ~r/constraint error when attempting to insert model/
exception =
assert_raise Ecto.ConstraintError, message, fn ->
changeset
|> Ecto.Changeset.assoc_constraint(:post, name: :comments_post_id_other)
|> TestRepo.insert()
end
assert exception.message =~ "foreign_key: comments_post_id_other"
{:error, changeset} =
changeset
|> Ecto.Changeset.assoc_constraint(:post)
|> TestRepo.insert()
assert changeset.errors == [post: "does not exist"]
end
@tag :foreign_key_constraint
test "no assoc constraint" do
user = TestRepo.insert!(%User{})
TestRepo.insert!(%Permalink{user_id: user.id})
exception =
assert_raise Ecto.ConstraintError, ~r/constraint error when attempting to delete model/, fn ->
TestRepo.delete!(user)
end
assert exception.message =~ "foreign_key: permalinks_user_id_fkey"
assert exception.message =~ "The changeset has not defined any constraint."
message = ~r/constraint error when attempting to delete model/
exception =
assert_raise Ecto.ConstraintError, message, fn ->
user
|> Ecto.Changeset.change
        |> Ecto.Changeset.no_assoc_constraint(:permalinks, name: :permalinks_user_id_other)
|> TestRepo.delete()
end
assert exception.message =~ "foreign_key: permalinks_user_id_pther"
{:error, changeset} =
user
|> Ecto.Changeset.change
|> Ecto.Changeset.no_assoc_constraint(:permalinks)
|> TestRepo.delete()
assert changeset.errors == [permalinks: "are still associated to this entry"]
end
test "get(!)" do
post1 = TestRepo.insert!(%Post{title: "1", text: "hai"})
post2 = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert post1 == TestRepo.get(Post, post1.id)
assert post2 == TestRepo.get(Post, to_string post2.id) # With casting
assert post1 == TestRepo.get!(Post, post1.id)
assert post2 == TestRepo.get!(Post, to_string post2.id) # With casting
TestRepo.delete!(post1)
assert nil == TestRepo.get(Post, post1.id)
assert_raise Ecto.NoResultsError, fn ->
TestRepo.get!(Post, post1.id)
end
end
test "get(!) with custom source" do
custom = Ecto.Model.put_source(%Custom{}, "posts")
custom = TestRepo.insert!(custom)
bid = custom.bid
assert %Custom{bid: ^bid, __meta__: %{source: {nil, "posts"}}} =
TestRepo.get(from(c in {"posts", Custom}), bid)
end
test "get_by(!)" do
post1 = TestRepo.insert!(%Post{title: "1", text: "hai"})
post2 = TestRepo.insert!(%Post{title: "2", text: "hello"})
assert post1 == TestRepo.get_by(Post, id: post1.id)
assert post1 == TestRepo.get_by(Post, text: post1.text)
assert post1 == TestRepo.get_by(Post, id: post1.id, text: post1.text)
assert post2 == TestRepo.get_by(Post, id: to_string(post2.id)) # With casting
assert nil == TestRepo.get_by(Post, text: "hey")
assert nil == TestRepo.get_by(Post, id: post2.id, text: "hey")
assert post1 == TestRepo.get_by!(Post, id: post1.id)
assert post1 == TestRepo.get_by!(Post, text: post1.text)
assert post1 == TestRepo.get_by!(Post, id: post1.id, text: post1.text)
assert post2 == TestRepo.get_by!(Post, id: to_string(post2.id)) # With casting
assert_raise Ecto.NoResultsError, fn ->
TestRepo.get_by!(Post, id: post2.id, text: "hey")
end
end
test "one(!)" do
post1 = TestRepo.insert!(%Post{title: "1", text: "hai"})
post2 = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert post1 == TestRepo.one(from p in Post, where: p.id == ^post1.id)
assert post2 == TestRepo.one(from p in Post, where: p.id == ^to_string post2.id) # With casting
assert nil == TestRepo.one(from p in Post, where: is_nil(p.id))
assert post1 == TestRepo.one!(from p in Post, where: p.id == ^post1.id)
assert post2 == TestRepo.one!(from p in Post, where: p.id == ^to_string post2.id) # With casting
assert_raise Ecto.NoResultsError, fn ->
TestRepo.one!(from p in Post, where: is_nil(p.id))
end
end
test "one(!) with multiple results" do
assert %Post{} = TestRepo.insert!(%Post{title: "hai"})
assert %Post{} = TestRepo.insert!(%Post{title: "hai"})
assert_raise Ecto.MultipleResultsError, fn ->
TestRepo.one(from p in Post, where: p.title == "hai")
end
assert_raise Ecto.MultipleResultsError, fn ->
TestRepo.one!(from p in Post, where: p.title == "hai")
end
end
test "update all" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
assert {3, nil} = TestRepo.update_all(Post, set: [title: "x"])
assert %Post{title: "x"} = TestRepo.get(Post, id1)
assert %Post{title: "x"} = TestRepo.get(Post, id2)
assert %Post{title: "x"} = TestRepo.get(Post, id3)
assert {3, nil} = TestRepo.update_all("posts", set: [title: nil])
assert %Post{title: nil} = TestRepo.get(Post, id1)
assert %Post{title: nil} = TestRepo.get(Post, id2)
assert %Post{title: nil} = TestRepo.get(Post, id3)
end
test "update all with filter" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
query = from(p in Post, where: p.title == "1" or p.title == "2",
update: [set: [text: ^"y"]])
assert {2, nil} = TestRepo.update_all(query, set: [title: "x"])
assert %Post{title: "x", text: "y"} = TestRepo.get(Post, id1)
assert %Post{title: "x", text: "y"} = TestRepo.get(Post, id2)
assert %Post{title: "3", text: nil} = TestRepo.get(Post, id3)
end
test "update all no entries" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3"})
query = from(p in Post, where: p.title == "4")
assert {0, nil} = TestRepo.update_all(query, set: [title: "x"])
assert %Post{title: "1"} = TestRepo.get(Post, id1)
assert %Post{title: "2"} = TestRepo.get(Post, id2)
assert %Post{title: "3"} = TestRepo.get(Post, id3)
end
test "update all increment syntax" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", visits: 0})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", visits: 1})
# Positive
query = from p in Post, where: not is_nil(p.id), update: [inc: [visits: 2]]
assert {2, nil} = TestRepo.update_all(query, [])
assert %Post{visits: 2} = TestRepo.get(Post, id1)
assert %Post{visits: 3} = TestRepo.get(Post, id2)
# Negative
query = from p in Post, where: not is_nil(p.id), update: [inc: [visits: -1]]
assert {2, nil} = TestRepo.update_all(query, [])
assert %Post{visits: 1} = TestRepo.get(Post, id1)
assert %Post{visits: 2} = TestRepo.get(Post, id2)
end
@tag :id_type
test "update all with casting and dumping on id type field" do
assert %Post{id: id1} = TestRepo.insert!(%Post{})
assert {1, nil} = TestRepo.update_all(Post, set: [counter: to_string(id1)])
assert %Post{counter: ^id1} = TestRepo.get(Post, id1)
end
test "update all with casting and dumping" do
text = "hai"
date = Ecto.DateTime.utc
assert %Post{id: id1} = TestRepo.insert!(%Post{})
assert {1, nil} = TestRepo.update_all(Post, set: [text: text, inserted_at: date])
assert %Post{text: "hai", inserted_at: ^date} = TestRepo.get(Post, id1)
end
test "delete all" do
assert %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert %Post{} = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert %Post{} = TestRepo.insert!(%Post{title: "3", text: "hai"})
assert {3, nil} = TestRepo.delete_all(Post)
assert [] = TestRepo.all(Post)
end
test "delete all with filter" do
assert %Post{} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert %Post{} = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert %Post{} = TestRepo.insert!(%Post{title: "3", text: "hai"})
query = from(p in Post, where: p.title == "1" or p.title == "2")
assert {2, nil} = TestRepo.delete_all(query)
assert [%Post{}] = TestRepo.all(Post)
end
test "delete all no entries" do
assert %Post{id: id1} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert %Post{id: id2} = TestRepo.insert!(%Post{title: "2", text: "hai"})
assert %Post{id: id3} = TestRepo.insert!(%Post{title: "3", text: "hai"})
query = from(p in Post, where: p.title == "4")
assert {0, nil} = TestRepo.delete_all(query)
assert %Post{title: "1"} = TestRepo.get(Post, id1)
assert %Post{title: "2"} = TestRepo.get(Post, id2)
assert %Post{title: "3"} = TestRepo.get(Post, id3)
end
test "virtual field" do
assert %Post{id: id} = TestRepo.insert!(%Post{title: "1", text: "hai"})
assert TestRepo.get(Post, id).temp == "temp"
end
## Assocs
test "has_many assoc" do
p1 = TestRepo.insert!(%Post{title: "1"})
p2 = TestRepo.insert!(%Post{title: "1"})
%Comment{id: cid1} = TestRepo.insert!(%Comment{text: "1", post_id: p1.id})
%Comment{id: cid2} = TestRepo.insert!(%Comment{text: "2", post_id: p1.id})
%Comment{id: cid3} = TestRepo.insert!(%Comment{text: "3", post_id: p2.id})
[c1, c2] = TestRepo.all Ecto.Model.assoc(p1, :comments)
assert c1.id == cid1
assert c2.id == cid2
[c1, c2, c3] = TestRepo.all Ecto.Model.assoc([p1, p2], :comments)
assert c1.id == cid1
assert c2.id == cid2
assert c3.id == cid3
end
test "has_one assoc" do
p1 = TestRepo.insert!(%Post{title: "1"})
p2 = TestRepo.insert!(%Post{title: "2"})
%Permalink{id: lid1} = TestRepo.insert!(%Permalink{url: "1", post_id: p1.id})
%Permalink{} = TestRepo.insert!(%Permalink{url: "2"})
%Permalink{id: lid3} = TestRepo.insert!(%Permalink{url: "3", post_id: p2.id})
[l1, l3] = TestRepo.all Ecto.Model.assoc([p1, p2], :permalink)
assert l1.id == lid1
assert l3.id == lid3
end
test "belongs_to assoc" do
%Post{id: pid1} = TestRepo.insert!(%Post{title: "1"})
%Post{id: pid2} = TestRepo.insert!(%Post{title: "2"})
l1 = TestRepo.insert!(%Permalink{url: "1", post_id: pid1})
l2 = TestRepo.insert!(%Permalink{url: "2"})
l3 = TestRepo.insert!(%Permalink{url: "3", post_id: pid2})
assert [p1, p2] = TestRepo.all Ecto.Model.assoc([l1, l2, l3], :post)
assert p1.id == pid1
assert p2.id == pid2
end
test "has_one nested assoc" do
changeset = Ecto.Changeset.change(%Post{title: "1"}, permalink: %Permalink{url: "1"})
p1 = TestRepo.insert!(changeset)
assert p1.permalink.id
assert p1.permalink.post_id == p1.id
assert p1.permalink.url == "1"
p1 = TestRepo.get!(from(p in Post, preload: [:permalink]), p1.id)
assert p1.permalink.url == "1"
changeset = Ecto.Changeset.change(p1, permalink: %Permalink{url: "2"})
p1 = TestRepo.update!(changeset)
assert p1.permalink.id
assert p1.permalink.post_id == p1.id
assert p1.permalink.url == "2"
p1 = TestRepo.get!(from(p in Post, preload: [:permalink]), p1.id)
assert p1.permalink.url == "2"
changeset = Ecto.Changeset.change(p1, permalink: nil)
p1 = TestRepo.update!(changeset)
refute p1.permalink
p1 = TestRepo.get!(from(p in Post, preload: [:permalink]), p1.id)
refute p1.permalink
assert [0] == TestRepo.all(from(p in Permalink, select: count(p.id)))
end
test "has_many nested assoc" do
c1 = %Comment{text: "1"}
c2 = %Comment{text: "2"}
changeset = Ecto.Changeset.change(%Post{title: "1"}, comments: [c1])
p1 = TestRepo.insert!(changeset)
[c1] = p1.comments
assert c1.id
assert c1.post_id == p1.id
p1 = TestRepo.get!(from(p in Post, preload: [:comments]), p1.id)
[c1] = p1.comments
assert c1.text == "1"
changeset = Ecto.Changeset.change(p1, comments: [c1, c2])
p1 = TestRepo.update!(changeset)
[_c1, c2] = p1.comments |> Enum.sort_by(&(&1.id))
assert c2.id
assert c2.post_id == p1.id
p1 = TestRepo.get!(from(p in Post, preload: [:comments]), p1.id)
[c1, c2] = p1.comments |> Enum.sort_by(&(&1.id))
assert c1.text == "1"
assert c2.text == "2"
changeset = Ecto.Changeset.change(p1, comments: [])
p1 = TestRepo.update!(changeset)
assert p1.comments == []
p1 = TestRepo.get!(from(p in Post, preload: [:comments]), p1.id)
assert p1.comments == []
assert [0] == TestRepo.all(from(c in Comment, select: count(c.id)))
end
@tag :unique_constraint
test "has_many assoc with constraints" do
author = TestRepo.insert!(%User{name: "john doe"})
p1 = TestRepo.insert!(%Post{title: "hello", author_id: author.id})
TestRepo.insert!(%Post{title: "world", author_id: author.id})
# Asserts that `unique_constraint` for `uuid` exists
assert_raise Ecto.ConstraintError, fn ->
TestRepo.insert!(%Post{title: "another", author_id: author.id, uuid: p1.uuid})
end
author = TestRepo.preload author, [:posts]
posts_params = Enum.map author.posts, fn %Post{uuid: u} ->
%{"uuid": u, "title": "fresh"}
end
# This will only work if we delete before performing inserts
changeset = Ecto.Changeset.cast(author, %{"posts" => posts_params}, ~w(posts))
author = TestRepo.update! changeset
assert Enum.map(author.posts, &(&1.title)) == ["fresh", "fresh"]
end
@tag :transaction
test "rollbacks failed nested assocs" do
permalink_changeset = %{Ecto.Changeset.change(%Permalink{url: "1"}) | valid?: false}
changeset = Ecto.Changeset.change(%Post{title: "1"}, permalink: permalink_changeset)
assert {:error, changeset} = TestRepo.insert(changeset)
assert changeset.model.__struct__ == Post
refute changeset.valid?
assert [0] == TestRepo.all(from(p in Post, select: count(p.id)))
assert [0] == TestRepo.all(from(p in Permalink, select: count(p.id)))
end
## Dependent
test "has_many assoc on delete deletes all" do
post = TestRepo.insert!(%Post{})
TestRepo.insert!(%Comment{post_id: post.id})
TestRepo.insert!(%Comment{post_id: post.id})
TestRepo.delete!(post)
assert TestRepo.all(Comment) == []
refute Process.get(Comment)
end
test "has_many assoc on delete fetches and deletes" do
post = TestRepo.insert!(%Post{})
TestRepo.insert!(%Permalink{post_id: post.id})
TestRepo.delete!(post)
assert TestRepo.all(Permalink) == []
assert Process.get(Permalink) == :on_delete
end
test "has_many assoc on delete nilifies all" do
user = TestRepo.insert!(%User{})
TestRepo.insert!(%Comment{author_id: user.id})
TestRepo.insert!(%Comment{author_id: user.id})
TestRepo.delete!(user)
author_ids = Comment |> TestRepo.all() |> Enum.map(fn(comment) -> comment.author_id end)
assert author_ids == [nil, nil]
refute Process.get(Comment)
end
test "has_many assoc on delete does nothing" do
user = TestRepo.insert!(%User{})
TestRepo.insert!(%Post{author_id: user.id})
TestRepo.delete!(user)
assert Enum.count(TestRepo.all(Post)) == 1
end
end
| 35.739316 | 101 | 0.645622 |
f7b56e8b5e3b7cb7c02a063c76d438eda16481f8 | 3,926 | ex | Elixir | lib/vintage_net_mobile/signal_monitor.ex | dognotdog/vintage_net_mobile | a709d8f8ce3af71c444d11ff07483b4be180c4c5 | [
"Apache-2.0"
] | 18 | 2020-03-08T21:06:10.000Z | 2022-02-05T00:44:55.000Z | lib/vintage_net_mobile/signal_monitor.ex | dognotdog/vintage_net_mobile | a709d8f8ce3af71c444d11ff07483b4be180c4c5 | [
"Apache-2.0"
] | 27 | 2020-02-28T16:07:07.000Z | 2022-01-19T03:51:02.000Z | lib/vintage_net_mobile/signal_monitor.ex | dognotdog/vintage_net_mobile | a709d8f8ce3af71c444d11ff07483b4be180c4c5 | [
"Apache-2.0"
] | 7 | 2020-03-25T20:08:22.000Z | 2021-10-22T03:20:17.000Z | defmodule VintageNetMobile.SignalMonitor do
@moduledoc """
Monitor the cell signal levels
This monitor queries the modem for cell signal level information and posts it to
VintageNet properties.
The following properties are reported:
| Property | Values | Description |
| -------------- | ------------ | ----------------------------- |
  | `signal_asu`   | `0-31,99`    | This is the raw Arbitrary Strength Unit (ASU) reported. Its interpretation depends on the modem and possibly the connection technology. |
| `signal_4bars` | `0-4` | The signal level in "bars" for presentation to a user. |
| `signal_dbm` | `-144 - -44` | The signal level in dBm. Interpretation depends on the connection technology. |
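  Once the monitor is running, these values can be read back out of VintageNet. A
  minimal sketch (the `"ppp0"` interface name is only an illustration):
      VintageNet.get(["interface", "ppp0", "mobile", "signal_dbm"])
      VintageNet.get(["interface", "ppp0", "mobile", "signal_4bars"])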
"""
use GenServer
require Logger
alias VintageNet.PropertyTable
alias VintageNetMobile.{ExChat, ATParser, ASUCalculator}
@rssi_unknown ASUCalculator.from_gsm_asu(99)
@typedoc """
The options for the monitor are:
* `:signal_check_interval` - the number of milliseconds to wait before asking
the modem for the signal quality (default 5 seconds)
* `:ifname` - the interface name the mobile connection is using
* `:tty` - the tty name that is used to send AT commands
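  For example, the monitor could be started with options along these lines (the
  interface and tty names are illustrative):
      VintageNetMobile.SignalMonitor.start_link(
        ifname: "ppp0",
        tty: "ttyUSB2",
        signal_check_interval: 30_000
      )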
"""
@type opt ::
{:signal_check_interval, non_neg_integer()} | {:ifname, String.t()} | {:tty, String.t()}
defmodule State do
@moduledoc false
defstruct signal_check_interval: nil, ifname: nil, tty: nil
end
@spec start_link([opt]) :: GenServer.on_start()
def start_link(opts) do
GenServer.start_link(__MODULE__, opts)
end
@impl true
def init(opts) do
interval = Keyword.get(opts, :signal_check_interval, 5_000)
ifname = Keyword.fetch!(opts, :ifname)
tty = Keyword.fetch!(opts, :tty)
Process.send_after(self(), :signal_check, interval)
us = self()
ExChat.register(tty, "+CSQ", fn message -> send(us, {:handle_csq, message}) end)
{:ok, %State{signal_check_interval: interval, ifname: ifname, tty: tty}}
end
@impl true
def handle_info(:signal_check, state) do
if connected?(state) do
# Only poll if connected, since some modems don't like it when they're not connected
# The AT+CSQ response should be quick. Quectel specifies a max response of 300 ms for
# the BG96 and EC25. Other modules should be similar.
ExChat.send_best_effort(state.tty, "AT+CSQ", timeout: 500)
:ok
else
post_signal_rssi(@rssi_unknown, state.ifname)
end
Process.send_after(self(), :signal_check, state.signal_check_interval)
{:noreply, state}
end
def handle_info({:handle_csq, message}, state) do
message
|> ATParser.parse()
|> csq_response_to_rssi()
|> maybe_pet_power_control(state.ifname)
|> post_signal_rssi(state.ifname)
{:noreply, state}
end
defp csq_response_to_rssi({:ok, _header, [asu, _error_rate]}) when is_integer(asu) do
ASUCalculator.from_gsm_asu(asu)
end
defp csq_response_to_rssi(anything_else) do
Logger.warn("Unexpected AT+CSQ response: #{inspect(anything_else)}")
@rssi_unknown
end
defp post_signal_rssi(%{asu: asu, dbm: dbm, bars: bars}, ifname) do
PropertyTable.put(VintageNet, ["interface", ifname, "mobile", "signal_asu"], asu)
PropertyTable.put(VintageNet, ["interface", ifname, "mobile", "signal_dbm"], dbm)
PropertyTable.put(VintageNet, ["interface", ifname, "mobile", "signal_4bars"], bars)
end
defp connected?(state) do
VintageNet.get(["interface", state.ifname, "connection"]) == :internet
end
# Report that the LTE modem is doing ok if it's connected to a tower
# with 1 or more bars. 0 means that there's no connection.
defp maybe_pet_power_control(%{bars: bars} = report, ifname) when bars > 0 do
VintageNet.PowerManager.PMControl.pet_watchdog(ifname)
report
end
defp maybe_pet_power_control(report, _ifname), do: report
end
| 34.13913 | 174 | 0.684921 |
f7b5b0c26b44e82934e7e0ec0e7436aa7ec9a7b0 | 894 | ex | Elixir | clients/private_ca/lib/google_api/private_ca/v1beta1/metadata.ex | myskoach/elixir-google-api | 4f8cbc2fc38f70ffc120fd7ec48e27e46807b563 | [
"Apache-2.0"
] | null | null | null | clients/private_ca/lib/google_api/private_ca/v1beta1/metadata.ex | myskoach/elixir-google-api | 4f8cbc2fc38f70ffc120fd7ec48e27e46807b563 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/private_ca/lib/google_api/private_ca/v1beta1/metadata.ex | myskoach/elixir-google-api | 4f8cbc2fc38f70ffc120fd7ec48e27e46807b563 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.PrivateCA.V1beta1 do
@moduledoc """
API client metadata for GoogleApi.PrivateCA.V1beta1.
"""
@discovery_revision "20201203"
def discovery_revision(), do: @discovery_revision
end
| 33.111111 | 74 | 0.761745 |
f7b5c0ca75675cc8850977b46042171f3abec9f4 | 5,696 | ex | Elixir | lib/elixir/priv/unicode.ex | bruce/elixir | d77ccf941541959079e5f677f8717da24b486fac | [
"Apache-2.0"
] | 1 | 2017-09-09T20:59:04.000Z | 2017-09-09T20:59:04.000Z | lib/elixir/priv/unicode.ex | bruce/elixir | d77ccf941541959079e5f677f8717da24b486fac | [
"Apache-2.0"
] | null | null | null | lib/elixir/priv/unicode.ex | bruce/elixir | d77ccf941541959079e5f677f8717da24b486fac | [
"Apache-2.0"
] | null | null | null | # This file has its own compilation step because
# it needs to parse String.Unicode data and
# compile a digested module.
defmodule String.Unicode do
@moduledoc false
def version, do: {6,2,0}
to_binary = fn
"" ->
nil
codepoints ->
codepoints = :binary.split(codepoints, " ", [:global])
Enum.reduce codepoints, "", fn(codepoint, acc) ->
acc <> << binary_to_integer(codepoint, 16) :: utf8 >>
end
end
data_path = Path.join(__DIR__, "UnicodeData.txt")
{ codes, whitespace } = Enum.reduce File.stream!(data_path), { [], [] }, fn(line, { cacc, wacc }) ->
[ codepoint, _name, _category,
_class, bidi, _decomposition,
_numeric_1, _numeric_2, _numeric_3,
_bidi_mirror, _unicode_1, _iso,
upper, lower, title ] = :binary.split(line, ";", [:global])
title = :binary.part(title, 0, size(title) - 1)
cond do
upper != "" or lower != "" or title != "" ->
{ [{ to_binary.(codepoint), to_binary.(upper), to_binary.(lower), to_binary.(title) } | cacc], wacc }
bidi in ["B", "S", "WS"] ->
{ cacc, [to_binary.(codepoint) | wacc] }
true ->
{ cacc, wacc }
end
end
special_path = Path.join(__DIR__, "SpecialCasing.txt")
codes = Enum.reduce File.stream!(special_path), codes, fn(line, acc) ->
[ codepoint, lower, title, upper, _comment ] = :binary.split(line, "; ", [:global])
key = to_binary.(codepoint)
:lists.keystore(key, 1, acc, { key, to_binary.(upper), to_binary.(lower), to_binary.(title) })
end
seqs_path = Path.join(__DIR__, "NamedSequences.txt")
seqs = Enum.map File.stream!(seqs_path), fn(line) ->
[ _name, codepoints ] = :binary.split(line, ";", [:global])
codepoints = :binary.split(codepoints, " ", [:global])
codepoints = Enum.map codepoints, Regex.replace(%r/\s+/, &1, "")
codepoints = Enum.filter codepoints, fn(x) -> size(x) > 0 end
Enum.map codepoints, to_binary.(&1)
end
# Downcase
def downcase(string), do: do_downcase(string) |> iolist_to_binary
lc { codepoint, _upper, lower, _title } inlist codes, lower && lower != codepoint do
defp do_downcase(unquote(codepoint) <> rest) do
unquote(:binary.bin_to_list(lower)) ++ downcase(rest)
end
end
defp do_downcase(<< char, rest :: binary >>) do
[char|do_downcase(rest)]
end
defp do_downcase(""), do: []
# Upcase
def upcase(string), do: do_upcase(string) |> iolist_to_binary
lc { codepoint, upper, _lower, _title } inlist codes, upper && upper != codepoint do
defp do_upcase(unquote(codepoint) <> rest) do
unquote(:binary.bin_to_list(upper)) ++ do_upcase(rest)
end
end
defp do_upcase(<< char, rest :: binary >>) do
[char|do_upcase(rest)]
end
defp do_upcase(""), do: []
# Titlecase once
def titlecase_once(""), do: { "", "" }
lc { codepoint, _upper, _lower, title } inlist codes, title && title != codepoint do
def titlecase_once(unquote(codepoint) <> rest) do
{ unquote(title), rest }
end
end
def titlecase_once(<< char, rest :: binary >>) do
{ << char >>, rest }
end
# Strip
def lstrip(""), do: ""
lc codepoint inlist whitespace do
def lstrip(unquote(codepoint) <> rest) do
lstrip(rest)
end
end
def lstrip(other) when is_binary(other), do: other
def rstrip(string) when is_binary(string) do
do_rstrip(string, [], [])
end
lc codepoint inlist whitespace do
c = :binary.bin_to_list(codepoint) |> :lists.reverse
defp do_rstrip(unquote(codepoint) <> rest, acc1, acc2) do
do_rstrip(rest, unquote(c) ++ (acc1 || acc2), acc2)
end
end
defp do_rstrip(<< char, rest :: binary >>, nil, acc2) do
do_rstrip(rest, nil, [char|acc2])
end
defp do_rstrip(<< char, rest :: binary >>, acc1, _acc2) do
do_rstrip(rest, nil, [char|acc1])
end
defp do_rstrip(<<>>, _acc1, acc2), do: acc2 |> :lists.reverse |> iolist_to_binary
# Split
def split(""), do: [""]
def split(string) when is_binary(string) do
:lists.reverse do_split(string, "", [])
end
lc codepoint inlist whitespace do
defp do_split(unquote(codepoint) <> rest, buffer, acc) do
do_split(rest, "", add_buffer_to_acc(buffer, acc))
end
end
defp do_split(<< char, rest :: binary >>, buffer, acc) do
do_split(rest, << buffer :: binary, char >>, acc)
end
defp do_split(<<>>, buffer, acc) do
add_buffer_to_acc(buffer, acc)
end
@compile { :inline, add_buffer_to_acc: 2 }
defp add_buffer_to_acc("", acc), do: acc
defp add_buffer_to_acc(buffer, acc), do: [buffer|acc]
# Graphemes
lc codepoints inlist seqs do
def next_grapheme(<< unquote_splicing(codepoints), t :: binary >>) do
{ << unquote_splicing(codepoints) >>, t }
end
end
def next_grapheme(<<>>) do
:no_grapheme
end
def next_grapheme(binary) when is_binary(binary) do
case next_codepoint(binary) do
:no_codepoint -> :no_grapheme
other -> other
end
end
def graphemes(binary) when is_binary(binary) do
do_graphemes(next_grapheme(binary))
end
defp do_graphemes({ c, rest }) do
[c|do_graphemes(next_grapheme(rest))]
end
defp do_graphemes(:no_grapheme) do
[]
end
# Codepoints
def next_codepoint(<< cp :: utf8, rest :: binary >>) do
{ <<cp :: utf8>>, rest }
end
def next_codepoint(<< cp, rest :: binary >>) do
{ <<cp>>, rest }
end
def next_codepoint(<<>>) do
:no_codepoint
end
def codepoints(binary) when is_binary(binary) do
do_codepoints(next_codepoint(binary))
end
defp do_codepoints({ c, rest }) do
[c|do_codepoints(next_codepoint(rest))]
end
defp do_codepoints(:no_codepoint) do
[]
end
end
| 25.657658 | 109 | 0.630091 |
f7b5cb921ef8918f2eb0a2b44bdd803447a3ee15 | 8,477 | ex | Elixir | lib/format/datetime/formatters/default.ex | Adzz/timex | a9c97e2dd9cb5bed286d5a1d688f3eea8e73e6ea | [
"MIT"
] | null | null | null | lib/format/datetime/formatters/default.ex | Adzz/timex | a9c97e2dd9cb5bed286d5a1d688f3eea8e73e6ea | [
"MIT"
] | null | null | null | lib/format/datetime/formatters/default.ex | Adzz/timex | a9c97e2dd9cb5bed286d5a1d688f3eea8e73e6ea | [
"MIT"
] | null | null | null | defmodule Timex.Format.DateTime.Formatters.Default do
@moduledoc """
Date formatting language used by default by the formatting functions in Timex.
This is a novel formatting language introduced with `DateFormat`. Its main
advantage is simplicity and usage of mnemonics that are easy to memorize.
## Directive format
A directive is an optional _padding specifier_ followed by a _mnemonic_, both
enclosed in braces (`{` and `}`):
{<padding><mnemonic>}
Supported padding specifiers:
* `0` -- pads the number with zeros. Applicable to mnemonics that produce numerical result.
* `_` -- pads the number with spaces. Applicable to mnemonics that produce numerical result.
When padding specifier is omitted, numbers will not be padded.
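  For example, `"{0D}.{0M}.{YYYY}"` zero-pads the day and month, `"{_D}.{_M}.{YYYY}"`
  pads them with spaces, and `"{D}.{M}.{YYYY}"` leaves them unpadded. A minimal usage
  sketch (assuming the `DateFormat.format/2` entry point mentioned above):
      DateFormat.format(date, "{YYYY}-{0M}-{0D} {h24}:{m}:{s}")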
## List of all directives
### Years and centuries
* `{YYYY}` - full year number (0..9999)
  * `{YY}` - the last two digits of the year number (0..99)
* `{C}` - century number (0..99)
* `{WYYYY}` - year number (4 digits) corresponding to the date's ISO week (0000..9999)
  * `{WYY}` - year number (2 digits) corresponding to the date's ISO week (00..99)
### Months
* `{M}` - month number (1..12)
* `{Mshort}` - abbreviated month name (Jan..Dec, no padding)
* `{Mfull}` - full month name (January..December, no padding)
### Days and weekdays
* `{D}` - day number (1..31)
* `{Dord}` - ordinal day of the year (1..366)
* `{WDmon}` - weekday, Monday first (0..6, no padding)
* `{WDsun}` - weekday, Sunday first (0..6, no padding)
* `{WDshort}` - abbreviated weekday name (Mon..Sun, no padding)
* `{WDfull}` - full weekday name (Monday..Sunday, no padding)
### Weeks
* `{Wiso}` - ISO week number (01..53)
* `{Wmon}` - week number of the year, Monday first (01..53)
* `{Wsun}` - week number of the year, Sunday first (01..53)
### Time
* `{h24}` - hour of the day (00..23)
* `{h12}` - hour of the day (1..12)
* `{m}` - minutes of the hour (00..59)
* `{s}` - seconds of the minute (00..60)
* `{ss}` - fractional second, based on precision of microseconds given (.xxx == ms, .xxxxxx == us)
* `{s-epoch}` - number of seconds since UNIX epoch
* `{am}` - lowercase am or pm (no padding)
* `{AM}` - uppercase AM or PM (no padding)
### Time zones
* `{Zname}` - time zone name, e.g. `UTC` (no padding)
* `{Zabbr}` - time zone abbreviation, e.g. `CST` (no padding)
* `{Z}` - time zone offset in the form `+0230` (no padding)
* `{Z:}` - time zone offset in the form `-07:30` (no padding)
* `{Z::}` - time zone offset in the form `-07:30:00` (no padding)
### Compound directives
These are shortcut directives corresponding to parts of the ISO 8601
specification. The benefit of using these over manually constructed ISO
formats is that these directives convert the date to UTC for you.
* `{ISO:Basic}` - `<date>T<time><offset>`. Full date and time
specification without separators.
* `{ISO:Basic:Z}` - `<date>T<time>Z`. Full date and time in UTC without
separators (e.g. `20070813T134801Z`)
* `{ISO:Extended}` - `<date>T<time><offset>`. Full date and time
specification with separators. (e.g. `2007-08-13T16:48:01 +03:00`)
* `{ISO:Extended:Z}` - `<date>T<time>Z`. Full date and time in UTC. (e.g. `2007-08-13T13:48:01Z`)
* `{ISOdate}` - `YYYY-MM-DD`. That is, 4-digit year number, followed by
2-digit month and day numbers (e.g. `2007-08-13`)
* `{ISOtime}` - `hh:mm:ss`. That is, 2-digit hour, minute, and second,
separated by colons (e.g. `13:04:05`). Midnight is 00 hours.
* `{ISOweek}` - `YYYY-Www`. That is, ISO week-based year, followed by ISO
week number (e.g. `2007-W09`)
* `{ISOweek-day}` - `YYYY-Www-D`. That is, an `{ISOweek}`, additionally
followed by weekday (e.g. `2007-W09-1`)
* `{ISOord}` - `YYYY-DDD`. That is, year number, followed by the ordinal
day number (e.g. `2007-113`)
* `{ASN1:UTCtime}` - `YYMMDD<time>Z`. Full 2-digit year date and time in UTC without
separators (e.g. `070813134801Z`)
* `{ASN1:GeneralizedTime}` - `YYYYMMDD<time>`. Full 4-digit year date and time in local timezone without
separators and with optional fractional seconds (e.g. `20070813134801.032`)
* `{ASN1:GeneralizedTime:Z}` - `YYYYMMDD<time>Z`. Full 4-digit year date and time in UTC without
separators and with optional fractional seconds (e.g. `20070813134801.032Z`)
* `{ASN1:GeneralizedTime:TZ}` - `YYYYMMDD<time><offset>`. Full 4-digit year date and time in UTC without
separators and with optional fractional seconds (e.g. `20070813134801.032-0500`)
These directives provide support for miscellaneous common formats:
* `{RFC822}` - e.g. `Mon, 05 Jun 14 23:20:59 UT`
* `{RFC822z}` - e.g. `Mon, 05 Jun 14 23:20:59 Z`
* `{RFC1123}` - e.g. `Tue, 05 Mar 2013 23:25:19 +0200`
* `{RFC1123z}` - e.g. `Tue, 05 Mar 2013 23:25:19 Z`
* `{RFC3339}` - e.g. `2013-03-05T23:25:19+02:00`
* `{RFC3339z}` - e.g. `2013-03-05T23:25:19Z`
* `{ANSIC}` - e.g. `Tue Mar 5 23:25:19 2013`
* `{UNIX}` - e.g. `Tue Mar 5 23:25:19 PST 2013`
* `{ASN1:UTCtime}` - e.g. `130305232519Z`
* `{ASN1:GeneralizedTime}` - e.g. `20130305232519.928`
* `{ASN1:GeneralizedTime:Z}` - e.g. `20130305232519.928Z`
* `{ASN1:GeneralizedTime:TZ}` - e.g. `20130305232519.928-0500`
* `{kitchen}` - e.g. `3:25PM`
"""
use Timex.Format.DateTime.Formatter
alias Timex.Format.FormatError
alias Timex.Parse.DateTime.Tokenizers.Default, as: Tokenizer
alias Timex.{Types, Translator}
@spec tokenize(String.t()) :: {:ok, [Directive.t()]} | {:error, term}
defdelegate tokenize(format_string), to: Tokenizer
def format!(date, format_string), do: lformat!(date, format_string, Translator.default_locale())
def format(date, format_string), do: lformat(date, format_string, Translator.default_locale())
def format(date, format_string, tokenizer),
do: lformat(date, format_string, tokenizer, Translator.default_locale())
@spec lformat!(Types.calendar_types(), String.t(), String.t()) :: String.t() | no_return
def lformat!(date, format_string, locale) do
case lformat(date, format_string, locale) do
{:ok, result} -> result
{:error, reason} -> raise FormatError, message: reason
end
end
@spec lformat(Types.calendar_types(), String.t(), String.t()) ::
{:ok, String.t()} | {:error, term}
def lformat(date, format_string, locale) do
case tokenize(format_string) do
{:ok, []} ->
{:error, "There were no formatting directives in the provided string."}
{:ok, dirs} when is_list(dirs) ->
do_format(locale, date, dirs, <<>>)
{:error, reason} ->
{:error, {:format, reason}}
end
end
@doc """
  Same as `lformat/3`, but tokenizes the format string with the given
  tokenizer module (any module implementing `tokenize/1`) instead of the
  default one.
"""
@spec lformat(Types.calendar_types(), String.t(), atom, String.t()) ::
{:ok, String.t()} | {:error, term}
def lformat(date, format_string, tokenizer, locale) do
case tokenizer.tokenize(format_string) do
{:ok, []} ->
{:error, "There were no formatting directives in the provided string."}
{:ok, dirs} when is_list(dirs) ->
do_format(locale, date, dirs, <<>>)
{:error, reason} ->
{:error, {:format, reason}}
end
end
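  # Illustrative call of `lformat/4` with a custom tokenizer; `MyApp.Tokenizer`
  # is a hypothetical module implementing `tokenize/1`:
  #
  #     lformat(datetime, "{YYYY}-{0M}-{0D}", MyApp.Tokenizer, "en")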
defp do_format(_locale, _date, [], result), do: {:ok, result}
defp do_format(locale, date, [%Directive{type: :literal, value: char} | dirs], result)
when is_binary(char) do
do_format(locale, date, dirs, <<result::binary, char::binary>>)
end
defp do_format(
locale,
date,
[%Directive{type: type, modifiers: mods, flags: flags, width: width} | dirs],
result
) do
case format_token(locale, type, date, mods, flags, width) do
{:error, _} = err -> err
formatted -> do_format(locale, date, dirs, <<result::binary, formatted::binary>>)
end
end
end
| 40.951691 | 106 | 0.600566 |
f7b5e0803deefb5b2d9f1ecf6100da0b64772a66 | 1,570 | ex | Elixir | lib/new_relixir/plug/phoenix.ex | nikhilbelchada/new-relixir | 6595b7ef713617e4c69ff414371c01c0e168a706 | [
"MIT"
] | 192 | 2016-03-18T23:58:14.000Z | 2022-02-04T13:41:32.000Z | lib/new_relixir/plug/phoenix.ex | nikhilbelchada/new-relixir | 6595b7ef713617e4c69ff414371c01c0e168a706 | [
"MIT"
] | 53 | 2016-05-06T17:57:06.000Z | 2019-01-25T21:11:52.000Z | lib/new_relixir/plug/phoenix.ex | nikhilbelchada/new-relixir | 6595b7ef713617e4c69ff414371c01c0e168a706 | [
"MIT"
] | 55 | 2016-04-29T09:45:36.000Z | 2020-05-27T23:35:04.000Z | defmodule NewRelixir.Plug.Phoenix do
@moduledoc """
WARNING: this module is deprecated.
A plug that instruments Phoenix controllers and records their response times in New Relic.
Inside an instrumented controller's actions, instrumented `Repo` calls will be scoped
under the current web transaction.
```
defmodule MyApp.UserController do
use Phoenix.Controller
plug NewRelixir.Plug.Phoenix
alias MyApp.Repo.NewRelic, as: Repo
def index(conn, _params) do
users = Repo.all(User)
# This database call is recorded as `Repo.all` under `UserController#index`.
render(conn, "index.html", users: users)
end
end
```
"""
@behaviour Plug
alias NewRelixir.{CurrentTransaction, Transaction, Utils}
alias Plug.Conn
def init(opts) do
IO.warn """
`NewRelixir.Plug.Phoenix` is deprecated; use `NewRelixir.Instrumenters.Phoenix`
instead. For Plug-based non-Phoenix projects, use `NewRelixir.Instrumenters.Plug`.
""", []
opts
end
def call(conn, _config) do
if NewRelixir.active? do
record_transaction(conn)
else
conn
end
end
defp record_transaction(conn) do
transaction = Utils.transaction_name(conn)
CurrentTransaction.set(transaction)
start = System.monotonic_time()
Conn.register_before_send(conn, fn conn ->
stop = System.monotonic_time()
elapsed_microseconds = System.convert_time_unit(stop - start, :native, :microsecond)
Transaction.record_web(transaction, elapsed_microseconds)
conn
end)
end
end
| 23.787879 | 92 | 0.702548 |
f7b5f87048235cca43dcdad5d5a4ec05674e083b | 1,950 | ex | Elixir | clients/compute/lib/google_api/compute/v1/model/allocation_specific_sku_allocation_allocated_instance_properties_reserved_disk.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/compute/lib/google_api/compute/v1/model/allocation_specific_sku_allocation_allocated_instance_properties_reserved_disk.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/compute/lib/google_api/compute/v1/model/allocation_specific_sku_allocation_allocated_instance_properties_reserved_disk.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.Compute.V1.Model.AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk do
@moduledoc """
## Attributes
* `diskSizeGb` (*type:* `String.t`, *default:* `nil`) - Specifies the size of the disk in base-2 GB.
* `interface` (*type:* `String.t`, *default:* `nil`) - Specifies the disk interface to use for attaching this disk, which is either SCSI or NVME. The default is SCSI. For performance characteristics of SCSI over NVMe, see Local SSD performance.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:diskSizeGb => String.t(),
:interface => String.t()
}
field(:diskSizeGb)
field(:interface)
end
defimpl Poison.Decoder,
for:
GoogleApi.Compute.V1.Model.AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk do
def decode(value, options) do
GoogleApi.Compute.V1.Model.AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk.decode(
value,
options
)
end
end
defimpl Poison.Encoder,
for:
GoogleApi.Compute.V1.Model.AllocationSpecificSKUAllocationAllocatedInstancePropertiesReservedDisk do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.210526 | 248 | 0.747692 |
f7b603b2df0f117077321015280beb2366e52789 | 6,262 | exs | Elixir | test/groupher_server_web/mutation/cms/flags/drink_flag_test.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 240 | 2018-11-06T09:36:54.000Z | 2022-02-20T07:12:36.000Z | test/groupher_server_web/mutation/cms/flags/drink_flag_test.exs | coderplanets/coderplanets_server | 3663e56340d6d050e974c91f7e499d8424fc25e9 | [
"Apache-2.0"
] | 363 | 2018-07-11T03:38:14.000Z | 2021-12-14T01:42:40.000Z | test/groupher_server_web/mutation/cms/flags/drink_flag_test.exs | mydearxym/mastani_server | f24034a4a5449200165cf4a547964a0961793eab | [
"Apache-2.0"
] | 22 | 2019-01-27T11:47:56.000Z | 2021-02-28T13:17:52.000Z | defmodule GroupherServer.Test.Mutation.Flags.DrinkFlag do
use GroupherServer.TestTools
alias GroupherServer.CMS
alias CMS.Model.Community
alias Helper.ORM
setup do
{:ok, user} = db_insert(:user)
{:ok, community} = db_insert(:community)
{:ok, drink} = CMS.create_article(community, :drink, mock_attrs(:drink), user)
guest_conn = simu_conn(:guest)
user_conn = simu_conn(:user)
owner_conn = simu_conn(:user, user)
{:ok, ~m(user_conn guest_conn owner_conn community user drink)a}
end
describe "[mutation drink flag curd]" do
@query """
mutation($id: ID!){
markDeleteDrink(id: $id) {
id
markDelete
}
}
"""
test "auth user can markDelete drink", ~m(drink)a do
variables = %{id: drink.id}
passport_rules = %{"drink.mark_delete" => true}
rule_conn = simu_conn(:user, cms: passport_rules)
updated = rule_conn |> mutation_result(@query, variables, "markDeleteDrink")
assert updated["id"] == to_string(drink.id)
assert updated["markDelete"] == true
end
test "mark delete drink should update drink's communities meta count", ~m(user)a do
community_attrs = mock_attrs(:community) |> Map.merge(%{user_id: user.id})
{:ok, community} = CMS.create_community(community_attrs)
{:ok, drink} = CMS.create_article(community, :drink, mock_attrs(:drink), user)
{:ok, community} = ORM.find(Community, community.id)
assert community.meta.drinks_count == 1
variables = %{id: drink.id}
passport_rules = %{"drink.mark_delete" => true}
rule_conn = simu_conn(:user, cms: passport_rules)
rule_conn |> mutation_result(@query, variables, "markDeleteDrink")
{:ok, community} = ORM.find(Community, community.id)
assert community.meta.drinks_count == 0
end
test "unauth user markDelete drink fails", ~m(user_conn guest_conn drink)a do
variables = %{id: drink.id}
rule_conn = simu_conn(:user, cms: %{"what.ever" => true})
assert user_conn |> mutation_get_error?(@query, variables, ecode(:passport))
assert guest_conn |> mutation_get_error?(@query, variables, ecode(:account_login))
assert rule_conn |> mutation_get_error?(@query, variables, ecode(:passport))
end
@query """
mutation($id: ID!){
undoMarkDeleteDrink(id: $id) {
id
markDelete
}
}
"""
test "auth user can undo markDelete drink", ~m(drink)a do
variables = %{id: drink.id}
{:ok, _} = CMS.mark_delete_article(:drink, drink.id)
passport_rules = %{"drink.undo_mark_delete" => true}
rule_conn = simu_conn(:user, cms: passport_rules)
updated = rule_conn |> mutation_result(@query, variables, "undoMarkDeleteDrink")
assert updated["id"] == to_string(drink.id)
assert updated["markDelete"] == false
end
test "undo mark delete drink should update drink's communities meta count", ~m(user)a do
community_attrs = mock_attrs(:community) |> Map.merge(%{user_id: user.id})
{:ok, community} = CMS.create_community(community_attrs)
{:ok, drink} = CMS.create_article(community, :drink, mock_attrs(:drink), user)
{:ok, _} = CMS.mark_delete_article(:drink, drink.id)
{:ok, community} = ORM.find(Community, community.id)
assert community.meta.drinks_count == 0
variables = %{id: drink.id}
passport_rules = %{"drink.undo_mark_delete" => true}
rule_conn = simu_conn(:user, cms: passport_rules)
rule_conn |> mutation_result(@query, variables, "undoMarkDeleteDrink")
{:ok, community} = ORM.find(Community, community.id)
assert community.meta.drinks_count == 1
end
test "unauth user undo markDelete drink fails", ~m(user_conn guest_conn drink)a do
variables = %{id: drink.id}
rule_conn = simu_conn(:user, cms: %{"what.ever" => true})
assert user_conn |> mutation_get_error?(@query, variables, ecode(:passport))
assert guest_conn |> mutation_get_error?(@query, variables, ecode(:account_login))
assert rule_conn |> mutation_get_error?(@query, variables, ecode(:passport))
end
@query """
mutation($id: ID!, $communityId: ID!){
pinDrink(id: $id, communityId: $communityId) {
id
}
}
"""
test "auth user can pin drink", ~m(community drink)a do
variables = %{id: drink.id, communityId: community.id}
passport_rules = %{community.raw => %{"drink.pin" => true}}
rule_conn = simu_conn(:user, cms: passport_rules)
updated = rule_conn |> mutation_result(@query, variables, "pinDrink")
assert updated["id"] == to_string(drink.id)
end
test "unauth user pin drink fails", ~m(user_conn guest_conn community drink)a do
variables = %{id: drink.id, communityId: community.id}
rule_conn = simu_conn(:user, cms: %{"what.ever" => true})
assert user_conn |> mutation_get_error?(@query, variables, ecode(:passport))
assert guest_conn |> mutation_get_error?(@query, variables, ecode(:account_login))
assert rule_conn |> mutation_get_error?(@query, variables, ecode(:passport))
end
@query """
mutation($id: ID!, $communityId: ID!){
undoPinDrink(id: $id, communityId: $communityId) {
id
isPinned
}
}
"""
test "auth user can undo pin drink", ~m(community drink)a do
variables = %{id: drink.id, communityId: community.id}
passport_rules = %{community.raw => %{"drink.undo_pin" => true}}
rule_conn = simu_conn(:user, cms: passport_rules)
CMS.pin_article(:drink, drink.id, community.id)
updated = rule_conn |> mutation_result(@query, variables, "undoPinDrink")
assert updated["id"] == to_string(drink.id)
end
test "unauth user undo pin drink fails", ~m(user_conn guest_conn community drink)a do
variables = %{id: drink.id, communityId: community.id}
rule_conn = simu_conn(:user, cms: %{"what.ever" => true})
assert user_conn |> mutation_get_error?(@query, variables, ecode(:passport))
assert guest_conn |> mutation_get_error?(@query, variables, ecode(:account_login))
assert rule_conn |> mutation_get_error?(@query, variables, ecode(:passport))
end
end
end
| 34.98324 | 92 | 0.654104 |
f7b60c8fd15e71d6fb998ea06e7a1e9565ed95d5 | 562 | exs | Elixir | apps/neoscan_web/test/neoscan_web/controllers/blocks_controller_test.exs | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 75 | 2017-07-23T02:45:32.000Z | 2021-12-13T11:04:17.000Z | apps/neoscan_web/test/neoscan_web/controllers/blocks_controller_test.exs | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 252 | 2017-07-13T19:36:00.000Z | 2021-07-28T18:40:00.000Z | apps/neoscan_web/test/neoscan_web/controllers/blocks_controller_test.exs | vincentgeneste/neo-scan | 4a654575331eeb3eb12d4fd61696a7bd6dbca3ce | [
"MIT"
] | 87 | 2017-07-23T02:45:34.000Z | 2022-03-02T14:54:27.000Z | defmodule NeoscanWeb.BlocksControllerTest do
use NeoscanWeb.ConnCase
import NeoscanWeb.Factory
test "/blocks/:page", %{conn: conn} do
block = insert(:block)
insert(:block)
insert(:block)
conn = get(conn, "/blocks/1")
body = html_response(conn, 200)
assert body =~ Base.encode16(block.hash, case: :lower)
conn = get(conn, "/blocks/2")
body = html_response(conn, 200)
assert not (body =~ Base.encode16(block.hash, case: :lower))
conn = get(conn, "/blocks/====")
assert "/" == redirected_to(conn, 302)
end
end
| 26.761905 | 64 | 0.645907 |
f7b634e400e2a4630d5ba75fff1d14b2766f78a4 | 3,704 | ex | Elixir | elixir_backend/clubhouse_data/lib/clubhouse_data/users.ex | yusufaine/nus-clubhouse | eae8223d26111bab5e981c95b539c964293908e8 | [
"MIT"
] | 1 | 2021-05-16T08:17:37.000Z | 2021-05-16T08:17:37.000Z | elixir_backend/clubhouse_data/lib/clubhouse_data/users.ex | yusufaine/nus-clubhouse | eae8223d26111bab5e981c95b539c964293908e8 | [
"MIT"
] | 116 | 2021-05-29T16:32:51.000Z | 2021-08-13T16:05:29.000Z | elixir_backend/clubhouse_data/lib/clubhouse_data/users.ex | yusufaine/nus-clubhouse | eae8223d26111bab5e981c95b539c964293908e8 | [
"MIT"
] | 2 | 2021-05-23T07:12:40.000Z | 2021-10-11T02:59:40.000Z | defmodule ClubhouseData.Users do
@moduledoc """
The Users context.
"""
import Ecto.Query, warn: false
alias ClubhouseData.{Users.User, Repo}
@type changeset_error :: {:error, Ecto.Changeset.t()}
@doc """
Returns the list of users.
"""
@spec list_users() :: [User.t()]
def list_users() do
    User
    |> Repo.all()
    |> Repo.preload([:created_rooms, :rooms, :followers, :following])
end
@doc """
Gets a single user.
"""
@spec get_user!(integer) :: User.t() | no_return
def get_user!(id) do
Repo.preload(Repo.get!(User, id), [:created_rooms, :rooms, :followers, :following])
end
@doc """
Gets a user based on email.
This is used by Phauxth to get user information.
"""
@spec get_by(map) :: User.t() | nil
def get_by(%{"email" => email}) do
    case Repo.get_by(User, email: email) do
      nil ->
        nil

      %User{} = user ->
        Repo.preload(user, [:created_rooms, :rooms, :followers, :following])
    end
end
@doc """
Creates a user.
"""
@spec create_user(map) :: {:ok, User.t()} | changeset_error
def create_user(attrs) do
%User{}
|> User.create_changeset(attrs)
|> Repo.insert()
end
@doc """
Updates a user.
"""
@spec update_user(User.t(), map) :: {:ok, User.t()} | changeset_error
def update_user(%User{} = user, attrs) do
user
|> User.changeset(attrs)
|> Repo.update()
end
@doc """
Deletes a user.
"""
@spec delete_user(User.t()) :: {:ok, User.t()} | changeset_error
def delete_user(%User{} = user) do
Repo.delete(user)
end
@doc """
Returns an `%Ecto.Changeset{}` for tracking user changes.
"""
@spec change_user(User.t()) :: Ecto.Changeset.t()
def change_user(%User{} = user) do
User.changeset(user, %{})
end
@doc """
Confirms a user's email.
"""
@spec confirm_user(User.t()) :: {:ok, User.t()} | changeset_error
def confirm_user(%User{} = user) do
user
|> User.confirm_changeset(DateTime.truncate(DateTime.utc_now(), :second))
|> Repo.update()
end
@doc """
Makes a password reset request.
"""
  @spec create_password_reset(map) :: {:ok, User.t()} | changeset_error | nil
def create_password_reset(attrs) do
with %User{} = user <- get_by(attrs) do
user
|> User.password_reset_changeset(DateTime.truncate(DateTime.utc_now(), :second))
|> Repo.update()
end
end
@doc """
Updates a user's password.
"""
@spec update_password(User.t(), map) :: {:ok, User.t()} | changeset_error
def update_password(%User{} = user, attrs) do
user
|> User.update_password_changeset(attrs)
|> Repo.update()
end
@doc """
Add user to following.
"""
@spec follow_user(User.t(), User.t()) :: {:ok, User.t()} | changeset_error
def follow_user(%User{} = user, %User{} = userToFollow) do
user = Repo.preload(user, [:following, :followers])
userToFollow = Repo.preload(userToFollow, [:following, :followers])
following = [userToFollow | user.following]
user
|> Ecto.Changeset.change()
|> Ecto.Changeset.put_assoc(:following, following)
|> Repo.insert_or_update()
end
@doc """
Set user online status to true when logged in.
"""
@spec set_user_online(User.t()) :: {:ok, User.t()} | changeset_error
def set_user_online(%User{} = user) do
update_user(user, %{isOnline: true})
end
end | 28.060606 | 91 | 0.565065 |
f7b6882dcc744577d70eb307d66f319e5d47d607 | 689 | exs | Elixir | language-practice/Elixir/elixir-concurrency-basics/portal/mix.exs | cjschneider2/practice_code | 5d9b793eccca39262fb452fa1f3f53e6b54bc7aa | [
"Unlicense"
] | null | null | null | language-practice/Elixir/elixir-concurrency-basics/portal/mix.exs | cjschneider2/practice_code | 5d9b793eccca39262fb452fa1f3f53e6b54bc7aa | [
"Unlicense"
] | null | null | null | language-practice/Elixir/elixir-concurrency-basics/portal/mix.exs | cjschneider2/practice_code | 5d9b793eccca39262fb452fa1f3f53e6b54bc7aa | [
"Unlicense"
] | null | null | null | defmodule Portal.Mixfile do
use Mix.Project
def project do
[app: :portal,
version: "0.0.1",
elixir: "~> 1.0",
build_embedded: Mix.env == :prod,
start_permanent: Mix.env == :prod,
deps: deps]
end
# Configuration for the OTP application
#
# Type `mix help compile.app` for more information
def application do
[applications: [:logger],
mod: {Portal, []}]
end
# Dependencies can be Hex packages:
#
# {:mydep, "~> 0.3.0"}
#
# Or git/path repositories:
#
# {:mydep, git: "https://github.com/elixir-lang/mydep.git", tag: "0.1.0"}
#
# Type `mix help deps` for more examples and options
defp deps do
[]
end
end
| 20.264706 | 77 | 0.599419 |
f7b68babbc01ee5f71a9f40ea9d702fce71fc4d3 | 260 | ex | Elixir | lib/heat_tags/messages/get.ex | DPechetti/nlw-heat-elixir | c3fe71244a4c1d49bb28668f9334c5b4c803aa54 | [
"MIT"
] | 2 | 2021-10-30T11:53:58.000Z | 2022-02-06T02:17:06.000Z | lib/heat_tags/messages/get.ex | DPechetti/nlw-heat-elixir | c3fe71244a4c1d49bb28668f9334c5b4c803aa54 | [
"MIT"
] | 1 | 2021-10-30T14:24:28.000Z | 2021-10-30T14:24:28.000Z | lib/heat_tags/messages/get.ex | DPechetti/nlw-heat-elixir | c3fe71244a4c1d49bb28668f9334c5b4c803aa54 | [
"MIT"
] | null | null | null | defmodule HeatTags.Messages.Get do
import Ecto.Query
alias HeatTags.{Message, Repo}
def todayMessages do
today = Date.utc_today()
query = from message in Message, where: type(message.inserted_at, :date) == ^today
Repo.all(query)
end
end
| 20 | 86 | 0.707692 |
f7b6a8633c9e46724068515cce636e0145d7106f | 10,245 | ex | Elixir | lib/iex/lib/iex/evaluator.ex | wstrinz/elixir | 1048b34d6c816f8e5dbd4fdbaaf9baa41b4f0d95 | [
"Apache-2.0"
] | 1 | 2021-04-28T21:35:01.000Z | 2021-04-28T21:35:01.000Z | lib/iex/lib/iex/evaluator.ex | wstrinz/elixir | 1048b34d6c816f8e5dbd4fdbaaf9baa41b4f0d95 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:36:45.000Z | 2018-09-10T23:36:45.000Z | lib/iex/lib/iex/evaluator.ex | wstrinz/elixir | 1048b34d6c816f8e5dbd4fdbaaf9baa41b4f0d95 | [
"Apache-2.0"
] | 1 | 2018-09-10T23:32:56.000Z | 2018-09-10T23:32:56.000Z | defmodule IEx.Evaluator do
@moduledoc false
@doc """
Eval loop for an IEx session. Its responsibilities include:
* loading of .iex files
* evaluating code
* trapping exceptions in the code being evaluated
* keeping expression history
"""
def init(command, server, leader, opts) do
old_leader = Process.group_leader()
Process.group_leader(self(), leader)
evaluator = Process.get(:iex_evaluator)
Process.put(:iex_evaluator, command)
state = loop_state(server, IEx.History.init(), opts)
command == :ack && :proc_lib.init_ack(self())
try do
loop(state)
after
Process.group_leader(self(), old_leader)
cond do
is_nil(evaluator) ->
Process.delete(:iex_evaluator)
evaluator != :ack ->
# Ensure propagation to non-root level evaluators
send(self(), {:done, server})
true ->
:ok
end
:ok
end
end
@doc """
Gets a value out of the binding, using the provided
variable name and map key path.
"""
@spec value_from_binding(pid, pid, atom, [atom]) :: {:ok, any} | :error
def value_from_binding(evaluator, server, var_name, map_key_path) do
ref = make_ref()
send(evaluator, {:value_from_binding, server, ref, self(), var_name, map_key_path})
receive do
{^ref, result} -> result
after
5000 -> :error
end
end
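  # For example (illustrative): with `state = %{users: %{count: 3}}` bound in
  # the evaluator, `value_from_binding(evaluator, server, :state, [:users, :count])`
  # returns `{:ok, 3}`.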
@doc """
Gets a list of variables out of the binding that match the passed
variable prefix.
"""
@spec variables_from_binding(pid, pid, String.t()) :: [String.t()]
def variables_from_binding(evaluator, server, variable_prefix) do
ref = make_ref()
send(evaluator, {:variables_from_binding, server, ref, self(), variable_prefix})
receive do
{^ref, result} -> result
after
5000 -> []
end
end
@doc """
Returns the named fields from the current session environment.
"""
@spec fields_from_env(pid, pid, [atom]) :: %{optional(atom) => term}
def fields_from_env(evaluator, server, fields) do
ref = make_ref()
send(evaluator, {:fields_from_env, server, ref, self(), fields})
receive do
{^ref, result} -> result
after
5000 -> %{}
end
end
defp loop(%{server: server} = state) do
receive do
{:eval, ^server, code, iex_state} ->
{result, state} = eval(code, iex_state, state)
send(server, {:evaled, self(), result})
loop(state)
{:fields_from_env, ^server, ref, receiver, fields} ->
send(receiver, {ref, Map.take(state.env, fields)})
loop(state)
{:value_from_binding, ^server, ref, receiver, var_name, map_key_path} ->
value = traverse_binding(state.binding, var_name, map_key_path)
send(receiver, {ref, value})
loop(state)
{:variables_from_binding, ^server, ref, receiver, var_prefix} ->
value = find_matched_variables(state.binding, var_prefix)
send(receiver, {ref, value})
loop(state)
{:done, ^server} ->
:ok
end
end
defp traverse_binding(binding, var_name, map_key_path) do
accumulator = Keyword.fetch(binding, var_name)
Enum.reduce(map_key_path, accumulator, fn
key, {:ok, map} when is_map(map) -> Map.fetch(map, key)
_key, _acc -> :error
end)
end
defp find_matched_variables(binding, var_prefix) do
for {var_name, _value} <- binding,
is_atom(var_name),
var_name = Atom.to_string(var_name),
String.starts_with?(var_name, var_prefix),
do: var_name
end
defp loop_state(server, history, opts) do
env = opts[:env] || :elixir.env_for_eval(file: "iex")
env = %{env | prematch_vars: :apply}
{_, _, env, scope} = :elixir.eval('import IEx.Helpers', [], env)
stacktrace = opts[:stacktrace]
binding = Keyword.get(opts, :binding, [])
state = %{
binding: binding,
scope: scope,
env: env,
server: server,
history: history,
stacktrace: stacktrace
}
case opts[:dot_iex_path] do
"" -> state
path -> load_dot_iex(state, path)
end
end
defp load_dot_iex(state, path) do
candidates =
if path do
[path]
else
Enum.map([".iex.exs", "~/.iex.exs"], &Path.expand/1)
end
path = Enum.find(candidates, &File.regular?/1)
if is_nil(path) do
state
else
eval_dot_iex(state, path)
end
end
defp eval_dot_iex(state, path) do
try do
code = File.read!(path)
env = :elixir.env_for_eval(state.env, file: path, line: 1)
# Evaluate the contents in the same environment server_loop will run in
{_result, binding, env, _scope} = :elixir.eval(String.to_charlist(code), state.binding, env)
%{state | binding: binding, env: :elixir.env_for_eval(env, file: "iex", line: 1)}
catch
kind, error ->
io_result("Error while evaluating: #{path}")
print_error(kind, error, __STACKTRACE__)
System.halt(1)
end
end
# Instead of doing just :elixir.eval, we first parse the expression to see
# if it's well formed. If parsing succeeds, we evaluate the AST as usual.
#
# If parsing fails, this might be a TokenMissingError which we treat in
# a special way (to allow for continuation of an expression on the next
# line in IEx).
#
# Returns updated state.
#
# The first two clauses provide support for the break-trigger allowing to
# break out from a pending incomplete expression. See
# https://github.com/elixir-lang/elixir/issues/1089 for discussion.
@break_trigger '#iex:break\n'
defp eval(code, iex_state, state) do
try do
do_eval(String.to_charlist(code), iex_state, state)
catch
kind, error ->
print_error(kind, error, __STACKTRACE__)
{%{iex_state | cache: ''}, state}
end
end
defp do_eval(@break_trigger, %IEx.State{cache: ''} = iex_state, state) do
{iex_state, state}
end
defp do_eval(@break_trigger, iex_state, _state) do
:elixir_errors.parse_error(iex_state.counter, "iex", "incomplete expression", "")
end
defp do_eval(latest_input, iex_state, state) do
code = iex_state.cache ++ latest_input
line = iex_state.counter
put_history(state)
put_whereami(state)
quoted = Code.string_to_quoted(code, line: line, file: "iex")
handle_eval(quoted, code, line, iex_state, state)
after
Process.delete(:iex_history)
Process.delete(:iex_whereami)
end
defp put_history(%{history: history}) do
Process.put(:iex_history, history)
end
defp put_whereami(%{env: %{file: "iex"}}) do
:ok
end
defp put_whereami(%{env: %{file: file, line: line}, stacktrace: stacktrace}) do
Process.put(:iex_whereami, {file, line, stacktrace})
end
defp handle_eval({:ok, forms}, code, line, iex_state, state) do
{result, binding, env, scope} =
:elixir.eval_forms(forms, state.binding, state.env, state.scope)
unless result == IEx.dont_display_result() do
io_inspect(result)
end
iex_state = %{iex_state | cache: '', counter: iex_state.counter + 1}
state = %{state | env: env, scope: scope, binding: binding}
{iex_state, update_history(state, line, code, result)}
end
defp handle_eval({:error, {_, _, ""}}, code, _line, iex_state, state) do
# Update iex_state.cache so that IEx continues to add new input to
# the unfinished expression in "code"
{%{iex_state | cache: code}, state}
end
defp handle_eval({:error, {line, error, token}}, _code, _line, _iex_state, _state) do
# Encountered malformed expression
:elixir_errors.parse_error(line, "iex", error, token)
end
defp update_history(state, counter, _cache, result) do
history_size = IEx.Config.history_size()
update_in(state.history, &IEx.History.append(&1, {counter, result}, history_size))
end
defp io_inspect(result) do
io_result(inspect(result, IEx.inspect_opts()))
end
defp io_result(result) do
IO.puts(:stdio, IEx.color(:eval_result, result))
end
## Error handling
defp print_error(kind, reason, stacktrace) do
{blamed, stacktrace} = Exception.blame(kind, reason, stacktrace)
ansidata =
case blamed do
%FunctionClauseError{} ->
{_, inspect_opts} = pop_in(IEx.inspect_opts()[:syntax_colors][:reset])
banner = Exception.format_banner(kind, reason, stacktrace)
blame = FunctionClauseError.blame(blamed, &inspect(&1, inspect_opts), &blame_match/2)
[IEx.color(:eval_error, banner), pad(blame)]
_ ->
[IEx.color(:eval_error, Exception.format_banner(kind, blamed, stacktrace))]
end
stackdata = Exception.format_stacktrace(prune_stacktrace(stacktrace))
IO.write(:stdio, [ansidata, ?\n, IEx.color(:stack_info, stackdata)])
end
defp pad(string) do
" " <> String.replace(string, "\n", "\n ")
end
defp blame_match(%{match?: true, node: node}, _), do: Macro.to_string(node)
defp blame_match(%{match?: false, node: node}, _), do: blame_ansi(:blame_diff, "-", node)
defp blame_match(_, string), do: string
defp blame_ansi(color, no_ansi, node) do
case IEx.Config.color(color) do
nil ->
no_ansi <> Macro.to_string(node) <> no_ansi
ansi ->
[ansi | Macro.to_string(node)]
|> IO.ANSI.format(true)
|> IO.iodata_to_binary()
end
end
@elixir_internals [:elixir, :elixir_expand, :elixir_compiler, :elixir_module] ++
[:elixir_clauses, :elixir_lexical, :elixir_def, :elixir_map] ++
[:elixir_erl, :elixir_erl_clauses, :elixir_erl_pass]
defp prune_stacktrace(stacktrace) do
# The order in which each drop_while is listed is important.
# For example, the user may call Code.eval_string/2 in IEx
# and if there is an error we should not remove erl_eval
# and eval_bits information from the user stacktrace.
stacktrace
|> Enum.reverse()
|> Enum.drop_while(&(elem(&1, 0) == :proc_lib))
|> Enum.drop_while(&(elem(&1, 0) == __MODULE__))
|> Enum.drop_while(&(elem(&1, 0) == :elixir))
|> Enum.drop_while(&(elem(&1, 0) in [:erl_eval, :eval_bits]))
|> Enum.reverse()
|> Enum.reject(&(elem(&1, 0) in @elixir_internals))
end
end
| 29.781977 | 98 | 0.643631 |
f7b6c02a31eed6676bb7ae053be05a9f6dd73eea | 3,212 | exs | Elixir | config/runtime.exs | sergiotapia/ekeko | e7b8ea523a8ae7dd6983d099021c9790504bcc0c | [
"MIT"
] | 8 | 2022-02-21T00:34:21.000Z | 2022-03-12T08:13:26.000Z | config/runtime.exs | sergiotapia/ekeko | e7b8ea523a8ae7dd6983d099021c9790504bcc0c | [
"MIT"
] | null | null | null | config/runtime.exs | sergiotapia/ekeko | e7b8ea523a8ae7dd6983d099021c9790504bcc0c | [
"MIT"
] | null | null | null | import Config
# config/runtime.exs is executed for all environments, including
# during releases. It is executed after compilation and before the
# system starts, so it is typically used to load production configuration
# and secrets from environment variables or elsewhere. Do not define
# any compile-time configuration in here, as it won't be applied.
# The block below contains prod specific runtime configuration.
# Start the phoenix server if environment is set and running in a release
if System.get_env("PHX_SERVER") && System.get_env("RELEASE_NAME") do
config :ekeko, EkekoWeb.Endpoint, server: true
end
if config_env() == :prod do
database_url =
System.get_env("DATABASE_URL") ||
raise """
environment variable DATABASE_URL is missing.
For example: ecto://USER:PASS@HOST/DATABASE
"""
maybe_ipv6 = if System.get_env("ECTO_IPV6"), do: [:inet6], else: []
config :ekeko, Ekeko.Repo,
# ssl: true,
url: database_url,
pool_size: String.to_integer(System.get_env("POOL_SIZE") || "10"),
socket_options: maybe_ipv6
# The secret key base is used to sign/encrypt cookies and other secrets.
# A default value is used in config/dev.exs and config/test.exs but you
# want to use a different value for prod and you most likely don't want
# to check this value into version control, so we use an environment
# variable instead.
secret_key_base =
System.get_env("SECRET_KEY_BASE") ||
raise """
environment variable SECRET_KEY_BASE is missing.
You can generate one by calling: mix phx.gen.secret
"""
host = System.get_env("PHX_HOST") || "example.com"
port = String.to_integer(System.get_env("PORT") || "4000")
config :ekeko, EkekoWeb.Endpoint,
url: [host: host, port: 443],
http: [
# Enable IPv6 and bind on all interfaces.
# Set it to {0, 0, 0, 0, 0, 0, 0, 1} for local network only access.
# See the documentation on https://hexdocs.pm/plug_cowboy/Plug.Cowboy.html
# for details about using IPv6 vs IPv4 and loopback vs public addresses.
ip: {0, 0, 0, 0, 0, 0, 0, 0},
port: port
],
secret_key_base: secret_key_base
# ## Using releases
#
# If you are doing OTP releases, you need to instruct Phoenix
# to start each relevant endpoint:
#
# config :ekeko, EkekoWeb.Endpoint, server: true
#
# Then you can assemble a release by calling `mix release`.
# See `mix help release` for more information.
# ## Configuring the mailer
#
# In production you need to configure the mailer to use a different adapter.
# Also, you may need to configure the Swoosh API client of your choice if you
# are not using SMTP. Here is an example of the configuration:
#
# config :ekeko, Ekeko.Mailer,
# adapter: Swoosh.Adapters.Mailgun,
# api_key: System.get_env("MAILGUN_API_KEY"),
# domain: System.get_env("MAILGUN_DOMAIN")
#
# For this example you need include a HTTP client required by Swoosh API client.
# Swoosh supports Hackney and Finch out of the box:
#
# config :swoosh, :api_client, Swoosh.ApiClient.Hackney
#
# See https://hexdocs.pm/swoosh/Swoosh.html#module-installation for details.
end
| 37.348837 | 82 | 0.696762 |
f7b6ca88d8f7fe4987326a8936147b8ae4203fa3 | 134 | ex | Elixir | test/helpers/connections/not_found_connection.ex | qgadrian/instream | 3dc828fe476817d442b83dc5da58ceca56e9886f | [
"Apache-2.0"
] | null | null | null | test/helpers/connections/not_found_connection.ex | qgadrian/instream | 3dc828fe476817d442b83dc5da58ceca56e9886f | [
"Apache-2.0"
] | null | null | null | test/helpers/connections/not_found_connection.ex | qgadrian/instream | 3dc828fe476817d442b83dc5da58ceca56e9886f | [
"Apache-2.0"
] | null | null | null | defmodule Instream.TestHelpers.Connections.NotFoundConnection do
@moduledoc false
use Instream.Connection, otp_app: :instream
end
| 26.8 | 64 | 0.835821 |
f7b6f66247b919ce2d220ef214d5dc7853bbd736 | 3,274 | ex | Elixir | clients/sheets/lib/google_api/sheets/v4/model/histogram_chart_spec.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | null | null | null | clients/sheets/lib/google_api/sheets/v4/model/histogram_chart_spec.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-12-18T09:25:12.000Z | 2020-12-18T09:25:12.000Z | clients/sheets/lib/google_api/sheets/v4/model/histogram_chart_spec.ex | MasashiYokota/elixir-google-api | 975dccbff395c16afcb62e7a8e411fbb58e9ab01 | [
"Apache-2.0"
] | 1 | 2020-10-04T10:12:44.000Z | 2020-10-04T10:12:44.000Z | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Sheets.V4.Model.HistogramChartSpec do
@moduledoc """
A histogram chart. A histogram chart groups data items into bins, displaying each bin as a column of stacked items. Histograms are used to display the distribution of a dataset. Each column of items represents a range into which those items fall. The number of bins can be chosen automatically or specified explicitly.
## Attributes
* `bucketSize` (*type:* `float()`, *default:* `nil`) - By default the bucket size (the range of values stacked in a single column) is chosen automatically, but it may be overridden here. E.g., A bucket size of 1.5 results in buckets from 0 - 1.5, 1.5 - 3.0, etc. Cannot be negative. This field is optional.
* `legendPosition` (*type:* `String.t`, *default:* `nil`) - The position of the chart legend.
* `outlierPercentile` (*type:* `float()`, *default:* `nil`) - The outlier percentile is used to ensure that outliers do not adversely affect the calculation of bucket sizes. For example, setting an outlier percentile of 0.05 indicates that the top and bottom 5% of values when calculating buckets. The values are still included in the chart, they will be added to the first or last buckets instead of their own buckets. Must be between 0.0 and 0.5.
* `series` (*type:* `list(GoogleApi.Sheets.V4.Model.HistogramSeries.t)`, *default:* `nil`) - The series for a histogram may be either a single series of values to be bucketed or multiple series, each of the same length, containing the name of the series followed by the values to be bucketed for that series.
* `showItemDividers` (*type:* `boolean()`, *default:* `nil`) - Whether horizontal divider lines should be displayed between items in each column.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:bucketSize => float(),
:legendPosition => String.t(),
:outlierPercentile => float(),
:series => list(GoogleApi.Sheets.V4.Model.HistogramSeries.t()),
:showItemDividers => boolean()
}
field(:bucketSize)
field(:legendPosition)
field(:outlierPercentile)
field(:series, as: GoogleApi.Sheets.V4.Model.HistogramSeries, type: :list)
field(:showItemDividers)
end
defimpl Poison.Decoder, for: GoogleApi.Sheets.V4.Model.HistogramChartSpec do
def decode(value, options) do
GoogleApi.Sheets.V4.Model.HistogramChartSpec.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Sheets.V4.Model.HistogramChartSpec do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 55.491525 | 452 | 0.735492 |
f7b6fa55608e4ddb1fc0c3ab87bfa595b1ed17d1 | 1,531 | exs | Elixir | mix.exs | thestakeholdercompany/icalendar | 1e36af22a9ea07917f08a6350636e9cfcbf98f29 | [
"MIT"
] | 1 | 2020-06-09T10:42:58.000Z | 2020-06-09T10:42:58.000Z | mix.exs | thestakeholdercompany/icalendar | 1e36af22a9ea07917f08a6350636e9cfcbf98f29 | [
"MIT"
] | null | null | null | mix.exs | thestakeholdercompany/icalendar | 1e36af22a9ea07917f08a6350636e9cfcbf98f29 | [
"MIT"
] | null | null | null | defmodule ICalex.MixProject do
use Mix.Project
@version "0.1.0"
def project do
[
app: :icalex,
version: @version,
elixir: "~> 1.8",
description: description(),
package: package(),
aliases: aliases(),
start_permanent: Mix.env() == :prod,
deps: deps(),
docs: docs()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger, :timex]
]
end
defp description do
"""
    A complete set of tools to build and parse iCalendar data
"""
end
defp package do
[
files: ["lib", "mix.exs", "README.md", "LICENSE.md"],
maintainers: ["Vincent Dupont"],
licenses: ["MIT"],
links: %{
Changelog: "https://github.com/thestakeholdercompany/icalex/blob/master/CHANGELOG.md",
GitHub: "https://github.com/thestakeholdercompany/icalex"
}
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:elixir_uuid, "~> 1.2", only: [:test]},
{:timex, "~> 3.6"},
{:mix_test_watch, "~> 0.8", only: [:test]},
{:ex_doc, "~> 0.21", only: :dev, runtime: false}
]
end
defp docs do
[
main: "ICalex",
formatter_opts: [gfm: true],
source_ref: @version,
source_url: "https://github.com/thestakeholdercompany/icalex",
extras: [
"CHANGELOG.md"
]
]
end
defp aliases do
[
"test.watch": ["test.watch --stale"]
]
end
end
| 21.263889 | 94 | 0.563031 |
f7b737e64c42b4d9354a70fe9820feb9efcda6d3 | 284 | exs | Elixir | backend_initial/priv/repo/migrations/20200126100615_create_todos.exs | bego-talks/e2e-workshop | 75a1d2a691814539ddb8adfe9012483efc53cfe4 | [
"MIT"
] | 7 | 2021-05-08T13:37:05.000Z | 2022-02-16T17:54:00.000Z | backend_initial/priv/repo/migrations/20200126100615_create_todos.exs | bego-talks/e2e-workshop | 75a1d2a691814539ddb8adfe9012483efc53cfe4 | [
"MIT"
] | null | null | null | backend_initial/priv/repo/migrations/20200126100615_create_todos.exs | bego-talks/e2e-workshop | 75a1d2a691814539ddb8adfe9012483efc53cfe4 | [
"MIT"
] | null | null | null | defmodule TodoList.Repo.Migrations.CreateTodos do
use Ecto.Migration
def change do
create table(:todos) do
add(:title, :string)
add(:user_id, references(:users, on_delete: :delete_all))
timestamps()
end
create(index(:todos, [:user_id]))
end
end
| 18.933333 | 63 | 0.665493 |
f7b76d80201359bbc7a85405a79f239940efe871 | 7,168 | ex | Elixir | implementations/elixir/ockam/ockam/lib/ockam/stream/client/publisher.ex | plaxi0s/ockam | 2cc911dff720228bca950d83faf095ea84717981 | [
"Apache-2.0"
] | 1,912 | 2019-01-10T14:17:00.000Z | 2022-03-30T19:16:44.000Z | implementations/elixir/ockam/ockam/lib/ockam/stream/client/publisher.ex | plaxi0s/ockam | 2cc911dff720228bca950d83faf095ea84717981 | [
"Apache-2.0"
] | 1,473 | 2019-01-16T15:14:47.000Z | 2022-03-31T23:44:50.000Z | implementations/elixir/ockam/ockam/lib/ockam/stream/client/publisher.ex | plaxi0s/ockam | 2cc911dff720228bca950d83faf095ea84717981 | [
"Apache-2.0"
] | 219 | 2019-01-11T03:35:13.000Z | 2022-03-31T10:25:56.000Z | defmodule Ockam.Stream.Client.Publisher do
@moduledoc false
use Ockam.Worker
use Ockam.Protocol.Mapping
alias Ockam.Message
alias Ockam.Protocol.Stream, as: StreamProtocol
require Logger
defstruct address: nil,
stream_name: nil,
stream_route: nil,
last_message: 0,
unconfirmed: %{},
unsent: [],
request_timeout: nil,
service_route: nil,
partitions: nil
@request_timeout 10_000
@type message() :: %{request_id: integer(), data: binary()}
@type request_id() :: integer()
@type state() :: %__MODULE__{}
@protocol_mapping Ockam.Protocol.Mapping.mapping([
{:client, StreamProtocol.Create},
{:client, StreamProtocol.Partitioned.Create},
{:client, StreamProtocol.Push},
{:client, Ockam.Protocol.Error},
{:server, Ockam.Protocol.Binary}
])
@impl true
def protocol_mapping() do
@protocol_mapping
end
@impl true
def setup(options, state) do
service_route = Keyword.fetch!(options, :service_route)
stream_name = Keyword.fetch!(options, :stream_name)
partitions = Keyword.fetch!(options, :partitions)
state =
Map.merge(state, %{
stream_name: stream_name,
service_route: service_route,
partitions: partitions
})
state = create_stream(state)
{:ok, struct(__MODULE__, state)}
end
@impl true
def handle_message(%{payload: _} = message, state) do
payload = Message.payload(message)
case decode_payload(payload) do
{:ok, StreamProtocol.Create, %{stream_name: stream_name}} ->
state =
state
|> clear_request_timeout()
|> add_stream(stream_name, Message.return_route(message))
|> send_unsent()
{:ok, state}
## TODO: support multiple partitions
{:ok, StreamProtocol.Partitioned.Create, %{stream_name: stream_name, partition: 0}} ->
state =
state
|> clear_request_timeout()
|> add_stream(stream_name, Message.return_route(message))
|> send_unsent()
{:ok, state}
{:ok, StreamProtocol.Push, %{status: :ok, request_id: request_id, index: index}} ->
Logger.debug("Push response")
state = clear_request_timeout(state)
state = message_confirmed(request_id, index, state)
{:ok, state}
{:ok, StreamProtocol.Push, %{status: :error, request_id: request_id}} ->
## Resend doesn't change the state currently
Logger.error("Resend message #{inspect(request_id)}")
state = clear_request_timeout(state)
state = resend_message(request_id, state)
{:ok, state}
{:ok, Ockam.Protocol.Error, %{reason: reason}} ->
Logger.error("Stream error: #{inspect(reason)}")
{:ok, state}
{:ok, Ockam.Protocol.Binary, data} ->
state = send_message(data, state)
{:ok, state}
other ->
Logger.error("Unexpected message #{inspect(other)}")
{:ok, state}
end
end
def handle_message(:request_timeout, state) do
state = clear_request_timeout(state)
unconfirmed_messages =
Map.get(state, :unconfirmed, %{})
|> Enum.sort_by(fn {id, _msg} -> id end)
|> Enum.map(fn {_id, %{data: msg}} -> msg end)
Logger.info("Messages to re-send: #{inspect(unconfirmed_messages)}")
new_unsent = Map.get(state, :unsent, []) ++ unconfirmed_messages
state =
Map.merge(state, %{
stream_route: nil,
unconfirmed: %{},
unsent: new_unsent
})
state = create_stream(state)
{:ok, state}
end
@spec send_message(binary(), state()) :: state()
def send_message(data, state) do
case initialized?(state) do
true ->
next = state.last_message + 1
message = %{request_id: next, data: data}
Logger.debug("Send push")
state = route_push(message, state)
add_unconfirmed(next, message, state)
false ->
add_unsent(data, state)
end
end
def initialized?(state) do
case Map.get(state, :stream_route) do
nil -> false
_address -> true
end
end
def add_stream(state, stream_name, stream_route) do
Map.merge(state, %{stream_name: stream_name, stream_route: stream_route})
end
def send_unsent(state) do
unsent = Enum.reverse(Map.get(state, :unsent, []))
without_unsent = Map.put(state, :unsent, [])
Enum.reduce(unsent, without_unsent, fn data, send_state ->
send_message(data, send_state)
end)
end
def add_unsent(data, state) do
Map.update(state, :unsent, [data], fn unsent -> [data | unsent] end)
end
def add_unconfirmed(request_id, message, state) do
Map.update(state, :unconfirmed, %{request_id => message}, fn unconfirmed ->
Map.put(unconfirmed, request_id, message)
end)
end
def remove_unconfirmed(request_id, state) do
Map.update(state, :unconfirmed, %{}, fn unconfirmed -> Map.delete(unconfirmed, request_id) end)
end
def get_unconfirmed(request_id, state) do
state |> Map.get(:unconfirmed, %{}) |> Map.fetch(request_id)
end
@spec resend_message(request_id(), state()) :: state()
def resend_message(request_id, state) do
case get_unconfirmed(request_id, state) do
{:ok, message} ->
route_push(message, state)
:error ->
state
end
end
def message_confirmed(request_id, index, state) do
Logger.debug("Message confirmed with index #{inspect(index)}")
remove_unconfirmed(request_id, state)
end
@spec route_push(message(), state()) :: state()
def route_push(message, state) do
encoded = encode_payload(StreamProtocol.Push, message)
route(encoded, Map.get(state, :stream_route), state)
end
@spec create_stream(state()) :: state()
def create_stream(state) do
%{service_route: service_route, stream_name: stream_name, partitions: partitions} = state
Logger.info("create stream #{inspect({service_route, stream_name})}")
encoded =
encode_payload(StreamProtocol.Partitioned.Create, %{
stream_name: stream_name,
partitions: partitions
})
route(encoded, service_route, state)
end
@spec route(binary(), [Ockam.Address.t()], state()) :: state()
def route(payload, route, state) do
Ockam.Router.route(%{
onward_route: route,
return_route: [Map.get(state, :address)],
payload: payload
})
set_request_timeout(state)
end
def set_request_timeout(state) do
state = clear_request_timeout(state)
mon_ref = Process.send_after(self(), :request_timeout, @request_timeout)
Map.put(state, :request_timeout, mon_ref)
end
def clear_request_timeout(state) do
case Map.get(state, :request_timeout) do
nil ->
state
ref ->
Process.cancel_timer(ref)
## Flush the timeout message if it's already received
receive do
:request_timeout -> :ok
after
0 -> :ok
end
Map.put(state, :request_timeout, nil)
end
end
end
| 27.782946 | 99 | 0.626116 |
f7b7750e4ec219d0b8f7d3739d80dc18e7f0914b | 15,946 | ex | Elixir | lib/aws/generated/sfn.ex | qyon-brazil/aws-elixir | f7f21bebffc6776f95ffe9ef563cf368773438af | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/sfn.ex | qyon-brazil/aws-elixir | f7f21bebffc6776f95ffe9ef563cf368773438af | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/sfn.ex | qyon-brazil/aws-elixir | f7f21bebffc6776f95ffe9ef563cf368773438af | [
"Apache-2.0"
] | 1 | 2020-10-28T08:56:54.000Z | 2020-10-28T08:56:54.000Z | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.SFN do
@moduledoc """
AWS Step Functions
AWS Step Functions is a service that lets you coordinate the components of
distributed applications and microservices using visual workflows.
You can use Step Functions to build applications from individual components,
each of which performs a discrete function, or *task*, allowing you to scale and
change applications quickly. Step Functions provides a console that helps
visualize the components of your application as a series of steps. Step
Functions automatically triggers and tracks each step, and retries steps when
there are errors, so your application executes predictably and in the right
order every time. Step Functions logs the state of each step, so you can quickly
diagnose and debug any issues.
Step Functions manages operations and underlying infrastructure to ensure your
application is available at any scale. You can run tasks on AWS, your own
servers, or any system that has access to AWS. You can access and use Step
Functions using the console, the AWS SDKs, or an HTTP API. For more information
about Step Functions, see the * [AWS Step Functions Developer Guide](https://docs.aws.amazon.com/step-functions/latest/dg/welcome.html) *.
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "AWS SFN",
api_version: "2016-11-23",
content_type: "application/x-amz-json-1.0",
credential_scope: nil,
endpoint_prefix: "states",
global?: false,
protocol: "json",
service_id: "SFN",
signature_version: "v4",
signing_name: "states",
target_prefix: "AWSStepFunctions"
}
end
@doc """
Creates an activity.
An activity is a task that you write in any programming language and host on any
machine that has access to AWS Step Functions. Activities must poll Step
Functions using the `GetActivityTask` API action and respond using `SendTask*`
API actions. This function lets Step Functions know the existence of your
activity and returns an identifier for use in a state machine and when polling
from the activity.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
`CreateActivity` is an idempotent API. Subsequent requests won’t create a
duplicate resource if it was already created. `CreateActivity`'s idempotency
check is based on the activity `name`. If a following request has different
`tags` values, Step Functions will ignore these differences and treat it as an
idempotent request of the previous. In this case, `tags` will not be updated,
even if they are different.
"""
def create_activity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateActivity", input, options)
end
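  # Illustrative call (input keys follow the Step Functions CreateActivity API;
  # the activity name is a placeholder):
  #
  #     AWS.SFN.create_activity(client, %{"name" => "my-activity"})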
@doc """
Creates a state machine.
A state machine consists of a collection of states that can do work (`Task`
states), determine to which states to transition next (`Choice` states), stop an
execution with an error (`Fail` states), and so on. State machines are specified
using a JSON-based, structured language. For more information, see [Amazon States
Language](https://docs.aws.amazon.com/step-functions/latest/dg/concepts-amazon-states-language.html)
in the AWS Step Functions User Guide.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
`CreateStateMachine` is an idempotent API. Subsequent requests won’t create a
duplicate resource if it was already created. `CreateStateMachine`'s idempotency
check is based on the state machine `name`, `definition`, `type`,
`LoggingConfiguration` and `TracingConfiguration`. If a following request has a
different `roleArn` or `tags`, Step Functions will ignore these differences and
treat it as an idempotent request of the previous. In this case, `roleArn` and
`tags` will not be updated, even if they are different.
"""
def create_state_machine(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateStateMachine", input, options)
end
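  # Illustrative call; the Amazon States Language definition and the role ARN
  # below are placeholders:
  #
  #     AWS.SFN.create_state_machine(client, %{
  #       "name" => "my-state-machine",
  #       "definition" => ~s({"StartAt": "Done", "States": {"Done": {"Type": "Succeed"}}}),
  #       "roleArn" => "arn:aws:iam::123456789012:role/StepFunctionsRole"
  #     })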
@doc """
Deletes an activity.
"""
def delete_activity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteActivity", input, options)
end
@doc """
Deletes a state machine.
This is an asynchronous operation: It sets the state machine's status to
`DELETING` and begins the deletion process.
  For `EXPRESS` state machines, the deletion will happen eventually (usually less
than a minute). Running executions may emit logs after `DeleteStateMachine` API
is called.
"""
def delete_state_machine(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteStateMachine", input, options)
end
@doc """
Describes an activity.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def describe_activity(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeActivity", input, options)
end
@doc """
Describes an execution.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
This API action is not supported by `EXPRESS` state machines.
"""
def describe_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeExecution", input, options)
end
@doc """
Describes a state machine.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def describe_state_machine(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeStateMachine", input, options)
end
@doc """
Describes the state machine associated with a specific execution.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
This API action is not supported by `EXPRESS` state machines.
"""
def describe_state_machine_for_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DescribeStateMachineForExecution", input, options)
end
@doc """
Used by workers to retrieve a task (with the specified activity ARN) which has
been scheduled for execution by a running state machine.
This initiates a long poll, where the service holds the HTTP connection open and
responds as soon as a task becomes available (i.e. an execution of a task of
this type is needed.) The maximum time the service holds on to the request
before responding is 60 seconds. If no task is available within 60 seconds, the
poll returns a `taskToken` with a null string.
Workers should set their client side socket timeout to at least 65 seconds (5
seconds higher than the maximum time the service may hold the poll request).
Polling with `GetActivityTask` can cause latency in some implementations. See
[Avoid Latency When Polling for Activity Tasks](https://docs.aws.amazon.com/step-functions/latest/dg/bp-activity-pollers.html)
in the Step Functions Developer Guide.
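A minimal worker-loop sketch; the activity ARN and the idea of echoing the task
input back via `send_task_success/3` are illustrative assumptions:

    {:ok, task, _http_response} =
      get_activity_task(client, %{"activityArn" => "arn:aws:states:us-east-1:123456789012:activity:my-activity"})

    case task do
      %{"taskToken" => token, "input" => input} when is_binary(token) and token != "" ->
        send_task_success(client, %{"taskToken" => token, "output" => input})

      _ ->
        # No work arrived within the 60-second poll window; poll again.
        :no_task
    end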
"""
def get_activity_task(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetActivityTask", input, options)
end
@doc """
Returns the history of the specified execution as a list of events.
By default, the results are returned in ascending order of the `timeStamp` of
the events. Use the `reverseOrder` parameter to get the latest events first.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.
This API action is not supported by `EXPRESS` state machines.
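A sketch of paging through all events; it assumes a module where `client` is in
scope, and the execution ARN is illustrative:

    defp all_events(client, execution_arn, token \\ nil, acc \\ []) do
      input = %{"executionArn" => execution_arn, "maxResults" => 100}
      input = if token, do: Map.put(input, "nextToken", token), else: input

      {:ok, body, _http_response} = get_execution_history(client, input)
      acc = acc ++ (body["events"] || [])

      case body["nextToken"] do
        nil -> acc
        next -> all_events(client, execution_arn, next, acc)
      end
    end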
"""
def get_execution_history(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetExecutionHistory", input, options)
end
@doc """
Lists the existing activities.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def list_activities(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListActivities", input, options)
end
@doc """
Lists the executions of a state machine that meet the filtering criteria.
Results are sorted by time, with the most recent execution first.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
This API action is not supported by `EXPRESS` state machines.
"""
def list_executions(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListExecutions", input, options)
end
@doc """
Lists the existing state machines.
If `nextToken` is returned, there are more results available. The value of
`nextToken` is a unique pagination token for each page. Make the call again
using the returned token to retrieve the next page. Keep all other arguments
unchanged. Each pagination token expires after 24 hours. Using an expired
pagination token will return an *HTTP 400 InvalidToken* error.
This operation is eventually consistent. The results are best effort and may not
reflect very recent updates and changes.
"""
def list_state_machines(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListStateMachines", input, options)
end
@doc """
List tags for a given resource.
Tags may only contain Unicode letters, digits, white space, or these symbols: `_
. : / = + - @`.
"""
def list_tags_for_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListTagsForResource", input, options)
end
@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report that the task identified by the `taskToken` failed.
"""
def send_task_failure(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SendTaskFailure", input, options)
end
@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token) pattern to report to Step Functions that the task represented by the specified
`taskToken` is still making progress.
This action resets the `Heartbeat` clock. The `Heartbeat` threshold is specified
in the state machine's Amazon States Language definition (`HeartbeatSeconds`).
This action does not in itself create an event in the execution history.
However, if the task times out, the execution history contains an
`ActivityTimedOut` entry for activities, or a `TaskTimedOut` entry for tasks
using the [job
run](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-sync)
or
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern.
The `Timeout` of a task, defined in the state machine's Amazon States Language
definition, is its maximum allowed duration, regardless of the number of
`SendTaskHeartbeat` requests received. Use `HeartbeatSeconds` to configure the
timeout interval for heartbeats.
"""
def send_task_heartbeat(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SendTaskHeartbeat", input, options)
end
@doc """
Used by activity workers and task states using the
[callback](https://docs.aws.amazon.com/step-functions/latest/dg/connect-to-resource.html#connect-wait-token)
pattern to report that the task identified by the `taskToken` completed
successfully.
"""
def send_task_success(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "SendTaskSuccess", input, options)
end
@doc """
Starts a state machine execution.
`StartExecution` is idempotent. If `StartExecution` is called with the same name
and input as a running execution, the call will succeed and return the same
response as the original request. If the execution is closed or if the input is
different, it will return a 400 `ExecutionAlreadyExists` error. Names can be
reused after 90 days.
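A usage sketch; the ARN, execution name, and payload are illustrative
assumptions, and `input` must be a JSON-encoded string:

    {:ok, %{"executionArn" => execution_arn}, _http_response} =
      start_execution(client, %{
        "stateMachineArn" => "arn:aws:states:us-east-1:123456789012:stateMachine:my-machine",
        "name" => "order-42",
        "input" => ~s({"orderId": 42})
      })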
"""
def start_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartExecution", input, options)
end
@doc """
Starts a Synchronous Express state machine execution.
"""
def start_sync_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartSyncExecution", input, options)
end
@doc """
Stops an execution.
This API action is not supported by `EXPRESS` state machines.
"""
def stop_execution(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopExecution", input, options)
end
@doc """
Add a tag to a Step Functions resource.
An array of key-value pairs. For more information, see [Using Cost Allocation Tags](https://docs.aws.amazon.com/awsaccountbilling/latest/aboutv2/cost-alloc-tags.html)
in the *AWS Billing and Cost Management User Guide*, and [Controlling Access Using IAM
Tags](https://docs.aws.amazon.com/IAM/latest/UserGuide/access_iam-tags.html).
Tags may only contain Unicode letters, digits, white space, or these symbols: `_
. : / = + - @`.
"""
def tag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TagResource", input, options)
end
@doc """
Removes a tag from a Step Functions resource.
"""
def untag_resource(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UntagResource", input, options)
end
@doc """
Updates an existing state machine by modifying its `definition`, `roleArn`, or
`loggingConfiguration`.
Running executions will continue to use the previous `definition` and `roleArn`.
You must include at least one of `definition` or `roleArn` or you will receive a
`MissingRequiredParameter` error.
All `StartExecution` calls within a few seconds will use the updated
`definition` and `roleArn`. Executions started immediately after calling
`UpdateStateMachine` may use the previous state machine `definition` and
`roleArn`.
"""
def update_state_machine(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateStateMachine", input, options)
end
end | 42.981132 | 189 | 0.740875 |
f7b78603f4da18ec6d94c1327db621d2db734d4b | 447 | exs | Elixir | test/phoenix/live_dashboard/live/os_mon_live_test.exs | Blond11516/phoenix_live_dashboard | 9b70670d4ad1dc49ebe2eef45f2f082ff6b03a1b | [
"MIT"
] | null | null | null | test/phoenix/live_dashboard/live/os_mon_live_test.exs | Blond11516/phoenix_live_dashboard | 9b70670d4ad1dc49ebe2eef45f2f082ff6b03a1b | [
"MIT"
] | null | null | null | test/phoenix/live_dashboard/live/os_mon_live_test.exs | Blond11516/phoenix_live_dashboard | 9b70670d4ad1dc49ebe2eef45f2f082ff6b03a1b | [
"MIT"
] | 1 | 2021-02-04T03:06:20.000Z | 2021-02-04T03:06:20.000Z | defmodule Phoenix.LiveDashboard.OSMonLiveTest do
use ExUnit.Case, async: true
import Phoenix.ConnTest
import Phoenix.LiveViewTest
@endpoint Phoenix.LiveDashboardTest.Endpoint
describe "OS mon page" do
test "displays section titles" do
{:ok, _live, rendered} = live(build_conn(), "/dashboard/nonode@nohost/os")
assert rendered =~ "CPU"
assert rendered =~ "Memory"
assert rendered =~ "Disk"
end
end
end
| 26.294118 | 80 | 0.702461 |
f7b794e9b9e8a89bcf30d88ce95f558729e11a4d | 1,385 | ex | Elixir | lib/asciinema/file_store/cached.ex | AnotherKamila/asciinema-server | cafdba3c0461eb77ca0d1b66de07aa943b91f700 | [
"Apache-2.0"
] | null | null | null | lib/asciinema/file_store/cached.ex | AnotherKamila/asciinema-server | cafdba3c0461eb77ca0d1b66de07aa943b91f700 | [
"Apache-2.0"
] | null | null | null | lib/asciinema/file_store/cached.ex | AnotherKamila/asciinema-server | cafdba3c0461eb77ca0d1b66de07aa943b91f700 | [
"Apache-2.0"
] | null | null | null | defmodule Asciinema.FileStore.Cached do
use Asciinema.FileStore
def url(path) do
remote_store().url(path)
end
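# Writes go to the remote store first and are mirrored into the cache store
# (write-through), so subsequent reads can be served from the local cache.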
def put_file(dst_path, src_local_path, content_type, compress \\ false) do
with :ok <- remote_store().put_file(dst_path, src_local_path, content_type, compress),
:ok <- cache_store().put_file(dst_path, src_local_path, content_type, compress) do
:ok
end
end
def serve_file(conn, path, filename) do
remote_store().serve_file(conn, path, filename)
end
def open_file(path, function \\ nil) do
case cache_store().open_file(path, function) do
{:ok, f} ->
{:ok, f}
{:error, :enoent} ->
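# Cache miss: download the file from the remote store into a temporary
# file, warm the cache store with it, and then serve it from the cache.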
with {:ok, tmp_path} <- Briefly.create(),
:ok <- remote_store().download_file(path, tmp_path),
:ok <- cache_store().put_file(path, tmp_path, MIME.from_path(path)),
:ok <- File.rm(tmp_path) do
cache_store().open_file(path, function)
end
otherwise ->
otherwise
end
end
def delete_file(path) do
with :ok <- cache_store().delete_file(path),
:ok <- remote_store().delete_file(path) do
:ok
end
end
defp config do
Application.get_env(:asciinema, __MODULE__)
end
defp remote_store do
Keyword.get(config(), :remote_store)
end
defp cache_store do
Keyword.get(config(), :cache_store)
end
end
| 25.648148 | 91 | 0.636823 |
f7b79b952cda74691e6819f0d860f42f1a5769a4 | 1,650 | ex | Elixir | elixir-to-java/elixir_api_server/lib/elixir_api_server_web.ex | poad/grpc-example | d1b775f6d2e89279cd29191d5d4dbec265bf0bf0 | [
"Apache-2.0"
] | null | null | null | elixir-to-java/elixir_api_server/lib/elixir_api_server_web.ex | poad/grpc-example | d1b775f6d2e89279cd29191d5d4dbec265bf0bf0 | [
"Apache-2.0"
] | 64 | 2021-08-30T23:54:04.000Z | 2022-03-14T21:06:11.000Z | elixir-to-java/elixir_api_server/lib/elixir_api_server_web.ex | poad/grpc-example | d1b775f6d2e89279cd29191d5d4dbec265bf0bf0 | [
"Apache-2.0"
] | null | null | null | defmodule ElixirApiServerWeb do
@moduledoc """
The entrypoint for defining your web interface, such
as controllers, views, channels and so on.
This can be used in your application as:
use ElixirApiServerWeb, :controller
use ElixirApiServerWeb, :view
The definitions below will be executed for every view,
controller, etc, so keep them short and clean, focused
on imports, uses and aliases.
Do NOT define functions inside the quoted expressions
below. Instead, define any helper function in modules
and import those modules here.
"""
def controller do
quote do
use Phoenix.Controller, namespace: ElixirApiServerWeb
import Plug.Conn
import ElixirApiServerWeb.Gettext
alias ElixirApiServerWeb.Router.Helpers, as: Routes
end
end
def view do
quote do
use Phoenix.View,
root: "lib/elixir_api_server_web/templates",
namespace: ElixirApiServerWeb
# Import convenience functions from controllers
import Phoenix.Controller, only: [get_flash: 1, get_flash: 2, view_module: 1]
import ElixirApiServerWeb.ErrorHelpers
import ElixirApiServerWeb.Gettext
alias ElixirApiServerWeb.Router.Helpers, as: Routes
end
end
def router do
quote do
use Phoenix.Router
import Plug.Conn
import Phoenix.Controller
end
end
def channel do
quote do
use Phoenix.Channel
import ElixirApiServerWeb.Gettext
end
end
@doc """
When used, dispatch to the appropriate controller/view/etc.
"""
defmacro __using__(which) when is_atom(which) do
apply(__MODULE__, which, [])
end
end
| 24.626866 | 83 | 0.710303 |
f7b7b852f648bae2154ef0c29dc7efdfe8ecbe7d | 1,646 | exs | Elixir | strega/mix.exs | ThePeej/strega | b9146bb3b401e9c2d0c297ea62a0050415099b29 | [
"MIT"
] | null | null | null | strega/mix.exs | ThePeej/strega | b9146bb3b401e9c2d0c297ea62a0050415099b29 | [
"MIT"
] | null | null | null | strega/mix.exs | ThePeej/strega | b9146bb3b401e9c2d0c297ea62a0050415099b29 | [
"MIT"
] | null | null | null | defmodule Strega.MixProject do
use Mix.Project
def project do
[
app: :strega,
version: "0.1.0",
elixir: "~> 1.5",
elixirc_paths: elixirc_paths(Mix.env()),
compilers: [:phoenix, :gettext] ++ Mix.compilers(),
start_permanent: Mix.env() == :prod,
aliases: aliases(),
deps: deps()
]
end
# Configuration for the OTP application.
#
# Type `mix help compile.app` for more information.
def application do
[
mod: {Strega.Application, []},
extra_applications: [:logger, :runtime_tools]
]
end
# Specifies which paths to compile per environment.
defp elixirc_paths(:test), do: ["lib", "test/support"]
defp elixirc_paths(_), do: ["lib"]
# Specifies your project dependencies.
#
# Type `mix help deps` for examples and options.
defp deps do
[
{:phoenix, "~> 1.4.0"},
{:phoenix_pubsub, "~> 1.1"},
{:phoenix_ecto, "~> 4.0"},
{:ecto_sql, "~> 3.0"},
{:postgrex, ">= 0.0.0"},
{:phoenix_html, "~> 2.11"},
{:phoenix_live_reload, "~> 1.2", only: :dev},
{:gettext, "~> 0.11"},
{:jason, "~> 1.0"},
{:plug_cowboy, "~> 2.0"}
]
end
# Aliases are shortcuts or tasks specific to the current project.
# For example, to create, migrate and run the seeds file at once:
#
# $ mix ecto.setup
#
# See the documentation for `Mix` for more info on aliases.
defp aliases do
[
"ecto.setup": ["ecto.create", "ecto.migrate", "run priv/repo/seeds.exs"],
"ecto.reset": ["ecto.drop", "ecto.setup"],
test: ["ecto.create --quiet", "ecto.migrate", "test"]
]
end
end
| 26.126984 | 79 | 0.575942 |
f7b7befe1bbde686b4e409dcea6fc2c9f2107e18 | 1,896 | exs | Elixir | config/dev.exs | r-frederick/inmytime | c5e6ad5d8c09d42dcd77ec74c09e6da8f044b929 | [
"MIT"
] | 1 | 2018-11-16T14:30:41.000Z | 2018-11-16T14:30:41.000Z | config/dev.exs | r-frederick/inmytime | c5e6ad5d8c09d42dcd77ec74c09e6da8f044b929 | [
"MIT"
] | 9 | 2018-05-17T14:02:04.000Z | 2018-10-03T21:57:18.000Z | config/dev.exs | r-frederick/inmytime | c5e6ad5d8c09d42dcd77ec74c09e6da8f044b929 | [
"MIT"
] | null | null | null | use Mix.Config
# For development, we disable any cache and enable
# debugging and code reloading.
#
# The watchers configuration can be used to run external
# watchers to your application. For example, we use it
# with brunch.io to recompile .js and .css sources.
config :inmytime, InmytimeWeb.Endpoint,
http: [port: 4000],
debug_errors: true,
code_reloader: true,
check_origin: false,
watchers: [node: ["node_modules/brunch/bin/brunch", "watch", "--stdin",
cd: Path.expand("../assets", __DIR__)]]
# ## SSL Support
#
# In order to use HTTPS in development, a self-signed
# certificate can be generated by running the following
# command from your terminal:
#
# openssl req -new -newkey rsa:4096 -days 365 -nodes -x509 -subj "/C=US/ST=Denial/L=Springfield/O=Dis/CN=www.example.com" -keyout priv/server.key -out priv/server.pem
#
# The `http:` config above can be replaced with:
#
# https: [port: 4000, keyfile: "priv/server.key", certfile: "priv/server.pem"],
#
# If desired, both `http:` and `https:` keys can be
# configured to run both http and https servers on
# different ports.
# Watch static and templates for browser reloading.
config :inmytime, InmytimeWeb.Endpoint,
live_reload: [
patterns: [
~r{priv/static/.*(js|css|png|jpeg|jpg|gif|svg)$},
~r{priv/gettext/.*(po)$},
~r{lib/inmytime_web/views/.*(ex)$},
~r{lib/inmytime_web/templates/.*(eex)$}
]
]
# Geolix Configuration
config :geolix,
databases: [
%{
id: :city,
adapter: Geolix.Adapter.MMDB2,
source: "../geoip/GeoLite2-City.mmdb"
}
]
# Do not include metadata nor timestamps in development logs
config :logger, :console, format: "[$level] $message\n"
# Set a higher stacktrace during development. Avoid configuring such
# in production as building large stacktraces may be expensive.
config :phoenix, :stacktrace_depth, 20
| 31.6 | 170 | 0.690401 |
f7b7d6c3114b0dc4d347459a8dba7cbf42588c7d | 12,563 | ex | Elixir | lib/livebook/runtime.ex | apoorv-2204/elixir_learn_work_notebook_setup | 7ea75c1b53e0e9ce7ae89d386cc16b7277a1d86e | [
"Apache-2.0"
] | null | null | null | lib/livebook/runtime.ex | apoorv-2204/elixir_learn_work_notebook_setup | 7ea75c1b53e0e9ce7ae89d386cc16b7277a1d86e | [
"Apache-2.0"
] | null | null | null | lib/livebook/runtime.ex | apoorv-2204/elixir_learn_work_notebook_setup | 7ea75c1b53e0e9ce7ae89d386cc16b7277a1d86e | [
"Apache-2.0"
] | null | null | null | defprotocol Livebook.Runtime do
@moduledoc false
# This protocol defines an interface for code evaluation backends.
#
# Usually a runtime involves a set of processes responsible for
# evaluation, which could be running on a different node, however
# the protocol does not require that.
@typedoc """
An arbitrary term identifying an evaluation container.
A container is an abstraction of an isolated group of evaluations.
Containers are mostly independent and therefore can be evaluated
concurrently (if possible).
Note that every evaluation can use the resulting binding and env
of any previous evaluation, even from a different container.
"""
@type container_ref :: term()
@typedoc """
An arbitrary term identifying an evaluation.
"""
@type evaluation_ref :: term()
@typedoc """
A pair identifying evaluation together with its container.
When the evaluation reference is `nil`, the `locator` points to
a container and may be used to represent its default evaluation
context.
"""
@type locator :: {container_ref(), evaluation_ref() | nil}
@typedoc """
An output emitted during evaluation or as the final result.
For more details on output types see `t:Kino.Output.t/0`.
"""
@type output ::
:ignored
# IO output, adjacent such outputs are treated as a whole
| {:stdout, binary()}
# Standalone text block
| {:text, binary()}
# Markdown content
| {:markdown, binary()}
# A raw image in the given format
| {:image, content :: binary(), mime_type :: binary()}
# JavaScript powered output
| {:js, info :: map()}
# Outputs placeholder
| {:frame, outputs :: list(output()), info :: map()}
# An input field
| {:input, attrs :: map()}
# A control element
| {:control, attrs :: map()}
# Internal output format for errors
| {:error, message :: binary(), type :: :other | :runtime_restart_required}
@typedoc """
Additional information about a completed evaluation.
"""
@type evaluation_response_metadata :: %{
evaluation_time_ms: non_neg_integer(),
code_error: code_error(),
memory_usage: runtime_memory()
}
@typedoc """
Recognised intellisense request.
"""
@type intellisense_request ::
completion_request()
| details_request()
| signature_request()
| format_request()
@typedoc """
Expected intellisense response.
Responding with `nil` indicates there is no relevant reply and
effectively aborts the request, so it's suitable for error cases.
"""
@type intellisense_response ::
nil
| completion_response()
| details_response()
| signature_response()
| format_response()
@typedoc """
Looks up a list of identifiers that are suitable code completions
for the given hint.
"""
@type completion_request :: {:completion, hint :: String.t()}
@type completion_response :: %{
items: list(completion_item())
}
@type completion_item :: %{
label: String.t(),
kind: completion_item_kind(),
detail: String.t() | nil,
documentation: String.t() | nil,
insert_text: String.t()
}
@type completion_item_kind ::
:function | :module | :struct | :interface | :type | :variable | :field | :keyword
@typedoc """
Looks up more details about an identifier found in `column` in
`line`.
"""
@type details_request :: {:details, line :: String.t(), column :: pos_integer()}
@type details_response :: %{
range: %{
from: non_neg_integer(),
to: non_neg_integer()
},
contents: list(String.t())
}
@typedoc """
Looks up a list of function signatures matching the given hint.
The resulting information includes current position in the argument
list.
"""
@type signature_request :: {:signature, hint :: String.t()}
@type signature_response :: %{
active_argument: non_neg_integer(),
signature_items: list(signature_item())
}
@type signature_item :: %{
signature: String.t(),
arguments: list(String.t()),
documentation: String.t() | nil
}
@typedoc """
Formats the given code snippet.
"""
@type format_request :: {:format, code :: String.t()}
@type format_response :: %{
code: String.t() | nil,
code_error: code_error() | nil
}
@typedoc """
A descriptive error pointing to a specific line in the code.
"""
@type code_error :: %{line: pos_integer(), description: String.t()}
@typedoc """
A detailed runtime memory usage.
The runtime may periodically send memory usage updates as
* `{:runtime_memory_usage, runtime_memory()}`
"""
@type runtime_memory :: %{
atom: size_in_bytes(),
binary: size_in_bytes(),
code: size_in_bytes(),
ets: size_in_bytes(),
other: size_in_bytes(),
processes: size_in_bytes(),
total: size_in_bytes()
}
@type size_in_bytes :: non_neg_integer()
@typedoc """
Information about a smart cell kind.
The `kind` attribute is an opaque identifier.
Whenever new smart cells become available the runtime should send
the updated list as
* `{:runtime_smart_cell_definitions, list(smart_cell_definition())}`
"""
@type smart_cell_definition :: %{
kind: String.t(),
name: String.t()
}
@typedoc """
A JavaScript view definition.
See `t:Kino.Output.js_view/0` for details.
"""
@type js_view :: %{
ref: String.t(),
pid: Process.dest(),
assets: %{
archive_path: String.t(),
hash: String.t(),
js_path: String.t()
}
}
@type smart_cell_ref :: String.t()
@type smart_cell_attrs :: map()
@doc """
Connects the caller to the given runtime.
The caller becomes the runtime owner, which makes it the target
for most of the runtime messages and ties the runtime life to the owner.
It is advised for each runtime to have a leading process that is
coupled to the lifetime of the underlying runtime resources. In
such a case the `connect` function may start monitoring this process
and return the monitor reference. This way the caller is notified
when the runtime goes down by listening to the :DOWN message with
that reference.
## Options
* `:runtime_broadcast_to` - the process to send runtime broadcast
events to. Defaults to the owner
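A rough sketch of connecting and reacting to the runtime going down, assuming
`runtime` is an initialized runtime struct:

    ref = Runtime.connect(runtime)

    receive do
      {:DOWN, ^ref, :process, _pid, reason} ->
        IO.puts("runtime went down: " <> inspect(reason))
    end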
"""
@spec connect(t(), keyword()) :: reference()
def connect(runtime, opts \\ [])
@doc """
Disconnects the current owner from the runtime.
This should cleanup the underlying node/processes.
"""
@spec disconnect(t()) :: :ok
def disconnect(runtime)
@doc """
Asynchronously parses and evaluates the given code.
The given `locator` identifies the container where the code should
be evaluated as well as the evaluation reference to store the
resulting context under.
Additionally, `base_locator` points to a previous evaluation to be
used as the starting point of this evaluation. If not applicable,
the previous evaluation reference may be specified as `nil`.
## Communication
During evaluation a number of messages may be sent to the runtime
owner. All captured outputs have the form:
* `{:runtime_evaluation_output, evaluation_ref, output}`
When the evaluation completes, the resulting output and metadata
is sent as:
* `{:runtime_evaluation_response, evaluation_ref, output, metadata}`
Outputs may include input fields. The evaluation may then request
the current value of a previously rendered input by sending
* `{:runtime_evaluation_input, evaluation_ref, reply_to, input_id}`
to the runtime owner who is supposed to reply with
`{:runtime_evaluation_input_reply, reply}` where `reply` is either
`{:ok, value}` or `:error` if no matching input can be found.
If the evaluation state within a container is lost (for example when
a process goes down), the runtime may send
* `{:runtime_container_down, container_ref, message}`
to notify the owner.
## Options
* `:file` - the file considered as the source during evaluation.
This information is relevant for errors formatting and imparts
the value of `__DIR__`
* `:smart_cell_ref` - a reference of the smart cell which code is
to be evaluated, if applicable
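A minimal sketch of a single evaluation; the container and evaluation refs are
arbitrary terms chosen by the caller:

    locator = {:container1, :evaluation1}
    base_locator = {:container1, nil}

    :ok = Runtime.evaluate_code(runtime, "1 + 1", locator, base_locator)

    receive do
      {:runtime_evaluation_response, :evaluation1, output, metadata} ->
        {output, metadata}
    end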
"""
@spec evaluate_code(t(), String.t(), locator(), locator(), keyword()) :: :ok
def evaluate_code(runtime, code, locator, base_locator, opts \\ [])
@doc """
Disposes of an evaluation identified by the given locator.
This can be used to cleanup resources related to an old evaluation
if it is no longer needed.
"""
@spec forget_evaluation(t(), locator()) :: :ok
def forget_evaluation(runtime, locator)
@doc """
Disposes of an evaluation container identified by the given ref.
This should be used to cleanup resources keeping track of the
container and all of its evaluations.
"""
@spec drop_container(t(), container_ref()) :: :ok
def drop_container(runtime, container_ref)
@doc """
Asynchronously handles an intellisense request.
This part of runtime functionality is used to provide language-
and context-specific intellisense features in the text editor.
The response is sent to the `send_to` process as
* `{:runtime_intellisense_response, ref, request, response}`.
The given `base_locator` identifies an evaluation that may be
used as the context when resolving the request (if relevant).
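A completion request sketch:

    ref = make_ref()

    :ok =
      Runtime.handle_intellisense(runtime, self(), ref, {:completion, "Enu"}, {:container1, nil})

    receive do
      {:runtime_intellisense_response, ^ref, _request, response} -> response
    end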
"""
@spec handle_intellisense(t(), pid(), reference(), intellisense_request(), locator()) :: :ok
def handle_intellisense(runtime, send_to, ref, request, base_locator)
@doc """
Synchronously starts a runtime of the same type with the same
parameters.
"""
@spec duplicate(Runtime.t()) :: {:ok, Runtime.t()} | {:error, String.t()}
def duplicate(runtime)
@doc """
Returns true if the given runtime is self-contained.
A standalone runtime always starts fresh and frees all resources
on termination. This may not be the case for runtimes that
connect to an external running system and use it for code evaluation.
"""
@spec standalone?(Runtime.t()) :: boolean()
def standalone?(runtime)
@doc """
Reads file at the given absolute path within the runtime file system.
"""
@spec read_file(Runtime.t(), String.t()) :: {:ok, binary()} | {:error, String.t()}
def read_file(runtime, path)
@doc """
Starts a smart cell of the given kind.
`kind` must point to an available `t:smart_cell_definition/0`, which
was reported by the runtime. The cell gets initialized with `attrs`,
which represent the persisted cell state and determine the current
version of the generated source code. The given `ref` is used to
identify the cell.
The cell may depend on evaluation context to provide a better user
experience, for instance it may suggest relevant variable names.
Similarly to `evaluate_code/5`, `base_locator` must be specified
pointing to the evaluation to use as the context. When the locator
changes, it can be updated with `set_smart_cell_base_locator/3`.
Once the cell starts, the runtime sends the following message
* `{:runtime_smart_cell_started, ref, %{js_view: js_view(), source: String.t()}}`
## Communication
Apart from the regular JS view communication, the cell sends updates
to the runtime owner whenever attrs and the generated source code
change.
* `{:runtime_smart_cell_update, ref, attrs, source}`
The attrs are persisted and may be used to restore the smart cell
state later. Note that for persistence they get serialized and
deserialized as JSON.
"""
@spec start_smart_cell(t(), String.t(), smart_cell_ref(), smart_cell_attrs(), locator()) :: :ok
def start_smart_cell(runtime, kind, ref, attrs, base_locator)
@doc """
Updates the locator used by a smart cell as its context.
See `start_smart_cell/5` for more details.
"""
@spec set_smart_cell_base_locator(t(), smart_cell_ref(), locator()) :: :ok
def set_smart_cell_base_locator(runtime, ref, base_locator)
@doc """
Stops smart cell identified by the given reference.
"""
@spec stop_smart_cell(t(), smart_cell_ref()) :: :ok
def stop_smart_cell(runtime, ref)
end
| 31.329177 | 97 | 0.672292 |
f7b7dcd12e939a8bff31e326822e69780326b3aa | 1,938 | ex | Elixir | day14/lib/day14.ex | bjorng/advent-of-code-2021 | 82c22dfa0ba7e9134e39b9dbc95227bb99f62c8d | [
"Apache-2.0"
] | 10 | 2021-12-01T08:49:00.000Z | 2022-03-24T13:24:50.000Z | day14/lib/day14.ex | bjorng/advent-of-code-2021 | 82c22dfa0ba7e9134e39b9dbc95227bb99f62c8d | [
"Apache-2.0"
] | null | null | null | day14/lib/day14.ex | bjorng/advent-of-code-2021 | 82c22dfa0ba7e9134e39b9dbc95227bb99f62c8d | [
"Apache-2.0"
] | 1 | 2021-12-16T07:09:11.000Z | 2021-12-16T07:09:11.000Z | defmodule Day14 do
def part1(input) do
{template, rules} = parse(input)
rules = Map.new(rules)
Stream.iterate(template, &(next_part1(&1, rules)))
|> Stream.drop(10)
|> Enum.take(1)
|> hd
|> Enum.frequencies
|> Enum.min_max_by(fn {_, freq} -> freq end)
|> result
end
def part2(input) do
{template, rules} = parse(input)
pairs = Enum.zip(template, tl(template))
|> Enum.frequencies
letters = Enum.frequencies(template)
state = {pairs, letters}
Stream.iterate(state, &(next_part2(&1, rules)))
|> Stream.drop(40)
|> Enum.take(1)
|> hd
|> elem(1)
|> Enum.min_max_by(fn {_, freq} -> freq end)
|> result
end
defp result({{_, min}, {_, max}}), do: max - min
defp next_part1([e1, e2 | es], rules) do
key = {e1, e2}
case rules do
%{^key => sub} ->
[e1, sub | next_part1([e2 | es], rules)]
%{} ->
[e1 | next_part1([e2 | es], rules)]
end
end
defp next_part1([_] = es, _), do: es
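# Part 2 never builds the full polymer string. It keeps two frequency maps:
# one for adjacent pairs and one for letters. Each rule {a, b} -> c removes
# the counted {a, b} pairs, adds the same number of {a, c} and {c, b} pairs,
# and credits `count` new occurrences of the inserted letter c.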
defp next_part2({oldpairs, letters}, rules) do
Enum.reduce(rules, {oldpairs, letters}, fn {{a, b}, c}, {pairs, letters} ->
count = Map.get(oldpairs, {a, b}, 0)
pairs = pairs
|> Map.update({a, b}, -count, &(&1 - count))
|> Map.update({a, c}, count, &(&1 + count))
|> Map.update({c, b}, count, &(&1 + count))
letters = Map.update(letters, c, count, &(&1 + count))
{pairs, letters}
end)
end
defp parse(input) do
[template | input] = input
template = String.codepoints(template)
|> Enum.map(&String.to_atom/1)
{template,
Enum.map(input, fn line ->
String.split(line, " -> ")
end)
|> Enum.map(fn transformation ->
[[a, b], [c]] =
Enum.map(transformation, fn string ->
string
|> String.codepoints
|> Enum.map(&String.to_atom/1)
end)
{{a, b}, c}
end)}
end
end
| 24.225 | 79 | 0.53612 |
f7b7faedde2e0168d446aeeb0d97f80d25702472 | 1,077 | ex | Elixir | lib/phoenixcicd_web/channels/user_socket.ex | JackMaarek/phoenix-cicd | 746c66a7b248e089b1c138a81f5e4169e73aac67 | [
"MIT"
] | 1 | 2021-05-20T12:03:40.000Z | 2021-05-20T12:03:40.000Z | lib/phoenixcicd_web/channels/user_socket.ex | JackMaarek/phoenix-cicd | 746c66a7b248e089b1c138a81f5e4169e73aac67 | [
"MIT"
] | null | null | null | lib/phoenixcicd_web/channels/user_socket.ex | JackMaarek/phoenix-cicd | 746c66a7b248e089b1c138a81f5e4169e73aac67 | [
"MIT"
] | 1 | 2021-05-20T12:04:10.000Z | 2021-05-20T12:04:10.000Z | defmodule PhoenixcicdWeb.UserSocket do
use Phoenix.Socket
## Channels
# channel "room:*", PhoenixcicdWeb.RoomChannel
# Socket params are passed from the client and can
# be used to verify and authenticate a user. After
# verification, you can put default assigns into
# the socket that will be set for all channels, ie
#
# {:ok, assign(socket, :user_id, verified_user_id)}
#
# To deny connection, return `:error`.
#
# See `Phoenix.Token` documentation for examples in
# performing token verification on connect.
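#
# A hedged sketch of such verification (the "user socket" salt and the
# "token" param are assumptions made for illustration):
#
#     def connect(%{"token" => token}, socket, _connect_info) do
#       case Phoenix.Token.verify(socket, "user socket", token, max_age: 86_400) do
#         {:ok, user_id} -> {:ok, assign(socket, :user_id, user_id)}
#         {:error, _reason} -> :error
#       end
#     end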
def connect(_params, socket, _connect_info) do
{:ok, socket}
end
# Socket id's are topics that allow you to identify all sockets for a given user:
#
# def id(socket), do: "user_socket:#{socket.assigns.user_id}"
#
# Would allow you to broadcast a "disconnect" event and terminate
# all active sockets and channels for a given user:
#
# PhoenixcicdWeb.Endpoint.broadcast("user_socket:#{user.id}", "disconnect", %{})
#
# Returning `nil` makes this socket anonymous.
def id(_socket), do: nil
end
| 31.676471 | 86 | 0.700093 |
f7b7fc092d06fdc6690a053ee9151cafe384c009 | 613 | ex | Elixir | lib/phone/br.ex | net/phone | 18e1356d2f8d32fe3f95638c3c44bceab0164fb2 | [
"Apache-2.0"
] | null | null | null | lib/phone/br.ex | net/phone | 18e1356d2f8d32fe3f95638c3c44bceab0164fb2 | [
"Apache-2.0"
] | null | null | null | lib/phone/br.ex | net/phone | 18e1356d2f8d32fe3f95638c3c44bceab0164fb2 | [
"Apache-2.0"
] | null | null | null | defmodule Phone.BR do
@moduledoc false
use Helper.Country
def country, do: "Brazil"
def a2, do: "BR"
def a3, do: "BRA"
matcher :modules, [Phone.BR.AC, Phone.BR.AL, Phone.BR.AM, Phone.BR.AP, Phone.BR.BA,
Phone.BR.CE, Phone.BR.DF, Phone.BR.ES, Phone.BR.GO, Phone.BR.MA,
Phone.BR.MT, Phone.BR.MS, Phone.BR.MG, Phone.BR.PA, Phone.BR.PB,
Phone.BR.PE, Phone.BR.PI, Phone.BR.PR, Phone.BR.RJ, Phone.BR.RN,
Phone.BR.RO, Phone.BR.RR, Phone.BR.RS, Phone.BR.SC, Phone.BR.SE,
Phone.BR.SP, Phone.BR.TO]
end
| 36.058824 | 85 | 0.562806 |
f7b807a1c7fde25121b5800d3bc65a89447ac177 | 309 | exs | Elixir | todoapi/priv/repo/migrations/20180105203759_create_todos_users.exs | BruceBC/BakeOffElixirPhoenix | b674b0d2ddc9ba04ff00f21e0eb1586543002467 | [
"MIT"
] | null | null | null | todoapi/priv/repo/migrations/20180105203759_create_todos_users.exs | BruceBC/BakeOffElixirPhoenix | b674b0d2ddc9ba04ff00f21e0eb1586543002467 | [
"MIT"
] | null | null | null | todoapi/priv/repo/migrations/20180105203759_create_todos_users.exs | BruceBC/BakeOffElixirPhoenix | b674b0d2ddc9ba04ff00f21e0eb1586543002467 | [
"MIT"
] | null | null | null | defmodule Todoapi.Repo.Migrations.CreateTodosUsers do
use Ecto.Migration
def change do
create table(:todos_users) do
add :todo_id, references(:todos, type: :uuid)
add :user_id, references(:users, type: :uuid)
end
create unique_index(:todos_users, [:todo_id, :user_id])
end
end
| 23.769231 | 59 | 0.702265 |
f7b814e32c51e90d875f1a7b635e5c17d6d38d05 | 7,521 | exs | Elixir | test/sandbox_test.exs | ckruse/elixir_sandbox | 6c7f76754c84f184f852f408f6169b69440eddaa | [
"Apache-2.0"
] | 22 | 2019-08-08T02:48:15.000Z | 2022-01-23T14:04:07.000Z | test/sandbox_test.exs | ckruse/elixir_sandbox | 6c7f76754c84f184f852f408f6169b69440eddaa | [
"Apache-2.0"
] | 1 | 2020-05-27T00:37:00.000Z | 2020-08-27T02:20:32.000Z | test/sandbox_test.exs | ckruse/elixir_sandbox | 6c7f76754c84f184f852f408f6169b69440eddaa | [
"Apache-2.0"
] | 2 | 2020-11-15T18:20:12.000Z | 2022-03-09T13:58:01.000Z | defmodule SandboxTest do
use ExUnit.Case
doctest Sandbox
def mobility(state, _args) do
state
|> Sandbox.set!("x", 3)
|> Sandbox.set!("feeling", "poo")
|> Sandbox.set!("hunger", 7)
|> Sandbox.let_elixir_run!("move", &SandboxTest.move/2)
|> Sandbox.let_elixir_eval!("feels", fn _state, [p | _] -> to_string(p) <> " feels" end)
end
def move(state, [d | _rest]) do
x = state |> Sandbox.get!("x")
result = x + d
new_state = state |> Sandbox.set!("x", result)
{result, new_state}
end
test "can set value" do
output =
Sandbox.init()
|> Sandbox.set!("some_variable", "some_value")
|> Sandbox.eval!("return some_variable")
assert output == "some_value"
end
test "can set value at path" do
output =
Sandbox.init()
|> Sandbox.set!("some_table", [])
|> Sandbox.set!(["some_table", "some_variable"], "some_value")
|> Sandbox.eval!("return some_table.some_variable")
assert output == "some_value"
end
test "can set value at path with dot notation" do
output =
Sandbox.init()
|> Sandbox.set!("some_table", [])
|> Sandbox.set!("some_table.some_variable", "some_value")
|> Sandbox.eval!("return some_table.some_variable")
assert output == "some_value"
end
test "can set value at path with dot notation and fail with missing table" do
assert catch_error(
Sandbox.init()
|> Sandbox.set!("some_table.some_variable", "some_value")
|> Sandbox.eval!("return some_table.some_variable")
)
end
test "can set value at path with dot notation and force missing table creation" do
output =
Sandbox.init()
|> Sandbox.set!("some_table.some_variable", "some_value", true)
|> Sandbox.eval!("return some_table.some_variable")
assert output == "some_value"
end
test "can set value at path and not need forced table creation" do
output =
Sandbox.init()
|> Sandbox.set!("some_table", [], true)
|> Sandbox.set!(["some_table", "some_variable"], "some_value", true)
|> Sandbox.eval!("return some_table.some_variable")
assert output == "some_value"
end
test "can get value at path with get!" do
output =
Sandbox.init()
|> Sandbox.set!("some_table", [])
|> Sandbox.set!("some_table.some_variable", "some_value")
|> Sandbox.get!(["some_table", "some_variable"])
assert output == "some_value"
end
test "can call function at path" do
output =
Sandbox.init()
|> Sandbox.play_file!("test/lua/animal.lua")
|> Sandbox.eval_function!(["speak"], ["bunny"])
assert output == "silence"
end
test "can call function at path as string" do
output =
Sandbox.init()
|> Sandbox.play_file!("test/lua/animal.lua")
|> Sandbox.eval_function!("speak", ["cow"], 0)
assert output == "moo"
end
test "can call function returning an object" do
output =
Sandbox.init()
|> Sandbox.play_file!("test/lua/animal.lua")
|> Sandbox.eval_function!("voices", [], 0)
assert output == [
{"bunny", "silence"},
{"cat", "meow"},
{"cow", "moo"},
{"dog", "woof"}
]
end
test "can call function at path with single arg wrapped as array" do
output =
Sandbox.init()
|> Sandbox.play_file!("test/lua/animal.lua")
|> Sandbox.eval_function!("speak", "dog", 100_000)
assert output == "woof"
end
test "can handle chunks" do
state = Sandbox.init()
code =
state
|> Sandbox.chunk!("return 7")
output = Sandbox.eval!(state, code)
assert output == 7
end
test "can chunk against file defined functions" do
state = Sandbox.init()
code =
state
|> Sandbox.chunk!("return 7")
output = Sandbox.eval!(state, code)
assert output == 7
end
test "can expose Elixir function" do
state = Sandbox.init()
output =
state
|> Sandbox.let_elixir_eval!("puppy", fn _state, p -> to_string(p) <> " is cute" end)
|> Sandbox.eval_function!("puppy", "dog", 10000)
assert output == "dog is cute"
end
test "can expose Elixir function that reaches reduction limit" do
state = Sandbox.init()
long_function = fn ->
state
|> Sandbox.let_elixir_eval!("puppy", fn _state, p ->
Enum.map(1..10000, fn _ -> to_string(p) <> " is cute" end)
|> List.last()
end)
|> Sandbox.eval_function!("puppy", "dog", 2000)
end
assert_raise(RuntimeError, Sandbox.reduction_error(), long_function)
end
test "can play a Lua function that updates the Lua state" do
state = Sandbox.init()
output =
state
|> Sandbox.play_file!("test/lua/animal.lua")
|> Sandbox.play_function!(["talk"], 4, 10000)
|> Sandbox.get!("counter")
assert output == 4
end
test "can play a Lua function without arguments that updates the Lua state" do
state = Sandbox.init()
output =
state
|> Sandbox.play_file!("test/lua/animal.lua")
|> Sandbox.play_function!("sleep")
|> Sandbox.get!("sleeping")
assert output == true
end
test "can run Lua to update the Lua state with no return value" do
state = Sandbox.init()
{:ok, {_result, new_state}} =
state
|> Sandbox.play_file!("test/lua/animal.lua")
|> Sandbox.run("sleeping = true")
output = new_state |> Sandbox.get!("sleeping")
assert output == true
end
test "can run a Lua function that updates the Lua state" do
state = Sandbox.init()
{output, _new_state} =
state
|> Sandbox.play_file!("test/lua/animal.lua")
|> Sandbox.run_function!("talk", 4, 10000)
assert output == 4
end
test "can chunk a Lua function and then use it" do
state = Sandbox.init()
code = "function growl(n)\nreturn n + 2\nend"
chunk = Sandbox.chunk!(state, code)
output =
state
|> Sandbox.play!(chunk)
|> Sandbox.eval_function!("growl", 7)
assert output == 9
end
test "can play functionality to state through Elixir" do
state = Sandbox.init()
output =
state
|> Sandbox.let_elixir_play!("inherit_mobility", &SandboxTest.mobility/2)
|> Sandbox.eval_file!("test/lua/mobility.lua")
assert output == "happy feels"
end
test "can play functionality to state through Elixir with ok-error tuple" do
state = Sandbox.init()
{:ok, output} =
state
|> Sandbox.let_elixir_play!("inherit_mobility", &SandboxTest.mobility/2)
|> Sandbox.eval_file("test/lua/mobility.lua")
assert output == "happy feels"
end
test "can play functionality to state through Elixir with ok-error tuple and hit reduction limit" do
state = Sandbox.init()
output =
state
|> Sandbox.let_elixir_play!("inherit_mobility", &SandboxTest.mobility/2)
# |> Sandbox.eval_function!("waste_cycles", [1000])
|> Sandbox.eval_file("test/lua/mobility.lua", 1000)
assert {:error, {:reductions, _}} = output
end
test "can get value" do
output =
Sandbox.init()
|> Sandbox.set!("some_variable", "some_value")
|> Sandbox.get!("some_variable")
assert output == "some_value"
end
test "can get value from unsafe init" do
output =
Sandbox.unsafe_init()
|> Sandbox.set!("some_variable", "some_value")
|> Sandbox.get!("some_variable")
assert output == "some_value"
end
end
| 26.024221 | 102 | 0.611089 |
f7b83adad44f8207d61fce92effbcb96862d4e8c | 11,862 | ex | Elixir | lib/aws/generated/sms.ex | onno-vos-dev/aws-elixir | 00f02c2bce689b932948b6a4d603fd44bb5fc0e9 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/sms.ex | onno-vos-dev/aws-elixir | 00f02c2bce689b932948b6a4d603fd44bb5fc0e9 | [
"Apache-2.0"
] | null | null | null | lib/aws/generated/sms.ex | onno-vos-dev/aws-elixir | 00f02c2bce689b932948b6a4d603fd44bb5fc0e9 | [
"Apache-2.0"
] | null | null | null | # WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
# See https://github.com/aws-beam/aws-codegen for more details.
defmodule AWS.SMS do
@moduledoc """
## Product update
As of March 31, 2022, Amazon Web Services will discontinue Server Migration
Service (Amazon Web Services SMS).
Going forward, we recommend [Amazon Web Services Application Migration Service](http://aws.amazon.com/application-migration-service) (Amazon Web
Services MGN) as the primary migration service for lift-and-shift migrations.
You can initiate new migration jobs in Server Migration Service until January 1,
2022. Complete these active migration projects by March 31, 2022. For more
information, see [When to Choose AWS Application Migration Service](http://aws.amazon.com/application-migration-service/when-to-choose-aws-mgn/).
Server Migration Service (Server Migration Service) makes it easier and faster
for you to migrate your on-premises workloads to Amazon Web Services. To learn
more about Server Migration Service, see the following resources:
* [Server Migration Service product page](http://aws.amazon.com/server-migration-service/)
* [Server Migration Service User Guide](https://docs.aws.amazon.com/server-migration-service/latest/userguide/)
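A minimal usage sketch; the client construction and the destructured response
field are assumptions for illustration, and an empty input map simply lists all
replication jobs:

    client = AWS.Client.create("access-key-id", "secret-access-key", "us-east-1")

    {:ok, %{"replicationJobList" => jobs}, _http_response} =
      AWS.SMS.get_replication_jobs(client, %{})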
"""
alias AWS.Client
alias AWS.Request
def metadata do
%AWS.ServiceMetadata{
abbreviation: "SMS",
api_version: "2016-10-24",
content_type: "application/x-amz-json-1.1",
credential_scope: nil,
endpoint_prefix: "sms",
global?: false,
protocol: "json",
service_id: "SMS",
signature_version: "v4",
signing_name: "sms",
target_prefix: "AWSServerMigrationService_V2016_10_24"
}
end
@doc """
Creates an application.
An application consists of one or more server groups. Each server group contains
one or more servers.
"""
def create_app(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateApp", input, options)
end
@doc """
Creates a replication job.
The replication job schedules periodic replication runs to replicate your server
to Amazon Web Services. Each replication run creates an Amazon Machine Image
(AMI).
"""
def create_replication_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "CreateReplicationJob", input, options)
end
@doc """
Deletes the specified application.
Optionally deletes the launched stack associated with the application and all
Server Migration Service replication jobs for servers in the application.
"""
def delete_app(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteApp", input, options)
end
@doc """
Deletes the launch configuration for the specified application.
"""
def delete_app_launch_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteAppLaunchConfiguration", input, options)
end
@doc """
Deletes the replication configuration for the specified application.
"""
def delete_app_replication_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteAppReplicationConfiguration", input, options)
end
@doc """
Deletes the validation configuration for the specified application.
"""
def delete_app_validation_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteAppValidationConfiguration", input, options)
end
@doc """
Deletes the specified replication job.
After you delete a replication job, there are no further replication runs.
Amazon Web Services deletes the contents of the Amazon S3 bucket used to store
Server Migration Service artifacts. The AMIs created by the replication runs are
not deleted.
"""
def delete_replication_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteReplicationJob", input, options)
end
@doc """
Deletes all servers from your server catalog.
"""
def delete_server_catalog(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DeleteServerCatalog", input, options)
end
@doc """
Disassociates the specified connector from Server Migration Service.
After you disassociate a connector, it is no longer available to support
replication jobs.
"""
def disassociate_connector(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "DisassociateConnector", input, options)
end
@doc """
Generates a target change set for a currently launched stack and writes it to an
Amazon S3 object in the customer’s Amazon S3 bucket.
"""
def generate_change_set(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GenerateChangeSet", input, options)
end
@doc """
Generates an CloudFormation template based on the current launch configuration
and writes it to an Amazon S3 object in the customer’s Amazon S3 bucket.
"""
def generate_template(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GenerateTemplate", input, options)
end
@doc """
Retrieves information about the specified application.
"""
def get_app(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetApp", input, options)
end
@doc """
Retrieves the application launch configuration associated with the specified
application.
"""
def get_app_launch_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetAppLaunchConfiguration", input, options)
end
@doc """
Retrieves the application replication configuration associated with the
specified application.
"""
def get_app_replication_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetAppReplicationConfiguration", input, options)
end
@doc """
Retrieves information about a configuration for validating an application.
"""
def get_app_validation_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetAppValidationConfiguration", input, options)
end
@doc """
Retrieves output from validating an application.
"""
def get_app_validation_output(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetAppValidationOutput", input, options)
end
@doc """
Describes the connectors registered with the Server Migration Service.
"""
def get_connectors(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetConnectors", input, options)
end
@doc """
Describes the specified replication job or all of your replication jobs.
"""
def get_replication_jobs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetReplicationJobs", input, options)
end
@doc """
Describes the replication runs for the specified replication job.
"""
def get_replication_runs(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetReplicationRuns", input, options)
end
@doc """
Describes the servers in your server catalog.
Before you can describe your servers, you must import them using
`ImportServerCatalog`.
"""
def get_servers(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "GetServers", input, options)
end
@doc """
Allows application import from Migration Hub.
"""
def import_app_catalog(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ImportAppCatalog", input, options)
end
@doc """
Gathers a complete list of on-premises servers.
Connectors must be installed and monitoring all servers that you want to import.
This call returns immediately, but might take additional time to retrieve all
the servers.
"""
def import_server_catalog(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ImportServerCatalog", input, options)
end
@doc """
Launches the specified application as a stack in CloudFormation.
"""
def launch_app(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "LaunchApp", input, options)
end
@doc """
Retrieves summaries for all applications.
"""
def list_apps(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "ListApps", input, options)
end
@doc """
Provides information to Server Migration Service about whether application
validation is successful.
"""
def notify_app_validation_output(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "NotifyAppValidationOutput", input, options)
end
@doc """
Creates or updates the launch configuration for the specified application.
"""
def put_app_launch_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutAppLaunchConfiguration", input, options)
end
@doc """
Creates or updates the replication configuration for the specified application.
"""
def put_app_replication_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutAppReplicationConfiguration", input, options)
end
@doc """
Creates or updates a validation configuration for the specified application.
"""
def put_app_validation_configuration(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "PutAppValidationConfiguration", input, options)
end
@doc """
Starts replicating the specified application by creating replication jobs for
each server in the application.
"""
def start_app_replication(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartAppReplication", input, options)
end
@doc """
Starts an on-demand replication run for the specified application.
"""
def start_on_demand_app_replication(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartOnDemandAppReplication", input, options)
end
@doc """
Starts an on-demand replication run for the specified replication job.
This replication run starts immediately. This replication run is in addition to
the ones already scheduled.
There is a limit on the number of on-demand replications runs that you can
request in a 24-hour period.
"""
def start_on_demand_replication_run(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StartOnDemandReplicationRun", input, options)
end
@doc """
Stops replicating the specified application by deleting the replication job for
each server in the application.
"""
def stop_app_replication(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "StopAppReplication", input, options)
end
@doc """
Terminates the stack for the specified application.
"""
def terminate_app(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "TerminateApp", input, options)
end
@doc """
Updates the specified application.
"""
def update_app(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateApp", input, options)
end
@doc """
Updates the specified settings for the specified replication job.
"""
def update_replication_job(%Client{} = client, input, options \\ []) do
Request.request_post(client, metadata(), "UpdateReplicationJob", input, options)
end
end
| 35.945455 | 147 | 0.721463 |
f7b841de540300a20407130c1da4bc47af99f7f5 | 20,503 | exs | Elixir | test/courtbot_web/workflows/idaho_test.exs | blakedietz/courtbot | b18d7eb84fd1405b359c8da980e1175bb1738841 | [
"ISC"
] | 6 | 2018-07-25T18:35:57.000Z | 2018-11-03T17:01:02.000Z | test/courtbot_web/workflows/idaho_test.exs | blakedietz/courtbot | b18d7eb84fd1405b359c8da980e1175bb1738841 | [
"ISC"
] | 19 | 2018-11-12T05:29:44.000Z | 2020-04-12T01:04:41.000Z | test/courtbot_web/workflows/idaho_test.exs | blakedietz/courtbot | b18d7eb84fd1405b359c8da980e1175bb1738841 | [
"ISC"
] | 1 | 2020-04-12T00:36:50.000Z | 2020-04-12T00:36:50.000Z | defmodule CourtbotWeb.Workflow.IdahoTest do
use CourtbotTest.Helper.Case
use ExUnitProperties
alias Courtbot.{Case, Repo}
setup do
Repo.insert(CourtbotTest.Helper.Configuration.idaho())
cases = %{
valid:
%Case{}
|> Case.changeset(%{
case_number: "CR01-16-00001",
county: "A",
type: "criminal",
parties: [
%{case_name: "Joe Doe vs Idaho"}
],
hearings: [
%{time: ~T[09:00:00], date: Date.utc_today()}
]
})
|> Repo.insert!(),
no_upcoming_hearings:
%Case{}
|> Case.changeset(%{
case_number: "CR01-16-00002",
county: "A",
type: "criminal",
parties: [
%{case_name: "Joe Doe vs Idaho"}
],
hearings: [
%{time: ~T[09:00:00], date: Date.add(Date.utc_today(), -1)}
]
})
|> Repo.insert!(),
duplicate_case_number_a:
%Case{}
|> Case.changeset(%{
case_number: "CR01-16-00003",
county: "A",
type: "criminal",
parties: [
%{case_name: "Joe Doe vs Idaho"}
],
hearings: [
%{time: ~T[10:00:00], date: Date.utc_today()}
]
})
|> Repo.insert!(),
duplicate_case_number_b:
%Case{}
|> Case.changeset(%{
case_number: "CR01-16-00003",
county: "B",
type: "criminal",
parties: [
%{case_name: "Joe Doe vs Idaho"}
],
hearings: [
%{time: ~T[11:00:00], date: Date.utc_today()}
]
})
|> Repo.insert!()
}
{:ok, cases}
end
@tag :skip
property "check that arbitrary data will yield a sane response", %{valid: case_details} do
check all input <- StreamData.binary() do
try do
for_case case_details do
new_conversation()
|> text(input)
|> response(
"Reply with a case number to sign up for reminders. For example: CR00-19-00011"
)
end
rescue
_e in Plug.Conn.CookieOverflowError -> assert String.length(input) > 4000
end
end
end
test "send hi or help", %{valid: case_details} do
for_case case_details do
new_conversation()
|> text("hi")
|> response("Reply with a case number to sign up for reminders. For example: CR00-19-00011")
new_conversation()
|> text("help")
|> response("Reply with a case number to sign up for reminders. For example: CR00-19-00011")
end
end
test "subscribe to case with county", %{valid: case_details} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. The next hearing is on {date}, at {time}. Would you like a reminder a day before the next hearing date?"
)
|> text("yes")
|> response(
"OK. We will text you a courtesy reminder the day before the hearing date. Note that court schedules may change. You should always confirm your hearing date and time by going to {court_url}."
)
end
end
test "reject subscription to case with county", %{valid: case_details} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. The next hearing is on {date}, at {time}. Would you like a reminder a day before the next hearing date?"
)
|> text("no")
|> response(
"You said \"No\" so we won’t text you a reminder. You should always confirm your hearing date and time by going to {court_url}."
)
end
end
test "send invalid response when asked about subscription to case with county", %{
valid: case_details
} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. The next hearing is on {date}, at {time}. Would you like a reminder a day before the next hearing date?"
)
|> text("asdf")
|> response(
"Sorry, I did not understand. Would you like a courtesy reminder a day before the hearing? Reply YES or NO"
)
end
end
test "attempt to subscribe to case with county but have an invalid county", %{
valid: case_details
} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("invalid")
|> response(
"We did not find case {case_number} in that county. Please check your case number and county. Reply with a case number to sign up for reminders. For example: CR00-19-00011"
)
end
end
test "send start and receive you are not subscribed to any cases", %{valid: case_details} do
for_case case_details do
new_conversation()
|> text("start")
|> response("You are not subscribed to any cases. Reply with a case number to sign up for reminders. For example: CR00-19-00011")
end
end
test "send start when you are subscribed you should get reply with case number", %{valid: case_details} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response("We found a case for {parties} in {county} County. The next hearing is on {date}, at {time}. Would you like a reminder a day before the next hearing date?")
|> text("yes")
new_conversation()
|> text("start")
|> response("Reply with a case number to sign up for reminders. For example: CR00-19-00011")
end
end
test "attempt to subscribe to a case with county but have an invalid county with valid retry",
%{valid: case_details} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("invalid")
|> response(
"We did not find case {case_number} in that county. Please check your case number and county. Reply with a case number to sign up for reminders. For example: CR00-19-00011"
)
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. The next hearing is on {date}, at {time}. Would you like a reminder a day before the next hearing date?"
)
|> text("yes")
|> response(
"OK. We will text you a courtesy reminder the day before the hearing date. Note that court schedules may change. You should always confirm your hearing date and time by going to {court_url}."
)
end
end
test "attempt to subscribe to a case with county but have an invalid county with invalid retry",
%{valid: case_details} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("invalid")
|> response(
"We did not find case {case_number} in that county. Please check your case number and county. Reply with a case number to sign up for reminders. For example: CR00-19-00011"
)
|> text("{county}")
|> response("Reply with a case number to sign up for reminders. For example: CR00-19-00011")
end
end
test "attempt to subscribe to a case you are already subscribed to", %{valid: case_details} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. The next hearing is on {date}, at {time}. Would you like a reminder a day before the next hearing date?"
)
|> text("yes")
|> response(
"OK. We will text you a courtesy reminder the day before the hearing date. Note that court schedules may change. You should always confirm your hearing date and time by going to {court_url}."
)
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"You are already subscribed to this case. To stop getting reminders reply with DELETE."
)
end
end
test "delete subscription to a case you are already subscribed to", %{valid: case_details} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. The next hearing is on {date}, at {time}. Would you like a reminder a day before the next hearing date?"
)
|> text("yes")
|> response(
"OK. We will text you a courtesy reminder the day before the hearing date. Note that court schedules may change. You should always confirm your hearing date and time by going to {court_url}."
)
new_conversation()
|> text("delete {case_number}")
|> response("Are you sure you want to stop getting reminders for {cases}?")
|> text("yes")
|> response(
"OK. We will stop sending reminders. Reply with a case number to sign up for a reminder. For example: CR00-19-00011"
)
end
end
test "reject delete subscription", %{valid: case_details} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. The next hearing is on {date}, at {time}. Would you like a reminder a day before the next hearing date?"
)
|> text("yes")
|> response(
"OK. We will text you a courtesy reminder the day before the hearing date. Note that court schedules may change. You should always confirm your hearing date and time by going to {court_url}."
)
new_conversation()
|> text("delete {case_number}")
|> response("Are you sure you want to stop getting reminders for {cases}?")
|> text("no")
|> response("OK. You said \"No\" so we will still send you reminders.")
end
end
test "send gibberish when deleting subscription", %{valid: case_details} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. The next hearing is on {date}, at {time}. Would you like a reminder a day before the next hearing date?"
)
|> text("yes")
|> response(
"OK. We will text you a courtesy reminder the day before the hearing date. Note that court schedules may change. You should always confirm your hearing date and time by going to {court_url}."
)
new_conversation()
|> text("delete {case_number}")
|> response("Are you sure you want to stop getting reminders for {cases}?")
|> text("asdf")
|> response(
"Sorry, I did not understand. Do you want to stop getting reminders for {cases}? Reply YES or NO"
)
|> text("yes")
|> response(
"OK. We will stop sending reminders. Reply with a case number to sign up for a reminder. For example: CR00-19-00011"
)
end
end
test "delete subscription when you have no subscriptions", %{valid: case_details} do
for_case case_details do
new_conversation()
|> text("delete")
|> response(
"You are not subscribed to any cases. We won't send you any reminders. Reply with a case number to sign up for a reminder. For example: CR00-19-00011"
)
end
end
test "delete all subscriptions when you have subscriptions", %{valid: case_details} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. The next hearing is on {date}, at {time}. Would you like a reminder a day before the next hearing date?"
)
|> text("yes")
|> response(
"OK. We will text you a courtesy reminder the day before the hearing date. Note that court schedules may change. You should always confirm your hearing date and time by going to {court_url}."
)
new_conversation()
|> text("delete")
|> response("Are you sure you want to stop getting reminders for {cases}?")
|> text("yes")
|> response(
"OK. We will stop sending reminders. Reply with a case number to sign up for a reminder. For example: CR00-19-00011"
)
end
end
test "reject deleting all subscriptions when you have subscriptions", %{valid: case_details} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. The next hearing is on {date}, at {time}. Would you like a reminder a day before the next hearing date?"
)
|> text("yes")
|> response(
"OK. We will text you a courtesy reminder the day before the hearing date. Note that court schedules may change. You should always confirm your hearing date and time by going to {court_url}."
)
new_conversation()
|> text("delete")
|> response("Are you sure you want to stop getting reminders for {cases}?")
|> text("no")
|> response("OK. You said \"No\" so we will still send you reminders.")
end
end
test "check that case sensitivity is not an issue when deleting ", %{valid: case_details} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. The next hearing is on {date}, at {time}. Would you like a reminder a day before the next hearing date?"
)
|> text("yes")
|> response(
"OK. We will text you a courtesy reminder the day before the hearing date. Note that court schedules may change. You should always confirm your hearing date and time by going to {court_url}."
)
new_conversation()
|> text("Delete")
|> response("Are you sure you want to stop getting reminders for {cases}?")
|> text("no")
|> response("OK. You said \"No\" so we will still send you reminders.")
end
end
test "respond with gibberish when deleting all subscriptions when you have subscriptions", %{
valid: case_details
} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. The next hearing is on {date}, at {time}. Would you like a reminder a day before the next hearing date?"
)
|> text("yes")
|> response(
"OK. We will text you a courtesy reminder the day before the hearing date. Note that court schedules may change. You should always confirm your hearing date and time by going to {court_url}."
)
new_conversation()
|> text("delete")
|> response("Are you sure you want to stop getting reminders for {cases}?")
|> text("asdf")
|> response(
"Sorry, I did not understand. Do you want to stop getting reminders for {cases}? Reply YES or NO"
)
|> text("yes")
|> response(
"OK. We will stop sending reminders. Reply with a case number to sign up for a reminder. For example: CR00-19-00011"
)
end
end
test "attempt to subscribe to a case without any upcoming hearings", %{
no_upcoming_hearings: case_details
} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. We do not see any future hearings scheduled. You should always confirm your hearing date and time by going to {court_url}. Would you like to be notified when a hearing is scheduled?"
)
|> text("yes")
|> response(
"OK. We will text you when a hearing is scheduled for case {cases}. Note that court schedules may change. You should always confirm your hearing date and time by going to {court_url}."
)
end
end
test "reject attempt to subscribe to a case without any upcoming hearings", %{
no_upcoming_hearings: case_details
} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. We do not see any future hearings scheduled. You should always confirm your hearing date and time by going to {court_url}. Would you like to be notified when a hearing is scheduled?"
)
|> text("no")
|> response(
"You said \"No\" so we won’t text you a reminder. You should always confirm your hearing date and time by going to {court_url}."
)
end
end
test "respond with gibberish while attempting to subscribe to a case without any upcoming hearings",
%{no_upcoming_hearings: case_details} do
for_case case_details do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in {county} County. We do not see any future hearings scheduled. You should always confirm your hearing date and time by going to {court_url}. Would you like to be notified when a hearing is scheduled?"
)
|> text("asdf")
|> response(
"Sorry, I did not understand. Would you like to be notified when a hearing is scheduled? Reply YES or NO"
)
end
end
test "attempt to subscribe to a case with multiple counties", %{
duplicate_case_number_a: case_details_a,
duplicate_case_number_b: case_details_b
} do
for_case case_details_a do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in A County. The next hearing is on {date}, at 10:00 AM. Would you like a reminder a day before the next hearing date?"
)
end
for_case case_details_b do
new_conversation()
|> text("{case_number}")
|> response("We need more information to find your case. Which county is this case in?")
|> text("{county}")
|> response(
"We found a case for {parties} in B County. The next hearing is on {date}, at 11:00 AM. Would you like a reminder a day before the next hearing date?"
)
end
end
end
| 40.044922 | 241 | 0.628542 |
f7b861fe7b182ef20db5f5d53b49b38ba2422cd8 | 555 | ex | Elixir | test/support/bad_power_manager.ex | nerves-networking/vintage_net | 8d4251a0ec995babf8f4d7aa7cc1d74b70646c72 | [
"Apache-2.0"
] | 85 | 2019-05-09T14:54:38.000Z | 2022-02-08T16:52:04.000Z | test/support/bad_power_manager.ex | fhunleth/vintage_net | 215495533cb642eeb172daba08208a454f19b36f | [
"Apache-2.0"
] | 132 | 2019-05-09T15:57:59.000Z | 2022-02-28T16:31:22.000Z | test/support/bad_power_manager.ex | fhunleth/vintage_net | 215495533cb642eeb172daba08208a454f19b36f | [
"Apache-2.0"
] | 14 | 2019-07-08T19:18:23.000Z | 2022-02-08T16:52:05.000Z | defmodule VintageNetTest.BadPowerManager do
@moduledoc false
@behaviour VintageNet.PowerManager
@impl VintageNet.PowerManager
def init(_args) do
{:ok, :no_state}
end
@impl VintageNet.PowerManager
def power_on(_state) do
raise RuntimeError, "oops"
end
@impl VintageNet.PowerManager
def start_powering_off(state) do
{:ok, state, 0}
end
@impl VintageNet.PowerManager
def power_off(state) do
{:ok, state, 0}
end
@impl VintageNet.PowerManager
def handle_info(_msg, state) do
{:noreply, state}
end
end
| 18.5 | 43 | 0.715315 |
f7b87461d39d26049ea5752354b92a7d64027037 | 2,520 | ex | Elixir | clients/content/lib/google_api/content/v2/model/orders_update_shipment_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_update_shipment_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | clients/content/lib/google_api/content/v2/model/orders_update_shipment_request.ex | medikent/elixir-google-api | 98a83d4f7bfaeac15b67b04548711bb7e49f9490 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.Content.V2.Model.OrdersUpdateShipmentRequest do
@moduledoc """
## Attributes
* `carrier` (*type:* `String.t`, *default:* `nil`) - The carrier handling the shipment. Not updated if missing. See shipments[].carrier in the Orders resource representation for a list of acceptable values.
* `deliveryDate` (*type:* `String.t`, *default:* `nil`) - Date on which the shipment has been delivered, in ISO 8601 format. Optional and can be provided only if status is delivered.
* `operationId` (*type:* `String.t`, *default:* `nil`) - The ID of the operation. Unique across all operations for a given order.
* `shipmentId` (*type:* `String.t`, *default:* `nil`) - The ID of the shipment.
* `status` (*type:* `String.t`, *default:* `nil`) - New status for the shipment. Not updated if missing.
Acceptable values are:
- "delivered"
- "undeliverable"
* `trackingId` (*type:* `String.t`, *default:* `nil`) - The tracking ID for the shipment. Not updated if missing.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:carrier => String.t(),
:deliveryDate => String.t(),
:operationId => String.t(),
:shipmentId => String.t(),
:status => String.t(),
:trackingId => String.t()
}
field(:carrier)
field(:deliveryDate)
field(:operationId)
field(:shipmentId)
field(:status)
field(:trackingId)
end
defimpl Poison.Decoder, for: GoogleApi.Content.V2.Model.OrdersUpdateShipmentRequest do
def decode(value, options) do
GoogleApi.Content.V2.Model.OrdersUpdateShipmentRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.Content.V2.Model.OrdersUpdateShipmentRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
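# Illustrative usage sketch (field values are made up): the struct can be built
# directly and encoded via the Poison protocol implementations above.
#
#     request = %GoogleApi.Content.V2.Model.OrdersUpdateShipmentRequest{
#       carrier: "ups",
#       operationId: "operation-1",
#       shipmentId: "shipment-1",
#       status: "delivered",
#       trackingId: "1Z999AA10123456784"
#     }
#     Poison.encode!(request)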
| 38.181818 | 211 | 0.696429 |
f7b87c7195ecae9d365b0dddb42b5fb870cbec90 | 1,062 | ex | Elixir | lib/uf/ap.ex | edmaarcosta/IEEx | fb457be8c03b7a147463cdd2736f9699a699e22d | [
"MIT"
] | null | null | null | lib/uf/ap.ex | edmaarcosta/IEEx | fb457be8c03b7a147463cdd2736f9699a699e22d | [
"MIT"
] | 1 | 2017-12-05T15:58:10.000Z | 2017-12-05T15:58:10.000Z | lib/uf/ap.ex | edmaarcosta/ieex | fb457be8c03b7a147463cdd2736f9699a699e22d | [
"MIT"
] | 1 | 2018-10-01T19:35:47.000Z | 2018-10-01T19:35:47.000Z | defmodule IEEx.UF.AP do
alias IEEx.Util
@peso [9, 8, 7, 6, 5, 4, 3, 2]
def is_valid?(input) do # 030123459
ie = Util.only_numbers(input)
if (String.at(ie, 0) == "0" && String.at(ie, 1) == "3") do
      # turn the IE into a list of integer digits
l_ie = Util.parse_ie(ie)
      # check digit (the last digit of the IE)
[f_dig] = Util.get_digs(l_ie, 1)
      # drop the last digit (the check digit)
rest_ie = List.delete_at(l_ie, -1)
ie_join = Enum.join(rest_ie)
      # check which registration range the IE falls in (determines p and d for the mod-11 check)
{p, d} =
cond do
(ie_join >= "03000001" && ie_join <= "03017000") ->
{5, 0}
(ie_join >= "03017001" && ie_join <= "03019022") ->
{9, 1}
true ->
{0, 0}
end
dig =
rest_ie
|> Util.calc_peso(@peso)
|> calc_mod_11(p, d)
dig == f_dig
else
false
end
end
defp calc_mod_11(value, p, d) do
result = (11 - rem((value + p), 11))
case result do
10 -> 0
11 -> d
_ -> result
end
end
end
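# Illustrative check (uses the sample number from the comment on is_valid?/1 and
# assumes the IEEx.Util helpers behave as their names suggest):
#
#     IEEx.UF.AP.is_valid?("030123459")
#     # weighted sum: 0*9 + 3*8 + 0*7 + 1*6 + 2*5 + 3*4 + 4*3 + 5*2 = 74
#     # range "03000001".."03017000" gives p = 5, and 11 - rem(74 + 5, 11) = 9,
#     # which equals the final digit, so this example should return true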
| 20.823529 | 62 | 0.498117 |
f7b880b5aa7f48428108319d259bd252086f20e1 | 1,737 | ex | Elixir | clients/cloud_debugger/lib/google_api/cloud_debugger/v2/model/update_active_breakpoint_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | 1 | 2018-12-03T23:43:10.000Z | 2018-12-03T23:43:10.000Z | clients/cloud_debugger/lib/google_api/cloud_debugger/v2/model/update_active_breakpoint_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | clients/cloud_debugger/lib/google_api/cloud_debugger/v2/model/update_active_breakpoint_request.ex | matehat/elixir-google-api | c1b2523c2c4cdc9e6ca4653ac078c94796b393c3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This class is auto generated by the elixir code generator program.
# Do not edit the class manually.
defmodule GoogleApi.CloudDebugger.V2.Model.UpdateActiveBreakpointRequest do
@moduledoc """
Request to update an active breakpoint.
## Attributes
* `breakpoint` (*type:* `GoogleApi.CloudDebugger.V2.Model.Breakpoint.t`, *default:* `nil`) - Updated breakpoint information.
The field `id` must be set.
The agent must echo all Breakpoint specification fields in the update.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:breakpoint => GoogleApi.CloudDebugger.V2.Model.Breakpoint.t()
}
field(:breakpoint, as: GoogleApi.CloudDebugger.V2.Model.Breakpoint)
end
defimpl Poison.Decoder, for: GoogleApi.CloudDebugger.V2.Model.UpdateActiveBreakpointRequest do
def decode(value, options) do
GoogleApi.CloudDebugger.V2.Model.UpdateActiveBreakpointRequest.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.CloudDebugger.V2.Model.UpdateActiveBreakpointRequest do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
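# Illustrative usage sketch (the Breakpoint field shown is an assumption based on
# the note above that the `id` field must be set):
#
#     %GoogleApi.CloudDebugger.V2.Model.UpdateActiveBreakpointRequest{
#       breakpoint: %GoogleApi.CloudDebugger.V2.Model.Breakpoint{id: "bp-123"}
#     }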
| 35.44898 | 128 | 0.757628 |
f7b8fcbf0793b4c76b4676df0ab50ae2b500a65d | 1,968 | ex | Elixir | lib/job_board_web/controllers/state_controller.ex | TDogVoid/job_board | 23793917bd1cc4e68bccce737b971093030a31eb | [
"MIT"
] | null | null | null | lib/job_board_web/controllers/state_controller.ex | TDogVoid/job_board | 23793917bd1cc4e68bccce737b971093030a31eb | [
"MIT"
] | null | null | null | lib/job_board_web/controllers/state_controller.ex | TDogVoid/job_board | 23793917bd1cc4e68bccce737b971093030a31eb | [
"MIT"
] | null | null | null | defmodule JobBoardWeb.StateController do
use JobBoardWeb, :controller
alias JobBoard.States
alias JobBoard.States.State
plug JobBoardWeb.Plugs.RequireAuth
plug JobBoardWeb.Plugs.RequireAdmin
def index(conn, _params) do
states = States.list_states()
render(conn, "index.html", states: states, pagetitle: "List of States")
end
def new(conn, _params) do
changeset = States.change_state(%State{})
render(conn, "new.html", changeset: changeset, pagetitle: "New State")
end
def create(conn, %{"state" => state_params}) do
case States.create_state(state_params) do
{:ok, state} ->
conn
|> put_flash(:info, "State created successfully.")
|> redirect(to: Routes.state_path(conn, :show, state))
{:error, %Ecto.Changeset{} = changeset} ->
render(conn, "new.html", changeset: changeset, pagetitle: "New State")
end
end
def show(conn, %{"id" => id}) do
state = States.get_state!(id)
render(conn, "show.html", state: state, pagetitle: state.name)
end
def edit(conn, %{"id" => id}) do
state = States.get_state!(id)
changeset = States.change_state(state)
render(conn, "edit.html", state: state, changeset: changeset, pagetitle: "Edit State")
end
def update(conn, %{"id" => id, "state" => state_params}) do
state = States.get_state!(id)
case States.update_state(state, state_params) do
{:ok, state} ->
conn
|> put_flash(:info, "State updated successfully.")
|> redirect(to: Routes.state_path(conn, :show, state))
{:error, %Ecto.Changeset{} = changeset} ->
render(conn, "edit.html", state: state, changeset: changeset, pagetitle: "Edit State")
end
end
def delete(conn, %{"id" => id}) do
state = States.get_state!(id)
{:ok, _state} = States.delete_state(state)
conn
|> put_flash(:info, "State deleted successfully.")
|> redirect(to: Routes.state_path(conn, :index))
end
end
| 30.75 | 94 | 0.647866 |
f7b92a638d70e9fa0e42e666eb97ab794cf9ae28 | 82 | ex | Elixir | testData/org/elixir_lang/parser_definition/matched_comparison_operation_parsing_test_case/UnaryNonNumericOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 1,668 | 2015-01-03T05:54:27.000Z | 2022-03-25T08:01:20.000Z | testData/org/elixir_lang/parser_definition/matched_comparison_operation_parsing_test_case/UnaryNonNumericOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 2,018 | 2015-01-01T22:43:39.000Z | 2022-03-31T20:13:08.000Z | testData/org/elixir_lang/parser_definition/matched_comparison_operation_parsing_test_case/UnaryNonNumericOperation.ex | keyno63/intellij-elixir | 4033e319992c53ddd42a683ee7123a97b5e34f02 | [
"Apache-2.0"
] | 145 | 2015-01-15T11:37:16.000Z | 2021-12-22T05:51:02.000Z | !one != not two
!one == not two
!one =~ not two
!one !== not two
!one === not two
| 13.666667 | 16 | 0.54878 |
f7b941d70014b1f6d1a83bd83bf9f150080a9885 | 360 | exs | Elixir | 074.exs | jotakami/ex-euler | 0b7f1b2343c5339ccbaaaba9b8f8d0088b112df2 | [
"MIT"
] | null | null | null | 074.exs | jotakami/ex-euler | 0b7f1b2343c5339ccbaaaba9b8f8d0088b112df2 | [
"MIT"
] | null | null | null | 074.exs | jotakami/ex-euler | 0b7f1b2343c5339ccbaaaba9b8f8d0088b112df2 | [
"MIT"
] | null | null | null | target = 60
memo = Map.new([
{ 169, 3},
{ 871, 2},
{ 872, 2},
{ 1454, 3},
{ 45361, 2},
{ 45362, 2},
{363601, 3}
])
Euler.count_stream
|> Stream.take(999999)
|> Enum.reduce({0, memo}, fn x, {c, memo} ->
{n, memo} = Euler.digit_factorial_chain(x, memo)
if n == target, do: {c+1, memo}, else: {c, memo}
end)
|> elem(0)
|> IO.puts | 20 | 53 | 0.516667 |
f7b943dda6a47befa6f2b8a7f224303397ac723a | 118 | ex | Elixir | lib/cowguest/models/post.ex | cedretaber/cowguest | dd41ca95f19820de3707e4b1afa04f901a9e9670 | [
"MIT"
] | null | null | null | lib/cowguest/models/post.ex | cedretaber/cowguest | dd41ca95f19820de3707e4b1afa04f901a9e9670 | [
"MIT"
] | null | null | null | lib/cowguest/models/post.ex | cedretaber/cowguest | dd41ca95f19820de3707e4b1afa04f901a9e9670 | [
"MIT"
] | null | null | null | defmodule Cowguest.Models.Post do
@moduledoc false
@derive [Poison.Encoder]
defstruct [:id, :text, :name]
end
| 14.75 | 33 | 0.711864 |
f7b94fe932f6b36bd1e67c4f143a097948909409 | 433 | ex | Elixir | test/support/repo/postgres.ex | prehnRA/ecto_cellar | b3ff3ab4dd3932600b58df7f71ba2fdf4a20015a | [
"Apache-2.0"
] | 42 | 2022-03-18T21:54:17.000Z | 2022-03-31T22:03:19.000Z | test/support/repo/postgres.ex | prehnRA/ecto_cellar | b3ff3ab4dd3932600b58df7f71ba2fdf4a20015a | [
"Apache-2.0"
] | 8 | 2022-03-17T06:04:18.000Z | 2022-03-31T01:36:03.000Z | test/support/repo/postgres.ex | tashirosota/model_changes | 3ff04b676d9e7eafad177be1760216cd46d3d34a | [
"Apache-2.0"
] | 1 | 2022-03-29T14:24:39.000Z | 2022-03-29T14:24:39.000Z | defmodule Postgres.Repo do
use Ecto.Repo,
otp_app: :ecto_cellar,
adapter: Ecto.Adapters.Postgres
def init(_, opts) do
{:ok,
Keyword.merge(opts,
username: "postgres",
password: "postgres",
database: "ecto_cellar_postgres_test",
hostname: "localhost",
pool: Ecto.Adapters.SQL.Sandbox,
show_sensitive_data_on_connection_error: true,
pool_size: 10
)}
end
end
| 22.789474 | 53 | 0.644342 |
f7b956bd84910eb3c6e159123f8f6d15413c1ff7 | 557 | exs | Elixir | Elixir/elixirexamples/test/keywordslist_test.exs | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | 8 | 2016-08-14T12:35:16.000Z | 2021-01-26T04:05:31.000Z | Elixir/elixirexamples/test/keywordslist_test.exs | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | null | null | null | Elixir/elixirexamples/test/keywordslist_test.exs | kujua/erlang-elixir-imperative-bookcompanion | 7bc9f033bacd0f8744ec6bcee3932794d594fe69 | [
"Apache-2.0"
] | 5 | 2016-08-18T22:12:19.000Z | 2020-02-17T18:52:41.000Z | defmodule KeywordListsTest do
use ExUnit.Case
test "create_pizza_order" do
ret = KeywordLists.create_pizza_order
assert ret == [{:margerita, 1},{:calzone, 2}]
end
test "add_to_order" do
ret = KeywordLists.add_to_order
assert ret == [margerita: 1, calzone: 2, roma: 1]
end
test "jump_the_queue" do
ret = KeywordLists.jump_the_queue
assert ret == [roma: 1, margerita: 1, calzone: 2]
end
test "add_the_same" do
ret = KeywordLists.add_the_same
assert ret == [margerita: 1, calzone: 2, margerita: 2]
end
end
| 23.208333 | 58 | 0.680431 |
f7b9784d17db33c7ed68b6a861c1742001e76a86 | 692 | exs | Elixir | mix.exs | ygunayer/potcu | d7b82ad9227161c93aedec03c4a1902c1d43780b | [
"MIT"
] | 1 | 2020-05-14T18:52:05.000Z | 2020-05-14T18:52:05.000Z | mix.exs | ygunayer/potcu | d7b82ad9227161c93aedec03c4a1902c1d43780b | [
"MIT"
] | null | null | null | mix.exs | ygunayer/potcu | d7b82ad9227161c93aedec03c4a1902c1d43780b | [
"MIT"
] | null | null | null | defmodule Potcu.MixProject do
use Mix.Project
def project do
[
app: :potcu,
version: "0.1.0",
elixir: "~> 1.10",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
mod: {Potcu.Application, []},
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
      {:nostrum, git: "git@github.com:ygunayer/nostrum.git"},
{:poison, "~> 3.1"}
# {:dep_from_hexpm, "~> 0.3.0"},
# {:dep_from_git, git: "https://github.com/elixir-lang/my_dep.git", tag: "0.1.0"}
]
end
end
| 21.625 | 87 | 0.567919 |
f7b97ba04451a0ad4feeb36429feee51d405f3ca | 128 | exs | Elixir | deps/mojito/.formatter.exs | PrecisionNutrition/frogger | 96374fe0ac0ea616205f6678fe088802572e922e | [
"MIT"
] | null | null | null | deps/mojito/.formatter.exs | PrecisionNutrition/frogger | 96374fe0ac0ea616205f6678fe088802572e922e | [
"MIT"
] | null | null | null | deps/mojito/.formatter.exs | PrecisionNutrition/frogger | 96374fe0ac0ea616205f6678fe088802572e922e | [
"MIT"
] | null | null | null | # Used by "mix format"
[
inputs: ["mix.exs", "{config,lib,test}/**/*.{ex,exs}"],
line_length: 80,
trailing_comma: true,
]
| 18.285714 | 57 | 0.59375 |
f7b9ce6443e69c40ec9d1f3f61be7526d1a5d1fc | 213 | exs | Elixir | create_fun_umbrella/apps/create_fun_endpoint/test/create_fun_endpoint/controllers/page_controller_test.exs | Vorzious/CreateFun | 5744c913ef706bc29062fa90a8ec5de12d267dab | [
"MIT"
] | null | null | null | create_fun_umbrella/apps/create_fun_endpoint/test/create_fun_endpoint/controllers/page_controller_test.exs | Vorzious/CreateFun | 5744c913ef706bc29062fa90a8ec5de12d267dab | [
"MIT"
] | 9 | 2018-06-17T09:54:03.000Z | 2018-06-17T09:55:20.000Z | create_fun_umbrella/apps/create_fun_endpoint/test/create_fun_endpoint/controllers/page_controller_test.exs | Vorzious/CreateFun | 5744c913ef706bc29062fa90a8ec5de12d267dab | [
"MIT"
] | 1 | 2018-06-05T18:38:01.000Z | 2018-06-05T18:38:01.000Z | defmodule CreateFunEndpoint.PageControllerTest do
use CreateFunEndpoint.ConnCase
test "GET /", %{conn: conn} do
conn = get conn, "/"
assert html_response(conn, 200) =~ "Welcome to Phoenix!"
end
end
| 23.666667 | 60 | 0.704225 |
f7b9dd4b93010cb7aab7a2f2f9d5d6c9a3816a43 | 4,517 | ex | Elixir | lib/grapevine/telnet/mssp_client.ex | shanesveller/grapevine | fe74ade1adff88dfe4c1ab55fee3902dbb4664fe | [
"MIT"
] | null | null | null | lib/grapevine/telnet/mssp_client.ex | shanesveller/grapevine | fe74ade1adff88dfe4c1ab55fee3902dbb4664fe | [
"MIT"
] | null | null | null | lib/grapevine/telnet/mssp_client.ex | shanesveller/grapevine | fe74ade1adff88dfe4c1ab55fee3902dbb4664fe | [
"MIT"
] | null | null | null | defmodule Grapevine.Telnet.MSSPClient do
@moduledoc """
  Telnet client callbacks for checking and recording MSSP data
"""
require Logger
alias Grapevine.Telnet
alias Grapevine.Telnet.Client
alias Grapevine.Telnet.MSSP
alias Grapevine.Telnet.MSSPClient.Check
alias Grapevine.Telnet.MSSPClient.Record
alias Grapevine.Telnet.Options
@behaviour Client
def start_link(opts) do
Client.start_link(__MODULE__, opts)
end
@impl true
def init(state, opts) do
Process.send_after(self(), {:text_mssp_request}, 10_000)
Process.send_after(self(), {:stop}, 20_000)
generate_state(state, opts)
end
defp generate_state(state, opts) do
case opts[:type] do
:check ->
Check.init(opts, state)
:record ->
Record.init(opts, state)
end
end
@impl true
def connected(_state), do: :ok
@impl true
def disconnected(_state), do: :ok
@impl true
def process_option(state, {:mssp, data}) do
maybe_forward("mssp/received", data, state)
state.mssp_module.record_option(state, data)
:telemetry.execute([:grapevine, :telnet, :mssp, :option, :success], 1, state)
Logger.debug("Shutting down MSSP check", type: :mssp)
{:stop, :normal, state}
end
def process_option(state, _option), do: {:noreply, state}
@impl true
def receive(state, data) do
state = Map.put(state, :mssp_buffer, Map.get(state, :mssp_buffer, "") <> data)
case Options.text_mssp?(state.mssp_buffer) do
true ->
record_text_mssp(state)
false ->
{:noreply, state}
end
end
@impl true
def handle_info({:text_mssp_request}, state) do
:gen_tcp.send(state.socket, "mssp-request\n")
:telemetry.execute([:grapevine, :telnet, :mssp, :text, :sent], 1, state)
{:noreply, Map.put(state, :mssp_buffer, <<>>)}
end
def handle_info({:stop}, state) do
maybe_forward("mssp/terminated", %{}, state)
state.mssp_module.record_fail(state)
Telnet.record_no_mssp(state.host, state.port)
:telemetry.execute([:grapevine, :telnet, :mssp, :failed], 1, state)
{:stop, :normal, state}
end
@doc """
Record MSSP data sent via plain text
"""
def record_text_mssp(state) do
case MSSP.parse_text(state.mssp_buffer) do
:error ->
{:noreply, state}
data ->
maybe_forward("mssp/received", data, state)
state.mssp_module.record_text(state, data)
:telemetry.execute([:grapevine, :telnet, :mssp, :text, :success], 1, state)
{:stop, :normal, state}
end
end
defp maybe_forward(event, message, state) do
case Map.get(state, :channel) do
nil ->
:ok
channel ->
Web.Endpoint.broadcast("mssp:#{channel}", event, message)
end
end
defmodule Record do
@moduledoc """
Record player counts from MSSP
"""
alias Grapevine.Games
alias Grapevine.Statistics
def init(opts, state) do
connection = Keyword.get(opts, :connection)
state
|> Map.put(:mssp_module, __MODULE__)
|> Map.put(:connection, connection)
|> Map.put(:game, connection.game)
|> Map.put(:host, connection.host)
|> Map.put(:port, connection.port)
end
def record_option(state, data) do
Games.seen_on_mssp(state.game)
Games.connection_has_mssp(state.connection)
maybe_set_user_agent(state, data)
players = String.to_integer(data["PLAYERS"])
Statistics.record_mssp_players(state.game, players, Timex.now())
end
def record_text(state, data) do
record_option(state, data)
end
def record_fail(state) do
Games.connection_has_no_mssp(state.connection)
end
defp maybe_set_user_agent(state, data) do
case Map.get(data, "CODEBASE") do
nil ->
:ok
codebase ->
Games.record_metadata(state.game, %{user_agent: codebase})
end
end
end
defmodule Check do
@moduledoc """
Check MSSP for a game
"""
alias Grapevine.Telnet
def init(opts, state) do
state
|> Map.put(:mssp_module, __MODULE__)
|> Map.put(:host, Keyword.get(opts, :host))
|> Map.put(:port, Keyword.get(opts, :port))
|> Map.put(:channel, Keyword.get(opts, :channel))
end
def record_option(state, data) do
Telnet.record_mssp_response(state.host, state.port, data)
end
def record_text(state, data) do
Telnet.record_mssp_response(state.host, state.port, data)
end
def record_fail(_state) do
:ok
end
end
end
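# Illustrative usage sketch (option values are made up): a one-off MSSP check is
# started with type: :check plus host/port/channel, while the scheduled recording
# flow passes type: :record together with a connection struct instead.
#
#     Grapevine.Telnet.MSSPClient.start_link(
#       type: :check,
#       host: "example-mud.com",
#       port: 4000,
#       channel: "abc123"
#     )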
| 23.773684 | 83 | 0.64534 |
f7b9f4c188cdcc4dfdc3c8fc450f0b48271d2614 | 710 | exs | Elixir | apps/dtask_codec/mix.exs | fehu/elixir-dtask | 93b39a1acb616cdc7b4fffb4950e82021ef5b0f6 | [
"MIT"
] | null | null | null | apps/dtask_codec/mix.exs | fehu/elixir-dtask | 93b39a1acb616cdc7b4fffb4950e82021ef5b0f6 | [
"MIT"
] | null | null | null | apps/dtask_codec/mix.exs | fehu/elixir-dtask | 93b39a1acb616cdc7b4fffb4950e82021ef5b0f6 | [
"MIT"
] | null | null | null | defmodule DTask.Task.Codec.MixProject do
use Mix.Project
def project do
[
app: :dtask_codec,
version: "0.2.1",
build_path: "../../_build",
config_path: "../../config/config.exs",
deps_path: "../../deps",
lockfile: "../../mix.lock",
elixir: "~> 1.12",
start_permanent: Mix.env() == :prod,
deps: deps()
]
end
# Run "mix help compile.app" to learn about applications.
def application do
[
extra_applications: [:logger]
]
end
# Run "mix help deps" to learn about dependencies.
defp deps do
[
{:dtask, in_umbrella: true},
{:jason, "~> 1.2"},
{:stream_data, "~> 0.5", only: [:test]}
]
end
end
| 20.882353 | 59 | 0.546479 |
f7b9fe84fd849ebde5af269f8e5af59ef87ecd5e | 4,057 | ex | Elixir | apps/nerves_hub_www/lib/nerves_hub_www_web/live/deployment_live/show.ex | acrogenesis/nerves_hub_web | 27f651dd64b40a034254b50805884f4efd679957 | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_www/lib/nerves_hub_www_web/live/deployment_live/show.ex | acrogenesis/nerves_hub_web | 27f651dd64b40a034254b50805884f4efd679957 | [
"Apache-2.0"
] | null | null | null | apps/nerves_hub_www/lib/nerves_hub_www_web/live/deployment_live/show.ex | acrogenesis/nerves_hub_web | 27f651dd64b40a034254b50805884f4efd679957 | [
"Apache-2.0"
] | null | null | null | defmodule NervesHubWWWWeb.DeploymentLive.Show do
use NervesHubWWWWeb, :live_view
  alias NervesHubWebCore.{Accounts, AuditLogs, Deployments, Products}
def render(assigns) do
NervesHubWWWWeb.DeploymentView.render("show.html", assigns)
end
def mount(
_params,
%{
"auth_user_id" => user_id,
"org_id" => org_id,
"product_id" => product_id,
"deployment_id" => deployment_id
},
socket
) do
socket =
socket
|> assign_new(:user, fn -> Accounts.get_user!(user_id) end)
|> assign_new(:org, fn -> Accounts.get_org!(org_id) end)
|> assign_new(:product, fn -> Products.get_product!(product_id) end)
|> assign_new(:deployment, fn -> Deployments.get_deployment!(deployment_id) end)
|> audit_log_assigns()
{:ok, socket}
rescue
e ->
socket_error(socket, live_view_error(e))
end
# Catch-all to handle when LV sessions change.
# Typically this is after a deploy when the
# session structure in the module has changed
# for mount/3
def mount(_, _, socket) do
socket_error(socket, live_view_error(:update))
end
def handle_event(
"delete",
_val,
%{assigns: %{org: org, deployment: deployment, product: product, user: user}} = socket
) do
case Deployments.delete_deployment(deployment) do
{:ok, _} ->
AuditLogs.audit!(user, deployment, :delete, %{id: deployment.id, name: deployment.name})
socket =
socket
|> put_flash(:info, "Deployment deleted")
|> redirect(to: Routes.deployment_path(socket, :index, org.name, product.name))
{:noreply, socket}
{:error, error} ->
{:noreply,
put_flash(socket, :error, "Error occurred deleting deployment: #{inspect(error)}")}
end
end
def handle_event(
"paginate",
%{"page" => page_num},
%{assigns: %{audit_log_ids: ids, paginate_opts: paginate_opts}} = socket
) do
    # This LiveView stores a list of all of its audit log ids. On a paginate
    # call, it computes the index offset from the requested page, then slices
    # out page_size ids starting at that index. Only those specific records
    # are then queried from AuditLogs.
page_num = String.to_integer(page_num)
start_index = (page_num - 1) * paginate_opts.page_size
audit_logs = Enum.slice(ids, start_index, paginate_opts.page_size) |> AuditLogs.from_ids()
socket =
socket
|> assign(:audit_logs, audit_logs)
|> assign(:paginate_opts, %{paginate_opts | page_number: page_num})
{:noreply, socket}
end
def handle_event(
"toggle_active",
%{"isactive" => value},
%{assigns: %{deployment: deployment, user: user}} = socket
) do
{:ok, updated_deployment} = Deployments.update_deployment(deployment, %{is_active: value})
AuditLogs.audit!(user, deployment, :update, %{is_active: value})
{:noreply, assign(socket, :deployment, updated_deployment)}
end
def handle_event(
"toggle_health_state",
_params,
%{assigns: %{deployment: deployment, user: user}} = socket
) do
params = %{healthy: !deployment.healthy}
socket =
case Deployments.update_deployment(deployment, params) do
{:ok, updated_deployment} ->
AuditLogs.audit!(user, deployment, :update, params)
assign(socket, :deployment, updated_deployment)
{:error, _changeset} ->
put_flash(socket, :error, "Failed to mark health state")
end
{:noreply, socket}
end
defp audit_log_assigns(%{assigns: %{deployment: deployment}} = socket) do
all_logs = AuditLogs.logs_for_feed(deployment)
paginate_opts = %{page_number: 1, page_size: 10}
socket
|> assign(:audit_logs, Enum.slice(all_logs, 0, paginate_opts.page_size))
|> assign(:audit_log_ids, Enum.map(all_logs, & &1.id))
|> assign(:paginate_opts, paginate_opts)
|> assign(:resource_id, deployment.id)
end
end
| 32.198413 | 96 | 0.643825 |
f7ba0a182f88b375131ee8ebcfe732b4d1d13df1 | 960 | ex | Elixir | lib/sbanken_monitor/agent.ex | entertainyou/sbanken_monitor | 6125a7d29c80b049233b2db74184091a9d67721a | [
"MIT"
] | null | null | null | lib/sbanken_monitor/agent.ex | entertainyou/sbanken_monitor | 6125a7d29c80b049233b2db74184091a9d67721a | [
"MIT"
] | 1 | 2018-11-28T10:48:41.000Z | 2018-11-28T12:49:05.000Z | lib/sbanken_monitor/agent.ex | entertainyou/sbanken_monitor | 6125a7d29c80b049233b2db74184091a9d67721a | [
"MIT"
] | null | null | null | defmodule SbankenMonitor.Agent do
@moduledoc false
defmacro __using__(opts) do
interval = Keyword.get(opts, :interval)
immediate = Keyword.get(opts, :immediate, false)
quote do
use GenServer
require Logger
def start_link(args) do
Logger.debug("#{__MODULE__} start_link args: #{inspect(args)}")
GenServer.start_link(__MODULE__, args, name: __MODULE__)
end
def init(_args) do
Logger.debug("#{__MODULE__} init")
state = %__MODULE__.State{}
schedule()
if unquote(immediate) do
{:ok, do_work(state)}
else
{:ok, state}
end
end
defp schedule() do
Process.send_after(self(), :work, unquote(interval))
end
def handle_info(:work, state) do
schedule()
new_state = do_work(state)
Logger.debug("#{__MODULE__} handle_info")
{:noreply, new_state}
end
end
end
end
| 23.414634 | 71 | 0.59375 |
f7ba18cea0fc560a1a0552e3ad17ae3605258dc6 | 80 | exs | Elixir | test/hexedio_web/views/page_view_test.exs | netsudo/hexedio | 3e0e1ff7cfffcd704ba60394d0a38c4d8608e100 | [
"MIT"
] | null | null | null | test/hexedio_web/views/page_view_test.exs | netsudo/hexedio | 3e0e1ff7cfffcd704ba60394d0a38c4d8608e100 | [
"MIT"
] | 3 | 2020-07-16T05:37:16.000Z | 2022-03-16T08:58:28.000Z | test/hexedio_web/views/page_view_test.exs | netsudo/hexedio | 3e0e1ff7cfffcd704ba60394d0a38c4d8608e100 | [
"MIT"
] | null | null | null | defmodule HexedioWeb.PageViewTest do
use HexedioWeb.ConnCase, async: true
end
| 20 | 38 | 0.825 |
f7ba268fa995f36ddaa4afa8941583a7dd967a24 | 37,849 | exs | Elixir | lib/elixir/test/elixir/enum_test.exs | ashneyderman/elixir | 9c4646848a0b57ce918e6c5dc7494e9b4e0a0ec2 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/enum_test.exs | ashneyderman/elixir | 9c4646848a0b57ce918e6c5dc7494e9b4e0a0ec2 | [
"Apache-2.0"
] | null | null | null | lib/elixir/test/elixir/enum_test.exs | ashneyderman/elixir | 9c4646848a0b57ce918e6c5dc7494e9b4e0a0ec2 | [
"Apache-2.0"
] | null | null | null | Code.require_file "test_helper.exs", __DIR__
defmodule EnumTest.List do
use ExUnit.Case, async: true
test :empty? do
assert Enum.empty?([])
refute Enum.empty?([1, 2, 3])
refute Enum.empty?(1..3)
end
test :member? do
assert Enum.member?([1, 2, 3], 2)
refute Enum.member?([], 0)
refute Enum.member?([1, 2, 3], 0)
assert Enum.member?(1..3, 2)
refute Enum.member?(1..3, 0)
end
test :count do
assert Enum.count([1, 2, 3]) == 3
assert Enum.count([]) == 0
end
test :count_fun do
assert Enum.count([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == 1
assert Enum.count([], fn(x) -> rem(x, 2) == 0 end) == 0
end
test :all? do
assert Enum.all?([2, 4, 6], fn(x) -> rem(x, 2) == 0 end)
refute Enum.all?([2, 3, 4], fn(x) -> rem(x, 2) == 0 end)
assert Enum.all?([2, 4, 6])
refute Enum.all?([2, nil, 4])
assert Enum.all?([])
end
test :any? do
refute Enum.any?([2, 4, 6], fn(x) -> rem(x, 2) == 1 end)
assert Enum.any?([2, 3, 4], fn(x) -> rem(x, 2) == 1 end)
refute Enum.any?([false, false, false])
assert Enum.any?([false, true, false])
assert Enum.any?([:foo, false, false])
refute Enum.any?([false, nil, false])
refute Enum.any?([])
end
test :at do
assert Enum.at([2, 4, 6], 0) == 2
assert Enum.at([2, 4, 6], 2) == 6
assert Enum.at([2, 4, 6], 4) == nil
assert Enum.at([2, 4, 6], 4, :none) == :none
assert Enum.at([2, 4, 6], -2) == 4
assert Enum.at([2, 4, 6], -4) == nil
end
test :concat_1 do
assert Enum.concat([[1, [2], 3], [4], [5, 6]]) == [1, [2], 3, 4, 5, 6]
assert Enum.concat(1..3, []) == [1, 2, 3]
assert Enum.concat([[], []]) == []
assert Enum.concat([[]]) == []
assert Enum.concat([]) == []
assert Enum.concat([1..5, fn acc, _ -> acc end, [1]]) == [1, 2, 3, 4, 5, 1]
end
test :concat_2 do
assert Enum.concat([], [1]) == [1]
assert Enum.concat([1, [2], 3], [4, 5]) == [1, [2], 3, 4, 5]
assert Enum.concat(1..3, []) == [1, 2, 3]
assert Enum.concat([], []) == []
assert Enum.concat(fn acc, _ -> acc end, [1]) == [1]
end
test :fetch! do
assert Enum.fetch!([2, 4, 6], 0) == 2
assert Enum.fetch!([2, 4, 6], 2) == 6
assert Enum.fetch!([2, 4, 6], -2) == 4
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!([2, 4, 6], 4)
end
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!([2, 4, 6], -4)
end
end
test :dedup do
assert Enum.dedup([1, 1, 2, 1, 1, 2, 1]) == [1, 2, 1, 2, 1]
assert Enum.dedup([2, 1, 1, 2, 1]) == [2, 1, 2, 1]
assert Enum.dedup([1, 2, 3, 4]) == [1, 2, 3, 4]
assert Enum.dedup([1, 1.0, 2.0, 2]) == [1, 1.0, 2.0, 2]
assert Enum.dedup([]) == []
assert Enum.dedup([nil, nil, true, {:value, true}]) == [nil, true, {:value, true}]
assert Enum.dedup([nil]) == [nil]
end
test :dedup_by do
assert Enum.dedup_by([{1, :x}, {2, :y}, {2, :z}, {1, :x}], fn {x, _} -> x end)
== [{1, :x}, {2, :y}, {1, :x}]
assert Enum.dedup_by([5, 1, 2, 3, 2, 1], fn x -> x > 2 end) == [5, 1, 3, 2]
end
test :drop do
assert Enum.drop([1, 2, 3], 0) == [1, 2, 3]
assert Enum.drop([1, 2, 3], 1) == [2, 3]
assert Enum.drop([1, 2, 3], 2) == [3]
assert Enum.drop([1, 2, 3], 3) == []
assert Enum.drop([1, 2, 3], 4) == []
assert Enum.drop([1, 2, 3], -1) == [1, 2]
assert Enum.drop([1, 2, 3], -2) == [1]
assert Enum.drop([1, 2, 3], -4) == []
assert Enum.drop([], 3) == []
end
test :drop_while do
assert Enum.drop_while([1, 2, 3, 4, 3, 2, 1], fn(x) -> x <= 3 end) == [4, 3, 2, 1]
assert Enum.drop_while([1, 2, 3], fn(_) -> false end) == [1, 2, 3]
assert Enum.drop_while([1, 2, 3], fn(x) -> x <= 3 end) == []
assert Enum.drop_while([], fn(_) -> false end) == []
end
test :find do
assert Enum.find([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil
assert Enum.find([2, 4, 6], 0, fn(x) -> rem(x, 2) == 1 end) == 0
assert Enum.find([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) == 3
end
test :find_value do
assert Enum.find_value([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil
assert Enum.find_value([2, 4, 6], 0, fn(x) -> rem(x, 2) == 1 end) == 0
assert Enum.find_value([2, 3, 4], fn(x) -> rem(x, 2) == 1 end)
end
test :find_index do
assert Enum.find_index([2, 4, 6], fn(x) -> rem(x, 2) == 1 end) == nil
assert Enum.find_index([2, 3, 4], fn(x) -> rem(x, 2) == 1 end) == 1
end
test :each do
assert Enum.each([], fn(x) -> x end) == :ok
assert Enum.each([1, 2, 3], fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok
assert Process.get(:enum_test_each) == 6
after
Process.delete(:enum_test_each)
end
test :fetch do
assert Enum.fetch([2, 4, 6], 0) == {:ok, 2}
assert Enum.fetch([2, 4, 6], 2) == {:ok, 6}
assert Enum.fetch([2, 4, 6], 4) == :error
assert Enum.fetch([2, 4, 6], -2) == {:ok, 4}
assert Enum.fetch([2, 4, 6], -4) == :error
end
test :filter do
assert Enum.filter([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == [2]
assert Enum.filter([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == [2, 4, 6]
end
test :filter_with_match do
assert Enum.filter([1, 2, 3], &match?(1, &1)) == [1]
assert Enum.filter([1, 2, 3], &match?(x when x < 3, &1)) == [1, 2]
assert Enum.filter([1, 2, 3], &match?(_, &1)) == [1, 2, 3]
end
test :filter_map do
assert Enum.filter_map([1, 2, 3], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4]
assert Enum.filter_map([2, 4, 6], fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4, 8, 12]
end
test :flat_map do
assert Enum.flat_map([], fn(x) -> [x, x] end) == []
assert Enum.flat_map([1, 2, 3], fn(x) -> [x, x] end) == [1, 1, 2, 2, 3, 3]
assert Enum.flat_map([1, 2, 3], fn(x) -> x..x+1 end) == [1, 2, 2, 3, 3, 4]
end
test :flat_map_reduce do
assert Enum.flat_map_reduce([1, 2, 3], 0, &{[&1, &2], &1 + &2}) ==
{[1, 0, 2, 1, 3, 3], 6}
assert Enum.flat_map_reduce(1..100, 0, fn i, acc ->
if acc < 3, do: {[i], acc + 1}, else: {:halt, acc}
end) == {[1, 2, 3], 3}
end
test :group_by do
assert Enum.group_by([], fn -> nil end) == %{}
assert Enum.group_by(1..6, &rem(&1, 3)) ==
%{0 => [6, 3], 1 => [4, 1], 2 => [5, 2]}
result = Enum.group_by(1..6, %{3 => :default}, &rem(&1, 3))
assert result[0] == [6, 3]
assert result[3] == :default
end
test :into do
assert Enum.into([a: 1, b: 2], %{}) == %{a: 1, b: 2}
assert Enum.into([a: 1, b: 2], %{c: 3}) == %{a: 1, b: 2, c: 3}
assert Enum.into(%{a: 1, b: 2}, []) == [a: 1, b: 2]
assert Enum.into([1, 2, 3], "numbers: ", &to_string/1) == "numbers: 123"
end
test :intersperse do
assert Enum.intersperse([], true) == []
assert Enum.intersperse([1], true) == [1]
assert Enum.intersperse([1, 2, 3], true) == [1, true, 2, true, 3]
end
test :join do
assert Enum.join([], " = ") == ""
assert Enum.join([1, 2, 3], " = ") == "1 = 2 = 3"
assert Enum.join([1, "2", 3], " = ") == "1 = 2 = 3"
assert Enum.join([1, 2, 3]) == "123"
assert Enum.join(["", "", 1, 2, "", 3, "", "\n"], ";") == ";;1;2;;3;;\n"
assert Enum.join([""]) == ""
end
test :map_join do
assert Enum.map_join([], " = ", &(&1 * 2)) == ""
assert Enum.map_join([1, 2, 3], " = ", &(&1 * 2)) == "2 = 4 = 6"
assert Enum.map_join([1, 2, 3], &(&1 * 2)) == "246"
assert Enum.map_join(["", "", 1, 2, "", 3, "", "\n"], ";", &(&1)) == ";;1;2;;3;;\n"
assert Enum.map_join([""], "", &(&1)) == ""
end
test :join_empty do
fun = fn (acc, _) -> acc end
assert Enum.join(fun, ".") == ""
assert Enum.map_join(fun, ".", &(&1 + 0)) == ""
end
test :map do
assert Enum.map([], fn x -> x * 2 end) == []
assert Enum.map([1, 2, 3], fn x -> x * 2 end) == [2, 4, 6]
end
test :map_reduce do
assert Enum.map_reduce([], 1, fn(x, acc) -> {x * 2, x + acc} end) == {[], 1}
assert Enum.map_reduce([1, 2, 3], 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 4, 6], 7}
end
test :partition do
assert Enum.partition([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == {[2], [1, 3]}
assert Enum.partition([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == {[2, 4, 6], []}
end
test :reduce do
assert Enum.reduce([], 1, fn(x, acc) -> x + acc end) == 1
assert Enum.reduce([1, 2, 3], 1, fn(x, acc) -> x + acc end) == 7
assert Enum.reduce([1, 2, 3], fn(x, acc) -> x + acc end) == 6
assert_raise Enum.EmptyError, fn ->
Enum.reduce([], fn(x, acc) -> x + acc end)
end
end
test :reduce_while do
assert Enum.reduce_while(1..100, 0, fn i, acc ->
if i <= 3, do: {:cont, acc + i}, else: {:halt, acc}
end) == 6
assert Enum.reduce_while([1, 2, 3], 1, fn i, acc -> {:cont, acc + i} end) == 7
assert Enum.reduce_while([1, 2, 3], 1, fn _i, acc -> {:halt, acc} end) == 1
assert Enum.reduce_while([], 0, fn _i, acc -> {:cont, acc} end) == 0
end
test :reject do
assert Enum.reject([1, 2, 3], fn(x) -> rem(x, 2) == 0 end) == [1, 3]
assert Enum.reject([2, 4, 6], fn(x) -> rem(x, 2) == 0 end) == []
end
test :reverse do
assert Enum.reverse([]) == []
assert Enum.reverse([1, 2, 3]) == [3, 2, 1]
assert Enum.reverse([1, 2, 3], [4, 5, 6]) == [3, 2, 1, 4, 5, 6]
end
test :reverse_slice do
assert Enum.reverse_slice([], 1, 2) == []
assert Enum.reverse_slice([1, 2, 3], 0, 0) == [1, 2, 3]
assert Enum.reverse_slice([1, 2, 3], 0, 1) == [1, 2, 3]
assert Enum.reverse_slice([1, 2, 3], 0, 2) == [2, 1, 3]
assert Enum.reverse_slice([1, 2, 3], 0, 20000000) == [3, 2, 1]
assert Enum.reverse_slice([1, 2, 3], 100, 2) == [1, 2, 3]
assert Enum.reverse_slice([1, 2, 3], 10, 10) == [1, 2, 3]
end
test :random_1 do
# corner cases, independent of the seed
assert_raise Enum.EmptyError, fn -> Enum.random([]) end
assert Enum.random([1]) == 1
# set a fixed seed so the test can be deterministic
# please note the order of following assertions is important
seed1 = {1406, 407414, 139258}
seed2 = {1406, 421106, 567597}
:random.seed(seed1)
assert Enum.random([1, 2]) == 2
assert Enum.random([1, 2, 3]) == 2
assert Enum.random([1, 2, 3, 4]) == 4
assert Enum.random([1, 2, 3, 4, 5]) == 1
:random.seed(seed2)
assert Enum.random([1, 2]) == 1
assert Enum.random([1, 2, 3]) == 3
assert Enum.random([1, 2, 3, 4]) == 1
assert Enum.random([1, 2, 3, 4, 5]) == 5
end
test :random_2 do
# corner cases, independent of the seed
assert_raise FunctionClauseError, fn -> Enum.random([1, 2], -1) end
assert Enum.random([], 0) == []
assert Enum.random([], 3) == []
assert Enum.random([1], 0) == []
assert Enum.random([1], 2) == [1]
assert Enum.random([1, 2], 0) == []
# set a fixed seed so the test can be deterministic
# please note the order of following assertions is important
seed1 = {1406, 407414, 139258}
seed2 = {1406, 421106, 567597}
:random.seed(seed1)
assert Enum.random([1, 2, 3, 4, 5], 1) == [2]
assert Enum.random([1, 2, 3, 4, 5], 2) == [4, 1]
assert Enum.random([1, 2, 3, 4, 5], 3) == [1, 3, 5]
assert Enum.random([1, 2, 3, 4, 5], 4) == [3, 5, 1, 2]
assert Enum.random([1, 2, 3, 4, 5], 5) == [1, 2, 5, 4, 3]
assert Enum.random([1, 2, 3, 4, 5], 6) == [2, 4, 5, 3, 1]
:random.seed(seed2)
assert Enum.random([1, 2, 3, 4, 5], 1) == [1]
assert Enum.random([1, 2, 3, 4, 5], 2) == [1, 5]
assert Enum.random([1, 2, 3, 4, 5], 3) == [5, 1, 3]
assert Enum.random([1, 2, 3, 4, 5], 4) == [4, 5, 2, 1]
assert Enum.random([1, 2, 3, 4, 5], 5) == [4, 3, 1, 5, 2]
assert Enum.random([1, 2, 3, 4, 5], 6) == [5, 2, 1, 4, 3]
# assert that every item in the sample comes from the input list
list = for _<-1..100, do: make_ref
for x <- Enum.random(list, 50) do
assert Enum.find(list, &(&1 == x))
end
end
test :scan do
assert Enum.scan([1, 2, 3, 4, 5], &(&1 + &2)) == [1, 3, 6, 10, 15]
assert Enum.scan([], &(&1 + &2)) == []
assert Enum.scan([1, 2, 3, 4, 5], 0, &(&1 + &2)) == [1, 3, 6, 10, 15]
assert Enum.scan([], 0, &(&1 + &2)) == []
end
test :shuffle do
# set a fixed seed so the test can be deterministic
:random.seed(1374, 347975, 449264)
assert Enum.shuffle([1, 2, 3, 4, 5]) == [2, 4, 1, 5, 3]
end
test :sort do
assert Enum.sort([5, 3, 2, 4, 1]) == [1, 2, 3, 4, 5]
assert Enum.sort([5, 3, 2, 4, 1], &(&1 > &2)) == [5, 4, 3, 2, 1]
end
test :sort_by do
collection = [
[other_data: 1, sorted_data: 5],
[other_data: 3, sorted_data: 4],
[other_data: 4, sorted_data: 3],
[other_data: 2, sorted_data: 2],
[other_data: 5, sorted_data: 1]
]
assert Enum.sort_by(
collection,
&(&1[:sorted_data])
) == [
[other_data: 5, sorted_data: 1],
[other_data: 2, sorted_data: 2],
[other_data: 4, sorted_data: 3],
[other_data: 3, sorted_data: 4],
[other_data: 1, sorted_data: 5]
]
assert Enum.sort_by(collection, &(&1[:sorted_data]), &>=/2) == collection
end
test :split do
assert Enum.split([1, 2, 3], 0) == {[], [1, 2, 3]}
assert Enum.split([1, 2, 3], 1) == {[1], [2, 3]}
assert Enum.split([1, 2, 3], 2) == {[1, 2], [3]}
assert Enum.split([1, 2, 3], 3) == {[1, 2, 3], []}
assert Enum.split([1, 2, 3], 4) == {[1, 2, 3], []}
assert Enum.split([], 3) == {[], []}
assert Enum.split([1, 2, 3], -1) == {[1, 2], [3]}
assert Enum.split([1, 2, 3], -2) == {[1], [2, 3]}
assert Enum.split([1, 2, 3], -3) == {[], [1, 2, 3]}
assert Enum.split([1, 2, 3], -10) == {[], [1, 2, 3]}
end
test :split_while do
assert Enum.split_while([1, 2, 3], fn(_) -> false end) == {[], [1, 2, 3]}
assert Enum.split_while([1, 2, 3], fn(_) -> true end) == {[1, 2, 3], []}
assert Enum.split_while([1, 2, 3], fn(x) -> x > 2 end) == {[], [1, 2, 3]}
assert Enum.split_while([1, 2, 3], fn(x) -> x > 3 end) == {[], [1, 2, 3]}
assert Enum.split_while([1, 2, 3], fn(x) -> x < 3 end) == {[1, 2], [3]}
assert Enum.split_while([], fn(_) -> true end) == {[], []}
end
test :sum do
assert Enum.sum([]) == 0
assert Enum.sum([1]) == 1
assert Enum.sum([1, 2, 3]) == 6
assert Enum.sum([1.1, 2.2, 3.3]) == 6.6
assert_raise ArithmeticError, fn ->
Enum.sum([{}])
end
assert_raise ArithmeticError, fn ->
Enum.sum([1, {}])
end
end
test :take do
assert Enum.take([1, 2, 3], 0) == []
assert Enum.take([1, 2, 3], 1) == [1]
assert Enum.take([1, 2, 3], 2) == [1, 2]
assert Enum.take([1, 2, 3], 3) == [1, 2, 3]
assert Enum.take([1, 2, 3], 4) == [1, 2, 3]
assert Enum.take([1, 2, 3], -1) == [3]
assert Enum.take([1, 2, 3], -2) == [2, 3]
assert Enum.take([1, 2, 3], -4) == [1, 2, 3]
assert Enum.take([], 3) == []
end
test :take_every do
assert Enum.take_every([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 2) == [1, 3, 5, 7, 9]
assert Enum.take_every([], 2) == []
assert Enum.take_every([1, 2], 2) == [1]
assert Enum.take_every([1, 2, 3], 0) == []
assert_raise FunctionClauseError, fn ->
Enum.take_every([1, 2, 3], -1)
end
end
test :take_while do
assert Enum.take_while([1, 2, 3], fn(x) -> x > 3 end) == []
assert Enum.take_while([1, 2, 3], fn(x) -> x <= 1 end) == [1]
assert Enum.take_while([1, 2, 3], fn(x) -> x <= 3 end) == [1, 2, 3]
assert Enum.take_while([], fn(_) -> true end) == []
end
test :to_list do
assert Enum.to_list([]) == []
assert Enum.to_list(1..3) == [1, 2, 3]
end
test :uniq_by do
assert Enum.uniq_by([1, 2, 3, 2, 1], fn x -> x end) == [1, 2, 3]
end
test :uniq do
assert Enum.uniq([5, 1, 2, 3, 2, 1]) == [5, 1, 2, 3]
assert Enum.uniq([1, 2, 3, 2, 1], fn x -> x end) == [1, 2, 3]
end
test :zip do
assert Enum.zip([:a, :b], [1, 2]) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b], [1, 2, 3, 4]) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b, :c, :d], [1, 2]) == [{:a, 1}, {:b, 2}]
assert Enum.zip([], [1]) == []
assert Enum.zip([1], []) == []
assert Enum.zip([], []) == []
end
test :unzip do
assert Enum.unzip([{:a, 1}, {:b, 2}, {:c, 3}]) == {[:a, :b, :c], [1, 2, 3]}
assert Enum.unzip([]) == {[], []}
assert Enum.unzip(%{a: 1, b: 2}) == {[:a, :b], [1, 2]}
assert Enum.unzip([foo: "a", bar: "b"]) == {[:foo, :bar], ["a", "b"]}
assert_raise FunctionClauseError, fn -> Enum.unzip([{:a, 1}, {:b, 2, "foo"}]) end
assert_raise FunctionClauseError, fn -> Enum.unzip([{1, 2, {3, {4, 5}}}]) end
assert_raise FunctionClauseError, fn -> Enum.unzip([1, 2, 3]) end
end
test :with_index do
assert Enum.with_index([]) == []
assert Enum.with_index([1, 2, 3]) == [{1, 0}, {2, 1}, {3, 2}]
end
test :max do
assert Enum.max([1]) == 1
assert Enum.max([1, 2, 3]) == 3
assert Enum.max([1, [], :a, {}]) == []
assert_raise Enum.EmptyError, fn ->
Enum.max([])
end
end
test :max_by do
assert Enum.max_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) == "aaa"
assert_raise Enum.EmptyError, fn ->
Enum.max_by([], fn(x) -> String.length(x) end)
end
end
test :min do
assert Enum.min([1]) == 1
assert Enum.min([1, 2, 3]) == 1
assert Enum.min([[], :a, {}]) == :a
assert_raise Enum.EmptyError, fn ->
Enum.min([])
end
end
test :min_by do
assert Enum.min_by(["a", "aa", "aaa"], fn(x) -> String.length(x) end) == "a"
assert_raise Enum.EmptyError, fn ->
Enum.min_by([], fn(x) -> String.length(x) end)
end
end
test :minmax do
assert Enum.minmax([1]) == {1, 1}
assert Enum.minmax([2, 3, 1]) == {1, 3}
assert Enum.minmax([[], :a, {}]) == {:a, []}
assert_raise Enum.EmptyError, fn ->
Enum.minmax([])
end
end
test :minmax_by do
assert Enum.minmax_by(["aaa", "a", "aa"], fn(x) -> String.length(x) end) == {"a", "aaa"}
assert_raise Enum.EmptyError, fn ->
Enum.minmax_by([], fn(x) -> String.length(x) end)
end
end
test :chunk do
assert Enum.chunk([1, 2, 3, 4, 5], 2) == [[1, 2], [3, 4]]
assert Enum.chunk([1, 2, 3, 4, 5], 2, 2, [6]) == [[1, 2], [3, 4], [5, 6]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2) == [[1, 2, 3], [3, 4, 5]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 2, 3) == [[1, 2], [4, 5]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 2, []) == [[1, 2, 3], [3, 4, 5], [5, 6]]
assert Enum.chunk([1, 2, 3, 4, 5, 6], 3, 3, []) == [[1, 2, 3], [4, 5, 6]]
assert Enum.chunk([1, 2, 3, 4, 5], 4, 4, 6..10) == [[1, 2, 3, 4], [5, 6, 7, 8]]
end
test :chunk_by do
assert Enum.chunk_by([1, 2, 2, 3, 4, 4, 6, 7, 7], &(rem(&1, 2) == 1)) == [[1], [2, 2], [3], [4, 4, 6], [7, 7]]
assert Enum.chunk_by([1, 2, 3, 4], fn _ -> true end) == [[1, 2, 3, 4]]
assert Enum.chunk_by([], fn _ -> true end) == []
assert Enum.chunk_by([1], fn _ -> true end) == [[1]]
end
test :slice do
assert Enum.slice([1, 2, 3, 4, 5], 0, 0) == []
assert Enum.slice([1, 2, 3, 4, 5], 0, 1) == [1]
assert Enum.slice([1, 2, 3, 4, 5], 0, 2) == [1, 2]
assert Enum.slice([1, 2, 3, 4, 5], 1, 2) == [2, 3]
assert Enum.slice([1, 2, 3, 4, 5], 1, 0) == []
assert Enum.slice([1, 2, 3, 4, 5], 2, 5) == [3, 4, 5]
assert Enum.slice([1, 2, 3, 4, 5], 2, 6) == [3, 4, 5]
assert Enum.slice([1, 2, 3, 4, 5], 5, 5) == []
assert Enum.slice([1, 2, 3, 4, 5], 6, 5) == []
assert Enum.slice([1, 2, 3, 4, 5], 6, 0) == []
assert Enum.slice([1, 2, 3, 4, 5], -6, 0) == []
assert Enum.slice([1, 2, 3, 4, 5], -6, 5) == []
assert Enum.slice([1, 2, 3, 4, 5], -2, 5) == [4, 5]
assert Enum.slice([1, 2, 3, 4, 5], -3, 1) == [3]
end
test :slice_range do
assert Enum.slice([1, 2, 3, 4, 5], 0..0) == [1]
assert Enum.slice([1, 2, 3, 4, 5], 0..1) == [1, 2]
assert Enum.slice([1, 2, 3, 4, 5], 0..2) == [1, 2, 3]
assert Enum.slice([1, 2, 3, 4, 5], 1..2) == [2, 3]
assert Enum.slice([1, 2, 3, 4, 5], 1..0) == []
assert Enum.slice([1, 2, 3, 4, 5], 2..5) == [3, 4, 5]
assert Enum.slice([1, 2, 3, 4, 5], 2..6) == [3, 4, 5]
assert Enum.slice([1, 2, 3, 4, 5], 4..4) == [5]
assert Enum.slice([1, 2, 3, 4, 5], 5..5) == []
assert Enum.slice([1, 2, 3, 4, 5], 6..5) == []
assert Enum.slice([1, 2, 3, 4, 5], 6..0) == []
assert Enum.slice([1, 2, 3, 4, 5], -6..0) == []
assert Enum.slice([1, 2, 3, 4, 5], -6..5) == []
assert Enum.slice([1, 2, 3, 4, 5], -5..-1) == [1, 2, 3, 4, 5]
assert Enum.slice([1, 2, 3, 4, 5], -5..-3) == [1, 2, 3]
assert Enum.slice([1, 2, 3, 4, 5], -6..-1) == []
assert Enum.slice([1, 2, 3, 4, 5], -6..-3) == []
end
end
defmodule EnumTest.Range do
use ExUnit.Case, async: true
test :all? do
range = 0..5
refute Enum.all?(range, fn(x) -> rem(x, 2) == 0 end)
range = 0..1
assert Enum.all?(range, fn(x) -> x < 2 end)
assert Enum.all?(range)
range = 1..0
assert Enum.all?(range)
end
test :any? do
range = 0..5
refute Enum.any?(range, &(&1 > 10))
range = 0..5
assert Enum.any?(range, &(&1 > 3))
range = 1..0
assert Enum.any?(range)
end
test :fetch! do
assert Enum.fetch!(2..6, 0) == 2
assert Enum.fetch!(2..6, 4) == 6
assert Enum.fetch!(2..6, -1) == 6
assert Enum.fetch!(2..6, -2) == 5
assert Enum.fetch!(-2..-6, 0) == -2
assert Enum.fetch!(-2..-6, 4) == -6
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!(2..6, 8)
end
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!(-2..-6, 8)
end
assert_raise Enum.OutOfBoundsError, fn ->
Enum.fetch!(2..6, -8)
end
end
test :count do
range = 1..5
assert Enum.count(range) == 5
range = 1..1
assert Enum.count(range) == 1
assert Enum.count([1, true, false, nil]) == 4
end
test :count_fun do
range = 1..5
assert Enum.count(range, fn(x) -> rem(x, 2) == 0 end) == 2
range = 1..1
assert Enum.count(range, fn(x) -> rem(x, 2) == 0 end) == 0
assert Enum.count([1, true, false, nil], & &1) == 2
end
test :chunk do
assert Enum.chunk(1..5, 2) == [[1, 2], [3, 4]]
assert Enum.chunk(1..5, 2, 2, [6]) == [[1, 2], [3, 4], [5, 6]]
assert Enum.chunk(1..6, 3, 2) == [[1, 2, 3], [3, 4, 5]]
assert Enum.chunk(1..6, 2, 3) == [[1, 2], [4, 5]]
assert Enum.chunk(1..6, 3, 2, []) == [[1, 2, 3], [3, 4, 5], [5, 6]]
assert Enum.chunk(1..5, 4, 4, 6..10) == [[1, 2, 3, 4], [5, 6, 7, 8]]
end
test :chunk_by do
assert Enum.chunk_by(1..4, fn _ -> true end) == [[1, 2, 3, 4]]
assert Enum.chunk_by(1..4, &(rem(&1, 2) == 1)) == [[1], [2], [3], [4]]
end
test :dedup do
assert Enum.dedup(1..3) == [1, 2, 3]
end
test :dedup_by do
assert Enum.dedup_by(1..3, fn _ -> 1 end) == [1]
end
test :drop do
range = 1..3
assert Enum.drop(range, 0) == [1, 2, 3]
assert Enum.drop(range, 1) == [2, 3]
assert Enum.drop(range, 2) == [3]
assert Enum.drop(range, 3) == []
assert Enum.drop(range, 4) == []
assert Enum.drop(range, -1) == [1, 2]
assert Enum.drop(range, -2) == [1]
assert Enum.drop(range, -4) == []
range = 1..0
assert Enum.drop(range, 3) == []
end
test :drop_while do
range = 0..6
assert Enum.drop_while(range, fn(x) -> x <= 3 end) == [4, 5, 6]
assert Enum.drop_while(range, fn(_) -> false end) == [0, 1, 2, 3, 4, 5, 6]
range = 0..3
assert Enum.drop_while(range, fn(x) -> x <= 3 end) == []
range = 1..0
assert Enum.drop_while(range, fn(_) -> nil end) == [1, 0]
end
test :find do
range = 2..6
assert Enum.find(range, fn(x) -> rem(x, 2) == 0 end) == 2
assert Enum.find(range, fn(x) -> rem(x, 2) == 1 end) == 3
assert Enum.find(range, fn _ -> false end) == nil
assert Enum.find(range, 0, fn _ -> false end) == 0
end
test :find_value do
range = 2..6
assert Enum.find_value(range, fn(x) -> rem(x, 2) == 1 end)
end
test :find_index do
range = 2..6
assert Enum.find_index(range, fn(x) -> rem(x, 2) == 1 end) == 1
end
test :empty? do
range = 1..0
refute Enum.empty?(range)
range = 1..2
refute Enum.empty?(range)
end
test :each do
try do
range = 1..0
assert Enum.each(range, fn(x) -> x end) == :ok
range = 1..3
assert Enum.each(range, fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok
assert Process.get(:enum_test_each) == 6
after
Process.delete(:enum_test_each)
end
try do
range = -1..-3
assert Enum.each(range, fn(x) -> Process.put(:enum_test_each, x * 2) end) == :ok
assert Process.get(:enum_test_each) == -6
after
Process.delete(:enum_test_each)
end
end
test :filter do
range = 1..3
assert Enum.filter(range, fn(x) -> rem(x, 2) == 0 end) == [2]
range = 1..6
assert Enum.filter(range, fn(x) -> rem(x, 2) == 0 end) == [2, 4, 6]
assert Enum.filter([1, 2, false, 3, nil], & &1) == [1, 2, 3]
end
test :filter_with_match do
range = 1..3
assert Enum.filter(range, &match?(1, &1)) == [1]
assert Enum.filter(range, &match?(x when x < 3, &1)) == [1, 2]
assert Enum.filter(range, &match?(_, &1)) == [1, 2, 3]
end
test :filter_map do
range = 1..3
assert Enum.filter_map(range, fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4]
range = 2..6
assert Enum.filter_map(range, fn(x) -> rem(x, 2) == 0 end, &(&1 * 2)) == [4, 8, 12]
end
test :flat_map do
range = 1..3
assert Enum.flat_map(range, fn(x) -> [x, x] end) == [1, 1, 2, 2, 3, 3]
end
test :intersperse do
range = 1..0
assert Enum.intersperse(range, true) == [1, true, 0]
range = 1..3
assert Enum.intersperse(range, false) == [1, false, 2, false, 3]
end
test :into do
assert Enum.into([a: 1, b: 2], %{}) == %{a: 1, b: 2}
assert Enum.into(%{a: 1, b: 2}, []) == [a: 1, b: 2]
assert Enum.into(3..5, [1, 2]) == [1, 2, 3, 4, 5]
assert Enum.into(1..5, []) == [1, 2, 3, 4, 5]
assert Enum.into(1..5, [], fn x -> x * 2 end) == [2, 4, 6, 8, 10]
assert Enum.into(1..3, "numbers: ", &to_string/1) == "numbers: 123"
end
test :join do
range = 1..0
assert Enum.join(range, " = ") == "1 = 0"
range = 1..3
assert Enum.join(range, " = ") == "1 = 2 = 3"
assert Enum.join(range) == "123"
end
test :map_join do
range = 1..0
assert Enum.map_join(range, " = ", &(&1 * 2)) == "2 = 0"
range = 1..3
assert Enum.map_join(range, " = ", &(&1 * 2)) == "2 = 4 = 6"
assert Enum.map_join(range, &(&1 * 2)) == "246"
end
test :map do
range = 1..3
assert Enum.map(range, fn x -> x * 2 end) == [2, 4, 6]
range = -1..-3
assert Enum.map(range, fn x -> x * 2 end) == [-2, -4, -6]
end
test :map_reduce do
range = 1..0
assert Enum.map_reduce(range, 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 0], 2}
range = 1..3
assert Enum.map_reduce(range, 1, fn(x, acc) -> {x * 2, x + acc} end) == {[2, 4, 6], 7}
end
test :max do
assert Enum.max(1..1) == 1
assert Enum.max(1..3) == 3
assert Enum.max(3..1) == 3
end
test :max_by do
assert Enum.max_by(1..1, fn(x) -> :math.pow(-2, x) end) == 1
assert Enum.max_by(1..3, fn(x) -> :math.pow(-2, x) end) == 2
end
test :min do
assert Enum.min(1..1) == 1
assert Enum.min(1..3) == 1
end
test :min_by do
assert Enum.min_by(1..1, fn(x) -> :math.pow(-2, x) end) == 1
assert Enum.min_by(1..3, fn(x) -> :math.pow(-2, x) end) == 3
end
test :partition do
range = 1..3
assert Enum.partition(range, fn(x) -> rem(x, 2) == 0 end) == {[2], [1, 3]}
end
test :reduce do
range = 1..0
assert Enum.reduce(range, 1, fn(x, acc) -> x + acc end) == 2
range = 1..3
assert Enum.reduce(range, 1, fn(x, acc) -> x + acc end) == 7
range = 1..3
assert Enum.reduce(range, fn(x, acc) -> x + acc end) == 6
end
test :reject do
range = 1..3
assert Enum.reject(range, fn(x) -> rem(x, 2) == 0 end) == [1, 3]
range = 1..6
assert Enum.reject(range, fn(x) -> rem(x, 2) == 0 end) == [1, 3, 5]
assert Enum.reject([1, true, nil, false, 2], & &1) == [nil, false]
end
test :reverse do
assert Enum.reverse(0..0) == [0]
assert Enum.reverse(1..3) == [3, 2, 1]
assert Enum.reverse(1..3, 4..6) == [3, 2, 1, 4, 5, 6]
assert Enum.reverse([1, 2, 3], 4..6) == [3, 2, 1, 4, 5, 6]
assert Enum.reverse(1..3, [4, 5, 6]) == [3, 2, 1, 4, 5, 6]
end
test :reverse_slice do
assert Enum.reverse_slice(1..6, 2, 0) == [1, 2, 3, 4, 5, 6]
assert Enum.reverse_slice(1..6, 2, 2) == [1, 2, 4, 3, 5, 6]
assert Enum.reverse_slice(1..6, 2, 4) == [1, 2, 6, 5, 4, 3]
assert Enum.reverse_slice(1..6, 2, 10000000) == [1, 2, 6, 5, 4, 3]
assert Enum.reverse_slice(1..6, 10000000, 4) == [1, 2, 3, 4, 5, 6]
assert Enum.reverse_slice(1..6, 50, 50) == [1, 2, 3, 4, 5, 6]
end
test :random_1 do
# corner cases, independent of the seed
assert Enum.random(1..1) == 1
# set a fixed seed so the test can be deterministic
# please note the order of the following assertions is important
seed1 = {1406, 407414, 139258}
seed2 = {1406, 421106, 567597}
:random.seed(seed1)
assert Enum.random(1..2) == 2
assert Enum.random(1..3) == 2
assert Enum.random(1..4) == 4
assert Enum.random(1..5) == 1
:random.seed(seed1)
assert Enum.random(?a..?z) == ?i
:random.seed(seed1)
assert Enum.random(?0..?9) == ?8
:random.seed(seed2)
assert Enum.random(1..2) == 1
assert Enum.random(1..3) == 3
assert Enum.random(1..4) == 1
assert Enum.random(1..5) == 5
:random.seed(seed2)
assert Enum.random(?a..?z) == ?a
:random.seed(seed2)
assert Enum.random(?0..?9) == ?0
end
test :random_2 do
# corner cases, independent of the seed
assert_raise FunctionClauseError, fn -> Enum.random(1..2, -1) end
assert Enum.random(1..1, 0) == []
assert Enum.random(1..1, 2) == [1]
assert Enum.random(1..2, 0) == []
# set a fixed seed so the test can be deterministic
# please note the order of the following assertions is important
seed1 = {1406, 407414, 139258}
seed2 = {1406, 421106, 567597}
:random.seed(seed1)
assert Enum.random(1..5, 1) == [2]
assert Enum.random(1..5, 2) == [4, 1]
assert Enum.random(1..5, 3) == [1, 3, 5]
assert Enum.random(1..5, 4) == [3, 5, 1, 2]
assert Enum.random(1..5, 5) == [1, 2, 5, 4, 3]
assert Enum.random(1..5, 6) == [2, 4, 5, 3, 1]
:random.seed(seed2)
assert Enum.random(1..5, 1) == [1]
assert Enum.random(1..5, 2) == [1, 5]
assert Enum.random(1..5, 3) == [5, 1, 3]
assert Enum.random(1..5, 4) == [4, 5, 2, 1]
assert Enum.random(1..5, 5) == [4, 3, 1, 5, 2]
assert Enum.random(1..5, 6) == [5, 2, 1, 4, 3]
:random.seed(seed1)
assert Enum.random(?a..?z, 1) == 'i'
assert Enum.random(?a..?z, 2) == 'cm'
assert Enum.random(?a..?z, 3) == 'alp'
assert Enum.random(?a..?z, 4) == 'tzmd'
assert Enum.random(?a..?z, 5) == 'cuxvb'
:random.seed(seed2)
assert Enum.random(?a..?z, 1) == 'a'
assert Enum.random(?a..?z, 2) == 'wk'
assert Enum.random(?a..?z, 3) == 'ayj'
assert Enum.random(?a..?z, 4) == 'rbcm'
assert Enum.random(?a..?z, 5) == 'rhzju'
:random.seed(seed1)
assert Enum.random(?0..?9, 1) == '8'
assert Enum.random(?0..?9, 2) == '07'
assert Enum.random(?0..?9, 3) == '018'
assert Enum.random(?0..?9, 4) == '0856'
assert Enum.random(?0..?9, 5) == '03698'
:random.seed(seed2)
assert Enum.random(?0..?9, 1) == '0'
assert Enum.random(?0..?9, 2) == '48'
assert Enum.random(?0..?9, 3) == '590'
assert Enum.random(?0..?9, 4) == '4805'
assert Enum.random(?0..?9, 5) == '81945'
end
test :scan do
assert Enum.scan(1..5, &(&1 + &2)) == [1, 3, 6, 10, 15]
assert Enum.scan(1..5, 0, &(&1 + &2)) == [1, 3, 6, 10, 15]
end
test :shuffle do
# set a fixed seed so the test can be deterministic
:random.seed(1374, 347975, 449264)
assert Enum.shuffle(1..5) == [2, 4, 1, 5, 3]
end
test :slice do
assert Enum.slice(1..5, 0, 0) == []
assert Enum.slice(1..5, 0, 1) == [1]
assert Enum.slice(1..5, 0, 2) == [1, 2]
assert Enum.slice(1..5, 1, 2) == [2, 3]
assert Enum.slice(1..5, 1, 0) == []
assert Enum.slice(1..5, 2, 5) == [3, 4, 5]
assert Enum.slice(1..5, 2, 6) == [3, 4, 5]
assert Enum.slice(1..5, 5, 5) == []
assert Enum.slice(1..5, 6, 5) == []
assert Enum.slice(1..5, 6, 0) == []
assert Enum.slice(1..5, -6, 0) == []
assert Enum.slice(1..5, -6, 5) == []
assert Enum.slice(1..5, -2, 5) == [4, 5]
assert Enum.slice(1..5, -3, 1) == [3]
end
test :slice_range do
assert Enum.slice(1..5, 0..0) == [1]
assert Enum.slice(1..5, 0..1) == [1, 2]
assert Enum.slice(1..5, 0..2) == [1, 2, 3]
assert Enum.slice(1..5, 1..2) == [2, 3]
assert Enum.slice(1..5, 1..0) == []
assert Enum.slice(1..5, 2..5) == [3, 4, 5]
assert Enum.slice(1..5, 2..6) == [3, 4, 5]
assert Enum.slice(1..5, 4..4) == [5]
assert Enum.slice(1..5, 5..5) == []
assert Enum.slice(1..5, 6..5) == []
assert Enum.slice(1..5, 6..0) == []
assert Enum.slice(1..5, -6..0) == []
assert Enum.slice(1..5, -6..5) == []
assert Enum.slice(1..5, -5..-1) == [1, 2, 3, 4, 5]
assert Enum.slice(1..5, -5..-3) == [1, 2, 3]
assert Enum.slice(1..5, -6..-1) == []
assert Enum.slice(1..5, -6..-3) == []
end
test :sort do
assert Enum.sort(3..1) == [1, 2, 3]
assert Enum.sort(2..1) == [1, 2]
assert Enum.sort(1..1) == [1]
assert Enum.sort(3..1, &(&1 > &2)) == [3, 2, 1]
assert Enum.sort(2..1, &(&1 > &2)) == [2, 1]
assert Enum.sort(1..1, &(&1 > &2)) == [1]
end
test :split do
range = 1..3
assert Enum.split(range, 0) == {[], [1, 2, 3]}
assert Enum.split(range, 1) == {[1], [2, 3]}
assert Enum.split(range, 2) == {[1, 2], [3]}
assert Enum.split(range, 3) == {[1, 2, 3], []}
assert Enum.split(range, 4) == {[1, 2, 3], []}
assert Enum.split(range, -1) == {[1, 2], [3]}
assert Enum.split(range, -2) == {[1], [2, 3]}
assert Enum.split(range, -3) == {[], [1, 2, 3]}
assert Enum.split(range, -10) == {[], [1, 2, 3]}
range = 1..0
assert Enum.split(range, 3) == {[1, 0], []}
end
test :split_while do
range = 1..3
assert Enum.split_while(range, fn(_) -> false end) == {[], [1, 2, 3]}
assert Enum.split_while(range, fn(_) -> nil end) == {[], [1, 2, 3]}
assert Enum.split_while(range, fn(_) -> true end) == {[1, 2, 3], []}
assert Enum.split_while(range, fn(x) -> x > 2 end) == {[], [1, 2, 3]}
assert Enum.split_while(range, fn(x) -> x > 3 end) == {[], [1, 2, 3]}
assert Enum.split_while(range, fn(x) -> x < 3 end) == {[1, 2], [3]}
assert Enum.split_while(range, fn(x) -> x end) == {[1, 2, 3], []}
range = 1..0
assert Enum.split_while(range, fn(_) -> true end) == {[1, 0], []}
end
test :sum do
assert Enum.sum(1..1) == 1
assert Enum.sum(1..3) == 6
end
test :take do
range = 1..3
assert Enum.take(range, 0) == []
assert Enum.take(range, 1) == [1]
assert Enum.take(range, 2) == [1, 2]
assert Enum.take(range, 3) == [1, 2, 3]
assert Enum.take(range, 4) == [1, 2, 3]
assert Enum.take(range, -1) == [3]
assert Enum.take(range, -2) == [2, 3]
assert Enum.take(range, -4) == [1, 2, 3]
range = 1..0
assert Enum.take(range, 3) == [1, 0]
end
test :take_every do
assert Enum.take_every(1..10, 2) == [1, 3, 5, 7, 9]
assert Enum.take_every(1..2, 2) == [1]
assert Enum.take_every(1..3, 0) == []
assert_raise FunctionClauseError, fn ->
Enum.take_every(1..3, -1)
end
end
test :take_while do
range = 1..3
assert Enum.take_while(range, fn(x) -> x > 3 end) == []
assert Enum.take_while(range, fn(x) -> x <= 1 end) == [1]
assert Enum.take_while(range, fn(x) -> x <= 3 end) == [1, 2, 3]
assert Enum.take_while(range, fn(x) -> x end) == [1, 2, 3]
assert Enum.take_while(range, fn(_) -> nil end) == []
assert Enum.take_while([], fn(_) -> true end) == []
end
test :uniq do
assert Enum.uniq(1..3) == [1, 2, 3]
assert Enum.uniq(1..3, fn x -> x end) == [1, 2, 3]
end
test :zip do
assert Enum.zip([:a, :b], 1..2) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b], 1..4) == [{:a, 1}, {:b, 2}]
assert Enum.zip([:a, :b, :c, :d], 1..2) == [{:a, 1}, {:b, 2}]
assert Enum.zip(1..2, [:a, :b]) == [{1, :a}, {2, :b}]
assert Enum.zip(1..4, [:a, :b]) == [{1, :a}, {2, :b}]
assert Enum.zip(1..2, [:a, :b, :c, :d]) == [{1, :a}, {2, :b}]
assert Enum.zip(1..2, 1..2) == [{1, 1}, {2, 2}]
assert Enum.zip(1..4, 1..2) == [{1, 1}, {2, 2}]
assert Enum.zip(1..2, 1..4) == [{1, 1}, {2, 2}]
end
test :with_index do
assert Enum.with_index(1..3) == [{1, 0}, {2, 1}, {3, 2}]
end
end
defmodule EnumTest.SideEffects do
use ExUnit.Case, async: true
import ExUnit.CaptureIO
import PathHelpers
test "take with side effects" do
stream = Stream.unfold(1, fn x -> IO.puts x; {x, x + 1} end)
assert capture_io(fn ->
Enum.take(stream, 1)
end) == "1\n"
end
test "take does not consume next without a need" do
path = tmp_path("oneliner.txt")
File.mkdir(Path.dirname(path))
try do
File.write!(path, "ONE")
File.open!(path, [], fn file ->
iterator = IO.stream(file, :line)
assert Enum.take(iterator, 1) == ["ONE"]
assert Enum.take(iterator, 5) == []
end)
after
File.rm(path)
end
end
test "take with no item works as no-op" do
iterator = File.stream!(fixture_path("unknown.txt"))
assert Enum.take(iterator, 0) == []
assert Enum.take(iterator, 0) == []
assert Enum.take(iterator, 0) == []
assert Enum.take(iterator, 0) == []
end
end
| 32.211915 | 114 | 0.505826 |
f7ba300e21c6e41441250eeb6f18c69e47534fba | 967 | ex | Elixir | lib/cforum_web/controllers/cite/vote_controller.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | null | null | null | lib/cforum_web/controllers/cite/vote_controller.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | null | null | null | lib/cforum_web/controllers/cite/vote_controller.ex | campingrider/cforum_ex | cf27684c47d6dc26c9c37a946f1c729a79d27c70 | [
"MIT"
] | null | null | null | defmodule CforumWeb.Cite.VoteController do
use CforumWeb, :controller
alias Cforum.Cites
def vote(conn, %{"type" => type}) when type in ["up", "down"] do
# take back the current vote in all cases; if the type is the same
# as already voted, we just take it back. If it is different, we
# create a new vote with the chosen new value
only_take_back = Cites.voted?(conn.assigns.cite, conn.assigns[:current_user], type)
Cites.take_back_vote(conn.assigns.cite, conn.assigns[:current_user])
if not only_take_back,
do: Cites.vote(conn.assigns.cite, conn.assigns[:current_user], type)
conn
|> put_flash(:info, gettext("Successfully voted for cite."))
|> redirect(to: Path.cite_path(conn, :show, conn.assigns.cite))
end
def load_resource(conn), do: Plug.Conn.assign(conn, :cite, Cites.get_cite!(conn.params["id"]))
def allowed?(conn, _, _), do: Abilities.signed_in?(conn) && conn.assigns.cite.archived == false
end
| 38.68 | 97 | 0.701138 |
f7ba8ef4cee55f30974d0561ae0fd9761d3fb13b | 433 | exs | Elixir | test/twinkly_maha_web/views/error_view_test.exs | TraceyOnim/TwinklyMaHa | cb9d907d8807e00f1e6e44085fd6f39ae32370b6 | [
"MIT"
] | 1 | 2020-07-16T19:49:53.000Z | 2020-07-16T19:49:53.000Z | test/twinkly_maha_web/views/error_view_test.exs | TraceyOnim/TwinklyMaHa | cb9d907d8807e00f1e6e44085fd6f39ae32370b6 | [
"MIT"
] | 68 | 2021-06-16T15:30:11.000Z | 2022-03-07T08:38:19.000Z | test/twinkly_maha_web/views/error_view_test.exs | sFractal-Podii/TwinklyMaHa | cfcffc355aad7200bef3c4d8ab9b8e179f02b26c | [
"MIT"
] | 5 | 2020-07-14T05:03:08.000Z | 2021-06-15T18:21:19.000Z | defmodule TwinklyMahaWeb.ErrorViewTest do
use TwinklyMahaWeb.ConnCase, async: true
# Bring render/3 and render_to_string/3 for testing custom views
import Phoenix.View
test "renders 404.html" do
assert render_to_string(TwinklyMahaWeb.ErrorView, "404.html", []) == "Not Found"
end
test "renders 500.html" do
assert render_to_string(TwinklyMahaWeb.ErrorView, "500.html", []) == "Internal Server Error"
end
end
| 28.866667 | 96 | 0.743649 |
f7ba9e5484629347a3f9fe1a28c9b28136afcb55 | 2,824 | ex | Elixir | lib/xslt.ex | samacs/xslt | 503f4ca7de86eb63643c8e6288dd961e2f4c167e | [
"MIT"
] | null | null | null | lib/xslt.ex | samacs/xslt | 503f4ca7de86eb63643c8e6288dd961e2f4c167e | [
"MIT"
] | null | null | null | lib/xslt.ex | samacs/xslt | 503f4ca7de86eb63643c8e6288dd961e2f4c167e | [
"MIT"
] | null | null | null | defmodule Xslt do
@moduledoc """
Xslt is a lightweight wrapper around xsltproc. If you need to transform
XML using XSLT, this is probably the easiest way to do it.
Using Porcelain, this library calls the xsltproc binary with the pathnames to
your XML files. It will then return the transformed XML in response.
"""
alias Porcelain.Result
@type result :: {:ok, String.t} | {:error, String.t}
@spec transform(template :: String.t, xml :: String.t) :: result
@doc """
Transforms XML using an XSLT template.
## Example:
iex> xml = Path.expand("./test/fixtures/product/simple_example.xml")
...> template = Path.expand("./test/fixtures/product/xslt_template.xml")
...> {:ok, html} = Xslt.transform(template, xml)
...> Regex.replace(~r/\\n/, html, "")
"<html> <head><title>Book Review</title></head> <body> <bookreview> <title lorem=\\"ipsum\\">wewt</title> </bookreview> </body></html>"
"""
def transform(template, xml) when is_bitstring(template) and is_bitstring(xml) do
Porcelain.shell("xsltproc #{template} #{xml}")
|> handle_output
end
def transform(_, xml) when is_bitstring(xml), do: File.read(xml)
def transform(_, _), do: {:error, :bad_arguments}
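# transform/3 also accepts a string of extra xsltproc command-line options,
# passed through verbatim before the stylesheet path. Illustrative usage only
# (--stringparam is xsltproc's flag; the parameter name and value here are made up):
#
#     Xslt.transform(template, xml, "--stringparam title wewt")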
def transform(template, xml, params) when is_bitstring(template) and is_bitstring(xml) and is_bitstring(params) do
Porcelain.shell("xsltproc #{params} #{template} #{xml}")
|> handle_output
end
def transform(template, xml, _) when is_bitstring(template) and is_bitstring(xml), do: transform(template, xml)
def transform(_, xml, _) when is_bitstring(xml), do: File.read(xml)
def transform(_, _, _), do: {:error, :bad_arguments}
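# Map xsltproc exit statuses onto result tuples; the messages below follow the
# return values documented in the xsltproc(1) man page.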
@spec handle_output(result :: Result.t) :: result
defp handle_output(%Result{status: 0, out: output}),
do: {:ok, output}
defp handle_output(%Result{status: 1}),
do: {:error, "No argument"}
defp handle_output(%Result{status: 2}),
do: {:error, "Too many parameters"}
defp handle_output(%Result{status: 3}),
do: {:error, "Unknown option"}
defp handle_output(%Result{status: 4}),
do: {:error, "Failed to parse the stylesheet"}
defp handle_output(%Result{status: 5}),
do: {:error, "Error in the stylesheet"}
defp handle_output(%Result{status: 6}),
do: {:error, "Error in one of the documents"}
defp handle_output(%Result{status: 7}),
do: {:error, "Unsupported xsl:output method"}
defp handle_output(%Result{status: 8}),
do: {:error, "String parameter contains both quote and double-quotes"}
defp handle_output(%Result{status: 9}),
do: {:error, "Internal Processing error"}
defp handle_output(%Result{status: 10}),
do: {:error, "Processing was stopped by a terminating message"}
defp handle_output(%Result{status: 11}),
do: {:error, "Could not write the result to the output file"}
end
| 34.439024 | 145 | 0.682011 |
f7bad383c5b1b2f0be2ee8d00fbaee0b46a07c8f | 6,067 | ex | Elixir | lib/elixir_ex_aliyun_ots_table_store_tunnel_get_rpo_request.ex | hou8/tablestore_protos | 1a3223326b92bbe196d57ce4dd19b5a8db1c728d | [
"MIT"
] | null | null | null | lib/elixir_ex_aliyun_ots_table_store_tunnel_get_rpo_request.ex | hou8/tablestore_protos | 1a3223326b92bbe196d57ce4dd19b5a8db1c728d | [
"MIT"
] | 1 | 2022-02-08T06:37:02.000Z | 2022-02-08T06:37:02.000Z | lib/elixir_ex_aliyun_ots_table_store_tunnel_get_rpo_request.ex | hou8/tablestore_protos | 1a3223326b92bbe196d57ce4dd19b5a8db1c728d | [
"MIT"
] | 2 | 2022-01-24T06:13:03.000Z | 2022-01-24T08:33:41.000Z | # credo:disable-for-this-file
defmodule(ExAliyunOts.TableStoreTunnel.GetRpoRequest) do
@moduledoc false
(
defstruct(tunnel_id: nil)
(
(
@spec encode(struct) :: {:ok, iodata} | {:error, any}
def(encode(msg)) do
try do
{:ok, encode!(msg)}
rescue
e in [Protox.EncodingError, Protox.RequiredFieldsError] ->
{:error, e}
end
end
@spec encode!(struct) :: iodata | no_return
def(encode!(msg)) do
[] |> encode_tunnel_id(msg)
end
)
[]
[
defp(encode_tunnel_id(acc, msg)) do
try do
case(msg.tunnel_id) do
nil ->
acc
_ ->
[acc, "\n", Protox.Encode.encode_string(msg.tunnel_id)]
end
rescue
ArgumentError ->
reraise(Protox.EncodingError.new(:tunnel_id, "invalid field value"), __STACKTRACE__)
end
end
]
[]
)
(
(
@spec decode(binary) :: {:ok, struct} | {:error, any}
def(decode(bytes)) do
try do
{:ok, decode!(bytes)}
rescue
e in [Protox.DecodingError, Protox.IllegalTagError, Protox.RequiredFieldsError] ->
{:error, e}
end
end
(
@spec decode!(binary) :: struct | no_return
def(decode!(bytes)) do
parse_key_value(bytes, struct(ExAliyunOts.TableStoreTunnel.GetRpoRequest))
end
)
)
(
@spec parse_key_value(binary, struct) :: struct
defp(parse_key_value(<<>>, msg)) do
msg
end
defp(parse_key_value(bytes, msg)) do
{field, rest} =
case(Protox.Decode.parse_key(bytes)) do
{0, _, _} ->
raise(%Protox.IllegalTagError{})
{1, _, bytes} ->
{len, bytes} = Protox.Varint.decode(bytes)
{delimited, rest} = Protox.Decode.parse_delimited(bytes, len)
{[tunnel_id: delimited], rest}
{tag, wire_type, rest} ->
{_, rest} = Protox.Decode.parse_unknown(tag, wire_type, rest)
{[], rest}
end
msg_updated = struct(msg, field)
parse_key_value(rest, msg_updated)
end
)
[]
)
(
@spec json_decode(iodata(), keyword()) :: {:ok, struct()} | {:error, any()}
def(json_decode(input, opts \\ [])) do
try do
{:ok, json_decode!(input, opts)}
rescue
e in Protox.JsonDecodingError ->
{:error, e}
end
end
@spec json_decode!(iodata(), keyword()) :: struct() | no_return()
def(json_decode!(input, opts \\ [])) do
{json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :decode)
Protox.JsonDecode.decode!(
input,
ExAliyunOts.TableStoreTunnel.GetRpoRequest,
&json_library_wrapper.decode!(json_library, &1)
)
end
@spec json_encode(struct(), keyword()) :: {:ok, iodata()} | {:error, any()}
def(json_encode(msg, opts \\ [])) do
try do
{:ok, json_encode!(msg, opts)}
rescue
e in Protox.JsonEncodingError ->
{:error, e}
end
end
@spec json_encode!(struct(), keyword()) :: iodata() | no_return()
def(json_encode!(msg, opts \\ [])) do
{json_library_wrapper, json_library} = Protox.JsonLibrary.get_library(opts, :encode)
Protox.JsonEncode.encode!(msg, &json_library_wrapper.encode!(json_library, &1))
end
)
@deprecated "Use fields_defs()/0 instead"
@spec defs() :: %{
required(non_neg_integer) => {atom, Protox.Types.kind(), Protox.Types.type()}
}
def(defs()) do
%{1 => {:tunnel_id, {:scalar, ""}, :string}}
end
@deprecated "Use fields_defs()/0 instead"
@spec defs_by_name() :: %{
required(atom) => {non_neg_integer, Protox.Types.kind(), Protox.Types.type()}
}
def(defs_by_name()) do
%{tunnel_id: {1, {:scalar, ""}, :string}}
end
@spec fields_defs() :: list(Protox.Field.t())
def(fields_defs()) do
[
%{
__struct__: Protox.Field,
json_name: "tunnelId",
kind: {:scalar, ""},
label: :optional,
name: :tunnel_id,
tag: 1,
type: :string
}
]
end
[
@spec(field_def(atom) :: {:ok, Protox.Field.t()} | {:error, :no_such_field}),
(
def(field_def(:tunnel_id)) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "tunnelId",
kind: {:scalar, ""},
label: :optional,
name: :tunnel_id,
tag: 1,
type: :string
}}
end
def(field_def("tunnelId")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "tunnelId",
kind: {:scalar, ""},
label: :optional,
name: :tunnel_id,
tag: 1,
type: :string
}}
end
def(field_def("tunnel_id")) do
{:ok,
%{
__struct__: Protox.Field,
json_name: "tunnelId",
kind: {:scalar, ""},
label: :optional,
name: :tunnel_id,
tag: 1,
type: :string
}}
end
),
def(field_def(_)) do
{:error, :no_such_field}
end
]
[]
@spec required_fields() :: []
def(required_fields()) do
[]
end
@spec syntax() :: atom
def(syntax()) do
:proto2
end
[
@spec(default(atom) :: {:ok, boolean | integer | String.t() | float} | {:error, atom}),
def(default(:tunnel_id)) do
{:ok, ""}
end,
def(default(_)) do
{:error, :no_such_field}
end
]
)
end | 25.817021 | 98 | 0.483435 |
f7bae28032b8ff60c34a39f54ab39d4f370f8156 | 2,878 | exs | Elixir | apps/nerves_hub_www/test/nerves_hub_www_web/controllers/device_controller_test.exs | verypossible/nerves_hub_web | 9c7e1fbb5232a43ee2706cd2a6b39a30fcb7dfed | [
"Apache-2.0"
] | 1 | 2020-08-04T14:13:24.000Z | 2020-08-04T14:13:24.000Z | apps/nerves_hub_www/test/nerves_hub_www_web/controllers/device_controller_test.exs | verypossible/nerves_hub_web | 9c7e1fbb5232a43ee2706cd2a6b39a30fcb7dfed | [
"Apache-2.0"
] | 1 | 2020-09-08T15:15:50.000Z | 2020-09-08T16:13:28.000Z | apps/nerves_hub_www/test/nerves_hub_www_web/controllers/device_controller_test.exs | verypossible/nerves_hub_web | 9c7e1fbb5232a43ee2706cd2a6b39a30fcb7dfed | [
"Apache-2.0"
] | null | null | null | defmodule NervesHubWWWWeb.DeviceControllerTest do
use NervesHubWWWWeb.ConnCase.Browser, async: false
alias NervesHubWebCore.Devices
alias NervesHubWebCore.Fixtures
alias NervesHubDevice.Presence
setup %{user: user, org: org} do
[product: Fixtures.product_fixture(user, org)]
end
describe "new device" do
test "renders form with valid request params", %{conn: conn, org: org, product: product} do
new_conn = get(conn, Routes.device_path(conn, :new, org.name, product.name))
assert html_response(new_conn, 200) =~ "Add Device"
end
end
describe "create device" do
test "redirects to show when data is valid", %{
conn: conn,
org: org,
product: product
} do
device_params = %{
identifier: "device_identifier",
tags: "beta, beta-edge"
}
# check that we end up in the right place
create_conn =
post(conn, Routes.device_path(conn, :create, org.name, product.name),
device: device_params
)
assert redirected_to(create_conn, 302) =~
Routes.device_path(conn, :index, org.name, product.name)
# check that the proper creation side effects took place
conn = get(conn, Routes.device_path(conn, :index, org.name, product.name))
assert html_response(conn, 200) =~ device_params.identifier
end
end
describe "delete device" do
test "deletes chosen device", %{conn: conn, org: org, product: product} do
org_key = Fixtures.org_key_fixture(org)
firmware = Fixtures.firmware_fixture(org_key, product)
Fixtures.device_fixture(org, product, firmware)
[to_delete | _] = Devices.get_devices_by_org_id_and_product_id(org.id, product.id)
conn =
delete(
conn,
Routes.device_path(conn, :delete, org.name, product.name, to_delete.identifier)
)
assert redirected_to(conn) == Routes.device_path(conn, :index, org.name, product.name)
conn =
get(conn, Routes.device_path(conn, :show, org.name, product.name, to_delete.identifier))
assert html_response(conn, 404)
end
end
describe "console" do
test "shows information about device", %{conn: conn, org: org, product: product} do
org_key = Fixtures.org_key_fixture(org)
firmware = Fixtures.firmware_fixture(org_key, product)
device = Fixtures.device_fixture(org, product, firmware)
Presence.track(self(), "product:#{product.id}:devices", device.id, %{
console_available: true,
console_version: "0.9.0"
})
result =
get(conn, Routes.device_path(conn, :console, org.name, product.name, device.identifier))
assert html_response(result, 200) =~ "<h1>#{device.identifier}</h1>"
assert html_response(result, 200) =~ "Health"
assert html_response(result, 200) =~ "Status"
end
end
end
| 32.337079 | 96 | 0.664698 |
f7baf3363690388c50f70ca66173d2af9b4e12c0 | 1,352 | exs | Elixir | test/subsets_test.exs | toddharding/subsets | c889e045edb21e99b9ab374c3d676fc910e09d4d | [
"BSD-3-Clause"
] | null | null | null | test/subsets_test.exs | toddharding/subsets | c889e045edb21e99b9ab374c3d676fc910e09d4d | [
"BSD-3-Clause"
] | null | null | null | test/subsets_test.exs | toddharding/subsets | c889e045edb21e99b9ab374c3d676fc910e09d4d | [
"BSD-3-Clause"
] | null | null | null | defmodule SubsetsTest do
use ExUnit.Case
doctest Subsets
test "number of subsets in [1, 2, 3]" do
assert Subsets.number_of_sets([1, 2, 3]) == 7
end
test "subsets of []" do
assert Subsets.generate([]) == []
end
test "subsets of [1]" do
assert Subsets.generate([1]) == [[1]]
end
test "subsets of [1, 2]" do
set = MapSet.new([1, 2])
subset = set
|> MapSet.to_list()
|> Subsets.generate
|> MapSet.new
wanted_subset = MapSet.new([[1], [2], [1, 2]])
assert MapSet.equal?(subset, wanted_subset)
end
test "subsets of [1, 2, 3]" do
set = MapSet.new([1, 2, 3])
subset = set
|> MapSet.to_list()
|> Subsets.generate
|> MapSet.new
wanted_subset = MapSet.new([[1, 2], [1, 3], [2, 3], [1], [2], [3], [1, 2, 3]])
assert MapSet.equal?(subset, wanted_subset)
end
test "subsets of [1, 2, 3, 4]" do
set = MapSet.new([1, 2, 3, 4])
subset = set
|> MapSet.to_list()
|> Subsets.generate
|> MapSet.new
# if length of set > 1 then generate subsets
wanted_subset = MapSet.new([
[1], [2], [3], [4], [1, 2, 3, 4],
[1, 2, 3], [1, 2, 4], [2, 3, 4], [1, 3, 4],
[1, 2], [1, 3], [2, 3],
[1, 4], [2, 4],
[2, 3], [2, 4], [3, 4]
])
assert MapSet.equal?(subset, wanted_subset)
end
end
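# A minimal Subsets.generate/1 satisfying the tests above might look like the
# sketch below (illustrative only; the actual implementation in this repo may differ):
#
#     def generate(list) do
#       list
#       |> Enum.reduce([[]], fn x, acc -> acc ++ Enum.map(acc, &(&1 ++ [x])) end)
#       |> Enum.reject(&(&1 == []))
#     end
#
# number_of_sets/1 could then simply be length(generate(list)).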
| 22.533333 | 82 | 0.516272 |
f7bb3405083512f9c3e97ee70049fc0781800a71 | 134 | exs | Elixir | test/line_notify_client_test.exs | takkanm/line_notify_client | 39a4de6c05cef6e82758f1a702464e41b4102475 | [
"MIT"
] | null | null | null | test/line_notify_client_test.exs | takkanm/line_notify_client | 39a4de6c05cef6e82758f1a702464e41b4102475 | [
"MIT"
] | null | null | null | test/line_notify_client_test.exs | takkanm/line_notify_client | 39a4de6c05cef6e82758f1a702464e41b4102475 | [
"MIT"
] | null | null | null | defmodule LineNotifyClientTest do
use ExUnit.Case
doctest LineNotifyClient
test "the truth" do
assert 1 + 1 == 2
end
end
| 14.888889 | 33 | 0.716418 |
f7bb517d183aad92259c159f495bce1ca56314fc | 1,747 | ex | Elixir | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/frequency_cap.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2021-12-20T03:40:53.000Z | 2021-12-20T03:40:53.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/frequency_cap.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | 1 | 2020-08-18T00:11:23.000Z | 2020-08-18T00:44:16.000Z | clients/dfa_reporting/lib/google_api/dfa_reporting/v33/model/frequency_cap.ex | pojiro/elixir-google-api | 928496a017d3875a1929c6809d9221d79404b910 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# NOTE: This file is auto generated by the elixir code generator program.
# Do not edit this file manually.
defmodule GoogleApi.DFAReporting.V33.Model.FrequencyCap do
@moduledoc """
Frequency Cap.
## Attributes
* `duration` (*type:* `String.t`, *default:* `nil`) - Duration of time, in seconds, for this frequency cap. The maximum duration is 90 days. Acceptable values are 1 to 7776000, inclusive.
* `impressions` (*type:* `String.t`, *default:* `nil`) - Number of times an individual user can be served the ad within the specified duration. Acceptable values are 1 to 15, inclusive.
"""
use GoogleApi.Gax.ModelBase
@type t :: %__MODULE__{
:duration => String.t() | nil,
:impressions => String.t() | nil
}
field(:duration)
field(:impressions)
end
defimpl Poison.Decoder, for: GoogleApi.DFAReporting.V33.Model.FrequencyCap do
def decode(value, options) do
GoogleApi.DFAReporting.V33.Model.FrequencyCap.decode(value, options)
end
end
defimpl Poison.Encoder, for: GoogleApi.DFAReporting.V33.Model.FrequencyCap do
def encode(value, options) do
GoogleApi.Gax.ModelBase.encode(value, options)
end
end
| 34.94 | 191 | 0.728678 |